Dataset columns: prompt (string, 45 to 17.8k characters), completion (string, 6 to 107 characters), api (string, 12 to 42 characters).
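Each row pairs a prompt (a partial Python source file that stops mid-expression) with the completion that finishes it and the fully qualified sqlmodel API that the completion exercises. The sketch below shows one way such a split could be loaded and inspected with the Hugging Face datasets library; the repository id "example/sqlmodel-api-completions" and the split name are placeholders, not the real location of this data.

from datasets import load_dataset

# Hypothetical repository id -- substitute the actual dataset path.
ds = load_dataset("example/sqlmodel-api-completions", split="train")

for row in ds.select(range(3)):
    # The prompt ends mid-expression; the completion supplies the missing call,
    # and "api" names the fully qualified symbol it resolves to.
    print("prompt tail:", row["prompt"][-60:])
    print("completion :", row["completion"])
    print("api        :", row["api"])
    print("-" * 40)

The rows below follow the same prompt / completion / api order; prompts appear here with their original newlines collapsed onto a single line.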
from typing import List, Optional from functools import wraps from uuid import UUID from sqlalchemy.sql.schema import UniqueConstraint from sqlmodel import Field, Relationship, SQLModel # monkeypath from https://github.com/tiangolo/sqlmodel/issues/9 # without this all database fields are indexed be default def set_default_index(func): """Decorator to set default index for SQLModel Can be removed when https://github.com/tiangolo/sqlmodel/pull/11 is merged """ @wraps(func) def inner(*args, index=False, **kwargs): return func(*args, index=index, **kwargs) return inner # monkey patch field with default index=False # this works as long as we always call Field() Field = set_default_index(Field) class TopicModelBase(SQLModel): model_id: UUID = Field() version: int = Field(default=1) class TopicModel(TopicModelBase, table=True): __tablename__ = "topic_model" __table_args__ = (UniqueConstraint("model_id", "version", name="_model_id_version_uc"),) id: Optional[int] =
Field(primary_key=True, nullable=False)
sqlmodel.Field
from typing import List, Optional from functools import wraps from uuid import UUID from sqlalchemy.sql.schema import UniqueConstraint from sqlmodel import Field, Relationship, SQLModel # monkeypath from https://github.com/tiangolo/sqlmodel/issues/9 # without this all database fields are indexed be default def set_default_index(func): """Decorator to set default index for SQLModel Can be removed when https://github.com/tiangolo/sqlmodel/pull/11 is merged """ @wraps(func) def inner(*args, index=False, **kwargs): return func(*args, index=index, **kwargs) return inner # monkey patch field with default index=False # this works as long as we always call Field() Field = set_default_index(Field) class TopicModelBase(SQLModel): model_id: UUID = Field() version: int = Field(default=1) class TopicModel(TopicModelBase, table=True): __tablename__ = "topic_model" __table_args__ = (UniqueConstraint("model_id", "version", name="_model_id_version_uc"),) id: Optional[int] = Field(primary_key=True, nullable=False) # NOQA: A003 topics: List["Topic"] = Relationship( back_populates="topic_model", sa_relationship_kwargs={"cascade": "all,delete"} ) class WordBase(SQLModel): name: str =
Field()
sqlmodel.Field
from typing import List, Optional from functools import wraps from uuid import UUID from sqlalchemy.sql.schema import UniqueConstraint from sqlmodel import Field, Relationship, SQLModel # monkeypath from https://github.com/tiangolo/sqlmodel/issues/9 # without this all database fields are indexed be default def set_default_index(func): """Decorator to set default index for SQLModel Can be removed when https://github.com/tiangolo/sqlmodel/pull/11 is merged """ @wraps(func) def inner(*args, index=False, **kwargs): return func(*args, index=index, **kwargs) return inner # monkey patch field with default index=False # this works as long as we always call Field() Field = set_default_index(Field) class TopicModelBase(SQLModel): model_id: UUID = Field() version: int = Field(default=1) class TopicModel(TopicModelBase, table=True): __tablename__ = "topic_model" __table_args__ = (UniqueConstraint("model_id", "version", name="_model_id_version_uc"),) id: Optional[int] = Field(primary_key=True, nullable=False) # NOQA: A003 topics: List["Topic"] = Relationship( back_populates="topic_model", sa_relationship_kwargs={"cascade": "all,delete"} ) class WordBase(SQLModel): name: str = Field() score: float =
Field()
sqlmodel.Field
from typing import List, Optional from functools import wraps from uuid import UUID from sqlalchemy.sql.schema import UniqueConstraint from sqlmodel import Field, Relationship, SQLModel # monkeypath from https://github.com/tiangolo/sqlmodel/issues/9 # without this all database fields are indexed be default def set_default_index(func): """Decorator to set default index for SQLModel Can be removed when https://github.com/tiangolo/sqlmodel/pull/11 is merged """ @wraps(func) def inner(*args, index=False, **kwargs): return func(*args, index=index, **kwargs) return inner # monkey patch field with default index=False # this works as long as we always call Field() Field = set_default_index(Field) class TopicModelBase(SQLModel): model_id: UUID = Field() version: int = Field(default=1) class TopicModel(TopicModelBase, table=True): __tablename__ = "topic_model" __table_args__ = (UniqueConstraint("model_id", "version", name="_model_id_version_uc"),) id: Optional[int] = Field(primary_key=True, nullable=False) # NOQA: A003 topics: List["Topic"] = Relationship( back_populates="topic_model", sa_relationship_kwargs={"cascade": "all,delete"} ) class WordBase(SQLModel): name: str = Field() score: float = Field() class Word(WordBase, table=True): id: Optional[int] =
Field(primary_key=True, nullable=False)
sqlmodel.Field
from typing import List, Optional from functools import wraps from uuid import UUID from sqlalchemy.sql.schema import UniqueConstraint from sqlmodel import Field, Relationship, SQLModel # monkeypath from https://github.com/tiangolo/sqlmodel/issues/9 # without this all database fields are indexed be default def set_default_index(func): """Decorator to set default index for SQLModel Can be removed when https://github.com/tiangolo/sqlmodel/pull/11 is merged """ @wraps(func) def inner(*args, index=False, **kwargs): return func(*args, index=index, **kwargs) return inner # monkey patch field with default index=False # this works as long as we always call Field() Field = set_default_index(Field) class TopicModelBase(SQLModel): model_id: UUID = Field() version: int = Field(default=1) class TopicModel(TopicModelBase, table=True): __tablename__ = "topic_model" __table_args__ = (UniqueConstraint("model_id", "version", name="_model_id_version_uc"),) id: Optional[int] = Field(primary_key=True, nullable=False) # NOQA: A003 topics: List["Topic"] = Relationship( back_populates="topic_model", sa_relationship_kwargs={"cascade": "all,delete"} ) class WordBase(SQLModel): name: str = Field() score: float = Field() class Word(WordBase, table=True): id: Optional[int] = Field(primary_key=True, nullable=False) # NOQA: A003 topic_id: int =
Field(foreign_key="topic.id")
sqlmodel.Field
from typing import List, Optional from functools import wraps from uuid import UUID from sqlalchemy.sql.schema import UniqueConstraint from sqlmodel import Field, Relationship, SQLModel # monkeypath from https://github.com/tiangolo/sqlmodel/issues/9 # without this all database fields are indexed be default def set_default_index(func): """Decorator to set default index for SQLModel Can be removed when https://github.com/tiangolo/sqlmodel/pull/11 is merged """ @wraps(func) def inner(*args, index=False, **kwargs): return func(*args, index=index, **kwargs) return inner # monkey patch field with default index=False # this works as long as we always call Field() Field = set_default_index(Field) class TopicModelBase(SQLModel): model_id: UUID = Field() version: int = Field(default=1) class TopicModel(TopicModelBase, table=True): __tablename__ = "topic_model" __table_args__ = (UniqueConstraint("model_id", "version", name="_model_id_version_uc"),) id: Optional[int] = Field(primary_key=True, nullable=False) # NOQA: A003 topics: List["Topic"] = Relationship( back_populates="topic_model", sa_relationship_kwargs={"cascade": "all,delete"} ) class WordBase(SQLModel): name: str = Field() score: float = Field() class Word(WordBase, table=True): id: Optional[int] = Field(primary_key=True, nullable=False) # NOQA: A003 topic_id: int = Field(foreign_key="topic.id") topic: "Topic" = Relationship( back_populates="top_words", sa_relationship_kwargs={"cascade": "all,delete"} ) class TopicBase(SQLModel): name: str =
Field()
sqlmodel.Field
from typing import List, Optional from functools import wraps from uuid import UUID from sqlalchemy.sql.schema import UniqueConstraint from sqlmodel import Field, Relationship, SQLModel # monkeypath from https://github.com/tiangolo/sqlmodel/issues/9 # without this all database fields are indexed be default def set_default_index(func): """Decorator to set default index for SQLModel Can be removed when https://github.com/tiangolo/sqlmodel/pull/11 is merged """ @wraps(func) def inner(*args, index=False, **kwargs): return func(*args, index=index, **kwargs) return inner # monkey patch field with default index=False # this works as long as we always call Field() Field = set_default_index(Field) class TopicModelBase(SQLModel): model_id: UUID = Field() version: int = Field(default=1) class TopicModel(TopicModelBase, table=True): __tablename__ = "topic_model" __table_args__ = (UniqueConstraint("model_id", "version", name="_model_id_version_uc"),) id: Optional[int] = Field(primary_key=True, nullable=False) # NOQA: A003 topics: List["Topic"] = Relationship( back_populates="topic_model", sa_relationship_kwargs={"cascade": "all,delete"} ) class WordBase(SQLModel): name: str = Field() score: float = Field() class Word(WordBase, table=True): id: Optional[int] = Field(primary_key=True, nullable=False) # NOQA: A003 topic_id: int = Field(foreign_key="topic.id") topic: "Topic" = Relationship( back_populates="top_words", sa_relationship_kwargs={"cascade": "all,delete"} ) class TopicBase(SQLModel): name: str = Field() count: int =
Field()
sqlmodel.Field
from typing import List, Optional from functools import wraps from uuid import UUID from sqlalchemy.sql.schema import UniqueConstraint from sqlmodel import Field, Relationship, SQLModel # monkeypath from https://github.com/tiangolo/sqlmodel/issues/9 # without this all database fields are indexed be default def set_default_index(func): """Decorator to set default index for SQLModel Can be removed when https://github.com/tiangolo/sqlmodel/pull/11 is merged """ @wraps(func) def inner(*args, index=False, **kwargs): return func(*args, index=index, **kwargs) return inner # monkey patch field with default index=False # this works as long as we always call Field() Field = set_default_index(Field) class TopicModelBase(SQLModel): model_id: UUID = Field() version: int = Field(default=1) class TopicModel(TopicModelBase, table=True): __tablename__ = "topic_model" __table_args__ = (UniqueConstraint("model_id", "version", name="_model_id_version_uc"),) id: Optional[int] = Field(primary_key=True, nullable=False) # NOQA: A003 topics: List["Topic"] = Relationship( back_populates="topic_model", sa_relationship_kwargs={"cascade": "all,delete"} ) class WordBase(SQLModel): name: str = Field() score: float = Field() class Word(WordBase, table=True): id: Optional[int] = Field(primary_key=True, nullable=False) # NOQA: A003 topic_id: int = Field(foreign_key="topic.id") topic: "Topic" = Relationship( back_populates="top_words", sa_relationship_kwargs={"cascade": "all,delete"} ) class TopicBase(SQLModel): name: str = Field() count: int = Field() topic_index: int =
Field()
sqlmodel.Field
from typing import List, Optional from functools import wraps from uuid import UUID from sqlalchemy.sql.schema import UniqueConstraint from sqlmodel import Field, Relationship, SQLModel # monkeypath from https://github.com/tiangolo/sqlmodel/issues/9 # without this all database fields are indexed be default def set_default_index(func): """Decorator to set default index for SQLModel Can be removed when https://github.com/tiangolo/sqlmodel/pull/11 is merged """ @wraps(func) def inner(*args, index=False, **kwargs): return func(*args, index=index, **kwargs) return inner # monkey patch field with default index=False # this works as long as we always call Field() Field = set_default_index(Field) class TopicModelBase(SQLModel): model_id: UUID = Field() version: int = Field(default=1) class TopicModel(TopicModelBase, table=True): __tablename__ = "topic_model" __table_args__ = (UniqueConstraint("model_id", "version", name="_model_id_version_uc"),) id: Optional[int] = Field(primary_key=True, nullable=False) # NOQA: A003 topics: List["Topic"] = Relationship( back_populates="topic_model", sa_relationship_kwargs={"cascade": "all,delete"} ) class WordBase(SQLModel): name: str = Field() score: float = Field() class Word(WordBase, table=True): id: Optional[int] = Field(primary_key=True, nullable=False) # NOQA: A003 topic_id: int = Field(foreign_key="topic.id") topic: "Topic" = Relationship( back_populates="top_words", sa_relationship_kwargs={"cascade": "all,delete"} ) class TopicBase(SQLModel): name: str = Field() count: int = Field() topic_index: int = Field() class TopicWithWords(TopicBase): top_words: List["WordBase"] =
Field(default=[])
sqlmodel.Field
from typing import List, Optional from functools import wraps from uuid import UUID from sqlalchemy.sql.schema import UniqueConstraint from sqlmodel import Field, Relationship, SQLModel # monkeypath from https://github.com/tiangolo/sqlmodel/issues/9 # without this all database fields are indexed be default def set_default_index(func): """Decorator to set default index for SQLModel Can be removed when https://github.com/tiangolo/sqlmodel/pull/11 is merged """ @wraps(func) def inner(*args, index=False, **kwargs): return func(*args, index=index, **kwargs) return inner # monkey patch field with default index=False # this works as long as we always call Field() Field = set_default_index(Field) class TopicModelBase(SQLModel): model_id: UUID = Field() version: int = Field(default=1) class TopicModel(TopicModelBase, table=True): __tablename__ = "topic_model" __table_args__ = (UniqueConstraint("model_id", "version", name="_model_id_version_uc"),) id: Optional[int] = Field(primary_key=True, nullable=False) # NOQA: A003 topics: List["Topic"] = Relationship( back_populates="topic_model", sa_relationship_kwargs={"cascade": "all,delete"} ) class WordBase(SQLModel): name: str = Field() score: float = Field() class Word(WordBase, table=True): id: Optional[int] = Field(primary_key=True, nullable=False) # NOQA: A003 topic_id: int = Field(foreign_key="topic.id") topic: "Topic" = Relationship( back_populates="top_words", sa_relationship_kwargs={"cascade": "all,delete"} ) class TopicBase(SQLModel): name: str = Field() count: int = Field() topic_index: int = Field() class TopicWithWords(TopicBase): top_words: List["WordBase"] = Field(default=[]) class Topic(TopicBase, table=True): id: Optional[int] =
Field(primary_key=True, nullable=False)
sqlmodel.Field
from typing import List, Optional from functools import wraps from uuid import UUID from sqlalchemy.sql.schema import UniqueConstraint from sqlmodel import Field, Relationship, SQLModel # monkeypath from https://github.com/tiangolo/sqlmodel/issues/9 # without this all database fields are indexed be default def set_default_index(func): """Decorator to set default index for SQLModel Can be removed when https://github.com/tiangolo/sqlmodel/pull/11 is merged """ @wraps(func) def inner(*args, index=False, **kwargs): return func(*args, index=index, **kwargs) return inner # monkey patch field with default index=False # this works as long as we always call Field() Field = set_default_index(Field) class TopicModelBase(SQLModel): model_id: UUID = Field() version: int = Field(default=1) class TopicModel(TopicModelBase, table=True): __tablename__ = "topic_model" __table_args__ = (UniqueConstraint("model_id", "version", name="_model_id_version_uc"),) id: Optional[int] = Field(primary_key=True, nullable=False) # NOQA: A003 topics: List["Topic"] = Relationship( back_populates="topic_model", sa_relationship_kwargs={"cascade": "all,delete"} ) class WordBase(SQLModel): name: str = Field() score: float = Field() class Word(WordBase, table=True): id: Optional[int] = Field(primary_key=True, nullable=False) # NOQA: A003 topic_id: int = Field(foreign_key="topic.id") topic: "Topic" = Relationship( back_populates="top_words", sa_relationship_kwargs={"cascade": "all,delete"} ) class TopicBase(SQLModel): name: str = Field() count: int = Field() topic_index: int = Field() class TopicWithWords(TopicBase): top_words: List["WordBase"] = Field(default=[]) class Topic(TopicBase, table=True): id: Optional[int] = Field(primary_key=True, nullable=False) # NOQA: A003 topic_model_id: int =
Field(foreign_key="topic_model.id")
sqlmodel.Field
from typing import Optional, List from fastapi import FastAPI, File, UploadFile, Request from sqlmodel import Field, Session, SQLModel, create_engine, select from pydantic import BaseModel from network import Network import requests from PIL import Image as ImagePIL import torchvision as tv app = FastAPI() network = Network() network.model.eval() class Image(SQLModel, table=True): key: Optional[int] = Field(default=None, primary_key=True) image_name: str label: str image_url: str engine =
create_engine("sqlite:///image.db")
sqlmodel.create_engine
from typing import Optional, List from fastapi import FastAPI, File, UploadFile, Request from sqlmodel import Field, Session, SQLModel, create_engine, select from pydantic import BaseModel from network import Network import requests from PIL import Image as ImagePIL import torchvision as tv app = FastAPI() network = Network() network.model.eval() class Image(SQLModel, table=True): key: Optional[int] =
Field(default=None, primary_key=True)
sqlmodel.Field
from typing import Optional, List from fastapi import FastAPI, File, UploadFile, Request from sqlmodel import Field, Session, SQLModel, create_engine, select from pydantic import BaseModel from network import Network import requests from PIL import Image as ImagePIL import torchvision as tv app = FastAPI() network = Network() network.model.eval() class Image(SQLModel, table=True): key: Optional[int] = Field(default=None, primary_key=True) image_name: str label: str image_url: str engine = create_engine("sqlite:///image.db") @app.get("/") def read_images(): with
Session(engine)
sqlmodel.Session
from typing import Optional, List from fastapi import FastAPI, File, UploadFile, Request from sqlmodel import Field, Session, SQLModel, create_engine, select from pydantic import BaseModel from network import Network import requests from PIL import Image as ImagePIL import torchvision as tv app = FastAPI() network = Network() network.model.eval() class Image(SQLModel, table=True): key: Optional[int] = Field(default=None, primary_key=True) image_name: str label: str image_url: str engine = create_engine("sqlite:///image.db") @app.get("/") def read_images(): with Session(engine) as session: statement =
select(Image)
sqlmodel.select
from typing import Optional, List from fastapi import FastAPI, File, UploadFile, Request from sqlmodel import Field, Session, SQLModel, create_engine, select from pydantic import BaseModel from network import Network import requests from PIL import Image as ImagePIL import torchvision as tv app = FastAPI() network = Network() network.model.eval() class Image(SQLModel, table=True): key: Optional[int] = Field(default=None, primary_key=True) image_name: str label: str image_url: str engine = create_engine("sqlite:///image.db") @app.get("/") def read_images(): with Session(engine) as session: statement = select(Image) images = session.exec(statement).all() return images class Item(BaseModel): key: int label: str = "" @app.post("/") def update_heroes(item: Item): with
Session(engine)
sqlmodel.Session
from typing import Optional, List from fastapi import FastAPI, File, UploadFile, Request from sqlmodel import Field, Session, SQLModel, create_engine, select from pydantic import BaseModel from network import Network import requests from PIL import Image as ImagePIL import torchvision as tv app = FastAPI() network = Network() network.model.eval() class Image(SQLModel, table=True): key: Optional[int] = Field(default=None, primary_key=True) image_name: str label: str image_url: str engine = create_engine("sqlite:///image.db") @app.get("/") def read_images(): with Session(engine) as session: statement = select(Image) images = session.exec(statement).all() return images class Item(BaseModel): key: int label: str = "" @app.post("/") def update_heroes(item: Item): with Session(engine) as session: statement = select(Image).where(Image.key == item.key) results = session.exec(statement) image = results.one() image.label = item.label session.add(image) session.commit() @app.get("/predict/{item}") def predict(item: int): import torch import numpy as np with
Session(engine)
sqlmodel.Session
from typing import Optional, List from fastapi import FastAPI, File, UploadFile, Request from sqlmodel import Field, Session, SQLModel, create_engine, select from pydantic import BaseModel from network import Network import requests from PIL import Image as ImagePIL import torchvision as tv app = FastAPI() network = Network() network.model.eval() class Image(SQLModel, table=True): key: Optional[int] = Field(default=None, primary_key=True) image_name: str label: str image_url: str engine = create_engine("sqlite:///image.db") @app.get("/") def read_images(): with Session(engine) as session: statement = select(Image) images = session.exec(statement).all() return images class Item(BaseModel): key: int label: str = "" @app.post("/") def update_heroes(item: Item): with Session(engine) as session: statement = select(Image).where(Image.key == item.key) results = session.exec(statement) image = results.one() image.label = item.label session.add(image) session.commit() @app.get("/predict/{item}") def predict(item: int): import torch import numpy as np with Session(engine) as session: statement = select(Image).where(Image.key == item) results = session.exec(statement) image = results.one() image_url = image.image_url img = ImagePIL.open(requests.get(image_url, stream=True).raw) img = tv.transforms.functional.pil_to_tensor(img).float().unsqueeze(0) with torch.no_grad(): result = network.model(img) result = torch.nn.functional.softmax(result) ret = { '1' : float(result[0][0]), '2': float(result[0][1]) } return str(ret) @app.post("/upload_image") async def upload_image(files: List[UploadFile]): # return {"filenames": [file.filename for file in files]} from minio import Minio import io with
Session(engine)
sqlmodel.Session
from typing import Optional, List from fastapi import FastAPI, File, UploadFile, Request from sqlmodel import Field, Session, SQLModel, create_engine, select from pydantic import BaseModel from network import Network import requests from PIL import Image as ImagePIL import torchvision as tv app = FastAPI() network = Network() network.model.eval() class Image(SQLModel, table=True): key: Optional[int] = Field(default=None, primary_key=True) image_name: str label: str image_url: str engine = create_engine("sqlite:///image.db") @app.get("/") def read_images(): with Session(engine) as session: statement = select(Image) images = session.exec(statement).all() return images class Item(BaseModel): key: int label: str = "" @app.post("/") def update_heroes(item: Item): with Session(engine) as session: statement =
select(Image)
sqlmodel.select
from typing import Optional, List from fastapi import FastAPI, File, UploadFile, Request from sqlmodel import Field, Session, SQLModel, create_engine, select from pydantic import BaseModel from network import Network import requests from PIL import Image as ImagePIL import torchvision as tv app = FastAPI() network = Network() network.model.eval() class Image(SQLModel, table=True): key: Optional[int] = Field(default=None, primary_key=True) image_name: str label: str image_url: str engine = create_engine("sqlite:///image.db") @app.get("/") def read_images(): with Session(engine) as session: statement = select(Image) images = session.exec(statement).all() return images class Item(BaseModel): key: int label: str = "" @app.post("/") def update_heroes(item: Item): with Session(engine) as session: statement = select(Image).where(Image.key == item.key) results = session.exec(statement) image = results.one() image.label = item.label session.add(image) session.commit() @app.get("/predict/{item}") def predict(item: int): import torch import numpy as np with Session(engine) as session: statement =
select(Image)
sqlmodel.select
from typing import List from app.database import get_session from app.models import Medication, MedicationUpdate from fastapi import APIRouter, Depends, HTTPException from sqlalchemy.ext.asyncio import AsyncSession from sqlmodel import select router = APIRouter(prefix="/medications", tags=["medications"]) @router.post("", response_model=Medication) async def create_medication( *, med: Medication, session: AsyncSession = Depends(get_session) ) -> Medication: medication = Medication.from_orm(med) session.add(medication) await session.commit() await session.refresh(medication) return medication @router.get("/{medication_id}", response_model=Medication) async def retrieve_medication( *, medication_id: str, session: AsyncSession = Depends(get_session) ) -> Medication: result = await session.execute( select(Medication).where(Medication.id == medication_id) ) medication = result.scalar_one_or_none() if not medication: raise HTTPException( status_code=404, detail=f"Medication {medication_id} not found" ) return medication @router.patch("/{medication_id}", response_model=Medication) async def update_medication( *, medication_id: str, patch: MedicationUpdate, session: AsyncSession = Depends(get_session), ) -> Medication: result = await session.execute( select(Medication).where(Medication.id == medication_id) ) medication = result.scalar_one_or_none() if not medication: raise HTTPException( status_code=404, detail=f"Medication {medication_id} not found" ) patch_data = patch.dict(exclude_unset=True) for key, value in patch_data.items(): setattr(medication, key, value) session.add(medication) await session.commit() await session.refresh(medication) return medication @router.post("/{medication_id}") async def delete_medication( *, medication_id: str, session: AsyncSession = Depends(get_session) ): result = await session.execute( select(Medication).where(Medication.id == medication_id) ) medication = result.scalar_one_or_none() if not medication: raise HTTPException( status_code=404, detail=f"Medication {medication_id} not found" ) await session.delete(medication) await session.commit() return {"ok": True} @router.get("", response_model=List[Medication]) async def list_medications( *, session: AsyncSession = Depends(get_session) ) -> List[Medication]: result = await session.execute(
select(Medication)
sqlmodel.select
from typing import List from app.database import get_session from app.models import Medication, MedicationUpdate from fastapi import APIRouter, Depends, HTTPException from sqlalchemy.ext.asyncio import AsyncSession from sqlmodel import select router = APIRouter(prefix="/medications", tags=["medications"]) @router.post("", response_model=Medication) async def create_medication( *, med: Medication, session: AsyncSession = Depends(get_session) ) -> Medication: medication = Medication.from_orm(med) session.add(medication) await session.commit() await session.refresh(medication) return medication @router.get("/{medication_id}", response_model=Medication) async def retrieve_medication( *, medication_id: str, session: AsyncSession = Depends(get_session) ) -> Medication: result = await session.execute(
select(Medication)
sqlmodel.select
from typing import List from app.database import get_session from app.models import Medication, MedicationUpdate from fastapi import APIRouter, Depends, HTTPException from sqlalchemy.ext.asyncio import AsyncSession from sqlmodel import select router = APIRouter(prefix="/medications", tags=["medications"]) @router.post("", response_model=Medication) async def create_medication( *, med: Medication, session: AsyncSession = Depends(get_session) ) -> Medication: medication = Medication.from_orm(med) session.add(medication) await session.commit() await session.refresh(medication) return medication @router.get("/{medication_id}", response_model=Medication) async def retrieve_medication( *, medication_id: str, session: AsyncSession = Depends(get_session) ) -> Medication: result = await session.execute( select(Medication).where(Medication.id == medication_id) ) medication = result.scalar_one_or_none() if not medication: raise HTTPException( status_code=404, detail=f"Medication {medication_id} not found" ) return medication @router.patch("/{medication_id}", response_model=Medication) async def update_medication( *, medication_id: str, patch: MedicationUpdate, session: AsyncSession = Depends(get_session), ) -> Medication: result = await session.execute(
select(Medication)
sqlmodel.select
from typing import List from app.database import get_session from app.models import Medication, MedicationUpdate from fastapi import APIRouter, Depends, HTTPException from sqlalchemy.ext.asyncio import AsyncSession from sqlmodel import select router = APIRouter(prefix="/medications", tags=["medications"]) @router.post("", response_model=Medication) async def create_medication( *, med: Medication, session: AsyncSession = Depends(get_session) ) -> Medication: medication = Medication.from_orm(med) session.add(medication) await session.commit() await session.refresh(medication) return medication @router.get("/{medication_id}", response_model=Medication) async def retrieve_medication( *, medication_id: str, session: AsyncSession = Depends(get_session) ) -> Medication: result = await session.execute( select(Medication).where(Medication.id == medication_id) ) medication = result.scalar_one_or_none() if not medication: raise HTTPException( status_code=404, detail=f"Medication {medication_id} not found" ) return medication @router.patch("/{medication_id}", response_model=Medication) async def update_medication( *, medication_id: str, patch: MedicationUpdate, session: AsyncSession = Depends(get_session), ) -> Medication: result = await session.execute( select(Medication).where(Medication.id == medication_id) ) medication = result.scalar_one_or_none() if not medication: raise HTTPException( status_code=404, detail=f"Medication {medication_id} not found" ) patch_data = patch.dict(exclude_unset=True) for key, value in patch_data.items(): setattr(medication, key, value) session.add(medication) await session.commit() await session.refresh(medication) return medication @router.post("/{medication_id}") async def delete_medication( *, medication_id: str, session: AsyncSession = Depends(get_session) ): result = await session.execute(
select(Medication)
sqlmodel.select
#!/usr/bin/env python3 from sqlalchemy.orm import sessionmaker from sqlmodel import create_engine from . import constants #postgres_url = f"postgresql://postgres:{constants.POSTGRES_PW}@localhost/billsim" if constants.POSTGRES_URL is None: postgres_url = f"postgresql://postgres:{constants.POSTGRES_PW}@localhost" else: postgres_url = constants.POSTGRES_URL engine =
create_engine(postgres_url, echo=True)
sqlmodel.create_engine
import logging from datetime import datetime from typing import List from sqlmodel import Session, select from db import engine from models import Social, User def get_last_social() -> Social: with
Session(engine)
sqlmodel.Session
import logging from datetime import datetime from typing import List from sqlmodel import Session, select from db import engine from models import Social, User def get_last_social() -> Social: with Session(engine) as session: statement = select(Social).order_by(Social.id.desc()).limit(1) result = session.exec(statement).one_or_none() logging.info(f"SELECT social row: {result}") return result def get_previous_social() -> Social: with
Session(engine)
sqlmodel.Session
import logging from datetime import datetime from typing import List from sqlmodel import Session, select from db import engine from models import Social, User def get_last_social() -> Social: with Session(engine) as session: statement = select(Social).order_by(Social.id.desc()).limit(1) result = session.exec(statement).one_or_none() logging.info(f"SELECT social row: {result}") return result def get_previous_social() -> Social: with Session(engine) as session: statement = select(Social).order_by(Social.id.desc()).offset(1).limit(1) result = session.exec(statement).one_or_none() logging.info(f"SELECT previous social row: {result}") return result def create_social(fb: int, ig: int, tw: int, sp: int, yt: int): dt_now = datetime.now().strftime("%Y%m%d_%H%M%S") logging.info(f"INSERT social row ({dt_now},{fb},{ig},{tw},{sp},{yt})") social_row = Social(dt=dt_now, fb=fb, ig=ig, tw=tw, sp=sp, yt=yt) with
Session(engine)
sqlmodel.Session
import logging from datetime import datetime from typing import List from sqlmodel import Session, select from db import engine from models import Social, User def get_last_social() -> Social: with Session(engine) as session: statement = select(Social).order_by(Social.id.desc()).limit(1) result = session.exec(statement).one_or_none() logging.info(f"SELECT social row: {result}") return result def get_previous_social() -> Social: with Session(engine) as session: statement = select(Social).order_by(Social.id.desc()).offset(1).limit(1) result = session.exec(statement).one_or_none() logging.info(f"SELECT previous social row: {result}") return result def create_social(fb: int, ig: int, tw: int, sp: int, yt: int): dt_now = datetime.now().strftime("%Y%m%d_%H%M%S") logging.info(f"INSERT social row ({dt_now},{fb},{ig},{tw},{sp},{yt})") social_row = Social(dt=dt_now, fb=fb, ig=ig, tw=tw, sp=sp, yt=yt) with Session(engine) as session: session.add(social_row) session.commit() def create_user(telegram_id: int, username: str = None, first_name: str = None, last_name: str = None): logging.info(f"INSERT user: {first_name}") user_row = User(telegram_id=telegram_id, username=username, first_name=first_name, last_name=last_name) with
Session(engine)
sqlmodel.Session
import logging from datetime import datetime from typing import List from sqlmodel import Session, select from db import engine from models import Social, User def get_last_social() -> Social: with Session(engine) as session: statement = select(Social).order_by(Social.id.desc()).limit(1) result = session.exec(statement).one_or_none() logging.info(f"SELECT social row: {result}") return result def get_previous_social() -> Social: with Session(engine) as session: statement = select(Social).order_by(Social.id.desc()).offset(1).limit(1) result = session.exec(statement).one_or_none() logging.info(f"SELECT previous social row: {result}") return result def create_social(fb: int, ig: int, tw: int, sp: int, yt: int): dt_now = datetime.now().strftime("%Y%m%d_%H%M%S") logging.info(f"INSERT social row ({dt_now},{fb},{ig},{tw},{sp},{yt})") social_row = Social(dt=dt_now, fb=fb, ig=ig, tw=tw, sp=sp, yt=yt) with Session(engine) as session: session.add(social_row) session.commit() def create_user(telegram_id: int, username: str = None, first_name: str = None, last_name: str = None): logging.info(f"INSERT user: {first_name}") user_row = User(telegram_id=telegram_id, username=username, first_name=first_name, last_name=last_name) with Session(engine) as session: session.add(user_row) session.commit() def get_user(telegram_id: int) -> User: with
Session(engine)
sqlmodel.Session
import logging from datetime import datetime from typing import List from sqlmodel import Session, select from db import engine from models import Social, User def get_last_social() -> Social: with Session(engine) as session: statement = select(Social).order_by(Social.id.desc()).limit(1) result = session.exec(statement).one_or_none() logging.info(f"SELECT social row: {result}") return result def get_previous_social() -> Social: with Session(engine) as session: statement = select(Social).order_by(Social.id.desc()).offset(1).limit(1) result = session.exec(statement).one_or_none() logging.info(f"SELECT previous social row: {result}") return result def create_social(fb: int, ig: int, tw: int, sp: int, yt: int): dt_now = datetime.now().strftime("%Y%m%d_%H%M%S") logging.info(f"INSERT social row ({dt_now},{fb},{ig},{tw},{sp},{yt})") social_row = Social(dt=dt_now, fb=fb, ig=ig, tw=tw, sp=sp, yt=yt) with Session(engine) as session: session.add(social_row) session.commit() def create_user(telegram_id: int, username: str = None, first_name: str = None, last_name: str = None): logging.info(f"INSERT user: {first_name}") user_row = User(telegram_id=telegram_id, username=username, first_name=first_name, last_name=last_name) with Session(engine) as session: session.add(user_row) session.commit() def get_user(telegram_id: int) -> User: with Session(engine) as session: statement = select(User).where(User.telegram_id == telegram_id) result = session.exec(statement).one_or_none() logging.info(f"SELECT user: {result}") return result def get_all_users() -> List[User]: with
Session(engine)
sqlmodel.Session
import logging from datetime import datetime from typing import List from sqlmodel import Session, select from db import engine from models import Social, User def get_last_social() -> Social: with Session(engine) as session: statement = select(Social).order_by(Social.id.desc()).limit(1) result = session.exec(statement).one_or_none() logging.info(f"SELECT social row: {result}") return result def get_previous_social() -> Social: with Session(engine) as session: statement = select(Social).order_by(Social.id.desc()).offset(1).limit(1) result = session.exec(statement).one_or_none() logging.info(f"SELECT previous social row: {result}") return result def create_social(fb: int, ig: int, tw: int, sp: int, yt: int): dt_now = datetime.now().strftime("%Y%m%d_%H%M%S") logging.info(f"INSERT social row ({dt_now},{fb},{ig},{tw},{sp},{yt})") social_row = Social(dt=dt_now, fb=fb, ig=ig, tw=tw, sp=sp, yt=yt) with Session(engine) as session: session.add(social_row) session.commit() def create_user(telegram_id: int, username: str = None, first_name: str = None, last_name: str = None): logging.info(f"INSERT user: {first_name}") user_row = User(telegram_id=telegram_id, username=username, first_name=first_name, last_name=last_name) with Session(engine) as session: session.add(user_row) session.commit() def get_user(telegram_id: int) -> User: with Session(engine) as session: statement = select(User).where(User.telegram_id == telegram_id) result = session.exec(statement).one_or_none() logging.info(f"SELECT user: {result}") return result def get_all_users() -> List[User]: with Session(engine) as session: statement =
select(User)
sqlmodel.select
import logging from datetime import datetime from typing import List from sqlmodel import Session, select from db import engine from models import Social, User def get_last_social() -> Social: with Session(engine) as session: statement = select(Social).order_by(Social.id.desc()).limit(1) result = session.exec(statement).one_or_none() logging.info(f"SELECT social row: {result}") return result def get_previous_social() -> Social: with Session(engine) as session: statement = select(Social).order_by(Social.id.desc()).offset(1).limit(1) result = session.exec(statement).one_or_none() logging.info(f"SELECT previous social row: {result}") return result def create_social(fb: int, ig: int, tw: int, sp: int, yt: int): dt_now = datetime.now().strftime("%Y%m%d_%H%M%S") logging.info(f"INSERT social row ({dt_now},{fb},{ig},{tw},{sp},{yt})") social_row = Social(dt=dt_now, fb=fb, ig=ig, tw=tw, sp=sp, yt=yt) with Session(engine) as session: session.add(social_row) session.commit() def create_user(telegram_id: int, username: str = None, first_name: str = None, last_name: str = None): logging.info(f"INSERT user: {first_name}") user_row = User(telegram_id=telegram_id, username=username, first_name=first_name, last_name=last_name) with Session(engine) as session: session.add(user_row) session.commit() def get_user(telegram_id: int) -> User: with Session(engine) as session: statement =
select(User)
sqlmodel.select
import logging from datetime import datetime from typing import List from sqlmodel import Session, select from db import engine from models import Social, User def get_last_social() -> Social: with Session(engine) as session: statement =
select(Social)
sqlmodel.select
import logging from datetime import datetime from typing import List from sqlmodel import Session, select from db import engine from models import Social, User def get_last_social() -> Social: with Session(engine) as session: statement = select(Social).order_by(Social.id.desc()).limit(1) result = session.exec(statement).one_or_none() logging.info(f"SELECT social row: {result}") return result def get_previous_social() -> Social: with Session(engine) as session: statement =
select(Social)
sqlmodel.select
from typing import Optional from sqlmodel import Field, Session, SQLModel, create_engine class Hero(SQLModel, table=True): id: Optional[int] = Field(default=None, primary_key=True) name: str secret_name: str age: Optional[int] = None sqlite_file_name = "database.db" sqlite_url = f"sqlite:///{sqlite_file_name}" engine =
create_engine(sqlite_url, echo=True)
sqlmodel.create_engine
from typing import Optional from sqlmodel import Field, Session, SQLModel, create_engine class Hero(SQLModel, table=True): id: Optional[int] =
Field(default=None, primary_key=True)
sqlmodel.Field
from typing import Optional from sqlmodel import Field, Session, SQLModel, create_engine class Hero(SQLModel, table=True): id: Optional[int] = Field(default=None, primary_key=True) name: str secret_name: str age: Optional[int] = None sqlite_file_name = "database.db" sqlite_url = f"sqlite:///{sqlite_file_name}" engine = create_engine(sqlite_url, echo=True) def create_db_and_tables():
SQLModel.metadata.create_all(engine)
sqlmodel.SQLModel.metadata.create_all
from typing import Optional from sqlmodel import Field, Session, SQLModel, create_engine class Hero(SQLModel, table=True): id: Optional[int] = Field(default=None, primary_key=True) name: str secret_name: str age: Optional[int] = None sqlite_file_name = "database.db" sqlite_url = f"sqlite:///{sqlite_file_name}" engine = create_engine(sqlite_url, echo=True) def create_db_and_tables(): SQLModel.metadata.create_all(engine) def create_heroes(): hero_1 = Hero(name="Deadpond", secret_name="<NAME>") hero_2 = Hero(name="Spider-Boy", secret_name="<NAME>") hero_3 = Hero(name="Rusty-Man", secret_name="<NAME>", age=48) print("Before interacting with the database") print("Hero 1:", hero_1) print("Hero 2:", hero_2) print("Hero 3:", hero_3) with
Session(engine)
sqlmodel.Session
import uuid from datetime import datetime from typing import Optional from pydantic import UUID4 from sqlmodel import Field, SQLModel class DocumentInput(SQLModel): id: UUID4 =
Field(default_factory=uuid.uuid4, primary_key=True)
sqlmodel.Field
import uuid from datetime import datetime from typing import Optional from pydantic import UUID4 from sqlmodel import Field, SQLModel class DocumentInput(SQLModel): id: UUID4 = Field(default_factory=uuid.uuid4, primary_key=True) content: str class Document(DocumentInput, table=True): created_at: datetime =
Field(default_factory=datetime.utcnow)
sqlmodel.Field
import uuid from datetime import datetime from typing import Optional from pydantic import UUID4 from sqlmodel import Field, SQLModel class DocumentInput(SQLModel): id: UUID4 = Field(default_factory=uuid.uuid4, primary_key=True) content: str class Document(DocumentInput, table=True): created_at: datetime = Field(default_factory=datetime.utcnow) updated_at: datetime =
Field(default_factory=datetime.utcnow)
sqlmodel.Field
import uuid from datetime import datetime from typing import Optional from pydantic import UUID4 from sqlmodel import Field, SQLModel class DocumentInput(SQLModel): id: UUID4 = Field(default_factory=uuid.uuid4, primary_key=True) content: str class Document(DocumentInput, table=True): created_at: datetime = Field(default_factory=datetime.utcnow) updated_at: datetime = Field(default_factory=datetime.utcnow) category: Optional[str] =
Field(default=None)
sqlmodel.Field
"""record model Revision ID: 6c2a16b349b1 Revises: 2bafd0d01ae2 Create Date: 2021-11-20 18:51:45.427996 """ import sqlalchemy as sa import sqlmodel import sqlmodel.sql.sqltypes from alembic import op # revision identifiers, used by Alembic. revision = "6c2a16b349b1" down_revision = "2bafd0d01ae2" branch_labels = None depends_on = None def upgrade() -> None: # ### commands auto generated by Alembic - please adjust! ### op.create_table( "records", sa.Column( "created_at", sa.DateTime(timezone=True), server_default=sa.text("TIMEZONE('utc', CURRENT_TIMESTAMP)"), nullable=False, ), sa.Column( "updated_at", sa.DateTime(timezone=True), server_default=sa.text("TIMEZONE('utc', CURRENT_TIMESTAMP)"), nullable=False, ), sa.Column("problem_set_id",
sqlmodel.sql.sqltypes.GUID()
sqlmodel.sql.sqltypes.GUID
"""record model Revision ID: 6c2a16b349b1 Revises: 2bafd0d01ae2 Create Date: 2021-11-20 18:51:45.427996 """ import sqlalchemy as sa import sqlmodel import sqlmodel.sql.sqltypes from alembic import op # revision identifiers, used by Alembic. revision = "6c2a16b349b1" down_revision = "2bafd0d01ae2" branch_labels = None depends_on = None def upgrade() -> None: # ### commands auto generated by Alembic - please adjust! ### op.create_table( "records", sa.Column( "created_at", sa.DateTime(timezone=True), server_default=sa.text("TIMEZONE('utc', CURRENT_TIMESTAMP)"), nullable=False, ), sa.Column( "updated_at", sa.DateTime(timezone=True), server_default=sa.text("TIMEZONE('utc', CURRENT_TIMESTAMP)"), nullable=False, ), sa.Column("problem_set_id", sqlmodel.sql.sqltypes.GUID(), nullable=True), sa.Column("problem_id",
sqlmodel.sql.sqltypes.GUID()
sqlmodel.sql.sqltypes.GUID
"""record model Revision ID: 6c2a16b349b1 Revises: 2bafd0d01ae2 Create Date: 2021-11-20 18:51:45.427996 """ import sqlalchemy as sa import sqlmodel import sqlmodel.sql.sqltypes from alembic import op # revision identifiers, used by Alembic. revision = "6c2a16b349b1" down_revision = "2bafd0d01ae2" branch_labels = None depends_on = None def upgrade() -> None: # ### commands auto generated by Alembic - please adjust! ### op.create_table( "records", sa.Column( "created_at", sa.DateTime(timezone=True), server_default=sa.text("TIMEZONE('utc', CURRENT_TIMESTAMP)"), nullable=False, ), sa.Column( "updated_at", sa.DateTime(timezone=True), server_default=sa.text("TIMEZONE('utc', CURRENT_TIMESTAMP)"), nullable=False, ), sa.Column("problem_set_id", sqlmodel.sql.sqltypes.GUID(), nullable=True), sa.Column("problem_id", sqlmodel.sql.sqltypes.GUID(), nullable=True), sa.Column("user_id",
sqlmodel.sql.sqltypes.GUID()
sqlmodel.sql.sqltypes.GUID
"""record model Revision ID: 6c2a16b349b1 Revises: 2bafd0d01ae2 Create Date: 2021-11-20 18:51:45.427996 """ import sqlalchemy as sa import sqlmodel import sqlmodel.sql.sqltypes from alembic import op # revision identifiers, used by Alembic. revision = "6c2a16b349b1" down_revision = "2bafd0d01ae2" branch_labels = None depends_on = None def upgrade() -> None: # ### commands auto generated by Alembic - please adjust! ### op.create_table( "records", sa.Column( "created_at", sa.DateTime(timezone=True), server_default=sa.text("TIMEZONE('utc', CURRENT_TIMESTAMP)"), nullable=False, ), sa.Column( "updated_at", sa.DateTime(timezone=True), server_default=sa.text("TIMEZONE('utc', CURRENT_TIMESTAMP)"), nullable=False, ), sa.Column("problem_set_id", sqlmodel.sql.sqltypes.GUID(), nullable=True), sa.Column("problem_id", sqlmodel.sql.sqltypes.GUID(), nullable=True), sa.Column("user_id", sqlmodel.sql.sqltypes.GUID(), nullable=True), sa.Column("id",
sqlmodel.sql.sqltypes.GUID()
sqlmodel.sql.sqltypes.GUID
"""record model Revision ID: 6c2a16b349b1 Revises: 2bafd0d01ae2 Create Date: 2021-11-20 18:51:45.427996 """ import sqlalchemy as sa import sqlmodel import sqlmodel.sql.sqltypes from alembic import op # revision identifiers, used by Alembic. revision = "6c2a16b349b1" down_revision = "2bafd0d01ae2" branch_labels = None depends_on = None def upgrade() -> None: # ### commands auto generated by Alembic - please adjust! ### op.create_table( "records", sa.Column( "created_at", sa.DateTime(timezone=True), server_default=sa.text("TIMEZONE('utc', CURRENT_TIMESTAMP)"), nullable=False, ), sa.Column( "updated_at", sa.DateTime(timezone=True), server_default=sa.text("TIMEZONE('utc', CURRENT_TIMESTAMP)"), nullable=False, ), sa.Column("problem_set_id", sqlmodel.sql.sqltypes.GUID(), nullable=True), sa.Column("problem_id", sqlmodel.sql.sqltypes.GUID(), nullable=True), sa.Column("user_id", sqlmodel.sql.sqltypes.GUID(), nullable=True), sa.Column("id", sqlmodel.sql.sqltypes.GUID(), nullable=False), sa.Column("status",
sqlmodel.sql.sqltypes.AutoString()
sqlmodel.sql.sqltypes.AutoString
"""record model Revision ID: 6c2a16b349b1 Revises: 2bafd0d01ae2 Create Date: 2021-11-20 18:51:45.427996 """ import sqlalchemy as sa import sqlmodel import sqlmodel.sql.sqltypes from alembic import op # revision identifiers, used by Alembic. revision = "6c2a16b349b1" down_revision = "2bafd0d01ae2" branch_labels = None depends_on = None def upgrade() -> None: # ### commands auto generated by Alembic - please adjust! ### op.create_table( "records", sa.Column( "created_at", sa.DateTime(timezone=True), server_default=sa.text("TIMEZONE('utc', CURRENT_TIMESTAMP)"), nullable=False, ), sa.Column( "updated_at", sa.DateTime(timezone=True), server_default=sa.text("TIMEZONE('utc', CURRENT_TIMESTAMP)"), nullable=False, ), sa.Column("problem_set_id", sqlmodel.sql.sqltypes.GUID(), nullable=True), sa.Column("problem_id", sqlmodel.sql.sqltypes.GUID(), nullable=True), sa.Column("user_id", sqlmodel.sql.sqltypes.GUID(), nullable=True), sa.Column("id", sqlmodel.sql.sqltypes.GUID(), nullable=False), sa.Column("status", sqlmodel.sql.sqltypes.AutoString(), nullable=True), sa.Column("score", sa.Integer(), nullable=True), sa.Column("time_ms", sa.Integer(), nullable=True), sa.Column("memory_kb", sa.Integer(), nullable=True), sa.Column("commit_id",
sqlmodel.sql.sqltypes.AutoString()
sqlmodel.sql.sqltypes.AutoString
import uuid from datetime import datetime from typing import Optional from sqlalchemy import UniqueConstraint from sqlmodel import Field, Relationship from pydantic_factories import ModelFactory, Use from faker import Faker from api.db.models.base import BaseModel, BaseTable class StudentBase(BaseModel): name: str =
Field(index=True, nullable=False)
sqlmodel.Field
import uuid from datetime import datetime from typing import Optional from sqlalchemy import UniqueConstraint from sqlmodel import Field, Relationship from pydantic_factories import ModelFactory, Use from faker import Faker from api.db.models.base import BaseModel, BaseTable class StudentBase(BaseModel): name: str = Field(index=True, nullable=False) sandbox_id: uuid.UUID = None # faber line of business data for student degree credentials degree: Optional[str] =
Field(default=None, nullable=True)
sqlmodel.Field
import uuid from datetime import datetime from typing import Optional from sqlalchemy import UniqueConstraint from sqlmodel import Field, Relationship from pydantic_factories import ModelFactory, Use from faker import Faker from api.db.models.base import BaseModel, BaseTable class StudentBase(BaseModel): name: str = Field(index=True, nullable=False) sandbox_id: uuid.UUID = None # faber line of business data for student degree credentials degree: Optional[str] = Field(default=None, nullable=True) age: Optional[int] =
Field(default=None, nullable=True)
sqlmodel.Field
import uuid from datetime import datetime from typing import Optional from sqlalchemy import UniqueConstraint from sqlmodel import Field, Relationship from pydantic_factories import ModelFactory, Use from faker import Faker from api.db.models.base import BaseModel, BaseTable class StudentBase(BaseModel): name: str = Field(index=True, nullable=False) sandbox_id: uuid.UUID = None # faber line of business data for student degree credentials degree: Optional[str] = Field(default=None, nullable=True) age: Optional[int] = Field(default=None, nullable=True) student_id: Optional[str] =
Field(default=None, nullable=True)
sqlmodel.Field
import uuid from datetime import datetime from typing import Optional from sqlalchemy import UniqueConstraint from sqlmodel import Field, Relationship from pydantic_factories import ModelFactory, Use from faker import Faker from api.db.models.base import BaseModel, BaseTable class StudentBase(BaseModel): name: str = Field(index=True, nullable=False) sandbox_id: uuid.UUID = None # faber line of business data for student degree credentials degree: Optional[str] = Field(default=None, nullable=True) age: Optional[int] = Field(default=None, nullable=True) student_id: Optional[str] = Field(default=None, nullable=True) date: Optional[datetime] =
Field(default=None, nullable=True)
sqlmodel.Field
import uuid from datetime import datetime from typing import Optional from sqlalchemy import UniqueConstraint from sqlmodel import Field, Relationship from pydantic_factories import ModelFactory, Use from faker import Faker from api.db.models.base import BaseModel, BaseTable class StudentBase(BaseModel): name: str = Field(index=True, nullable=False) sandbox_id: uuid.UUID = None # faber line of business data for student degree credentials degree: Optional[str] = Field(default=None, nullable=True) age: Optional[int] = Field(default=None, nullable=True) student_id: Optional[str] = Field(default=None, nullable=True) date: Optional[datetime] = Field(default=None, nullable=True) # track invitation information # this is for this LOB to track this entity in Traction invitation_state: Optional[str] =
Field(default=None, nullable=True)
sqlmodel.Field
import uuid from datetime import datetime from typing import Optional from sqlalchemy import UniqueConstraint from sqlmodel import Field, Relationship from pydantic_factories import ModelFactory, Use from faker import Faker from api.db.models.base import BaseModel, BaseTable class StudentBase(BaseModel): name: str = Field(index=True, nullable=False) sandbox_id: uuid.UUID = None # faber line of business data for student degree credentials degree: Optional[str] = Field(default=None, nullable=True) age: Optional[int] = Field(default=None, nullable=True) student_id: Optional[str] = Field(default=None, nullable=True) date: Optional[datetime] = Field(default=None, nullable=True) # track invitation information # this is for this LOB to track this entity in Traction invitation_state: Optional[str] = Field(default=None, nullable=True) connection_id: Optional[uuid.UUID] =
Field(default=None)
sqlmodel.Field
import uuid from datetime import datetime from typing import Optional from sqlalchemy import UniqueConstraint from sqlmodel import Field, Relationship from pydantic_factories import ModelFactory, Use from faker import Faker from api.db.models.base import BaseModel, BaseTable class StudentBase(BaseModel): name: str = Field(index=True, nullable=False) sandbox_id: uuid.UUID = None # faber line of business data for student degree credentials degree: Optional[str] = Field(default=None, nullable=True) age: Optional[int] = Field(default=None, nullable=True) student_id: Optional[str] = Field(default=None, nullable=True) date: Optional[datetime] = Field(default=None, nullable=True) # track invitation information # this is for this LOB to track this entity in Traction invitation_state: Optional[str] = Field(default=None, nullable=True) connection_id: Optional[uuid.UUID] = Field(default=None) # for matching this student with their traction tenant # this would not be in this LOB data at all!!! # the entity/person/business that this record represents # would be tracking this in their system/data wallet_id: Optional[uuid.UUID] = None alias: Optional[str] =
Field(default=None, nullable=True)
sqlmodel.Field
import uuid from datetime import datetime from typing import Optional from sqlalchemy import UniqueConstraint from sqlmodel import Field, Relationship from pydantic_factories import ModelFactory, Use from faker import Faker from api.db.models.base import BaseModel, BaseTable class StudentBase(BaseModel): name: str = Field(index=True, nullable=False) sandbox_id: uuid.UUID = None # faber line of business data for student degree credentials degree: Optional[str] = Field(default=None, nullable=True) age: Optional[int] = Field(default=None, nullable=True) student_id: Optional[str] = Field(default=None, nullable=True) date: Optional[datetime] = Field(default=None, nullable=True) # track invitation information # this is for this LOB to track this entity in Traction invitation_state: Optional[str] = Field(default=None, nullable=True) connection_id: Optional[uuid.UUID] = Field(default=None) # for matching this student with their traction tenant # this would not be in this LOB data at all!!! # the entity/person/business that this record represents # would be tracking this in their system/data wallet_id: Optional[uuid.UUID] = None alias: Optional[str] = Field(default=None, nullable=True) class Student(StudentBase, BaseTable, table=True): __table_args__ = (UniqueConstraint("name", "sandbox_id"),) sandbox: Optional["Sandbox"] =
Relationship(back_populates="students")
sqlmodel.Relationship
import uuid
from datetime import datetime
from typing import Optional

from sqlalchemy import UniqueConstraint
from sqlmodel import Field, Relationship
from pydantic_factories import ModelFactory, Use
from faker import Faker

from api.db.models.base import BaseModel, BaseTable


class StudentBase(BaseModel):
    name: str = Field(index=True, nullable=False)
    sandbox_id: uuid.UUID = None

    # faber line of business data for student degree credentials
    degree: Optional[str] = Field(default=None, nullable=True)
    age: Optional[int] = Field(default=None, nullable=True)
    student_id: Optional[str] = Field(default=None, nullable=True)
    date: Optional[datetime] = Field(default=None, nullable=True)

    # track invitation information
    # this is for this LOB to track this entity in Traction
    invitation_state: Optional[str] = Field(default=None, nullable=True)
    connection_id: Optional[uuid.UUID] = Field(default=None)

    # for matching this student with their traction tenant
    # this would not be in this LOB data at all!!!
    # the entity/person/business that this record represents
    # would be tracking this in their system/data
    wallet_id: Optional[uuid.UUID] = None
    alias: Optional[str] = Field(default=None, nullable=True)


class Student(StudentBase, BaseTable, table=True):
    __table_args__ = (UniqueConstraint("name", "sandbox_id"),)

    sandbox: Optional["Sandbox"] = Relationship(back_populates="students")  # noqa: F821
    sandbox_id: uuid.UUID =
Field(foreign_key="sandbox.id")
sqlmodel.Field
import uuid
from datetime import datetime
from typing import Optional

from sqlalchemy import UniqueConstraint
from sqlmodel import Field, Relationship
from pydantic_factories import ModelFactory, Use
from faker import Faker

from api.db.models.base import BaseModel, BaseTable


class StudentBase(BaseModel):
    name: str = Field(index=True, nullable=False)
    sandbox_id: uuid.UUID = None

    # faber line of business data for student degree credentials
    degree: Optional[str] = Field(default=None, nullable=True)
    age: Optional[int] = Field(default=None, nullable=True)
    student_id: Optional[str] = Field(default=None, nullable=True)
    date: Optional[datetime] = Field(default=None, nullable=True)

    # track invitation information
    # this is for this LOB to track this entity in Traction
    invitation_state: Optional[str] = Field(default=None, nullable=True)
    connection_id: Optional[uuid.UUID] = Field(default=None)

    # for matching this student with their traction tenant
    # this would not be in this LOB data at all!!!
    # the entity/person/business that this record represents
    # would be tracking this in their system/data
    wallet_id: Optional[uuid.UUID] = None
    alias: Optional[str] = Field(default=None, nullable=True)


class Student(StudentBase, BaseTable, table=True):
    __table_args__ = (UniqueConstraint("name", "sandbox_id"),)

    sandbox: Optional["Sandbox"] = Relationship(back_populates="students")  # noqa: F821
    sandbox_id: uuid.UUID = Field(foreign_key="sandbox.id")
    wallet_id: uuid.UUID =
Field(default=None, nullable=True)
sqlmodel.Field
from typing import TYPE_CHECKING, List, Optional

from sqlalchemy import Column
from sqlalchemy.dialects.postgresql import ARRAY
from sqlmodel import AutoString, Field, Relationship, SQLModel

if TYPE_CHECKING:
    from .application import Application, ApplicationList


class SchoolBase(SQLModel):
    name: str
    abbreviations: List[str] = Field(
        default=[],
        sa_column=Column(ARRAY(AutoString()), nullable=False),
    )
    alternatives: List[str] = Field(
        default=[],
        sa_column=Column(ARRAY(AutoString()), nullable=False),
    )


class School(SchoolBase, table=True):
    __tablename__ = "schools"

    id: Optional[str] =
Field(default=None, primary_key=True, nullable=False)
sqlmodel.Field
from typing import TYPE_CHECKING, List, Optional

from sqlalchemy import Column
from sqlalchemy.dialects.postgresql import ARRAY
from sqlmodel import AutoString, Field, Relationship, SQLModel

if TYPE_CHECKING:
    from .application import Application, ApplicationList


class SchoolBase(SQLModel):
    name: str
    abbreviations: List[str] = Field(
        default=[],
        sa_column=Column(ARRAY(AutoString()), nullable=False),
    )
    alternatives: List[str] = Field(
        default=[],
        sa_column=Column(ARRAY(AutoString()), nullable=False),
    )


class School(SchoolBase, table=True):
    __tablename__ = "schools"

    id: Optional[str] = Field(default=None, primary_key=True, nullable=False)
    needs_review: bool = False
    applications: List["Application"] =
Relationship(back_populates="school")
sqlmodel.Relationship
from typing import TYPE_CHECKING, List, Optional

from sqlalchemy import Column
from sqlalchemy.dialects.postgresql import ARRAY
from sqlmodel import AutoString, Field, Relationship, SQLModel

if TYPE_CHECKING:
    from .application import Application, ApplicationList


class SchoolBase(SQLModel):
    name: str
    abbreviations: List[str] = Field(
        default=[],
        sa_column=Column(ARRAY(
AutoString()
sqlmodel.AutoString
from typing import TYPE_CHECKING, List, Optional

from sqlalchemy import Column
from sqlalchemy.dialects.postgresql import ARRAY
from sqlmodel import AutoString, Field, Relationship, SQLModel

if TYPE_CHECKING:
    from .application import Application, ApplicationList


class SchoolBase(SQLModel):
    name: str
    abbreviations: List[str] = Field(
        default=[],
        sa_column=Column(ARRAY(AutoString()), nullable=False),
    )
    alternatives: List[str] = Field(
        default=[],
        sa_column=Column(ARRAY(
AutoString()
sqlmodel.AutoString
#!/usr/bin/env python3 import asyncio import datetime import json import os import time from argparse import ArgumentParser from collections.abc import Sequence import httpx import uvloop from dotenv import load_dotenv from loguru import logger from sqlmodel import Session, create_engine from steam2sqlite import APPIDS_URL, BATCH_SIZE, navigator, utils from steam2sqlite.handler import ( get_appids_from_db, get_apps_achievements, get_apps_data, get_error_appids, store_apps_achievements, store_apps_data, ) load_dotenv() APPIDS_FILE = os.getenv("APPIDS_FILE") sqlite_file_name = "database.db" SQLITE_URL = f"sqlite:///{sqlite_file_name}" async def get_appids_from_steam(local_file: str = None) -> dict[int, str]: if local_file: logger.info(f"Loading appids from local file: {local_file}") with open(local_file) as steam_appids_fp: appid_data = json.load(steam_appids_fp) else: logger.info("Loading appids from Steam API") try: async with httpx.AsyncClient() as client: resp = await navigator.get(client, APPIDS_URL) appid_data = resp.json() await asyncio.sleep(1) except navigator.NavigatorError: logger.error("Error getting the appids from Steam") raise return {item["appid"]: item["name"] for item in appid_data["applist"]["apps"]} def main(argv: Sequence[str] | None = None) -> int: parser = ArgumentParser() parser.add_argument( "-l", "--limit", type=float, default=None, nargs="?", const=1, help="limit runtime (minutes)", ) args = parser.parse_args(argv) logger.info("Starting...") start_time = time.monotonic() uvloop.install() engine =
create_engine(SQLITE_URL, echo=False)
sqlmodel.create_engine
#!/usr/bin/env python3 import asyncio import datetime import json import os import time from argparse import ArgumentParser from collections.abc import Sequence import httpx import uvloop from dotenv import load_dotenv from loguru import logger from sqlmodel import Session, create_engine from steam2sqlite import APPIDS_URL, BATCH_SIZE, navigator, utils from steam2sqlite.handler import ( get_appids_from_db, get_apps_achievements, get_apps_data, get_error_appids, store_apps_achievements, store_apps_data, ) load_dotenv() APPIDS_FILE = os.getenv("APPIDS_FILE") sqlite_file_name = "database.db" SQLITE_URL = f"sqlite:///{sqlite_file_name}" async def get_appids_from_steam(local_file: str = None) -> dict[int, str]: if local_file: logger.info(f"Loading appids from local file: {local_file}") with open(local_file) as steam_appids_fp: appid_data = json.load(steam_appids_fp) else: logger.info("Loading appids from Steam API") try: async with httpx.AsyncClient() as client: resp = await navigator.get(client, APPIDS_URL) appid_data = resp.json() await asyncio.sleep(1) except navigator.NavigatorError: logger.error("Error getting the appids from Steam") raise return {item["appid"]: item["name"] for item in appid_data["applist"]["apps"]} def main(argv: Sequence[str] | None = None) -> int: parser = ArgumentParser() parser.add_argument( "-l", "--limit", type=float, default=None, nargs="?", const=1, help="limit runtime (minutes)", ) args = parser.parse_args(argv) logger.info("Starting...") start_time = time.monotonic() uvloop.install() engine = create_engine(SQLITE_URL, echo=False) # From steam api, dict of: {appids: names} steam_appids_names = asyncio.run(get_appids_from_steam(APPIDS_FILE)) with
Session(engine)
sqlmodel.Session
from datetime import date, datetime
from typing import Any, Dict, Optional
from uuid import UUID, uuid4

from pydantic.class_validators import root_validator
from sqlmodel import Column, Enum, Field, SQLModel
from sqlmodel.sql.sqltypes import GUID

from ...utils.date import now_datetime
from ..constants import OperationType, PaymentType, SaleType


class BaseBalance(SQLModel):
    value: float =
Field(description="Value of operation")
sqlmodel.Field
from datetime import date, datetime
from typing import Any, Dict, Optional
from uuid import UUID, uuid4

from pydantic.class_validators import root_validator
from sqlmodel import Column, Enum, Field, SQLModel
from sqlmodel.sql.sqltypes import GUID

from ...utils.date import now_datetime
from ..constants import OperationType, PaymentType, SaleType


class BaseBalance(SQLModel):
    value: float = Field(description="Value of operation")
    operation: OperationType = Field(
        description="Type of operation", sa_column=Column(Enum(OperationType), nullable=False)
    )
    description: str =
Field(description="Description of operation", min_length=1)
sqlmodel.Field
from datetime import date, datetime
from typing import Any, Dict, Optional
from uuid import UUID, uuid4

from pydantic.class_validators import root_validator
from sqlmodel import Column, Enum, Field, SQLModel
from sqlmodel.sql.sqltypes import GUID

from ...utils.date import now_datetime
from ..constants import OperationType, PaymentType, SaleType


class BaseBalance(SQLModel):
    value: float = Field(description="Value of operation")
    operation: OperationType = Field(
        description="Type of operation", sa_column=Column(Enum(OperationType), nullable=False)
    )
    description: str = Field(description="Description of operation", min_length=1)
    created_at: datetime =
Field(default_factory=now_datetime)
sqlmodel.Field
from datetime import date, datetime from typing import Any, Dict, Optional from uuid import UUID, uuid4 from pydantic.class_validators import root_validator from sqlmodel import Column, Enum, Field, SQLModel from sqlmodel.sql.sqltypes import GUID from ...utils.date import now_datetime from ..constants import OperationType, PaymentType, SaleType class BaseBalance(SQLModel): value: float = Field(description="Value of operation") operation: OperationType = Field( description="Type of operation", sa_column=Column(Enum(OperationType), nullable=False) ) description: str = Field(description="Description of operation", min_length=1) created_at: datetime = Field(default_factory=now_datetime) class CreateBalance(BaseBalance): @root_validator() def normalize_value(cls, values: Dict[str, Any]) -> float: operation_type = values.get("operation") value = values.get("value") if not operation_type or not value: return values if any(operation_type.name == payment_type.name for payment_type in PaymentType) and value > 0: values["value"] = value * -1 if any(operation_type.name == sale_type.name for sale_type in SaleType) and value < 0: values["value"] = value * -1 return values class QueryBalance(SQLModel): start_date: Optional[date] =
Field(description="Initial date for query")
sqlmodel.Field
from datetime import date, datetime from typing import Any, Dict, Optional from uuid import UUID, uuid4 from pydantic.class_validators import root_validator from sqlmodel import Column, Enum, Field, SQLModel from sqlmodel.sql.sqltypes import GUID from ...utils.date import now_datetime from ..constants import OperationType, PaymentType, SaleType class BaseBalance(SQLModel): value: float = Field(description="Value of operation") operation: OperationType = Field( description="Type of operation", sa_column=Column(Enum(OperationType), nullable=False) ) description: str = Field(description="Description of operation", min_length=1) created_at: datetime = Field(default_factory=now_datetime) class CreateBalance(BaseBalance): @root_validator() def normalize_value(cls, values: Dict[str, Any]) -> float: operation_type = values.get("operation") value = values.get("value") if not operation_type or not value: return values if any(operation_type.name == payment_type.name for payment_type in PaymentType) and value > 0: values["value"] = value * -1 if any(operation_type.name == sale_type.name for sale_type in SaleType) and value < 0: values["value"] = value * -1 return values class QueryBalance(SQLModel): start_date: Optional[date] = Field(description="Initial date for query") end_date: Optional[date] =
Field(description="End date for query")
sqlmodel.Field
from datetime import date, datetime from typing import Any, Dict, Optional from uuid import UUID, uuid4 from pydantic.class_validators import root_validator from sqlmodel import Column, Enum, Field, SQLModel from sqlmodel.sql.sqltypes import GUID from ...utils.date import now_datetime from ..constants import OperationType, PaymentType, SaleType class BaseBalance(SQLModel): value: float = Field(description="Value of operation") operation: OperationType = Field( description="Type of operation", sa_column=Column(Enum(OperationType), nullable=False) ) description: str = Field(description="Description of operation", min_length=1) created_at: datetime = Field(default_factory=now_datetime) class CreateBalance(BaseBalance): @root_validator() def normalize_value(cls, values: Dict[str, Any]) -> float: operation_type = values.get("operation") value = values.get("value") if not operation_type or not value: return values if any(operation_type.name == payment_type.name for payment_type in PaymentType) and value > 0: values["value"] = value * -1 if any(operation_type.name == sale_type.name for sale_type in SaleType) and value < 0: values["value"] = value * -1 return values class QueryBalance(SQLModel): start_date: Optional[date] = Field(description="Initial date for query") end_date: Optional[date] = Field(description="End date for query") class Balance(BaseBalance, table=True): id: UUID = Field(default_factory=uuid4, sa_column=Column("id", GUID(), primary_key=True)) owner_id: UUID =
Field(description="User ID that owns the balance", foreign_key="users.id")
sqlmodel.Field
from datetime import date, datetime
from typing import Any, Dict, Optional
from uuid import UUID, uuid4

from pydantic.class_validators import root_validator
from sqlmodel import Column, Enum, Field, SQLModel
from sqlmodel.sql.sqltypes import GUID

from ...utils.date import now_datetime
from ..constants import OperationType, PaymentType, SaleType


class BaseBalance(SQLModel):
    value: float = Field(description="Value of operation")
    operation: OperationType = Field(
        description="Type of operation", sa_column=Column(
Enum(OperationType)
sqlmodel.Enum
from datetime import date, datetime from typing import Any, Dict, Optional from uuid import UUID, uuid4 from pydantic.class_validators import root_validator from sqlmodel import Column, Enum, Field, SQLModel from sqlmodel.sql.sqltypes import GUID from ...utils.date import now_datetime from ..constants import OperationType, PaymentType, SaleType class BaseBalance(SQLModel): value: float = Field(description="Value of operation") operation: OperationType = Field( description="Type of operation", sa_column=Column(Enum(OperationType), nullable=False) ) description: str = Field(description="Description of operation", min_length=1) created_at: datetime = Field(default_factory=now_datetime) class CreateBalance(BaseBalance): @root_validator() def normalize_value(cls, values: Dict[str, Any]) -> float: operation_type = values.get("operation") value = values.get("value") if not operation_type or not value: return values if any(operation_type.name == payment_type.name for payment_type in PaymentType) and value > 0: values["value"] = value * -1 if any(operation_type.name == sale_type.name for sale_type in SaleType) and value < 0: values["value"] = value * -1 return values class QueryBalance(SQLModel): start_date: Optional[date] = Field(description="Initial date for query") end_date: Optional[date] = Field(description="End date for query") class Balance(BaseBalance, table=True): id: UUID = Field(default_factory=uuid4, sa_column=Column("id",
GUID()
sqlmodel.sql.sqltypes.GUID
import sys import os import click from app import config from sqlmodel import Session from sqlmodel import create_engine from app.models.server import Server from sqlmodel import select API_ENVIRONMENT = os.environ.get("API_ENVIRONMENT", "Testing") settings = getattr(sys.modules[__name__].config, API_ENVIRONMENT) engine = create_engine(settings.DATABASE_URI) @click.group() @click.pass_context def main(): pass @main.command(name="settings") def get_settings(): """ Prints current API settings from $API_ENVIRONMENT. """ click.echo(settings) @main.group(name="import") def import_group(): pass @import_group.command(name="catagories") def import_catagories(): """ Commands for importing a database. """ import yaml from app.models.server import Catagory, ServerCatagoryLink print("Looking for catagories.yml") with Session(engine) as session: with open("config.yml", "r") as stream: catagories = yaml.safe_load(stream) for name, data in catagories.get("catagories").items(): print(data) _catagory = Catagory( title = name, meta_ref = name.lower().replace(" ", "-"), color = f"#{data['color']}" ) session.add(_catagory) session.commit() session.refresh(_catagory) _query = select(Server).where(Server.domain_name.like(data['match'])) _result = session.exec(_query).all() for server in _result: _link = ServerCatagoryLink( server_id = server.id, catagory_id = _catagory.id ) session.add(_link) session.commit() @import_group.command(name="csv") def csv_file(file): """ Commands for importing a database. """ import csv with Session(engine) as session: with open(file, "r") as stream: csv_reader = csv.DictReader(stream) line_count = 0 for row in csv_reader: if line_count == 0: line_count += 1 _server = Server( domain_name=row["Domain Name"], domain_type=row["Domain Type"], agency=row["Agency"], organization=row["Organization"], ) session.add(_server) session.commit() @import_group.command(name="file") @click.argument("file") def basic_file(file): with Session(engine) as session: with open(file, "r") as stream: stream = stream.readlines() servers = [] for row in stream: servers.append(row.strip().lower()) for row in list(set(servers)): session.add(Server(domain_name=row)) session.commit() @main.group() def tables(): """ Commands for handling database tables. """ pass @tables.command() def drop(): """ Forcefully remove all tables within the database. """ import sqlalchemy from sqlalchemy import create_engine from sqlalchemy import MetaData from sqlalchemy import inspect from sqlalchemy.orm import sessionmaker from sqlalchemy.ext.declarative import declarative_base engine = create_engine(settings.DATABASE_URI) meta = MetaData() meta.reflect(bind=engine) meta.drop_all(engine, checkfirst=False) print("Dropped tables.") @tables.command() def create(): """ Creates all tables within the API. """ from sqlmodel import SQLModel from app.models.user import User, Log from app.models.server import Server, ServerLog
SQLModel.metadata.create_all(engine)
sqlmodel.SQLModel.metadata.create_all
import sys import os import click from app import config from sqlmodel import Session from sqlmodel import create_engine from app.models.server import Server from sqlmodel import select API_ENVIRONMENT = os.environ.get("API_ENVIRONMENT", "Testing") settings = getattr(sys.modules[__name__].config, API_ENVIRONMENT) engine = create_engine(settings.DATABASE_URI) @click.group() @click.pass_context def main(): pass @main.command(name="settings") def get_settings(): """ Prints current API settings from $API_ENVIRONMENT. """ click.echo(settings) @main.group(name="import") def import_group(): pass @import_group.command(name="catagories") def import_catagories(): """ Commands for importing a database. """ import yaml from app.models.server import Catagory, ServerCatagoryLink print("Looking for catagories.yml") with
Session(engine)
sqlmodel.Session
import sys import os import click from app import config from sqlmodel import Session from sqlmodel import create_engine from app.models.server import Server from sqlmodel import select API_ENVIRONMENT = os.environ.get("API_ENVIRONMENT", "Testing") settings = getattr(sys.modules[__name__].config, API_ENVIRONMENT) engine = create_engine(settings.DATABASE_URI) @click.group() @click.pass_context def main(): pass @main.command(name="settings") def get_settings(): """ Prints current API settings from $API_ENVIRONMENT. """ click.echo(settings) @main.group(name="import") def import_group(): pass @import_group.command(name="catagories") def import_catagories(): """ Commands for importing a database. """ import yaml from app.models.server import Catagory, ServerCatagoryLink print("Looking for catagories.yml") with Session(engine) as session: with open("config.yml", "r") as stream: catagories = yaml.safe_load(stream) for name, data in catagories.get("catagories").items(): print(data) _catagory = Catagory( title = name, meta_ref = name.lower().replace(" ", "-"), color = f"#{data['color']}" ) session.add(_catagory) session.commit() session.refresh(_catagory) _query = select(Server).where(Server.domain_name.like(data['match'])) _result = session.exec(_query).all() for server in _result: _link = ServerCatagoryLink( server_id = server.id, catagory_id = _catagory.id ) session.add(_link) session.commit() @import_group.command(name="csv") def csv_file(file): """ Commands for importing a database. """ import csv with
Session(engine)
sqlmodel.Session
import sys import os import click from app import config from sqlmodel import Session from sqlmodel import create_engine from app.models.server import Server from sqlmodel import select API_ENVIRONMENT = os.environ.get("API_ENVIRONMENT", "Testing") settings = getattr(sys.modules[__name__].config, API_ENVIRONMENT) engine = create_engine(settings.DATABASE_URI) @click.group() @click.pass_context def main(): pass @main.command(name="settings") def get_settings(): """ Prints current API settings from $API_ENVIRONMENT. """ click.echo(settings) @main.group(name="import") def import_group(): pass @import_group.command(name="catagories") def import_catagories(): """ Commands for importing a database. """ import yaml from app.models.server import Catagory, ServerCatagoryLink print("Looking for catagories.yml") with Session(engine) as session: with open("config.yml", "r") as stream: catagories = yaml.safe_load(stream) for name, data in catagories.get("catagories").items(): print(data) _catagory = Catagory( title = name, meta_ref = name.lower().replace(" ", "-"), color = f"#{data['color']}" ) session.add(_catagory) session.commit() session.refresh(_catagory) _query = select(Server).where(Server.domain_name.like(data['match'])) _result = session.exec(_query).all() for server in _result: _link = ServerCatagoryLink( server_id = server.id, catagory_id = _catagory.id ) session.add(_link) session.commit() @import_group.command(name="csv") def csv_file(file): """ Commands for importing a database. """ import csv with Session(engine) as session: with open(file, "r") as stream: csv_reader = csv.DictReader(stream) line_count = 0 for row in csv_reader: if line_count == 0: line_count += 1 _server = Server( domain_name=row["Domain Name"], domain_type=row["Domain Type"], agency=row["Agency"], organization=row["Organization"], ) session.add(_server) session.commit() @import_group.command(name="file") @click.argument("file") def basic_file(file): with
Session(engine)
sqlmodel.Session
import sys import os import click from app import config from sqlmodel import Session from sqlmodel import create_engine from app.models.server import Server from sqlmodel import select API_ENVIRONMENT = os.environ.get("API_ENVIRONMENT", "Testing") settings = getattr(sys.modules[__name__].config, API_ENVIRONMENT) engine = create_engine(settings.DATABASE_URI) @click.group() @click.pass_context def main(): pass @main.command(name="settings") def get_settings(): """ Prints current API settings from $API_ENVIRONMENT. """ click.echo(settings) @main.group(name="import") def import_group(): pass @import_group.command(name="catagories") def import_catagories(): """ Commands for importing a database. """ import yaml from app.models.server import Catagory, ServerCatagoryLink print("Looking for catagories.yml") with Session(engine) as session: with open("config.yml", "r") as stream: catagories = yaml.safe_load(stream) for name, data in catagories.get("catagories").items(): print(data) _catagory = Catagory( title = name, meta_ref = name.lower().replace(" ", "-"), color = f"#{data['color']}" ) session.add(_catagory) session.commit() session.refresh(_catagory) _query =
select(Server)
sqlmodel.select
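The row above ends with a select(Server) statement that the CLI then narrows with .where(Server.domain_name.like(...)). A minimal, self-contained sketch of that query pattern, using an assumed stand-in Server model rather than the real one from app.models.server:

from typing import Optional

from sqlmodel import Field, Session, SQLModel, create_engine, select


class Server(SQLModel, table=True):
    # stand-in model with only the column the query needs
    id: Optional[int] = Field(default=None, primary_key=True)
    domain_name: str


engine = create_engine("sqlite://")
SQLModel.metadata.create_all(engine)

with Session(engine) as session:
    session.add(Server(domain_name="parks.example.gov"))
    session.commit()
    # LIKE pattern matching, as used for the category "match" rules above
    matches = session.exec(select(Server).where(Server.domain_name.like("%.gov"))).all()
    print([s.domain_name for s in matches])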
from sqlmodel import SQLModel, create_engine
from sqlalchemy.orm import sessionmaker
from sqlmodel.ext.asyncio.session import AsyncSession, AsyncEngine

from app.settings import Settings

settings = Settings()

engine = AsyncEngine(
create_engine(settings.ASYNC_DATABASE_URI, echo=True, future=True)
sqlmodel.create_engine
from datetime import datetime
from typing import Optional

from sqlmodel import Field, Enum, Column
from sqlmodel.main import SQLModel

from graphene_example.app.core.structures import TaskStatusEnum


class User(SQLModel, table=True):
    id: Optional[int] =
Field(default=None, primary_key=True)
sqlmodel.Field
from datetime import datetime
from typing import Optional

from sqlmodel import Field, Enum, Column
from sqlmodel.main import SQLModel

from graphene_example.app.core.structures import TaskStatusEnum


class User(SQLModel, table=True):
    id: Optional[int] = Field(default=None, primary_key=True)
    full_name: str
    email: Optional[str]
    hashed_password: str
    is_active: bool = True
    is_superuser: bool = True


class Task(SQLModel, table=True):
    id: Optional[int] =
Field(default=None, primary_key=True)
sqlmodel.Field
from datetime import datetime
from typing import Optional

from sqlmodel import Field, Enum, Column
from sqlmodel.main import SQLModel

from graphene_example.app.core.structures import TaskStatusEnum


class User(SQLModel, table=True):
    id: Optional[int] = Field(default=None, primary_key=True)
    full_name: str
    email: Optional[str]
    hashed_password: str
    is_active: bool = True
    is_superuser: bool = True


class Task(SQLModel, table=True):
    id: Optional[int] = Field(default=None, primary_key=True)
    created_at: datetime =
Field(default=datetime.utcnow)
sqlmodel.Field
from datetime import datetime
from typing import Optional

from sqlmodel import Field, Enum, Column
from sqlmodel.main import SQLModel

from graphene_example.app.core.structures import TaskStatusEnum


class User(SQLModel, table=True):
    id: Optional[int] = Field(default=None, primary_key=True)
    full_name: str
    email: Optional[str]
    hashed_password: str
    is_active: bool = True
    is_superuser: bool = True


class Task(SQLModel, table=True):
    id: Optional[int] = Field(default=None, primary_key=True)
    created_at: datetime = Field(default=datetime.utcnow)
    title: str
    status: TaskStatusEnum = Field(sa_column=Column(Enum(TaskStatusEnum)), default=TaskStatusEnum.draft)
    user_id: Optional[int] =
Field(default=None, foreign_key="user.id")
sqlmodel.Field
from datetime import datetime
from typing import Optional

from sqlmodel import Field, Enum, Column
from sqlmodel.main import SQLModel

from graphene_example.app.core.structures import TaskStatusEnum


class User(SQLModel, table=True):
    id: Optional[int] = Field(default=None, primary_key=True)
    full_name: str
    email: Optional[str]
    hashed_password: str
    is_active: bool = True
    is_superuser: bool = True


class Task(SQLModel, table=True):
    id: Optional[int] = Field(default=None, primary_key=True)
    created_at: datetime = Field(default=datetime.utcnow)
    title: str
    status: TaskStatusEnum = Field(sa_column=Column(
Enum(TaskStatusEnum)
sqlmodel.Enum
from typing import TYPE_CHECKING, List, Optional, Type from uuid import UUID from sqlalchemy import event from sqlalchemy.schema import Column, ForeignKey, UniqueConstraint from sqlmodel import Field, Relationship from sqlmodel.sql.sqltypes import GUID from joj.horse.models.base import DomainURLORMModel, url_pre_save from joj.horse.models.link_tables import ProblemProblemSetLink from joj.horse.schemas.problem import ProblemDetail, WithLatestRecordType from joj.horse.services.db import db_session if TYPE_CHECKING: from joj.horse.models import ( Domain, ProblemConfig, ProblemGroup, ProblemSet, Record, User, ) class Problem(DomainURLORMModel, ProblemDetail, table=True): # type: ignore[call-arg] __tablename__ = "problems" __table_args__ = (UniqueConstraint("domain_id", "url"),) domain_id: UUID = Field( sa_column=Column( GUID, ForeignKey("domains.id", ondelete="CASCADE"), nullable=False ) ) domain: "Domain" =
Relationship(back_populates="problems")
sqlmodel.Relationship
from typing import TYPE_CHECKING, List, Optional, Type from uuid import UUID from sqlalchemy import event from sqlalchemy.schema import Column, ForeignKey, UniqueConstraint from sqlmodel import Field, Relationship from sqlmodel.sql.sqltypes import GUID from joj.horse.models.base import DomainURLORMModel, url_pre_save from joj.horse.models.link_tables import ProblemProblemSetLink from joj.horse.schemas.problem import ProblemDetail, WithLatestRecordType from joj.horse.services.db import db_session if TYPE_CHECKING: from joj.horse.models import ( Domain, ProblemConfig, ProblemGroup, ProblemSet, Record, User, ) class Problem(DomainURLORMModel, ProblemDetail, table=True): # type: ignore[call-arg] __tablename__ = "problems" __table_args__ = (UniqueConstraint("domain_id", "url"),) domain_id: UUID = Field( sa_column=Column( GUID, ForeignKey("domains.id", ondelete="CASCADE"), nullable=False ) ) domain: "Domain" = Relationship(back_populates="problems") owner_id: Optional[UUID] = Field( sa_column=Column( GUID, ForeignKey("users.id", ondelete="SET NULL"), nullable=True ) ) owner: Optional["User"] =
Relationship(back_populates="owned_problems")
sqlmodel.Relationship
from typing import TYPE_CHECKING, List, Optional, Type from uuid import UUID from sqlalchemy import event from sqlalchemy.schema import Column, ForeignKey, UniqueConstraint from sqlmodel import Field, Relationship from sqlmodel.sql.sqltypes import GUID from joj.horse.models.base import DomainURLORMModel, url_pre_save from joj.horse.models.link_tables import ProblemProblemSetLink from joj.horse.schemas.problem import ProblemDetail, WithLatestRecordType from joj.horse.services.db import db_session if TYPE_CHECKING: from joj.horse.models import ( Domain, ProblemConfig, ProblemGroup, ProblemSet, Record, User, ) class Problem(DomainURLORMModel, ProblemDetail, table=True): # type: ignore[call-arg] __tablename__ = "problems" __table_args__ = (UniqueConstraint("domain_id", "url"),) domain_id: UUID = Field( sa_column=Column( GUID, ForeignKey("domains.id", ondelete="CASCADE"), nullable=False ) ) domain: "Domain" = Relationship(back_populates="problems") owner_id: Optional[UUID] = Field( sa_column=Column( GUID, ForeignKey("users.id", ondelete="SET NULL"), nullable=True ) ) owner: Optional["User"] = Relationship(back_populates="owned_problems") problem_group_id: Optional[UUID] = Field( sa_column=Column( GUID, ForeignKey("problem_groups.id", ondelete="SET NULL"), nullable=True ) ) problem_group: Optional["ProblemGroup"] =
Relationship(back_populates="problems")
sqlmodel.Relationship
from typing import TYPE_CHECKING, List, Optional, Type from uuid import UUID from sqlalchemy import event from sqlalchemy.schema import Column, ForeignKey, UniqueConstraint from sqlmodel import Field, Relationship from sqlmodel.sql.sqltypes import GUID from joj.horse.models.base import DomainURLORMModel, url_pre_save from joj.horse.models.link_tables import ProblemProblemSetLink from joj.horse.schemas.problem import ProblemDetail, WithLatestRecordType from joj.horse.services.db import db_session if TYPE_CHECKING: from joj.horse.models import ( Domain, ProblemConfig, ProblemGroup, ProblemSet, Record, User, ) class Problem(DomainURLORMModel, ProblemDetail, table=True): # type: ignore[call-arg] __tablename__ = "problems" __table_args__ = (UniqueConstraint("domain_id", "url"),) domain_id: UUID = Field( sa_column=Column( GUID, ForeignKey("domains.id", ondelete="CASCADE"), nullable=False ) ) domain: "Domain" = Relationship(back_populates="problems") owner_id: Optional[UUID] = Field( sa_column=Column( GUID, ForeignKey("users.id", ondelete="SET NULL"), nullable=True ) ) owner: Optional["User"] = Relationship(back_populates="owned_problems") problem_group_id: Optional[UUID] = Field( sa_column=Column( GUID, ForeignKey("problem_groups.id", ondelete="SET NULL"), nullable=True ) ) problem_group: Optional["ProblemGroup"] = Relationship(back_populates="problems") problem_sets: List["ProblemSet"] = Relationship( back_populates="problems", link_model=ProblemProblemSetLink, ) problem_problem_set_links: List[ProblemProblemSetLink] = Relationship( back_populates="problem", ) records: List["Record"] =
Relationship(back_populates="problem")
sqlmodel.Relationship
from typing import TYPE_CHECKING, List, Optional, Type from uuid import UUID from sqlalchemy import event from sqlalchemy.schema import Column, ForeignKey, UniqueConstraint from sqlmodel import Field, Relationship from sqlmodel.sql.sqltypes import GUID from joj.horse.models.base import DomainURLORMModel, url_pre_save from joj.horse.models.link_tables import ProblemProblemSetLink from joj.horse.schemas.problem import ProblemDetail, WithLatestRecordType from joj.horse.services.db import db_session if TYPE_CHECKING: from joj.horse.models import ( Domain, ProblemConfig, ProblemGroup, ProblemSet, Record, User, ) class Problem(DomainURLORMModel, ProblemDetail, table=True): # type: ignore[call-arg] __tablename__ = "problems" __table_args__ = (UniqueConstraint("domain_id", "url"),) domain_id: UUID = Field( sa_column=Column( GUID, ForeignKey("domains.id", ondelete="CASCADE"), nullable=False ) ) domain: "Domain" = Relationship(back_populates="problems") owner_id: Optional[UUID] = Field( sa_column=Column( GUID, ForeignKey("users.id", ondelete="SET NULL"), nullable=True ) ) owner: Optional["User"] = Relationship(back_populates="owned_problems") problem_group_id: Optional[UUID] = Field( sa_column=Column( GUID, ForeignKey("problem_groups.id", ondelete="SET NULL"), nullable=True ) ) problem_group: Optional["ProblemGroup"] = Relationship(back_populates="problems") problem_sets: List["ProblemSet"] = Relationship( back_populates="problems", link_model=ProblemProblemSetLink, ) problem_problem_set_links: List[ProblemProblemSetLink] = Relationship( back_populates="problem", ) records: List["Record"] = Relationship(back_populates="problem") problem_configs: List["ProblemConfig"] =
Relationship(back_populates="problem")
sqlmodel.Relationship
from datetime import date from typing import List, Optional from uuid import UUID from api.ecoindex.models.responses import ApiEcoindex from api.helper import new_uuid from api.models.enums import Version from ecoindex_scraper.models import Result from sqlalchemy import func from sqlalchemy.ext.asyncio.session import AsyncSession from sqlalchemy.sql.expression import asc from sqlmodel import select from db.helper import date_filter async def save_ecoindex_result_db( session: AsyncSession, ecoindex_result: Result, version: Optional[Version] = Version.v1, ) -> ApiEcoindex: ranking = await get_rank_analysis_db( ecoindex=ecoindex_result, session=session, version=version ) total_results = await get_count_analysis_db(session=session, version=version) db_ecoindex = ApiEcoindex( id=new_uuid(), date=ecoindex_result.date, url=ecoindex_result.url, host=ecoindex_result.url.host, width=ecoindex_result.width, height=ecoindex_result.height, size=ecoindex_result.size, nodes=ecoindex_result.nodes, requests=ecoindex_result.requests, grade=ecoindex_result.grade, score=ecoindex_result.score, ges=ecoindex_result.ges, water=ecoindex_result.water, page_type=ecoindex_result.page_type, version=version.get_version_number(), initial_ranking=ranking if ranking else total_results + 1, initial_total_results=total_results + 1, ) session.add(db_ecoindex) await session.commit() await session.refresh(db_ecoindex) return db_ecoindex async def get_count_analysis_db( session: AsyncSession, version: Optional[Version] = Version.v1, host: Optional[str] = None, date_from: Optional[date] = None, date_to: Optional[date] = None, ) -> int: statement = f"SELECT count(*) FROM apiecoindex WHERE version = {version.get_version_number()}" if host: statement += f" AND host = '{host}'" if date_from: statement += f" AND date >= '{date_from}'" if date_to: statement += f" AND date <= '{date_to}'" result = await session.execute(statement=statement) return result.scalar() async def get_rank_analysis_db( ecoindex: Result, session: AsyncSession, version: Optional[Version] = Version.v1 ) -> Optional[int]: result = await session.execute( ( "SELECT ranking FROM (" "SELECT *, ROW_NUMBER() OVER (ORDER BY score DESC) ranking " "FROM apiecoindex " f"WHERE version={version.get_version_number()} " "ORDER BY score DESC) t " f"WHERE score <= {ecoindex.score} " "LIMIT 1;" ) ) return result.scalar() async def get_ecoindex_result_list_db( session: AsyncSession, version: Optional[Version] = Version.v1, host: Optional[str] = None, date_from: Optional[date] = None, date_to: Optional[date] = None, page: Optional[int] = 1, size: Optional[int] = 50, ) -> List[ApiEcoindex]: statement = ( select(ApiEcoindex) .where(ApiEcoindex.version == version.get_version_number()) .offset((page - 1) * size) .limit(size) ) if host: statement = statement.where(ApiEcoindex.host == host) statement = date_filter(statement=statement, date_from=date_from, date_to=date_to) ecoindexes = await session.execute(statement.order_by(asc("date"))) return ecoindexes.scalars().all() async def get_ecoindex_result_by_id_db( session: AsyncSession, id: UUID, version: Optional[Version] = Version.v1 ) -> ApiEcoindex: statement = ( select(ApiEcoindex) .where(ApiEcoindex.id == id) .where(ApiEcoindex.version == version.get_version_number()) ) ecoindex = await session.execute(statement) return ecoindex.scalar_one_or_none() async def get_count_daily_request_per_host(session: AsyncSession, host: str) -> int: statement =
select(ApiEcoindex)
sqlmodel.select
from datetime import date from typing import List, Optional from uuid import UUID from api.ecoindex.models.responses import ApiEcoindex from api.helper import new_uuid from api.models.enums import Version from ecoindex_scraper.models import Result from sqlalchemy import func from sqlalchemy.ext.asyncio.session import AsyncSession from sqlalchemy.sql.expression import asc from sqlmodel import select from db.helper import date_filter async def save_ecoindex_result_db( session: AsyncSession, ecoindex_result: Result, version: Optional[Version] = Version.v1, ) -> ApiEcoindex: ranking = await get_rank_analysis_db( ecoindex=ecoindex_result, session=session, version=version ) total_results = await get_count_analysis_db(session=session, version=version) db_ecoindex = ApiEcoindex( id=new_uuid(), date=ecoindex_result.date, url=ecoindex_result.url, host=ecoindex_result.url.host, width=ecoindex_result.width, height=ecoindex_result.height, size=ecoindex_result.size, nodes=ecoindex_result.nodes, requests=ecoindex_result.requests, grade=ecoindex_result.grade, score=ecoindex_result.score, ges=ecoindex_result.ges, water=ecoindex_result.water, page_type=ecoindex_result.page_type, version=version.get_version_number(), initial_ranking=ranking if ranking else total_results + 1, initial_total_results=total_results + 1, ) session.add(db_ecoindex) await session.commit() await session.refresh(db_ecoindex) return db_ecoindex async def get_count_analysis_db( session: AsyncSession, version: Optional[Version] = Version.v1, host: Optional[str] = None, date_from: Optional[date] = None, date_to: Optional[date] = None, ) -> int: statement = f"SELECT count(*) FROM apiecoindex WHERE version = {version.get_version_number()}" if host: statement += f" AND host = '{host}'" if date_from: statement += f" AND date >= '{date_from}'" if date_to: statement += f" AND date <= '{date_to}'" result = await session.execute(statement=statement) return result.scalar() async def get_rank_analysis_db( ecoindex: Result, session: AsyncSession, version: Optional[Version] = Version.v1 ) -> Optional[int]: result = await session.execute( ( "SELECT ranking FROM (" "SELECT *, ROW_NUMBER() OVER (ORDER BY score DESC) ranking " "FROM apiecoindex " f"WHERE version={version.get_version_number()} " "ORDER BY score DESC) t " f"WHERE score <= {ecoindex.score} " "LIMIT 1;" ) ) return result.scalar() async def get_ecoindex_result_list_db( session: AsyncSession, version: Optional[Version] = Version.v1, host: Optional[str] = None, date_from: Optional[date] = None, date_to: Optional[date] = None, page: Optional[int] = 1, size: Optional[int] = 50, ) -> List[ApiEcoindex]: statement = ( select(ApiEcoindex) .where(ApiEcoindex.version == version.get_version_number()) .offset((page - 1) * size) .limit(size) ) if host: statement = statement.where(ApiEcoindex.host == host) statement = date_filter(statement=statement, date_from=date_from, date_to=date_to) ecoindexes = await session.execute(statement.order_by(asc("date"))) return ecoindexes.scalars().all() async def get_ecoindex_result_by_id_db( session: AsyncSession, id: UUID, version: Optional[Version] = Version.v1 ) -> ApiEcoindex: statement = (
select(ApiEcoindex)
sqlmodel.select
from datetime import date from typing import List, Optional from uuid import UUID from api.ecoindex.models.responses import ApiEcoindex from api.helper import new_uuid from api.models.enums import Version from ecoindex_scraper.models import Result from sqlalchemy import func from sqlalchemy.ext.asyncio.session import AsyncSession from sqlalchemy.sql.expression import asc from sqlmodel import select from db.helper import date_filter async def save_ecoindex_result_db( session: AsyncSession, ecoindex_result: Result, version: Optional[Version] = Version.v1, ) -> ApiEcoindex: ranking = await get_rank_analysis_db( ecoindex=ecoindex_result, session=session, version=version ) total_results = await get_count_analysis_db(session=session, version=version) db_ecoindex = ApiEcoindex( id=new_uuid(), date=ecoindex_result.date, url=ecoindex_result.url, host=ecoindex_result.url.host, width=ecoindex_result.width, height=ecoindex_result.height, size=ecoindex_result.size, nodes=ecoindex_result.nodes, requests=ecoindex_result.requests, grade=ecoindex_result.grade, score=ecoindex_result.score, ges=ecoindex_result.ges, water=ecoindex_result.water, page_type=ecoindex_result.page_type, version=version.get_version_number(), initial_ranking=ranking if ranking else total_results + 1, initial_total_results=total_results + 1, ) session.add(db_ecoindex) await session.commit() await session.refresh(db_ecoindex) return db_ecoindex async def get_count_analysis_db( session: AsyncSession, version: Optional[Version] = Version.v1, host: Optional[str] = None, date_from: Optional[date] = None, date_to: Optional[date] = None, ) -> int: statement = f"SELECT count(*) FROM apiecoindex WHERE version = {version.get_version_number()}" if host: statement += f" AND host = '{host}'" if date_from: statement += f" AND date >= '{date_from}'" if date_to: statement += f" AND date <= '{date_to}'" result = await session.execute(statement=statement) return result.scalar() async def get_rank_analysis_db( ecoindex: Result, session: AsyncSession, version: Optional[Version] = Version.v1 ) -> Optional[int]: result = await session.execute( ( "SELECT ranking FROM (" "SELECT *, ROW_NUMBER() OVER (ORDER BY score DESC) ranking " "FROM apiecoindex " f"WHERE version={version.get_version_number()} " "ORDER BY score DESC) t " f"WHERE score <= {ecoindex.score} " "LIMIT 1;" ) ) return result.scalar() async def get_ecoindex_result_list_db( session: AsyncSession, version: Optional[Version] = Version.v1, host: Optional[str] = None, date_from: Optional[date] = None, date_to: Optional[date] = None, page: Optional[int] = 1, size: Optional[int] = 50, ) -> List[ApiEcoindex]: statement = (
select(ApiEcoindex)
sqlmodel.select
from typing import TYPE_CHECKING, List, Optional

from sqlmodel import Field, Relationship, SQLModel

if TYPE_CHECKING:
    from .hero_model import Hero


class Team(SQLModel, table=True):
    id: Optional[int] =
Field(default=None, primary_key=True)
sqlmodel.Field
from typing import TYPE_CHECKING, List, Optional

from sqlmodel import Field, Relationship, SQLModel

if TYPE_CHECKING:
    from .hero_model import Hero


class Team(SQLModel, table=True):
    id: Optional[int] = Field(default=None, primary_key=True)
    name: str
    headquarters: str
    heroes: List["Hero"] =
Relationship(back_populates="team")
sqlmodel.Relationship
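The Team row above wires one side of a bidirectional link via Relationship(back_populates="team"). A minimal sketch of how such a pair is commonly completed and exercised, with an assumed Hero model standing in for the real one from .hero_model:

from typing import List, Optional

from sqlmodel import Field, Relationship, Session, SQLModel, create_engine


class Team(SQLModel, table=True):
    id: Optional[int] = Field(default=None, primary_key=True)
    name: str
    headquarters: str
    heroes: List["Hero"] = Relationship(back_populates="team")


class Hero(SQLModel, table=True):
    id: Optional[int] = Field(default=None, primary_key=True)
    name: str
    team_id: Optional[int] = Field(default=None, foreign_key="team.id")
    team: Optional[Team] = Relationship(back_populates="heroes")


engine = create_engine("sqlite://")
SQLModel.metadata.create_all(engine)

with Session(engine) as session:
    team = Team(name="Preventers", headquarters="Sharp Tower")
    session.add(Hero(name="Rusty-Man", team=team))  # assigning .team also populates team.heroes
    session.commit()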
from sqlalchemy import inspect
from sqlalchemy.engine.reflection import Inspector
from sqlmodel import create_engine


def test_tutorial001(clear_sqlmodel):
    from docs_src.tutorial.connect.create_tables import tutorial001 as mod

    mod.sqlite_url = "sqlite://"
    mod.engine =
create_engine(mod.sqlite_url)
sqlmodel.create_engine
"""seed schools Revision ID: 8d04b7943264 Revises: <PASSWORD> Create Date: 2022-04-18 00:38:38.618682+00:00 """ import json from os import getcwd from pathlib import Path import sqlalchemy as sa import sqlalchemy.sql as sql import sqlmodel from alembic import context, op # revision identifiers, used by Alembic. revision = "8d04b7943264" down_revision = "0<PASSWORD>" branch_labels = None depends_on = None # Ad-hoc schools table for bulk import schools_table = sql.table( "schools", sql.column("id", sa.String), sql.column("name", sa.String) ) def load_schools(): migrations_dir = Path(getcwd(), context.script.dir) schools_path = migrations_dir.joinpath("schools.json") return json.load(open(schools_path, "r")) def upgrade(): # Change schools.id to a string op.drop_constraint( "applications_school_id_fkey", "applications", type_="foreignkey" ) op.alter_column( "applications", "school_id", type_=
sqlmodel.sql.sqltypes.AutoString()
sqlmodel.sql.sqltypes.AutoString
"""seed schools Revision ID: 8d04b7943264 Revises: <PASSWORD> Create Date: 2022-04-18 00:38:38.618682+00:00 """ import json from os import getcwd from pathlib import Path import sqlalchemy as sa import sqlalchemy.sql as sql import sqlmodel from alembic import context, op # revision identifiers, used by Alembic. revision = "8d04b7943264" down_revision = "0<PASSWORD>" branch_labels = None depends_on = None # Ad-hoc schools table for bulk import schools_table = sql.table( "schools", sql.column("id", sa.String), sql.column("name", sa.String) ) def load_schools(): migrations_dir = Path(getcwd(), context.script.dir) schools_path = migrations_dir.joinpath("schools.json") return json.load(open(schools_path, "r")) def upgrade(): # Change schools.id to a string op.drop_constraint( "applications_school_id_fkey", "applications", type_="foreignkey" ) op.alter_column( "applications", "school_id", type_=sqlmodel.sql.sqltypes.AutoString(), nullable=False, ) op.alter_column( "schools", "id", type_=
sqlmodel.sql.sqltypes.AutoString()
sqlmodel.sql.sqltypes.AutoString
from sqlmodel import create_engine, SQLModel, Session

sqlite_file_name = "database.db"
sqlite_url = f"sqlite:///{sqlite_file_name}"

connect_args = {"check_same_thread": False}
engine =
create_engine(sqlite_url, echo=True, connect_args=connect_args)
sqlmodel.create_engine
from sqlmodel import create_engine, SQLModel, Session

sqlite_file_name = "database.db"
sqlite_url = f"sqlite:///{sqlite_file_name}"

connect_args = {"check_same_thread": False}
engine = create_engine(sqlite_url, echo=True, connect_args=connect_args)


def init_db():
SQLModel.metadata.create_all(engine)
sqlmodel.SQLModel.metadata.create_all
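The closing rows configure a SQLite engine with check_same_thread=False and create the schema in init_db(). A short sketch of how this setup is commonly rounded out with a session generator; the get_session helper is an assumption, not taken from the rows above:

from sqlmodel import Session, SQLModel, create_engine

sqlite_url = "sqlite:///database.db"
connect_args = {"check_same_thread": False}  # allow the SQLite connection to be used across threads
engine = create_engine(sqlite_url, echo=True, connect_args=connect_args)


def init_db() -> None:
    # create all tables registered on SQLModel.metadata
    SQLModel.metadata.create_all(engine)


def get_session():
    # one short-lived session per unit of work; closed automatically on exit
    with Session(engine) as session:
        yield session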