prompt
stringlengths 45
17.8k
| completion
stringlengths 6
107
| api
stringlengths 12
42
|
---|---|---|
from typing import TYPE_CHECKING, List, Optional
from sqlmodel import Field, Relationship, SQLModel
if TYPE_CHECKING:
from .hero import Hero
class TeamBase(SQLModel):
name: str
headquarters: str
class Team(TeamBase, table=True):
id: Optional[int] = Field(default=None, primary_key=True)
heroes: List["Hero"] = | Relationship(back_populates="team") | sqlmodel.Relationship |
from sqlalchemy.sql.schema import MetaData
from sqlmodel import Field, SQLModel
from datetime import datetime
from uuid import UUID, uuid4
class DbtLog(SQLModel, table=True):
"""
Table: __Dbt_Log
"""
__tablename__ = "__Dbt_Log"
Id: UUID = | Field(default_factory=uuid4, primary_key=True) | sqlmodel.Field |
from sqlalchemy.sql.schema import MetaData
from sqlmodel import Field, SQLModel
from datetime import datetime
from uuid import UUID, uuid4
class DbtLog(SQLModel, table=True):
"""
Table: __Dbt_Log
"""
__tablename__ = "__Dbt_Log"
Id: UUID = Field(default_factory=uuid4, primary_key=True)
TaskId: str = | Field(max_length=128) | sqlmodel.Field |
from sqlalchemy.sql.schema import MetaData
from sqlmodel import Field, SQLModel
from datetime import datetime
from uuid import UUID, uuid4
class DbtLog(SQLModel, table=True):
"""
Table: __Dbt_Log
"""
__tablename__ = "__Dbt_Log"
Id: UUID = Field(default_factory=uuid4, primary_key=True)
TaskId: str = Field(max_length=128)
Data: str = | Field(index=False) | sqlmodel.Field |
from sqlalchemy.sql.schema import MetaData
from sqlmodel import Field, SQLModel
from datetime import datetime
from uuid import UUID, uuid4
class DbtLog(SQLModel, table=True):
"""
Table: __Dbt_Log
"""
__tablename__ = "__Dbt_Log"
Id: UUID = Field(default_factory=uuid4, primary_key=True)
TaskId: str = Field(max_length=128)
Data: str = Field(index=False)
Timestamp: datetime = | Field(index=False, default_factory=datetime.utcnow) | sqlmodel.Field |
# Copyright 2021 Modelyst LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from itertools import chain
from json import dumps
from pathlib import Path
from typing import List
from uuid import UUID
import typer
from prettytable import ALL, PrettyTable
from sqlalchemy import update
from sqlmodel import Session, select
import dbgen.cli.styles as styles
from dbgen.cli.options import config_option, model_arg_option
from dbgen.cli.utils import test_connection, validate_model_str
from dbgen.configuration import initialize
from dbgen.core.metadata import ModelEntity
model_app = typer.Typer(name='model', no_args_is_help=True)
@model_app.command('list')
def list_models(config_file: Path = config_option, tags: List[str] = typer.Option(None, '-t')):
# Notify of config file
if config_file:
_, meta_conn = initialize(config_file)
test_connection(meta_conn)
meta_engine = meta_conn.get_engine()
tags = tags or []
statement = select( # type: ignore
ModelEntity.id,
ModelEntity.name,
ModelEntity.created_at,
ModelEntity.last_run,
ModelEntity.tags,
) # type: ignore
if tags:
statement = statement.where(ModelEntity.tags.op('&&')(tags)) # type: ignore
columns = ['id', 'name', 'created_at', 'last_run', 'tags']
table = PrettyTable(field_names=columns, align='l', hrules=ALL)
with | Session(meta_engine) | sqlmodel.Session |
# Copyright 2021 Modelyst LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from itertools import chain
from json import dumps
from pathlib import Path
from typing import List
from uuid import UUID
import typer
from prettytable import ALL, PrettyTable
from sqlalchemy import update
from sqlmodel import Session, select
import dbgen.cli.styles as styles
from dbgen.cli.options import config_option, model_arg_option
from dbgen.cli.utils import test_connection, validate_model_str
from dbgen.configuration import initialize
from dbgen.core.metadata import ModelEntity
model_app = typer.Typer(name='model', no_args_is_help=True)
@model_app.command('list')
def list_models(config_file: Path = config_option, tags: List[str] = typer.Option(None, '-t')):
# Notify of config file
if config_file:
_, meta_conn = initialize(config_file)
test_connection(meta_conn)
meta_engine = meta_conn.get_engine()
tags = tags or []
statement = select( # type: ignore
ModelEntity.id,
ModelEntity.name,
ModelEntity.created_at,
ModelEntity.last_run,
ModelEntity.tags,
) # type: ignore
if tags:
statement = statement.where(ModelEntity.tags.op('&&')(tags)) # type: ignore
columns = ['id', 'name', 'created_at', 'last_run', 'tags']
table = PrettyTable(field_names=columns, align='l', hrules=ALL)
with Session(meta_engine) as session:
result = session.exec(statement)
for model_id, model_name, created_at, last_run, tags in result:
table.add_row((model_id, model_name, created_at, last_run, tags))
styles.theme_typer_print(str(table))
@model_app.command('tag')
def tag(model_id: UUID, tags: List[str], config_file: Path = config_option):
# Notify of config file
if config_file:
_, meta_conn = initialize(config_file)
test_connection(meta_conn)
meta_engine = meta_conn.get_engine()
with | Session(meta_engine) | sqlmodel.Session |
# Copyright 2021 Modelyst LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from itertools import chain
from json import dumps
from pathlib import Path
from typing import List
from uuid import UUID
import typer
from prettytable import ALL, PrettyTable
from sqlalchemy import update
from sqlmodel import Session, select
import dbgen.cli.styles as styles
from dbgen.cli.options import config_option, model_arg_option
from dbgen.cli.utils import test_connection, validate_model_str
from dbgen.configuration import initialize
from dbgen.core.metadata import ModelEntity
model_app = typer.Typer(name='model', no_args_is_help=True)
@model_app.command('list')
def list_models(config_file: Path = config_option, tags: List[str] = typer.Option(None, '-t')):
# Notify of config file
if config_file:
_, meta_conn = initialize(config_file)
test_connection(meta_conn)
meta_engine = meta_conn.get_engine()
tags = tags or []
statement = select( # type: ignore
ModelEntity.id,
ModelEntity.name,
ModelEntity.created_at,
ModelEntity.last_run,
ModelEntity.tags,
) # type: ignore
if tags:
statement = statement.where(ModelEntity.tags.op('&&')(tags)) # type: ignore
columns = ['id', 'name', 'created_at', 'last_run', 'tags']
table = PrettyTable(field_names=columns, align='l', hrules=ALL)
with Session(meta_engine) as session:
result = session.exec(statement)
for model_id, model_name, created_at, last_run, tags in result:
table.add_row((model_id, model_name, created_at, last_run, tags))
styles.theme_typer_print(str(table))
@model_app.command('tag')
def tag(model_id: UUID, tags: List[str], config_file: Path = config_option):
# Notify of config file
if config_file:
_, meta_conn = initialize(config_file)
test_connection(meta_conn)
meta_engine = meta_conn.get_engine()
with Session(meta_engine) as session:
existing_tags = session.exec(select(ModelEntity.tags).where(ModelEntity.id == model_id)).one_or_none()
if existing_tags is None:
raise typer.BadParameter(f"Invalid model_id, no model with ID {model_id}")
new_tags = set(chain(existing_tags, tags))
session.execute(update(ModelEntity).values(tags=new_tags).where(ModelEntity.id == model_id))
session.commit()
@model_app.command('serialize')
def model_serialize(
model_str: str = model_arg_option,
out_file: Path = typer.Option(
None, '-o', '--out', help='Path to write the serialized model to in json format'
),
config_file: Path = config_option,
):
model = validate_model_str(model_str)
# Notify of config file
if config_file:
_, meta_conn = initialize(config_file)
test_connection(meta_conn)
meta_engine = meta_conn.get_engine()
with | Session(meta_engine) | sqlmodel.Session |
# Copyright 2021 Modelyst LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from itertools import chain
from json import dumps
from pathlib import Path
from typing import List
from uuid import UUID
import typer
from prettytable import ALL, PrettyTable
from sqlalchemy import update
from sqlmodel import Session, select
import dbgen.cli.styles as styles
from dbgen.cli.options import config_option, model_arg_option
from dbgen.cli.utils import test_connection, validate_model_str
from dbgen.configuration import initialize
from dbgen.core.metadata import ModelEntity
model_app = typer.Typer(name='model', no_args_is_help=True)
@model_app.command('list')
def list_models(config_file: Path = config_option, tags: List[str] = typer.Option(None, '-t')):
# Notify of config file
if config_file:
_, meta_conn = initialize(config_file)
test_connection(meta_conn)
meta_engine = meta_conn.get_engine()
tags = tags or []
statement = select( # type: ignore
ModelEntity.id,
ModelEntity.name,
ModelEntity.created_at,
ModelEntity.last_run,
ModelEntity.tags,
) # type: ignore
if tags:
statement = statement.where(ModelEntity.tags.op('&&')(tags)) # type: ignore
columns = ['id', 'name', 'created_at', 'last_run', 'tags']
table = PrettyTable(field_names=columns, align='l', hrules=ALL)
with Session(meta_engine) as session:
result = session.exec(statement)
for model_id, model_name, created_at, last_run, tags in result:
table.add_row((model_id, model_name, created_at, last_run, tags))
styles.theme_typer_print(str(table))
@model_app.command('tag')
def tag(model_id: UUID, tags: List[str], config_file: Path = config_option):
# Notify of config file
if config_file:
_, meta_conn = initialize(config_file)
test_connection(meta_conn)
meta_engine = meta_conn.get_engine()
with Session(meta_engine) as session:
existing_tags = session.exec(select(ModelEntity.tags).where(ModelEntity.id == model_id)).one_or_none()
if existing_tags is None:
raise typer.BadParameter(f"Invalid model_id, no model with ID {model_id}")
new_tags = set(chain(existing_tags, tags))
session.execute(update(ModelEntity).values(tags=new_tags).where(ModelEntity.id == model_id))
session.commit()
@model_app.command('serialize')
def model_serialize(
model_str: str = model_arg_option,
out_file: Path = typer.Option(
None, '-o', '--out', help='Path to write the serialized model to in json format'
),
config_file: Path = config_option,
):
model = validate_model_str(model_str)
# Notify of config file
if config_file:
_, meta_conn = initialize(config_file)
test_connection(meta_conn)
meta_engine = meta_conn.get_engine()
with Session(meta_engine) as session:
model_row = model._get_model_row()
# Check for existing row and if found grab its created_at
created_at = session.exec(
select(ModelEntity.created_at).where(ModelEntity.id == model.uuid)
).one_or_none()
if created_at is None:
session.merge(model_row)
session.commit()
styles.good_typer_print(f"Loaded model {model.name!r} into the database with ID {model.uuid}")
else:
styles.good_typer_print(f"Model {model.name!r} already existed with ID {model.uuid}")
if out_file:
out_file.write_text(dumps(model_row.graph_json))
styles.good_typer_print(f"Wrote serialized graph to {out_file}")
@model_app.command('export')
def model_export(
model_id: UUID,
out_file: Path = typer.Option(
'model.json', '-o', '--out', help='Path to write the serialized model to in json format'
),
config_file: Path = config_option,
):
# Notify of config file
if config_file:
_, meta_conn = initialize(config_file)
test_connection(meta_conn)
meta_engine = meta_conn.get_engine()
with | Session(meta_engine) | sqlmodel.Session |
# Copyright 2021 Modelyst LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from itertools import chain
from json import dumps
from pathlib import Path
from typing import List
from uuid import UUID
import typer
from prettytable import ALL, PrettyTable
from sqlalchemy import update
from sqlmodel import Session, select
import dbgen.cli.styles as styles
from dbgen.cli.options import config_option, model_arg_option
from dbgen.cli.utils import test_connection, validate_model_str
from dbgen.configuration import initialize
from dbgen.core.metadata import ModelEntity
model_app = typer.Typer(name='model', no_args_is_help=True)
@model_app.command('list')
def list_models(config_file: Path = config_option, tags: List[str] = typer.Option(None, '-t')):
# Notify of config file
if config_file:
_, meta_conn = initialize(config_file)
test_connection(meta_conn)
meta_engine = meta_conn.get_engine()
tags = tags or []
statement = select( # type: ignore
ModelEntity.id,
ModelEntity.name,
ModelEntity.created_at,
ModelEntity.last_run,
ModelEntity.tags,
) # type: ignore
if tags:
statement = statement.where(ModelEntity.tags.op('&&')(tags)) # type: ignore
columns = ['id', 'name', 'created_at', 'last_run', 'tags']
table = PrettyTable(field_names=columns, align='l', hrules=ALL)
with Session(meta_engine) as session:
result = session.exec(statement)
for model_id, model_name, created_at, last_run, tags in result:
table.add_row((model_id, model_name, created_at, last_run, tags))
styles.theme_typer_print(str(table))
@model_app.command('tag')
def tag(model_id: UUID, tags: List[str], config_file: Path = config_option):
# Notify of config file
if config_file:
_, meta_conn = initialize(config_file)
test_connection(meta_conn)
meta_engine = meta_conn.get_engine()
with Session(meta_engine) as session:
existing_tags = session.exec( | select(ModelEntity.tags) | sqlmodel.select |
# Copyright 2021 Modelyst LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from itertools import chain
from json import dumps
from pathlib import Path
from typing import List
from uuid import UUID
import typer
from prettytable import ALL, PrettyTable
from sqlalchemy import update
from sqlmodel import Session, select
import dbgen.cli.styles as styles
from dbgen.cli.options import config_option, model_arg_option
from dbgen.cli.utils import test_connection, validate_model_str
from dbgen.configuration import initialize
from dbgen.core.metadata import ModelEntity
model_app = typer.Typer(name='model', no_args_is_help=True)
@model_app.command('list')
def list_models(config_file: Path = config_option, tags: List[str] = typer.Option(None, '-t')):
# Notify of config file
if config_file:
_, meta_conn = initialize(config_file)
test_connection(meta_conn)
meta_engine = meta_conn.get_engine()
tags = tags or []
statement = select( # type: ignore
ModelEntity.id,
ModelEntity.name,
ModelEntity.created_at,
ModelEntity.last_run,
ModelEntity.tags,
) # type: ignore
if tags:
statement = statement.where(ModelEntity.tags.op('&&')(tags)) # type: ignore
columns = ['id', 'name', 'created_at', 'last_run', 'tags']
table = PrettyTable(field_names=columns, align='l', hrules=ALL)
with Session(meta_engine) as session:
result = session.exec(statement)
for model_id, model_name, created_at, last_run, tags in result:
table.add_row((model_id, model_name, created_at, last_run, tags))
styles.theme_typer_print(str(table))
@model_app.command('tag')
def tag(model_id: UUID, tags: List[str], config_file: Path = config_option):
# Notify of config file
if config_file:
_, meta_conn = initialize(config_file)
test_connection(meta_conn)
meta_engine = meta_conn.get_engine()
with Session(meta_engine) as session:
existing_tags = session.exec(select(ModelEntity.tags).where(ModelEntity.id == model_id)).one_or_none()
if existing_tags is None:
raise typer.BadParameter(f"Invalid model_id, no model with ID {model_id}")
new_tags = set(chain(existing_tags, tags))
session.execute(update(ModelEntity).values(tags=new_tags).where(ModelEntity.id == model_id))
session.commit()
@model_app.command('serialize')
def model_serialize(
model_str: str = model_arg_option,
out_file: Path = typer.Option(
None, '-o', '--out', help='Path to write the serialized model to in json format'
),
config_file: Path = config_option,
):
model = validate_model_str(model_str)
# Notify of config file
if config_file:
_, meta_conn = initialize(config_file)
test_connection(meta_conn)
meta_engine = meta_conn.get_engine()
with Session(meta_engine) as session:
model_row = model._get_model_row()
# Check for existing row and if found grab its created_at
created_at = session.exec(
| select(ModelEntity.created_at) | sqlmodel.select |
# Copyright 2021 Modelyst LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from itertools import chain
from json import dumps
from pathlib import Path
from typing import List
from uuid import UUID
import typer
from prettytable import ALL, PrettyTable
from sqlalchemy import update
from sqlmodel import Session, select
import dbgen.cli.styles as styles
from dbgen.cli.options import config_option, model_arg_option
from dbgen.cli.utils import test_connection, validate_model_str
from dbgen.configuration import initialize
from dbgen.core.metadata import ModelEntity
model_app = typer.Typer(name='model', no_args_is_help=True)
@model_app.command('list')
def list_models(config_file: Path = config_option, tags: List[str] = typer.Option(None, '-t')):
# Notify of config file
if config_file:
_, meta_conn = initialize(config_file)
test_connection(meta_conn)
meta_engine = meta_conn.get_engine()
tags = tags or []
statement = select( # type: ignore
ModelEntity.id,
ModelEntity.name,
ModelEntity.created_at,
ModelEntity.last_run,
ModelEntity.tags,
) # type: ignore
if tags:
statement = statement.where(ModelEntity.tags.op('&&')(tags)) # type: ignore
columns = ['id', 'name', 'created_at', 'last_run', 'tags']
table = PrettyTable(field_names=columns, align='l', hrules=ALL)
with Session(meta_engine) as session:
result = session.exec(statement)
for model_id, model_name, created_at, last_run, tags in result:
table.add_row((model_id, model_name, created_at, last_run, tags))
styles.theme_typer_print(str(table))
@model_app.command('tag')
def tag(model_id: UUID, tags: List[str], config_file: Path = config_option):
# Notify of config file
if config_file:
_, meta_conn = initialize(config_file)
test_connection(meta_conn)
meta_engine = meta_conn.get_engine()
with Session(meta_engine) as session:
existing_tags = session.exec(select(ModelEntity.tags).where(ModelEntity.id == model_id)).one_or_none()
if existing_tags is None:
raise typer.BadParameter(f"Invalid model_id, no model with ID {model_id}")
new_tags = set(chain(existing_tags, tags))
session.execute(update(ModelEntity).values(tags=new_tags).where(ModelEntity.id == model_id))
session.commit()
@model_app.command('serialize')
def model_serialize(
model_str: str = model_arg_option,
out_file: Path = typer.Option(
None, '-o', '--out', help='Path to write the serialized model to in json format'
),
config_file: Path = config_option,
):
model = validate_model_str(model_str)
# Notify of config file
if config_file:
_, meta_conn = initialize(config_file)
test_connection(meta_conn)
meta_engine = meta_conn.get_engine()
with Session(meta_engine) as session:
model_row = model._get_model_row()
# Check for existing row and if found grab its created_at
created_at = session.exec(
select(ModelEntity.created_at).where(ModelEntity.id == model.uuid)
).one_or_none()
if created_at is None:
session.merge(model_row)
session.commit()
styles.good_typer_print(f"Loaded model {model.name!r} into the database with ID {model.uuid}")
else:
styles.good_typer_print(f"Model {model.name!r} already existed with ID {model.uuid}")
if out_file:
out_file.write_text(dumps(model_row.graph_json))
styles.good_typer_print(f"Wrote serialized graph to {out_file}")
@model_app.command('export')
def model_export(
model_id: UUID,
out_file: Path = typer.Option(
'model.json', '-o', '--out', help='Path to write the serialized model to in json format'
),
config_file: Path = config_option,
):
# Notify of config file
if config_file:
_, meta_conn = initialize(config_file)
test_connection(meta_conn)
meta_engine = meta_conn.get_engine()
with Session(meta_engine) as session:
# Check for existing row and if found grab its created_at
graph_json = session.exec(
| select(ModelEntity.graph_json) | sqlmodel.select |
from select import select
from app.schemas.common import (
IGetResponseBase,
IPostResponseBase,
IDeleteResponseBase,
)
from app.utils.zeroshot_nlp import analyze_text
from app.schemas.zeroshot_inference import (
ZeroShotInferenceCreate,
ZeroShotInferenceRead,
)
from fastapi_pagination import Page, Params
from sqlmodel.ext.asyncio.session import AsyncSession
from fastapi import APIRouter, Depends, HTTPException, Query
from app.api import deps
from app import crud
from app.models import ZeroShotInference
from app.models import ZeroShotInferenceBase
from app.models.user import User
from sqlmodel import select
router = APIRouter()
@router.get(
"/zero-shot-classification-inferences/",
response_model=IGetResponseBase[Page[ZeroShotInference]],
)
async def get_zero_shot_classification_inferences(
params: Params = Depends(),
db_session: AsyncSession = Depends(deps.get_db),
current_user: User = Depends(deps.get_current_active_user),
):
inferences = await crud.zeroshot_inference.get_multi_paginated(
db_session, params=params
)
return IGetResponseBase[Page[ZeroShotInference]](data=inferences)
@router.get(
"/zero-shot-classification-inferences/order_by_created_at/",
response_model=IGetResponseBase[Page[ZeroShotInference]],
)
async def zero_shot_classification_inferences_order_by_created_at(
params: Params = Depends(),
db_session: AsyncSession = Depends(deps.get_db),
current_user: User = Depends(deps.get_current_active_user),
):
query = | select(ZeroShotInference) | sqlmodel.select |
from datetime import date
from typing import List, Optional
from api.ecoindex.models.responses import ApiEcoindex
from api.models.enums import Version
from sqlalchemy.ext.asyncio.session import AsyncSession
from sqlmodel import select
from db.helper import date_filter
async def get_host_list_db(
session: AsyncSession,
version: Optional[Version] = Version.v1,
q: Optional[str] = None,
date_from: Optional[date] = None,
date_to: Optional[date] = None,
) -> List[str]:
statement = | select(ApiEcoindex.host) | sqlmodel.select |
from datetime import datetime
import dateutil.parser
import json
import requests
from requests.models import to_key_val_list
from sqlmodel import Field, Session, SQLModel, create_engine, select
from fastapi.logger import logger
from database import engine
from models import Listing, Facility, Image, InterestPoint, Route, RouteCreate, PlaceNearby
def get_daft_search_result():
try:
response = requests.get('http://daft:8000/search_result/')
response.raise_for_status()
# Additional code will only run if the request is successful
except requests.exceptions.HTTPError as error:
print(error)
return response.json()
def get_daft_details(url):
try:
print(url)
params = {
'url': url,
'method': 'json_details',
}
response = requests.get(
'http://daft:8000/listing_details/', params=params)
response.raise_for_status()
return response.json()
# Additional code will only run if the request is successful
except requests.exceptions.HTTPError as error:
logger.error(error)
return response.json()
def get_routes_json(from_lat, from_long, to_lat, to_long):
try:
data = {
"from_point": {"lat": from_lat, "long": from_long},
"to_point": {"lat": to_lat, "long": to_long}
}
response = requests.post(
'http://location:8000/route/', data=json.dumps(data))
response.raise_for_status()
return response.json()
# Additional code will only run if the request is successful
except requests.exceptions.HTTPError as error:
logger.error(error)
return {}
def get_routes(listing: Listing):
ret_ = []
with | Session(engine) | sqlmodel.Session |
from datetime import datetime
import dateutil.parser
import json
import requests
from requests.models import to_key_val_list
from sqlmodel import Field, Session, SQLModel, create_engine, select
from fastapi.logger import logger
from database import engine
from models import Listing, Facility, Image, InterestPoint, Route, RouteCreate, PlaceNearby
def get_daft_search_result():
try:
response = requests.get('http://daft:8000/search_result/')
response.raise_for_status()
# Additional code will only run if the request is successful
except requests.exceptions.HTTPError as error:
print(error)
return response.json()
def get_daft_details(url):
try:
print(url)
params = {
'url': url,
'method': 'json_details',
}
response = requests.get(
'http://daft:8000/listing_details/', params=params)
response.raise_for_status()
return response.json()
# Additional code will only run if the request is successful
except requests.exceptions.HTTPError as error:
logger.error(error)
return response.json()
def get_routes_json(from_lat, from_long, to_lat, to_long):
try:
data = {
"from_point": {"lat": from_lat, "long": from_long},
"to_point": {"lat": to_lat, "long": to_long}
}
response = requests.post(
'http://location:8000/route/', data=json.dumps(data))
response.raise_for_status()
return response.json()
# Additional code will only run if the request is successful
except requests.exceptions.HTTPError as error:
logger.error(error)
return {}
def get_routes(listing: Listing):
ret_ = []
with Session(engine) as session:
interest_points_sttm = select(InterestPoint).\
where(InterestPoint.is_active == True)
interest_points = session.exec(interest_points_sttm).all()
for interest_point in interest_points:
routes = get_routes_json(
listing.latitude, listing.longitude,
interest_point.latitude, interest_point.longitude)
print('routes')
print(routes)
for route in routes:
ret_.append(Route(
interest_point_id=interest_point.id,
waking_distance=route['waking_distance'],
total_distance=route['total_distance'],
total_time=route['total_time'],
public_transport_count=route['public_transport_count'],
))
print(ret_)
return ret_
def get_places_nearby_json(from_lat, from_long, query):
try:
data = {"lat": from_lat, "long": from_long}
response = requests.post(
'http://location:8000/interest_places_nearby/', data=json.dumps(data))
response.raise_for_status()
# Additional code will only run if the request is successful
except requests.exceptions.HTTPError as error:
print(error)
return response.json()
def get_places_nearby(listing: Listing):
ret_ = []
query = 'Grocery'
places = get_places_nearby_json(
from_lat=listing.latitude, from_long=listing.longitude,
query=query)
for place in places:
ret_.append(PlaceNearby(
name=place['name'],
latitude=place['lat'],
longitude=place['long'],
address=place['address'],
distance=place['distance'],
website=place['website'],
website_domain=place['website_domain'],
chain_name=place['chain_name'],
query=query,
))
return ret_
def save_new_listing(search_result, listing_d):
with | Session(engine) | sqlmodel.Session |
from datetime import datetime
import dateutil.parser
import json
import requests
from requests.models import to_key_val_list
from sqlmodel import Field, Session, SQLModel, create_engine, select
from fastapi.logger import logger
from database import engine
from models import Listing, Facility, Image, InterestPoint, Route, RouteCreate, PlaceNearby
def get_daft_search_result():
try:
response = requests.get('http://daft:8000/search_result/')
response.raise_for_status()
# Additional code will only run if the request is successful
except requests.exceptions.HTTPError as error:
print(error)
return response.json()
def get_daft_details(url):
try:
print(url)
params = {
'url': url,
'method': 'json_details',
}
response = requests.get(
'http://daft:8000/listing_details/', params=params)
response.raise_for_status()
return response.json()
# Additional code will only run if the request is successful
except requests.exceptions.HTTPError as error:
logger.error(error)
return response.json()
def get_routes_json(from_lat, from_long, to_lat, to_long):
try:
data = {
"from_point": {"lat": from_lat, "long": from_long},
"to_point": {"lat": to_lat, "long": to_long}
}
response = requests.post(
'http://location:8000/route/', data=json.dumps(data))
response.raise_for_status()
return response.json()
# Additional code will only run if the request is successful
except requests.exceptions.HTTPError as error:
logger.error(error)
return {}
def get_routes(listing: Listing):
ret_ = []
with Session(engine) as session:
interest_points_sttm = select(InterestPoint).\
where(InterestPoint.is_active == True)
interest_points = session.exec(interest_points_sttm).all()
for interest_point in interest_points:
routes = get_routes_json(
listing.latitude, listing.longitude,
interest_point.latitude, interest_point.longitude)
print('routes')
print(routes)
for route in routes:
ret_.append(Route(
interest_point_id=interest_point.id,
waking_distance=route['waking_distance'],
total_distance=route['total_distance'],
total_time=route['total_time'],
public_transport_count=route['public_transport_count'],
))
print(ret_)
return ret_
def get_places_nearby_json(from_lat, from_long, query):
try:
data = {"lat": from_lat, "long": from_long}
response = requests.post(
'http://location:8000/interest_places_nearby/', data=json.dumps(data))
response.raise_for_status()
# Additional code will only run if the request is successful
except requests.exceptions.HTTPError as error:
print(error)
return response.json()
def get_places_nearby(listing: Listing):
ret_ = []
query = 'Grocery'
places = get_places_nearby_json(
from_lat=listing.latitude, from_long=listing.longitude,
query=query)
for place in places:
ret_.append(PlaceNearby(
name=place['name'],
latitude=place['lat'],
longitude=place['long'],
address=place['address'],
distance=place['distance'],
website=place['website'],
website_domain=place['website_domain'],
chain_name=place['chain_name'],
query=query,
))
return ret_
def save_new_listing(search_result, listing_d):
with Session(engine) as session:
listing = Listing()
# Search Result
listing.source = 'daft'
listing.is_active = True
listing.url = search_result['url']
listing.address = search_result['title']
listing.price = search_result['monthly_price']
listing.latitude = search_result['latitude']
listing.longitude = search_result['longitude']
listing.publish_date = dateutil.parser.isoparse(
search_result['publish_date'])
# Details:
listing.source_id = listing_d['id']
listing.source_code = listing_d['daftShortcode']
listing.title = listing_d['title']
listing.bedrooms = listing_d['numBedrooms']
listing.bathrooms = listing_d['numBathrooms']
listing.description = listing_d['description']
listing.last_updated = listing_d['lastUpdateDate']
listing.images_count = listing_d['totalImages']
listing.views = listing_d['listingViews']
facilities_arr = []
for facility in listing_d['facilities']:
facility_sttm = select(Facility).\
where(Facility.name == facility.title()).\
where(Facility.category == 'facilities')
facility_obj = session.exec(facility_sttm).first()
if(not facility_obj):
facility_obj = Facility(
name=facility.title(),
category='facilities'
)
facilities_arr.append(facility_obj)
for facility in listing_d['propertyOverview']:
facility_sttm = select(Facility).\
where(Facility.name == facility.title()).\
where(Facility.category == 'overview')
facility_obj = session.exec(facility_sttm).first()
if(not facility_obj):
facility_obj = Facility(
name=facility.title(),
category='overview'
)
facilities_arr.append(facility_obj)
listing.facilities = facilities_arr
listing.images = [Image(url=x['url'], url_600=x['url_600']) for x in listing_d['images']]
listing.routes = get_routes(listing)
listing.places_nearby = get_places_nearby(listing)
# Saving it
session.add(listing)
session.commit()
def give_it_a_try(how_many = 25):
ret_ = {}
daft_search_results = get_daft_search_result()
daft_result_list = daft_search_results['result_list']
c = 0
details = []
with | Session(engine) | sqlmodel.Session |
from datetime import datetime
import dateutil.parser
import json
import requests
from requests.models import to_key_val_list
from sqlmodel import Field, Session, SQLModel, create_engine, select
from fastapi.logger import logger
from database import engine
from models import Listing, Facility, Image, InterestPoint, Route, RouteCreate, PlaceNearby
def get_daft_search_result():
try:
response = requests.get('http://daft:8000/search_result/')
response.raise_for_status()
# Additional code will only run if the request is successful
except requests.exceptions.HTTPError as error:
print(error)
return response.json()
def get_daft_details(url):
try:
print(url)
params = {
'url': url,
'method': 'json_details',
}
response = requests.get(
'http://daft:8000/listing_details/', params=params)
response.raise_for_status()
return response.json()
# Additional code will only run if the request is successful
except requests.exceptions.HTTPError as error:
logger.error(error)
return response.json()
def get_routes_json(from_lat, from_long, to_lat, to_long):
try:
data = {
"from_point": {"lat": from_lat, "long": from_long},
"to_point": {"lat": to_lat, "long": to_long}
}
response = requests.post(
'http://location:8000/route/', data=json.dumps(data))
response.raise_for_status()
return response.json()
# Additional code will only run if the request is successful
except requests.exceptions.HTTPError as error:
logger.error(error)
return {}
def get_routes(listing: Listing):
ret_ = []
with Session(engine) as session:
interest_points_sttm = | select(InterestPoint) | sqlmodel.select |
from datetime import datetime
import dateutil.parser
import json
import requests
from requests.models import to_key_val_list
from sqlmodel import Field, Session, SQLModel, create_engine, select
from fastapi.logger import logger
from database import engine
from models import Listing, Facility, Image, InterestPoint, Route, RouteCreate, PlaceNearby
def get_daft_search_result():
try:
response = requests.get('http://daft:8000/search_result/')
response.raise_for_status()
# Additional code will only run if the request is successful
except requests.exceptions.HTTPError as error:
print(error)
return response.json()
def get_daft_details(url):
try:
print(url)
params = {
'url': url,
'method': 'json_details',
}
response = requests.get(
'http://daft:8000/listing_details/', params=params)
response.raise_for_status()
return response.json()
# Additional code will only run if the request is successful
except requests.exceptions.HTTPError as error:
logger.error(error)
return response.json()
def get_routes_json(from_lat, from_long, to_lat, to_long):
try:
data = {
"from_point": {"lat": from_lat, "long": from_long},
"to_point": {"lat": to_lat, "long": to_long}
}
response = requests.post(
'http://location:8000/route/', data=json.dumps(data))
response.raise_for_status()
return response.json()
# Additional code will only run if the request is successful
except requests.exceptions.HTTPError as error:
logger.error(error)
return {}
def get_routes(listing: Listing):
ret_ = []
with Session(engine) as session:
interest_points_sttm = select(InterestPoint).\
where(InterestPoint.is_active == True)
interest_points = session.exec(interest_points_sttm).all()
for interest_point in interest_points:
routes = get_routes_json(
listing.latitude, listing.longitude,
interest_point.latitude, interest_point.longitude)
print('routes')
print(routes)
for route in routes:
ret_.append(Route(
interest_point_id=interest_point.id,
waking_distance=route['waking_distance'],
total_distance=route['total_distance'],
total_time=route['total_time'],
public_transport_count=route['public_transport_count'],
))
print(ret_)
return ret_
def get_places_nearby_json(from_lat, from_long, query):
try:
data = {"lat": from_lat, "long": from_long}
response = requests.post(
'http://location:8000/interest_places_nearby/', data=json.dumps(data))
response.raise_for_status()
# Additional code will only run if the request is successful
except requests.exceptions.HTTPError as error:
print(error)
return response.json()
def get_places_nearby(listing: Listing):
ret_ = []
query = 'Grocery'
places = get_places_nearby_json(
from_lat=listing.latitude, from_long=listing.longitude,
query=query)
for place in places:
ret_.append(PlaceNearby(
name=place['name'],
latitude=place['lat'],
longitude=place['long'],
address=place['address'],
distance=place['distance'],
website=place['website'],
website_domain=place['website_domain'],
chain_name=place['chain_name'],
query=query,
))
return ret_
def save_new_listing(search_result, listing_d):
with Session(engine) as session:
listing = Listing()
# Search Result
listing.source = 'daft'
listing.is_active = True
listing.url = search_result['url']
listing.address = search_result['title']
listing.price = search_result['monthly_price']
listing.latitude = search_result['latitude']
listing.longitude = search_result['longitude']
listing.publish_date = dateutil.parser.isoparse(
search_result['publish_date'])
# Details:
listing.source_id = listing_d['id']
listing.source_code = listing_d['daftShortcode']
listing.title = listing_d['title']
listing.bedrooms = listing_d['numBedrooms']
listing.bathrooms = listing_d['numBathrooms']
listing.description = listing_d['description']
listing.last_updated = listing_d['lastUpdateDate']
listing.images_count = listing_d['totalImages']
listing.views = listing_d['listingViews']
facilities_arr = []
for facility in listing_d['facilities']:
facility_sttm = | select(Facility) | sqlmodel.select |
from datetime import datetime
import dateutil.parser
import json
import requests
from requests.models import to_key_val_list
from sqlmodel import Field, Session, SQLModel, create_engine, select
from fastapi.logger import logger
from database import engine
from models import Listing, Facility, Image, InterestPoint, Route, RouteCreate, PlaceNearby
def get_daft_search_result():
try:
response = requests.get('http://daft:8000/search_result/')
response.raise_for_status()
# Additional code will only run if the request is successful
except requests.exceptions.HTTPError as error:
print(error)
return response.json()
def get_daft_details(url):
try:
print(url)
params = {
'url': url,
'method': 'json_details',
}
response = requests.get(
'http://daft:8000/listing_details/', params=params)
response.raise_for_status()
return response.json()
# Additional code will only run if the request is successful
except requests.exceptions.HTTPError as error:
logger.error(error)
return response.json()
def get_routes_json(from_lat, from_long, to_lat, to_long):
try:
data = {
"from_point": {"lat": from_lat, "long": from_long},
"to_point": {"lat": to_lat, "long": to_long}
}
response = requests.post(
'http://location:8000/route/', data=json.dumps(data))
response.raise_for_status()
return response.json()
# Additional code will only run if the request is successful
except requests.exceptions.HTTPError as error:
logger.error(error)
return {}
def get_routes(listing: Listing):
ret_ = []
with Session(engine) as session:
interest_points_sttm = select(InterestPoint).\
where(InterestPoint.is_active == True)
interest_points = session.exec(interest_points_sttm).all()
for interest_point in interest_points:
routes = get_routes_json(
listing.latitude, listing.longitude,
interest_point.latitude, interest_point.longitude)
print('routes')
print(routes)
for route in routes:
ret_.append(Route(
interest_point_id=interest_point.id,
waking_distance=route['waking_distance'],
total_distance=route['total_distance'],
total_time=route['total_time'],
public_transport_count=route['public_transport_count'],
))
print(ret_)
return ret_
def get_places_nearby_json(from_lat, from_long, query):
try:
data = {"lat": from_lat, "long": from_long}
response = requests.post(
'http://location:8000/interest_places_nearby/', data=json.dumps(data))
response.raise_for_status()
# Additional code will only run if the request is successful
except requests.exceptions.HTTPError as error:
print(error)
return response.json()
def get_places_nearby(listing: Listing):
ret_ = []
query = 'Grocery'
places = get_places_nearby_json(
from_lat=listing.latitude, from_long=listing.longitude,
query=query)
for place in places:
ret_.append(PlaceNearby(
name=place['name'],
latitude=place['lat'],
longitude=place['long'],
address=place['address'],
distance=place['distance'],
website=place['website'],
website_domain=place['website_domain'],
chain_name=place['chain_name'],
query=query,
))
return ret_
def save_new_listing(search_result, listing_d):
with Session(engine) as session:
listing = Listing()
# Search Result
listing.source = 'daft'
listing.is_active = True
listing.url = search_result['url']
listing.address = search_result['title']
listing.price = search_result['monthly_price']
listing.latitude = search_result['latitude']
listing.longitude = search_result['longitude']
listing.publish_date = dateutil.parser.isoparse(
search_result['publish_date'])
# Details:
listing.source_id = listing_d['id']
listing.source_code = listing_d['daftShortcode']
listing.title = listing_d['title']
listing.bedrooms = listing_d['numBedrooms']
listing.bathrooms = listing_d['numBathrooms']
listing.description = listing_d['description']
listing.last_updated = listing_d['lastUpdateDate']
listing.images_count = listing_d['totalImages']
listing.views = listing_d['listingViews']
facilities_arr = []
for facility in listing_d['facilities']:
facility_sttm = select(Facility).\
where(Facility.name == facility.title()).\
where(Facility.category == 'facilities')
facility_obj = session.exec(facility_sttm).first()
if(not facility_obj):
facility_obj = Facility(
name=facility.title(),
category='facilities'
)
facilities_arr.append(facility_obj)
for facility in listing_d['propertyOverview']:
facility_sttm = | select(Facility) | sqlmodel.select |
from datetime import datetime
import dateutil.parser
import json
import requests
from requests.models import to_key_val_list
from sqlmodel import Field, Session, SQLModel, create_engine, select
from fastapi.logger import logger
from database import engine
from models import Listing, Facility, Image, InterestPoint, Route, RouteCreate, PlaceNearby
def get_daft_search_result():
try:
response = requests.get('http://daft:8000/search_result/')
response.raise_for_status()
# Additional code will only run if the request is successful
except requests.exceptions.HTTPError as error:
print(error)
return response.json()
def get_daft_details(url):
try:
print(url)
params = {
'url': url,
'method': 'json_details',
}
response = requests.get(
'http://daft:8000/listing_details/', params=params)
response.raise_for_status()
return response.json()
# Additional code will only run if the request is successful
except requests.exceptions.HTTPError as error:
logger.error(error)
return response.json()
def get_routes_json(from_lat, from_long, to_lat, to_long):
try:
data = {
"from_point": {"lat": from_lat, "long": from_long},
"to_point": {"lat": to_lat, "long": to_long}
}
response = requests.post(
'http://location:8000/route/', data=json.dumps(data))
response.raise_for_status()
return response.json()
# Additional code will only run if the request is successful
except requests.exceptions.HTTPError as error:
logger.error(error)
return {}
def get_routes(listing: Listing):
ret_ = []
with Session(engine) as session:
interest_points_sttm = select(InterestPoint).\
where(InterestPoint.is_active == True)
interest_points = session.exec(interest_points_sttm).all()
for interest_point in interest_points:
routes = get_routes_json(
listing.latitude, listing.longitude,
interest_point.latitude, interest_point.longitude)
print('routes')
print(routes)
for route in routes:
ret_.append(Route(
interest_point_id=interest_point.id,
waking_distance=route['waking_distance'],
total_distance=route['total_distance'],
total_time=route['total_time'],
public_transport_count=route['public_transport_count'],
))
print(ret_)
return ret_
def get_places_nearby_json(from_lat, from_long, query):
try:
data = {"lat": from_lat, "long": from_long}
response = requests.post(
'http://location:8000/interest_places_nearby/', data=json.dumps(data))
response.raise_for_status()
# Additional code will only run if the request is successful
except requests.exceptions.HTTPError as error:
print(error)
return response.json()
def get_places_nearby(listing: Listing):
ret_ = []
query = 'Grocery'
places = get_places_nearby_json(
from_lat=listing.latitude, from_long=listing.longitude,
query=query)
for place in places:
ret_.append(PlaceNearby(
name=place['name'],
latitude=place['lat'],
longitude=place['long'],
address=place['address'],
distance=place['distance'],
website=place['website'],
website_domain=place['website_domain'],
chain_name=place['chain_name'],
query=query,
))
return ret_
def save_new_listing(search_result, listing_d):
with Session(engine) as session:
listing = Listing()
# Search Result
listing.source = 'daft'
listing.is_active = True
listing.url = search_result['url']
listing.address = search_result['title']
listing.price = search_result['monthly_price']
listing.latitude = search_result['latitude']
listing.longitude = search_result['longitude']
listing.publish_date = dateutil.parser.isoparse(
search_result['publish_date'])
# Details:
listing.source_id = listing_d['id']
listing.source_code = listing_d['daftShortcode']
listing.title = listing_d['title']
listing.bedrooms = listing_d['numBedrooms']
listing.bathrooms = listing_d['numBathrooms']
listing.description = listing_d['description']
listing.last_updated = listing_d['lastUpdateDate']
listing.images_count = listing_d['totalImages']
listing.views = listing_d['listingViews']
facilities_arr = []
for facility in listing_d['facilities']:
facility_sttm = select(Facility).\
where(Facility.name == facility.title()).\
where(Facility.category == 'facilities')
facility_obj = session.exec(facility_sttm).first()
if(not facility_obj):
facility_obj = Facility(
name=facility.title(),
category='facilities'
)
facilities_arr.append(facility_obj)
for facility in listing_d['propertyOverview']:
facility_sttm = select(Facility).\
where(Facility.name == facility.title()).\
where(Facility.category == 'overview')
facility_obj = session.exec(facility_sttm).first()
if(not facility_obj):
facility_obj = Facility(
name=facility.title(),
category='overview'
)
facilities_arr.append(facility_obj)
listing.facilities = facilities_arr
listing.images = [Image(url=x['url'], url_600=x['url_600']) for x in listing_d['images']]
listing.routes = get_routes(listing)
listing.places_nearby = get_places_nearby(listing)
# Saving it
session.add(listing)
session.commit()
def give_it_a_try(how_many = 25):
ret_ = {}
daft_search_results = get_daft_search_result()
daft_result_list = daft_search_results['result_list']
c = 0
details = []
with Session(engine) as session:
for daft_result in daft_result_list:
statement = | select(Listing) | sqlmodel.select |
from minio import Minio
import os
from typing import Optional
from glob import glob
import pathlib
from sqlmodel import Field, Session, SQLModel, create_engine
def get_images(folder:str="../cls_labeling/public/images"):
return glob(str(pathlib.Path(folder,"**","*.jpg")), recursive=True)
class Image(SQLModel, table=True):
key: Optional[int] = | Field(default=None, primary_key=True) | sqlmodel.Field |
from minio import Minio
import os
from typing import Optional
from glob import glob
import pathlib
from sqlmodel import Field, Session, SQLModel, create_engine
def get_images(folder:str="../cls_labeling/public/images"):
return glob(str(pathlib.Path(folder,"**","*.jpg")), recursive=True)
class Image(SQLModel, table=True):
key: Optional[int] = Field(default=None, primary_key=True)
image_name: str
label: str
image_url: str
if __name__ == "__main__":
engine = | create_engine("sqlite:///image.db") | sqlmodel.create_engine |
from minio import Minio
import os
from typing import Optional
from glob import glob
import pathlib
from sqlmodel import Field, Session, SQLModel, create_engine
def get_images(folder:str="../cls_labeling/public/images"):
return glob(str(pathlib.Path(folder,"**","*.jpg")), recursive=True)
class Image(SQLModel, table=True):
key: Optional[int] = Field(default=None, primary_key=True)
image_name: str
label: str
image_url: str
if __name__ == "__main__":
engine = create_engine("sqlite:///image.db")
client = Minio(
"localhost:9001",
secure=False,
access_key="<KEY>",
secret_key="<KEY>"
)
bucket_found = client.bucket_exists("image")
if not bucket_found:
client.make_bucket("image")
else:
for obj in client.list_objects("image"):
client.remove_object("image", obj.object_name)
client.remove_bucket("image")
client.make_bucket("image")
os.remove("./image.db")
| SQLModel.metadata.create_all(engine) | sqlmodel.SQLModel.metadata.create_all |
from minio import Minio
import os
from typing import Optional
from glob import glob
import pathlib
from sqlmodel import Field, Session, SQLModel, create_engine
def get_images(folder:str="../cls_labeling/public/images"):
return glob(str(pathlib.Path(folder,"**","*.jpg")), recursive=True)
class Image(SQLModel, table=True):
key: Optional[int] = Field(default=None, primary_key=True)
image_name: str
label: str
image_url: str
if __name__ == "__main__":
engine = create_engine("sqlite:///image.db")
client = Minio(
"localhost:9001",
secure=False,
access_key="<KEY>",
secret_key="<KEY>"
)
bucket_found = client.bucket_exists("image")
if not bucket_found:
client.make_bucket("image")
else:
for obj in client.list_objects("image"):
client.remove_object("image", obj.object_name)
client.remove_bucket("image")
client.make_bucket("image")
os.remove("./image.db")
SQLModel.metadata.create_all(engine)
images = []
for i, image in enumerate(get_images()):
print(pathlib.Path(image).stem, image)
image_name = pathlib.Path(image).stem+'.jpg'
client.fput_object(
"image", image_name, image
)
image_url = f"http://localhost:9001/image/{image_name}"
images.append(
Image(key=i, image_name=pathlib.Path(image).stem, label="", image_url=image_url)
)
with | Session(engine) | sqlmodel.Session |
"""initial
Revision ID: a57c89b47e7b
Revises:
Create Date: 2021-11-01 04:27:56.134285
"""
from alembic import op
import sqlalchemy as sa
import sqlmodel
# revision identifiers, used by Alembic.
revision = 'a57c89b47e7b'
down_revision = None
branch_labels = None
depends_on = None
def upgrade():
# ### commands auto generated by Alembic - please adjust! ###
op.create_table('increment',
sa.Column('id', sa.Integer(), nullable=False),
sa.PrimaryKeyConstraint('id')
)
op.create_index(op.f('ix_increment_id'), 'increment', ['id'], unique=False)
op.create_table('listings',
sa.Column('id', sa.Integer(), nullable=False),
sa.PrimaryKeyConstraint('id')
)
op.create_index(op.f('ix_listings_id'), 'listings', ['id'], unique=False)
op.create_table('song',
sa.Column('name', | sqlmodel.sql.sqltypes.AutoString() | sqlmodel.sql.sqltypes.AutoString |
"""initial
Revision ID: a57c89b47e7b
Revises:
Create Date: 2021-11-01 04:27:56.134285
"""
from alembic import op
import sqlalchemy as sa
import sqlmodel
# revision identifiers, used by Alembic.
revision = 'a57c89b47e7b'
down_revision = None
branch_labels = None
depends_on = None
def upgrade():
# ### commands auto generated by Alembic - please adjust! ###
op.create_table('increment',
sa.Column('id', sa.Integer(), nullable=False),
sa.PrimaryKeyConstraint('id')
)
op.create_index(op.f('ix_increment_id'), 'increment', ['id'], unique=False)
op.create_table('listings',
sa.Column('id', sa.Integer(), nullable=False),
sa.PrimaryKeyConstraint('id')
)
op.create_index(op.f('ix_listings_id'), 'listings', ['id'], unique=False)
op.create_table('song',
sa.Column('name', sqlmodel.sql.sqltypes.AutoString(), nullable=False),
sa.Column('artist', | sqlmodel.sql.sqltypes.AutoString() | sqlmodel.sql.sqltypes.AutoString |
from datetime import datetime, timedelta
import pendulum
import prefect
from prefect import Flow, task
from prefect.run_configs import DockerRun
from prefect.schedules import CronSchedule
from prefect.storage import GitHub
from scrapy.crawler import CrawlerProcess
from sqlmodel import SQLModel, create_engine
from imdb_rating.dependencies.spiders import IMDBSpider
@task
def scrap_movies_from_imdb():
"""
Scrap movies from IMDB and store them into a PostgreSQL database using SQLModel.
Run a scrapy crawler process to launch a spider.
"""
logger = prefect.context.get("logger")
# engine = create_engine('postgresql://postgres:postgres@localhost:5432/imdb')
engine = | create_engine("sqlite:///imdb.db") | sqlmodel.create_engine |
from datetime import datetime, timedelta
import pendulum
import prefect
from prefect import Flow, task
from prefect.run_configs import DockerRun
from prefect.schedules import CronSchedule
from prefect.storage import GitHub
from scrapy.crawler import CrawlerProcess
from sqlmodel import SQLModel, create_engine
from imdb_rating.dependencies.spiders import IMDBSpider
@task
def scrap_movies_from_imdb():
"""
Scrap movies from IMDB and store them into a PostgreSQL database using SQLModel.
Run a scrapy crawler process to launch a spider.
"""
logger = prefect.context.get("logger")
# engine = create_engine('postgresql://postgres:postgres@localhost:5432/imdb')
engine = create_engine("sqlite:///imdb.db")
| SQLModel.metadata.create_all(engine) | sqlmodel.SQLModel.metadata.create_all |
"""
Query related functions.
"""
from datetime import datetime, timezone
from typing import List, Tuple
import sqlparse
from sqlalchemy import text
from sqlmodel import Session, create_engine
from datajunction.config import Settings
from datajunction.models.query import (
ColumnMetadata,
Query,
QueryResults,
QueryState,
QueryWithResults,
StatementResults,
)
from datajunction.typing import ColumnType, Description, SQLADialect, Stream, TypeEnum
def get_columns_from_description(
description: Description,
dialect: SQLADialect,
) -> List[ColumnMetadata]:
"""
Extract column metadata from the cursor description.
For now this uses the information from the cursor description, which only allow us to
distinguish between 4 types (see ``TypeEnum``). In the future we should use a type
inferrer to determine the types based on the query.
"""
type_map = {
TypeEnum.STRING: ColumnType.STR,
TypeEnum.BINARY: ColumnType.BYTES,
TypeEnum.NUMBER: ColumnType.FLOAT,
TypeEnum.DATETIME: ColumnType.DATETIME,
}
columns = []
for column in description or []:
name, native_type = column[:2]
for dbapi_type in TypeEnum:
if native_type == getattr(dialect.dbapi, dbapi_type.value, None):
type_ = type_map[dbapi_type]
break
else:
# fallback to string
type_ = ColumnType.STR
columns.append(ColumnMetadata(name=name, type=type_))
return columns
def run_query(query: Query) -> List[Tuple[str, List[ColumnMetadata], Stream]]:
"""
Run a query and return its results.
For each statement we return a tuple with the statement SQL, a description of the
columns (name and type) and a stream of rows (tuples).
"""
engine = | create_engine(query.database.URI, **query.database.extra_params) | sqlmodel.create_engine |
from datetime import datetime
from typing import List, Optional
from sqlmodel import Column, DateTime, Field, Relationship, SQLModel
class ObserverBase(SQLModel):
phone: str
email: str
class Config:
anystr_strip_whitespace = True
anystr_lower = True
class Observer(ObserverBase, table=True):
id: Optional[int] = | Field(default=None, primary_key=True) | sqlmodel.Field |
from datetime import datetime
from typing import List, Optional
from sqlmodel import Column, DateTime, Field, Relationship, SQLModel
class ObserverBase(SQLModel):
phone: str
email: str
class Config:
anystr_strip_whitespace = True
anystr_lower = True
class Observer(ObserverBase, table=True):
id: Optional[int] = Field(default=None, primary_key=True)
measurements: List["Measurement"] = Relationship(
back_populates="observer", sa_relationship_kwargs={"cascade": "all,delete"}
)
class ObserverCreate(ObserverBase):
pass
class ObserverRead(ObserverBase):
id: int
class MeasurementBase(SQLModel):
temperaturescale: str
temperature: int
organizationid: int
siteid: int
date_time: Optional[datetime] = Field(
sa_column=Column(DateTime, default=datetime.utcnow)
)
observer_id: Optional[int] = | Field(default=None, foreign_key="observer.id") | sqlmodel.Field |
from datetime import datetime
from typing import List, Optional
from sqlmodel import Column, DateTime, Field, Relationship, SQLModel
class ObserverBase(SQLModel):
phone: str
email: str
class Config:
anystr_strip_whitespace = True
anystr_lower = True
class Observer(ObserverBase, table=True):
id: Optional[int] = Field(default=None, primary_key=True)
measurements: List["Measurement"] = Relationship(
back_populates="observer", sa_relationship_kwargs={"cascade": "all,delete"}
)
class ObserverCreate(ObserverBase):
pass
class ObserverRead(ObserverBase):
id: int
class MeasurementBase(SQLModel):
temperaturescale: str
temperature: int
organizationid: int
siteid: int
date_time: Optional[datetime] = Field(
sa_column=Column(DateTime, default=datetime.utcnow)
)
observer_id: Optional[int] = Field(default=None, foreign_key="observer.id")
class Config:
anystr_strip_whitespace = True
anystr_lower = True
class Measurement(MeasurementBase, table=True):
id: Optional[int] = | Field(default=None, primary_key=True) | sqlmodel.Field |
from datetime import datetime
from typing import List, Optional
from sqlmodel import Column, DateTime, Field, Relationship, SQLModel
class ObserverBase(SQLModel):
phone: str
email: str
class Config:
anystr_strip_whitespace = True
anystr_lower = True
class Observer(ObserverBase, table=True):
id: Optional[int] = Field(default=None, primary_key=True)
measurements: List["Measurement"] = Relationship(
back_populates="observer", sa_relationship_kwargs={"cascade": "all,delete"}
)
class ObserverCreate(ObserverBase):
pass
class ObserverRead(ObserverBase):
id: int
class MeasurementBase(SQLModel):
temperaturescale: str
temperature: int
organizationid: int
siteid: int
date_time: Optional[datetime] = Field(
sa_column=Column(DateTime, default=datetime.utcnow)
)
observer_id: Optional[int] = Field(default=None, foreign_key="observer.id")
class Config:
anystr_strip_whitespace = True
anystr_lower = True
class Measurement(MeasurementBase, table=True):
id: Optional[int] = Field(default=None, primary_key=True)
observer: Optional[Observer] = | Relationship(back_populates="measurements") | sqlmodel.Relationship |
from datetime import datetime
from typing import List, Optional
from sqlmodel import Column, DateTime, Field, Relationship, SQLModel
class ObserverBase(SQLModel):
phone: str
email: str
class Config:
anystr_strip_whitespace = True
anystr_lower = True
class Observer(ObserverBase, table=True):
id: Optional[int] = Field(default=None, primary_key=True)
measurements: List["Measurement"] = Relationship(
back_populates="observer", sa_relationship_kwargs={"cascade": "all,delete"}
)
class ObserverCreate(ObserverBase):
pass
class ObserverRead(ObserverBase):
id: int
class MeasurementBase(SQLModel):
temperaturescale: str
temperature: int
organizationid: int
siteid: int
date_time: Optional[datetime] = Field(
sa_column= | Column(DateTime, default=datetime.utcnow) | sqlmodel.Column |
from __future__ import annotations
import typing as t
import strawberry
from sqlmodel import Field, Relationship, SQLModel
from .schema_generation import create_array_relationship_resolver, create_query_root
class AddressModel(SQLModel, table=True):
__tablename__ = "addresses"
id: t.Optional[int] = Field(
default=None, primary_key=True, index=True, nullable=False
)
street: str
state: str
country: str
zip: str
users: t.List["UserModel"] = | Relationship(back_populates="address") | sqlmodel.Relationship |
from __future__ import annotations
import typing as t
import strawberry
from sqlmodel import Field, Relationship, SQLModel
from .schema_generation import create_array_relationship_resolver, create_query_root
class AddressModel(SQLModel, table=True):
__tablename__ = "addresses"
id: t.Optional[int] = Field(
default=None, primary_key=True, index=True, nullable=False
)
street: str
state: str
country: str
zip: str
users: t.List["UserModel"] = Relationship(back_populates="address")
class UserModel(SQLModel, table=True):
__tablename__ = "users"
id: t.Optional[int] = Field(
default=None, primary_key=True, index=True, nullable=False
)
age: int
password: t.Optional[str]
address_id: t.Optional[int] = | Field(default=None, foreign_key="addresses.id") | sqlmodel.Field |
from __future__ import annotations
import typing as t
import strawberry
from sqlmodel import Field, Relationship, SQLModel
from .schema_generation import create_array_relationship_resolver, create_query_root
class AddressModel(SQLModel, table=True):
__tablename__ = "addresses"
id: t.Optional[int] = Field(
default=None, primary_key=True, index=True, nullable=False
)
street: str
state: str
country: str
zip: str
users: t.List["UserModel"] = Relationship(back_populates="address")
class UserModel(SQLModel, table=True):
__tablename__ = "users"
id: t.Optional[int] = Field(
default=None, primary_key=True, index=True, nullable=False
)
age: int
password: t.Optional[str]
address_id: t.Optional[int] = Field(default=None, foreign_key="addresses.id")
address: t.Optional[AddressModel] = | Relationship(back_populates="users") | sqlmodel.Relationship |
# -*- coding: utf-8 -*-
# @Time : 2022/1/2 17:50
# @Author : WhaleFall
# @Site :
# @File : __init__.py.py
# @Software: PyCharm
# Flask 应用初始化,工厂函数
from flask import Flask
from flask_login import LoginManager
from config import config
from sqlmodel import create_engine,SQLModel
# 实例化一个登录组件
login_manager = LoginManager()
login_manager.login_view = 'auth.login' # 登录的蓝图
login_manager.login_message = "请小可爱先登录!"
def create_app(config_name):
"""
工厂函数,指定一个配置类型
程序入口文件千万不能和 `app` 重名,惨痛教训!!
"""
app = Flask(__name__) # 实例化
app.config.from_object(config[config_name]) # 从配置类读取配置
config[config_name].init_app(app) # 调用静态方法初始化组件
# 注册组件
login_manager.init_app(app) # 登录组件
# 数据库
from app import models
app.config['engine'] = | create_engine(config[config_name].SQLALCHEMY_DATABASE_URI, echo=True) | sqlmodel.create_engine |
from typing import List
from app.schemas.role import IRoleCreate, IRoleUpdate
from app.models.role import Role
from app.models.user import User
from app.crud.base_sqlmodel import CRUDBase
from sqlmodel.ext.asyncio.session import AsyncSession
from datetime import datetime
from sqlmodel import select
from uuid import UUID
class CRUDRole(CRUDBase[Role, IRoleCreate, IRoleUpdate]):
async def get_role_by_name(self, db_session: AsyncSession, *, name: str) -> Role:
role = await db_session.exec( | select(Role) | sqlmodel.select |
from http import HTTPStatus
from typing import List
from fastapi import APIRouter, Depends, Query, Response
from sqlalchemy.ext.asyncio import AsyncSession
from sqlalchemy.future import select
from sqlmodel import func, select
from icon_governance.db import get_session
from icon_governance.models.delegations import Delegation
router = APIRouter()
@router.get("/governance/delegations/{address}")
async def get_delegations(
response: Response,
address: str,
skip: int = Query(0),
limit: int = Query(100, gt=0, lt=101),
session: AsyncSession = Depends(get_session),
) -> List[Delegation]:
"""Return list of delegations."""
query = (
select(Delegation)
.where(Delegation.address == address)
.offset(skip)
.limit(limit)
.order_by(Delegation.value.desc())
)
result = await session.execute(query)
delegations = result.scalars().all()
# Check if exists
if len(delegations) == 0:
return Response(status_code=HTTPStatus.NO_CONTENT.value)
# Return the count in header
query_count = select([ | func.count(Delegation.address) | sqlmodel.func.count |
from http import HTTPStatus
from typing import List
from fastapi import APIRouter, Depends, Query, Response
from sqlalchemy.ext.asyncio import AsyncSession
from sqlalchemy.future import select
from sqlmodel import func, select
from icon_governance.db import get_session
from icon_governance.models.delegations import Delegation
router = APIRouter()
@router.get("/governance/delegations/{address}")
async def get_delegations(
response: Response,
address: str,
skip: int = Query(0),
limit: int = Query(100, gt=0, lt=101),
session: AsyncSession = Depends(get_session),
) -> List[Delegation]:
"""Return list of delegations."""
query = (
select(Delegation)
.where(Delegation.address == address)
.offset(skip)
.limit(limit)
.order_by(Delegation.value.desc())
)
result = await session.execute(query)
delegations = result.scalars().all()
# Check if exists
if len(delegations) == 0:
return Response(status_code=HTTPStatus.NO_CONTENT.value)
# Return the count in header
query_count = select([func.count(Delegation.address)]).where(Delegation.address == address)
result_count = await session.execute(query_count)
total_count = str(result_count.scalars().all()[0])
response.headers["x-total-count"] = total_count
return delegations
@router.get("/governance/votes/{address}")
async def get_delegations(
address: str,
response: Response,
skip: int = Query(0),
limit: int = Query(100, gt=0, lt=101),
session: AsyncSession = Depends(get_session),
) -> List[Delegation]:
"""Return list of votes."""
query = (
select(Delegation)
.where(Delegation.prep_address == address)
.offset(skip)
.limit(limit)
.order_by(Delegation.value.desc())
)
result = await session.execute(query)
delegations = result.scalars().all()
# Check if exists
if len(delegations) == 0:
return Response(status_code=HTTPStatus.NO_CONTENT.value)
# Return the count in header
query_count = select([ | func.count(Delegation.address) | sqlmodel.func.count |
from http import HTTPStatus
from typing import List
from fastapi import APIRouter, Depends, Query, Response
from sqlalchemy.ext.asyncio import AsyncSession
from sqlalchemy.future import select
from sqlmodel import func, select
from icon_governance.db import get_session
from icon_governance.models.delegations import Delegation
router = APIRouter()
@router.get("/governance/delegations/{address}")
async def get_delegations(
response: Response,
address: str,
skip: int = Query(0),
limit: int = Query(100, gt=0, lt=101),
session: AsyncSession = Depends(get_session),
) -> List[Delegation]:
"""Return list of delegations."""
query = (
| select(Delegation) | sqlmodel.select |
from http import HTTPStatus
from typing import List
from fastapi import APIRouter, Depends, Query, Response
from sqlalchemy.ext.asyncio import AsyncSession
from sqlalchemy.future import select
from sqlmodel import func, select
from icon_governance.db import get_session
from icon_governance.models.delegations import Delegation
router = APIRouter()
@router.get("/governance/delegations/{address}")
async def get_delegations(
response: Response,
address: str,
skip: int = Query(0),
limit: int = Query(100, gt=0, lt=101),
session: AsyncSession = Depends(get_session),
) -> List[Delegation]:
"""Return list of delegations."""
query = (
select(Delegation)
.where(Delegation.address == address)
.offset(skip)
.limit(limit)
.order_by(Delegation.value.desc())
)
result = await session.execute(query)
delegations = result.scalars().all()
# Check if exists
if len(delegations) == 0:
return Response(status_code=HTTPStatus.NO_CONTENT.value)
# Return the count in header
query_count = select([func.count(Delegation.address)]).where(Delegation.address == address)
result_count = await session.execute(query_count)
total_count = str(result_count.scalars().all()[0])
response.headers["x-total-count"] = total_count
return delegations
@router.get("/governance/votes/{address}")
async def get_delegations(
address: str,
response: Response,
skip: int = Query(0),
limit: int = Query(100, gt=0, lt=101),
session: AsyncSession = Depends(get_session),
) -> List[Delegation]:
"""Return list of votes."""
query = (
| select(Delegation) | sqlmodel.select |
"""add power
Revision ID: 135aec058ce1
Revises: 4400883a1249
Create Date: 2021-12-28 11:38:37.439383
"""
import sqlalchemy as sa
import sqlmodel
from alembic import op
# revision identifiers, used by Alembic.
revision = "135aec058ce1"
down_revision = "4400883a1249"
branch_labels = None
depends_on = None
def upgrade():
# ### commands auto generated by Alembic - please adjust! ###
op.add_column("preps", sa.Column("power", | sqlmodel.sql.sqltypes.AutoString() | sqlmodel.sql.sqltypes.AutoString |
from __future__ import annotations
from sqlmodel import Session, select
from src.models import User
from src.schemas.user import CreateUser
from src.services.auth import check_password_hash, generate_password_hash
def get_user_by_email(db: Session, email: str) -> User | None:
"""
Finds a user with the given email in the database and returns it.
Returns None if a user with the email does not exists.
"""
stmt = | select(User) | sqlmodel.select |
from typing import Optional
from sqlmodel import Field, SQLModel
class Example(SQLModel, table=True):
"""测试一下"""
id: Optional[int] = | Field(default=None, primary_key=True) | sqlmodel.Field |
# Copyright 2021 Modelyst LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from pathlib import Path
from sqlmodel import create_engine
def main():
engine = | create_engine('postgresql://michaelstatt@localhost/sqlalchemy') | sqlmodel.create_engine |
from typing import Optional
from sqlmodel import Field, SQLModel, Field
from pydantic import root_validator
from datetime import datetime
# {
# "user_id": 1,
# "start_time": "2022-01-19T08:30:00.000Z",
# "end_time": "2022-01-19T09:30:00.000Z",
# "client_id": 1,
# "epic_id": 1,
# "count_hours": 0,
# "count_days": 0,
# "month": 0,
# "year": 0
# }
class TimeLog(SQLModel, table=True):
"""Create an SQLModel for timelogs"""
id: Optional[int] = | Field(default=None, primary_key=True) | sqlmodel.Field |
from typing import Optional
from sqlmodel import Field, SQLModel, Field
from pydantic import root_validator
from datetime import datetime
# {
# "user_id": 1,
# "start_time": "2022-01-19T08:30:00.000Z",
# "end_time": "2022-01-19T09:30:00.000Z",
# "client_id": 1,
# "epic_id": 1,
# "count_hours": 0,
# "count_days": 0,
# "month": 0,
# "year": 0
# }
class TimeLog(SQLModel, table=True):
"""Create an SQLModel for timelogs"""
id: Optional[int] = Field(default=None, primary_key=True)
user_id: int = | Field(foreign_key="app_db.appuser.id") | sqlmodel.Field |
from typing import Optional
from sqlmodel import Field, SQLModel, Field
from pydantic import root_validator
from datetime import datetime
# {
# "user_id": 1,
# "start_time": "2022-01-19T08:30:00.000Z",
# "end_time": "2022-01-19T09:30:00.000Z",
# "client_id": 1,
# "epic_id": 1,
# "count_hours": 0,
# "count_days": 0,
# "month": 0,
# "year": 0
# }
class TimeLog(SQLModel, table=True):
"""Create an SQLModel for timelogs"""
id: Optional[int] = Field(default=None, primary_key=True)
user_id: int = Field(foreign_key="app_db.appuser.id")
start_time: datetime
end_time: datetime
epic_id: int = | Field(foreign_key="app_db.epic.id") | sqlmodel.Field |
from typing import Optional
from sqlmodel import Field, SQLModel, Field
from pydantic import root_validator
from datetime import datetime
# {
# "user_id": 1,
# "start_time": "2022-01-19T08:30:00.000Z",
# "end_time": "2022-01-19T09:30:00.000Z",
# "client_id": 1,
# "epic_id": 1,
# "count_hours": 0,
# "count_days": 0,
# "month": 0,
# "year": 0
# }
class TimeLog(SQLModel, table=True):
"""Create an SQLModel for timelogs"""
id: Optional[int] = Field(default=None, primary_key=True)
user_id: int = Field(foreign_key="app_db.appuser.id")
start_time: datetime
end_time: datetime
epic_id: int = Field(foreign_key="app_db.epic.id")
count_hours: float
count_days: float
month: int
year: int
epic_area_id: int = | Field(foreign_key="app_db.epicarea.id") | sqlmodel.Field |
from http import HTTPStatus
from typing import List
from fastapi import APIRouter, Depends
from sqlalchemy.ext.asyncio import AsyncSession
from sqlalchemy.future import select
from sqlmodel import select
from starlette.responses import Response
from icon_governance.db import get_session
from icon_governance.models.preps import Prep
router = APIRouter()
@router.get("/preps")
async def get_preps(
session: AsyncSession = Depends(get_session),
) -> List[Prep]:
"""Return list of preps which is limitted to 150 records so no skip."""
result = await session.execute( | select(Prep) | sqlmodel.select |
from http import HTTPStatus
from typing import List
from fastapi import APIRouter, Depends
from sqlalchemy.ext.asyncio import AsyncSession
from sqlalchemy.future import select
from sqlmodel import select
from starlette.responses import Response
from icon_governance.db import get_session
from icon_governance.models.preps import Prep
router = APIRouter()
@router.get("/preps")
async def get_preps(
session: AsyncSession = Depends(get_session),
) -> List[Prep]:
"""Return list of preps which is limitted to 150 records so no skip."""
result = await session.execute(select(Prep).order_by(Prep.delegated.desc()))
preps = result.scalars().all()
return preps
@router.get("/preps/{address}")
async def get_prep(
address: str,
session: AsyncSession = Depends(get_session),
) -> List[Prep]:
"""Return a single prep."""
result = await session.execute( | select(Prep) | sqlmodel.select |
"""
Rewards are Txs with the claim-iscore method but since this service only listens for
new Txs, this job backfills the value and iscore from the logs service.
"""
import json
from requests import RequestException, get
from sqlmodel import func, select
from icon_governance.config import settings
from icon_governance.log import logger
from icon_governance.models.rewards import Reward
from icon_governance.utils.rpc import convert_hex_int
def get_iscore_value(tx_hash):
"""Get rewards value and Tx from logs service."""
try:
response = get(f"{settings.LOGS_SERVICE_URL}/api/v1/logs?transaction_hash={tx_hash}")
except RequestException as e:
logger.info(f"Exception in iscore - \n{e} - \n{tx_hash}")
return None, None
if response.status_code == 200:
try:
data = json.loads(response.json()[0]["data"])
return convert_hex_int(data[0]) / 1e18, convert_hex_int(data[1]) / 1e18
except Exception as e:
logger.info(f"Exception in iscore - \n{e} - \n{tx_hash}")
return None, None
else:
logger.info(f"Could not find Tx hash from logs service {tx_hash}")
def get_rewards(session):
"""
Cron to get all the values and iscores for rewards txs. Works by getting all the
iscore distributions which are picked up by the transactions processor and insert
them into a DB. The values are then inserted with this cron job by querying for
rewards that have no value.
"""
count = (
session.execute(select([ | func.count(Reward.address) | sqlmodel.func.count |
"""
Rewards are Txs with the claim-iscore method but since this service only listens for
new Txs, this job backfills the value and iscore from the logs service.
"""
import json
from requests import RequestException, get
from sqlmodel import func, select
from icon_governance.config import settings
from icon_governance.log import logger
from icon_governance.models.rewards import Reward
from icon_governance.utils.rpc import convert_hex_int
def get_iscore_value(tx_hash):
"""Get rewards value and Tx from logs service."""
try:
response = get(f"{settings.LOGS_SERVICE_URL}/api/v1/logs?transaction_hash={tx_hash}")
except RequestException as e:
logger.info(f"Exception in iscore - \n{e} - \n{tx_hash}")
return None, None
if response.status_code == 200:
try:
data = json.loads(response.json()[0]["data"])
return convert_hex_int(data[0]) / 1e18, convert_hex_int(data[1]) / 1e18
except Exception as e:
logger.info(f"Exception in iscore - \n{e} - \n{tx_hash}")
return None, None
else:
logger.info(f"Could not find Tx hash from logs service {tx_hash}")
def get_rewards(session):
"""
Cron to get all the values and iscores for rewards txs. Works by getting all the
iscore distributions which are picked up by the transactions processor and insert
them into a DB. The values are then inserted with this cron job by querying for
rewards that have no value.
"""
count = (
session.execute(select([func.count(Reward.address)]).where(Reward.value == None))
.scalars()
.all()
)
logger.info(f"Found {count} number of rewards records.")
chunk_size = 10
for i in range(0, int(count[0] / chunk_size) + 1):
rewards = (
session.execute( | select(Reward) | sqlmodel.select |
from typing import Optional
from sqlmodel import Session
from db.base import engine
from db.models import Plant
def create_plants():
plant_1 = Plant(name="Hebe")
plant_2 = Plant(name="Astilbe")
plant_3 = Plant(name="Sedum")
plant_4 = Plant(name="Helenium")
plant_5 = Plant(name="Heather")
session = | Session(engine) | sqlmodel.Session |
from datetime import datetime
from sqlmodel import Field, SQLModel, Relationship
from typing import Optional
from sqlalchemy import Column
from sqlalchemy.dialects.postgresql import JSON
class TextInferenceBase(SQLModel):
text: str = | Field(nullable=False, index=True) | sqlmodel.Field |
from datetime import datetime
from sqlmodel import Field, SQLModel, Relationship
from typing import Optional
from sqlalchemy import Column
from sqlalchemy.dialects.postgresql import JSON
class TextInferenceBase(SQLModel):
text: str = Field(nullable=False, index=True)
class TextInference(TextInferenceBase, table=True):
id: Optional[int] = | Field(default=None, nullable=False, primary_key=True) | sqlmodel.Field |
from datetime import datetime
from sqlmodel import Field, SQLModel, Relationship
from typing import Optional
from sqlalchemy import Column
from sqlalchemy.dialects.postgresql import JSON
class TextInferenceBase(SQLModel):
text: str = Field(nullable=False, index=True)
class TextInference(TextInferenceBase, table=True):
id: Optional[int] = Field(default=None, nullable=False, primary_key=True)
result: dict[str, float] = Field(nullable=False, sa_column=Column(JSON))
created_at: Optional[datetime]
updated_at: Optional[datetime]
created_by_id: Optional[int] = | Field(default=None, foreign_key="user.id") | sqlmodel.Field |
from sqlite3.dbapi2 import Timestamp, adapt
from typing import Optional
from sqlmodel import Field, SQLModel, Field
from pydantic import validator
from datetime import datetime, date
from fastapi import HTTPException
import re
class User(SQLModel, table=True):
id: Optional[int] = | Field(default=None, primary_key=True) | sqlmodel.Field |
from enum import Enum
from typing import Dict, Optional, Union
from sqlmodel import Field, SQLModel, create_engine
# https://stackoverflow.com/questions/65209934/pydantic-enum-field-does-not-get-converted-to-string
class EventType(str, Enum):
BUILD_IMAGE = 'build_image'
CREATE_CONTAINER = 'create_container'
class Event(SQLModel, table=True):
id: Optional[int] = | Field(default=None, primary_key=True) | sqlmodel.Field |
from enum import Enum
from typing import Dict, Optional, Union
from sqlmodel import Field, SQLModel, create_engine
# https://stackoverflow.com/questions/65209934/pydantic-enum-field-does-not-get-converted-to-string
class EventType(str, Enum):
BUILD_IMAGE = 'build_image'
CREATE_CONTAINER = 'create_container'
class Event(SQLModel, table=True):
id: Optional[int] = Field(default=None, primary_key=True)
event_type: EventType
event_payload: str
event_status: Optional[int] = | Field(default=None) | sqlmodel.Field |
from sqlmodel import Session, select
from .models import Person, engine
def create_person(nome: str, idade:int):
person = Person(nome=nome, idade=idade)
with Session(engine) as session:
session.add(person)
session.commit()
session.refresh(person)
return person
def all_person():
query = | select(Person) | sqlmodel.select |
from sqlmodel import Session, select
from .models import Person, engine
def create_person(nome: str, idade:int):
person = Person(nome=nome, idade=idade)
with | Session(engine) | sqlmodel.Session |
from sqlmodel import Session, select
from .models import Person, engine
def create_person(nome: str, idade:int):
person = Person(nome=nome, idade=idade)
with Session(engine) as session:
session.add(person)
session.commit()
session.refresh(person)
return person
def all_person():
query = select(Person)
with | Session(engine) | sqlmodel.Session |
from decimal import Decimal
from unittest.mock import patch
from sqlmodel import create_engine
from ...conftest import get_testing_print_function
expected_calls = [
[
"Hero 1:",
{
"name": "Deadpond",
"age": None,
"id": 1,
"secret_name": "<NAME>",
"money": Decimal("1.100"),
},
],
[
"Hero 2:",
{
"name": "Rusty-Man",
"age": 48,
"id": 3,
"secret_name": "<NAME>",
"money": Decimal("2.200"),
},
],
["Total money: 3.300"],
]
def test_tutorial(clear_sqlmodel):
from docs_src.advanced.decimal import tutorial001 as mod
mod.sqlite_url = "sqlite://"
mod.engine = | create_engine(mod.sqlite_url) | sqlmodel.create_engine |
from create_db import Student
from sqlmodel import Session, create_engine
student_1 = Student(id=1, first_name="Misal", last_name="Gupta", email="<EMAIL>")
student_2 = Student(id=2, first_name="Vivek", last_name="Kumar", email="<EMAIL>")
student_3 = Student(id=3, first_name="Himesh", last_name="Mahto", email="<EMAIL>")
sqlite_url = "sqlite:///school.db"
engine = | create_engine(sqlite_url, echo=True) | sqlmodel.create_engine |
from create_db import Student
from sqlmodel import Session, create_engine
student_1 = Student(id=1, first_name="Misal", last_name="Gupta", email="<EMAIL>")
student_2 = Student(id=2, first_name="Vivek", last_name="Kumar", email="<EMAIL>")
student_3 = Student(id=3, first_name="Himesh", last_name="Mahto", email="<EMAIL>")
sqlite_url = "sqlite:///school.db"
engine = create_engine(sqlite_url, echo=True)
session = | Session(engine) | sqlmodel.Session |
import re
from datetime import datetime
from enum import Enum
from functools import lru_cache
from inspect import Parameter, signature
from typing import (
TYPE_CHECKING,
Any,
Callable,
Generator,
Generic,
List,
Optional,
Tuple,
Type,
TypeVar,
Union,
)
from uuid import UUID
from fastapi import Depends, File, Form, Request, UploadFile, params
from fastapi_utils.api_model import APIModel
from fastapi_utils.camelcase import snake2camel
from makefun import wraps
from pydantic import (
BaseModel as PydanticBaseModel,
ConstrainedInt,
ConstrainedStr,
create_model,
)
from pydantic.datetime_parse import parse_datetime
from pydantic.fields import Undefined
from pydantic.main import ModelMetaclass
from sqlalchemy.ext.compiler import compiles
from sqlalchemy.sql.functions import FunctionElement
from sqlalchemy.sql.schema import Column
from sqlalchemy.types import DateTime
from sqlmodel import Field, SQLModel
from starlette.datastructures import MultiDict
from joj.horse.utils.base import is_uuid
from joj.horse.utils.errors import ErrorCode
if TYPE_CHECKING:
Model = TypeVar("Model", bound="BaseModel")
class BaseModel(APIModel):
""""""
class Config:
validate_all = True
class Operation(Enum):
Create = "Create"
Read = "Read"
Update = "Update"
Delete = "Delete"
class NoneNegativeInt(ConstrainedInt):
ge = 0
# class PositiveInt(ConstrainedInt):
# gt = 0
class PaginationLimit(NoneNegativeInt):
le = 500
class LongStr(ConstrainedStr):
max_length = 256
class NoneEmptyStr(ConstrainedStr):
min_length = 1
class SearchQueryStr(ConstrainedStr):
min_length = 2
class NoneEmptyLongStr(LongStr, NoneEmptyStr):
pass
class UserInputURL(str):
URL_RE = re.compile(r"[\w-]+", flags=re.ASCII)
@classmethod
def __get_validators__(
cls,
) -> Generator[Callable[[Union[str, Any]], str], None, None]:
yield cls.validate
@classmethod
def validate(cls, v: Optional[str]) -> LongStr:
if not v:
return LongStr("")
if is_uuid(v):
raise ValueError("url can not be uuid")
if not UserInputURL.URL_RE.fullmatch(v):
raise ValueError("url can only contains [a-zA-Z0-9_-]")
return LongStr(v)
class LongText(ConstrainedStr):
max_length = 65536
class utcnow(FunctionElement):
type = DateTime()
@compiles(utcnow, "postgresql")
def pg_utcnow(element: Any, compiler: Any, **kwargs: Any) -> str:
return "TIMEZONE('utc', CURRENT_TIMESTAMP)"
@compiles(utcnow, "mssql")
def ms_utcnow(element: Any, compiler: Any, **kwargs: Any) -> str:
return "GETUTCDATE()"
def get_datetime_column(**kwargs: Any) -> Column:
if "index" not in kwargs:
kwargs["index"] = False
if "nullable" not in kwargs:
kwargs["nullable"] = False
return Column(DateTime(timezone=True), **kwargs)
class UTCDatetime(datetime):
"""parse a datetime and convert in into UTC format"""
@classmethod
def __get_validators__(cls) -> Any:
yield cls.validate
@classmethod
def validate(cls, v: Any) -> datetime:
return datetime.fromtimestamp(parse_datetime(v).timestamp())
class BaseORMSchema(SQLModel, BaseModel):
pass
class URLORMSchema(BaseORMSchema):
url: str = | Field("", description="(unique) url of the domain") | sqlmodel.Field |
import re
from datetime import datetime
from enum import Enum
from functools import lru_cache
from inspect import Parameter, signature
from typing import (
TYPE_CHECKING,
Any,
Callable,
Generator,
Generic,
List,
Optional,
Tuple,
Type,
TypeVar,
Union,
)
from uuid import UUID
from fastapi import Depends, File, Form, Request, UploadFile, params
from fastapi_utils.api_model import APIModel
from fastapi_utils.camelcase import snake2camel
from makefun import wraps
from pydantic import (
BaseModel as PydanticBaseModel,
ConstrainedInt,
ConstrainedStr,
create_model,
)
from pydantic.datetime_parse import parse_datetime
from pydantic.fields import Undefined
from pydantic.main import ModelMetaclass
from sqlalchemy.ext.compiler import compiles
from sqlalchemy.sql.functions import FunctionElement
from sqlalchemy.sql.schema import Column
from sqlalchemy.types import DateTime
from sqlmodel import Field, SQLModel
from starlette.datastructures import MultiDict
from joj.horse.utils.base import is_uuid
from joj.horse.utils.errors import ErrorCode
if TYPE_CHECKING:
Model = TypeVar("Model", bound="BaseModel")
class BaseModel(APIModel):
""""""
class Config:
validate_all = True
class Operation(Enum):
Create = "Create"
Read = "Read"
Update = "Update"
Delete = "Delete"
class NoneNegativeInt(ConstrainedInt):
ge = 0
# class PositiveInt(ConstrainedInt):
# gt = 0
class PaginationLimit(NoneNegativeInt):
le = 500
class LongStr(ConstrainedStr):
max_length = 256
class NoneEmptyStr(ConstrainedStr):
min_length = 1
class SearchQueryStr(ConstrainedStr):
min_length = 2
class NoneEmptyLongStr(LongStr, NoneEmptyStr):
pass
class UserInputURL(str):
URL_RE = re.compile(r"[\w-]+", flags=re.ASCII)
@classmethod
def __get_validators__(
cls,
) -> Generator[Callable[[Union[str, Any]], str], None, None]:
yield cls.validate
@classmethod
def validate(cls, v: Optional[str]) -> LongStr:
if not v:
return LongStr("")
if is_uuid(v):
raise ValueError("url can not be uuid")
if not UserInputURL.URL_RE.fullmatch(v):
raise ValueError("url can only contains [a-zA-Z0-9_-]")
return LongStr(v)
class LongText(ConstrainedStr):
max_length = 65536
class utcnow(FunctionElement):
type = DateTime()
@compiles(utcnow, "postgresql")
def pg_utcnow(element: Any, compiler: Any, **kwargs: Any) -> str:
return "TIMEZONE('utc', CURRENT_TIMESTAMP)"
@compiles(utcnow, "mssql")
def ms_utcnow(element: Any, compiler: Any, **kwargs: Any) -> str:
return "GETUTCDATE()"
def get_datetime_column(**kwargs: Any) -> Column:
if "index" not in kwargs:
kwargs["index"] = False
if "nullable" not in kwargs:
kwargs["nullable"] = False
return Column(DateTime(timezone=True), **kwargs)
class UTCDatetime(datetime):
"""parse a datetime and convert in into UTC format"""
@classmethod
def __get_validators__(cls) -> Any:
yield cls.validate
@classmethod
def validate(cls, v: Any) -> datetime:
return datetime.fromtimestamp(parse_datetime(v).timestamp())
class BaseORMSchema(SQLModel, BaseModel):
pass
class URLORMSchema(BaseORMSchema):
url: str = Field("", description="(unique) url of the domain")
class URLCreateMixin(BaseModel):
if not TYPE_CHECKING:
url: UserInputURL = | Field("", description="(unique) url of the domain") | sqlmodel.Field |
import importlib
import os
from typing import Dict, List, Optional, Tuple
from types import ModuleType
import typer
from rich import inspect
from rich.prompt import Prompt
from rich.table import Table
from sqlalchemy import Column
from sqlalchemy.future.engine import Engine
from sqlmodel import SQLModel, create_engine
from ._console import console, error_console
def get_db_url(database_url: Optional[str] = None):
"""A helper function to get the database url."""
if not database_url:
database_url = os.getenv("DATABASE_URL")
if not database_url:
msg = "Please ensure that an environment variable is set for `DATABASE_URL` or pass in the url to the database_url option."
error_console.print(msg)
raise typer.Exit(code=1)
return database_url
def get_tables(models_module) -> Dict[str, SQLModel]:
"""Find all of the SQLModel tables."""
tables = {}
for name, obj in models_module.__dict__.items():
if isinstance(obj, type(SQLModel)) and name != "SQLModel":
tables[obj.__tablename__] = obj
return tables
def get_models(models_path: Optional[str] = None):
# Load the models provided by the user.
if not models_path:
models_path = os.getenv("MODELS_PATH")
if not models_path:
msg = "No modules_path specified. You can set a modules_path by either passing in a value to the -m option or by setting an environment variable `export MODELS_PATH='sqlcli_demo/models.py'`"
error_console.print(msg)
raise typer.Exit(code=1)
models_path = os.path.normpath(models_path)
path, filename = os.path.split(models_path)
module_name, ext = os.path.split(filename)
spec = importlib.util.spec_from_file_location(module_name, models_path)
models = importlib.util.module_from_spec(spec)
spec.loader.exec_module(models)
return models
def is_foreign_key(obj, field_name: str) -> bool:
foreign_keys = [i for i in obj.__table__.foreign_keys]
for fk in foreign_keys:
if fk.parent.name == field_name:
return True
return False
def get_foreign_key_column_name(obj: SQLModel, field_name: str) -> str:
foreign_keys = [i for i in obj.__table__.foreign_keys]
for fk in foreign_keys:
if fk.parent.name == field_name:
return fk.column.name
def get_foreign_key_table_name(obj: SQLModel, field_name: str) -> Optional[str]:
foreign_keys = [i for i in obj.__table__.foreign_keys]
for fk in foreign_keys:
if fk.parent.name == field_name:
return fk.column.table.name
return None
def sqlmodel_setup(
models_path: str, database_url: str
) -> Tuple[ModuleType, str, Engine, Dict[str, SQLModel]]:
"""Quickstart for getting required objects"""
models = get_models(models_path)
url = get_db_url(database_url)
engine = | create_engine(url) | sqlmodel.create_engine |
from typing import Optional
from sqlmodel import Field, SQLModel
from datetime import datetime
class Calendar(SQLModel, table=True):
"""Create an SQLModel for a calendar"""
id: Optional[int] = | Field(default=None, primary_key=True) | sqlmodel.Field |
import datetime
from typing import Optional
from sqlmodel import BigInteger, Column, DateTime, Field, ForeignKey, SQLModel
class HelpSessionBase(SQLModel):
"""A base model for storing information about users."""
claimant_id: int
channel_id: int
opened_at: datetime.datetime
closed_at: Optional[datetime.datetime]
class HelpSessionTable(HelpSessionBase, table=True):
"""A model for storing information about individual help sessions."""
__tablename__ = "help_sessions"
session_id: int = | Field(primary_key=True) | sqlmodel.Field |
import datetime
from typing import Optional
from sqlmodel import BigInteger, Column, DateTime, Field, ForeignKey, SQLModel
class HelpSessionBase(SQLModel):
"""A base model for storing information about users."""
claimant_id: int
channel_id: int
opened_at: datetime.datetime
closed_at: Optional[datetime.datetime]
class HelpSessionTable(HelpSessionBase, table=True):
"""A model for storing information about individual help sessions."""
__tablename__ = "help_sessions"
session_id: int = Field(primary_key=True)
claimant_id: int = Field(
sa_column=Column(
"claimant_id",
BigInteger,
| ForeignKey("users.user_id") | sqlmodel.ForeignKey |
import datetime
from typing import Optional
from sqlmodel import BigInteger, Column, DateTime, Field, ForeignKey, SQLModel
class HelpSessionBase(SQLModel):
"""A base model for storing information about users."""
claimant_id: int
channel_id: int
opened_at: datetime.datetime
closed_at: Optional[datetime.datetime]
class HelpSessionTable(HelpSessionBase, table=True):
"""A model for storing information about individual help sessions."""
__tablename__ = "help_sessions"
session_id: int = Field(primary_key=True)
claimant_id: int = Field(
sa_column=Column(
"claimant_id",
BigInteger,
ForeignKey("users.user_id"),
nullable=False
)
)
channel_id: int = Field(
sa_column=Column(
"channel_id",
BigInteger,
index=True,
nullable=False
)
)
opened_at: datetime.datetime = Field(
sa_column=Column(
| DateTime(timezone=True) | sqlmodel.DateTime |
import datetime
from typing import Optional
from sqlmodel import BigInteger, Column, DateTime, Field, ForeignKey, SQLModel
class HelpSessionBase(SQLModel):
"""A base model for storing information about users."""
claimant_id: int
channel_id: int
opened_at: datetime.datetime
closed_at: Optional[datetime.datetime]
class HelpSessionTable(HelpSessionBase, table=True):
"""A model for storing information about individual help sessions."""
__tablename__ = "help_sessions"
session_id: int = Field(primary_key=True)
claimant_id: int = Field(
sa_column=Column(
"claimant_id",
BigInteger,
ForeignKey("users.user_id"),
nullable=False
)
)
channel_id: int = Field(
sa_column=Column(
"channel_id",
BigInteger,
index=True,
nullable=False
)
)
opened_at: datetime.datetime = Field(
sa_column=Column(
DateTime(timezone=True),
nullable=False
)
)
closed_at: Optional[datetime.datetime] = Field(
sa_column=Column(
| DateTime(timezone=True) | sqlmodel.DateTime |
from enum import Enum
from typing import TYPE_CHECKING, Optional
from sqlalchemy import Column
from sqlalchemy import Enum as SQLEnum
from sqlalchemy import ForeignKey, Integer
from sqlmodel import Field, Relationship, SQLModel
if TYPE_CHECKING:
from .message import Message, MessageList
class MessageTriggerType(Enum):
SIGN_UP = "Sign Up"
APPLICATION_SUBMITTED = "Application - Submitted"
APPLICATION_ACCEPTED = "Application - Accepted"
APPLICATION_REJECTED = "Application - Rejected"
INCOMPLETE_APPLICATION_24H = "Incomplete Application - 24hr"
INCOMPLETE_APPLICATION_7D = "Incomplete Application - 7 day"
class MessageTriggerBase(SQLModel):
trigger: MessageTriggerType = Field(
sa_column=Column(
SQLEnum(MessageTriggerType),
nullable=False,
primary_key=True,
)
)
class MessageTrigger(MessageTriggerBase, table=True):
__tablename__ = "message_triggers"
message_id: Optional[int] = Field(
sa_column=Column(
Integer(),
ForeignKey("messages.id", ondelete="CASCADE"),
nullable=True,
)
)
message: Optional["Message"] = | Relationship() | sqlmodel.Relationship |
from typing import Optional
from sqlmodel import Field, SQLModel, Field
from pydantic import validator
from datetime import datetime, date
from fastapi import HTTPException
import re
class AppUser(SQLModel, table=True):
"""Create an SQLModel for users"""
id: Optional[int] = | Field(default=None, primary_key=True) | sqlmodel.Field |
import os
from fastapi import FastAPI
from sqlmodel import create_engine, SQLModel
from .configurations import env
from .models import * # init models package
class AppFactory(object):
def __init__(self):
self._app = None
@staticmethod
def _get_all_router():
from pigeon.blog.services.routers import __all_routers__
return __all_routers__
def _apply_router(self):
if not isinstance(self._app, FastAPI):
raise RuntimeError("self._app isn't initialized.")
routers = AppFactory._get_all_router()
for r in routers:
self._app.include_router(r)
def _ensure_sql(self):
if not isinstance(self._app, FastAPI):
return
@self._app.on_event("startup")
def sql_startup():
engine = get_engine()
| SQLModel.metadata.create_all(engine) | sqlmodel.SQLModel.metadata.create_all |
from sqlmodel import select
from sqlalchemy.sql import expression
from sqlalchemy.exc import SQLAlchemyError
from sqlalchemy.orm import defer
from typing import Any
from app.db import models, pagination, session_scope
from app.logs import fastapi_logger
def get_user(email: str) -> Any:
""" Get User Data based on email"""
try:
with session_scope() as db:
statement = select(models.User).where(
models.User.email == email).options(defer('password'))
results = db.exec(statement)
data = results.one()
return data
except SQLAlchemyError as e:
fastapi_logger.exception("get_user")
return None
def get_user_password(email: str) -> Any:
""" Get User Password based on email"""
try:
with session_scope() as db:
statement = | select(models.User) | sqlmodel.select |
from sqlmodel import select
from sqlalchemy.sql import expression
from sqlalchemy.exc import SQLAlchemyError
from sqlalchemy.orm import defer
from typing import Any
from app.db import models, pagination, session_scope
from app.logs import fastapi_logger
def get_user(email: str) -> Any:
""" Get User Data based on email"""
try:
with session_scope() as db:
statement = | select(models.User) | sqlmodel.select |
from sqlmodel import select
from sqlalchemy.sql import expression
from sqlalchemy.exc import SQLAlchemyError
from sqlalchemy.orm import defer
from typing import Any
from app.db import models, pagination, session_scope
from app.logs import fastapi_logger
def get_user(email: str) -> Any:
""" Get User Data based on email"""
try:
with session_scope() as db:
statement = select(models.User).where(
models.User.email == email).options(defer('password'))
results = db.exec(statement)
data = results.one()
return data
except SQLAlchemyError as e:
fastapi_logger.exception("get_user")
return None
def get_user_password(email: str) -> Any:
""" Get User Password based on email"""
try:
with session_scope() as db:
statement = select(models.User).where(
models.User.email == email)
results = db.exec(statement)
data = results.one()
return data
except SQLAlchemyError as e:
fastapi_logger.exception("get_user")
return None
def get_active_user(email: str) -> Any:
""" Get User Data based on email and active status"""
try:
with session_scope() as db:
statement = | select(models.User) | sqlmodel.select |
"""init database
Revision ID: 60e58d3a26fa
Revises:
Create Date: 2021-11-24 18:06:53.935899
"""
from alembic import op
import sqlalchemy as sa
import sqlmodel
# revision identifiers, used by Alembic.
revision = '60e58d3a26fa'
down_revision = None
branch_labels = None
depends_on = None
def upgrade():
# ### commands auto generated by Alembic - please adjust! ###
op.create_table('address',
sa.Column('street_name', | sqlmodel.sql.sqltypes.AutoString() | sqlmodel.sql.sqltypes.AutoString |
"""init database
Revision ID: 60e58d3a26fa
Revises:
Create Date: 2021-11-24 18:06:53.935899
"""
from alembic import op
import sqlalchemy as sa
import sqlmodel
# revision identifiers, used by Alembic.
revision = '60e58d3a26fa'
down_revision = None
branch_labels = None
depends_on = None
def upgrade():
# ### commands auto generated by Alembic - please adjust! ###
op.create_table('address',
sa.Column('street_name', sqlmodel.sql.sqltypes.AutoString(), nullable=False),
sa.Column('house_number', | sqlmodel.sql.sqltypes.AutoString() | sqlmodel.sql.sqltypes.AutoString |
"""init database
Revision ID: 60e58d3a26fa
Revises:
Create Date: 2021-11-24 18:06:53.935899
"""
from alembic import op
import sqlalchemy as sa
import sqlmodel
# revision identifiers, used by Alembic.
revision = '60e58d3a26fa'
down_revision = None
branch_labels = None
depends_on = None
def upgrade():
# ### commands auto generated by Alembic - please adjust! ###
op.create_table('address',
sa.Column('street_name', sqlmodel.sql.sqltypes.AutoString(), nullable=False),
sa.Column('house_number', sqlmodel.sql.sqltypes.AutoString(), nullable=False),
sa.Column('city', | sqlmodel.sql.sqltypes.AutoString() | sqlmodel.sql.sqltypes.AutoString |
"""init database
Revision ID: 60e58d3a26fa
Revises:
Create Date: 2021-11-24 18:06:53.935899
"""
from alembic import op
import sqlalchemy as sa
import sqlmodel
# revision identifiers, used by Alembic.
revision = '60e58d3a26fa'
down_revision = None
branch_labels = None
depends_on = None
def upgrade():
# ### commands auto generated by Alembic - please adjust! ###
op.create_table('address',
sa.Column('street_name', sqlmodel.sql.sqltypes.AutoString(), nullable=False),
sa.Column('house_number', sqlmodel.sql.sqltypes.AutoString(), nullable=False),
sa.Column('city', sqlmodel.sql.sqltypes.AutoString(), nullable=False),
sa.Column('zip_code', | sqlmodel.sql.sqltypes.AutoString() | sqlmodel.sql.sqltypes.AutoString |
"""init database
Revision ID: 60e58d3a26fa
Revises:
Create Date: 2021-11-24 18:06:53.935899
"""
from alembic import op
import sqlalchemy as sa
import sqlmodel
# revision identifiers, used by Alembic.
revision = '60e58d3a26fa'
down_revision = None
branch_labels = None
depends_on = None
def upgrade():
# ### commands auto generated by Alembic - please adjust! ###
op.create_table('address',
sa.Column('street_name', sqlmodel.sql.sqltypes.AutoString(), nullable=False),
sa.Column('house_number', sqlmodel.sql.sqltypes.AutoString(), nullable=False),
sa.Column('city', sqlmodel.sql.sqltypes.AutoString(), nullable=False),
sa.Column('zip_code', sqlmodel.sql.sqltypes.AutoString(), nullable=False),
sa.Column('id', sa.Integer(), nullable=True),
sa.PrimaryKeyConstraint('id')
)
op.create_index(op.f('ix_address_city'), 'address', ['city'], unique=False)
op.create_index(op.f('ix_address_house_number'), 'address', ['house_number'], unique=False)
op.create_index(op.f('ix_address_id'), 'address', ['id'], unique=False)
op.create_index(op.f('ix_address_street_name'), 'address', ['street_name'], unique=False)
op.create_index(op.f('ix_address_zip_code'), 'address', ['zip_code'], unique=False)
op.create_table('product',
sa.Column('name', sa.String(), nullable=True),
sa.Column('id', sa.Integer(), nullable=True),
sa.PrimaryKeyConstraint('id'),
sa.UniqueConstraint('name')
)
op.create_index(op.f('ix_product_id'), 'product', ['id'], unique=False)
op.create_table('customer',
sa.Column('mobile_number', sa.String(), nullable=True),
sa.Column('email', sa.String(), nullable=True),
sa.Column('first_name', | sqlmodel.sql.sqltypes.AutoString() | sqlmodel.sql.sqltypes.AutoString |
"""init database
Revision ID: 60e58d3a26fa
Revises:
Create Date: 2021-11-24 18:06:53.935899
"""
from alembic import op
import sqlalchemy as sa
import sqlmodel
# revision identifiers, used by Alembic.
revision = '60e58d3a26fa'
down_revision = None
branch_labels = None
depends_on = None
def upgrade():
# ### commands auto generated by Alembic - please adjust! ###
op.create_table('address',
sa.Column('street_name', sqlmodel.sql.sqltypes.AutoString(), nullable=False),
sa.Column('house_number', sqlmodel.sql.sqltypes.AutoString(), nullable=False),
sa.Column('city', sqlmodel.sql.sqltypes.AutoString(), nullable=False),
sa.Column('zip_code', sqlmodel.sql.sqltypes.AutoString(), nullable=False),
sa.Column('id', sa.Integer(), nullable=True),
sa.PrimaryKeyConstraint('id')
)
op.create_index(op.f('ix_address_city'), 'address', ['city'], unique=False)
op.create_index(op.f('ix_address_house_number'), 'address', ['house_number'], unique=False)
op.create_index(op.f('ix_address_id'), 'address', ['id'], unique=False)
op.create_index(op.f('ix_address_street_name'), 'address', ['street_name'], unique=False)
op.create_index(op.f('ix_address_zip_code'), 'address', ['zip_code'], unique=False)
op.create_table('product',
sa.Column('name', sa.String(), nullable=True),
sa.Column('id', sa.Integer(), nullable=True),
sa.PrimaryKeyConstraint('id'),
sa.UniqueConstraint('name')
)
op.create_index(op.f('ix_product_id'), 'product', ['id'], unique=False)
op.create_table('customer',
sa.Column('mobile_number', sa.String(), nullable=True),
sa.Column('email', sa.String(), nullable=True),
sa.Column('first_name', sqlmodel.sql.sqltypes.AutoString(), nullable=False),
sa.Column('last_name', | sqlmodel.sql.sqltypes.AutoString() | sqlmodel.sql.sqltypes.AutoString |
"""init database
Revision ID: 60e58d3a26fa
Revises:
Create Date: 2021-11-24 18:06:53.935899
"""
from alembic import op
import sqlalchemy as sa
import sqlmodel
# revision identifiers, used by Alembic.
revision = '60e58d3a26fa'
down_revision = None
branch_labels = None
depends_on = None
def upgrade():
# ### commands auto generated by Alembic - please adjust! ###
op.create_table('address',
sa.Column('street_name', sqlmodel.sql.sqltypes.AutoString(), nullable=False),
sa.Column('house_number', sqlmodel.sql.sqltypes.AutoString(), nullable=False),
sa.Column('city', sqlmodel.sql.sqltypes.AutoString(), nullable=False),
sa.Column('zip_code', sqlmodel.sql.sqltypes.AutoString(), nullable=False),
sa.Column('id', sa.Integer(), nullable=True),
sa.PrimaryKeyConstraint('id')
)
op.create_index(op.f('ix_address_city'), 'address', ['city'], unique=False)
op.create_index(op.f('ix_address_house_number'), 'address', ['house_number'], unique=False)
op.create_index(op.f('ix_address_id'), 'address', ['id'], unique=False)
op.create_index(op.f('ix_address_street_name'), 'address', ['street_name'], unique=False)
op.create_index(op.f('ix_address_zip_code'), 'address', ['zip_code'], unique=False)
op.create_table('product',
sa.Column('name', sa.String(), nullable=True),
sa.Column('id', sa.Integer(), nullable=True),
sa.PrimaryKeyConstraint('id'),
sa.UniqueConstraint('name')
)
op.create_index(op.f('ix_product_id'), 'product', ['id'], unique=False)
op.create_table('customer',
sa.Column('mobile_number', sa.String(), nullable=True),
sa.Column('email', sa.String(), nullable=True),
sa.Column('first_name', sqlmodel.sql.sqltypes.AutoString(), nullable=False),
sa.Column('last_name', sqlmodel.sql.sqltypes.AutoString(), nullable=False),
sa.Column('birth_date', | sqlmodel.sql.sqltypes.AutoString() | sqlmodel.sql.sqltypes.AutoString |
"""init database
Revision ID: 60e58d3a26fa
Revises:
Create Date: 2021-11-24 18:06:53.935899
"""
from alembic import op
import sqlalchemy as sa
import sqlmodel
# revision identifiers, used by Alembic.
revision = '60e58d3a26fa'
down_revision = None
branch_labels = None
depends_on = None
def upgrade():
# ### commands auto generated by Alembic - please adjust! ###
op.create_table('address',
sa.Column('street_name', sqlmodel.sql.sqltypes.AutoString(), nullable=False),
sa.Column('house_number', sqlmodel.sql.sqltypes.AutoString(), nullable=False),
sa.Column('city', sqlmodel.sql.sqltypes.AutoString(), nullable=False),
sa.Column('zip_code', sqlmodel.sql.sqltypes.AutoString(), nullable=False),
sa.Column('id', sa.Integer(), nullable=True),
sa.PrimaryKeyConstraint('id')
)
op.create_index(op.f('ix_address_city'), 'address', ['city'], unique=False)
op.create_index(op.f('ix_address_house_number'), 'address', ['house_number'], unique=False)
op.create_index(op.f('ix_address_id'), 'address', ['id'], unique=False)
op.create_index(op.f('ix_address_street_name'), 'address', ['street_name'], unique=False)
op.create_index(op.f('ix_address_zip_code'), 'address', ['zip_code'], unique=False)
op.create_table('product',
sa.Column('name', sa.String(), nullable=True),
sa.Column('id', sa.Integer(), nullable=True),
sa.PrimaryKeyConstraint('id'),
sa.UniqueConstraint('name')
)
op.create_index(op.f('ix_product_id'), 'product', ['id'], unique=False)
op.create_table('customer',
sa.Column('mobile_number', sa.String(), nullable=True),
sa.Column('email', sa.String(), nullable=True),
sa.Column('first_name', sqlmodel.sql.sqltypes.AutoString(), nullable=False),
sa.Column('last_name', sqlmodel.sql.sqltypes.AutoString(), nullable=False),
sa.Column('birth_date', sqlmodel.sql.sqltypes.AutoString(), nullable=False),
sa.Column('gender', | sqlmodel.sql.sqltypes.AutoString() | sqlmodel.sql.sqltypes.AutoString |
import os
from pathlib import Path
from app import MyApp
from dotenv import load_dotenv
from sqlmodel import Session, SQLModel, create_engine
load_dotenv()
MOVIES_PATH = Path(os.getenv("MOVIES_FILEPATH", None))
dbfile = Path("database.db")
engine = | create_engine("sqlite:///database.db", echo=False) | sqlmodel.create_engine |
import os
from pathlib import Path
from app import MyApp
from dotenv import load_dotenv
from sqlmodel import Session, SQLModel, create_engine
load_dotenv()
MOVIES_PATH = Path(os.getenv("MOVIES_FILEPATH", None))
dbfile = Path("database.db")
engine = create_engine("sqlite:///database.db", echo=False)
def create_db_and_tables():
| SQLModel.metadata.create_all(engine) | sqlmodel.SQLModel.metadata.create_all |
import os
from pathlib import Path
from app import MyApp
from dotenv import load_dotenv
from sqlmodel import Session, SQLModel, create_engine
load_dotenv()
MOVIES_PATH = Path(os.getenv("MOVIES_FILEPATH", None))
dbfile = Path("database.db")
engine = create_engine("sqlite:///database.db", echo=False)
def create_db_and_tables():
SQLModel.metadata.create_all(engine)
def main():
if not dbfile.exists():
create_db_and_tables()
with | Session(engine) | sqlmodel.Session |
from typing import Optional
from sqlmodel import SQLModel, Field, func, DateTime, Column, Boolean
from datetime import datetime
class BaseModel(SQLModel):
"""
BaseModel class
"""
class Config:
use_enum_values = True
class BaseTableFields(SQLModel):
"""
BaseTableField class
"""
id: Optional[int] = | Field(default=None, primary_key=True, nullable=False) | sqlmodel.Field |
from typing import Optional
from sqlmodel import SQLModel, Field, func, DateTime, Column, Boolean
from datetime import datetime
class BaseModel(SQLModel):
"""
BaseModel class
"""
class Config:
use_enum_values = True
class BaseTableFields(SQLModel):
"""
BaseTableField class
"""
id: Optional[int] = Field(default=None, primary_key=True, nullable=False)
created_at: Optional[datetime] = Field(
default=None,
sa_column=Column(
DateTime(timezone=True),
server_default=func.now(),
nullable=False,
)
)
updated_at: Optional[datetime] = Field(
default=None,
sa_column=Column(
DateTime(timezone=True),
server_default=func.now(),
onupdate=func.now(),
nullable=False,
)
)
is_active: Optional[bool] = Field(
default=None,
sa_column= | Column(Boolean, server_default='true', default=True) | sqlmodel.Column |
from typing import Optional
from sqlmodel import SQLModel, Field, func, DateTime, Column, Boolean
from datetime import datetime
class BaseModel(SQLModel):
"""
BaseModel class
"""
class Config:
use_enum_values = True
class BaseTableFields(SQLModel):
"""
BaseTableField class
"""
id: Optional[int] = Field(default=None, primary_key=True, nullable=False)
created_at: Optional[datetime] = Field(
default=None,
sa_column=Column(
| DateTime(timezone=True) | sqlmodel.DateTime |
from typing import Optional
from sqlmodel import SQLModel, Field, func, DateTime, Column, Boolean
from datetime import datetime
class BaseModel(SQLModel):
"""
BaseModel class
"""
class Config:
use_enum_values = True
class BaseTableFields(SQLModel):
"""
BaseTableField class
"""
id: Optional[int] = Field(default=None, primary_key=True, nullable=False)
created_at: Optional[datetime] = Field(
default=None,
sa_column=Column(
DateTime(timezone=True),
server_default=func.now(),
nullable=False,
)
)
updated_at: Optional[datetime] = Field(
default=None,
sa_column=Column(
| DateTime(timezone=True) | sqlmodel.DateTime |
from typing import Optional
from sqlmodel import SQLModel, Field, func, DateTime, Column, Boolean
from datetime import datetime
class BaseModel(SQLModel):
"""
BaseModel class
"""
class Config:
use_enum_values = True
class BaseTableFields(SQLModel):
"""
BaseTableField class
"""
id: Optional[int] = Field(default=None, primary_key=True, nullable=False)
created_at: Optional[datetime] = Field(
default=None,
sa_column=Column(
DateTime(timezone=True),
server_default= | func.now() | sqlmodel.func.now |
from typing import Optional
from sqlmodel import SQLModel, Field, func, DateTime, Column, Boolean
from datetime import datetime
class BaseModel(SQLModel):
"""
BaseModel class
"""
class Config:
use_enum_values = True
class BaseTableFields(SQLModel):
"""
BaseTableField class
"""
id: Optional[int] = Field(default=None, primary_key=True, nullable=False)
created_at: Optional[datetime] = Field(
default=None,
sa_column=Column(
DateTime(timezone=True),
server_default=func.now(),
nullable=False,
)
)
updated_at: Optional[datetime] = Field(
default=None,
sa_column=Column(
DateTime(timezone=True),
server_default= | func.now() | sqlmodel.func.now |