Users, transactions CRUD APIs

#4
.dockerignore CHANGED
@@ -1,5 +1,3 @@
 __pycache__
 storage
-data/*
-venv
-myvenv
+data/*

.env.example CHANGED
@@ -43,11 +43,3 @@ APP_HOST=0.0.0.0
 
 # The port to start the backend app.
 APP_PORT=8000
-
-# Postgres database configuration
-POSTGRES_USER=postgres
-POSTGRES_PASSWORD=postgres
-POSTGRES_DB_NAME=postgres
-POSTGRES_DB_HOST=localhost
-POSTGRES_DB_PORT=5432
-SQLALCHEMY_DATABASE_URL=postgresql+asyncpg://${POSTGRES_USER}:${POSTGRES_PASSWORD}@${POSTGRES_DB_HOST}:${POSTGRES_DB_PORT}/${POSTGRES_DB_NAME}

.gitignore CHANGED
@@ -1,5 +1,4 @@
 __pycache__
 storage
 .env
-venv
-myvenv
+data/*

.vscode/settings.json CHANGED
@@ -1,11 +1,3 @@
 {
-    "jupyter.notebookFileRoot": "${workspaceFolder}",
-    "[python]": {
-        "editor.defaultFormatter": "ms-python.black-formatter",
-        "editor.formatOnSave": true,
-    },
-    "ms-python.black-formatter.args": [
-        "--line-length",
-        "119"
-    ]
+    "jupyter.notebookFileRoot": "${workspaceFolder}"
 }

Dockerfile CHANGED
@@ -25,13 +25,4 @@ COPY . .
 # Make port 8000 available to the world outside this container
 EXPOSE 8000
 
-ENV POSTGRES_USER codepath_project_owner
-ENV POSTGRES_PASSWORD 03EdiworgCJz
-ENV POSTGRES_DB_NAME codepath_project
-ENV POSTGRES_DB_HOST ep-icy-cloud-a5m4mcgo.us-east-2.aws.neon.tech
-ENV POSTGRES_DB_PORT 5432
-
-# Run migrations
-RUN alembic upgrade head
-
 CMD ["python", "main.py"]

Dockerfile.local.postgres DELETED
@@ -1,23 +0,0 @@
-# Pull the official PostgreSQL 16 image
-FROM postgres:16
-
-# Create a directory for the database files
-RUN mkdir -p /var/lib/postgresql/data
-
-# Change the ownership of the data directory
-RUN chown -R postgres:postgres /var/lib/postgresql/data
-
-# Copy the configuration files into the data directory
-# NOTE: the [f] suffix is to ensure that the COPY command will not fail if the files don't exist
-COPY ./pg_hba.con[f] /var/lib/postgresql/data/pg_hba.conf
-COPY ./postgresql.con[f] /var/lib/postgresql/data/postgresql.conf
-
-# Include environment variables for the PostgreSQL user and password
-ENV POSTGRES_USER=${POSTGRES_USER}
-ENV POSTGRES_PASSWORD=${POSTGRES_PASSWORD}
-
-# Expose the default PostgreSQL port
-EXPOSE 5432
-
-# Start the PostgreSQL server
-CMD ["postgres", "-c", "config_file=/var/lib/postgresql/data/postgresql.conf"]

Makefile DELETED
@@ -1,14 +0,0 @@
-
-include .env
-
-build-postgres:
-	docker build -t codepath_project_postgres -f Dockerfile.local.postgres .
-
-run-postgres:
-	docker run -d -p 5432:5432 -e POSTGRES_PASSWORD=${POSTGRES_PASSWORD} -e POSTGRES_USER=${POSTGRES_USER} --name ${POSTGRES_DB_NAME} codepath_project_postgres
-
-run-migrations:
-	alembic upgrade head
-
-generate-migrations:
-	alembic revision --autogenerate -m "$(migration_title)"

README.md CHANGED
@@ -69,36 +69,6 @@ The API allows CORS for all origins to simplify development. You can change this
 ENVIRONMENT=prod python main.py
 ```
 
-## Local Postgres database setup
-
-To setup a local postgres database, run:
-
-1. Build the docker image:
-
-```bash
-make build-postgres
-```
-
-2. Start the docker container:
-
-```bash
-make run-postgres
-```
-
-## Running Migrations
-
-To generate new migrations, run:
-
-```bash
-make generate-migrations migration_title="<name_for_migration>"
-```
-
-To locally verify your changes, run:
-
-```bash
-make run-migrations
-```
-
 ## Using Docker
 
 1. Build an image for the FastAPI app:

alembic.ini DELETED
@@ -1,116 +0,0 @@
-# A generic, single database configuration.
-
-[alembic]
-# path to migration scripts
-script_location = migration
-
-# template used to generate migration file names; The default value is %%(rev)s_%%(slug)s
-# Uncomment the line below if you want the files to be prepended with date and time
-# see https://alembic.sqlalchemy.org/en/latest/tutorial.html#editing-the-ini-file
-# for all available tokens
-# file_template = %%(year)d_%%(month).2d_%%(day).2d_%%(hour).2d%%(minute).2d-%%(rev)s_%%(slug)s
-
-# sys.path path, will be prepended to sys.path if present.
-# defaults to the current working directory.
-prepend_sys_path = .
-
-# timezone to use when rendering the date within the migration file
-# as well as the filename.
-# If specified, requires the python>=3.9 or backports.zoneinfo library.
-# Any required deps can installed by adding `alembic[tz]` to the pip requirements
-# string value is passed to ZoneInfo()
-# leave blank for localtime
-# timezone =
-
-# max length of characters to apply to the
-# "slug" field
-# truncate_slug_length = 40
-
-# set to 'true' to run the environment during
-# the 'revision' command, regardless of autogenerate
-# revision_environment = false
-
-# set to 'true' to allow .pyc and .pyo files without
-# a source .py file to be detected as revisions in the
-# versions/ directory
-# sourceless = false
-
-# version location specification; This defaults
-# to migration/versions.  When using multiple version
-# directories, initial revisions must be specified with --version-path.
-# The path separator used here should be the separator specified by "version_path_separator" below.
-# version_locations = %(here)s/bar:%(here)s/bat:migration/versions
-
-# version path separator; As mentioned above, this is the character used to split
-# version_locations. The default within new alembic.ini files is "os", which uses os.pathsep.
-# If this key is omitted entirely, it falls back to the legacy behavior of splitting on spaces and/or commas.
-# Valid values for version_path_separator are:
-#
-# version_path_separator = :
-# version_path_separator = ;
-# version_path_separator = space
version_path_separator = os  # Use os.pathsep. Default configuration used for new projects.
-
-# set to 'true' to search source files recursively
-# in each "version_locations" directory
-# new in Alembic version 1.10
-# recursive_version_locations = false
-
-# the output encoding used when revision files
-# are written from script.py.mako
-# output_encoding = utf-8
-
-sqlalchemy.url = driver://user:pass@localhost/dbname
-
-
-[post_write_hooks]
-# post_write_hooks defines scripts or Python functions that are run
-# on newly generated revision scripts.  See the documentation for further
-# detail and examples
-
-# format using "black" - use the console_scripts runner, against the "black" entrypoint
-# hooks = black
-# black.type = console_scripts
-# black.entrypoint = black
-# black.options = -l 79 REVISION_SCRIPT_FILENAME
-
-# lint with attempts to fix using "ruff" - use the exec runner, execute a binary
-# hooks = ruff
-# ruff.type = exec
-# ruff.executable = %(here)s/.venv/bin/ruff
-# ruff.options = --fix REVISION_SCRIPT_FILENAME
-
-# Logging configuration
-[loggers]
-keys = root,sqlalchemy,alembic
-
-[handlers]
-keys = console
-
-[formatters]
-keys = generic
-
-[logger_root]
-level = WARN
-handlers = console
-qualname =
-
-[logger_sqlalchemy]
-level = WARN
-handlers =
-qualname = sqlalchemy.engine
-
-[logger_alembic]
-level = INFO
-handlers =
-qualname = alembic
-
-[handler_console]
-class = StreamHandler
-args = (sys.stderr,)
-level = NOTSET
-formatter = generic
-
-[formatter_generic]
-format = %(levelname)-5.5s [%(name)s]  %(message)s
-datefmt = %H:%M:%S

app/api/routers/chat.py CHANGED
@@ -1,4 +1,4 @@
-from pydantic import BaseModel, ConfigDict
+from pydantic import BaseModel
 from typing import List, Any, Optional, Dict, Tuple
 from fastapi import APIRouter, Depends, HTTPException, Request, status
 from llama_index.core.chat_engine.types import BaseChatEngine
@@ -19,8 +19,9 @@ class _Message(BaseModel):
 
 class _ChatData(BaseModel):
     messages: List[_Message]
-    model_config = ConfigDict(
-        json_schema_extra={
+
+    class Config:
+        json_schema_extra = {
             "example": {
                 "messages": [
                     {
@@ -30,7 +31,6 @@ class _ChatData(BaseModel):
                 ]
             }
         }
-    )
 
 
 class _SourceNodes(BaseModel):
@@ -121,7 +121,12 @@ async def chat(
             yield VercelStreamResponse.convert_data(
                 {
                     "type": "sources",
-                    "data": {"nodes": [_SourceNodes.from_source_node(node).dict() for node in response.source_nodes]},
+                    "data": {
+                        "nodes": [
+                            _SourceNodes.from_source_node(node).dict()
+                            for node in response.source_nodes
+                        ]
+                    },
                 }
             )
 
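Note on the `_ChatData` hunk: it swaps Pydantic v2's `ConfigDict`-based `model_config` for the v1-style inner `Config` class. Both forms attach the same `example` to the generated OpenAPI schema; a minimal sketch with an illustrative model (not this repo's code — Pydantic v2 still accepts the inner-class form, though it flags it as deprecated):

```python
from typing import List

from pydantic import BaseModel, ConfigDict


class ChatDataV2(BaseModel):
    # v2 idiom (the removed lines): configuration via ConfigDict.
    messages: List[str]
    model_config = ConfigDict(json_schema_extra={"example": {"messages": ["hi"]}})


class ChatDataV1Style(BaseModel):
    # v1-style idiom (the added lines): an inner Config class.
    messages: List[str]

    class Config:
        json_schema_extra = {"example": {"messages": ["hi"]}}
```
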
app/api/routers/file_upload.py DELETED
@@ -1,43 +0,0 @@
-from typing import Annotated
-from fastapi import APIRouter, UploadFile, Depends
-from app.categorization.file_processing import process_file, save_results
-from app.schema.index import FileUploadCreate
-import asyncio
-import os
-import csv
-
-from app.engine.postgresdb import get_db_session
-from sqlalchemy.ext.asyncio import AsyncSession
-
-file_upload_router = r = APIRouter(prefix="/api/v1/file_upload", tags=["file_upload"])
-
-@r.post(
-    "/",
-    responses={
-        200: {"description": "File successfully uploaded"},
-        400: {"description": "Bad request"},
-        500: {"description": "Internal server error"},
-    },
-)
-async def create_file(input_file: UploadFile, db: AsyncSession = Depends(get_db_session)):
-    try:
-        # Create directory to store all uploaded .csv files
-        file_upload_directory_path = "data/tx_data/input"
-        if not os.path.exists(file_upload_directory_path):
-            os.makedirs(file_upload_directory_path)
-
-        # Write items of .csv filte to directory
-        with open(os.path.join(file_upload_directory_path, input_file.filename)) as output_file:
-            [output_file.write(" ".join(row)+'\n') for row in csv.reader(input_file)]
-            output_file.close()
-
-        # With the newly created file and it's path, process and save it for embedding
-        processed_file = process_file(os.path.realpath(input_file.filename))
-        result = await asyncio.gather(processed_file)
-        await save_results(db, result)
-
-    except Exception:
-        return {"message": "There was an error uploading this file. Ensure you have a .csv file with the following columns:"
-                "\n transaction_date, type, category, name_description, amount"}
-
-    return {"message": f"Successfully uploaded {input_file.filename}"}

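For context on the deleted upload route: it opened the destination file with the default read mode and passed the `UploadFile` object straight to `csv.reader`, so the write step could not have worked as written. A corrected sketch of just that file-write step, assuming FastAPI's `UploadFile` (illustrative helper, not code from this repo):

```python
import csv
import io
import os

from fastapi import UploadFile


async def write_upload_to_disk(input_file: UploadFile, directory: str = "data/tx_data/input") -> str:
    """Persist an uploaded CSV, normalizing each row to space-separated text."""
    os.makedirs(directory, exist_ok=True)
    destination = os.path.join(directory, input_file.filename)
    raw = await input_file.read()  # UploadFile.read() returns bytes; decode before parsing
    with open(destination, "w", newline="") as output_file:
        for row in csv.reader(io.StringIO(raw.decode("utf-8"))):
            output_file.write(" ".join(row) + "\n")
    return destination
```
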
app/api/routers/income_statement.py DELETED
@@ -1,64 +0,0 @@
-from typing import List
-from fastapi import APIRouter, Depends, HTTPException, status
-from sqlalchemy.ext.asyncio import AsyncSession
-from app.model.transaction import Transaction as TransactionModel
-from app.model.income_statement import IncomeStatement as IncomeStatementModel
-from app.schema.index import IncomeStatementCreateRequest, IncomeStatementResponse
-from app.engine.postgresdb import get_db_session
-from app.service.income_statement import call_llm_to_create_income_statement
-
-income_statement_router = r = APIRouter(prefix="/api/v1/income_statement", tags=["income_statement"])
-
-
-@r.post(
-    "/",
-    responses={
-        200: {"description": "New transaction created"},
-        400: {"description": "Bad request"},
-        500: {"description": "Internal server error"},
-    },
-)
-async def create_income_statement(payload: IncomeStatementCreateRequest, db: AsyncSession = Depends(get_db_session)) -> None:
-    try:
-        await call_llm_to_create_income_statement(payload, db)
-
-    except Exception as e:
-        raise HTTPException(status_code=500, detail=str(e))
-
-
-@r.get(
-    "/user/{user_id}",
-    response_model=List[IncomeStatementResponse],
-    responses={
-        200: {"description": "New user created"},
-        400: {"description": "Bad request"},
-        204: {"description": "No content"},
-        500: {"description": "Internal server error"},
-    },
-)
-async def get_income_statements(
-    user_id: int, db: AsyncSession = Depends(get_db_session)
-) -> List[IncomeStatementResponse]:
-    """
-    Retrieve all income statements.
-    """
-    result = await IncomeStatementModel.get_by_user(db, user_id)
-    if len(result) == 0:
-        raise HTTPException(status_code=status.HTTP_204_NO_CONTENT, detail="No income statements found for this user")
-    return result
-
-
-@r.get(
-    "/report/{report_id}",
-    response_model=IncomeStatementResponse,
-    responses={
-        200: {"description": "Income statement found"},
-        404: {"description": "Income statement not found"},
-        500: {"description": "Internal server error"},
-    },
-)
-async def get_income_statement(report_id: int, db: AsyncSession = Depends(get_db_session)) -> IncomeStatementResponse:
-    income_statement = await IncomeStatementModel.get(db, id=report_id)
-    if not income_statement:
-        raise HTTPException(status_code=404, detail="Income statement not found")
-    return income_statement

app/api/routers/transaction.py DELETED
@@ -1,30 +0,0 @@
-import os
-from typing import List
-from fastapi import APIRouter, Depends, HTTPException, status
-from sqlalchemy.ext.asyncio import AsyncSession
-from app.model.transaction import Transaction as TransactionModel
-from app.schema.index import TransactionResponse
-from app.engine.postgresdb import get_db_session
-
-transaction_router = r = APIRouter(prefix="/api/v1/transactions", tags=["transactions"])
-
-
-@r.get(
-    "/{user_id}",
-    response_model=List[TransactionResponse],
-    responses={
-        200: {"description": "New user created"},
-        400: {"description": "Bad request"},
-        204: {"description": "No content"},
-        500: {"description": "Internal server error"},
-    },
-)
-async def get_transactions(user_id: int, db: AsyncSession = Depends(get_db_session)):
-    """
-    Retrieve all transactions.
-    """
-    result = await TransactionModel.get_by_user(db, user_id)
-    all_rows = result.all()
-    if len(all_rows) == 0:
-        raise HTTPException(status_code=status.HTTP_204_NO_CONTENT, detail="No transactions found for this user")
-    return all_rows

app/api/routers/user.py DELETED
@@ -1,89 +0,0 @@
-import logging
-from fastapi import APIRouter, Depends, HTTPException
-from sqlalchemy.ext.asyncio import AsyncSession
-
-from app.engine.postgresdb import get_db_session
-from app.schema.index import UserCreate, User as UserSchema, UserResponse, UserUpdate
-from app.model.user import User as UserModel
-
-
-user_router = r = APIRouter(prefix="/api/v1/users", tags=["users"])
-logger = logging.getLogger(__name__)
-
-
-@r.post(
-    "/",
-    response_model=UserResponse,
-    responses={
-        200: {"description": "New user created"},
-        400: {"description": "Bad request"},
-        409: {"description": "Conflict"},
-        500: {"description": "Internal server error"},
-    },
-)
-async def create_user(user: UserCreate, db: AsyncSession = Depends(get_db_session)):
-    try:
-        db_user = await UserModel.get(db, email=user.email)
-        if db_user and not db_user.is_deleted:
-            raise HTTPException(status_code=409, detail="User already exists")
-
-        await UserModel.create(db, **user.model_dump())
-        user = await UserModel.get(db, email=user.email)
-        return user
-    except Exception as e:
-        raise HTTPException(status_code=500, detail=str(e))
-
-
-@r.get(
-    "/{email}",
-    response_model=UserResponse,
-    responses={
-        200: {"description": "User found"},
-        404: {"description": "User not found"},
-        500: {"description": "Internal server error"},
-    },
-)
-async def get_user(email: str, db: AsyncSession = Depends(get_db_session)):
-    user = await UserModel.get(db, email=email)
-    if not user:
-        raise HTTPException(status_code=404, detail="User not found")
-    return user
-
-
-@r.put(
-    "/{email}",
-    response_model=UserResponse,
-    responses={
-        200: {"description": "User updated"},
-        404: {"description": "User not found"},
-        500: {"description": "Internal server error"},
-    },
-)
-async def update_user(email: str, user_payload: UserUpdate, db: AsyncSession = Depends(get_db_session)):
-    try:
-        user = await UserModel.get(db, email=email)
-        if not user:
-            raise HTTPException(status_code=404, detail="User not found")
-        await UserModel.update(db, id=user.id, **user_payload.model_dump())
-        user = await UserModel.get(db, email=email)
-        return user
-    except Exception as e:
-        raise HTTPException(status_code=500, detail=str(e))
-
-
-@r.delete(
-    "/{email}",
-    response_model=UserResponse,
-    responses={
-        200: {"description": "User deleted"},
-        404: {"description": "User not found"},
-        500: {"description": "Internal server error"},
-    },
-)
-async def delete_user(email: str, db: AsyncSession = Depends(get_db_session)):
-    user = await UserModel.get(db, email=email)
-    if not user:
-        raise HTTPException(status_code=404, detail="User not found")
-    await UserModel.delete(db, email=email)
-    user = await UserModel.get(db, email=email)
-    return user

app/categorization/file_processing.py CHANGED
@@ -11,11 +11,6 @@ from dateparser import parse
 
 from app.categorization.categorizer_list import categorize_list
 from app.categorization.config import RESULT_OUTPUT_FILE, CATEGORY_REFERENCE_OUTPUT_FILE
-from app.model.transaction import Transaction
-from app.schema.index import TransactionCreate
-
-from sqlalchemy.ext.asyncio import AsyncSession
-
 
 # Read file and process it (e.g. categorize transactions)
 async def process_file(file_path: str) -> Dict[str, Union[str, pd.DataFrame]]:
@@ -27,27 +22,28 @@ async def process_file(file_path: str) -> Dict[str, Union[str, pd.DataFrame]]:
 
     Returns:
         Dict[str, Union[str, pd.DataFrame]]: Dictionary containing the file name, processed output, and error information if any
-    """
+    """
 
     file_name = os.path.basename(file_path)
-    result = {"file_name": file_name, "output": pd.DataFrame(), "error": ""}
+    result= {'file_name': file_name, 'output': pd.DataFrame(), 'error': ''}
     try:
-        # Read file into standardized tx format: source, date, type, category, description, amount
+        # Read file into standardized tx format: source, date, type, category, description, amount
        tx_list = standardize_csv_file(file_path)
 
         # Categorize transactions
-        result["output"] = await categorize_list(tx_list)
-        print(f"File processed sucessfully: {file_name}")
+        result['output'] = await categorize_list(tx_list)
+        print(f'File processed sucessfully: {file_name}')
 
     except Exception as e:
         # Return an error indicator and exception info
         logging.log(logging.ERROR, f"| File: {file_name} | Unexpected Error: {e}")
-        print(f"ERROR processing file {file_name}: {e}")
-        result["error"] = str(e)
-
+        print(f'ERROR processing file {file_name}: {e}')
+        result['error'] = str(e)
+
     return result
 
 
+
 def standardize_csv_file(file_path: str) -> pd.DataFrame:
     """
     Read and prepare the data from the input file.
@@ -59,21 +55,21 @@ def standardize_csv_file(file_path: str) -> pd.DataFrame:
         pd.DataFrame: Prepared transaction data.
     """
 
-    tx_list = pd.read_csv(file_path, index_col=False)
-    tx_list.attrs["file_name"] = file_path
+    tx_list = pd.read_csv(file_path, index_col=False)
+    tx_list.attrs['file_name'] = file_path
     tx_list.columns = tx_list.columns.str.lower().str.strip()
 
     # Standardize dates to YYYY/MM/DD format
-    tx_list["date"] = pd.to_datetime(tx_list["date"]).dt.strftime("%Y/%m/%d")
+    tx_list['date'] = pd.to_datetime(tx_list['date']).dt.strftime('%Y/%m/%d')
 
     # Add source and reindex to desired tx format; category column is new and therefore empty
-    tx_list.loc[:, "source"] = os.path.basename(file_path)
-    tx_list = tx_list.reindex(columns=["date", "expense/income", "category", "name/description", "amount"])
+    tx_list.loc[:, 'source'] = os.path.basename(file_path)
+    tx_list = tx_list.reindex(columns=['date', 'expense/income', 'category', 'name/description', 'amount'])
 
     return tx_list
 
 
-async def save_results(db: AsyncSession, results: List) -> None:
+def save_results(results: List) -> None:
     """
     Merge all interim results in the input folder and write the merged results to the output file.
 
@@ -91,33 +87,29 @@ async def save_results(db: AsyncSession, results: List) -> None:
     ko_files = []
     error_messages = []
 
-    col_list = ["transaction_date", "type", "category", "name_description", "amount"]
+    col_list = ['date', 'expense/income', 'category', 'name/description', 'amount']
     tx_list = pd.DataFrame(columns=col_list)
     for result in results:
-        if not result["error"]:
-            ok_files.append(result["file_name"])
-            result_df = result["output"]
+        if not result['error']:
+            ok_files.append(result['file_name'])
+            result_df = result['output']
            result_df.columns = col_list
            tx_list = pd.concat([tx_list, result_df], ignore_index=True)
        else:
-            ko_files.append(result["file_name"])
-            error_messages.append(f"{result['file_name']}: {result['error']}")
+            ko_files.append(result['file_name'])
+            error_messages.append(f"{result['file_name']}: {result['error']}")
 
-    # Save to database
-    # FIXME: get user_id from session
-    txn_list_to_save = [TransactionCreate(**row.to_dict(), user_id=1) for _, row in tx_list.iterrows()]
-    await Transaction.bulk_create(db, txn_list_to_save)
+    # Write contents to output file (based on file type)
+    tx_list.to_csv(RESULT_OUTPUT_FILE, mode="a", index=False, header=not os.path.exists(RESULT_OUTPUT_FILE))
 
-    new_ref_data = tx_list[["name/description", "category"]]
+    new_ref_data = tx_list[['name/description', 'category']]
     if os.path.exists(CATEGORY_REFERENCE_OUTPUT_FILE):
         # If it exists, add master file to interim results
-        old_ref_data = pd.read_csv(CATEGORY_REFERENCE_OUTPUT_FILE, names=["name/description", "category"], header=0)
+        old_ref_data = pd.read_csv(CATEGORY_REFERENCE_OUTPUT_FILE, names=['name/description', 'category'], header=0)
         new_ref_data = pd.concat([old_ref_data, new_ref_data], ignore_index=True)
-
+
     # Drop duplicates, sort, and write to create new Master File
-    new_ref_data.drop_duplicates(subset=["name/description"]).sort_values(by=["name/description"]).to_csv(
-        CATEGORY_REFERENCE_OUTPUT_FILE, mode="w", index=False, header=True
-    )
+    new_ref_data.drop_duplicates(subset=['name/description']).sort_values(by=['name/description']).to_csv(CATEGORY_REFERENCE_OUTPUT_FILE, mode="w", index=False, header=True)
 
     # Summarize results
     print(f"\nProcessed {len(results)} files: {len(ok_files)} successful, {len(ko_files)} with errors\n")
@@ -125,4 +117,4 @@ async def save_results(db: AsyncSession, results: List) -> None:
         print(f"Errors in the following files:")
         for message in error_messages:
             print(f"    {message}")
-    print("\n")
+    print('\n')
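
The reworked `save_results` above is synchronous again and appends to `RESULT_OUTPUT_FILE` instead of bulk-inserting into Postgres. The append works because the CSV header is written only when the output file does not exist yet; a minimal sketch of that pattern (hypothetical path, pandas only):

```python
import os

import pandas as pd

OUTPUT_FILE = "data/tx_data/output/results.csv"  # hypothetical path for illustration


def append_rows(df: pd.DataFrame) -> None:
    # Emit the header only on the first write, while the file does not exist yet;
    # later calls append rows beneath the existing header.
    df.to_csv(OUTPUT_FILE, mode="a", index=False, header=not os.path.exists(OUTPUT_FILE))
```
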
app/engine/postgresdb.py DELETED
@@ -1,81 +0,0 @@
-import logging
-import contextlib
-from typing import Any, AsyncIterator
-
-from sqlalchemy.ext.asyncio import (
-    AsyncConnection,
-    AsyncSession,
-    AsyncEngine,
-    async_sessionmaker,
-    create_async_engine,
-)
-from sqlalchemy.orm import declarative_base
-
-from config.index import config as env
-
-logger = logging.getLogger(__name__)
-
-Base = declarative_base()
-
-
-class PostgresDatabase:
-
-    def __init__(self):
-        self._engine: AsyncEngine | None = None
-        self._sessionmaker: async_sessionmaker | None = None
-
-    def init(self, host: str, engine_kwargs: dict[str, Any] = {}):
-        self._engine = create_async_engine(host, **engine_kwargs)
-        self._sessionmaker = async_sessionmaker(autocommit=False, bind=self._engine)
-
-    async def close(self):
-        if self._engine is None:
-            raise Exception("DatabaseSessionManager is not initialized")
-        await self._engine.dispose()
-
-        self._engine = None
-        self._sessionmaker = None
-
-    @contextlib.asynccontextmanager
-    async def connect(self) -> AsyncIterator[AsyncConnection]:
-        if self._engine is None:
-            raise Exception("DatabaseSessionManager is not initialized")
-
-        async with self._engine.begin() as connection:
-            try:
-                yield connection
-            except Exception:
-                await connection.rollback()
-                raise
-
-    @contextlib.asynccontextmanager
-    async def session(self) -> AsyncIterator[AsyncSession]:
-        if self._sessionmaker is None:
-            raise Exception("DatabaseSessionManager is not initialized")
-
-        session = self._sessionmaker()
-        try:
-            yield session
-        except Exception:
-            await session.rollback()
-            raise
-        finally:
-            await session.close()
-
-    def get_engine(self):
-        return self._engine
-
-    # Used for testing
-    async def create_all(self, connection: AsyncConnection):
-        await connection.run_sync(Base.metadata.create_all)
-
-    async def drop_all(self, connection: AsyncConnection):
-        await connection.run_sync(Base.metadata.drop_all)
-
-
-postgresdb = PostgresDatabase()
-
-
-async def get_db_session():
-    async with postgresdb.session() as session:
-        yield session

app/model/base.py DELETED
@@ -1,15 +0,0 @@
-from datetime import datetime, timezone
-from sqlalchemy import DateTime
-from sqlalchemy.orm import Mapped, mapped_column
-
-from app.engine.postgresdb import Base
-
-
-class BaseModel:
-    id: Mapped[int] = mapped_column(primary_key=True, index=True, autoincrement=True)
-    created_at: Mapped[datetime] = mapped_column(DateTime(timezone=True), default=datetime.now(timezone.utc))
-    updated_at: Mapped[datetime] = mapped_column(
-        DateTime(timezone=True),
-        default=datetime.now(timezone.utc),
-        onupdate=datetime.now(timezone.utc),
-    )

app/model/income_statement.py DELETED
@@ -1,46 +0,0 @@
-from datetime import datetime
-from typing import Any, Dict, List
-from sqlalchemy import ForeignKey
-from sqlalchemy.sql import expression as sql
-from sqlalchemy.orm import relationship, Mapped, mapped_column
-from sqlalchemy.ext.asyncio import AsyncSession
-from sqlalchemy.dialects.postgresql import JSON
-
-from app.model.base import BaseModel
-from app.engine.postgresdb import Base
-from app.schema.index import IncomeStatement as IncomeStatementSchema
-
-
-class IncomeStatement(Base, BaseModel):
-    __tablename__ = "income_statement"
-
-    date_from: Mapped[datetime]
-    date_to: Mapped[datetime]
-    income: Mapped[Dict] = mapped_column(JSON, nullable=True)
-    expenses: Mapped[Dict] = mapped_column(JSON, nullable=True)
-
-    user_id: Mapped[int] = mapped_column(ForeignKey("users.id"))
-    user = relationship("User", back_populates="income_statements")
-
-    def __str__(self) -> str:
-        return f"IncomeStatement(id={self.id}, user_id={self.user_id}, date_from={self.date_from}, date_to={self.date_to}, income={self.income}, expenses={self.expenses})"
-
-    @classmethod
-    async def create(cls: "type[IncomeStatement]", db: AsyncSession, **kwargs: IncomeStatementSchema) -> "IncomeStatement":
-        income_statement = cls(**kwargs)
-        db.add(income_statement)
-        await db.commit()
-        await db.refresh(income_statement)
-        return income_statement
-
-    @classmethod
-    async def get_by_user(cls: "type[IncomeStatement]", db: AsyncSession, user_id: int) -> "List[IncomeStatement]":
-        query = sql.select(cls).where(cls.user_id == user_id)
-        income_statements = await db.scalars(query)
-        return income_statements.all()
-
-    @classmethod
-    async def get(cls: "type[IncomeStatement]", db: AsyncSession, id: int) -> "IncomeStatement":
-        query = sql.select(cls).where(cls.id == id)
-        income_statement = await db.scalar(query)
-        return income_statement

app/model/transaction.py DELETED
@@ -1,62 +0,0 @@
-from datetime import datetime
-from typing import List
-from sqlalchemy import ForeignKey
-from sqlalchemy.orm import relationship, Mapped, mapped_column
-from sqlalchemy.sql import expression as sql
-from sqlalchemy.ext.asyncio import AsyncSession
-
-from app.model.base import BaseModel
-from app.engine.postgresdb import Base
-from app.schema.index import TransactionCreate
-
-
-class Transaction(Base, BaseModel):
-    __tablename__ = "transactions"
-    transaction_date: Mapped[datetime]
-    category: Mapped[str]
-    name_description: Mapped[str]
-    amount: Mapped[float]
-    type: Mapped[str]
-
-    user_id = mapped_column(ForeignKey("users.id"))
-    user = relationship("User", back_populates="transactions")
-
-    def __str__(self) -> str:
-        return f"{self.transaction_date}, {self.category}, {self.name_description}, {self.amount}, {self.type}"
-
-    @classmethod
-    async def create(cls: "type[Transaction]", db: AsyncSession, **kwargs) -> "Transaction":
-        query = sql.insert(cls).values(**kwargs)
-        transactions = await db.execute(query)
-        transaction = transactions.first()
-        await db.commit()
-        return transaction
-
-    @classmethod
-    async def bulk_create(cls: "type[Transaction]", db: AsyncSession, transactions: List[TransactionCreate]) -> None:
-        values = [transaction.model_dump() for transaction in transactions]
-        query = sql.insert(cls).values(values)
-        await db.execute(query)
-        await db.commit()
-
-    @classmethod
-    async def update(cls: "type[Transaction]", db: AsyncSession, id: int, **kwargs) -> "Transaction":
-        query = sql.update(cls).where(cls.id == id).values(**kwargs).execution_options(synchronize_session="fetch")
-        transactions = await db.scalars(query)
-        transaction = transactions.first()
-        await db.commit()
-        return transaction
-
-    @classmethod
-    async def get_by_user(cls: "type[Transaction]", db: AsyncSession, user_id: int) -> "List[Transaction]":
-        query = sql.select(cls).where(cls.user_id == user_id)
-        transactions = await db.scalars(query)
-        return transactions
-
-    @classmethod
-    async def get_by_user_between_dates(
-        cls: "type[Transaction]", db: AsyncSession, user_id: int, start_date: datetime, end_date: datetime
-    ) -> "List[Transaction]":
-        query = sql.select(cls).where(cls.user_id == user_id).where(cls.transaction_date.between(start_date, end_date))
-        transactions = await db.scalars(query)
-        return transactions.all()

app/model/user.py DELETED
@@ -1,110 +0,0 @@
-import logging
-from sqlalchemy.orm import relationship, Mapped, mapped_column
-from sqlalchemy.sql import expression as sql
-from sqlalchemy.ext.asyncio import AsyncSession
-
-from app.model.base import BaseModel
-from app.engine.postgresdb import Base
-from app.schema.index import UserCreate, UserUpdate
-
-logger = logging.getLogger(__name__)
-
-
-class User(Base, BaseModel):
-    __tablename__ = "users"
-
-    name: Mapped[str]
-    email: Mapped[str]
-    hashed_password: Mapped[str]
-    is_deleted: Mapped[bool] = mapped_column(default=False)
-
-    transactions = relationship("Transaction", back_populates="user")
-    income_statements = relationship("IncomeStatement", back_populates="user")
-
-    @classmethod
-    async def create(cls: "type[User]", db: AsyncSession, **kwargs: UserCreate) -> "User":
-        """
-        Creates a new user in the database.
-
-        Args:
-            cls (type[User]): The class object representing the User model.
-            db (AsyncSession): The asynchronous session object for interacting with the database.
-            **kwargs (UserCreate): The keyword arguments representing the user's attributes.
-
-        Returns:
-            User
-        """
-
-        print(f"Creating user: {kwargs}")
-        # query = sql.insert(cls).values(**kwargs)
-        user = cls(name=kwargs["name"], email=kwargs["email"], hashed_password=kwargs["hashed_password"])
-        db.add(user)
-        await db.commit()
-        await db.refresh(user)
-        return user
-
-    @classmethod
-    async def update(cls: "type[User]", db: AsyncSession, id: int, **kwargs: UserUpdate) -> "User":
-        """
-        Updates a user in the database with the given ID and keyword arguments.
-
-        Args:
-            cls (type[User]): The class object representing the User model.
-            db (AsyncSession): The asynchronous session object for interacting with the database.
-            id (int): The ID of the user to update.
-            **kwargs (UserUpdate): The keyword arguments representing the user's attributes to update.
-
-        Returns:
-            User
-        """
-
-        query = sql.update(cls).where(cls.id == id).values(**kwargs).execution_options(synchronize_session="fetch")
-        results = await db.execute(query)
-        user = results.fetchone()
-        await db.commit()
-        return user
-
-    @classmethod
-    async def get(cls: "type[User]", db: AsyncSession, email: str) -> "User":
-        """
-        Retrieves a user from the database based on their email.
-
-        Args:
-            cls (type[User]): The class object representing the User model.
-            db (AsyncSession): The asynchronous session object for interacting with the database.
-            email (str): The email of the user to retrieve.
-
-        Returns:
-            User
-        """
-
-        logging.info(f"Getting user: {email}")
-        query = sql.select(cls).where(cls.email == email)
-        logging.info(f"Query: {query}")
-        users = await db.scalars(query)
-        logging.info(f"Users: {users}")
-        return users.first()
-
-    @classmethod
-    async def delete(cls: "type[User]", db: AsyncSession, email: str) -> "User":
-        """
-        Deletes a user from the database based on their email.
-
-        Args:
-            cls (type[User]): The class object representing the User model.
-            db (AsyncSession): The asynchronous session object for interacting with the database.
-            email (str): The email of the user to delete.
-
-        Returns:
-            User
-        """
-        query = (
-            sql.update(cls)
-            .where(cls.email == email)
-            .values(is_deleted=True)
-            .execution_options(synchronize_session="fetch")
-        )
-        result = await db.execute(query)
-        user = result.fetchone()
-        await db.commit()
-        return user

app/schema/base.py DELETED
@@ -1,14 +0,0 @@
-from datetime import datetime
-from pydantic import BaseModel, ConfigDict
-
-
-class PydanticBaseModel(BaseModel):
-    pass
-
-
-class BaseModel(PydanticBaseModel):
-    model_config = ConfigDict(from_attributes=True)
-
-    id: int
-    created_at: datetime
-    updated_at: datetime

app/schema/index.py DELETED
@@ -1,85 +0,0 @@
-from enum import Enum
-from datetime import datetime
-from typing import Dict, List
-from typing_extensions import TypedDict
-
-from app.schema.base import BaseModel, PydanticBaseModel
-
-
-class TransactionType(str, Enum):
-    INCOME = "income"
-    EXPENSE = "expense"
-
-
-class UserCreate(PydanticBaseModel):
-    name: str
-    email: str
-    hashed_password: str
-
-
-class UserUpdate(PydanticBaseModel):
-    name: str
-    email: str
-    hashed_password: str
-
-
-class UserResponse(PydanticBaseModel):
-    id: int
-    name: str
-    email: str
-    is_deleted: bool
-
-
-class User(BaseModel):
-    name: str
-    email: str
-    hashed_password: str
-    is_deleted: bool = False
-    transactions: "List[Transaction]" = []
-
-
-class TransactionResponse(PydanticBaseModel):
-    transaction_date: datetime
-    category: str
-    name_description: str
-    amount: float
-    type: TransactionType
-
-
-class TransactionCreate(TransactionResponse):
-    user_id: int
-
-
-class Transaction(TransactionResponse):
-    user: User
-
-
-class FileUploadCreate(PydanticBaseModel):
-    source: str
-    date: datetime
-    category: str
-    name_description: str
-    amount: float
-    type: str
-
-
-class IncomeStatementCreateRequest(PydanticBaseModel):
-    user_id: int
-    date_from: datetime
-    date_to: datetime
-
-
-class IncomeStatementDetail(TypedDict):
-    total: float
-    category_totals: List[Dict[str, str | float]]
-
-class IncomeStatementLLMResponse(PydanticBaseModel):
-    income: IncomeStatementDetail
-    expenses: IncomeStatementDetail
-
-
-class IncomeStatement(IncomeStatementCreateRequest, IncomeStatementLLMResponse):
-    pass
-
-class IncomeStatementResponse(IncomeStatement):
-    id: int

app/service/income_statement.py DELETED
@@ -1,37 +0,0 @@
-import logging
-from app.schema.index import IncomeStatement, IncomeStatementCreateRequest
-from app.model.transaction import Transaction as TransactionModel
-from app.model.income_statement import IncomeStatement as IncomeStatementModel
-from sqlalchemy.ext.asyncio import AsyncSession
-
-from app.service.llm import call_llm
-
-
-async def call_llm_to_create_income_statement(payload: IncomeStatementCreateRequest, db: AsyncSession) -> None:
-    transactions = await TransactionModel.get_by_user_between_dates(
-        db, payload.user_id, payload.date_from, payload.date_to
-    )
-
-    if not transactions:
-        print("No transactions found")
-        return
-
-    response = await call_llm(transactions)
-
-    income = response.dict()['income']
-    expenses = response.dict()['expenses']
-
-    try:
-        income_statement_create_payload = IncomeStatement(
-            user_id=payload.user_id,
-            date_from=payload.date_from,
-            date_to=payload.date_to,
-            income=income,
-            expenses=expenses,
-        )
-
-        income_statement = await IncomeStatementModel.create(db, **income_statement_create_payload.model_dump())
-        print(f"Income statement created: {income_statement}")
-    except Exception as e:
-        print(e)
-        raise e

app/service/llm.py DELETED
@@ -1,55 +0,0 @@
-from typing import List
-from langchain_core.prompts import ChatPromptTemplate
-from langchain_core.runnables.base import RunnableSequence
-from langchain_openai import OpenAI
-from langchain.globals import set_llm_cache
-
-from app.model.transaction import Transaction
-from app.schema.index import IncomeStatementLLMResponse
-from config.index import config as env
-
-from langchain_core.output_parsers import PydanticOutputParser
-
-set_llm_cache(None)
-
-def income_statement_prompt () -> ChatPromptTemplate:
-    context_str = """
-    You are an accountant skilled at organizing transactions from multiple different bank
-    accounts and credit card statements to prepare an income statement.
-
-    Input data is in the below csv format:
-    transaction_date, category, name_description, amount, type\n
-    {input_data_csv}
-
-    Your task is to prepare an income statement. The output should be in the following format: {format_instructions}
-
-    """
-    prompt = ChatPromptTemplate.from_template(context_str)
-    return prompt
-
-async def call_llm(inputData: List[Transaction]) -> str:
-    input_data_csv = '\n'.join(str(x) for x in inputData)
-
-    output_parser = PydanticOutputParser(pydantic_object=IncomeStatementLLMResponse)
-
-    prompt = income_statement_prompt().partial(format_instructions=output_parser.get_format_instructions())
-
-    llm = OpenAI(name='Income Statement Generation Bot',
-                 api_key=env.OPENAI_API_KEY,
-                 # cache=True,
-                 temperature=0.7,
-                 verbose=True)
-
-    try:
-        runnable_chain = RunnableSequence(prompt, llm, output_parser)
-    except Exception as e:
-        print(f"runnable_chain error: {str(e)}")
-        raise e
-
-    try:
-        output_chunks = runnable_chain.invoke({"input_data_csv": input_data_csv})
-        return output_chunks
-
-    except Exception as e:
-        print(f"runnable_chain.invoke error: {str(e)}")
-        raise e

app/transactions_rag/categorize_transactions.ipynb CHANGED
@@ -529,7 +529,7 @@
     "    print(\"\\nProcessing file\")\n",
     "    result = await asyncio.gather(processed_file)\n",
     "\n",
-    "    await save_results(result)\n",
+    "    save_results(result)\n",
     "    print(result)\n",
     "\n",
     "    output_file = open(CATEGORY_REFERENCE_OUTPUT_FILE, \"r+\")\n",
@@ -537,7 +537,8 @@
     "\n",
     "\n",
     "result = await apply_categorization()\n",
-    "print(result)\n"
+    "print(result)\n",
+    "\n"
    ]
   }
  ],
config/index.py DELETED
@@ -1,34 +0,0 @@
-import os
-from dotenv import load_dotenv
-
-load_dotenv()
-
-
-class Config:
-    DEBUG = os.getenv("DEBUG", False)
-    SQLALCHEMY_DATABASE_URI = os.getenv("SQLALCHEMY_DATABASE_URI")
-    SQLALCHEMY_TRACK_MODIFICATIONS = os.getenv("SQLALCHEMY_TRACK_MODIFICATIONS", False)
-    ENVIRONMENT = os.getenv("ENVIRONMENT", "dev")
-    APP_HOST = os.getenv("APP_HOST", "0.0.0.0")
-    APP_PORT = int(os.getenv("APP_PORT", "8000"))
-    PINECONE_API_KEY = os.getenv("PINECONE_API_KEY")
-    PINECONE_INDEX_NAME = os.getenv("PINECONE_INDEX_NAME")
-    PINECONE_ENVIRONMENT = os.getenv("PINECONE_ENVIRONMENT")
-    OPENAI_API_KEY = os.getenv("OPENAI_API_KEY")
-    OPENAI_ORGANIZATION = os.getenv("OPENAI_ORGANIZATION")
-    MODEL = os.getenv("MODEL", "gpt-3.5-turbo")
-    EMBEDDING_MODEL = os.getenv("EMBEDDING_MODEL", "text-embedding-ada-002")
-    EMBEDDING_DIM = os.getenv("EMBEDDING_DIM", "1536")
-    CHUNK_SIZE = int(os.getenv("CHUNK_SIZE", "1024"))
-    CHUNK_OVERLAP = int(os.getenv("CHUNK_OVERLAP", "20"))
-    MAX_TOKENS = int(os.getenv("MAX_TOKENS", "4000"))
-
-    POSTGRES_USER = os.getenv("POSTGRES_USER")
-    POSTGRES_PASSWORD = os.getenv("POSTGRES_PASSWORD")
-    POSTGRES_DB_NAME = os.getenv("POSTGRES_DB_NAME")
-    POSTGRES_DB_HOST = os.getenv("POSTGRES_DB_HOST")
-    POSTGRES_DB_PORT = int(os.getenv("POSTGRES_DB_PORT", 5432))
-    SQLALCHEMY_DATABASE_URL = f"postgresql+asyncpg://{POSTGRES_USER}:{POSTGRES_PASSWORD}@{POSTGRES_DB_HOST}:{POSTGRES_DB_PORT}/{POSTGRES_DB_NAME}"
-
-
-config = Config

data/.gitkeep DELETED
File without changes
data/tx_data/input/transactions_2024.csv DELETED
@@ -1,28 +0,0 @@
-2023-12-30,Comcast Internet,Expense,9.96
-2023-12-30,Lemonade Home Insurance,Expense,17.53
-2023-12-30,Monthly Appartment Rent,Expense,2000.0
-2023-12-30,Staples Office Supplies,Expense,12.46
-2023-12-29,Selling Paintings,Income,13.63
-2023-12-29,Spotify,Expense,12.19
-2023-12-23,Target,Expense,27.08
-2023-12-22,IT Consulting,Income,541.57
-2023-12-22,Phone,Expense,10.7
-2023-12-20,ML Consulting,Income,2641.93
-2023-12-19,Chipotle,Expense,18.9
-2023-12-18,Cold Brew Coffee,Expense,17.67
-2023-12-18,Gas,Expense,8.80
-2023-12-18,CA Property Tax,Expense,1670.34
-2022-11-26,Salary,Income,4000.36
-2022-11-26,Cellphone,Expense,19.27
-2022-11-26,Gym Membership,Expense,24.71
-2022-11-25,Wholefoods,Expense,17.35
-2022-11-24,Freelancing,Income,2409.55
-2022-11-19,Spotify,Expense,20.76
-2022-10-25,Blogging,Income,4044.27
-2022-10-24,Uber Taxi,Expense,18.9
-2022-10-23,Uber Taxi,Expense,27.54
-2022-10-22,Apple Services,Expense,41.25
-2022-10-21,Netflix,Expense,22.8
-2022-01-16,Amazon Lux,Expense,24.11
-2022-01-15,Burger King,Expense,30.08
-2022-01-14,Amazon,Expense,11.0

main.py CHANGED
@@ -1,74 +1,51 @@
+from dotenv import load_dotenv
+
+load_dotenv()
+
 import logging
 import os
 import uvicorn
-from contextlib import asynccontextmanager
 from fastapi import FastAPI
 from fastapi.middleware.cors import CORSMiddleware
 from fastapi.responses import RedirectResponse
 from app.api.routers.chat import chat_router
-from app.api.routers.user import user_router
-from app.api.routers.transaction import transaction_router
-from app.api.routers.file_upload import file_upload_router
-from app.api.routers.income_statement import income_statement_router
 from app.settings import init_settings
+from app.observability import init_observability
 from fastapi.staticfiles import StaticFiles
-from alembic.config import Config
-from alembic import command
-
 
-from app.engine.postgresdb import postgresdb
-from config.index import config as env
-
-logger = logging.getLogger("uvicorn")
 
+app = FastAPI()
 
 init_settings()
-# init_observability()
-
-
-def init_app(init_db: bool = True) -> FastAPI:
-    lifespan = None
+init_observability()
 
-    if init_db:
-        postgresdb.init(env.SQLALCHEMY_DATABASE_URL, {"echo": True, "future": True})
+environment = os.getenv("ENVIRONMENT", "dev")  # Default to 'development' if not set
 
-        @asynccontextmanager
-        async def lifespan(app: FastAPI):
-            yield
-            if postgresdb.get_engine() is not None:
-                await postgresdb.close()
-
-    app = FastAPI(lifespan=lifespan)
-
-    if env.ENVIRONMENT == "dev":
-        logger.warning("Running in development mode - allowing CORS for all origins")
-        app.add_middleware(
-            CORSMiddleware,
-            allow_origins=["*"],
-            allow_credentials=True,
-            allow_methods=["*"],
-            allow_headers=["*"],
-        )
+if environment == "dev":
+    logger = logging.getLogger("uvicorn")
+    logger.warning("Running in development mode - allowing CORS for all origins")
+    app.add_middleware(
+        CORSMiddleware,
+        allow_origins=["*"],
+        allow_credentials=True,
+        allow_methods=["*"],
+        allow_headers=["*"],
+    )
 
-    if os.path.exists("data"):
-        app.mount("/api/data", StaticFiles(directory="data"), name="static")
-    app.include_router(chat_router, prefix="/api/chat")
-    app.include_router(user_router)
-    app.include_router(transaction_router)
-    app.include_router(file_upload_router)
-    app.include_router(income_statement_router)
-
-    return app
-
-
-app = init_app()
-
-
-# Redirect to documentation page when accessing base URL
-@app.get("/")
-async def redirect_to_docs():
-    return RedirectResponse(url="/docs")
+# Redirect to documentation page when accessing base URL
+@app.get("/")
+async def redirect_to_docs():
+    return RedirectResponse(url="/docs")
+
+
+if os.path.exists("data"):
+    app.mount("/api/data", StaticFiles(directory="data"), name="static")
+app.include_router(chat_router, prefix="/api/chat")
 
 
 if __name__ == "__main__":
-    uvicorn.run(app="main:app", host=env.APP_HOST, port=env.APP_PORT, reload=(env.ENVIRONMENT == "dev"))
+    app_host = os.getenv("APP_HOST", "0.0.0.0")
+    app_port = int(os.getenv("APP_PORT", "8000"))
+    reload = True if environment == "dev" else False
+
+    uvicorn.run(app="main:app", host=app_host, port=app_port, reload=reload)
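
In the rewritten `main.py`, `load_dotenv()` deliberately runs before the `app.*` imports so that modules reading `os.environ` at import time see the `.env` values. A minimal sketch of that ordering constraint, assuming python-dotenv:

```python
from dotenv import load_dotenv

# Must run before importing any module that reads os.environ at import time.
load_dotenv()

import os  # noqa: E402  (import placed after load_dotenv on purpose)

APP_PORT = int(os.getenv("APP_PORT", "8000"))  # now reflects values from .env
```
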
migration/README DELETED
@@ -1 +0,0 @@
-Generic single-database configuration.

@@ -1,91 +0,0 @@
1
- import asyncio
2
- from logging.config import fileConfig
3
-
4
- from app.engine.postgresdb import Base
5
- from sqlalchemy import Connection
6
- from sqlalchemy import pool
7
- from sqlalchemy.ext.asyncio import async_engine_from_config
8
-
9
- from config.index import config as env
10
-
11
- from alembic import context
12
-
13
- # this is the Alembic Config object, which provides
14
- # access to the values within the .ini file in use.
15
- config = context.config
16
-
17
- config.set_main_option("sqlalchemy.url", env.SQLALCHEMY_DATABASE_URL)
18
-
19
- # Interpret the config file for Python logging.
20
- # This line sets up loggers basically.
21
- if config.config_file_name is not None:
22
- fileConfig(config.config_file_name)
23
-
24
- # Import all models so they're registered with SQLAlchemy.
25
- import app.model.user
26
- import app.model.transaction
27
- import app.model.income_statement
28
-
29
- # add your model's MetaData object here
30
- # for 'autogenerate' support
31
- # from myapp import mymodel
32
- # target_metadata = mymodel.Base.metadata
33
- target_metadata = Base.metadata
34
-
35
- # other values from the config, defined by the needs of env.py,
36
- # can be acquired:
37
- # my_important_option = config.get_main_option("my_important_option")
38
- # ... etc.
39
-
40
-
41
- def run_migrations_offline():
42
- """Run migrations in 'offline' mode.
43
- This configures the context with just a URL
44
- and not an Engine, though an Engine is acceptable
45
- here as well. By skipping the Engine creation
46
- we don't even need a DBAPI to be available.
47
- Calls to context.execute() here emit the given string to the
48
- script output.
49
- """
50
- url = config.get_main_option("sqlalchemy.url")
51
- context.configure(
52
- url=url,
53
- target_metadata=target_metadata,
54
- literal_binds=True,
55
- dialect_opts={"paramstyle": "named"},
56
- )
57
-
58
- with context.begin_transaction():
59
- context.run_migrations()
60
-
61
-
62
- def do_run_migrations(connection: Connection) -> None:
63
- context.configure(connection=connection, target_metadata=target_metadata)
64
-
65
- with context.begin_transaction():
66
- context.run_migrations()
67
-
68
-
69
- async def run_migrations_online():
70
- """Run migrations in 'online' mode.
71
- In this scenario we need to create an Engine
72
- and associate a connection with the context.
73
- """
74
- configuration = config.get_section(config.config_ini_section)
75
- configuration["sqlalchemy.url"] = env.SQLALCHEMY_DATABASE_URL
76
- connectable = async_engine_from_config(
77
- configuration,
78
- prefix="sqlalchemy.",
79
- poolclass=pool.NullPool,
80
- )
81
-
82
- async with connectable.connect() as connection:
83
- await connection.run_sync(do_run_migrations)
84
-
85
- await connectable.dispose()
86
-
87
-
88
- if context.is_offline_mode():
89
- run_migrations_offline()
90
- else:
91
- asyncio.run(run_migrations_online())
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
migration/script.py.mako DELETED
@@ -1,26 +0,0 @@
1
- """${message}
2
-
3
- Revision ID: ${up_revision}
4
- Revises: ${down_revision | comma,n}
5
- Create Date: ${create_date}
6
-
7
- """
8
- from typing import Sequence, Union
9
-
10
- from alembic import op
11
- import sqlalchemy as sa
12
- ${imports if imports else ""}
13
-
14
- # revision identifiers, used by Alembic.
15
- revision: str = ${repr(up_revision)}
16
- down_revision: Union[str, None] = ${repr(down_revision)}
17
- branch_labels: Union[str, Sequence[str], None] = ${repr(branch_labels)}
18
- depends_on: Union[str, Sequence[str], None] = ${repr(depends_on)}
19
-
20
-
21
- def upgrade() -> None:
22
- ${upgrades if upgrades else "pass"}
23
-
24
-
25
- def downgrade() -> None:
26
- ${downgrades if downgrades else "pass"}
migration/versions/4e76691ab103_add_income_statement_table.py DELETED
@@ -1,43 +0,0 @@
1
- """add_income_statement_table
2
-
3
- Revision ID: 4e76691ab103
4
- Revises: 7ea44cbc5b1f
5
- Create Date: 2024-06-03 23:59:03.717778
6
-
7
- """
8
- from typing import Sequence, Union
9
-
10
- from alembic import op
11
- import sqlalchemy as sa
12
- from sqlalchemy.dialects import postgresql
13
-
14
- # revision identifiers, used by Alembic.
15
- revision: str = '4e76691ab103'
16
- down_revision: Union[str, None] = '7ea44cbc5b1f'
17
- branch_labels: Union[str, Sequence[str], None] = None
18
- depends_on: Union[str, Sequence[str], None] = None
19
-
20
-
21
- def upgrade() -> None:
22
- # ### commands auto generated by Alembic - please adjust! ###
23
- op.create_table('income_statement',
24
- sa.Column('income', postgresql.JSON(astext_type=sa.Text()), nullable=True),
25
- sa.Column('expenses', postgresql.JSON(astext_type=sa.Text()), nullable=True),
26
- sa.Column('date_from', sa.DateTime(), nullable=False),
27
- sa.Column('date_to', sa.DateTime(), nullable=False),
28
- sa.Column('user_id', sa.Integer(), nullable=False),
29
- sa.Column('id', sa.Integer(), autoincrement=True, nullable=False),
30
- sa.Column('created_at', sa.DateTime(timezone=True), nullable=False),
31
- sa.Column('updated_at', sa.DateTime(timezone=True), nullable=False),
32
- sa.ForeignKeyConstraint(['user_id'], ['users.id'], ),
33
- sa.PrimaryKeyConstraint('id')
34
- )
35
- op.create_index(op.f('ix_income_statement_id'), 'income_statement', ['id'], unique=False)
36
- # ### end Alembic commands ###
37
-
38
-
39
- def downgrade() -> None:
40
- # ### commands auto generated by Alembic - please adjust! ###
41
- op.drop_index(op.f('ix_income_statement_id'), table_name='income_statement')
42
- op.drop_table('income_statement')
43
- # ### end Alembic commands ###
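The migration removed above is the only record of the income_statement schema in this diff. For readers reconstructing it as an ORM model, a sketch with column names and types taken directly from the migration (the Base import mirrors the one in the deleted migration/env.py; anything beyond that is an assumption about the app's conventions):

    from sqlalchemy import Column, DateTime, ForeignKey, Integer
    from sqlalchemy.dialects.postgresql import JSON

    from app.engine.postgresdb import Base

    class IncomeStatement(Base):
        __tablename__ = "income_statement"

        id = Column(Integer, primary_key=True, autoincrement=True, index=True)
        income = Column(JSON, nullable=True)    # JSON blobs; structure not constrained by the schema
        expenses = Column(JSON, nullable=True)
        date_from = Column(DateTime, nullable=False)
        date_to = Column(DateTime, nullable=False)
        user_id = Column(Integer, ForeignKey("users.id"), nullable=False)
        created_at = Column(DateTime(timezone=True), nullable=False)
        updated_at = Column(DateTime(timezone=True), nullable=False)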
migration/versions/7ea44cbc5b1f_default_timezone.py DELETED
@@ -1,69 +0,0 @@
1
- """default_timezone
2
-
3
- Revision ID: 7ea44cbc5b1f
4
- Revises: 8feaedca36f9
5
- Create Date: 2024-06-02 14:36:01.552518
6
-
7
- """
8
-
9
- from typing import Sequence, Union
10
-
11
- from alembic import op
12
- import sqlalchemy as sa
13
- from sqlalchemy.dialects import postgresql
14
-
15
- # revision identifiers, used by Alembic.
16
- revision: str = "7ea44cbc5b1f"
17
- down_revision: Union[str, None] = "8feaedca36f9"
18
- branch_labels: Union[str, Sequence[str], None] = None
19
- depends_on: Union[str, Sequence[str], None] = None
20
-
21
-
22
- def upgrade() -> None:
23
- # ### commands auto generated by Alembic - please adjust! ###
24
- op.alter_column(
25
- "transactions",
26
- "created_at",
27
- existing_type=postgresql.TIMESTAMP(),
28
- type_=sa.DateTime(timezone=True),
29
- nullable=False,
30
- )
31
- op.alter_column(
32
- "transactions",
33
- "updated_at",
34
- existing_type=postgresql.TIMESTAMP(),
35
- type_=sa.DateTime(timezone=True),
36
- nullable=False,
37
- )
38
- op.alter_column(
39
- "users", "created_at", existing_type=postgresql.TIMESTAMP(), type_=sa.DateTime(timezone=True), nullable=False
40
- )
41
- op.alter_column(
42
- "users", "updated_at", existing_type=postgresql.TIMESTAMP(), type_=sa.DateTime(timezone=True), nullable=False
43
- )
44
- # ### end Alembic commands ###
45
-
46
-
47
- def downgrade() -> None:
48
- # ### commands auto generated by Alembic - please adjust! ###
49
- op.alter_column(
50
- "users", "updated_at", existing_type=sa.DateTime(timezone=True), type_=postgresql.TIMESTAMP(), nullable=True
51
- )
52
- op.alter_column(
53
- "users", "created_at", existing_type=sa.DateTime(timezone=True), type_=postgresql.TIMESTAMP(), nullable=True
54
- )
55
- op.alter_column(
56
- "transactions",
57
- "updated_at",
58
- existing_type=sa.DateTime(timezone=True),
59
- type_=postgresql.TIMESTAMP(),
60
- nullable=True,
61
- )
62
- op.alter_column(
63
- "transactions",
64
- "created_at",
65
- existing_type=sa.DateTime(timezone=True),
66
- type_=postgresql.TIMESTAMP(),
67
- nullable=True,
68
- )
69
- # ### end Alembic commands ###
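The default_timezone migration above only switches the timestamp columns to DateTime(timezone=True). A common companion (not part of this migration) is a server-side default, so the database stamps rows on insert; a sketch:

    import sqlalchemy as sa

    created_at = sa.Column(
        sa.DateTime(timezone=True),
        server_default=sa.func.now(),  # evaluated by PostgreSQL at insert time
        nullable=False,
    )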
migration/versions/8feaedca36f9_users_datatype_updates.py DELETED
@@ -1,74 +0,0 @@
1
- """Remove_Transactions_From_Users
2
-
3
- Revision ID: 8feaedca36f9
4
- Revises: cd515c44401d
5
- Create Date: 2024-06-02 01:26:54.731002
6
-
7
- """
8
- from typing import Sequence, Union
9
-
10
- from alembic import op
11
- import sqlalchemy as sa
12
- from sqlalchemy.dialects import postgresql
13
-
14
- # revision identifiers, used by Alembic.
15
- revision: str = '8feaedca36f9'
16
- down_revision: Union[str, None] = 'cd515c44401d'
17
- branch_labels: Union[str, Sequence[str], None] = None
18
- depends_on: Union[str, Sequence[str], None] = None
19
-
20
-
21
- def upgrade() -> None:
22
- # ### commands auto generated by Alembic - please adjust! ###
23
- op.alter_column('transactions', 'transaction_date',
24
- existing_type=postgresql.TIMESTAMP(),
25
- nullable=False)
26
- op.alter_column('transactions', 'category',
27
- existing_type=sa.VARCHAR(),
28
- nullable=False)
29
- op.alter_column('transactions', 'name_description',
30
- existing_type=sa.VARCHAR(),
31
- nullable=False)
32
- op.alter_column('transactions', 'amount',
33
- existing_type=sa.DOUBLE_PRECISION(precision=53),
34
- nullable=False)
35
- op.alter_column('transactions', 'type',
36
- existing_type=sa.VARCHAR(),
37
- nullable=False)
38
- op.add_column('users', sa.Column('hashed_password', sa.String(), nullable=False))
39
- op.add_column('users', sa.Column('is_deleted', sa.Boolean(), nullable=False))
40
- op.alter_column('users', 'name',
41
- existing_type=sa.VARCHAR(),
42
- nullable=False)
43
- op.alter_column('users', 'email',
44
- existing_type=sa.VARCHAR(),
45
- nullable=False)
46
- # ### end Alembic commands ###
47
-
48
-
49
- def downgrade() -> None:
50
- # ### commands auto generated by Alembic - please adjust! ###
51
- op.alter_column('users', 'email',
52
- existing_type=sa.VARCHAR(),
53
- nullable=True)
54
- op.alter_column('users', 'name',
55
- existing_type=sa.VARCHAR(),
56
- nullable=True)
57
- op.drop_column('users', 'is_deleted')
58
- op.drop_column('users', 'hashed_password')
59
- op.alter_column('transactions', 'type',
60
- existing_type=sa.VARCHAR(),
61
- nullable=True)
62
- op.alter_column('transactions', 'amount',
63
- existing_type=sa.DOUBLE_PRECISION(precision=53),
64
- nullable=True)
65
- op.alter_column('transactions', 'name_description',
66
- existing_type=sa.VARCHAR(),
67
- nullable=True)
68
- op.alter_column('transactions', 'category',
69
- existing_type=sa.VARCHAR(),
70
- nullable=True)
71
- op.alter_column('transactions', 'transaction_date',
72
- existing_type=postgresql.TIMESTAMP(),
73
- nullable=True)
74
- # ### end Alembic commands ###
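A caveat for anyone replaying the migration above on a populated database: add_column(..., nullable=False) and the NOT NULL tightenings fail if existing rows would violate the constraint. The usual pattern (not in the original migration) is to backfill first; a sketch of the guard inside upgrade():

    from alembic import op

    def upgrade() -> None:
        # Backfill a placeholder before tightening the constraint.
        op.execute("UPDATE users SET hashed_password = '' WHERE hashed_password IS NULL")
        op.alter_column("users", "hashed_password", nullable=False)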
migration/versions/cd515c44401d_add_users_transactions_tables.py DELETED
@@ -1,55 +0,0 @@
1
- """Add users, transactions tables
2
-
3
- Revision ID: cd515c44401d
4
- Revises:
5
- Create Date: 2024-06-01 01:32:42.500524
6
-
7
- """
8
- from typing import Sequence, Union
9
-
10
- from alembic import op
11
- import sqlalchemy as sa
12
-
13
-
14
- # revision identifiers, used by Alembic.
15
- revision: str = 'cd515c44401d'
16
- down_revision: Union[str, None] = None
17
- branch_labels: Union[str, Sequence[str], None] = None
18
- depends_on: Union[str, Sequence[str], None] = None
19
-
20
-
21
- def upgrade() -> None:
22
- # ### commands auto generated by Alembic - please adjust! ###
23
- op.create_table('users',
24
- sa.Column('name', sa.String(), nullable=True),
25
- sa.Column('email', sa.String(), nullable=True),
26
- sa.Column('id', sa.Integer(), nullable=False),
27
- sa.Column('created_at', sa.DateTime(), nullable=True),
28
- sa.Column('updated_at', sa.DateTime(), nullable=True),
29
- sa.PrimaryKeyConstraint('id')
30
- )
31
- op.create_index(op.f('ix_users_id'), 'users', ['id'], unique=False)
32
- op.create_table('transactions',
33
- sa.Column('transaction_date', sa.DateTime(), nullable=True),
34
- sa.Column('category', sa.String(), nullable=True),
35
- sa.Column('name_description', sa.String(), nullable=True),
36
- sa.Column('amount', sa.Float(), nullable=True),
37
- sa.Column('type', sa.String(), nullable=True),
38
- sa.Column('user_id', sa.Integer(), nullable=True),
39
- sa.Column('id', sa.Integer(), nullable=False),
40
- sa.Column('created_at', sa.DateTime(), nullable=True),
41
- sa.Column('updated_at', sa.DateTime(), nullable=True),
42
- sa.ForeignKeyConstraint(['user_id'], ['users.id'], ),
43
- sa.PrimaryKeyConstraint('id')
44
- )
45
- op.create_index(op.f('ix_transactions_id'), 'transactions', ['id'], unique=False)
46
- # ### end Alembic commands ###
47
-
48
-
49
- def downgrade() -> None:
50
- # ### commands auto generated by Alembic - please adjust! ###
51
- op.drop_index(op.f('ix_transactions_id'), table_name='transactions')
52
- op.drop_table('transactions')
53
- op.drop_index(op.f('ix_users_id'), table_name='users')
54
- op.drop_table('users')
55
- # ### end Alembic commands ###
poetry.lock CHANGED
@@ -123,25 +123,6 @@ files = [
123
  [package.dependencies]
124
  typing-extensions = "*"
125
 
126
- [[package]]
127
- name = "alembic"
128
- version = "1.13.1"
129
- description = "A database migration tool for SQLAlchemy."
130
- optional = false
131
- python-versions = ">=3.8"
132
- files = [
133
- {file = "alembic-1.13.1-py3-none-any.whl", hash = "sha256:2edcc97bed0bd3272611ce3a98d98279e9c209e7186e43e75bbb1b2bdfdbcc43"},
134
- {file = "alembic-1.13.1.tar.gz", hash = "sha256:4932c8558bf68f2ee92b9bbcb8218671c627064d5b08939437af6d77dc05e595"},
135
- ]
136
-
137
- [package.dependencies]
138
- Mako = "*"
139
- SQLAlchemy = ">=1.3.0"
140
- typing-extensions = ">=4"
141
-
142
- [package.extras]
143
- tz = ["backports.zoneinfo"]
144
-
145
  [[package]]
146
  name = "annotated-types"
147
  version = "0.7.0"
@@ -198,87 +179,6 @@ doc = ["Sphinx (>=7)", "packaging", "sphinx-autodoc-typehints (>=1.2.0)", "sphin
198
  test = ["anyio[trio]", "coverage[toml] (>=7)", "exceptiongroup (>=1.2.0)", "hypothesis (>=4.0)", "psutil (>=5.9)", "pytest (>=7.0)", "pytest-mock (>=3.6.1)", "trustme", "uvloop (>=0.17)"]
199
  trio = ["trio (>=0.23)"]
200
 
201
- [[package]]
202
- name = "async-timeout"
203
- version = "4.0.3"
204
- description = "Timeout context manager for asyncio programs"
205
- optional = false
206
- python-versions = ">=3.7"
207
- files = [
208
- {file = "async-timeout-4.0.3.tar.gz", hash = "sha256:4640d96be84d82d02ed59ea2b7105a0f7b33abe8703703cd0ab0bf87c427522f"},
209
- {file = "async_timeout-4.0.3-py3-none-any.whl", hash = "sha256:7405140ff1230c310e51dc27b3145b9092d659ce68ff733fb0cefe3ee42be028"},
210
- ]
211
-
212
- [[package]]
213
- name = "asyncio"
214
- version = "3.4.3"
215
- description = "reference implementation of PEP 3156"
216
- optional = false
217
- python-versions = "*"
218
- files = [
219
- {file = "asyncio-3.4.3-cp33-none-win32.whl", hash = "sha256:b62c9157d36187eca799c378e572c969f0da87cd5fc42ca372d92cdb06e7e1de"},
220
- {file = "asyncio-3.4.3-cp33-none-win_amd64.whl", hash = "sha256:c46a87b48213d7464f22d9a497b9eef8c1928b68320a2fa94240f969f6fec08c"},
221
- {file = "asyncio-3.4.3-py3-none-any.whl", hash = "sha256:c4d18b22701821de07bd6aea8b53d21449ec0ec5680645e5317062ea21817d2d"},
222
- {file = "asyncio-3.4.3.tar.gz", hash = "sha256:83360ff8bc97980e4ff25c964c7bd3923d333d177aa4f7fb736b019f26c7cb41"},
223
- ]
224
-
225
- [[package]]
226
- name = "asyncpg"
227
- version = "0.29.0"
228
- description = "An asyncio PostgreSQL driver"
229
- optional = false
230
- python-versions = ">=3.8.0"
231
- files = [
232
- {file = "asyncpg-0.29.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:72fd0ef9f00aeed37179c62282a3d14262dbbafb74ec0ba16e1b1864d8a12169"},
233
- {file = "asyncpg-0.29.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:52e8f8f9ff6e21f9b39ca9f8e3e33a5fcdceaf5667a8c5c32bee158e313be385"},
234
- {file = "asyncpg-0.29.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a9e6823a7012be8b68301342ba33b4740e5a166f6bbda0aee32bc01638491a22"},
235
- {file = "asyncpg-0.29.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:746e80d83ad5d5464cfbf94315eb6744222ab00aa4e522b704322fb182b83610"},
236
- {file = "asyncpg-0.29.0-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:ff8e8109cd6a46ff852a5e6bab8b0a047d7ea42fcb7ca5ae6eaae97d8eacf397"},
237
- {file = "asyncpg-0.29.0-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:97eb024685b1d7e72b1972863de527c11ff87960837919dac6e34754768098eb"},
238
- {file = "asyncpg-0.29.0-cp310-cp310-win32.whl", hash = "sha256:5bbb7f2cafd8d1fa3e65431833de2642f4b2124be61a449fa064e1a08d27e449"},
239
- {file = "asyncpg-0.29.0-cp310-cp310-win_amd64.whl", hash = "sha256:76c3ac6530904838a4b650b2880f8e7af938ee049e769ec2fba7cd66469d7772"},
240
- {file = "asyncpg-0.29.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:d4900ee08e85af01adb207519bb4e14b1cae8fd21e0ccf80fac6aa60b6da37b4"},
241
- {file = "asyncpg-0.29.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:a65c1dcd820d5aea7c7d82a3fdcb70e096f8f70d1a8bf93eb458e49bfad036ac"},
242
- {file = "asyncpg-0.29.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:5b52e46f165585fd6af4863f268566668407c76b2c72d366bb8b522fa66f1870"},
243
- {file = "asyncpg-0.29.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:dc600ee8ef3dd38b8d67421359779f8ccec30b463e7aec7ed481c8346decf99f"},
244
- {file = "asyncpg-0.29.0-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:039a261af4f38f949095e1e780bae84a25ffe3e370175193174eb08d3cecab23"},
245
- {file = "asyncpg-0.29.0-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:6feaf2d8f9138d190e5ec4390c1715c3e87b37715cd69b2c3dfca616134efd2b"},
246
- {file = "asyncpg-0.29.0-cp311-cp311-win32.whl", hash = "sha256:1e186427c88225ef730555f5fdda6c1812daa884064bfe6bc462fd3a71c4b675"},
247
- {file = "asyncpg-0.29.0-cp311-cp311-win_amd64.whl", hash = "sha256:cfe73ffae35f518cfd6e4e5f5abb2618ceb5ef02a2365ce64f132601000587d3"},
248
- {file = "asyncpg-0.29.0-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:6011b0dc29886ab424dc042bf9eeb507670a3b40aece3439944006aafe023178"},
249
- {file = "asyncpg-0.29.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:b544ffc66b039d5ec5a7454667f855f7fec08e0dfaf5a5490dfafbb7abbd2cfb"},
250
- {file = "asyncpg-0.29.0-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d84156d5fb530b06c493f9e7635aa18f518fa1d1395ef240d211cb563c4e2364"},
251
- {file = "asyncpg-0.29.0-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:54858bc25b49d1114178d65a88e48ad50cb2b6f3e475caa0f0c092d5f527c106"},
252
- {file = "asyncpg-0.29.0-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:bde17a1861cf10d5afce80a36fca736a86769ab3579532c03e45f83ba8a09c59"},
253
- {file = "asyncpg-0.29.0-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:37a2ec1b9ff88d8773d3eb6d3784dc7e3fee7756a5317b67f923172a4748a175"},
254
- {file = "asyncpg-0.29.0-cp312-cp312-win32.whl", hash = "sha256:bb1292d9fad43112a85e98ecdc2e051602bce97c199920586be83254d9dafc02"},
255
- {file = "asyncpg-0.29.0-cp312-cp312-win_amd64.whl", hash = "sha256:2245be8ec5047a605e0b454c894e54bf2ec787ac04b1cb7e0d3c67aa1e32f0fe"},
256
- {file = "asyncpg-0.29.0-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:0009a300cae37b8c525e5b449233d59cd9868fd35431abc470a3e364d2b85cb9"},
257
- {file = "asyncpg-0.29.0-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:5cad1324dbb33f3ca0cd2074d5114354ed3be2b94d48ddfd88af75ebda7c43cc"},
258
- {file = "asyncpg-0.29.0-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:012d01df61e009015944ac7543d6ee30c2dc1eb2f6b10b62a3f598beb6531548"},
259
- {file = "asyncpg-0.29.0-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:000c996c53c04770798053e1730d34e30cb645ad95a63265aec82da9093d88e7"},
260
- {file = "asyncpg-0.29.0-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:e0bfe9c4d3429706cf70d3249089de14d6a01192d617e9093a8e941fea8ee775"},
261
- {file = "asyncpg-0.29.0-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:642a36eb41b6313ffa328e8a5c5c2b5bea6ee138546c9c3cf1bffaad8ee36dd9"},
262
- {file = "asyncpg-0.29.0-cp38-cp38-win32.whl", hash = "sha256:a921372bbd0aa3a5822dd0409da61b4cd50df89ae85150149f8c119f23e8c408"},
263
- {file = "asyncpg-0.29.0-cp38-cp38-win_amd64.whl", hash = "sha256:103aad2b92d1506700cbf51cd8bb5441e7e72e87a7b3a2ca4e32c840f051a6a3"},
264
- {file = "asyncpg-0.29.0-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:5340dd515d7e52f4c11ada32171d87c05570479dc01dc66d03ee3e150fb695da"},
265
- {file = "asyncpg-0.29.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:e17b52c6cf83e170d3d865571ba574577ab8e533e7361a2b8ce6157d02c665d3"},
266
- {file = "asyncpg-0.29.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:f100d23f273555f4b19b74a96840aa27b85e99ba4b1f18d4ebff0734e78dc090"},
267
- {file = "asyncpg-0.29.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:48e7c58b516057126b363cec8ca02b804644fd012ef8e6c7e23386b7d5e6ce83"},
268
- {file = "asyncpg-0.29.0-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:f9ea3f24eb4c49a615573724d88a48bd1b7821c890c2effe04f05382ed9e8810"},
269
- {file = "asyncpg-0.29.0-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:8d36c7f14a22ec9e928f15f92a48207546ffe68bc412f3be718eedccdf10dc5c"},
270
- {file = "asyncpg-0.29.0-cp39-cp39-win32.whl", hash = "sha256:797ab8123ebaed304a1fad4d7576d5376c3a006a4100380fb9d517f0b59c1ab2"},
271
- {file = "asyncpg-0.29.0-cp39-cp39-win_amd64.whl", hash = "sha256:cce08a178858b426ae1aa8409b5cc171def45d4293626e7aa6510696d46decd8"},
272
- {file = "asyncpg-0.29.0.tar.gz", hash = "sha256:d1c49e1f44fffafd9a55e1a9b101590859d881d639ea2922516f5d9c512d354e"},
273
- ]
274
-
275
- [package.dependencies]
276
- async-timeout = {version = ">=4.0.3", markers = "python_version < \"3.12.0\""}
277
-
278
- [package.extras]
279
- docs = ["Sphinx (>=5.3.0,<5.4.0)", "sphinx-rtd-theme (>=1.2.2)", "sphinxcontrib-asyncio (>=0.3.0,<0.4.0)"]
280
- test = ["flake8 (>=6.1,<7.0)", "uvloop (>=0.15.3)"]
281
-
282
  [[package]]
283
  name = "attrs"
284
  version = "23.2.0"
@@ -541,20 +441,6 @@ files = [
541
  {file = "distro-1.9.0.tar.gz", hash = "sha256:2fa77c6fd8940f116ee1d6b94a2f90b13b5ea8d019b98bc8bafdcabcdd9bdbed"},
542
  ]
543
 
544
- [[package]]
545
- name = "execnet"
546
- version = "2.1.1"
547
- description = "execnet: rapid multi-Python deployment"
548
- optional = false
549
- python-versions = ">=3.8"
550
- files = [
551
- {file = "execnet-2.1.1-py3-none-any.whl", hash = "sha256:26dee51f1b80cebd6d0ca8e74dd8745419761d3bef34163928cbebbdc4749fdc"},
552
- {file = "execnet-2.1.1.tar.gz", hash = "sha256:5189b52c6121c24feae288166ab41b32549c7e2348652736540b9e6e7d4e72e3"},
553
- ]
554
-
555
- [package.extras]
556
- testing = ["hatch", "pre-commit", "pytest", "tox"]
557
-
558
  [[package]]
559
  name = "fastapi"
560
  version = "0.109.2"
@@ -1039,17 +925,6 @@ files = [
1039
  {file = "inflection-0.5.1.tar.gz", hash = "sha256:1a29730d366e996aaacffb2f1f1cb9593dc38e2ddd30c91250c6dde09ea9b417"},
1040
  ]
1041
 
1042
- [[package]]
1043
- name = "iniconfig"
1044
- version = "2.0.0"
1045
- description = "brain-dead simple config-ini parsing"
1046
- optional = false
1047
- python-versions = ">=3.7"
1048
- files = [
1049
- {file = "iniconfig-2.0.0-py3-none-any.whl", hash = "sha256:b6a85871a79d2e3b22d2d1b94ac2824226a63c6b741c88f7ae975f18b6778374"},
1050
- {file = "iniconfig-2.0.0.tar.gz", hash = "sha256:2d91e135bf72d31a410b17c16da610a82cb55f6b0477d1a902134b24a455b8b3"},
1051
- ]
1052
-
1053
  [[package]]
1054
  name = "jinja2"
1055
  version = "3.1.4"
@@ -1590,25 +1465,6 @@ files = [
1590
  httpx = ">=0.20.0"
1591
  pydantic = ">=1.10"
1592
 
1593
- [[package]]
1594
- name = "mako"
1595
- version = "1.3.5"
1596
- description = "A super-fast templating language that borrows the best ideas from the existing templating languages."
1597
- optional = false
1598
- python-versions = ">=3.8"
1599
- files = [
1600
- {file = "Mako-1.3.5-py3-none-any.whl", hash = "sha256:260f1dbc3a519453a9c856dedfe4beb4e50bd5a26d96386cb6c80856556bb91a"},
1601
- {file = "Mako-1.3.5.tar.gz", hash = "sha256:48dbc20568c1d276a2698b36d968fa76161bf127194907ea6fc594fa81f943bc"},
1602
- ]
1603
-
1604
- [package.dependencies]
1605
- MarkupSafe = ">=0.9.2"
1606
-
1607
- [package.extras]
1608
- babel = ["Babel"]
1609
- lingua = ["lingua"]
1610
- testing = ["pytest"]
1611
-
1612
  [[package]]
1613
  name = "markupsafe"
1614
  version = "2.1.5"
@@ -1697,20 +1553,6 @@ dev = ["marshmallow[tests]", "pre-commit (>=3.5,<4.0)", "tox"]
1697
  docs = ["alabaster (==0.7.16)", "autodocsumm (==0.2.12)", "sphinx (==7.3.7)", "sphinx-issues (==4.1.0)", "sphinx-version-warning (==1.1.2)"]
1698
  tests = ["pytest", "pytz", "simplejson"]
1699
 
1700
- [[package]]
1701
- name = "mirakuru"
1702
- version = "2.5.2"
1703
- description = "Process executor (not only) for tests."
1704
- optional = false
1705
- python-versions = ">=3.8"
1706
- files = [
1707
- {file = "mirakuru-2.5.2-py3-none-any.whl", hash = "sha256:90c2d90a8cf14349b2f33e6db30a16acd855499811e0312e56cf80ceacf2d3e5"},
1708
- {file = "mirakuru-2.5.2.tar.gz", hash = "sha256:41ca583d355eb7a6cfdc21c1aea549979d685c27b57239b88725434f115a7132"},
1709
- ]
1710
-
1711
- [package.dependencies]
1712
- psutil = {version = ">=4.0.0", markers = "sys_platform != \"cygwin\""}
1713
-
1714
  [[package]]
1715
  name = "monotonic"
1716
  version = "1.6"
@@ -2698,32 +2540,6 @@ urllib3 = {version = ">=1.26.0", markers = "python_version >= \"3.8\" and python
2698
  [package.extras]
2699
  grpc = ["googleapis-common-protos (>=1.53.0)", "grpc-gateway-protoc-gen-openapiv2 (==0.1.0)", "grpcio (>=1.44.0)", "grpcio (>=1.59.0)", "lz4 (>=3.1.3)", "protobuf (>=3.20.0,<3.21.0)"]
2700
 
2701
- [[package]]
2702
- name = "pluggy"
2703
- version = "1.5.0"
2704
- description = "plugin and hook calling mechanisms for python"
2705
- optional = false
2706
- python-versions = ">=3.8"
2707
- files = [
2708
- {file = "pluggy-1.5.0-py3-none-any.whl", hash = "sha256:44e1ad92c8ca002de6377e165f3e0f1be63266ab4d554740532335b9d75ea669"},
2709
- {file = "pluggy-1.5.0.tar.gz", hash = "sha256:2cffa88e94fdc978c4c574f15f9e59b7f4201d439195c3715ca9e2486f1d0cf1"},
2710
- ]
2711
-
2712
- [package.extras]
2713
- dev = ["pre-commit", "tox"]
2714
- testing = ["pytest", "pytest-benchmark"]
2715
-
2716
- [[package]]
2717
- name = "port-for"
2718
- version = "0.7.2"
2719
- description = "Utility that helps with local TCP ports management. It can find an unused TCP localhost port and remember the association."
2720
- optional = false
2721
- python-versions = ">=3.8"
2722
- files = [
2723
- {file = "port-for-0.7.2.tar.gz", hash = "sha256:074f29335130578aa42fef3726985e57d01c15189e509633a8a1b0b7f9226349"},
2724
- {file = "port_for-0.7.2-py3-none-any.whl", hash = "sha256:16b279ab4f210bad33515c45bd9af0c6e048ab24c3b6bbd9cfc7e451782617df"},
2725
- ]
2726
-
2727
  [[package]]
2728
  name = "posthog"
2729
  version = "3.5.0"
@@ -2767,129 +2583,6 @@ files = [
2767
  {file = "protobuf-4.25.3.tar.gz", hash = "sha256:25b5d0b42fd000320bd7830b349e3b696435f3b329810427a6bcce6a5492cc5c"},
2768
  ]
2769
 
2770
- [[package]]
2771
- name = "psutil"
2772
- version = "5.9.8"
2773
- description = "Cross-platform lib for process and system monitoring in Python."
2774
- optional = false
2775
- python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*, !=3.5.*"
2776
- files = [
2777
- {file = "psutil-5.9.8-cp27-cp27m-macosx_10_9_x86_64.whl", hash = "sha256:26bd09967ae00920df88e0352a91cff1a78f8d69b3ecabbfe733610c0af486c8"},
2778
- {file = "psutil-5.9.8-cp27-cp27m-manylinux2010_i686.whl", hash = "sha256:05806de88103b25903dff19bb6692bd2e714ccf9e668d050d144012055cbca73"},
2779
- {file = "psutil-5.9.8-cp27-cp27m-manylinux2010_x86_64.whl", hash = "sha256:611052c4bc70432ec770d5d54f64206aa7203a101ec273a0cd82418c86503bb7"},
2780
- {file = "psutil-5.9.8-cp27-cp27mu-manylinux2010_i686.whl", hash = "sha256:50187900d73c1381ba1454cf40308c2bf6f34268518b3f36a9b663ca87e65e36"},
2781
- {file = "psutil-5.9.8-cp27-cp27mu-manylinux2010_x86_64.whl", hash = "sha256:02615ed8c5ea222323408ceba16c60e99c3f91639b07da6373fb7e6539abc56d"},
2782
- {file = "psutil-5.9.8-cp27-none-win32.whl", hash = "sha256:36f435891adb138ed3c9e58c6af3e2e6ca9ac2f365efe1f9cfef2794e6c93b4e"},
2783
- {file = "psutil-5.9.8-cp27-none-win_amd64.whl", hash = "sha256:bd1184ceb3f87651a67b2708d4c3338e9b10c5df903f2e3776b62303b26cb631"},
2784
- {file = "psutil-5.9.8-cp36-abi3-macosx_10_9_x86_64.whl", hash = "sha256:aee678c8720623dc456fa20659af736241f575d79429a0e5e9cf88ae0605cc81"},
2785
- {file = "psutil-5.9.8-cp36-abi3-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:8cb6403ce6d8e047495a701dc7c5bd788add903f8986d523e3e20b98b733e421"},
2786
- {file = "psutil-5.9.8-cp36-abi3-manylinux_2_12_x86_64.manylinux2010_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d06016f7f8625a1825ba3732081d77c94589dca78b7a3fc072194851e88461a4"},
2787
- {file = "psutil-5.9.8-cp36-cp36m-win32.whl", hash = "sha256:7d79560ad97af658a0f6adfef8b834b53f64746d45b403f225b85c5c2c140eee"},
2788
- {file = "psutil-5.9.8-cp36-cp36m-win_amd64.whl", hash = "sha256:27cc40c3493bb10de1be4b3f07cae4c010ce715290a5be22b98493509c6299e2"},
2789
- {file = "psutil-5.9.8-cp37-abi3-win32.whl", hash = "sha256:bc56c2a1b0d15aa3eaa5a60c9f3f8e3e565303b465dbf57a1b730e7a2b9844e0"},
2790
- {file = "psutil-5.9.8-cp37-abi3-win_amd64.whl", hash = "sha256:8db4c1b57507eef143a15a6884ca10f7c73876cdf5d51e713151c1236a0e68cf"},
2791
- {file = "psutil-5.9.8-cp38-abi3-macosx_11_0_arm64.whl", hash = "sha256:d16bbddf0693323b8c6123dd804100241da461e41d6e332fb0ba6058f630f8c8"},
2792
- {file = "psutil-5.9.8.tar.gz", hash = "sha256:6be126e3225486dff286a8fb9a06246a5253f4c7c53b475ea5f5ac934e64194c"},
2793
- ]
2794
-
2795
- [package.extras]
2796
- test = ["enum34", "ipaddress", "mock", "pywin32", "wmi"]
2797
-
2798
- [[package]]
2799
- name = "psycopg"
2800
- version = "3.1.19"
2801
- description = "PostgreSQL database adapter for Python"
2802
- optional = false
2803
- python-versions = ">=3.7"
2804
- files = [
2805
- {file = "psycopg-3.1.19-py3-none-any.whl", hash = "sha256:dca5e5521c859f6606686432ae1c94e8766d29cc91f2ee595378c510cc5b0731"},
2806
- {file = "psycopg-3.1.19.tar.gz", hash = "sha256:92d7b78ad82426cdcf1a0440678209faa890c6e1721361c2f8901f0dccd62961"},
2807
- ]
2808
-
2809
- [package.dependencies]
2810
- typing-extensions = ">=4.1"
2811
- tzdata = {version = "*", markers = "sys_platform == \"win32\""}
2812
-
2813
- [package.extras]
2814
- binary = ["psycopg-binary (==3.1.19)"]
2815
- c = ["psycopg-c (==3.1.19)"]
2816
- dev = ["black (>=24.1.0)", "codespell (>=2.2)", "dnspython (>=2.1)", "flake8 (>=4.0)", "mypy (>=1.4.1)", "types-setuptools (>=57.4)", "wheel (>=0.37)"]
2817
- docs = ["Sphinx (>=5.0)", "furo (==2022.6.21)", "sphinx-autobuild (>=2021.3.14)", "sphinx-autodoc-typehints (>=1.12)"]
2818
- pool = ["psycopg-pool"]
2819
- test = ["anyio (>=3.6.2,<4.0)", "mypy (>=1.4.1)", "pproxy (>=2.7)", "pytest (>=6.2.5)", "pytest-cov (>=3.0)", "pytest-randomly (>=3.5)"]
2820
-
2821
- [[package]]
2822
- name = "psycopg-binary"
2823
- version = "3.1.19"
2824
- description = "PostgreSQL database adapter for Python -- C optimisation distribution"
2825
- optional = false
2826
- python-versions = ">=3.7"
2827
- files = [
2828
- {file = "psycopg_binary-3.1.19-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:7204818f05151dd08f8f851defb01972ec9d2cc925608eb0de232563f203f354"},
2829
- {file = "psycopg_binary-3.1.19-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:6d4e67fd86758dbeac85641419a54f84d74495a8683b58ad5dfad08b7fc37a8f"},
2830
- {file = "psycopg_binary-3.1.19-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e12173e34b176e93ad2da913de30f774d5119c2d4d4640c6858d2d77dfa6c9bf"},
2831
- {file = "psycopg_binary-3.1.19-cp310-cp310-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:052f5193304066318853b4b2e248f523c8f52b371fc4e95d4ef63baee3f30955"},
2832
- {file = "psycopg_binary-3.1.19-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:29008f3f8977f600b8a7fb07c2e041b01645b08121760609cc45e861a0364dc9"},
2833
- {file = "psycopg_binary-3.1.19-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:7c6a9a651a08d876303ed059c9553df18b3c13c3406584a70a8f37f1a1fe2709"},
2834
- {file = "psycopg_binary-3.1.19-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:91a645e6468c4f064b7f4f3b81074bdd68fe5aa2b8c5107de15dcd85ba6141be"},
2835
- {file = "psycopg_binary-3.1.19-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:5c6956808fd5cf0576de5a602243af8e04594b25b9a28675feddc71c5526410a"},
2836
- {file = "psycopg_binary-3.1.19-cp310-cp310-musllinux_1_1_ppc64le.whl", hash = "sha256:1622ca27d5a7a98f7d8f35e8b146dc7efda4a4b6241d2edf7e076bd6bcecbeb4"},
2837
- {file = "psycopg_binary-3.1.19-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:a100482950a55228f648bd382bb71bfaff520002f29845274fccbbf02e28bd52"},
2838
- {file = "psycopg_binary-3.1.19-cp310-cp310-win_amd64.whl", hash = "sha256:955ca8905c0251fc4af7ce0a20999e824a25652f53a558ab548b60969f1f368e"},
2839
- {file = "psycopg_binary-3.1.19-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:1cf49e91dcf699b8a449944ed898ef1466b39b92720613838791a551bc8f587a"},
2840
- {file = "psycopg_binary-3.1.19-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:964c307e400c5f33fa762ba1e19853e048814fcfbd9679cc923431adb7a2ead2"},
2841
- {file = "psycopg_binary-3.1.19-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:3433924e1b14074798331dc2bfae2af452ed7888067f2fc145835704d8981b15"},
2842
- {file = "psycopg_binary-3.1.19-cp311-cp311-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:00879d4c6be4b3afc510073f48a5e960f797200e261ab3d9bd9b7746a08c669d"},
2843
- {file = "psycopg_binary-3.1.19-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:34a6997c80f86d3dd80a4f078bb3b200079c47eeda4fd409d8899b883c90d2ac"},
2844
- {file = "psycopg_binary-3.1.19-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:0106e42b481677c41caa69474fe530f786dcef88b11b70000f0e45a03534bc8f"},
2845
- {file = "psycopg_binary-3.1.19-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:81efe09ba27533e35709905c3061db4dc9fb814f637360578d065e2061fbb116"},
2846
- {file = "psycopg_binary-3.1.19-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:d312d6dddc18d9c164e1893706269c293cba1923118349d375962b1188dafb01"},
2847
- {file = "psycopg_binary-3.1.19-cp311-cp311-musllinux_1_1_ppc64le.whl", hash = "sha256:bfd2c734da9950f7afaad5f132088e0e1478f32f042881fca6651bb0c8d14206"},
2848
- {file = "psycopg_binary-3.1.19-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:8a732610a5a6b4f06dadcf9288688a8ff202fd556d971436a123b7adb85596e2"},
2849
- {file = "psycopg_binary-3.1.19-cp311-cp311-win_amd64.whl", hash = "sha256:321814a9a3ad785855a821b842aba08ca1b7de7dfb2979a2f0492dca9ec4ae70"},
2850
- {file = "psycopg_binary-3.1.19-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:4aa0ca13bb8a725bb6d12c13999217fd5bc8b86a12589f28a74b93e076fbb959"},
2851
- {file = "psycopg_binary-3.1.19-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:469424e354ebcec949aa6aa30e5a9edc352a899d9a68ad7a48f97df83cc914cf"},
2852
- {file = "psycopg_binary-3.1.19-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:b04f5349313529ae1f1c42fe1aa0443faaf50fdf12d13866c2cc49683bfa53d0"},
2853
- {file = "psycopg_binary-3.1.19-cp312-cp312-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:959feabddc7fffac89b054d6f23f3b3c62d7d3c90cd414a02e3747495597f150"},
2854
- {file = "psycopg_binary-3.1.19-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:e9da624a6ca4bc5f7fa1f03f8485446b5b81d5787b6beea2b4f8d9dbef878ad7"},
2855
- {file = "psycopg_binary-3.1.19-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:c1823221a6b96e38b15686170d4fc5b36073efcb87cce7d3da660440b50077f6"},
2856
- {file = "psycopg_binary-3.1.19-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:866db42f986298f0cf15d805225eb8df2228bf19f7997d7f1cb5f388cbfc6a0f"},
2857
- {file = "psycopg_binary-3.1.19-cp312-cp312-musllinux_1_1_i686.whl", hash = "sha256:738c34657305b5973af6dbb6711b07b179dfdd21196d60039ca30a74bafe9648"},
2858
- {file = "psycopg_binary-3.1.19-cp312-cp312-musllinux_1_1_ppc64le.whl", hash = "sha256:fb9758473200384a04374d0e0cac6f451218ff6945a024f65a1526802c34e56e"},
2859
- {file = "psycopg_binary-3.1.19-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:0e991632777e217953ac960726158987da684086dd813ac85038c595e7382c91"},
2860
- {file = "psycopg_binary-3.1.19-cp312-cp312-win_amd64.whl", hash = "sha256:1d87484dd42c8783c44a30400949efb3d81ef2487eaa7d64d1c54df90cf8b97a"},
2861
- {file = "psycopg_binary-3.1.19-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:d1d1723d7449c12bb61aca7eb6e0c6ab2863cd8dc0019273cc4d4a1982f84bdb"},
2862
- {file = "psycopg_binary-3.1.19-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e538a8671005641fa195eab962f85cf0504defbd3b548c4c8fc27102a59f687b"},
2863
- {file = "psycopg_binary-3.1.19-cp37-cp37m-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:c50592bc8517092f40979e4a5d934f96a1737a77724bb1d121eb78b614b30fc8"},
2864
- {file = "psycopg_binary-3.1.19-cp37-cp37m-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:95f16ae82bc242b76cd3c3e5156441e2bd85ff9ec3a9869d750aad443e46073c"},
2865
- {file = "psycopg_binary-3.1.19-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:aebd1e98e865e9a28ce0cb2c25b7dfd752f0d1f0a423165b55cd32a431dcc0f4"},
2866
- {file = "psycopg_binary-3.1.19-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:49cd7af7d49e438a39593d1dd8cab106a1912536c2b78a4d814ebdff2786094e"},
2867
- {file = "psycopg_binary-3.1.19-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:affebd61aa3b7a8880fd4ac3ee94722940125ff83ff485e1a7c76be9adaabb38"},
2868
- {file = "psycopg_binary-3.1.19-cp37-cp37m-musllinux_1_1_ppc64le.whl", hash = "sha256:d1bac282f140fa092f2bbb6c36ed82270b4a21a6fc55d4b16748ed9f55e50fdb"},
2869
- {file = "psycopg_binary-3.1.19-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:1285aa54449e362b1d30d92b2dc042ad3ee80f479cc4e323448d0a0a8a1641fa"},
2870
- {file = "psycopg_binary-3.1.19-cp37-cp37m-win_amd64.whl", hash = "sha256:6cff31af8155dc9ee364098a328bab688c887c732c66b8d027e5b03818ca0287"},
2871
- {file = "psycopg_binary-3.1.19-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:d9b689c4a17dd3130791dcbb8c30dbf05602f7c2d56c792e193fb49adc7bf5f8"},
2872
- {file = "psycopg_binary-3.1.19-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:017518bd2de4851adc826a224fb105411e148ad845e11355edd6786ba3dfedf5"},
2873
- {file = "psycopg_binary-3.1.19-cp38-cp38-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:c35fd811f339a3cbe7f9b54b2d9a5e592e57426c6cc1051632a62c59c4810208"},
2874
- {file = "psycopg_binary-3.1.19-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:38ed45ec9673709bfa5bc17f140e71dd4cca56d4e58ef7fd50d5a5043a4f55c6"},
2875
- {file = "psycopg_binary-3.1.19-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:433f1c256108f9e26f480a8cd6ddb0fb37dbc87d7f5a97e4540a9da9b881f23f"},
2876
- {file = "psycopg_binary-3.1.19-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:ed61e43bf5dc8d0936daf03a19fef3168d64191dbe66483f7ad08c4cea0bc36b"},
2877
- {file = "psycopg_binary-3.1.19-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:4ae8109ff9fdf1fa0cb87ab6645298693fdd2666a7f5f85660df88f6965e0bb7"},
2878
- {file = "psycopg_binary-3.1.19-cp38-cp38-musllinux_1_1_ppc64le.whl", hash = "sha256:a53809ee02e3952fae7977c19b30fd828bd117b8f5edf17a3a94212feb57faaf"},
2879
- {file = "psycopg_binary-3.1.19-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:9d39d5ffc151fb33bcd55b99b0e8957299c0b1b3e5a1a5f4399c1287ef0051a9"},
2880
- {file = "psycopg_binary-3.1.19-cp38-cp38-win_amd64.whl", hash = "sha256:e14bc8250000921fcccd53722f86b3b3d1b57db901e206e49e2ab2afc5919c2d"},
2881
- {file = "psycopg_binary-3.1.19-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:cd88c5cea4efe614d5004fb5f5dcdea3d7d59422be796689e779e03363102d24"},
2882
- {file = "psycopg_binary-3.1.19-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:621a814e60825162d38760c66351b4df679fd422c848b7c2f86ad399bff27145"},
2883
- {file = "psycopg_binary-3.1.19-cp39-cp39-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:46e50c05952b59a214e27d3606f6d510aaa429daed898e16b8a37bfbacc81acc"},
2884
- {file = "psycopg_binary-3.1.19-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:03354a9db667c27946e70162cb0042c3929154167f3678a30d23cebfe0ad55b5"},
2885
- {file = "psycopg_binary-3.1.19-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:703c2f3b79037581afec7baa2bdbcb0a1787f1758744a7662099b0eca2d721cb"},
2886
- {file = "psycopg_binary-3.1.19-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:6469ebd9e93327e9f5f36dcf8692fb1e7aeaf70087c1c15d4f2c020e0be3a891"},
2887
- {file = "psycopg_binary-3.1.19-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:85bca9765c04b6be90cb46e7566ffe0faa2d7480ff5c8d5e055ac427f039fd24"},
2888
- {file = "psycopg_binary-3.1.19-cp39-cp39-musllinux_1_1_ppc64le.whl", hash = "sha256:a836610d5c75e9cff98b9fdb3559c007c785c09eaa84a60d5d10ef6f85f671e8"},
2889
- {file = "psycopg_binary-3.1.19-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:ef8de7a1d9fb3518cc6b58e3c80b75a824209ad52b90c542686c912db8553dad"},
2890
- {file = "psycopg_binary-3.1.19-cp39-cp39-win_amd64.whl", hash = "sha256:76fcd33342f38e35cd6b5408f1bc117d55ab8b16e5019d99b6d3ce0356c51717"},
2891
- ]
2892
-
2893
  [[package]]
2894
  name = "pydantic"
2895
  version = "2.7.2"
@@ -3018,82 +2711,6 @@ docs = ["myst_parser", "sphinx", "sphinx_rtd_theme"]
3018
  full = ["Pillow (>=8.0.0)", "PyCryptodome", "cryptography"]
3019
  image = ["Pillow (>=8.0.0)"]
3020
 
3021
- [[package]]
3022
- name = "pytest"
3023
- version = "8.2.1"
3024
- description = "pytest: simple powerful testing with Python"
3025
- optional = false
3026
- python-versions = ">=3.8"
3027
- files = [
3028
- {file = "pytest-8.2.1-py3-none-any.whl", hash = "sha256:faccc5d332b8c3719f40283d0d44aa5cf101cec36f88cde9ed8f2bc0538612b1"},
3029
- {file = "pytest-8.2.1.tar.gz", hash = "sha256:5046e5b46d8e4cac199c373041f26be56fdb81eb4e67dc11d4e10811fc3408fd"},
3030
- ]
3031
-
3032
- [package.dependencies]
3033
- colorama = {version = "*", markers = "sys_platform == \"win32\""}
3034
- iniconfig = "*"
3035
- packaging = "*"
3036
- pluggy = ">=1.5,<2.0"
3037
-
3038
- [package.extras]
3039
- dev = ["argcomplete", "attrs (>=19.2)", "hypothesis (>=3.56)", "mock", "pygments (>=2.7.2)", "requests", "setuptools", "xmlschema"]
3040
-
3041
- [[package]]
3042
- name = "pytest-asyncio"
3043
- version = "0.23.7"
3044
- description = "Pytest support for asyncio"
3045
- optional = false
3046
- python-versions = ">=3.8"
3047
- files = [
3048
- {file = "pytest_asyncio-0.23.7-py3-none-any.whl", hash = "sha256:009b48127fbe44518a547bddd25611551b0e43ccdbf1e67d12479f569832c20b"},
3049
- {file = "pytest_asyncio-0.23.7.tar.gz", hash = "sha256:5f5c72948f4c49e7db4f29f2521d4031f1c27f86e57b046126654083d4770268"},
3050
- ]
3051
-
3052
- [package.dependencies]
3053
- pytest = ">=7.0.0,<9"
3054
-
3055
- [package.extras]
3056
- docs = ["sphinx (>=5.3)", "sphinx-rtd-theme (>=1.0)"]
3057
- testing = ["coverage (>=6.2)", "hypothesis (>=5.7.1)"]
3058
-
3059
- [[package]]
3060
- name = "pytest-postgresql"
3061
- version = "6.0.0"
3062
- description = "Postgresql fixtures and fixture factories for Pytest."
3063
- optional = false
3064
- python-versions = ">=3.8"
3065
- files = [
3066
- {file = "pytest-postgresql-6.0.0.tar.gz", hash = "sha256:6a4d8e600a2eef273f3c0e846cd0b2ea577282e252de29b4ca854bfb929bb682"},
3067
- {file = "pytest_postgresql-6.0.0-py3-none-any.whl", hash = "sha256:f14272bffad16a74d9a63f4cc828f243a12ae92995e236b68fd53154760e6a5a"},
3068
- ]
3069
-
3070
- [package.dependencies]
3071
- mirakuru = "*"
3072
- port-for = ">=0.6.0"
3073
- psycopg = ">=3.0.0"
3074
- pytest = ">=6.2"
3075
- setuptools = "*"
3076
-
3077
- [[package]]
3078
- name = "pytest-xdist"
3079
- version = "3.6.1"
3080
- description = "pytest xdist plugin for distributed testing, most importantly across multiple CPUs"
3081
- optional = false
3082
- python-versions = ">=3.8"
3083
- files = [
3084
- {file = "pytest_xdist-3.6.1-py3-none-any.whl", hash = "sha256:9ed4adfb68a016610848639bb7e02c9352d5d9f03d04809919e2dafc3be4cca7"},
3085
- {file = "pytest_xdist-3.6.1.tar.gz", hash = "sha256:ead156a4db231eec769737f57668ef58a2084a34b2e55c4a8fa20d861107300d"},
3086
- ]
3087
-
3088
- [package.dependencies]
3089
- execnet = ">=2.1"
3090
- pytest = ">=7.0.0"
3091
-
3092
- [package.extras]
3093
- psutil = ["psutil (>=3.0)"]
3094
- setproctitle = ["setproctitle"]
3095
- testing = ["filelock"]
3096
-
3097
  [[package]]
3098
  name = "python-dateutil"
3099
  version = "2.9.0.post0"
@@ -3122,20 +2739,6 @@ files = [
3122
  [package.extras]
3123
  cli = ["click (>=5.0)"]
3124
 
3125
- [[package]]
3126
- name = "python-multipart"
3127
- version = "0.0.9"
3128
- description = "A streaming multipart parser for Python"
3129
- optional = false
3130
- python-versions = ">=3.8"
3131
- files = [
3132
- {file = "python_multipart-0.0.9-py3-none-any.whl", hash = "sha256:97ca7b8ea7b05f977dc3849c3ba99d51689822fab725c3703af7c866a0c2b215"},
3133
- {file = "python_multipart-0.0.9.tar.gz", hash = "sha256:03f54688c663f1b7977105f021043b0793151e4cb1c1a9d4a11fc13d622c4026"},
3134
- ]
3135
-
3136
- [package.extras]
3137
- dev = ["atomicwrites (==1.4.1)", "attrs (==23.2.0)", "coverage (==7.4.1)", "hatch", "invoke (==2.2.0)", "more-itertools (==10.2.0)", "pbr (==6.0.0)", "pluggy (==1.4.0)", "py (==1.11.0)", "pytest (==8.0.0)", "pytest-cov (==4.1.0)", "pytest-timeout (==2.2.0)", "pyyaml (==6.0.1)", "ruff (==0.2.1)"]
3138
-
3139
  [[package]]
3140
  name = "pytz"
3141
  version = "2024.1"
@@ -4390,4 +3993,4 @@ testing = ["big-O", "jaraco.functools", "jaraco.itertools", "jaraco.test", "more
4390
  [metadata]
4391
  lock-version = "2.0"
4392
  python-versions = "^3.11,<3.12"
4393
- content-hash = "99d9141afef8d1d15a2e26cd82d141146ba630edd99b3adc82803f50c2682091"
+ content-hash = "764a6a2edfd938e4128d185bb383ddc23dadaa25b9f5cf7215998a90b3cc760e"
pyproject.toml CHANGED
@@ -3,7 +3,7 @@
3
  name = "app"
4
  version = "0.1.0"
5
  description = ""
6
- authors = [ "Praneeth Yerrapragada, Patrick Alexis" ]
+ authors = [ "Marcus Schiesser <mail@marcusschiesser.de>" ]
7
  readme = "README.md"
8
  package-mode = false
9
 
@@ -28,33 +28,14 @@ pydantic = "^2.7.2"
28
  dateparser = "^1.2.0"
29
  pandas = "^2.2.2"
30
  path = "^16.14.0"
31
- pytest-xdist = "^3.6.1"
32
- pytest-asyncio = "^0.23.7"
33
- httpx = "^0.27.0"
34
- alembic = "^1.13.1"
35
- asyncpg = "^0.29.0"
36
- asyncio = "^3.4.3"
37
- pytest-postgresql = "^6.0.0"
38
- psycopg-binary = "^3.1.19"
39
- python-multipart = "^0.0.9"
40
 
41
  [tool.poetry.dependencies.uvicorn]
42
  extras = [ "standard" ]
43
  version = "^0.23.2"
44
 
45
- [tool.poetry.dependencies.sqlalchemy]
46
- extras = [ "asyncio" ]
47
- version = "^2.0.30"
48
-
49
  [tool.poetry.dependencies.llama-index-agent-openai]
50
  version = "0.2.2"
51
 
52
- [tool.black]
53
- line-length = 119
54
-
55
- [tool.pytest.ini_options]
56
- asyncio_mode = "auto"
57
-
58
  [build-system]
59
  requires = [ "poetry-core" ]
60
  build-backend = "poetry.core.masonry.api"
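These dependency removals (the pytest stack, alembic, asyncpg, SQLAlchemy, python-multipart) are what the poetry.lock hunks above reflect; after editing pyproject.toml like this, running poetry lock recomputes the content-hash recorded at the bottom of poetry.lock, keeping the two files in sync.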
tests/conftest.py DELETED
@@ -1,83 +0,0 @@
1
- # Credits: https://github.com/ThomasAitken/demo-fastapi-async-sqlalchemy/blob/main/backend/app/conftest.py
2
-
3
- import asyncio
4
- from contextlib import ExitStack
5
-
6
- import pytest
7
- from alembic.config import Config
8
- from alembic.migration import MigrationContext
9
- from alembic.operations import Operations
10
- from alembic.script import ScriptDirectory
11
- from config.index import config as settings
12
- from app.engine.postgresdb import Base, get_db_session, postgresdb as sessionmanager
13
- from main import init_app
14
- from asyncpg import Connection
15
- from fastapi.testclient import TestClient
16
- from pytest_postgresql import factories
17
- from pytest_postgresql.factories.noprocess import postgresql_noproc
18
- from pytest_postgresql.janitor import DatabaseJanitor
19
- from sqlalchemy.testing.entities import ComparableEntity
20
-
21
- from config.index import config as env
22
-
23
- test_db = factories.postgresql_proc(dbname="test_db", port=5433)
24
-
25
-
26
- @pytest.fixture(autouse=True)
27
- def app():
28
- with ExitStack():
29
- # Don't initialize database connection.
30
- # This is because we want to initialize the database connection manually, so that we can create the test database.
31
- yield init_app(init_db=False)
32
-
33
-
34
- @pytest.fixture
35
- def client(app):
36
- with TestClient(app) as c:
37
- yield c
38
-
39
-
40
- @pytest.fixture(scope="session")
41
- def event_loop(request):
42
- loop = asyncio.get_event_loop_policy().new_event_loop()
43
- yield loop
44
- loop.close()
45
-
46
-
47
- @pytest.fixture(scope="function", autouse=True)
48
- async def connection_test(test_db, event_loop):
49
- pg_host = test_db.host
50
- pg_port = test_db.port
51
- pg_user = test_db.user
52
- pg_db = test_db.dbname
53
- pg_password = test_db.password
54
-
55
- with DatabaseJanitor(user=pg_user, host=pg_host, port=pg_port, dbname=pg_db, version=test_db.version, password=pg_password):
56
- connection_str = f"postgresql+psycopg://{pg_user}:@{pg_host}:{pg_port}/{pg_db}"
57
- sessionmanager.init(connection_str,
58
- # {"echo": True, "future": True}
59
- )
60
- yield
61
- await sessionmanager.close()
62
-
63
-
64
- @pytest.fixture(scope="function", autouse=True)
65
- async def create_tables(connection_test):
66
- async with sessionmanager.connect() as connection:
67
- await sessionmanager.drop_all(connection)
68
- await sessionmanager.create_all(connection)
69
-
70
-
71
- @pytest.fixture(scope="function", autouse=True)
72
- async def session_override(app, connection_test):
73
- async def get_db_session_override():
74
- async with sessionmanager.session() as session:
75
- yield session
76
-
77
- app.dependency_overrides[get_db_session] = get_db_session_override
78
-
79
-
80
- @pytest.fixture(scope="function", autouse=True)
81
- async def get_db_session_fixture():
82
- async with sessionmanager.session() as session:
83
- yield session
tests/pytest.ini DELETED
@@ -1,4 +0,0 @@
1
- [pytest]
2
- asyncio_mode = auto
3
- timeout = 60
4
- addopts = -vv --disable-warnings --durations=10 --durations-min=1.0
tests/test_income_statement.py DELETED
@@ -1,51 +0,0 @@
1
- from fastapi.testclient import TestClient
2
- import pytest
3
-
4
- from sqlalchemy.ext.asyncio import AsyncSession
5
- from app.model.transaction import Transaction
6
- from app.model.user import User
7
- from tests.utils import get_fake_transactions
8
-
9
- @pytest.mark.asyncio
10
- async def test_income_statement(client: TestClient, get_db_session_fixture: AsyncSession) -> None:
11
- session_override = get_db_session_fixture
12
-
13
- # 1. Create a user
14
- user = await User.create(session_override, name="user", email="email", hashed_password="password")
15
-
16
- # 2. Create a bunch of transactions
17
- fake_transactions = get_fake_transactions(user.id)
18
- await Transaction.bulk_create(session_override, fake_transactions)
19
-
20
- # 3. Create an income statement
21
- min_date = min(t.transaction_date for t in fake_transactions)
22
- max_date = max(t.transaction_date for t in fake_transactions)
23
-
24
- print(f"min_date: {min_date}, max_date: {max_date}")
25
- response = client.post(
26
- "/api/v1/income_statement",
27
- json={
28
- "user_id": 1,
29
- "date_from": str(min_date),
30
- "date_to": str(max_date),
31
- },
32
- )
33
-
34
- assert response.status_code == 200
35
-
36
- # 4. Verify that the income statement matches the transactions
37
- response = client.get(f"/api/v1/income_statement/user/1")
38
- print(response.json())
39
- assert response.status_code == 200
40
- assert response.json()[0].get("income")
41
- assert response.json()[0].get("expenses")
42
-
43
- report_id = response.json()[0].get("id")
44
-
45
- # # 5. Verify that the income statement can be retrieved
46
- if report_id is not None:
47
- response = client.get(f"/api/v1/income_statement/report/{report_id}")
48
- assert response.status_code == 200
49
- assert response.json().get("income")
50
- assert response.json().get("expenses")
51
- assert response.json().get("id") == report_id
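If this test is ever restored, one small tidy-up: parse the response once instead of re-calling response.json() for every assertion. A sketch of step 4:

    assert response.status_code == 200
    body = response.json()
    assert body[0].get("income")
    assert body[0].get("expenses")
    report_id = body[0].get("id")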
tests/test_transactions.py DELETED
@@ -1,25 +0,0 @@
1
- from fastapi.testclient import TestClient
2
- import pytest
3
-
4
- from app.model.transaction import Transaction
5
- from app.model.user import User
6
- from sqlalchemy.ext.asyncio import AsyncSession
7
- from tests.utils import get_fake_transactions
8
-
9
-
10
- @pytest.mark.asyncio
11
- async def test_transactions(client: TestClient, get_db_session_fixture: AsyncSession) -> None:
12
-
13
- session_override = get_db_session_fixture
14
-
15
- # 1. Create a user
16
- user = await User.create(session_override, name="user", email="email", hashed_password="password")
17
-
18
- # 2. Create a bunch of transactions
19
- fake_transactions = get_fake_transactions(user.id)
20
- await Transaction.bulk_create(session_override, fake_transactions)
21
-
22
- # 3. Verify that the transactions are returned
23
- response = client.get("/api/v1/transactions/1")
24
- assert response.status_code == 200
25
- assert len(response.json()) == 10
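One fragility in the deleted test above: it creates a user but then queries /api/v1/transactions/1 with a hardcoded id. A restored version is sturdier if it uses the created user's id and the fixture length; a sketch:

    response = client.get(f"/api/v1/transactions/{user.id}")
    assert response.status_code == 200
    assert len(response.json()) == len(fake_transactions)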
tests/test_users.py DELETED
@@ -1,40 +0,0 @@
1
- from random import randint
2
-
3
- from fastapi.testclient import TestClient
4
-
5
-
6
- def test_users(client: TestClient) -> None:
7
- rand_int = randint(0, 1000)
8
- email = f"test{rand_int}@example.com"
9
- response = client.get(f"/api/v1/users/{email}")
10
- assert response.status_code == 404
11
-
12
- response = client.post(
13
- "/api/v1/users",
14
- json={"email": email, "name": "Full Name Test", "hashed_password": "test"},
15
- )
16
- assert response.status_code == 200
17
- assert response.json().get("email") == email
18
- assert response.json().get("name") == "Full Name Test"
19
- assert response.json().get("is_deleted") == False
20
-
21
- response = client.get(f"/api/v1/users/{email}")
22
- assert response.status_code == 200
23
- assert response.json().get("email") == email
24
- assert response.json().get("name") == "Full Name Test"
25
- assert response.json().get("is_deleted") == False
26
-
27
- response = client.put(
28
- f"/api/v1/users/{email}",
29
- json={"email": email, "name": "Full Name Test 2", "hashed_password": "test"},
30
- )
31
- assert response.status_code == 200
32
- assert response.json().get("email") == email
33
- assert response.json().get("name") == "Full Name Test 2"
34
- assert response.json().get("is_deleted") == False
35
-
36
- response = client.delete(f"/api/v1/users/{email}")
37
- assert response.status_code == 200
38
- assert response.json().get("email") == email
39
- assert response.json().get("name") == "Full Name Test 2"
40
- assert response.json().get("is_deleted") == True
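Style note on the deleted assertions: comparing to booleans with == is flagged by most linters (flake8 E712). If the test returns, the idiomatic form is:

    # Create/read/update responses:
    assert response.json().get("is_deleted") is False
    # After the DELETE call:
    assert response.json().get("is_deleted") is True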
tests/utils.py DELETED
@@ -1,88 +0,0 @@
1
- from datetime import datetime
2
- from typing import List
3
- from app.schema.index import TransactionCreate, TransactionType
4
-
5
-
6
- def get_fake_transactions(user_id: int) -> List[TransactionCreate]:
7
- return [
8
- TransactionCreate(
9
- user_id=user_id,
10
- transaction_date=datetime(2022, 1, 1),
11
- category="category1",
12
- name_description="name_description",
13
- amount=1.0,
14
- type=TransactionType.EXPENSE,
15
- ),
16
- TransactionCreate(
17
- user_id=user_id,
18
- transaction_date=datetime(2022, 1, 2),
19
- category="category2",
20
- name_description="name_description",
21
- amount=2.0,
22
- type=TransactionType.EXPENSE,
23
- ),
24
- TransactionCreate(
25
- user_id=user_id,
26
- transaction_date=datetime(2022, 1, 3),
27
- category="category3",
28
- name_description="name_description",
29
- amount=3.0,
30
- type=TransactionType.INCOME,
31
- ),
32
- TransactionCreate(
33
- user_id=user_id,
34
- transaction_date=datetime(2022, 1, 4),
35
- category="category1",
36
- name_description="name_description",
37
- amount=4.0,
38
- type=TransactionType.INCOME,
39
- ),
40
- TransactionCreate(
41
- user_id=user_id,
42
- transaction_date=datetime(2022, 1, 5),
43
- category="category2",
44
- name_description="name_description",
45
- amount=5.0,
46
- type=TransactionType.EXPENSE,
47
- ),
48
- TransactionCreate(
49
- user_id=user_id,
50
- transaction_date=datetime(2022, 1, 6),
51
- category="category3",
52
- name_description="name_description",
53
- amount=6.0,
54
- type=TransactionType.EXPENSE,
55
- ),
56
- TransactionCreate(
57
- user_id=user_id,
58
- transaction_date=datetime(2022, 1, 7),
59
- category="category1",
60
- name_description="name_description",
61
- amount=7.0,
62
- type=TransactionType.INCOME,
63
- ),
64
- TransactionCreate(
65
- user_id=user_id,
66
- transaction_date=datetime(2022, 1, 8),
67
- category="category2",
68
- name_description="name_description",
69
- amount=8.0,
70
- type=TransactionType.INCOME,
71
- ),
72
- TransactionCreate(
73
- user_id=user_id,
74
- transaction_date=datetime(2022, 1, 9),
75
- category="category3",
76
- name_description="name_description",
77
- amount=9.0,
78
- type=TransactionType.EXPENSE,
79
- ),
80
- TransactionCreate(
81
- user_id=user_id,
82
- transaction_date=datetime(2022, 1, 10),
83
- category="category1",
84
- name_description="name_description",
85
- amount=10.0,
86
- type=TransactionType.EXPENSE,
87
- ),
88
- ]
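The ten near-identical literals in the deleted helper can also be generated. A sketch that reproduces the same dates, category rotation, amounts, and income/expense pattern as the original list:

    from datetime import datetime
    from typing import List

    from app.schema.index import TransactionCreate, TransactionType

    # Days (of 2022-01) whose transactions were INCOME in the original fixture.
    _INCOME_DAYS = {3, 4, 7, 8}

    def get_fake_transactions(user_id: int) -> List[TransactionCreate]:
        return [
            TransactionCreate(
                user_id=user_id,
                transaction_date=datetime(2022, 1, day),
                category=f"category{(day - 1) % 3 + 1}",
                name_description="name_description",
                amount=float(day),
                type=TransactionType.INCOME if day in _INCOME_DAYS else TransactionType.EXPENSE,
            )
            for day in range(1, 11)
        ]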