camphong24032002 committed · Commit 6b95d78 · 1 parent: 71baec5 · Commit message: Test
Files changed:
- .gitignore +160 -0
- Dockerfile +11 -2
- data/data.csv +0 -0
- data/data_ceo.csv +252 -0
- logic/__init__.py +0 -0
- logic/api.py +93 -0
- logic/nadaraya.py +65 -0
- logic/pattern_hammer.py +275 -0
- logic/res_sup.py +156 -0
- logic/utils.py +0 -0
- main.py +24 -1
- models/__init__.py +0 -0
- models/ichimoku.py +5 -0
- models/macd.py +5 -0
- models/resup.py +7 -0
- models/rsi.py +10 -0
- models/summary.py +10 -0
- notebook.ipynb +0 -0
- routes/__init__.py +0 -0
- routes/confidence.py +23 -0
- routes/signal.py +202 -0
- services/__init__.py +0 -0
- services/config.py +2 -0
- services/credentials/__init__.py +0 -0
- services/credentials/credentials.json +1 -0
- services/credentials/token.pickle +3 -0
- services/google_calendar.py +172 -0
- services/indicator.py +97 -0
- services/notification.py +50 -0
- services/preprocess.py +9 -0
- services/resup.py +153 -0
- services/strategy.py +167 -0
- setup.py +3 -0
.gitignore
ADDED
@@ -0,0 +1,160 @@
# Byte-compiled / optimized / DLL files
__pycache__/
*.py[cod]
*$py.class

# C extensions
*.so

# Distribution / packaging
.Python
build/
develop-eggs/
dist/
downloads/
eggs/
.eggs/
lib/
lib64/
parts/
sdist/
var/
wheels/
share/python-wheels/
*.egg-info/
.installed.cfg
*.egg
MANIFEST

# PyInstaller
# Usually these files are written by a python script from a template
# before PyInstaller builds the exe, so as to inject date/other infos into it.
*.manifest
*.spec

# Installer logs
pip-log.txt
pip-delete-this-directory.txt

# Unit test / coverage reports
htmlcov/
.tox/
.nox/
.coverage
.coverage.*
.cache
nosetests.xml
coverage.xml
*.cover
*.py,cover
.hypothesis/
.pytest_cache/
cover/

# Translations
*.mo
*.pot

# Django stuff:
*.log
local_settings.py
db.sqlite3
db.sqlite3-journal

# Flask stuff:
instance/
.webassets-cache

# Scrapy stuff:
.scrapy

# Sphinx documentation
docs/_build/

# PyBuilder
.pybuilder/
target/

# Jupyter Notebook
.ipynb_checkpoints

# IPython
profile_default/
ipython_config.py

# pyenv
# For a library or package, you might want to ignore these files since the code is
# intended to run in multiple environments; otherwise, check them in:
# .python-version

# pipenv
# According to pypa/pipenv#598, it is recommended to include Pipfile.lock in version control.
# However, in case of collaboration, if having platform-specific dependencies or dependencies
# having no cross-platform support, pipenv may install dependencies that don't work, or not
# install all needed dependencies.
#Pipfile.lock

# poetry
# Similar to Pipfile.lock, it is generally recommended to include poetry.lock in version control.
# This is especially recommended for binary packages to ensure reproducibility, and is more
# commonly ignored for libraries.
# https://python-poetry.org/docs/basic-usage/#commit-your-poetrylock-file-to-version-control
#poetry.lock

# pdm
# Similar to Pipfile.lock, it is generally recommended to include pdm.lock in version control.
#pdm.lock
# pdm stores project-wide configurations in .pdm.toml, but it is recommended to not include it
# in version control.
# https://pdm.fming.dev/#use-with-ide
.pdm.toml

# PEP 582; used by e.g. github.com/David-OConnor/pyflow and github.com/pdm-project/pdm
__pypackages__/

# Celery stuff
celerybeat-schedule
celerybeat.pid

# SageMath parsed files
*.sage.py

# Environments
.env
.venv
env/
venv/
ENV/
env.bak/
venv.bak/

# Spyder project settings
.spyderproject
.spyproject

# Rope project settings
.ropeproject

# mkdocs documentation
/site

# mypy
.mypy_cache/
.dmypy.json
dmypy.json

# Pyre type checker
.pyre/

# pytype static type analyzer
.pytype/

# Cython debug symbols
cython_debug/

# PyCharm
# JetBrains specific template is maintained in a separate JetBrains.gitignore that can
# be found at https://github.com/github/gitignore/blob/main/Global/JetBrains.gitignore
# and can be added to the global gitignore or merged into this file. For a more nuclear
# option (not recommended) you can uncomment the following to ignore the entire idea folder.
#.idea/
Dockerfile
CHANGED
@@ -1,12 +1,21 @@
-
 FROM python:3.9
 
+RUN apt-get update && apt-get install -y \
+    libgl1-mesa-glx \
+    && rm -rf /var/lib/apt/lists/*
+
 WORKDIR /code
 
 COPY ./requirements.txt /code/requirements.txt
 
+RUN pip install --upgrade pip
+
 RUN pip install --no-cache-dir --upgrade -r /code/requirements.txt
 
 COPY . .
 
-
+RUN chown -R 1000:1000 /code
+
+USER 1000
+
+CMD ["uvicorn", "main:app", "--host", "0.0.0.0", "--port", "7860"]
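The new CMD boots the FastAPI app with uvicorn on port 7860, the port Hugging Face Spaces expects. As a rough Python equivalent of that CMD — a minimal sketch, assuming main.py exposes the app object shown later in this diff:

import uvicorn
from main import app  # the FastAPI instance defined in main.py

if __name__ == "__main__":
    # Same effect as the Dockerfile CMD: listen on all interfaces, port 7860.
    uvicorn.run(app, host="0.0.0.0", port=7860)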
data/data.csv
ADDED
The diff for this file is too large to render.
See raw diff
data/data_ceo.csv
ADDED
@@ -0,0 +1,252 @@
,Date,Symbol,Traded Volume,Lowest Price,Opening Price,Highest Price,Closing Price,min,max
0,2023-02-02,CEO,11908079.0,16125.0,16650.0,17100.0,16950.0,,
1,2023-02-03,CEO,8261230.0,16650.0,16950.0,17550.0,16950.0,,17550.0
2,2023-02-06,CEO,6806328.0,16425.0,16950.0,17400.0,17250.0,,
3,2023-02-07,CEO,10121335.0,16125.0,16125.0,17325.0,16275.0,,
4,2023-02-08,CEO,9716095.0,15450.0,15825.0,17025.0,16425.0,,
5,2023-02-09,CEO,3522188.0,15825.0,15975.0,16650.0,16050.0,,
6,2023-02-10,CEO,6564157.0,15600.0,16050.0,16425.0,15900.0,,
7,2023-02-13,CEO,11503385.0,14475.0,15750.0,15900.0,14550.0,14475.0,
8,2023-02-14,CEO,4227034.0,14475.0,14625.0,15000.0,14775.0,14475.0,
9,2023-02-15,CEO,7853484.0,14475.0,14775.0,16200.0,15825.0,14475.0,
10,2023-02-16,CEO,5682302.0,15525.0,15900.0,16050.0,15975.0,,
11,2023-02-17,CEO,9008735.0,15750.0,15975.0,16875.0,15975.0,,
12,2023-02-20,CEO,14388698.0,15825.0,15825.0,17550.0,17550.0,,
13,2023-02-21,CEO,10347231.0,17100.0,17550.0,18075.0,17100.0,,18075.0
14,2023-02-22,CEO,13412784.0,16125.0,17100.0,17250.0,16125.0,,
15,2023-02-23,CEO,10572049.0,15000.0,16125.0,16350.0,16125.0,,
16,2023-02-24,CEO,3665272.0,15525.0,16125.0,16500.0,15600.0,,
17,2023-02-27,CEO,7447638.0,14625.0,15075.0,15600.0,14625.0,,
18,2023-02-28,CEO,5260388.0,14625.0,14700.0,15225.0,15150.0,,
19,2023-03-01,CEO,8161201.0,13875.0,15000.0,15375.0,15300.0,13875.0,
20,2023-03-02,CEO,2693431.0,15000.0,15000.0,15600.0,15075.0,,
21,2023-03-03,CEO,5476645.0,14475.0,15075.0,15225.0,14475.0,,
22,2023-03-06,CEO,7358582.0,14625.0,14625.0,15900.0,15450.0,,15900.0
23,2023-03-07,CEO,4696240.0,15075.0,15750.0,15750.0,15300.0,,
24,2023-03-08,CEO,6162978.0,14775.0,15150.0,15900.0,15675.0,,15900.0
25,2023-03-09,CEO,4159924.0,14175.0,14175.0,15900.0,15525.0,14175.0,
26,2023-03-10,CEO,3927563.0,15150.0,15450.0,15525.0,15225.0,,
27,2023-03-13,CEO,3229902.0,14700.0,15150.0,15525.0,15000.0,,
28,2023-03-14,CEO,6186850.0,14625.0,14625.0,15375.0,14700.0,,
29,2023-03-15,CEO,7701007.0,15000.0,15000.0,16050.0,15900.0,,16050.0
30,2023-03-16,CEO,4295589.0,15300.0,15825.0,15825.0,15450.0,,
31,2023-03-17,CEO,4742761.0,15300.0,15600.0,15900.0,15375.0,,
32,2023-03-20,CEO,5025656.0,15225.0,15225.0,15900.0,15375.0,,
33,2023-03-21,CEO,3554391.0,15375.0,15450.0,15750.0,15750.0,,
34,2023-03-22,CEO,11769277.0,15750.0,15900.0,16800.0,16050.0,,16800.0
35,2023-03-23,CEO,6482349.0,15825.0,16050.0,16275.0,16275.0,,
36,2023-03-24,CEO,8111688.0,16275.0,16350.0,16800.0,16275.0,,16800.0
37,2023-03-27,CEO,5647444.0,16200.0,16275.0,16650.0,16350.0,,
38,2023-03-28,CEO,7471593.0,16050.0,16500.0,16725.0,16050.0,,
39,2023-03-29,CEO,4680395.0,15900.0,16050.0,16275.0,16275.0,,
40,2023-03-30,CEO,7607650.0,16275.0,16500.0,16725.0,16275.0,,
41,2023-03-31,CEO,7441610.0,16125.0,16275.0,16650.0,16650.0,,
42,2023-04-03,CEO,14105497.0,16875.0,17025.0,18000.0,17925.0,,
43,2023-04-04,CEO,8841956.0,17700.0,17925.0,18225.0,17775.0,,
44,2023-04-05,CEO,12078924.0,17400.0,17400.0,18750.0,18375.0,,
45,2023-04-06,CEO,19072990.0,18000.0,18000.0,19875.0,18375.0,,
46,2023-04-07,CEO,11657589.0,18225.0,18375.0,19125.0,19125.0,,
47,2023-04-10,CEO,11776515.0,18525.0,19125.0,19950.0,18750.0,,19950.0
48,2023-04-11,CEO,15408534.0,17775.0,18750.0,19125.0,19050.0,,
49,2023-04-12,CEO,6440979.0,18750.0,19125.0,19425.0,18975.0,,
50,2023-04-13,CEO,7870234.0,18600.0,19200.0,19350.0,18675.0,,
51,2023-04-14,CEO,13977468.0,17325.0,18750.0,18825.0,17325.0,,
52,2023-04-17,CEO,5243787.0,17100.0,17325.0,17550.0,17250.0,17100.0,
53,2023-04-18,CEO,10043820.0,17175.0,17475.0,18225.0,18075.0,,
54,2023-04-19,CEO,5249520.0,17475.0,18150.0,18300.0,17550.0,,
55,2023-04-20,CEO,4284267.0,17250.0,17550.0,17775.0,17625.0,,
56,2023-04-21,CEO,4434529.0,17400.0,17700.0,17925.0,17550.0,,
57,2023-04-24,CEO,3885914.0,17175.0,17625.0,17700.0,17550.0,17175.0,
58,2023-04-25,CEO,6553030.0,17475.0,17700.0,18150.0,17550.0,,
59,2023-04-26,CEO,4430623.0,17400.0,17550.0,18000.0,18000.0,,
60,2023-04-27,CEO,10499721.0,18000.0,18225.0,18675.0,18300.0,,
61,2023-04-28,CEO,10844589.0,18300.0,18300.0,19125.0,18975.0,,
62,2023-05-04,CEO,8308853.0,18375.0,18975.0,19425.0,18675.0,,
63,2023-05-05,CEO,7806524.0,16875.0,16875.0,18675.0,18375.0,16875.0,
64,2023-05-08,CEO,5939910.0,18375.0,18525.0,18825.0,18675.0,,
65,2023-05-09,CEO,6222404.0,18225.0,18675.0,18825.0,18450.0,,
66,2023-05-10,CEO,11760379.0,18225.0,18225.0,19500.0,19200.0,,
67,2023-05-11,CEO,16058662.0,19050.0,19275.0,20700.0,20175.0,,20700.0
68,2023-05-12,CEO,7398644.0,19875.0,20175.0,20400.0,20250.0,,
69,2023-05-15,CEO,10510729.0,19500.0,20250.0,20700.0,19500.0,,20700.0
70,2023-05-16,CEO,6593493.0,17550.0,17550.0,19800.0,19650.0,17550.0,
71,2023-05-17,CEO,9569868.0,18900.0,19650.0,20025.0,18975.0,,
72,2023-05-18,CEO,7004697.0,18750.0,18975.0,19425.0,18750.0,,
73,2023-05-19,CEO,5825404.0,18600.0,18825.0,19125.0,19050.0,,
74,2023-05-22,CEO,5015921.0,18825.0,19125.0,19500.0,19500.0,,
75,2023-05-23,CEO,7440555.0,19350.0,19650.0,20025.0,19575.0,,
76,2023-05-24,CEO,7066666.0,19350.0,19725.0,20100.0,19425.0,,
77,2023-05-25,CEO,6225072.0,18900.0,19350.0,19425.0,19050.0,,
78,2023-05-26,CEO,5189651.0,18900.0,19275.0,19650.0,19425.0,,
79,2023-05-29,CEO,5912911.0,19500.0,19500.0,19875.0,19800.0,,
80,2023-05-30,CEO,13184262.0,19875.0,19950.0,21000.0,20400.0,,21000.0
81,2023-05-31,CEO,7483326.0,20025.0,20550.0,20625.0,20250.0,,
82,2023-06-01,CEO,6748914.0,19950.0,20250.0,20550.0,20175.0,,
83,2023-06-02,CEO,7795951.0,20025.0,20250.0,20625.0,20175.0,,
84,2023-06-05,CEO,7397408.0,19950.0,20250.0,20550.0,19950.0,,
85,2023-06-06,CEO,7949296.0,19575.0,20175.0,20175.0,19950.0,,
86,2023-06-07,CEO,11274333.0,19950.0,19950.0,20850.0,20700.0,,
87,2023-06-08,CEO,10403400.0,19650.0,20850.0,20925.0,19650.0,,20925.0
88,2023-06-09,CEO,13118171.0,18750.0,19650.0,19800.0,19500.0,,
89,2023-06-12,CEO,8316347.0,18825.0,19500.0,19650.0,18975.0,,
90,2023-06-13,CEO,6626072.0,18975.0,18975.0,19500.0,19200.0,,
91,2023-06-14,CEO,5235718.0,18825.0,19200.0,19425.0,18900.0,,
92,2023-06-15,CEO,5543757.0,18600.0,18900.0,18975.0,18675.0,,
93,2023-06-16,CEO,7000557.0,18675.0,18750.0,19275.0,18825.0,,
94,2023-06-19,CEO,5199958.0,18525.0,19050.0,19050.0,18750.0,18525.0,
95,2023-06-20,CEO,4650550.0,18750.0,18750.0,19275.0,19275.0,,
96,2023-06-21,CEO,5153996.0,19050.0,19275.0,19500.0,19350.0,,
97,2023-06-22,CEO,11228498.0,19350.0,19425.0,20250.0,19875.0,,20250.0
98,2023-06-23,CEO,6100164.0,19575.0,20025.0,20025.0,19650.0,,
99,2023-06-26,CEO,10115289.0,18975.0,19800.0,19800.0,19425.0,,
100,2023-06-27,CEO,3690745.0,19125.0,19200.0,19500.0,19275.0,,
101,2023-06-28,CEO,4347803.0,19200.0,19425.0,19725.0,19275.0,,
102,2023-06-29,CEO,5514164.0,18675.0,19275.0,19350.0,18675.0,,
103,2023-06-30,CEO,12216023.0,16875.0,16875.0,18825.0,17850.0,16875.0,
104,2023-07-03,CEO,4119623.0,17475.0,17850.0,18225.0,17625.0,,
105,2023-07-04,CEO,3627598.0,17625.0,17625.0,17925.0,17925.0,,
106,2023-07-05,CEO,3587781.0,17850.0,18000.0,18225.0,17850.0,,
107,2023-07-06,CEO,5901499.0,17175.0,17850.0,17925.0,17325.0,,
108,2023-07-07,CEO,11049454.0,15825.0,17325.0,17325.0,16650.0,,
109,2023-07-10,CEO,12539637.0,15825.0,16575.0,16800.0,16425.0,,
110,2023-07-11,CEO,5801955.0,16125.0,16425.0,16650.0,16275.0,,
111,2023-07-12,CEO,14427314.0,15375.0,16275.0,16350.0,15450.0,,
112,2023-07-13,CEO,18292271.0,14850.0,15375.0,16425.0,16050.0,14850.0,
113,2023-07-14,CEO,7743683.0,15825.0,16050.0,16500.0,15900.0,,
114,2023-07-17,CEO,11554001.0,15600.0,15900.0,16125.0,15675.0,,
115,2023-07-18,CEO,14757765.0,15225.0,15600.0,15750.0,15300.0,,
116,2023-07-19,CEO,1263115.0,16800.0,16800.0,16800.0,16800.0,,
117,2023-07-20,CEO,17001583.0,17200.0,17900.0,18400.0,17900.0,,
118,2023-07-21,CEO,7702330.0,17600.0,17900.0,18200.0,18000.0,,
119,2023-07-24,CEO,9906537.0,16200.0,17800.0,18200.0,18000.0,,
120,2023-07-25,CEO,13317752.0,16200.0,16200.0,19400.0,18700.0,16200.0,
121,2023-07-26,CEO,7547408.0,18100.0,18400.0,18800.0,18400.0,,
122,2023-07-27,CEO,9312126.0,18200.0,18600.0,19000.0,18700.0,,
123,2023-07-28,CEO,13565690.0,18500.0,18800.0,19500.0,19200.0,,
124,2023-07-31,CEO,9761808.0,19100.0,19300.0,20000.0,19700.0,,
125,2023-08-01,CEO,10814531.0,18800.0,19700.0,20100.0,18800.0,,
126,2023-08-02,CEO,6822176.0,18700.0,18800.0,19200.0,19100.0,,
127,2023-08-03,CEO,9627808.0,18900.0,19100.0,19600.0,19300.0,,
128,2023-08-04,CEO,11928824.0,19300.0,19400.0,20000.0,19900.0,,
129,2023-08-07,CEO,8853043.0,19700.0,19800.0,20700.0,20200.0,,20700.0
130,2023-08-08,CEO,9661819.0,19600.0,20300.0,20400.0,19600.0,,
131,2023-08-09,CEO,7166359.0,19200.0,19600.0,19800.0,19200.0,,
132,2023-08-10,CEO,5763336.0,19200.0,19300.0,19700.0,19300.0,,
133,2023-08-11,CEO,6553665.0,18800.0,19300.0,19600.0,19400.0,18800.0,
134,2023-08-14,CEO,15792630.0,19400.0,19400.0,21300.0,21300.0,,
135,2023-08-15,CEO,28485217.0,22000.0,22000.0,23400.0,23400.0,,
136,2023-08-16,CEO,21155869.0,23300.0,25500.0,25600.0,25300.0,,
137,2023-08-17,CEO,16970045.0,24700.0,25300.0,25900.0,25100.0,,
138,2023-08-18,CEO,31397070.0,23000.0,24900.0,26500.0,24000.0,,26500.0
139,2023-08-21,CEO,17815617.0,21600.0,24000.0,25900.0,25600.0,21600.0,
140,2023-08-22,CEO,16453769.0,23600.0,25600.0,26200.0,24900.0,,
141,2023-08-23,CEO,11571814.0,23800.0,25200.0,25400.0,24000.0,,
142,2023-08-24,CEO,18008270.0,23700.0,24000.0,26100.0,26100.0,,
143,2023-08-25,CEO,18218180.0,26000.0,26300.0,27000.0,26100.0,,27000.0
144,2023-08-28,CEO,15099470.0,25400.0,26300.0,26400.0,26200.0,,
145,2023-08-29,CEO,12754759.0,25600.0,26400.0,27000.0,25600.0,,
146,2023-08-30,CEO,15126322.0,25000.0,25600.0,25800.0,25300.0,,
147,2023-08-31,CEO,13255367.0,25200.0,25300.0,26500.0,26100.0,,
148,2023-09-05,CEO,16948963.0,23500.0,26000.0,27800.0,27200.0,23500.0,
149,2023-09-06,CEO,9612018.0,26600.0,26900.0,27600.0,27500.0,,
150,2023-09-07,CEO,10605767.0,27100.0,27500.0,28300.0,27300.0,,
151,2023-09-08,CEO,11421124.0,26800.0,27400.0,27600.0,27100.0,,
152,2023-09-11,CEO,12008927.0,26700.0,27500.0,28000.0,26700.0,,
153,2023-09-12,CEO,8324275.0,26400.0,26700.0,27600.0,27600.0,,
154,2023-09-13,CEO,14638639.0,27600.0,27900.0,29600.0,28400.0,,29600.0
155,2023-09-14,CEO,10061518.0,27000.0,28400.0,28600.0,27000.0,,
156,2023-09-15,CEO,6141561.0,26600.0,27000.0,27600.0,27100.0,,
157,2023-09-18,CEO,6894578.0,26100.0,27000.0,27200.0,26400.0,,
158,2023-09-19,CEO,14234247.0,24900.0,25100.0,26700.0,25100.0,,
159,2023-09-20,CEO,10221397.0,24900.0,25100.0,26500.0,26500.0,,
160,2023-09-21,CEO,6874657.0,25800.0,26700.0,27000.0,25900.0,,
161,2023-09-22,CEO,12549443.0,23500.0,25800.0,25900.0,23900.0,,
162,2023-09-25,CEO,12234822.0,21600.0,23800.0,24200.0,21600.0,,
163,2023-09-26,CEO,11109817.0,20200.0,21700.0,22300.0,20700.0,,
164,2023-09-27,CEO,8707649.0,20100.0,20600.0,21800.0,21800.0,,
165,2023-09-28,CEO,5060235.0,20800.0,22000.0,22200.0,21000.0,,
166,2023-09-29,CEO,6053644.0,21100.0,21200.0,21800.0,21300.0,,
167,2023-10-02,CEO,6882049.0,21300.0,21300.0,22200.0,21600.0,,
168,2023-10-03,CEO,14171219.0,19500.0,21600.0,21700.0,19500.0,,
169,2023-10-04,CEO,8025282.0,18100.0,19200.0,20200.0,19600.0,,
170,2023-10-05,CEO,7369644.0,18200.0,19700.0,20100.0,18300.0,,
171,2023-10-06,CEO,8738334.0,17500.0,18300.0,18900.0,18500.0,17500.0,
172,2023-10-09,CEO,7764334.0,18500.0,18800.0,19900.0,19800.0,,
173,2023-10-10,CEO,7171004.0,19600.0,20000.0,20600.0,19600.0,,
174,2023-10-11,CEO,6619141.0,19000.0,19600.0,20000.0,20000.0,,
175,2023-10-12,CEO,11334191.0,20100.0,20100.0,21200.0,20600.0,,
176,2023-10-13,CEO,8621291.0,19800.0,20600.0,21000.0,20900.0,,
177,2023-10-16,CEO,8724225.0,20500.0,20900.0,21500.0,20500.0,,21500.0
178,2023-10-17,CEO,5554165.0,19000.0,20700.0,20900.0,19000.0,,
179,2023-10-18,CEO,10867376.0,17500.0,19300.0,19500.0,18200.0,17500.0,
180,2023-10-19,CEO,8983183.0,17500.0,18200.0,19000.0,18200.0,17500.0,
181,2023-10-20,CEO,11377635.0,18000.0,18200.0,20000.0,20000.0,,
182,2023-10-23,CEO,12738156.0,18000.0,18000.0,21200.0,20300.0,,
183,2023-10-24,CEO,8905742.0,19800.0,19800.0,21000.0,21000.0,,
184,2023-10-25,CEO,9573246.0,20100.0,21100.0,21400.0,20200.0,,21400.0
185,2023-10-26,CEO,20852383.0,18200.0,20200.0,20200.0,18500.0,,
186,2023-10-27,CEO,9752679.0,18200.0,18500.0,20300.0,20300.0,,
187,2023-10-30,CEO,9770085.0,19400.0,20100.0,20400.0,19600.0,,
188,2023-10-31,CEO,14236635.0,19500.0,19700.0,20600.0,19700.0,,
189,2023-11-01,CEO,10760413.0,18800.0,19700.0,20200.0,20200.0,,
190,2023-11-02,CEO,18154138.0,20100.0,20500.0,22200.0,22100.0,,
191,2023-11-03,CEO,12490448.0,21500.0,22100.0,22500.0,21900.0,,
192,2023-11-06,CEO,10068538.0,21400.0,22000.0,22500.0,22200.0,,
193,2023-11-07,CEO,11215293.0,21600.0,22200.0,22500.0,21800.0,,
194,2023-11-08,CEO,20798138.0,21600.0,21800.0,23900.0,23900.0,,
195,2023-11-09,CEO,14727762.0,24000.0,24500.0,25500.0,24100.0,,25500.0
196,2023-11-10,CEO,23598705.0,23300.0,24100.0,25000.0,23500.0,,
197,2023-11-13,CEO,14125299.0,22600.0,23600.0,24100.0,23100.0,,
198,2023-11-14,CEO,14293800.0,22500.0,23500.0,23800.0,22700.0,,
199,2023-11-15,CEO,16995446.0,22500.0,22900.0,23900.0,22600.0,,
200,2023-11-16,CEO,46940585.0,21400.0,22100.0,23700.0,23500.0,,
201,2023-11-17,CEO,26139595.0,22600.0,23500.0,24100.0,22800.0,,
202,2023-11-20,CEO,17824665.0,21800.0,23000.0,23200.0,22900.0,,
203,2023-11-21,CEO,16432028.0,20700.0,20700.0,23500.0,23100.0,,
204,2023-11-22,CEO,20220451.0,22600.0,22800.0,23700.0,23400.0,,
205,2023-11-23,CEO,33006034.0,22000.0,23400.0,24300.0,22000.0,,24300.0
206,2023-11-24,CEO,23606344.0,19800.0,21800.0,22500.0,22400.0,19800.0,
207,2023-11-27,CEO,11521525.0,21600.0,22000.0,22600.0,21600.0,,
208,2023-11-28,CEO,17235960.0,20500.0,20500.0,21800.0,21600.0,,
209,2023-11-29,CEO,10159152.0,21600.0,21700.0,22100.0,21900.0,,
210,2023-11-30,CEO,12968147.0,21600.0,21900.0,22500.0,21600.0,,
211,2023-12-01,CEO,9652051.0,21300.0,21600.0,22000.0,21600.0,,
212,2023-12-04,CEO,21340997.0,21300.0,21300.0,23300.0,22900.0,,
213,2023-12-05,CEO,15539506.0,22700.0,22900.0,23300.0,22800.0,,
214,2023-12-06,CEO,20791558.0,22700.0,22700.0,23800.0,23600.0,,
215,2023-12-07,CEO,28360957.0,21300.0,21300.0,24400.0,23400.0,21300.0,
216,2023-12-08,CEO,16076922.0,23100.0,24800.0,24800.0,23400.0,,24800.0
217,2023-12-11,CEO,15399352.0,23100.0,23100.0,24000.0,23900.0,,
218,2023-12-12,CEO,12338828.0,21600.0,21700.0,24300.0,23800.0,,
219,2023-12-13,CEO,16126308.0,21600.0,23100.0,24200.0,23000.0,,
220,2023-12-14,CEO,14368276.0,22500.0,23000.0,23500.0,22600.0,,
221,2023-12-15,CEO,15058563.0,22000.0,22300.0,23100.0,22800.0,,
222,2023-12-18,CEO,6782798.0,20800.0,22600.0,23200.0,22300.0,,
223,2023-12-19,CEO,10176411.0,20100.0,21200.0,22600.0,22600.0,20100.0,
224,2023-12-20,CEO,4613358.0,22200.0,22200.0,22900.0,22500.0,,
225,2023-12-21,CEO,5572479.0,22200.0,22300.0,22700.0,22600.0,,
226,2023-12-22,CEO,9448885.0,22500.0,22700.0,23100.0,22700.0,,
227,2023-12-25,CEO,8829755.0,20500.0,21500.0,23300.0,23000.0,,23300.0
228,2023-12-26,CEO,6381009.0,20700.0,20700.0,23300.0,23000.0,,23300.0
229,2023-12-27,CEO,8090760.0,22800.0,23200.0,23200.0,22800.0,,
230,2023-12-28,CEO,7419177.0,22600.0,23000.0,23000.0,22600.0,,
231,2023-12-29,CEO,6518709.0,22600.0,22700.0,22900.0,22700.0,,
232,2024-01-02,CEO,6949166.0,22500.0,22900.0,23100.0,22500.0,,
233,2024-01-03,CEO,4681307.0,22400.0,22400.0,22800.0,22700.0,22400.0,
234,2024-01-04,CEO,17510437.0,22700.0,22800.0,23400.0,22800.0,,
235,2024-01-05,CEO,8607853.0,22800.0,22900.0,23200.0,22900.0,,
236,2024-01-08,CEO,15180351.0,22900.0,23000.0,23600.0,23300.0,,23600.0
237,2024-01-09,CEO,9843985.0,22900.0,23200.0,23400.0,22900.0,,
238,2024-01-10,CEO,13977672.0,22300.0,22700.0,23100.0,22400.0,,
239,2024-01-11,CEO,7479203.0,22200.0,22500.0,22800.0,22600.0,,
240,2024-01-12,CEO,16179428.0,21800.0,22400.0,22700.0,22000.0,,
241,2024-01-15,CEO,7223186.0,21500.0,21500.0,22300.0,21500.0,,
242,2024-01-16,CEO,5843595.0,20600.0,20600.0,21800.0,21800.0,20600.0,
243,2024-01-17,CEO,5242101.0,21800.0,22000.0,22200.0,21800.0,,
244,2024-01-18,CEO,6272216.0,21800.0,21900.0,22300.0,22000.0,,
245,2024-01-19,CEO,5681085.0,21700.0,22300.0,22300.0,21800.0,,
246,2024-01-22,CEO,7013691.0,21500.0,21800.0,22200.0,22200.0,,
247,2024-01-23,CEO,4322253.0,21800.0,22500.0,22500.0,21800.0,,22500.0
248,2024-01-24,CEO,4037889.0,21700.0,22100.0,22100.0,21700.0,,
249,2024-01-25,CEO,3817719.0,21600.0,21800.0,21900.0,21600.0,,
250,2024-01-26,CEO,4219559.0,21600.0,21600.0,22000.0,21700.0,,
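A quick way to inspect this sample file with pandas — a minimal sketch, assuming it is read from the committed path; the unnamed first column is just a positional index, and the sparse min/max columns appear to mark local swing lows and highs:

import pandas as pd

# Load the committed CEO price history; column 0 is only a row index.
df = pd.read_csv("data/data_ceo.csv", index_col=0, parse_dates=["Date"])

# `min` / `max` are mostly empty: they appear to flag swing lows / highs.
swing_lows = df.dropna(subset=["min"])[["Date", "min"]]
swing_highs = df.dropna(subset=["max"])[["Date", "max"]]
print(df.shape, len(swing_lows), len(swing_highs))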
logic/__init__.py
ADDED
File without changes
logic/api.py
ADDED
@@ -0,0 +1,93 @@
from fastapi import FastAPI
from pydantic import BaseModel
from typing import Optional
import uvicorn
from logic.nadaraya import NadarayaWatson
from logic.res_sup import ResSupCluster
from data_core.utils import prepare_data
from collections import Counter
import os
import traceback

app = FastAPI()


class Response(BaseModel):
    signal: dict = {}
    message: Optional[str] = None
    error: Optional[str] = None


@app.post("/nadaraya")
def nada(symbol: str = "CEO"):
    res = Response()
    try:
        data = prepare_data(symbol=symbol, date_past=365)
        Nada = NadarayaWatson(data=data)

        # buy below the lower envelope, sell above the upper one
        if data.iloc[-1]["open"] <= Nada.lower_bound:
            signal = "buy"
        elif data.iloc[-1]["open"] >= Nada.upper_bound:
            signal = "sell"
        else:
            signal = "hold"

        # update res
        res.signal["Nadaraya"] = signal

    except Exception:
        res.error = traceback.format_exc()

    return res


@app.post("/resup")
def ressup(symbol: str = "CEO"):
    res = Response()
    try:
        # Extract data
        print("symbol", symbol)
        data = prepare_data(symbol=symbol, date_past=365)

        # today's price
        today_price = data.iloc[-1]["open"]

        # fit cluster (keyword matches the ResSupCluster signature)
        ResSup = ResSupCluster(data=data,
                               n_clusters=3,
                               is_delete_cluster_outlier=True)

        print(data)
        # dominant cluster over the last trading week
        current_week_cluster = Counter(ResSup.data.iloc[-7:]["Clusters"])
        current_day_cluster = current_week_cluster.most_common()[0][0]

        support = ResSup.support[current_day_cluster]
        resistance = ResSup.resistance[current_day_cluster]

        # buy/sell ranges: the bottom/top 23.6% (Fibonacci) of the band
        buy_range = (support, support + (resistance - support) * 0.236)
        sell_range = (resistance - (resistance - support) * 0.236, resistance)

        if buy_range[0] <= today_price <= buy_range[1]:
            signal = "buy"
        elif sell_range[0] <= today_price <= sell_range[1]:
            signal = "sell"
        else:
            signal = "hold"

        res.signal["resup"] = signal
        res.message = f"price: {today_price}, resistance: {resistance}, support: {support}"
    except Exception:
        res.error = traceback.format_exc()

    return res


# if __name__ == "__main__":
#     uvicorn.run(app, host="127.0.0.1", port=8000, log_level="info")
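Both handlers take the ticker as a query parameter and default to "CEO". A minimal sketch of exercising them with FastAPI's TestClient — this assumes the repo's own data_core.utils.prepare_data dependency is importable, so it only runs inside the full project:

from fastapi.testclient import TestClient
from logic.api import app

client = TestClient(app)

# `symbol` is a plain query parameter on both POST routes.
resp = client.post("/nadaraya", params={"symbol": "CEO"})
print(resp.json())  # e.g. {"signal": {"Nadaraya": "hold"}, "message": None, "error": None}

resp = client.post("/resup", params={"symbol": "CEO"})
print(resp.json())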
logic/nadaraya.py
ADDED
@@ -0,0 +1,65 @@
import numpy as np
import matplotlib.pyplot as plt


class NadarayaWatson():
    def __init__(self, data=None) -> None:
        # Reconstructed to match the callers (logic/api.py and the example
        # below): compute the envelopes on the "open" column and keep the
        # latest upper/lower band for signal generation.
        if data is not None:
            self.prices = data["open"].values
            self.envelopes = self.nadaraya_watson_envelope_indicator(self.prices)
            self.upper_bound = self.envelopes[-1, 0]
            self.lower_bound = self.envelopes[-1, 1]

    @staticmethod
    def gauss_kernel(u):
        return np.exp(-0.5 * u**2) / np.sqrt(2 * np.pi)

    @staticmethod
    def nadaraya_watson_envelope_indicator(prices,
                                           bandwidth: int = 10,
                                           env_percent: int = 1):
        n = len(prices)
        envelopes = np.zeros((n, 3))  # columns: upper envelope, lower envelope, estimate

        for i in range(n):
            # Estimate conditional expectation using Nadaraya-Watson estimator
            numerator = \
                np.sum(NadarayaWatson.gauss_kernel(
                    (i - np.arange(n)) / bandwidth) * prices)
            denominator = \
                np.sum(NadarayaWatson.gauss_kernel(
                    (i - np.arange(n)) / bandwidth))
            conditional_expectation = \
                numerator / denominator if denominator != 0 else 0

            # Calculate upper and lower envelopes
            envelopes[i, 0] = \
                conditional_expectation + np.std(prices) * env_percent
            envelopes[i, 1] = \
                conditional_expectation - np.std(prices) * env_percent
            envelopes[i, 2] = \
                conditional_expectation
        return envelopes

    # @staticmethod
    # def get_viz():
    #     # Plotting
    #     plt.figure(figsize=(10, 6))
    #     plt.plot(self.prices, label='Prices', color='blue')
    #     plt.plot(self.envelopes[:, 0], label='Upper Envelope', color='red', linestyle='--')
    #     plt.plot(self.envelopes[:, 1], label='Lower Envelope', color='green', linestyle='--')
    #     plt.plot(self.envelopes[:, 2], label='Estimator', color='k', linestyle='--')
    #     plt.title('Nadaraya-Watson Envelope Indicator')
    #     plt.xlabel('Days')
    #     plt.ylabel('Price')
    #     plt.legend()
    #     plt.grid(True)
    #     plt.show()

# Example Usage
# if __name__ == "__main__":
#     data = vns.stock_historical_data(symbol="ACB", start_date="2022-01-31",
#                                      end_date='2024-01-31', resolution='1D',
#                                      type='stock', beautify=True, decor=False,
#                                      source='DNSE').iloc[-269:-68].reset_index()
#     nada = NadarayaWatson(data)
#     print(nada.envelopes[-1])

# %%
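The estimator is a Gaussian-kernel weighted average: each point i gets the mean of all prices weighted by K((i - j) / bandwidth), and the envelope sits one price standard deviation (scaled by env_percent) above and below it. A worked sketch on synthetic data:

import numpy as np
from logic.nadaraya import NadarayaWatson

# Synthetic series: an uptrend plus noise.
rng = np.random.default_rng(0)
prices = np.linspace(100.0, 120.0, 60) + rng.normal(0.0, 1.5, 60)

# Columns: 0 = upper envelope, 1 = lower envelope, 2 = smoothed estimate.
env = NadarayaWatson.nadaraya_watson_envelope_indicator(prices, bandwidth=10, env_percent=1)
print(env[-1])  # latest (upper, lower, estimate)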
logic/pattern_hammer.py
ADDED
@@ -0,0 +1,275 @@
#%%
import pandas as pd
import numpy as np
import vnstock as vns
import matplotlib.pyplot as plt
from datetime import datetime


def get_candle_plot(df: pd.DataFrame, buy_sell_df: pd.DataFrame) -> None:
    # "up" dataframe will store the df
    # when the closing stock price is greater
    # than or equal to the opening stock prices
    up = df[df.close >= df.open]

    # "down" dataframe will store the df
    # when the closing stock price is
    # lesser than the opening stock prices
    down = df[df.close < df.open]

    # Up candles are drawn in red
    col1 = 'red'

    # Down candles are drawn in green
    col2 = 'green'

    # Setting width of candlestick elements
    width = .3
    width2 = .03

    fig = plt.figure()

    # Plotting up prices of the stock
    plt.bar(up.time, up.close - up.open, width, bottom=up.open, color=col1)
    plt.bar(up.time, up.high - up.close, width2, bottom=up.close, color=col1)
    plt.bar(up.time, up.low - up.open, width2, bottom=up.open, color=col1)

    # Plotting down prices of the stock
    plt.bar(down.time, down.close - down.open, width, bottom=down.open, color=col2)
    plt.bar(down.time, down.high - down.open, width2, bottom=down.open, color=col2)
    plt.bar(down.time, down.low - down.close, width2, bottom=down.close, color=col2)

    # x and y labeling
    plt.xlabel("Index")
    plt.ylabel("Prices (VND)")

    # plot buy point
    for i in buy_sell_df.index:
        if buy_sell_df.signal.loc[i] == "buy":
            color = "red"
        else:
            color = "blue"

        plt.axvline(x=buy_sell_df.date.loc[i],
                    color=color,
                    linestyle='--')

    # displaying candlestick chart of stock data
    plt.show()


class CandleBlow():
    def __init__(self, df: pd.DataFrame,
                 is_viz: bool = True, use_close_price: bool = True,
                 slope_thres=55) -> None:
        # init variables
        self.df = df
        self.is_buy = False
        self.is_sell = False

        # ensure slope thres is positive
        slope_thres = abs(slope_thres)

        # detect hammer value
        self.is_hammer = self.detect_hammer(tail_ratio=2, tol_pct=0.1 / 100)
        self.is_reverse_hammer = self.detect_inverse_hammer(tail_ratio=2, tol_pct=0.1 / 100)

        # change point
        is_change_point = self.is_hammer or self.is_reverse_hammer

        if is_change_point:
            # get fit
            self.fit_function, self.fit_values = self.__get_fit(degree=1, use_close_price=use_close_price)

            # find derivative
            self.deriv_function, self.deriv_value = self.__get_derivative(self.fit_function)

            # identify buy point
            if abs(self.deriv_value[-1]) > slope_thres and is_viz:
                self.__get_viz(self.fit_values, self.deriv_value)

            is_buy = self.deriv_value[-1] < -slope_thres
            is_sell = self.deriv_value[-1] > slope_thres
            self.is_buy = is_change_point and is_buy
            self.is_sell = is_change_point and is_sell

    def __get_viz(self, fit_values=None, deriv_value=None) -> None:

        # "up" dataframe will store the self.df
        # when the closing stock price is greater
        # than or equal to the opening stock prices
        up = self.df[self.df.close >= self.df.open]

        # "down" dataframe will store the self.df
        # when the closing stock price is
        # lesser than the opening stock prices
        down = self.df[self.df.close < self.df.open]

        # Up candles are drawn in red
        col1 = 'red'

        # Down candles are drawn in green
        col2 = 'green'

        # Setting width of candlestick elements
        width = .3
        width2 = .03

        fig, axs = plt.subplots(2, 1, sharex=True)

        # Plotting up prices of the stock
        axs[0].bar(up.index, up.close - up.open, width, bottom=up.open, color=col1)
        axs[0].bar(up.index, up.high - up.close, width2, bottom=up.close, color=col1)
        axs[0].bar(up.index, up.low - up.open, width2, bottom=up.open, color=col1)

        # Plotting down prices of the stock
        axs[0].bar(down.index, down.close - down.open, width, bottom=down.open, color=col2)
        axs[0].bar(down.index, down.high - down.open, width2, bottom=down.open, color=col2)
        axs[0].bar(down.index, down.low - down.close, width2, bottom=down.close, color=col2)

        # x and y labeling
        axs[1].set_xlabel("Index")
        axs[0].set_ylabel("Prices (VND)")

        if len(fit_values) > 0:
            axs[0].plot(self.df.index, fit_values, label="Fit Line")

        if len(deriv_value) > 0:
            axs[1].plot(self.df.index, deriv_value, label="Derivative Line")

        axs[0].grid()
        axs[1].grid()

        # displaying candlestick chart of stock data
        fig.show()

    def __get_fit(self, degree: int = 5,
                  use_close_price: bool = True):
        """
        Get poly fit coef and value estimation for stock data:
        Inputs:
            self.df: pd.DataFrame, stock price from vnstock
            degree: int, how tight is the fit
            use_close_price: bool, use close or open price
        Outputs:
            fit_function: np.poly1d object function of the polyfit
            est_value: fit estimate values
        """

        if use_close_price:
            price = self.df.close
        else:
            price = self.df.open

        data_len = self.df.shape[0]

        # Perform polynomial fitting
        coefficients = np.polyfit(self.df.index, price, degree)

        # function
        fit_function = np.poly1d(coefficients)
        index = np.arange(data_len)
        est_value = fit_function(index)

        return fit_function, est_value

    def __get_derivative(self, fit_function: np.poly1d):
        """
        Find the derivative function of the fit function
        Inputs:
            fit_function: np.poly1d object of the fit function, produced by np.polyfit
        Outputs:
            deriv_function: np.poly1d object of the derivative function
            deriv_value: np.ndarray of the output values from the derivative function
        """
        data_len = self.df.shape[0]
        deriv_function = fit_function.deriv()
        deriv_value = deriv_function(np.arange(data_len))
        return deriv_function, deriv_value

    def detect_hammer(self,
                      tol_pct=0.1 / 100,
                      tail_ratio=2.5) -> bool:
        today_price = self.df.iloc[-1]
        close = today_price.close
        open = today_price.open
        high = today_price.high
        low = today_price.low

        # body must end within tol_pct of the high, with a long lower tail
        tol_price = high - tol_pct * high

        return ((close >= tol_price or open >= tol_price)
                and high - low >= tail_ratio * abs(close - open))

    def detect_inverse_hammer(self,
                              tol_pct=0.1 / 100,
                              tail_ratio=2.5) -> bool:
        today_price = self.df.iloc[-1]
        close = today_price.close
        open = today_price.open
        high = today_price.high
        low = today_price.low

        # body must end within tol_pct of the low, with a long upper tail
        tol_price = low + tol_pct * high

        return ((close <= tol_price or open <= tol_price)
                and high - low >= tail_ratio * abs(close - open))


#%%

# Sample Use
if __name__ == "__main__":
    df = vns.stock_historical_data(symbol="ACB", start_date="2023-01-15",
                                   end_date='2024-01-15', resolution='1D',
                                   type='stock', beautify=True, decor=False)

    buy_sell_df = pd.DataFrame({"date": [df.time.iloc[0]],
                                "hammer": [True],
                                "reverse_hammer": [True],
                                "signal": ["buy"]})

    # use trend from 1.5 trading weeks
    for i in range(7, df.shape[0]):
        train_data = df.iloc[i - 7: i]
        candle = CandleBlow(df=train_data,
                            use_close_price=False,
                            is_viz=False)

        if candle.is_buy:
            buy_sell_df.loc[buy_sell_df.shape[0]] = {"date": train_data.time.iloc[-1],
                                                     "hammer": candle.is_hammer,
                                                     "reverse_hammer": candle.is_reverse_hammer,
                                                     "signal": "buy"}

        if candle.is_sell:
            buy_sell_df.loc[buy_sell_df.shape[0]] = {"date": train_data.time.iloc[-1],
                                                     "hammer": candle.is_hammer,
                                                     "reverse_hammer": candle.is_reverse_hammer,
                                                     "signal": "sell"}

    # plot result
    buy_sell_df = buy_sell_df.iloc[1:]
    get_candle_plot(df, buy_sell_df)
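detect_hammer only inspects the last candle: the body must sit within tol_pct of the high, and the full high-low range must be at least tail_ratio times the body. A minimal sketch on seven hand-built candles ending in a hammer (slope_thres is lowered so the short downtrend counts as a steep slope):

import pandas as pd
from logic.pattern_hammer import CandleBlow

# Seven candles trending down, the last one a hammer:
# long lower wick, open/close pinned near the high.
df = pd.DataFrame({
    "open":  [110.0, 108.0, 106.0, 104.0, 102.0, 100.0, 99.8],
    "close": [108.0, 106.0, 104.0, 102.0, 100.0, 99.0, 100.0],
    "high":  [111.0, 109.0, 107.0, 105.0, 103.0, 101.0, 100.1],
    "low":   [107.0, 105.0, 103.0, 101.0, 99.0, 98.0, 96.0],
})

candle = CandleBlow(df=df, is_viz=False, use_close_price=False, slope_thres=1)
print(candle.is_hammer, candle.is_buy)  # True True on this toy series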
logic/res_sup.py
ADDED
@@ -0,0 +1,156 @@
import numpy as np
import pandas as pd
import matplotlib.pyplot as plt
from sklearn.cluster import AgglomerativeClustering
import vnstock as vns
from collections import Counter, defaultdict


class ResSupCluster():
    def __init__(self, data: pd.DataFrame, n_clusters: int = 2,
                 is_visualize: bool = False, is_delete_cluster_outlier: bool = True):
        self.n_clusters = n_clusters
        self.data = data

        # train cluster
        self.model, self.data["Clusters"] = \
            self.train_AgglomerativeClustering(self.data["open"], n_clusters)

        # Delete outliers to better fit data
        if is_delete_cluster_outlier:
            self.__delete_cluster_outlier(error_period=5)

        # get resistance and support
        self.__get_res_sup()

        if is_visualize:
            self.get_viz()

    def __get_res_sup(self):
        # Calculate per-run support/resistance levels
        self.level_dict = defaultdict(dict)

        prices = self.data["open"]
        clusters = self.data["Clusters"].copy()
        current_cluster = clusters.iloc[0]

        # find min/max of each continuous cluster run
        min_price = float("inf")
        max_price = 0

        start = 0
        total = 0

        for i in range(len(clusters)):
            if clusters.iloc[i] == current_cluster:
                max_price = max(max_price, prices.iloc[i])
                min_price = min(min_price, prices.iloc[i])
                total += prices.iloc[i]
            else:
                period = (self.data["time"].iloc[start], self.data["time"].iloc[i])
                self.level_dict[period]["cluster"] = current_cluster
                self.level_dict[period]["support"] = min_price
                self.level_dict[period]["resistance"] = max_price
                self.level_dict[period]["index"] = (start, i)
                # the run covers rows start..i-1, i.e. i - start prices
                self.level_dict[period]["average"] = total / (i - start)

                # reinit for the run that begins at row i
                start = i
                total = prices.iloc[i]
                current_cluster = clusters.iloc[i]
                min_price = prices.iloc[i]
                max_price = prices.iloc[i]

        period = (self.data["time"].iloc[start], self.data["time"].iloc[i])
        self.level_dict[period]["cluster"] = current_cluster
        self.level_dict[period]["support"] = min_price
        self.level_dict[period]["resistance"] = max_price
        # the final run covers rows start..i inclusive
        self.level_dict[period]["average"] = total / (i - start + 1)
        self.level_dict[period]["index"] = (start, i)

        # Aggregate levels per cluster id; logic/api.py reads these as
        # `support[cluster]` / `resistance[cluster]`.
        self.support = {}
        self.resistance = {}
        for levels in self.level_dict.values():
            c = levels["cluster"]
            self.support[c] = min(self.support.get(c, float("inf")), levels["support"])
            self.resistance[c] = max(self.resistance.get(c, float("-inf")), levels["resistance"])

    def __delete_cluster_outlier(self, error_period: int = 5):
        """
        Reassign short cluster runs (outliers) to the preceding cluster
        """
        left = 0
        right = 0
        counter = 0
        error = 10  # minimum run length; note the error_period argument is unused here
        clusters = list(self.data["Clusters"].copy())

        while right < len(self.data["Clusters"]):
            if self.data["Clusters"][left] == self.data["Clusters"][right]:
                counter += 1
            else:
                if counter < error:
                    clusters[left:right] = [clusters[left - 1]] * counter
                counter = 1
                left = right

            right += 1

        self.data["Clusters"] = clusters
        self.data = self.data.iloc[2:]

    def get_viz(self):

        level_dict = self.level_dict
        for period in level_dict.keys():
            period_dict = level_dict[period]
            plt.hlines(period_dict["support"], period_dict["index"][0], period_dict["index"][1],
                       colors="r")
            plt.hlines(period_dict["resistance"], period_dict["index"][0], period_dict["index"][1],
                       colors="b")
            plt.hlines(period_dict["average"], period_dict["index"][0], period_dict["index"][1],
                       colors="k")

        # legend entries (zero-length lines), matching the colors used above
        plt.hlines(period_dict["support"], 0, 0, colors="r", label="Support")
        plt.hlines(period_dict["resistance"], 0, 0, colors="b", label="Resistance")
        plt.hlines(period_dict["average"], 0, 0, colors="k", label="Average")

        # actual price
        plt.scatter(self.data.index, self.data["open"],
                    c=self.data["Clusters"], cmap='viridis', label='Clusters')

        plt.xlabel("Index")
        plt.ylabel("Prices")
        plt.grid()
        plt.legend()
        plt.show()

    def train_AgglomerativeClustering(
        self, data: pd.Series, n_clusters: int = 3
    ):
        """
        Fit Agglomerative Clustering
        Inputs:
            data: pd.Series of prices
            n_clusters: number of clusters, default = 3
        Outputs:
            model: the fitted AgglomerativeClustering model
            clusters: cluster label for each row, same order as `data`
        """
        prices = data.values.reshape(-1, 1)
        # Fit Agglomerative Clustering
        model = AgglomerativeClustering(n_clusters=n_clusters, linkage="ward")
        clusters = model.fit_predict(prices)
        return model, clusters


# Example Usage
if __name__ == "__main__":
    data = vns.stock_historical_data(symbol="CEO", start_date="2023-01-31",
                                     end_date='2024-01-31', resolution='1D',
                                     type='stock', beautify=True, decor=False,
                                     source='DNSE')

    Cluster = ResSupCluster(data=data,
                            is_visualize=True,
                            is_delete_cluster_outlier=True,
                            n_clusters=3)

    # cluster levels in self.level_dict
    print(Cluster.level_dict)
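A minimal sketch of the clustering on synthetic data with three obvious price regimes; the class expects "time" and "open" columns and writes a "Clusters" column back onto the frame:

import numpy as np
import pandas as pd
from logic.res_sup import ResSupCluster

# Three flat price regimes so the clusters are unambiguous.
rng = np.random.default_rng(1)
opens = np.concatenate([rng.normal(15.0, 0.3, 40),
                        rng.normal(20.0, 0.3, 40),
                        rng.normal(25.0, 0.3, 40)])
data = pd.DataFrame({"time": pd.date_range("2023-01-02", periods=120, freq="B"),
                     "open": opens})

cluster = ResSupCluster(data=data, n_clusters=3, is_delete_cluster_outlier=True)
for period, levels in cluster.level_dict.items():
    print(period, levels["support"], levels["resistance"])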
logic/utils.py
ADDED
File without changes
main.py
CHANGED
@@ -1 +1,24 @@
-
+from fastapi import FastAPI
+from starlette.middleware.cors import CORSMiddleware
+from routes.signal import router as SignalRouter
+from routes.confidence import router as ConfidenceRouter
+
+app = FastAPI(title="Stock logic API", docs_url="/docs", version="0.1.0")
+
+
+app.include_router(SignalRouter, tags=["Signal"], prefix="/signal")
+app.include_router(ConfidenceRouter, tags=["Confidence"], prefix="/confidence")
+
+
+@app.get("/", tags=["Root"])
+async def read_root():
+    return {"message": "Stock Signal API v1"}
+
+
+app.add_middleware(
+    CORSMiddleware,
+    allow_origins=["*"],
+    allow_credentials=True,
+    allow_methods=["POST", "PUT", "DELETE", "OPTIONS", "GET"],
+    allow_headers=["*"],
+)
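A minimal smoke test of the root route with TestClient — assuming the service-layer dependencies (pymongo, python-dotenv, requests) are installed, since importing main pulls in both routers:

from fastapi.testclient import TestClient
from main import app

client = TestClient(app)
resp = client.get("/")
print(resp.status_code, resp.json())  # 200 {'message': 'Stock Signal API v1'}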
models/__init__.py
ADDED
File without changes
models/ichimoku.py
ADDED
@@ -0,0 +1,5 @@
from pydantic import BaseModel


class IchimokuPayload(BaseModel):
    symbol: str
models/macd.py
ADDED
@@ -0,0 +1,5 @@
from pydantic import BaseModel


class MACDSignalPayload(BaseModel):
    symbol: str
models/resup.py
ADDED
@@ -0,0 +1,7 @@
from pydantic import BaseModel


class ResSupPayload(BaseModel):
    data: str
    feature_map: dict
    n_clusters: int = 3
models/rsi.py
ADDED
@@ -0,0 +1,10 @@
from pydantic import BaseModel


class RSISignalPayload(BaseModel):
    symbol: str
    periods: int = 14
    smooth_k: int = 3
    smooth_d: int = 3
    threshold_low: int = 20
    threshold_high: int = 80
models/summary.py
ADDED
@@ -0,0 +1,10 @@
from pydantic import BaseModel


class SummarySignalPayload(BaseModel):
    symbol: str
    periods: int = 14
    smooth_k: int = 3
    smooth_d: int = 3
    threshold_low: int = 20
    threshold_high: int = 80
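All of these payloads follow the same pattern: symbol is required and the indicator settings carry defaults, so a request body can be as small as one field. A quick sketch (the serialization call depends on the installed pydantic major version):

from models.rsi import RSISignalPayload

payload = RSISignalPayload(symbol="CEO")
# pydantic v1: .dict() / pydantic v2: .model_dump()
print(payload.dict())
# {'symbol': 'CEO', 'periods': 14, 'smooth_k': 3, 'smooth_d': 3,
#  'threshold_low': 20, 'threshold_high': 80}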
notebook.ipynb
ADDED
The diff for this file is too large to render.
See raw diff
routes/__init__.py
ADDED
File without changes
routes/confidence.py
ADDED
@@ -0,0 +1,23 @@
from models.resup import ResSupPayload
from fastapi import status, APIRouter
from services.resup import ResSupCluster
from typing import List
import pandas as pd
import json

router = APIRouter()


@router.post(
    "/resup",
    name="Get lines of Resistance and Support",
    status_code=status.HTTP_200_OK
)
async def get_resup_line(payload: ResSupPayload) -> List[float]:
    data = pd.DataFrame(json.loads(payload.data))
    cluster = ResSupCluster(
        data=data,
        feature_map=payload.feature_map,
        n_clusters=payload.n_clusters
    )
    return cluster.extract_all_lines()
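The route rebuilds a DataFrame from the JSON string in payload.data, so the caller serializes the frame first. A minimal sketch of the request; the feature_map contents here are a guess, since the keys it expects live in services/resup.py, which is outside this excerpt:

import pandas as pd
from fastapi.testclient import TestClient
from main import app

client = TestClient(app)
frame = pd.DataFrame({"open": [15.0, 15.2, 20.1, 20.3, 24.9, 25.2]})
payload = {
    "data": frame.to_json(),           # json.loads(...) on the server side
    "feature_map": {"price": "open"},  # hypothetical keys -- see services/resup.py
    "n_clusters": 3,
}
resp = client.post("/confidence/resup", json=payload)
print(resp.json())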
routes/signal.py
ADDED
@@ -0,0 +1,202 @@
from services.preprocess import Preprocess
from services.strategy import Strategy
from services.notification import Notification
from models.rsi import RSISignalPayload
from models.ichimoku import IchimokuPayload
from models.macd import MACDSignalPayload
from models.summary import SummarySignalPayload
from pymongo import MongoClient, ASCENDING, DESCENDING
from fastapi import status, APIRouter
from typing import Sequence
import requests
import pandas as pd
import os
import asyncio
from dotenv import load_dotenv
load_dotenv()

router = APIRouter()
DATA_URL = "https://intellistock-data-api.hf.space/data"


@router.get(
    "/daily_report",
    name="Get daily signal report",
    status_code=status.HTTP_200_OK
)
async def get_daily_report():
    uri = os.environ.get("MONGODB_URI")
    client = MongoClient(uri)
    database = client.get_database("data")
    rsi_collection = database.get_collection("rsi")
    price_collection = database.get_collection("price")
    macd_collection = database.get_collection("macd")
    signal_collection = database.get_collection("signal")
    rsi_records = list(rsi_collection.find(sort=[("_id", ASCENDING)]))
    price_records = list(price_collection.find(sort=[("_id", ASCENDING)]))
    macd_records = list(macd_collection.find(sort=[("_id", ASCENDING)]))
    rsi_df = pd.DataFrame(rsi_records).drop(columns=["_id"])
    macd_df = pd.DataFrame(macd_records).drop(columns=["_id"])
    lst_symbols = rsi_df.columns[1:]
    last_date = rsi_df["time"].iloc[-1]
    newest_record = \
        signal_collection.find_one(sort=[("_id", DESCENDING)])
    # Skip the update if the latest stored signal already covers last_date
    if newest_record["time"] == last_date:
        return {"message": "The data is up to date!"}
    result = Strategy.get_daily_report(price_records,
                                       rsi_df,
                                       macd_df,
                                       last_date,
                                       lst_symbols)
    today_df = result[result["time"] == last_date][["ticker",
                                                    "rsi_signal",
                                                    "macd_signal",
                                                    "ichimoku_signal_1",
                                                    "ichimoku_signal_3"]]
    today_dict = today_df.to_dict(orient="list")
    today_dict["time"] = last_date
    signal_collection.insert_one(today_dict)
    prefix_url = os.environ.get("PREFIX_URL")
    # Fire-and-forget: send the notification without blocking the response
    asyncio.create_task(
        Notification.daily_message(today_df, prefix_url, last_date))
    return {"message": "Updated signal"}


@router.get(
    "/daily_nadaraya",
    name="Get daily nadaraya signal",
    status_code=status.HTTP_200_OK
)
async def get_daily_nadaraya_report() -> dict:
    uri = os.environ.get("MONGODB_URI")
    client = MongoClient(uri)
    database = client.get_database("data")
    nada_collection = database.get_collection("nada")
    rsi_collection = database.get_collection("rsi")
    rsi_records = list(rsi_collection.find(sort=[("_id", ASCENDING)]))
    price_collection = database.get_collection("price")
    price_records = list(price_collection.find(sort=[("_id", ASCENDING)]))
    rsi_df = pd.DataFrame(rsi_records).drop(columns=["_id"])
    lst_symbols = rsi_df.columns[1:]
    last_date = rsi_df["time"].iloc[-1]
    newest_record = \
        nada_collection.find_one(sort=[("_id", DESCENDING)])
    if newest_record["time"] == last_date:
        return {"message": "The data is up to date!"}
    result = Strategy.get_daily_nadaraya(price_records,
                                         lst_symbols,
                                         last_date)
    result_dict = result.to_dict(orient="list")
    result_dict["time"] = last_date
    nada_collection.insert_one(result_dict)
    prefix_url = os.environ.get("PREFIX_URL")
    asyncio.create_task(
        Notification.notify_nadaraya(result, prefix_url, last_date))
    return {"message": "Updated nadaraya"}


@router.post(
    "/rsi",
    name="Get signal of RSI",
    status_code=status.HTTP_200_OK
)
async def get_rsi_signal(payload: RSISignalPayload) -> Sequence[dict]:
    rsi_dict = {"symbol": payload.symbol,
                "periods": payload.periods,
                "smooth_k": payload.smooth_k,
                "smooth_d": payload.smooth_d}
    try:
        response = requests.post(DATA_URL+"/get_rsi", json=rsi_dict)
        rsi_json = response.json()
        rsi_df = Preprocess.lst_dict_to_df(rsi_json)
        rsi_df = Strategy.rsi_strategy(
            rsi_df, threshold=(
                payload.threshold_low, payload.threshold_high
            )
        )
        rsi_signal_lst_dict = \
            rsi_df[["time", "rsi_signal"]].to_dict(orient="records")
    except Exception as e:
        return [{"message": f"Caught error {e}"}]
    return rsi_signal_lst_dict


@router.post(
    "/ichimoku",
    name="Get signal of Ichimoku",
    status_code=status.HTTP_200_OK
)
async def get_ichimoku_data(payload: IchimokuPayload) -> Sequence[dict]:
    ichimoku_dict = {
        "symbol": payload.symbol
    }
    try:
        response = requests.post(DATA_URL+"/get_ichimoku", json=ichimoku_dict)
        ichimoku_json = response.json()
        ichimoku_df = Preprocess.lst_dict_to_df(ichimoku_json)
        ichimoku_df = Strategy.ichimoku_strategy(ichimoku_df)
        ichimoku_signal_lst_dict = \
            ichimoku_df[["time",
                         "ichimoku_signal_1",
                         "ichimoku_signal_3"]].to_dict(orient="records")
    except Exception as e:
        return [{"message": f"Caught error {e}"}]
    return ichimoku_signal_lst_dict


@router.post(
    "/macd",
    name="Get signal of MACD",
    status_code=status.HTTP_200_OK
)
async def get_macd_data(payload: MACDSignalPayload) -> Sequence[dict]:
    macd_dict = {
        "symbol": payload.symbol
    }
    try:
        response = requests.post(DATA_URL+"/get_macd", json=macd_dict)
        macd_json = response.json()
        macd_df = Preprocess.lst_dict_to_df(macd_json)
        macd_df = Strategy.macd_strategy(macd_df)
        macd_signal_lst_dict = \
            macd_df[["time", "macd_signal"]].to_dict(orient="records")
    except Exception as e:
        return [{"message": f"Caught error {e}"}]
    return macd_signal_lst_dict


@router.post(
    "/summary",
    name="Get summary of signal",
    status_code=status.HTTP_200_OK
)
async def get_summary_data(payload: SummarySignalPayload) -> Sequence[dict]:
    summary_dict = {"symbol": payload.symbol,
                    "periods": payload.periods,
                    "smooth_k": payload.smooth_k,
                    "smooth_d": payload.smooth_d}
    try:
        response = requests.post(DATA_URL+"/get_rsi",
                                 json=summary_dict)
        rsi_json = response.json()
        rsi_df = Preprocess.lst_dict_to_df(rsi_json)
        rsi_df = Strategy.rsi_strategy(rsi_df)
        response = requests.post(DATA_URL+"/get_macd",
                                 json=summary_dict)
        macd_json = response.json()
        macd_df = Preprocess.lst_dict_to_df(macd_json)
        macd_df = Strategy.macd_strategy(macd_df)
        df = pd.merge(rsi_df, macd_df, how="left", on="time")
        response = requests.post(DATA_URL+"/get_ichimoku",
                                 json=summary_dict)
        ichimoku_json = response.json()
        ichimoku_df = Preprocess.lst_dict_to_df(ichimoku_json)
        ichimoku_df = Strategy.ichimoku_strategy(ichimoku_df)
        df = pd.merge(df, ichimoku_df, how="right", on="time")
        query_df = \
            df.query("ichimoku_signal_1 != -1 | ichimoku_signal_3 != -1")
        query_df = query_df.fillna(-1)
        summary_lst_dict = query_df.to_dict(orient="records")
    except Exception as e:
        return [{"message": f"Caught error {e}"}]
    return summary_lst_dict
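A minimal sketch of calling the RSI endpoint above with `requests`. The host and the `/signal` mount prefix are assumptions (the actual prefix is set where main.py includes the router), and the ticker is a placeholder; the payload fields mirror what `get_rsi_signal` reads from `RSISignalPayload`.

import requests

payload = {
    "symbol": "CEO",  # hypothetical ticker
    "periods": 14,
    "smooth_k": 3,
    "smooth_d": 3,
    "threshold_low": 20,
    "threshold_high": 80,
}
resp = requests.post("http://localhost:7860/signal/rsi", json=payload)
print(resp.json()[-5:])  # last five {"time", "rsi_signal"} records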
services/__init__.py
ADDED
File without changes
services/config.py
ADDED
@@ -0,0 +1,2 @@
DATE_FORMAT = "%Y-%m-%d"
QUERY_CONDITION = "(ichimoku_signal_1 > -1 | ichimoku_signal_3 > -1) | (rsi_signal > -1)"
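A small sketch of how `QUERY_CONDITION` behaves inside `pandas.DataFrame.query`: a row is kept when any of the three signals differs from the null value -1.

import pandas as pd
from services.config import QUERY_CONDITION

df = pd.DataFrame({
    "rsi_signal": [-1, 0, -1],
    "ichimoku_signal_1": [-1, -1, 1],
    "ichimoku_signal_3": [-1, -1, -1],
})
print(df.query(QUERY_CONDITION))  # keeps rows 1 and 2; row 0 is all -1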
services/credentials/__init__.py
ADDED
File without changes
services/credentials/credentials.json
ADDED
@@ -0,0 +1 @@
{"installed":{"client_id":"140817432133-dmqodgl7bsjfalptf8q5ri171ps5373v.apps.googleusercontent.com","project_id":"stockmining-412308","auth_uri":"https://accounts.google.com/o/oauth2/auth","token_uri":"https://oauth2.googleapis.com/token","auth_provider_x509_cert_url":"https://www.googleapis.com/oauth2/v1/certs","client_secret":"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","redirect_uris":["http://localhost"]}}
services/credentials/token.pickle
ADDED
@@ -0,0 +1,3 @@
version https://git-lfs.github.com/spec/v1
oid sha256:cacd0d63843ea18ee68d79c6a7ad1e2f79f6f3efafa2306f5e58b8d061d2fc43
size 929
services/google_calendar.py
ADDED
@@ -0,0 +1,172 @@
from gcsa.google_calendar import GoogleCalendar
from gcsa.event import Event
from datetime import datetime
from pathlib import Path
import pickle
import os.path
from google.auth.transport.requests import Request
from google_auth_oauthlib.flow import InstalledAppFlow

BASE_DIR = Path(__file__).parent


class GoogleCalendarAPI:
    def __init__(self) -> None:
        self.scopes = ['https://www.googleapis.com/auth/calendar']
        self.calendar = self.create_service()

    def create_service(self):
        """Create the calendar service from the credentials and token files.

        Note:
            If you use a new machine or browser, just delete `token.pickle`
            in the `credentials` folder, then run the application again:
            a sign-in pop-up will appear so you can log in. After logging
            in, a new `token.pickle` is created automatically in the
            `credentials` folder.

        Returns:
            GoogleCalendar: Calendar object
        """
        creds = None

        if os.path.exists(Path(BASE_DIR, 'credentials', 'token.pickle')):
            with open(
                Path(BASE_DIR, 'credentials', 'token.pickle'), 'rb'
            ) as token:
                creds = pickle.load(token)

        if not creds or not creds.valid:
            if creds and creds.expired and creds.refresh_token:
                creds.refresh(Request())
            else:
                flow = InstalledAppFlow.from_client_secrets_file(
                    Path(BASE_DIR, 'credentials', 'credentials.json'),
                    self.scopes
                )
                creds = flow.run_local_server(port=0)

            with open(
                Path(BASE_DIR, 'credentials', 'token.pickle'), 'wb'
            ) as token:
                pickle.dump(creds, token)

        try:
            calendar = GoogleCalendar('primary', credentials=creds)
        except Exception as e:
            print(e)
            return None

        return calendar

    def setup_event(
        self,
        event_name: str,
        event_start_time: str,
        event_end_time: str,
        attendees: list = [],
        event_description: str = None
    ):
        """Set up an event on the calendar from the given user information.

        Args:
            event_name (str): The title of the event; you can generate one
                yourself if the user did not provide it.
            event_start_time (str): The start time of the event the user
                wants to schedule. Format is "%Y-%m-%d %H:%M".
                Example:
                    "2001-01-02 13:55": 13:55 on 2 January 2001
            event_end_time (str): The end time of the event the user
                wants to schedule. Format is "%Y-%m-%d %H:%M".
                Example:
                    "2001-01-02 14:30": 14:30 on 2 January 2001
            attendees (list, optional): List of attendees' emails.
                Defaults to [].
            event_description (str, optional): The description of the event.
                Defaults to None.

        Returns:
            str: result status of this action
        """

        try:
            event = Event(
                event_name,
                start=datetime.strptime(event_start_time, "%Y-%m-%d %H:%M"),
                end=datetime.strptime(event_end_time, "%Y-%m-%d %H:%M"),
                attendees=attendees,
                description=event_description,
                default_reminders=True
            )
            self.calendar.add_event(event)
        except Exception as ex:
            return f"There is a problem while setting the calendar: {ex}"
        return "Set calendar successfully!"

    def get_events(
        self,
        year_range: list,
        month_range: list,
        day_range: list,
        hour_range: list
    ) -> list:
        """Get all the events in the user's calendar within the
        given time range.

        Args:
            year_range (list): Year range to query.
                Example:
                    [2023, 2025]: from 2023 to 2025
            month_range (list): Month range to query.
                Example:
                    [1, 5]: from January to May
            day_range (list): Day range to query.
                Example:
                    [1, 31]: from the 1st to the 31st
            hour_range (list): Hour range to query; to query within the
                same date, the hour range must be [0, 23].
                Example:
                    [0, 23]: from 0:00 to 23:00

        Returns:
            list: events found in the user's calendar in the given range
        """
        start_date = datetime(
            year=year_range[0],
            month=month_range[0],
            day=day_range[0],
            hour=hour_range[0]
        )
        end_date = datetime(
            year=year_range[1],
            month=month_range[1],
            day=day_range[1],
            hour=hour_range[1]
        )

        return [event for event in self.calendar.get_events(
            time_min=start_date, time_max=end_date
        )]

    def get_events_by_date(
        self,
        start_date: datetime,
        end_date: datetime
    ) -> list:
        return [event for event in self.calendar.get_events(
            time_min=start_date, time_max=end_date
        )]


if __name__ == "__main__":
    calendar = GoogleCalendarAPI()
    # # Sample set up event on calendar
    # calendar.setup_event(
    #     event_name="This is a sample buy signal",
    #     event_start_time="2024-02-17 14:33",
    #     event_end_time="2024-02-17 15:30",
    #     attendees=['ndthinh0201@gmail.com', 'camphong.work@gmail.com'],
    #     event_description="""
    #     A W pattern was found in the past 7 days and today shows a
    #     significant rise; better to buy.
    #     """
    # )
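A minimal usage sketch of the class above, assuming a valid credentials.json/token.pickle pair sits in services/credentials; the event details and email are placeholders.

from services.google_calendar import GoogleCalendarAPI

api = GoogleCalendarAPI()
result = api.setup_event(
    event_name="CEO buy signal",
    event_start_time="2024-02-19 09:00",
    event_end_time="2024-02-19 09:30",
    attendees=["user@example.com"],
    event_description="Stoch RSI crossed below 20",
)
print(result)
# All events in February 2024, any hour of the day
print(api.get_events([2024, 2024], [2, 2], [1, 29], [0, 23]))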
services/indicator.py
ADDED
@@ -0,0 +1,97 @@
import pandas as pd
import numpy as np
from datetime import datetime, timedelta
from .config import DATE_FORMAT


class Indicator:
    def __init__(self) -> None:
        pass

    @staticmethod
    def get_all_rsi(rsi_df: pd.DataFrame,
                    periods: int = 14,
                    smooth_k: int = 3,
                    smooth_d: int = 3) -> pd.DataFrame:
        rsi_df["stoch_rsi"] = \
            Indicator.stoch_rsi(rsi_df["rsi"], periods)
        rsi_df["stoch_rsi_smooth_k"] = \
            Indicator.stoch_rsi_smooth_k(rsi_df["stoch_rsi"], smooth_k)
        rsi_df["stoch_rsi_smooth_d"] = Indicator.stoch_rsi_smooth_d(
            rsi_df["stoch_rsi_smooth_k"], smooth_d
        )
        return rsi_df

    @staticmethod
    def stoch_rsi(rsi: pd.Series, periods: int = 14) -> pd.Series:
        # RSI's position inside its rolling [min, max] range, scaled 0-100
        ma, mi = (
            rsi.rolling(window=periods).max(),
            rsi.rolling(window=periods).min(),
        )
        return (rsi - mi) * 100 / (ma - mi)

    @staticmethod
    def stoch_rsi_smooth_k(stoch_rsi: pd.Series, k: int = 3) -> pd.Series:
        return stoch_rsi.rolling(window=k).mean()

    @staticmethod
    def stoch_rsi_smooth_d(stoch_rsi_k: pd.Series, d: int = 3) -> pd.Series:
        return stoch_rsi_k.rolling(window=d).mean()

    @staticmethod
    def generate_dates(start_date: datetime,
                       num_days: int,
                       format=DATE_FORMAT):
        # Generate the next num_days weekday dates after start_date
        lst_date = []
        current_date = start_date
        for _ in range(num_days):
            current_date += timedelta(days=1)
            while current_date.weekday() >= 5:
                current_date += timedelta(days=1)
            lst_date.append(current_date.strftime(format))
        return lst_date

    @staticmethod
    def get_ichimoku_cloud(
        df: pd.DataFrame,
        conversion_period=9,
        base_period=26,
        span_b_period=52,
        displacement=26,
    ) -> pd.DataFrame:
        space_displacement = np.full(displacement, np.nan)
        tenkan_sen = (
            df["high"].rolling(window=conversion_period).max()
            + df["low"].rolling(window=conversion_period).min()
        ) / 2
        kijun_sen = (
            df["high"].rolling(window=base_period).max()
            + df["low"].rolling(window=base_period).min()
        ) / 2
        senkou_span_a = (tenkan_sen + kijun_sen) / 2
        senkou_span_b = (
            df["high"].rolling(window=span_b_period).max()
            + df["low"].rolling(window=span_b_period).min()
        ) / 2
        chikou_span = df["close"].shift(-displacement)

        # Extend the time axis so the spans can be plotted `displacement`
        # trading days into the future
        last_date = datetime.strptime(df["time"].iloc[-1], DATE_FORMAT)
        lst_date = Indicator.generate_dates(last_date, displacement)
        time = np.concatenate((df["time"], lst_date))
        tenkan_sen = np.concatenate((tenkan_sen, space_displacement))
        kijun_sen = np.concatenate((kijun_sen, space_displacement))
        senkou_span_a = np.concatenate((space_displacement, senkou_span_a))
        senkou_span_b = np.concatenate((space_displacement, senkou_span_b))
        chikou_span = np.concatenate((chikou_span, space_displacement))

        data_dict = {
            "time": time,
            "tenkan_sen": tenkan_sen,
            "kijun_sen": kijun_sen,
            "senkou_span_a": senkou_span_a,
            "senkou_span_b": senkou_span_b,
            "chikou_span": chikou_span,
            "tenkan_kijun": tenkan_sen - kijun_sen,
            "kumo_cloud": senkou_span_a - senkou_span_b
        }
        return pd.DataFrame(data_dict)
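A short worked example of the stochastic RSI formula above on a hand-made RSI series.

import pandas as pd
from services.indicator import Indicator

rsi = pd.Series([55, 60, 70, 65, 40, 30, 35, 50, 75, 80])
stoch = Indicator.stoch_rsi(rsi, periods=5)
# At the last bar the rolling 5-bar range is [30, 80],
# so the value is (80 - 30) * 100 / (80 - 30) = 100.
print(stoch.round(1).tolist())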
services/notification.py
ADDED
@@ -0,0 +1,50 @@
import pandas as pd
import requests
from urllib.parse import quote


class Notification:
    def __init__(self) -> None:
        pass

    @staticmethod
    def generate_today_description(df: pd.DataFrame,
                                   today: str, name: str) -> str:
        # Render the signal table as fixed-width, pipe-separated text
        df = df.astype(str)
        result = [f"{today} {name} Signal"]
        result = result + ['| '.join([f'{s:<6}' for s in df.columns])]
        for value in df.values:
            value = [f'{s:<6}' for s in value]
            result.append('| '.join(value))
        return '\n'.join(result)

    @staticmethod
    async def daily_message(df: pd.DataFrame, prefix: str, today: str) -> None:
        signal_mapping = {-1: "-", 0: "Buy", 1: "Sell"}
        df["rsi_signal"] = df["rsi_signal"].map(signal_mapping)
        df["macd_signal"] = df["macd_signal"].map(signal_mapping)
        df["ichimoku_signal_1"] = \
            df["ichimoku_signal_1"].map(signal_mapping)
        df["ichimoku_signal_3"] = \
            df["ichimoku_signal_3"].map(signal_mapping)
        df.columns = ["ticker", "rsi", "macd", "ichi1", "ichi3"]
        message = Notification.generate_today_description(df, today, "Daily")
        Notification.send_msg(prefix, message)
        print("Sent daily message")

    @staticmethod
    async def notify_nadaraya(df: pd.DataFrame,
                              prefix: str, today: str) -> None:
        signal_mapping = {-1: "-", 0: "Buy", 1: "Sell"}
        # Today signal
        df["open_signal"] = df["open_signal"].map(signal_mapping)
        df["close_signal"] = df["close_signal"].map(signal_mapping)
        df = df[["ticker", "open_signal", "close_signal"]]
        df.columns = ["ticker", "open", "close"]
        message = Notification.generate_today_description(df,
                                                          today, "Nadaraya")
        Notification.send_msg(prefix, message)
        print("Sent nadaraya message")

    @staticmethod
    def send_msg(prefix: str, message: str):
        # URL-encode the message so spaces and newlines do not break the URL
        url = prefix + quote(message)
        requests.get(url)
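A quick sketch of the message format `generate_today_description` produces; tickers and signals below are placeholders, and every cell is left-padded to six characters.

import pandas as pd
from services.notification import Notification

df = pd.DataFrame({"ticker": ["CEO", "HPG"],
                   "rsi": ["Buy", "-"],
                   "macd": ["-", "Sell"]})
print(Notification.generate_today_description(df, "2024-02-19", "Daily"))
# 2024-02-19 Daily Signal
# ticker| rsi   | macd
# CEO   | Buy   | -
# HPG   | -     | Sell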
services/preprocess.py
ADDED
@@ -0,0 +1,9 @@
import pandas as pd


class Preprocess:
    def __init__(self) -> None:
        pass

    @staticmethod
    def lst_dict_to_df(lst_dict) -> pd.DataFrame:
        return pd.DataFrame.from_records(lst_dict)
services/resup.py
ADDED
@@ -0,0 +1,153 @@
import numpy as np
import pandas as pd
import matplotlib.pyplot as plt
from sklearn.cluster import AgglomerativeClustering

# import vnstock as vns


class ResSupCluster:
    def __init__(
        self,
        data: pd.DataFrame,
        feature_map: dict,
        n_clusters: int = 2,
        is_visualize: bool = False,
        is_delete_outlier: bool = True,
    ):
        self.n_clusters = n_clusters
        self.data = data
        self.feature_map = feature_map
        # Delete outliers to better fit the data
        if is_delete_outlier:
            self.delete_outlier()

        # train cluster
        self.model, self.data["Clusters"] = self.train_AgglomerativeClustering(
            self.data[feature_map["open"]], n_clusters
        )

        # get resistance and support
        self.get_res_sup()

        if is_visualize:
            self.get_viz()

    def get_res_sup(self):
        # Calculate cluster centroids
        self.support = []
        self.resistance = []
        self.average = []

        prices = self.data[self.feature_map["open"]]
        clusters = self.data["Clusters"]
        for cluster_label in range(self.n_clusters):
            # Get prices in the same cluster
            cluster_points = prices[clusters == cluster_label]

            # Get support, resistance and average
            self.resistance.append(max(cluster_points))
            self.support.append(min(cluster_points))
            self.average.append(np.mean(cluster_points))

    def delete_outlier(self):
        """
        Delete outliers that are 2 std away from the mean
        """
        # to make sure we don't drop today's data
        data = self.data.iloc[:-1]

        # extract mean and std
        mean = np.mean(data[self.feature_map["open"]])
        std = np.std(data[self.feature_map["open"]])

        # drop outliers
        data = data[
            (mean - 2 * std < data[self.feature_map["open"]])
            & (data[self.feature_map["open"]] < mean + 2 * std)
        ]

        # update self.data, appending today's row back with .loc
        # (plain indexing would create a new column instead of a row)
        today_data = self.data.iloc[-1].copy()
        self.data = data
        self.data.loc[len(data.index)] = today_data

    def get_viz(self):
        # plt.plot(train_data.index, train_data["open"])
        plt.scatter(
            self.data.index,
            self.data[self.feature_map["open"]],
            c=self.data["Clusters"],
            cmap="viridis",
            label="Clusters",
        )
        plt.hlines(
            self.support,
            self.data.index[0],
            self.data.index[-1],
            colors="r",
            label="Support",
        )
        plt.hlines(
            self.resistance,
            self.data.index[0],
            self.data.index[-1],
            colors="b",
            label="Resistance",
        )
        plt.hlines(
            self.average,
            self.data.index[0],
            self.data.index[-1],
            colors="k",
            label="Average",
        )
        plt.plot(
            self.data.index,
            self.data[self.feature_map["open"]],
            label="Clusters",
        )
        plt.grid()
        plt.legend()
        plt.show()

    def train_AgglomerativeClustering(
        self, data: pd.Series, n_clusters: int = 3
    ) -> tuple:
        """
        Fit Agglomerative Clustering
        Inputs:
            data: price series to cluster
            n_clusters: number of clusters, default = 3
        Outputs:
            (model, clusters): the fitted model and the cluster label
            for each row, in the same order as the input
        """
        prices = data.values.reshape(-1, 1)
        # Fit Agglomerative Clustering
        model = AgglomerativeClustering(n_clusters=n_clusters, linkage="ward")
        clusters = model.fit_predict(prices)
        return model, clusters

    def extract_all_lines(self):
        return {
            'support': self.support,
            'resistance': self.resistance,
            'average': self.average
        }


# Example Usage
# if __name__ == "__main__":
#     data = vns.stock_historical_data(
#         symbol="CEO",
#         start_date="2023-01-31",
#         end_date="2024-01-31",
#         resolution="1D",
#         type="stock",
#         beautify=True,
#         decor=False,
#         source="DNSE",
#     )
#     Cluster = ResSupCluster(
#         data=data, feature_map={"open": "open"},
#         is_visualize=True, is_delete_outlier=True, n_clusters=3
#     )
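A runnable sketch of ResSupCluster on synthetic prices instead of vnstock data; `feature_map` simply names the price column to cluster on, and the two normal distributions stand in for two price regimes.

import numpy as np
import pandas as pd
from services.resup import ResSupCluster

rng = np.random.default_rng(0)
prices = np.concatenate([rng.normal(20, 0.5, 60),
                         rng.normal(25, 0.5, 60)])
df = pd.DataFrame({"open": prices})
cluster = ResSupCluster(data=df, feature_map={"open": "open"},
                        n_clusters=2)
# One support/resistance/average level per cluster
print(cluster.extract_all_lines())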
services/strategy.py
ADDED
@@ -0,0 +1,167 @@
import pandas as pd
import numpy as np
from .indicator import Indicator
from .config import QUERY_CONDITION
from logic.nadaraya import NadarayaWatson


class Strategy:
    """
    Signal: -1 is null, 0 is buy and 1 is sell
    """
    def __init__(self) -> None:
        pass

    @staticmethod
    def rsi_strategy(rsi_df, threshold=(20, 80)) -> pd.DataFrame:
        low, high = threshold[0], threshold[1]
        signal_values = []
        loop_df = rsi_df[["stoch_rsi",
                          "stoch_rsi_smooth_k",
                          "stoch_rsi_smooth_d"]]
        for stoch, smooth_k, smooth_d in loop_df.values:
            if stoch <= low and \
                    smooth_k <= low and \
                    smooth_d <= low:
                signal_values.append(0)
            elif stoch >= high and \
                    smooth_k >= high and \
                    smooth_d >= high:
                signal_values.append(1)
            else:
                signal_values.append(-1)
        rsi_df["rsi_signal"] = signal_values
        return rsi_df

    @staticmethod
    def ichimoku_strategy(df) -> pd.DataFrame:
        signal_values_1, signal_values_3 = [-1], [-1]
        for index in range(1, df.shape[0]):
            prev = df.iloc[index - 1]
            signal_1, signal_3 = -1, -1
            # Signal 1: tenkan-sen/kijun-sen cross (sign flip of their
            # difference), confirmed by position relative to the cloud
            if (not (np.isnan(df["tenkan_kijun"].iloc[index])) and
                    df["tenkan_kijun"].iloc[index - 1: index + 1].prod() < 0):
                if prev["tenkan_kijun"] < 0 and prev["tenkan_sen"] < min(
                    prev["senkou_span_a"], prev["senkou_span_b"]
                ):
                    signal_1 = 0
                elif prev["tenkan_kijun"] > 0 and prev["tenkan_sen"] > max(
                    prev["senkou_span_a"], prev["senkou_span_b"]
                ):
                    signal_1 = 1
            signal_values_1.append(signal_1)
            # Signal 3: senkou span A/B cross (kumo twist)
            if (not (np.isnan(df["kumo_cloud"].iloc[index])) and
                    df["kumo_cloud"].iloc[index - 1: index + 1].prod() < 0):
                if prev["kumo_cloud"] < 0:
                    signal_3 = 0
                elif prev["kumo_cloud"] > 0:
                    signal_3 = 1
            signal_values_3.append(signal_3)
        df["ichimoku_signal_1"] = signal_values_1
        df["ichimoku_signal_3"] = signal_values_3
        return df

    @staticmethod
    def macd_strategy(macd_df) -> pd.DataFrame:
        signal_values = []
        for macd in macd_df["macd"]:
            if macd < 0:
                signal_values.append(0)
            elif macd > 0:
                signal_values.append(1)
            else:
                signal_values.append(-1)
        macd_df["macd_signal"] = signal_values
        return macd_df

    @staticmethod
    def get_daily_report(price_records,
                         rsi_df,
                         macd_df,
                         last_date,
                         lst_symbols):
        try:
            result = []
            for index, symbol in enumerate(lst_symbols):
                symbol_rsi_df = rsi_df[["time", symbol]]
                symbol_macd_df = macd_df[["time", symbol]]
                price_values = []
                for price_record in price_records:
                    tmp_record = price_record["value"][index]
                    tmp_record["time"] = price_record["time"]
                    price_values.append(tmp_record)
                symbol_price_df = pd.DataFrame(price_values)
                symbol_rsi_df = \
                    symbol_rsi_df[["time", symbol]].rename(
                        columns={symbol: "rsi"})
                symbol_rsi_df = Indicator.get_all_rsi(symbol_rsi_df)
                symbol_rsi_df = Strategy.rsi_strategy(symbol_rsi_df)
                symbol_rsi_df = symbol_rsi_df.iloc[-1:]

                symbol_macd_df = \
                    symbol_macd_df[["time", symbol]].rename(
                        columns={symbol: "macd"})
                symbol_macd_df = Strategy.macd_strategy(symbol_macd_df)
                symbol_macd_df = symbol_macd_df.iloc[-1:]

                df = pd.merge(symbol_rsi_df,
                              symbol_macd_df,
                              how="inner",
                              on="time")
                symbol_ichi_df = Indicator.get_ichimoku_cloud(symbol_price_df)
                symbol_ichi_df = Strategy.ichimoku_strategy(symbol_ichi_df)
                symbol_ichi_df = symbol_ichi_df[["time",
                                                 "ichimoku_signal_1",
                                                 "ichimoku_signal_3"]]
                symbol_ichi_df["ticker"] = symbol
                df = pd.merge(df, symbol_ichi_df, how="right", on="time")
                query_df = df[df["time"] >= last_date]
                query_df = query_df.query(QUERY_CONDITION)
                query_df = query_df.fillna(-1)
                result.extend(query_df.to_dict(orient="records"))
            result_df = pd.DataFrame(result)
            return result_df
        except Exception as e:
            return {"message": f"Caught error {e}"}

    @staticmethod
    def nadaraya_strategy(symbol, price, envelope) -> list:
        # Sell when price touches the upper band, buy at the lower band
        open_signal, close_signal = -1, -1
        if price["open"] >= envelope[0]:
            open_signal = 1
        elif price["open"] <= envelope[1]:
            open_signal = 0
        if price["close"] >= envelope[0]:
            close_signal = 1
        elif price["close"] <= envelope[1]:
            close_signal = 0
        return [{"ticker": symbol,
                 "open_signal": open_signal,
                 "close_signal": close_signal}]

    @staticmethod
    def get_daily_nadaraya(price_records,
                           lst_symbols,
                           last_date):
        try:
            result = []
            for index, symbol in enumerate(lst_symbols):
                price_values = []
                for price_record in price_records:
                    tmp_record = price_record["value"][index]
                    tmp_record["time"] = price_record["time"]
                    price_values.append(tmp_record)
                symbol_price_df = pd.DataFrame(price_values).iloc[-201:]
                nada_envelopes = \
                    NadarayaWatson.nadaraya_watson_envelope_indicator(
                        symbol_price_df["close"])
                current_price = symbol_price_df.iloc[-1]
                signal = Strategy.nadaraya_strategy(symbol,
                                                    current_price,
                                                    nada_envelopes[-1])
                result.extend(signal)
            result_df = pd.DataFrame(result)
            result_df = result_df.query("open_signal > -1 | close_signal > -1")
            return result_df
        except Exception as e:
            return {"message": f"Caught error {e}"}
setup.py
ADDED
@@ -0,0 +1,3 @@
from setuptools import setup, find_packages

setup(name="main-api", version='1.0', packages=find_packages())