NKeistyle committed on
Commit
0a6843b
·
1 Parent(s): 385bd31

update app.py get_fish_price.py

Browse files
Files changed (3) hide show
  1. app.py +11 -2
  2. get_fish_price.py +173 -0
  3. notebook/update_data.ipynb +642 -0
app.py CHANGED
@@ -3,17 +3,26 @@ import numpy as np
3
  import pandas as pd
4
  import plotly.graph_objects as go
5
  import gradio as gr
 
6
  from model import SarimaModel
7
 
8
  df_hamachi = pd.read_csv(r'./data/hamachi_price.csv', encoding='utf_8_sig')
9
  df_hamachi["date"] = df_hamachi["date"].apply(lambda x: pd.to_datetime(str(x)))
10
 
11
  df_hamachi = df_hamachi.set_index(df_hamachi["date"])
12
- train = df_hamachi["quantity"]
 
 
 
 
 
 
 
 
13
 
 
14
 
15
  def graph(forecast_range):
16
- today = dt.date.today()
17
  year = today.year
18
  sarima = SarimaModel(forecast_range=int(forecast_range))
19
  sarima_fit = sarima.fit(train)
 
3
  import pandas as pd
4
  import plotly.graph_objects as go
5
  import gradio as gr
6
+ import get_fish_price
7
  from model import SarimaModel
8
 
9
  df_hamachi = pd.read_csv(r'./data/hamachi_price.csv', encoding='utf_8_sig')
10
  df_hamachi["date"] = df_hamachi["date"].apply(lambda x: pd.to_datetime(str(x)))
11
 
12
  df_hamachi = df_hamachi.set_index(df_hamachi["date"])
13
today = dt.date.today()

# Refresh the cached CSV when market days newer than the last stored row
# exist, then (re)build the training series from the combined data.
if df_hamachi['date'].max().date() < today:
    # Fetch only the missing days: day after the newest stored date -> today.
    start_date = df_hamachi['date'].max().date() + dt.timedelta(days=1)
    temp_df = get_fish_price.get_fish_price_data(start_date=start_date, end_date=today)
    temp_df["date"] = temp_df["date"].apply(lambda x: pd.to_datetime(str(x)))
    temp_df = temp_df.set_index(temp_df["date"])
    df_hamachi = pd.concat([df_hamachi, temp_df])
    # BUG FIX: persist to the same relative path the data was loaded from.
    # The original wrote to the absolute path r'/data/hamachi_price.csv',
    # so the refreshed rows never reached the './data' CSV read at startup.
    df_hamachi.to_csv(r'./data/hamachi_price.csv', encoding='utf_8_sig')

train = df_hamachi["quantity"]
24
 
25
  def graph(forecast_range):
 
26
  year = today.year
27
  sarima = SarimaModel(forecast_range=int(forecast_range))
28
  sarima_fit = sarima.fit(train)
get_fish_price.py ADDED
@@ -0,0 +1,173 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ import codecs
2
+ import io
3
+ import random
4
+ import requests
5
+ import time
6
+ from datetime import date, timedelta
7
+ from tqdm import tqdm
8
+ from typing import Generator, Tuple
9
+
10
+ import numpy as np
11
+ import pandas as pd
12
+
13
+
14
def date_range(
    start: date, stop: date, step: timedelta = timedelta(1)
) -> Generator[date, None, None]:
    """Yield dates from *start* (inclusive) up to *stop* (exclusive).

    Successive values are *step* apart; an empty range yields nothing.
    """
    cursor = start
    while cursor < stop:
        yield cursor
        cursor = cursor + step
22
+
23
+
24
def get_url(download_date: date) -> Tuple[str, str]:
    """Build the daily seafood-report CSV URL for the Tokyo wholesale market.

    Returns a ``(url, yyyymmdd)`` tuple for the given date.
    """
    day_str = download_date.strftime("%Y%m%d")
    # The month segment of the path is just the first six digits (YYYYMM).
    month_str = day_str[:6]
    url = (
        "https://www.shijou-nippo.metro.tokyo.lg.jp"
        f"/SN/{month_str}/{day_str}/Sui/Sui_K1.csv"
    )
    return url, day_str
32
+
33
+
34
def content_wrap(content):
    """Skip any preamble lines before the CSV header row.

    Decodes the Shift-JIS payload line by line and returns a StringIO that
    starts at the first line containing "品名" (item name); if no such line
    exists, the returned buffer is empty.
    """
    lines = [codecs.decode(raw, "shift-jis") for raw in io.BytesIO(content)]
    for pos, text in enumerate(lines):
        if "品名" in text:
            return io.StringIO("".join(lines[pos:]))
    return io.StringIO("")
49
+
50
+
51
def insert_data(data, day, low_price, center_price, high_price, quantity):
    """Append one day's record to the column-wise accumulator dict."""
    record = {
        "date": day,
        "low_price": low_price,
        "center_price": center_price,
        "high_price": high_price,
        "quantity": quantity,
    }
    for column, value in record.items():
        data[column].append(value)
58
+
59
+
60
def to_numeric(x):
    """Coerce a string to ``float``; pass any non-string value through unchanged."""
    if not isinstance(x, str):
        return x
    return float(x)
66
+
67
+
68
def get_fish_price_data(start_date: date, end_date: date) -> pd.core.frame.DataFrame:
    """
    Scrape daily hamachi (yellowtail) price data from the Tokyo wholesale
    market daily-report site, one CSV request per day.

    :param start_date: first date to fetch (inclusive)
    :param end_date: upper bound (exclusive, per ``date_range``)
    :return: DataFrame with date / low_price / center_price / high_price /
             quantity columns, one row per day in the range
    """
    # Column-wise accumulator filled by insert_data(), one entry per day.
    data = {
        "date": [],
        "low_price": [],
        "center_price": [],
        "high_price": [],
        "quantity": [],
    }
    iterator = tqdm(
        date_range(start_date, end_date), total=(end_date - start_date).days
    )

    for download_date in iterator:
        url, day = get_url(download_date)
        iterator.set_description(day)
        response = requests.get(url)

        # No report published for this day (404, e.g. market holiday):
        # record NaN prices with zero quantity and move on.
        if response.status_code == 404:
            insert_data(data, day, np.nan, np.nan, np.nan, 0)
            continue
        assert (
            response.status_code == 200
        ), f"Unexpected HTTP response. Please check the website {url}."

        df = pd.read_csv(content_wrap(response.content))

        # Normalize missing values: the site marks them with ASCII "-" or
        # full-width minus "−"; replace both with NaN.
        # Columns are low / middle / high price in yen.
        price_cols = ["安値(円)", "中値(円)", "高値(円)"]
        for c in price_cols:
            df[c].mask(df[c] == "-", np.nan, inplace=True)
            df[c].mask(df[c] == "−", np.nan, inplace=True)
        # Same normalization for the wholesale-quantity column ("卸売数量").
        df["卸売数量"].mask(df["卸売数量"] == "-", np.nan, inplace=True)
        df["卸売数量"].mask(df["卸売数量"] == "−", np.nan, inplace=True)


        # Keep only rows whose item-name column ("品名") is "はまち" (hamachi).
        # NOTE(review): the variable is named df_aji ("aji" = horse mackerel)
        # but the filter is for hamachi — the name is a leftover, not a bug.
        df_aji = df.loc[df["品名"] == "はまち", ["卸売数量"] + price_cols]

        # No hamachi sold that day -> treat as missing (NaN prices, 0 quantity).
        if len(df_aji) == 0:
            insert_data(data, day, np.nan, np.nan, np.nan, 0)
            continue

        isnan = lambda x: isinstance(x, float) and np.isnan(x)
        # Collect per-row (per price band) records for the day before aggregating.
        low_prices = []
        center_prices = []
        high_prices = []
        quantities = []
        for i, row in enumerate(df_aji.iloc):
            lp, cp, hp, q = row[price_cols + ["卸売数量"]]
            lp, cp, hp, q = (
                to_numeric(lp),
                to_numeric(cp),
                to_numeric(hp),
                to_numeric(q),
            )

            # Only the middle price recorded -> single price band; reuse it
            # as both the low and the high price.
            if isnan(lp) and isnan(hp) and (not isnan(cp)):
                low_prices.append(cp)
                center_prices.append(cp)
                high_prices.append(cp)

            # Low and high present but middle missing -> two price bands;
            # use their average as a stand-in middle price.
            elif (not isnan(lp)) and (not isnan(hp)) and isnan(cp):
                low_prices.append(lp)
                center_prices.append((lp + hp) / 2)
                high_prices.append(hp)
            else:
                low_prices.append(lp)
                center_prices.append(cp)
                high_prices.append(hp)

            # Missing quantity counts as zero sold.
            if isnan(row["卸売数量"]):
                quantities.append(0)
            else:
                quantities.append(q)

        # Aggregate the day: min of lows, mean of middles, max of highs,
        # total quantity (float-safe int conversion).
        low_price = int(min(low_prices))
        center_price = int(sum(center_prices) / len(center_prices))
        high_price = int(max(high_prices))
        quantity = int(float(sum(quantities)))

        # Store the day's aggregated record.
        insert_data(data, day, low_price, center_price, high_price, quantity)
        # Randomized cool-down between requests so we don't hammer the site.
        time.sleep(max(0.5 + random.normalvariate(0, 0.3), 0.1))
    # Assemble the accumulated columns into the result DataFrame.
    df = pd.DataFrame(data)
    return df
167
+
168
+
169
if __name__ == "__main__":
    # Demo run: fetch a few days of prices and dump them as CSV.
    demo_start = date(2020, 12, 21)
    demo_end = date(2020, 12, 26)
    prices = get_fish_price_data(start_date=demo_start, end_date=demo_end)
    prices.to_csv("fish_price.csv", index=False)
notebook/update_data.ipynb ADDED
@@ -0,0 +1,642 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ {
2
+ "cells": [
3
+ {
4
+ "cell_type": "code",
5
+ "execution_count": 24,
6
+ "metadata": {},
7
+ "outputs": [],
8
+ "source": [
9
+ "import datetime as dt\n",
10
+ "import numpy as np\n",
11
+ "import pandas as pd\n",
12
+ "\n",
13
+ "import sys\n",
14
+ "sys.path.append(\"../\")\n",
15
+ "import get_fish_price"
16
+ ]
17
+ },
18
+ {
19
+ "cell_type": "code",
20
+ "execution_count": 25,
21
+ "metadata": {},
22
+ "outputs": [],
23
+ "source": [
24
+ "df_hamachi = pd.read_csv(r'../data/hamachi_price.csv', encoding='utf_8_sig')\n",
25
+ "df_hamachi[\"date\"] = df_hamachi[\"date\"].apply(lambda x: pd.to_datetime(str(x)))\n",
26
+ "\n",
27
+ "df_hamachi = df_hamachi.set_index(df_hamachi[\"date\"])"
28
+ ]
29
+ },
30
+ {
31
+ "cell_type": "code",
32
+ "execution_count": 26,
33
+ "metadata": {},
34
+ "outputs": [
35
+ {
36
+ "data": {
37
+ "text/html": [
38
+ "<div>\n",
39
+ "<style scoped>\n",
40
+ " .dataframe tbody tr th:only-of-type {\n",
41
+ " vertical-align: middle;\n",
42
+ " }\n",
43
+ "\n",
44
+ " .dataframe tbody tr th {\n",
45
+ " vertical-align: top;\n",
46
+ " }\n",
47
+ "\n",
48
+ " .dataframe thead th {\n",
49
+ " text-align: right;\n",
50
+ " }\n",
51
+ "</style>\n",
52
+ "<table border=\"1\" class=\"dataframe\">\n",
53
+ " <thead>\n",
54
+ " <tr style=\"text-align: right;\">\n",
55
+ " <th></th>\n",
56
+ " <th>date</th>\n",
57
+ " <th>low_price</th>\n",
58
+ " <th>center_price</th>\n",
59
+ " <th>high_price</th>\n",
60
+ " <th>quantity</th>\n",
61
+ " </tr>\n",
62
+ " <tr>\n",
63
+ " <th>date</th>\n",
64
+ " <th></th>\n",
65
+ " <th></th>\n",
66
+ " <th></th>\n",
67
+ " <th></th>\n",
68
+ " <th></th>\n",
69
+ " </tr>\n",
70
+ " </thead>\n",
71
+ " <tbody>\n",
72
+ " <tr>\n",
73
+ " <th>2012-03-01</th>\n",
74
+ " <td>2012-03-01</td>\n",
75
+ " <td>546.0</td>\n",
76
+ " <td>588.0</td>\n",
77
+ " <td>788.0</td>\n",
78
+ " <td>57277.0</td>\n",
79
+ " </tr>\n",
80
+ " <tr>\n",
81
+ " <th>2012-03-02</th>\n",
82
+ " <td>2012-03-02</td>\n",
83
+ " <td>546.0</td>\n",
84
+ " <td>588.0</td>\n",
85
+ " <td>788.0</td>\n",
86
+ " <td>58926.0</td>\n",
87
+ " </tr>\n",
88
+ " <tr>\n",
89
+ " <th>2012-03-03</th>\n",
90
+ " <td>2012-03-03</td>\n",
91
+ " <td>546.0</td>\n",
92
+ " <td>588.0</td>\n",
93
+ " <td>788.0</td>\n",
94
+ " <td>83306.0</td>\n",
95
+ " </tr>\n",
96
+ " <tr>\n",
97
+ " <th>2012-03-04</th>\n",
98
+ " <td>2012-03-04</td>\n",
99
+ " <td>NaN</td>\n",
100
+ " <td>NaN</td>\n",
101
+ " <td>NaN</td>\n",
102
+ " <td>0.0</td>\n",
103
+ " </tr>\n",
104
+ " <tr>\n",
105
+ " <th>2012-03-05</th>\n",
106
+ " <td>2012-03-05</td>\n",
107
+ " <td>546.0</td>\n",
108
+ " <td>588.0</td>\n",
109
+ " <td>788.0</td>\n",
110
+ " <td>50844.0</td>\n",
111
+ " </tr>\n",
112
+ " <tr>\n",
113
+ " <th>...</th>\n",
114
+ " <td>...</td>\n",
115
+ " <td>...</td>\n",
116
+ " <td>...</td>\n",
117
+ " <td>...</td>\n",
118
+ " <td>...</td>\n",
119
+ " </tr>\n",
120
+ " <tr>\n",
121
+ " <th>2023-02-23</th>\n",
122
+ " <td>2023-02-23</td>\n",
123
+ " <td>NaN</td>\n",
124
+ " <td>NaN</td>\n",
125
+ " <td>NaN</td>\n",
126
+ " <td>0.0</td>\n",
127
+ " </tr>\n",
128
+ " <tr>\n",
129
+ " <th>2023-02-24</th>\n",
130
+ " <td>2023-02-24</td>\n",
131
+ " <td>1512.0</td>\n",
132
+ " <td>1566.0</td>\n",
133
+ " <td>1620.0</td>\n",
134
+ " <td>17643.0</td>\n",
135
+ " </tr>\n",
136
+ " <tr>\n",
137
+ " <th>2023-02-25</th>\n",
138
+ " <td>2023-02-25</td>\n",
139
+ " <td>1512.0</td>\n",
140
+ " <td>1566.0</td>\n",
141
+ " <td>1620.0</td>\n",
142
+ " <td>16470.0</td>\n",
143
+ " </tr>\n",
144
+ " <tr>\n",
145
+ " <th>2023-02-26</th>\n",
146
+ " <td>2023-02-26</td>\n",
147
+ " <td>NaN</td>\n",
148
+ " <td>NaN</td>\n",
149
+ " <td>NaN</td>\n",
150
+ " <td>0.0</td>\n",
151
+ " </tr>\n",
152
+ " <tr>\n",
153
+ " <th>2023-02-27</th>\n",
154
+ " <td>2023-02-27</td>\n",
155
+ " <td>1512.0</td>\n",
156
+ " <td>1566.0</td>\n",
157
+ " <td>1620.0</td>\n",
158
+ " <td>11921.0</td>\n",
159
+ " </tr>\n",
160
+ " </tbody>\n",
161
+ "</table>\n",
162
+ "<p>4016 rows × 5 columns</p>\n",
163
+ "</div>"
164
+ ],
165
+ "text/plain": [
166
+ " date low_price center_price high_price quantity\n",
167
+ "date \n",
168
+ "2012-03-01 2012-03-01 546.0 588.0 788.0 57277.0\n",
169
+ "2012-03-02 2012-03-02 546.0 588.0 788.0 58926.0\n",
170
+ "2012-03-03 2012-03-03 546.0 588.0 788.0 83306.0\n",
171
+ "2012-03-04 2012-03-04 NaN NaN NaN 0.0\n",
172
+ "2012-03-05 2012-03-05 546.0 588.0 788.0 50844.0\n",
173
+ "... ... ... ... ... ...\n",
174
+ "2023-02-23 2023-02-23 NaN NaN NaN 0.0\n",
175
+ "2023-02-24 2023-02-24 1512.0 1566.0 1620.0 17643.0\n",
176
+ "2023-02-25 2023-02-25 1512.0 1566.0 1620.0 16470.0\n",
177
+ "2023-02-26 2023-02-26 NaN NaN NaN 0.0\n",
178
+ "2023-02-27 2023-02-27 1512.0 1566.0 1620.0 11921.0\n",
179
+ "\n",
180
+ "[4016 rows x 5 columns]"
181
+ ]
182
+ },
183
+ "execution_count": 26,
184
+ "metadata": {},
185
+ "output_type": "execute_result"
186
+ }
187
+ ],
188
+ "source": [
189
+ "df_hamachi"
190
+ ]
191
+ },
192
+ {
193
+ "cell_type": "code",
194
+ "execution_count": 31,
195
+ "metadata": {},
196
+ "outputs": [
197
+ {
198
+ "data": {
199
+ "text/plain": [
200
+ "datetime.date(2023, 2, 27)"
201
+ ]
202
+ },
203
+ "execution_count": 31,
204
+ "metadata": {},
205
+ "output_type": "execute_result"
206
+ }
207
+ ],
208
+ "source": [
209
+ "df_hamachi['date'].max().date()"
210
+ ]
211
+ },
212
+ {
213
+ "cell_type": "code",
214
+ "execution_count": 28,
215
+ "metadata": {},
216
+ "outputs": [
217
+ {
218
+ "data": {
219
+ "text/plain": [
220
+ "datetime.date(2023, 3, 17)"
221
+ ]
222
+ },
223
+ "execution_count": 28,
224
+ "metadata": {},
225
+ "output_type": "execute_result"
226
+ }
227
+ ],
228
+ "source": [
229
+ "today = dt.date.today()\n",
230
+ "today"
231
+ ]
232
+ },
233
+ {
234
+ "cell_type": "code",
235
+ "execution_count": 37,
236
+ "metadata": {},
237
+ "outputs": [
238
+ {
239
+ "data": {
240
+ "text/plain": [
241
+ "datetime.date(2023, 3, 1)"
242
+ ]
243
+ },
244
+ "execution_count": 37,
245
+ "metadata": {},
246
+ "output_type": "execute_result"
247
+ }
248
+ ],
249
+ "source": [
250
+ "df_hamachi['date'].max().date() + dt.timedelta(days=1)"
251
+ ]
252
+ },
253
+ {
254
+ "cell_type": "code",
255
+ "execution_count": 42,
256
+ "metadata": {},
257
+ "outputs": [
258
+ {
259
+ "name": "stderr",
260
+ "output_type": "stream",
261
+ "text": [
262
+ "20230316: 100%|██████████| 17/17 [00:07<00:00, 2.39it/s]\n"
263
+ ]
264
+ }
265
+ ],
266
+ "source": [
267
+ "if df_hamachi['date'].max().date() < today:\n",
268
+ " start_date = df_hamachi['date'].max().date() + dt.timedelta(days=1)\n",
269
+ " temp_df = get_fish_price.get_fish_price_data(start_date=start_date, end_date=today)\n",
270
+ " temp_df[\"date\"] = temp_df[\"date\"].apply(lambda x: pd.to_datetime(str(x)))\n",
271
+ " temp_df = temp_df.set_index(temp_df[\"date\"])\n",
272
+ " df_hamachi = pd.concat([df_hamachi, temp_df])\n",
273
+ "    df_hamachi.to_csv(r'../data/hamachi_price.csv', encoding='utf_8_sig')"
274
+ ]
275
+ },
276
+ {
277
+ "cell_type": "code",
278
+ "execution_count": 46,
279
+ "metadata": {},
280
+ "outputs": [
281
+ {
282
+ "data": {
283
+ "text/html": [
284
+ "<div>\n",
285
+ "<style scoped>\n",
286
+ " .dataframe tbody tr th:only-of-type {\n",
287
+ " vertical-align: middle;\n",
288
+ " }\n",
289
+ "\n",
290
+ " .dataframe tbody tr th {\n",
291
+ " vertical-align: top;\n",
292
+ " }\n",
293
+ "\n",
294
+ " .dataframe thead th {\n",
295
+ " text-align: right;\n",
296
+ " }\n",
297
+ "</style>\n",
298
+ "<table border=\"1\" class=\"dataframe\">\n",
299
+ " <thead>\n",
300
+ " <tr style=\"text-align: right;\">\n",
301
+ " <th></th>\n",
302
+ " <th>date</th>\n",
303
+ " <th>low_price</th>\n",
304
+ " <th>center_price</th>\n",
305
+ " <th>high_price</th>\n",
306
+ " <th>quantity</th>\n",
307
+ " </tr>\n",
308
+ " <tr>\n",
309
+ " <th>date</th>\n",
310
+ " <th></th>\n",
311
+ " <th></th>\n",
312
+ " <th></th>\n",
313
+ " <th></th>\n",
314
+ " <th></th>\n",
315
+ " </tr>\n",
316
+ " </thead>\n",
317
+ " <tbody>\n",
318
+ " <tr>\n",
319
+ " <th>2023-02-15</th>\n",
320
+ " <td>2023-02-15</td>\n",
321
+ " <td>NaN</td>\n",
322
+ " <td>NaN</td>\n",
323
+ " <td>NaN</td>\n",
324
+ " <td>0.0</td>\n",
325
+ " </tr>\n",
326
+ " <tr>\n",
327
+ " <th>2023-02-16</th>\n",
328
+ " <td>2023-02-16</td>\n",
329
+ " <td>1512.0</td>\n",
330
+ " <td>1566.0</td>\n",
331
+ " <td>1620.0</td>\n",
332
+ " <td>19955.0</td>\n",
333
+ " </tr>\n",
334
+ " <tr>\n",
335
+ " <th>2023-02-17</th>\n",
336
+ " <td>2023-02-17</td>\n",
337
+ " <td>1512.0</td>\n",
338
+ " <td>1566.0</td>\n",
339
+ " <td>1620.0</td>\n",
340
+ " <td>14942.0</td>\n",
341
+ " </tr>\n",
342
+ " <tr>\n",
343
+ " <th>2023-02-18</th>\n",
344
+ " <td>2023-02-18</td>\n",
345
+ " <td>1512.0</td>\n",
346
+ " <td>1566.0</td>\n",
347
+ " <td>1620.0</td>\n",
348
+ " <td>18391.0</td>\n",
349
+ " </tr>\n",
350
+ " <tr>\n",
351
+ " <th>2023-02-19</th>\n",
352
+ " <td>2023-02-19</td>\n",
353
+ " <td>NaN</td>\n",
354
+ " <td>NaN</td>\n",
355
+ " <td>NaN</td>\n",
356
+ " <td>0.0</td>\n",
357
+ " </tr>\n",
358
+ " <tr>\n",
359
+ " <th>2023-02-20</th>\n",
360
+ " <td>2023-02-20</td>\n",
361
+ " <td>1512.0</td>\n",
362
+ " <td>1566.0</td>\n",
363
+ " <td>1620.0</td>\n",
364
+ " <td>17548.0</td>\n",
365
+ " </tr>\n",
366
+ " <tr>\n",
367
+ " <th>2023-02-21</th>\n",
368
+ " <td>2023-02-21</td>\n",
369
+ " <td>1512.0</td>\n",
370
+ " <td>1566.0</td>\n",
371
+ " <td>1620.0</td>\n",
372
+ " <td>12495.0</td>\n",
373
+ " </tr>\n",
374
+ " <tr>\n",
375
+ " <th>2023-02-22</th>\n",
376
+ " <td>2023-02-22</td>\n",
377
+ " <td>1512.0</td>\n",
378
+ " <td>1566.0</td>\n",
379
+ " <td>1620.0</td>\n",
380
+ " <td>15880.0</td>\n",
381
+ " </tr>\n",
382
+ " <tr>\n",
383
+ " <th>2023-02-23</th>\n",
384
+ " <td>2023-02-23</td>\n",
385
+ " <td>NaN</td>\n",
386
+ " <td>NaN</td>\n",
387
+ " <td>NaN</td>\n",
388
+ " <td>0.0</td>\n",
389
+ " </tr>\n",
390
+ " <tr>\n",
391
+ " <th>2023-02-24</th>\n",
392
+ " <td>2023-02-24</td>\n",
393
+ " <td>1512.0</td>\n",
394
+ " <td>1566.0</td>\n",
395
+ " <td>1620.0</td>\n",
396
+ " <td>17643.0</td>\n",
397
+ " </tr>\n",
398
+ " <tr>\n",
399
+ " <th>2023-02-25</th>\n",
400
+ " <td>2023-02-25</td>\n",
401
+ " <td>1512.0</td>\n",
402
+ " <td>1566.0</td>\n",
403
+ " <td>1620.0</td>\n",
404
+ " <td>16470.0</td>\n",
405
+ " </tr>\n",
406
+ " <tr>\n",
407
+ " <th>2023-02-26</th>\n",
408
+ " <td>2023-02-26</td>\n",
409
+ " <td>NaN</td>\n",
410
+ " <td>NaN</td>\n",
411
+ " <td>NaN</td>\n",
412
+ " <td>0.0</td>\n",
413
+ " </tr>\n",
414
+ " <tr>\n",
415
+ " <th>2023-02-27</th>\n",
416
+ " <td>2023-02-27</td>\n",
417
+ " <td>1512.0</td>\n",
418
+ " <td>1566.0</td>\n",
419
+ " <td>1620.0</td>\n",
420
+ " <td>11921.0</td>\n",
421
+ " </tr>\n",
422
+ " <tr>\n",
423
+ " <th>2023-02-28</th>\n",
424
+ " <td>2023-02-28</td>\n",
425
+ " <td>1512.0</td>\n",
426
+ " <td>1566.0</td>\n",
427
+ " <td>1620.0</td>\n",
428
+ " <td>13823.0</td>\n",
429
+ " </tr>\n",
430
+ " <tr>\n",
431
+ " <th>2023-03-01</th>\n",
432
+ " <td>2023-03-01</td>\n",
433
+ " <td>NaN</td>\n",
434
+ " <td>NaN</td>\n",
435
+ " <td>NaN</td>\n",
436
+ " <td>0.0</td>\n",
437
+ " </tr>\n",
438
+ " <tr>\n",
439
+ " <th>2023-03-02</th>\n",
440
+ " <td>2023-03-02</td>\n",
441
+ " <td>1512.0</td>\n",
442
+ " <td>1566.0</td>\n",
443
+ " <td>1620.0</td>\n",
444
+ " <td>13342.0</td>\n",
445
+ " </tr>\n",
446
+ " <tr>\n",
447
+ " <th>2023-03-03</th>\n",
448
+ " <td>2023-03-03</td>\n",
449
+ " <td>1512.0</td>\n",
450
+ " <td>1566.0</td>\n",
451
+ " <td>1620.0</td>\n",
452
+ " <td>13660.0</td>\n",
453
+ " </tr>\n",
454
+ " <tr>\n",
455
+ " <th>2023-03-04</th>\n",
456
+ " <td>2023-03-04</td>\n",
457
+ " <td>1512.0</td>\n",
458
+ " <td>1566.0</td>\n",
459
+ " <td>1620.0</td>\n",
460
+ " <td>15128.0</td>\n",
461
+ " </tr>\n",
462
+ " <tr>\n",
463
+ " <th>2023-03-05</th>\n",
464
+ " <td>2023-03-05</td>\n",
465
+ " <td>NaN</td>\n",
466
+ " <td>NaN</td>\n",
467
+ " <td>NaN</td>\n",
468
+ " <td>0.0</td>\n",
469
+ " </tr>\n",
470
+ " <tr>\n",
471
+ " <th>2023-03-06</th>\n",
472
+ " <td>2023-03-06</td>\n",
473
+ " <td>1512.0</td>\n",
474
+ " <td>1566.0</td>\n",
475
+ " <td>1620.0</td>\n",
476
+ " <td>10972.0</td>\n",
477
+ " </tr>\n",
478
+ " <tr>\n",
479
+ " <th>2023-03-07</th>\n",
480
+ " <td>2023-03-07</td>\n",
481
+ " <td>1512.0</td>\n",
482
+ " <td>1566.0</td>\n",
483
+ " <td>1620.0</td>\n",
484
+ " <td>12020.0</td>\n",
485
+ " </tr>\n",
486
+ " <tr>\n",
487
+ " <th>2023-03-08</th>\n",
488
+ " <td>2023-03-08</td>\n",
489
+ " <td>NaN</td>\n",
490
+ " <td>NaN</td>\n",
491
+ " <td>NaN</td>\n",
492
+ " <td>0.0</td>\n",
493
+ " </tr>\n",
494
+ " <tr>\n",
495
+ " <th>2023-03-09</th>\n",
496
+ " <td>2023-03-09</td>\n",
497
+ " <td>1512.0</td>\n",
498
+ " <td>1566.0</td>\n",
499
+ " <td>1620.0</td>\n",
500
+ " <td>11045.0</td>\n",
501
+ " </tr>\n",
502
+ " <tr>\n",
503
+ " <th>2023-03-10</th>\n",
504
+ " <td>2023-03-10</td>\n",
505
+ " <td>1512.0</td>\n",
506
+ " <td>1566.0</td>\n",
507
+ " <td>1620.0</td>\n",
508
+ " <td>9888.0</td>\n",
509
+ " </tr>\n",
510
+ " <tr>\n",
511
+ " <th>2023-03-11</th>\n",
512
+ " <td>2023-03-11</td>\n",
513
+ " <td>1512.0</td>\n",
514
+ " <td>1566.0</td>\n",
515
+ " <td>1620.0</td>\n",
516
+ " <td>15630.0</td>\n",
517
+ " </tr>\n",
518
+ " <tr>\n",
519
+ " <th>2023-03-12</th>\n",
520
+ " <td>2023-03-12</td>\n",
521
+ " <td>NaN</td>\n",
522
+ " <td>NaN</td>\n",
523
+ " <td>NaN</td>\n",
524
+ " <td>0.0</td>\n",
525
+ " </tr>\n",
526
+ " <tr>\n",
527
+ " <th>2023-03-13</th>\n",
528
+ " <td>2023-03-13</td>\n",
529
+ " <td>1512.0</td>\n",
530
+ " <td>1566.0</td>\n",
531
+ " <td>1620.0</td>\n",
532
+ " <td>9428.0</td>\n",
533
+ " </tr>\n",
534
+ " <tr>\n",
535
+ " <th>2023-03-14</th>\n",
536
+ " <td>2023-03-14</td>\n",
537
+ " <td>1512.0</td>\n",
538
+ " <td>1566.0</td>\n",
539
+ " <td>1620.0</td>\n",
540
+ " <td>12271.0</td>\n",
541
+ " </tr>\n",
542
+ " <tr>\n",
543
+ " <th>2023-03-15</th>\n",
544
+ " <td>2023-03-15</td>\n",
545
+ " <td>NaN</td>\n",
546
+ " <td>NaN</td>\n",
547
+ " <td>NaN</td>\n",
548
+ " <td>0.0</td>\n",
549
+ " </tr>\n",
550
+ " <tr>\n",
551
+ " <th>2023-03-16</th>\n",
552
+ " <td>2023-03-16</td>\n",
553
+ " <td>1512.0</td>\n",
554
+ " <td>1566.0</td>\n",
555
+ " <td>1620.0</td>\n",
556
+ " <td>9849.0</td>\n",
557
+ " </tr>\n",
558
+ " </tbody>\n",
559
+ "</table>\n",
560
+ "</div>"
561
+ ],
562
+ "text/plain": [
563
+ " date low_price center_price high_price quantity\n",
564
+ "date \n",
565
+ "2023-02-15 2023-02-15 NaN NaN NaN 0.0\n",
566
+ "2023-02-16 2023-02-16 1512.0 1566.0 1620.0 19955.0\n",
567
+ "2023-02-17 2023-02-17 1512.0 1566.0 1620.0 14942.0\n",
568
+ "2023-02-18 2023-02-18 1512.0 1566.0 1620.0 18391.0\n",
569
+ "2023-02-19 2023-02-19 NaN NaN NaN 0.0\n",
570
+ "2023-02-20 2023-02-20 1512.0 1566.0 1620.0 17548.0\n",
571
+ "2023-02-21 2023-02-21 1512.0 1566.0 1620.0 12495.0\n",
572
+ "2023-02-22 2023-02-22 1512.0 1566.0 1620.0 15880.0\n",
573
+ "2023-02-23 2023-02-23 NaN NaN NaN 0.0\n",
574
+ "2023-02-24 2023-02-24 1512.0 1566.0 1620.0 17643.0\n",
575
+ "2023-02-25 2023-02-25 1512.0 1566.0 1620.0 16470.0\n",
576
+ "2023-02-26 2023-02-26 NaN NaN NaN 0.0\n",
577
+ "2023-02-27 2023-02-27 1512.0 1566.0 1620.0 11921.0\n",
578
+ "2023-02-28 2023-02-28 1512.0 1566.0 1620.0 13823.0\n",
579
+ "2023-03-01 2023-03-01 NaN NaN NaN 0.0\n",
580
+ "2023-03-02 2023-03-02 1512.0 1566.0 1620.0 13342.0\n",
581
+ "2023-03-03 2023-03-03 1512.0 1566.0 1620.0 13660.0\n",
582
+ "2023-03-04 2023-03-04 1512.0 1566.0 1620.0 15128.0\n",
583
+ "2023-03-05 2023-03-05 NaN NaN NaN 0.0\n",
584
+ "2023-03-06 2023-03-06 1512.0 1566.0 1620.0 10972.0\n",
585
+ "2023-03-07 2023-03-07 1512.0 1566.0 1620.0 12020.0\n",
586
+ "2023-03-08 2023-03-08 NaN NaN NaN 0.0\n",
587
+ "2023-03-09 2023-03-09 1512.0 1566.0 1620.0 11045.0\n",
588
+ "2023-03-10 2023-03-10 1512.0 1566.0 1620.0 9888.0\n",
589
+ "2023-03-11 2023-03-11 1512.0 1566.0 1620.0 15630.0\n",
590
+ "2023-03-12 2023-03-12 NaN NaN NaN 0.0\n",
591
+ "2023-03-13 2023-03-13 1512.0 1566.0 1620.0 9428.0\n",
592
+ "2023-03-14 2023-03-14 1512.0 1566.0 1620.0 12271.0\n",
593
+ "2023-03-15 2023-03-15 NaN NaN NaN 0.0\n",
594
+ "2023-03-16 2023-03-16 1512.0 1566.0 1620.0 9849.0"
595
+ ]
596
+ },
597
+ "execution_count": 46,
598
+ "metadata": {},
599
+ "output_type": "execute_result"
600
+ }
601
+ ],
602
+ "source": [
603
+ "df_hamachi.tail(30)\n",
604
+ "\n"
605
+ ]
606
+ },
607
+ {
608
+ "cell_type": "code",
609
+ "execution_count": null,
610
+ "metadata": {},
611
+ "outputs": [],
612
+ "source": []
613
+ }
614
+ ],
615
+ "metadata": {
616
+ "kernelspec": {
617
+ "display_name": "python38",
618
+ "language": "python",
619
+ "name": "python3"
620
+ },
621
+ "language_info": {
622
+ "codemirror_mode": {
623
+ "name": "ipython",
624
+ "version": 3
625
+ },
626
+ "file_extension": ".py",
627
+ "mimetype": "text/x-python",
628
+ "name": "python",
629
+ "nbconvert_exporter": "python",
630
+ "pygments_lexer": "ipython3",
631
+ "version": "3.8.16"
632
+ },
633
+ "orig_nbformat": 4,
634
+ "vscode": {
635
+ "interpreter": {
636
+ "hash": "1141289dbaa853feef947890554ae6471105425144b165b485924c74cd7145f9"
637
+ }
638
+ }
639
+ },
640
+ "nbformat": 4,
641
+ "nbformat_minor": 2
642
+ }