KevinHuSh commited on
Commit
1eb186a
·
1 Parent(s): 72b7b5f

go through smoke test of all API (#12)

Browse files

* add field progress msg into docinfo; add file processing procedure

* go through upload, create kb, add doc to kb

* smoke test for all API

* smoke test for all API

docker/.env CHANGED
@@ -11,7 +11,7 @@ ES_PORT=9200
11
  KIBANA_PORT=6601
12
 
13
  # Increase or decrease based on the available host memory (in bytes)
14
- MEM_LIMIT=1073741824
15
 
16
  POSTGRES_USER=root
17
  POSTGRES_PASSWORD=infiniflow_docgpt
 
11
  KIBANA_PORT=6601
12
 
13
  # Increase or decrease based on the available host memory (in bytes)
14
+ MEM_LIMIT=4073741824
15
 
16
  POSTGRES_USER=root
17
  POSTGRES_PASSWORD=infiniflow_docgpt
docker/docker-compose.yml CHANGED
@@ -54,6 +54,22 @@ services:
54
  - docgpt
55
  restart: always
56
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
57
 
58
  volumes:
59
  esdata01:
@@ -62,6 +78,8 @@ volumes:
62
  driver: local
63
  pg_data:
64
  driver: local
 
 
65
 
66
  networks:
67
  docgpt:
 
54
  - docgpt
55
  restart: always
56
 
57
+ minio:
58
+ image: quay.io/minio/minio:RELEASE.2023-12-20T01-00-02Z
59
+ container_name: docgpt-minio
60
+ command: server --console-address ":9001" /data
61
+ ports:
62
+ - 9000:9000
63
+ - 9001:9001
64
+ environment:
65
+ - MINIO_ROOT_USER=${MINIO_USER}
66
+ - MINIO_ROOT_PASSWORD=${MINIO_PASSWORD}
67
+ volumes:
68
+ - minio_data:/data
69
+ networks:
70
+ - docgpt
71
+ restart: always
72
+
73
 
74
  volumes:
75
  esdata01:
 
78
  driver: local
79
  pg_data:
80
  driver: local
81
+ minio_data:
82
+ driver: local
83
 
84
  networks:
85
  docgpt:
migration/src/m20220101_000001_create_table.rs CHANGED
@@ -20,13 +20,14 @@ impl MigrationTrait for Migration {
20
  )
21
  .col(ColumnDef::new(UserInfo::Email).string().not_null())
22
  .col(ColumnDef::new(UserInfo::Nickname).string().not_null())
23
- .col(ColumnDef::new(UserInfo::AvatarUrl).string())
24
- .col(ColumnDef::new(UserInfo::ColorSchema).string().default("dark"))
25
  .col(ColumnDef::new(UserInfo::ListStyle).string().default("list"))
26
  .col(ColumnDef::new(UserInfo::Language).string().default("chinese"))
27
  .col(ColumnDef::new(UserInfo::Password).string().not_null())
28
- .col(ColumnDef::new(UserInfo::CreatedAt).date().not_null())
29
- .col(ColumnDef::new(UserInfo::UpdatedAt).date().not_null())
 
30
  .col(ColumnDef::new(UserInfo::IsDeleted).boolean().default(false))
31
  .to_owned(),
32
  )
@@ -49,9 +50,9 @@ impl MigrationTrait for Migration {
49
  .col(ColumnDef::new(TagInfo::Regx).string())
50
  .col(ColumnDef::new(TagInfo::Color).tiny_unsigned().default(1))
51
  .col(ColumnDef::new(TagInfo::Icon).tiny_unsigned().default(1))
52
- .col(ColumnDef::new(TagInfo::Dir).string())
53
- .col(ColumnDef::new(TagInfo::CreatedAt).date().not_null())
54
- .col(ColumnDef::new(TagInfo::UpdatedAt).date().not_null())
55
  .col(ColumnDef::new(TagInfo::IsDeleted).boolean().default(false))
56
  .to_owned(),
57
  )
@@ -89,6 +90,10 @@ impl MigrationTrait for Migration {
89
  )
90
  .col(ColumnDef::new(Kb2Doc::KbId).big_integer())
91
  .col(ColumnDef::new(Kb2Doc::Did).big_integer())
 
 
 
 
92
  .to_owned(),
93
  )
94
  .await?;
@@ -141,8 +146,8 @@ impl MigrationTrait for Migration {
141
  .col(ColumnDef::new(KbInfo::Uid).big_integer().not_null())
142
  .col(ColumnDef::new(KbInfo::KbName).string().not_null())
143
  .col(ColumnDef::new(KbInfo::Icon).tiny_unsigned().default(1))
144
- .col(ColumnDef::new(KbInfo::CreatedAt).date().not_null())
145
- .col(ColumnDef::new(KbInfo::UpdatedAt).date().not_null())
146
  .col(ColumnDef::new(KbInfo::IsDeleted).boolean().default(false))
147
  .to_owned(),
148
  )
@@ -162,10 +167,8 @@ impl MigrationTrait for Migration {
162
  .col(ColumnDef::new(DocInfo::Location).string().not_null())
163
  .col(ColumnDef::new(DocInfo::Size).big_integer().not_null())
164
  .col(ColumnDef::new(DocInfo::Type).string().not_null()).comment("doc|folder")
165
- .col(ColumnDef::new(DocInfo::KbProgress).float().default(0))
166
- .col(ColumnDef::new(DocInfo::KbProgressMsg).string().default(""))
167
- .col(ColumnDef::new(DocInfo::CreatedAt).date().not_null())
168
- .col(ColumnDef::new(DocInfo::UpdatedAt).date().not_null())
169
  .col(ColumnDef::new(DocInfo::IsDeleted).boolean().default(false))
170
  .to_owned(),
171
  )
@@ -182,10 +185,11 @@ impl MigrationTrait for Migration {
182
  .auto_increment()
183
  .primary_key())
184
  .col(ColumnDef::new(DialogInfo::Uid).big_integer().not_null())
 
185
  .col(ColumnDef::new(DialogInfo::DialogName).string().not_null())
186
  .col(ColumnDef::new(DialogInfo::History).string().comment("json"))
187
- .col(ColumnDef::new(DialogInfo::CreatedAt).date().not_null())
188
- .col(ColumnDef::new(DialogInfo::UpdatedAt).date().not_null())
189
  .col(ColumnDef::new(DialogInfo::IsDeleted).boolean().default(false))
190
  .to_owned(),
191
  )
@@ -241,11 +245,12 @@ enum UserInfo {
241
  Uid,
242
  Email,
243
  Nickname,
244
- AvatarUrl,
245
- ColorSchema,
246
  ListStyle,
247
  Language,
248
  Password,
 
249
  CreatedAt,
250
  UpdatedAt,
251
  IsDeleted,
@@ -260,7 +265,7 @@ enum TagInfo {
260
  Regx,
261
  Color,
262
  Icon,
263
- Dir,
264
  CreatedAt,
265
  UpdatedAt,
266
  IsDeleted,
@@ -280,6 +285,10 @@ enum Kb2Doc {
280
  Id,
281
  KbId,
282
  Did,
 
 
 
 
283
  }
284
 
285
  #[derive(DeriveIden)]
@@ -319,8 +328,6 @@ enum DocInfo {
319
  Location,
320
  Size,
321
  Type,
322
- KbProgress,
323
- KbProgressMsg,
324
  CreatedAt,
325
  UpdatedAt,
326
  IsDeleted,
@@ -329,8 +336,9 @@ enum DocInfo {
329
  #[derive(DeriveIden)]
330
  enum DialogInfo {
331
  Table,
332
- DialogId,
333
  Uid,
 
 
334
  DialogName,
335
  History,
336
  CreatedAt,
 
20
  )
21
  .col(ColumnDef::new(UserInfo::Email).string().not_null())
22
  .col(ColumnDef::new(UserInfo::Nickname).string().not_null())
23
+ .col(ColumnDef::new(UserInfo::AvatarBase64).string())
24
+ .col(ColumnDef::new(UserInfo::ColorScheme).string().default("dark"))
25
  .col(ColumnDef::new(UserInfo::ListStyle).string().default("list"))
26
  .col(ColumnDef::new(UserInfo::Language).string().default("chinese"))
27
  .col(ColumnDef::new(UserInfo::Password).string().not_null())
28
+ .col(ColumnDef::new(UserInfo::LastLoginAt).timestamp_with_time_zone())
29
+ .col(ColumnDef::new(UserInfo::CreatedAt).timestamp_with_time_zone().not_null())
30
+ .col(ColumnDef::new(UserInfo::UpdatedAt).timestamp_with_time_zone().not_null())
31
  .col(ColumnDef::new(UserInfo::IsDeleted).boolean().default(false))
32
  .to_owned(),
33
  )
 
50
  .col(ColumnDef::new(TagInfo::Regx).string())
51
  .col(ColumnDef::new(TagInfo::Color).tiny_unsigned().default(1))
52
  .col(ColumnDef::new(TagInfo::Icon).tiny_unsigned().default(1))
53
+ .col(ColumnDef::new(TagInfo::FolderId).big_integer())
54
+ .col(ColumnDef::new(TagInfo::CreatedAt).timestamp_with_time_zone().not_null())
55
+ .col(ColumnDef::new(TagInfo::UpdatedAt).timestamp_with_time_zone().not_null())
56
  .col(ColumnDef::new(TagInfo::IsDeleted).boolean().default(false))
57
  .to_owned(),
58
  )
 
90
  )
91
  .col(ColumnDef::new(Kb2Doc::KbId).big_integer())
92
  .col(ColumnDef::new(Kb2Doc::Did).big_integer())
93
+ .col(ColumnDef::new(Kb2Doc::KbProgress).float().default(0))
94
+ .col(ColumnDef::new(Kb2Doc::KbProgressMsg).string().default(""))
95
+ .col(ColumnDef::new(Kb2Doc::UpdatedAt).timestamp_with_time_zone().not_null())
96
+ .col(ColumnDef::new(Kb2Doc::IsDeleted).boolean().default(false))
97
  .to_owned(),
98
  )
99
  .await?;
 
146
  .col(ColumnDef::new(KbInfo::Uid).big_integer().not_null())
147
  .col(ColumnDef::new(KbInfo::KbName).string().not_null())
148
  .col(ColumnDef::new(KbInfo::Icon).tiny_unsigned().default(1))
149
+ .col(ColumnDef::new(KbInfo::CreatedAt).timestamp_with_time_zone().not_null())
150
+ .col(ColumnDef::new(KbInfo::UpdatedAt).timestamp_with_time_zone().not_null())
151
  .col(ColumnDef::new(KbInfo::IsDeleted).boolean().default(false))
152
  .to_owned(),
153
  )
 
167
  .col(ColumnDef::new(DocInfo::Location).string().not_null())
168
  .col(ColumnDef::new(DocInfo::Size).big_integer().not_null())
169
  .col(ColumnDef::new(DocInfo::Type).string().not_null()).comment("doc|folder")
170
+ .col(ColumnDef::new(DocInfo::CreatedAt).timestamp_with_time_zone().not_null())
171
+ .col(ColumnDef::new(DocInfo::UpdatedAt).timestamp_with_time_zone().not_null())
 
 
172
  .col(ColumnDef::new(DocInfo::IsDeleted).boolean().default(false))
173
  .to_owned(),
174
  )
 
185
  .auto_increment()
186
  .primary_key())
187
  .col(ColumnDef::new(DialogInfo::Uid).big_integer().not_null())
188
+ .col(ColumnDef::new(DialogInfo::KbId).big_integer().not_null())
189
  .col(ColumnDef::new(DialogInfo::DialogName).string().not_null())
190
  .col(ColumnDef::new(DialogInfo::History).string().comment("json"))
191
+ .col(ColumnDef::new(DialogInfo::CreatedAt).timestamp_with_time_zone().not_null())
192
+ .col(ColumnDef::new(DialogInfo::UpdatedAt).timestamp_with_time_zone().not_null())
193
  .col(ColumnDef::new(DialogInfo::IsDeleted).boolean().default(false))
194
  .to_owned(),
195
  )
 
245
  Uid,
246
  Email,
247
  Nickname,
248
+ AvatarBase64,
249
+ ColorScheme,
250
  ListStyle,
251
  Language,
252
  Password,
253
+ LastLoginAt,
254
  CreatedAt,
255
  UpdatedAt,
256
  IsDeleted,
 
265
  Regx,
266
  Color,
267
  Icon,
268
+ FolderId,
269
  CreatedAt,
270
  UpdatedAt,
271
  IsDeleted,
 
285
  Id,
286
  KbId,
287
  Did,
288
+ KbProgress,
289
+ KbProgressMsg,
290
+ UpdatedAt,
291
+ IsDeleted,
292
  }
293
 
294
  #[derive(DeriveIden)]
 
328
  Location,
329
  Size,
330
  Type,
 
 
331
  CreatedAt,
332
  UpdatedAt,
333
  IsDeleted,
 
336
  #[derive(DeriveIden)]
337
  enum DialogInfo {
338
  Table,
 
339
  Uid,
340
+ KbId,
341
+ DialogId,
342
  DialogName,
343
  History,
344
  CreatedAt,
python/conf/sys.cnf CHANGED
@@ -1,7 +1,10 @@
1
  [infiniflow]
2
- es=127.0.0.1:9200
3
  pgdb_usr=root
4
  pgdb_pwd=infiniflow_docgpt
5
  pgdb_host=127.0.0.1
6
  pgdb_port=5455
 
 
 
7
 
 
1
  [infiniflow]
2
+ es=http://127.0.0.1:9200
3
  pgdb_usr=root
4
  pgdb_pwd=infiniflow_docgpt
5
  pgdb_host=127.0.0.1
6
  pgdb_port=5455
7
+ minio_host=127.0.0.1:9000
8
+ minio_usr=infiniflow
9
+ minio_pwd=infiniflow_docgpt
10
 
python/nlp/huchunk.py CHANGED
@@ -2,6 +2,7 @@ import re
2
  import os
3
  import copy
4
  import base64
 
5
  from dataclasses import dataclass
6
  from typing import List
7
  import numpy as np
@@ -373,6 +374,7 @@ class PptChunker(HuChunker):
373
  from pptx import Presentation
374
  ppt = Presentation(fnm)
375
  flds = self.Fields()
 
376
  for slide in ppt.slides:
377
  for shape in slide.shapes:
378
  if hasattr(shape, "text"):
@@ -391,11 +393,21 @@ class TextChunker(HuChunker):
391
  def __init__(self):
392
  super().__init__()
393
 
 
 
 
 
 
 
 
 
 
394
  def __call__(self, fnm):
395
  flds = self.Fields()
 
396
  with open(fnm, "r") as f:
397
  txt = f.read()
398
- flds.text_chunks = self.naive_text_chunk(txt)
399
  flds.table_chunks = []
400
  return flds
401
 
 
2
  import os
3
  import copy
4
  import base64
5
+ import magic
6
  from dataclasses import dataclass
7
  from typing import List
8
  import numpy as np
 
374
  from pptx import Presentation
375
  ppt = Presentation(fnm)
376
  flds = self.Fields()
377
+ flds.text_chunks = []
378
  for slide in ppt.slides:
379
  for shape in slide.shapes:
380
  if hasattr(shape, "text"):
 
393
  def __init__(self):
394
  super().__init__()
395
 
396
+ @staticmethod
397
+ def is_binary_file(file_path):
398
+ mime = magic.Magic(mime=True)
399
+ file_type = mime.from_file(file_path)
400
+ if 'text' in file_type:
401
+ return False
402
+ else:
403
+ return True
404
+
405
  def __call__(self, fnm):
406
  flds = self.Fields()
407
+ if self.is_binary_file(fnm):return flds
408
  with open(fnm, "r") as f:
409
  txt = f.read()
410
+ flds.text_chunks = [(c, None) for c in self.naive_text_chunk(txt)]
411
  flds.table_chunks = []
412
  return flds
413
 
python/svr/parse_user_docs.py CHANGED
@@ -1,10 +1,15 @@
1
- import json, re, sys, os, hashlib, copy, glob, util, time, random
2
- from util.es_conn import HuEs, Postgres
 
 
 
 
3
  from util import rmSpace, findMaxDt
4
  from FlagEmbedding import FlagModel
5
  from nlp import huchunk, huqie
6
  import base64, hashlib
7
  from io import BytesIO
 
8
  from elasticsearch_dsl import Q
9
  from parser import (
10
  PdfParser,
@@ -22,73 +27,115 @@ from nlp.huchunk import (
22
  ES = HuEs("infiniflow")
23
  BATCH_SIZE = 64
24
  PG = Postgres("infiniflow", "docgpt")
 
25
 
26
  PDF = PdfChunker(PdfParser())
27
  DOC = DocxChunker(DocxParser())
28
  EXC = ExcelChunker(ExcelParser())
29
  PPT = PptChunker()
30
 
 
 
 
31
 
32
  def chuck_doc(name):
33
- name = os.path.split(name)[-1].lower().split(".")[-1]
34
- if name.find("pdf") >= 0: return PDF(name)
35
- if name.find("doc") >= 0: return DOC(name)
36
- if name.find("xlsx") >= 0: return EXC(name)
37
- if name.find("ppt") >= 0: return PDF(name)
38
- if name.find("pdf") >= 0: return PPT(name)
39
 
40
- if re.match(r"(txt|csv)", name): return TextChunker(name)
41
 
42
 
43
  def collect(comm, mod, tm):
44
  sql = f"""
45
  select
 
 
46
  did,
47
- uid,
48
- doc_name,
49
- location,
50
- updated_at
51
- from docinfo
52
- where
53
- updated_at >= '{tm}'
54
  and kb_progress = 0
55
- and type = 'doc'
56
- and MOD(uid, {comm}) = {mod}
57
  order by updated_at asc
58
  limit 1000
59
  """
60
- df = PG.select(sql)
61
- df = df.fillna("")
62
- mtm = str(df["updated_at"].max())[:19]
63
- print("TOTAL:", len(df), "To: ", mtm)
64
- return df, mtm
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
65
 
66
 
67
- def set_progress(did, prog, msg):
68
  sql = f"""
69
- update docinfo set kb_progress={prog}, kb_progress_msg='{msg}' where did={did}
 
 
70
  """
71
  PG.update(sql)
72
 
73
 
74
  def build(row):
75
  if row["size"] > 256000000:
76
- set_progress(row["did"], -1, "File size exceeds( <= 256Mb )")
77
  return []
78
- doc = {
79
- "doc_id": row["did"],
80
- "title_tks": huqie.qie(os.path.split(row["location"])[-1]),
81
- "updated_at": row["updated_at"]
82
- }
 
 
 
 
 
 
 
83
  random.seed(time.time())
84
- set_progress(row["did"], random.randint(0, 20)/100., "Finished preparing! Start to slice file!")
85
- obj = chuck_doc(row["location"])
86
- if not obj:
87
- set_progress(row["did"], -1, "Unsuported file type.")
 
 
 
 
 
 
 
 
 
88
  return []
89
 
90
- set_progress(row["did"], random.randint(20, 60)/100.)
91
 
 
 
 
 
 
 
92
  output_buffer = BytesIO()
93
  docs = []
94
  md5 = hashlib.md5()
@@ -97,12 +144,11 @@ def build(row):
97
  md5.update((txt + str(d["doc_id"])).encode("utf-8"))
98
  d["_id"] = md5.hexdigest()
99
  d["content_ltks"] = huqie.qie(txt)
100
- d["docnm_kwd"] = rmSpace(d["docnm_tks"])
101
  if not img:
102
  docs.append(d)
103
  continue
104
  img.save(output_buffer, format='JPEG')
105
- d["img_bin"] = base64.b64encode(output_buffer.getvalue())
106
  docs.append(d)
107
 
108
  for arr, img in obj.table_chunks:
@@ -115,9 +161,11 @@ def build(row):
115
  docs.append(d)
116
  continue
117
  img.save(output_buffer, format='JPEG')
118
- d["img_bin"] = base64.b64encode(output_buffer.getvalue())
 
 
119
  docs.append(d)
120
- set_progress(row["did"], random.randint(60, 70)/100., "Finished slicing. Start to embedding the content.")
121
 
122
  return docs
123
 
@@ -127,7 +175,7 @@ def index_name(uid):return f"docgpt_{uid}"
127
  def init_kb(row):
128
  idxnm = index_name(row["uid"])
129
  if ES.indexExist(idxnm): return
130
- return ES.createIdx(idxnm, json.load(open("res/mapping.json", "r")))
131
 
132
 
133
  model = None
@@ -138,27 +186,59 @@ def embedding(docs):
138
  vects = 0.1 * tts + 0.9 * cnts
139
  assert len(vects) == len(docs)
140
  for i,d in enumerate(docs):d["q_vec"] = vects[i].tolist()
141
- for d in docs:
142
- set_progress(d["doc_id"], random.randint(70, 95)/100.,
143
- "Finished embedding! Start to build index!")
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
144
 
145
 
146
  def main(comm, mod):
 
 
 
 
 
147
  tm_fnm = f"res/{comm}-{mod}.tm"
148
- tmf = open(tm_fnm, "a+")
149
  tm = findMaxDt(tm_fnm)
150
- rows, tm = collect(comm, mod, tm)
151
- for r in rows:
152
- if r["is_deleted"]:
153
- ES.deleteByQuery(Q("term", dock_id=r["did"]), index_name(r["uid"]))
154
- continue
155
 
 
 
 
 
 
156
  cks = build(r)
 
 
 
157
  ## TODO: exception handler
158
  ## set_progress(r["did"], -1, "ERROR: ")
159
  embedding(cks)
160
- if cks: init_kb(r)
161
- ES.bulk(cks, index_name(r["uid"]))
 
 
 
 
 
 
 
162
  tmf.write(str(r["updated_at"]) + "\n")
163
  tmf.close()
164
 
@@ -166,6 +246,5 @@ def main(comm, mod):
166
  if __name__ == "__main__":
167
  from mpi4py import MPI
168
  comm = MPI.COMM_WORLD
169
- rank = comm.Get_rank()
170
- main(comm, rank)
171
 
 
1
+ import json, os, sys, hashlib, copy, time, random, re, logging, torch
2
+ from os.path import dirname, realpath
3
+ sys.path.append(dirname(realpath(__file__)) + "/../")
4
+ from util.es_conn import HuEs
5
+ from util.db_conn import Postgres
6
+ from util.minio_conn import HuMinio
7
  from util import rmSpace, findMaxDt
8
  from FlagEmbedding import FlagModel
9
  from nlp import huchunk, huqie
10
  import base64, hashlib
11
  from io import BytesIO
12
+ import pandas as pd
13
  from elasticsearch_dsl import Q
14
  from parser import (
15
  PdfParser,
 
27
  ES = HuEs("infiniflow")
28
  BATCH_SIZE = 64
29
  PG = Postgres("infiniflow", "docgpt")
30
+ MINIO = HuMinio("infiniflow")
31
 
32
  PDF = PdfChunker(PdfParser())
33
  DOC = DocxChunker(DocxParser())
34
  EXC = ExcelChunker(ExcelParser())
35
  PPT = PptChunker()
36
 
37
+ UPLOAD_LOCATION = os.environ.get("UPLOAD_LOCATION", "./")
38
+ logging.warning(f"The files are stored in {UPLOAD_LOCATION}, please check it!")
39
+
40
 
41
  def chuck_doc(name):
42
+ suff = os.path.split(name)[-1].lower().split(".")[-1]
43
+ if suff.find("pdf") >= 0: return PDF(name)
44
+ if suff.find("doc") >= 0: return DOC(name)
45
+ if re.match(r"(xlsx|xlsm|xltx|xltm)", suff): return EXC(name)
46
+ if suff.find("ppt") >= 0: return PPT(name)
 
47
 
48
+ return TextChunker()(name)
49
 
50
 
51
  def collect(comm, mod, tm):
52
  sql = f"""
53
  select
54
+ id as kb2doc_id,
55
+ kb_id,
56
  did,
57
+ updated_at,
58
+ is_deleted
59
+ from kb2_doc
60
+ where
61
+ updated_at >= '{tm}'
 
 
62
  and kb_progress = 0
63
+ and MOD(did, {comm}) = {mod}
 
64
  order by updated_at asc
65
  limit 1000
66
  """
67
+ kb2doc = PG.select(sql)
68
+ if len(kb2doc) == 0:return pd.DataFrame()
69
+
70
+ sql = """
71
+ select
72
+ did,
73
+ uid,
74
+ doc_name,
75
+ location,
76
+ size
77
+ from doc_info
78
+ where
79
+ did in (%s)
80
+ """%",".join([str(i) for i in kb2doc["did"].unique()])
81
+ docs = PG.select(sql)
82
+ docs = docs.fillna("")
83
+ docs = docs.join(kb2doc.set_index("did"), on="did", how="left")
84
+
85
+ mtm = str(docs["updated_at"].max())[:19]
86
+ print("TOTAL:", len(docs), "To: ", mtm)
87
+ return docs
88
 
89
 
90
+ def set_progress(kb2doc_id, prog, msg="Processing..."):
91
  sql = f"""
92
+ update kb2_doc set kb_progress={prog}, kb_progress_msg='{msg}'
93
+ where
94
+ id={kb2doc_id}
95
  """
96
  PG.update(sql)
97
 
98
 
99
  def build(row):
100
  if row["size"] > 256000000:
101
+ set_progress(row["kb2doc_id"], -1, "File size exceeds( <= 256Mb )")
102
  return []
103
+ res = ES.search(Q("term", doc_id=row["did"]))
104
+ if ES.getTotal(res) > 0:
105
+ ES.updateScriptByQuery(Q("term", doc_id=row["did"]),
106
+ scripts="""
107
+ if(!ctx._source.kb_id.contains('%s'))
108
+ ctx._source.kb_id.add('%s');
109
+ """%(str(row["kb_id"]), str(row["kb_id"])),
110
+ idxnm = index_name(row["uid"])
111
+ )
112
+ set_progress(row["kb2doc_id"], 1, "Done")
113
+ return []
114
+
115
  random.seed(time.time())
116
+ set_progress(row["kb2doc_id"], random.randint(0, 20)/100., "Finished preparing! Start to slice file!")
117
+ try:
118
+ obj = chuck_doc(os.path.join(UPLOAD_LOCATION, row["location"]))
119
+ except Exception as e:
120
+ if re.search("(No such file|not found)", str(e)):
121
+ set_progress(row["kb2doc_id"], -1, "Can not find file <%s>"%row["doc_name"])
122
+ else:
123
+ set_progress(row["kb2doc_id"], -1, "Internal system error: %s"%str(e).replace("'", ""))
124
+ return []
125
+
126
+ print(row["doc_name"], obj)
127
+ if not obj.text_chunks and not obj.table_chunks:
128
+ set_progress(row["kb2doc_id"], 1, "Nothing added! Mostly, file type unsupported yet.")
129
  return []
130
 
131
+ set_progress(row["kb2doc_id"], random.randint(20, 60)/100., "Finished slicing files. Start to embedding the content.")
132
 
133
+ doc = {
134
+ "doc_id": row["did"],
135
+ "kb_id": [str(row["kb_id"])],
136
+ "title_tks": huqie.qie(os.path.split(row["location"])[-1]),
137
+ "updated_at": str(row["updated_at"]).replace("T", " ")[:19]
138
+ }
139
  output_buffer = BytesIO()
140
  docs = []
141
  md5 = hashlib.md5()
 
144
  md5.update((txt + str(d["doc_id"])).encode("utf-8"))
145
  d["_id"] = md5.hexdigest()
146
  d["content_ltks"] = huqie.qie(txt)
 
147
  if not img:
148
  docs.append(d)
149
  continue
150
  img.save(output_buffer, format='JPEG')
151
+ d["img_bin"] = str(output_buffer.getvalue())
152
  docs.append(d)
153
 
154
  for arr, img in obj.table_chunks:
 
161
  docs.append(d)
162
  continue
163
  img.save(output_buffer, format='JPEG')
164
+ MINIO.put("{}-{}".format(row["uid"], row["kb_id"]), d["_id"],
165
+ output_buffer.getvalue())
166
+ d["img_id"] = "{}-{}".format(row["uid"], row["kb_id"])
167
  docs.append(d)
168
+ set_progress(row["kb2doc_id"], random.randint(60, 70)/100., "Continue embedding the content.")
169
 
170
  return docs
171
 
 
175
  def init_kb(row):
176
  idxnm = index_name(row["uid"])
177
  if ES.indexExist(idxnm): return
178
+ return ES.createIdx(idxnm, json.load(open("conf/mapping.json", "r")))
179
 
180
 
181
  model = None
 
186
  vects = 0.1 * tts + 0.9 * cnts
187
  assert len(vects) == len(docs)
188
  for i,d in enumerate(docs):d["q_vec"] = vects[i].tolist()
189
+
190
+
191
+ def rm_doc_from_kb(df):
192
+ if len(df) == 0:return
193
+ for _,r in df.iterrows():
194
+ ES.updateScriptByQuery(Q("term", doc_id=r["did"]),
195
+ scripts="""
196
+ if(ctx._source.kb_id.contains('%s'))
197
+ ctx._source.kb_id.remove(
198
+ ctx._source.kb_id.indexOf('%s')
199
+ );
200
+ """%(str(r["kb_id"]),str(r["kb_id"])),
201
+ idxnm = index_name(r["uid"])
202
+ )
203
+ if len(df) == 0:return
204
+ sql = """
205
+ delete from kb2_doc where id in (%s)
206
+ """%",".join([str(i) for i in df["kb2doc_id"]])
207
+ PG.update(sql)
208
 
209
 
210
  def main(comm, mod):
211
+ global model
212
+ from FlagEmbedding import FlagModel
213
+ model = FlagModel('/opt/home/kevinhu/data/bge-large-zh-v1.5/',
214
+ query_instruction_for_retrieval="为这个句子生成表示以用于检索相关文章:",
215
+ use_fp16=torch.cuda.is_available())
216
  tm_fnm = f"res/{comm}-{mod}.tm"
 
217
  tm = findMaxDt(tm_fnm)
218
+ rows = collect(comm, mod, tm)
219
+ if len(rows) == 0:return
 
 
 
220
 
221
+ rm_doc_from_kb(rows.loc[rows.is_deleted == True])
222
+ rows = rows.loc[rows.is_deleted == False].reset_index(drop=True)
223
+ if len(rows) == 0:return
224
+ tmf = open(tm_fnm, "a+")
225
+ for _, r in rows.iterrows():
226
  cks = build(r)
227
+ if not cks:
228
+ tmf.write(str(r["updated_at"]) + "\n")
229
+ continue
230
  ## TODO: exception handler
231
  ## set_progress(r["did"], -1, "ERROR: ")
232
  embedding(cks)
233
+
234
+ set_progress(r["kb2doc_id"], random.randint(70, 95)/100.,
235
+ "Finished embedding! Start to build index!")
236
+ init_kb(r)
237
+ es_r = ES.bulk(cks, index_name(r["uid"]))
238
+ if es_r:
239
+ set_progress(r["kb2doc_id"], -1, "Index failure!")
240
+ print(es_r)
241
+ else: set_progress(r["kb2doc_id"], 1., "Done!")
242
  tmf.write(str(r["updated_at"]) + "\n")
243
  tmf.close()
244
 
 
246
  if __name__ == "__main__":
247
  from mpi4py import MPI
248
  comm = MPI.COMM_WORLD
249
+ main(comm.Get_size(), comm.Get_rank())
 
250
 
python/util/config.py CHANGED
@@ -14,9 +14,9 @@ class Config:
14
  self.env = env
15
  if env == "spark":CF.read("./cv.cnf")
16
 
17
- def get(self, key):
18
  global CF
19
- return CF.get(self.env, key)
20
 
21
  def init(env):
22
  return Config(env)
 
14
  self.env = env
15
  if env == "spark":CF.read("./cv.cnf")
16
 
17
+ def get(self, key, default=None):
18
  global CF
19
+ return CF[self.env].get(key, default)
20
 
21
  def init(env):
22
  return Config(env)
python/util/db_conn.py CHANGED
@@ -49,7 +49,11 @@ class Postgres(object):
49
  cur = self.conn.cursor()
50
  cur.execute(sql)
51
  updated_rows = cur.rowcount
 
 
 
52
  conn.commit()
 
53
  cur.close()
54
  return updated_rows
55
  except Exception as e:
 
49
  cur = self.conn.cursor()
50
  cur.execute(sql)
51
  updated_rows = cur.rowcount
52
+ self.conn.commit()
57
  cur.close()
58
  return updated_rows
59
  except Exception as e:
python/util/es_conn.py CHANGED
@@ -5,10 +5,10 @@ import time
5
  import copy
6
  import elasticsearch
7
  from elasticsearch import Elasticsearch
8
- from elasticsearch_dsl import UpdateByQuery, Search, Index
9
  from util import config
10
 
11
- print("Elasticsearch version: ", elasticsearch.__version__)
12
 
13
 
14
  def instance(env):
@@ -20,7 +20,7 @@ def instance(env):
20
  timeout=600
21
  )
22
 
23
- print("ES: ", ES_DRESS, ES.info())
24
 
25
  return ES
26
 
@@ -31,7 +31,7 @@ class HuEs:
31
  self.info = {}
32
  self.config = config.init(env)
33
  self.conn()
34
- self.idxnm = self.config.get("idx_nm","")
35
  if not self.es.ping():
36
  raise Exception("Can't connect to ES cluster")
37
 
@@ -46,6 +46,7 @@ class HuEs:
46
  break
47
  except Exception as e:
48
  logging.error("Fail to connect to es: " + str(e))
 
49
 
50
  def version(self):
51
  v = self.info.get("version", {"number": "5.6"})
@@ -121,7 +122,6 @@ class HuEs:
121
  acts.append(
122
  {"update": {"_id": id, "_index": ids[id]["_index"]}, "retry_on_conflict": 100})
123
  acts.append({"doc": d, "doc_as_upsert": "true"})
124
- logging.info("bulk upsert: %s" % id)
125
 
126
  res = []
127
  for _ in range(100):
@@ -148,7 +148,6 @@ class HuEs:
148
  return res
149
  except Exception as e:
150
  logging.warn("Fail to bulk: " + str(e))
151
- print(e)
152
  if re.search(r"(Timeout|time out)", str(e), re.IGNORECASE):
153
  time.sleep(3)
154
  continue
@@ -229,7 +228,7 @@ class HuEs:
229
  return False
230
 
231
  def search(self, q, idxnm=None, src=False, timeout="2s"):
232
- print(json.dumps(q, ensure_ascii=False))
233
  for i in range(3):
234
  try:
235
  res = self.es.search(index=(self.idxnm if not idxnm else idxnm),
@@ -271,9 +270,31 @@ class HuEs:
271
  str(e) + "【Q】:" + str(q.to_dict()))
272
  if str(e).find("Timeout") > 0 or str(e).find("Conflict") > 0:
273
  continue
 
274
 
275
  return False
276
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
277
  def deleteByQuery(self, query, idxnm=""):
278
  for i in range(3):
279
  try:
@@ -307,7 +328,6 @@ class HuEs:
307
  routing=routing, refresh=False) # , doc_type="_doc")
308
  return True
309
  except Exception as e:
310
- print(e)
311
  logging.error("ES update exception: " + str(e) + " id:" + str(id) + ", version:" + str(self.version()) +
312
  json.dumps(script, ensure_ascii=False))
313
  if str(e).find("Timeout") > 0:
 
5
  import copy
6
  import elasticsearch
7
  from elasticsearch import Elasticsearch
8
+ from elasticsearch_dsl import UpdateByQuery, Search, Index, Q
9
  from util import config
10
 
11
+ logging.info("Elasticsearch version: %s", elasticsearch.__version__)
12
 
13
 
14
  def instance(env):
 
20
  timeout=600
21
  )
22
 
23
+ logging.info("ES: %s %s", ES_DRESS, ES.info())
24
 
25
  return ES
26
 
 
31
  self.info = {}
32
  self.config = config.init(env)
33
  self.conn()
34
+ self.idxnm = self.config.get("idx_nm", "")
35
  if not self.es.ping():
36
  raise Exception("Can't connect to ES cluster")
37
 
 
46
  break
47
  except Exception as e:
48
  logging.error("Fail to connect to es: " + str(e))
49
+ time.sleep(1)
50
 
51
  def version(self):
52
  v = self.info.get("version", {"number": "5.6"})
 
122
  acts.append(
123
  {"update": {"_id": id, "_index": ids[id]["_index"]}, "retry_on_conflict": 100})
124
  acts.append({"doc": d, "doc_as_upsert": "true"})
 
125
 
126
  res = []
127
  for _ in range(100):
 
148
  return res
149
  except Exception as e:
150
  logging.warn("Fail to bulk: " + str(e))
 
151
  if re.search(r"(Timeout|time out)", str(e), re.IGNORECASE):
152
  time.sleep(3)
153
  continue
 
228
  return False
229
 
230
  def search(self, q, idxnm=None, src=False, timeout="2s"):
231
+ if not isinstance(q, dict): q = Search().query(q).to_dict()
232
  for i in range(3):
233
  try:
234
  res = self.es.search(index=(self.idxnm if not idxnm else idxnm),
 
270
  str(e) + "【Q】:" + str(q.to_dict()))
271
  if str(e).find("Timeout") > 0 or str(e).find("Conflict") > 0:
272
  continue
273
+ self.conn()
274
 
275
  return False
276
 
277
+
278
+ def updateScriptByQuery(self, q, scripts, idxnm=None):
279
+ ubq = UpdateByQuery(index=self.idxnm if not idxnm else idxnm).using(self.es).query(q)
280
+ ubq = ubq.script(source=scripts)
281
+ ubq = ubq.params(refresh=True)
282
+ ubq = ubq.params(slices=5)
283
+ ubq = ubq.params(conflicts="proceed")
284
+ for i in range(3):
285
+ try:
286
+ r = ubq.execute()
287
+ return True
288
+ except Exception as e:
289
+ logging.error("ES updateByQuery exception: " +
290
+ str(e) + "【Q】:" + str(q.to_dict()))
291
+ if str(e).find("Timeout") > 0 or str(e).find("Conflict") > 0:
292
+ continue
293
+ self.conn()
294
+
295
+ return False
296
+
297
+
298
  def deleteByQuery(self, query, idxnm=""):
299
  for i in range(3):
300
  try:
 
328
  routing=routing, refresh=False) # , doc_type="_doc")
329
  return True
330
  except Exception as e:
 
331
  logging.error("ES update exception: " + str(e) + " id:" + str(id) + ", version:" + str(self.version()) +
332
  json.dumps(script, ensure_ascii=False))
333
  if str(e).find("Timeout") > 0:
python/util/minio_conn.py ADDED
@@ -0,0 +1,73 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ import logging
2
+ import time
3
+ from util import config
4
+ from minio import Minio
5
+ from io import BytesIO
6
+
7
+ class HuMinio(object):
8
+ def __init__(self, env):
9
+ self.config = config.init(env)
10
+ self.conn = None
11
+ self.__open__()
12
+
13
+ def __open__(self):
14
+ try:
15
+ if self.conn:self.__close__()
16
+ except Exception as e:
17
+ pass
18
+
19
+ try:
20
+ self.conn = Minio(self.config.get("minio_host"),
21
+ access_key=self.config.get("minio_usr"),
22
+ secret_key=self.config.get("minio_pwd"),
23
+ secure=False
24
+ )
25
+ except Exception as e:
26
+ logging.error("Fail to connect %s "%self.config.get("minio_host") + str(e))
27
+
28
+
29
+ def __close__(self):
30
+ del self.conn
31
+ self.conn = None
32
+
33
+
34
+ def put(self, bucket, fnm, binary):
35
+ for _ in range(10):
36
+ try:
37
+ if not self.conn.bucket_exists(bucket):
38
+ self.conn.make_bucket(bucket)
39
+
40
+ r = self.conn.put_object(bucket, fnm,
41
+ BytesIO(binary),
42
+ len(binary)
43
+ )
44
+ return r
45
+ except Exception as e:
46
+ logging.error(f"Fail put {bucket}/{fnm}: "+str(e))
47
+ self.__open__()
48
+ time.sleep(1)
49
+
50
+
51
+ def get(self, bucket, fnm):
52
+ for _ in range(10):
53
+ try:
54
+ r = self.conn.get_object(bucket, fnm)
55
+ return r.read()
56
+ except Exception as e:
57
+ logging.error(f"Fail get {bucket}/{fnm}: "+str(e))
58
+ self.__open__()
59
+ time.sleep(1)
60
+ return
61
+
62
+ if __name__ == "__main__":
63
+ conn = HuMinio("infiniflow")
64
+ fnm = "/opt/home/kevinhu/docgpt/upload/13/11-408.jpg"
65
+ from PIL import Image
66
+ img = Image.open(fnm)
67
+ buff = BytesIO()
68
+ img.save(buff, format='JPEG')
69
+ print(conn.put("test", "11-408.jpg", buff.getvalue()))
70
+ bts = conn.get("test", "11-408.jpg")
71
+ img = Image.open(BytesIO(bts))
72
+ img.save("test.jpg")
73
+
src/api/dialog_info.rs CHANGED
@@ -1,5 +1,8 @@
1
  use std::collections::HashMap;
2
- use actix_web::{get, HttpResponse, post, web};
 
 
 
3
  use crate::api::JsonResponse;
4
  use crate::AppState;
5
  use crate::entity::dialog_info;
@@ -7,13 +10,46 @@ use crate::errors::AppError;
7
  use crate::service::dialog_info::Query;
8
  use crate::service::dialog_info::Mutation;
9
 
10
- #[get("/v1.0/dialogs")]
11
- async fn list(model: web::Json<dialog_info::Model>, data: web::Data<AppState>) -> Result<HttpResponse, AppError> {
12
- let dialogs = Query::find_dialog_infos_by_uid(&data.conn, model.uid).await?;
13
-
 
 
 
14
  let mut result = HashMap::new();
15
- result.insert("dialogs", dialogs);
 
 
 
 
 
 
 
 
 
 
 
 
16
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
17
  let json_response = JsonResponse {
18
  code: 200,
19
  err: "".to_owned(),
@@ -25,17 +61,19 @@ async fn list(model: web::Json<dialog_info::Model>, data: web::Data<AppState>) -
25
  .body(serde_json::to_string(&json_response)?))
26
  }
27
 
28
- #[get("/v1.0/dialog")]
29
- async fn detail(model: web::Json<dialog_info::Model>, data: web::Data<AppState>) -> Result<HttpResponse, AppError> {
30
- let dialogs = Query::find_dialog_info_by_id(&data.conn, model.dialog_id).await?;
31
-
32
- let mut result = HashMap::new();
33
- result.insert("dialogs", dialogs);
 
 
34
 
35
  let json_response = JsonResponse {
36
  code: 200,
37
  err: "".to_owned(),
38
- data: result,
39
  };
40
 
41
  Ok(HttpResponse::Ok()
@@ -43,14 +81,30 @@ async fn detail(model: web::Json<dialog_info::Model>, data: web::Data<AppState>)
43
  .body(serde_json::to_string(&json_response)?))
44
  }
45
 
46
- #[post("/v1.0/delete_dialog")]
47
- async fn delete(model: web::Json<dialog_info::Model>, data: web::Data<AppState>) -> Result<HttpResponse, AppError> {
48
- let _ = Mutation::delete_dialog_info(&data.conn, model.dialog_id).await?;
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
49
 
50
  let json_response = JsonResponse {
51
  code: 200,
52
  err: "".to_owned(),
53
- data: (),
54
  };
55
 
56
  Ok(HttpResponse::Ok()
@@ -58,19 +112,31 @@ async fn delete(model: web::Json<dialog_info::Model>, data: web::Data<AppState>)
58
  .body(serde_json::to_string(&json_response)?))
59
  }
60
 
61
- #[post("/v1.0/create_kb")]
62
- async fn create(model: web::Json<dialog_info::Model>, data: web::Data<AppState>) -> Result<HttpResponse, AppError> {
63
- let model = Mutation::create_dialog_info(&data.conn, model.into_inner()).await?;
64
-
65
- let mut result = HashMap::new();
66
- result.insert("dialog_id", model.dialog_id.unwrap());
67
 
68
- let json_response = JsonResponse {
 
 
 
 
 
 
 
 
69
  code: 200,
70
  err: "".to_owned(),
71
- data: result,
72
  };
73
 
 
 
 
 
 
 
 
 
 
 
74
  Ok(HttpResponse::Ok()
75
  .content_type("application/json")
76
  .body(serde_json::to_string(&json_response)?))
 
1
  use std::collections::HashMap;
2
+ use actix_web::{HttpResponse, post, web};
3
+ use serde::Deserialize;
4
+ use serde_json::Value;
5
+ use serde_json::json;
6
  use crate::api::JsonResponse;
7
  use crate::AppState;
8
  use crate::entity::dialog_info;
 
10
  use crate::service::dialog_info::Query;
11
  use crate::service::dialog_info::Mutation;
12
 
13
+ #[derive(Debug, Deserialize)]
14
+ pub struct ListParams {
15
+ pub uid: i64,
16
+ pub dialog_id: Option<i64>
17
+ }
18
+ #[post("/v1.0/dialogs")]
19
+ async fn list(params: web::Json<ListParams>, data: web::Data<AppState>) -> Result<HttpResponse, AppError> {
20
  let mut result = HashMap::new();
21
+ if let Some(dia_id) = params.dialog_id{
22
+ let dia = Query::find_dialog_info_by_id(&data.conn, dia_id).await?.unwrap();
23
+ let kb = crate::service::kb_info::Query::find_kb_info_by_id(&data.conn, dia.kb_id).await?.unwrap();
24
+ print!("{:?}", dia.history);
25
+ let hist:Value = serde_json::from_str(&dia.history)?;
26
+ let detail = json!({
27
+ "dialog_id": dia_id,
28
+ "dialog_name": dia.dialog_name.to_owned(),
29
+ "created_at": dia.created_at.to_string().to_owned(),
30
+ "updated_at": dia.updated_at.to_string().to_owned(),
31
+ "history": hist,
32
+ "kb_info": kb
33
+ });
34
 
35
+ result.insert("dialogs", vec![detail]);
36
+ }
37
+ else{
38
+ let mut dias = Vec::<Value>::new();
39
+ for dia in Query::find_dialog_infos_by_uid(&data.conn, params.uid).await?{
40
+ let kb = crate::service::kb_info::Query::find_kb_info_by_id(&data.conn, dia.kb_id).await?.unwrap();
41
+ let hist:Value = serde_json::from_str(&dia.history)?;
42
+ dias.push(json!({
43
+ "dialog_id": dia.dialog_id,
44
+ "dialog_name": dia.dialog_name.to_owned(),
45
+ "created_at": dia.created_at.to_string().to_owned(),
46
+ "updated_at": dia.updated_at.to_string().to_owned(),
47
+ "history": hist,
48
+ "kb_info": kb
49
+ }));
50
+ }
51
+ result.insert("dialogs", dias);
52
+ }
53
  let json_response = JsonResponse {
54
  code: 200,
55
  err: "".to_owned(),
 
61
  .body(serde_json::to_string(&json_response)?))
62
  }
63
 
64
+ #[derive(Debug, Deserialize)]
65
+ pub struct RmParams {
66
+ pub uid: i64,
67
+ pub dialog_id: i64
68
+ }
69
+ #[post("/v1.0/delete_dialog")]
70
+ async fn delete(params: web::Json<RmParams>, data: web::Data<AppState>) -> Result<HttpResponse, AppError> {
71
+ let _ = Mutation::delete_dialog_info(&data.conn, params.dialog_id).await?;
72
 
73
  let json_response = JsonResponse {
74
  code: 200,
75
  err: "".to_owned(),
76
+ data: (),
77
  };
78
 
79
  Ok(HttpResponse::Ok()
 
81
  .body(serde_json::to_string(&json_response)?))
82
  }
83
 
84
+ #[derive(Debug, Deserialize)]
85
+ pub struct CreateParams {
86
+ pub uid: i64,
87
+ pub dialog_id: Option<i64>,
88
+ pub kb_id: i64,
89
+ pub name: String
90
+ }
91
+ #[post("/v1.0/create_dialog")]
92
+ async fn create(param: web::Json<CreateParams>, data: web::Data<AppState>) -> Result<HttpResponse, AppError> {
93
+ let mut result = HashMap::new();
94
+ if let Some(dia_id) = param.dialog_id {
95
+ result.insert("dialog_id", dia_id);
96
+ let dia = Query::find_dialog_info_by_id(&data.conn, dia_id).await?;
97
+ let _ = Mutation::update_dialog_info_by_id(&data.conn, dia_id, &param.name, &dia.unwrap().history).await?;
98
+ }
99
+ else{
100
+ let dia = Mutation::create_dialog_info(&data.conn, param.uid, param.kb_id, &param.name).await?;
101
+ result.insert("dialog_id", dia.dialog_id.unwrap());
102
+ }
103
 
104
  let json_response = JsonResponse {
105
  code: 200,
106
  err: "".to_owned(),
107
+ data: result,
108
  };
109
 
110
  Ok(HttpResponse::Ok()
 
112
  .body(serde_json::to_string(&json_response)?))
113
  }
114
 
 
 
 
 
 
 
115
 
116
+ #[derive(Debug, Deserialize)]
117
+ pub struct UpdateHistoryParams {
118
+ pub uid: i64,
119
+ pub dialog_id: i64,
120
+ pub history: Value
121
+ }
122
+ #[post("/v1.0/update_history")]
123
+ async fn update_history(param: web::Json<UpdateHistoryParams>, data: web::Data<AppState>) -> Result<HttpResponse, AppError> {
124
+ let mut json_response = JsonResponse {
125
  code: 200,
126
  err: "".to_owned(),
127
+ data: (),
128
  };
129
 
130
+ if let Some(dia) = Query::find_dialog_info_by_id(&data.conn, param.dialog_id).await?{
131
+ let _ = Mutation::update_dialog_info_by_id(&data.conn, param.dialog_id, &dia.dialog_name,
132
+ &param.history.to_string()).await?;
133
+ }
134
+ else{
135
+ json_response.code = 500;
136
+ json_response.err = "Can't find dialog data!".to_owned();
137
+ }
138
+
139
+
140
  Ok(HttpResponse::Ok()
141
  .content_type("application/json")
142
  .body(serde_json::to_string(&json_response)?))
src/api/doc_info.rs CHANGED
@@ -1,12 +1,8 @@
1
  use std::collections::HashMap;
2
  use std::io::Write;
3
- use std::slice::Chunks;
4
- //use actix_multipart::{Multipart, MultipartError, Field};
5
  use actix_multipart_extract::{File, Multipart, MultipartForm};
6
  use actix_web::{get, HttpResponse, post, web};
7
- use actix_web::web::Bytes;
8
- use chrono::Local;
9
- use futures_util::StreamExt;
10
  use sea_orm::DbConn;
11
  use crate::api::JsonResponse;
12
  use crate::AppState;
@@ -15,14 +11,17 @@ use crate::errors::AppError;
15
  use crate::service::doc_info::{Mutation, Query};
16
  use serde::Deserialize;
17
 
 
 
 
18
 
19
  #[derive(Debug, Deserialize)]
20
- pub struct Params {
21
  pub uid: i64,
22
  pub filter: FilterParams,
23
  pub sortby: String,
24
- pub page: u64,
25
- pub per_page: u64,
26
  }
27
 
28
  #[derive(Debug, Deserialize)]
@@ -33,14 +32,8 @@ pub struct FilterParams {
33
  pub kb_id: Option<i64>,
34
  }
35
 
36
- #[derive(Debug, Deserialize)]
37
- pub struct MvParams {
38
- pub dids: Vec<i64>,
39
- pub dest_did: i64,
40
- }
41
-
42
- #[get("/v1.0/docs")]
43
- async fn list(params: web::Json<Params>, data: web::Data<AppState>) -> Result<HttpResponse, AppError> {
44
  let docs = Query::find_doc_infos_by_params(&data.conn, params.into_inner())
45
  .await?;
46
 
@@ -69,21 +62,21 @@ pub struct UploadForm {
69
  #[post("/v1.0/upload")]
70
  async fn upload(payload: Multipart<UploadForm>, data: web::Data<AppState>) -> Result<HttpResponse, AppError> {
71
  let uid = payload.uid;
72
- async fn add_number_to_filename(file_name: String, conn:&DbConn, uid:i64) -> String {
73
  let mut i = 0;
74
  let mut new_file_name = file_name.to_string();
75
  let arr: Vec<&str> = file_name.split(".").collect();
76
  let suffix = String::from(arr[arr.len()-1]);
77
  let preffix = arr[..arr.len()-1].join(".");
78
- let mut docs = Query::find_doc_infos_by_name(conn, uid, new_file_name.clone()).await.unwrap();
79
  while docs.len()>0 {
80
  i += 1;
81
  new_file_name = format!("{}_{}.{}", preffix, i, suffix);
82
- docs = Query::find_doc_infos_by_name(conn, uid, new_file_name.clone()).await.unwrap();
83
  }
84
  new_file_name
85
  }
86
- let fnm = add_number_to_filename(payload.file_field.name.clone(), &data.conn, uid).await;
87
 
88
  std::fs::create_dir_all(format!("./upload/{}/", uid));
89
  let filepath = format!("./upload/{}/{}-{}", payload.uid, payload.did, fnm.clone());
@@ -95,13 +88,11 @@ async fn upload(payload: Multipart<UploadForm>, data: web::Data<AppState>) -> Re
95
  uid: uid,
96
  doc_name: fnm,
97
  size: payload.file_field.bytes.len() as i64,
98
- kb_infos: Vec::new(),
99
- kb_progress: 0.0,
100
- kb_progress_msg: "".to_string(),
101
  location: filepath,
102
  r#type: "doc".to_string(),
103
- created_at: Local::now().date_naive(),
104
- updated_at: Local::now().date_naive(),
 
105
  }).await?;
106
 
107
  let _ = Mutation::place_doc(&data.conn, payload.did, doc.did.unwrap()).await?;
@@ -109,11 +100,14 @@ async fn upload(payload: Multipart<UploadForm>, data: web::Data<AppState>) -> Re
109
  Ok(HttpResponse::Ok().body("File uploaded successfully"))
110
  }
111
 
 
 
 
 
 
112
  #[post("/v1.0/delete_docs")]
113
- async fn delete(doc_ids: web::Json<Vec<i64>>, data: web::Data<AppState>) -> Result<HttpResponse, AppError> {
114
- for doc_id in doc_ids.iter() {
115
- let _ = Mutation::delete_doc_info(&data.conn, *doc_id).await?;
116
- }
117
 
118
  let json_response = JsonResponse {
119
  code: 200,
@@ -126,6 +120,13 @@ async fn delete(doc_ids: web::Json<Vec<i64>>, data: web::Data<AppState>) -> Resu
126
  .body(serde_json::to_string(&json_response)?))
127
  }
128
 
 
 
 
 
 
 
 
129
  #[post("/v1.0/mv_docs")]
130
  async fn mv(params: web::Json<MvParams>, data: web::Data<AppState>) -> Result<HttpResponse, AppError> {
131
  Mutation::mv_doc_info(&data.conn, params.dest_did, &params.dids).await?;
@@ -140,3 +141,61 @@ async fn mv(params: web::Json<MvParams>, data: web::Data<AppState>) -> Result<Ht
140
  .content_type("application/json")
141
  .body(serde_json::to_string(&json_response)?))
142
  }
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
  use std::collections::HashMap;
2
  use std::io::Write;
 
 
3
  use actix_multipart_extract::{File, Multipart, MultipartForm};
4
  use actix_web::{get, HttpResponse, post, web};
5
+ use chrono::{Utc, FixedOffset};
 
 
6
  use sea_orm::DbConn;
7
  use crate::api::JsonResponse;
8
  use crate::AppState;
 
11
  use crate::service::doc_info::{Mutation, Query};
12
  use serde::Deserialize;
13
 
14
+ fn now()->chrono::DateTime<FixedOffset>{
15
+ Utc::now().with_timezone(&FixedOffset::east_opt(3600*8).unwrap())
16
+ }
17
 
18
  #[derive(Debug, Deserialize)]
19
+ pub struct ListParams {
20
  pub uid: i64,
21
  pub filter: FilterParams,
22
  pub sortby: String,
23
+ pub page: Option<u32>,
24
+ pub per_page: Option<u32>,
25
  }
26
 
27
  #[derive(Debug, Deserialize)]
 
32
  pub kb_id: Option<i64>,
33
  }
34
 
35
+ #[post("/v1.0/docs")]
36
+ async fn list(params: web::Json<ListParams>, data: web::Data<AppState>) -> Result<HttpResponse, AppError> {
 
 
 
 
 
 
37
  let docs = Query::find_doc_infos_by_params(&data.conn, params.into_inner())
38
  .await?;
39
 
 
62
  #[post("/v1.0/upload")]
63
  async fn upload(payload: Multipart<UploadForm>, data: web::Data<AppState>) -> Result<HttpResponse, AppError> {
64
  let uid = payload.uid;
65
+ async fn add_number_to_filename(file_name: String, conn:&DbConn, uid:i64, parent_id:i64) -> String {
66
  let mut i = 0;
67
  let mut new_file_name = file_name.to_string();
68
  let arr: Vec<&str> = file_name.split(".").collect();
69
  let suffix = String::from(arr[arr.len()-1]);
70
  let preffix = arr[..arr.len()-1].join(".");
71
+ let mut docs = Query::find_doc_infos_by_name(conn, uid, &new_file_name, Some(parent_id)).await.unwrap();
72
  while docs.len()>0 {
73
  i += 1;
74
  new_file_name = format!("{}_{}.{}", preffix, i, suffix);
75
+ docs = Query::find_doc_infos_by_name(conn, uid, &new_file_name, Some(parent_id)).await.unwrap();
76
  }
77
  new_file_name
78
  }
79
+ let fnm = add_number_to_filename(payload.file_field.name.clone(), &data.conn, uid, payload.did).await;
80
 
81
  std::fs::create_dir_all(format!("./upload/{}/", uid));
82
  let filepath = format!("./upload/{}/{}-{}", payload.uid, payload.did, fnm.clone());
 
88
  uid: uid,
89
  doc_name: fnm,
90
  size: payload.file_field.bytes.len() as i64,
 
 
 
91
  location: filepath,
92
  r#type: "doc".to_string(),
93
+ created_at: now(),
94
+ updated_at: now(),
95
+ is_deleted:Default::default(),
96
  }).await?;
97
 
98
  let _ = Mutation::place_doc(&data.conn, payload.did, doc.did.unwrap()).await?;
 
100
  Ok(HttpResponse::Ok().body("File uploaded successfully"))
101
  }
102
 
103
+ #[derive(Deserialize, Debug)]
104
+ pub struct RmDocsParam {
105
+ uid: i64,
106
+ dids: Vec<i64>
107
+ }
108
  #[post("/v1.0/delete_docs")]
109
+ async fn delete(params: web::Json<RmDocsParam>, data: web::Data<AppState>) -> Result<HttpResponse, AppError> {
110
+ let _ = Mutation::delete_doc_info(&data.conn, &params.dids).await?;
 
 
111
 
112
  let json_response = JsonResponse {
113
  code: 200,
 
120
  .body(serde_json::to_string(&json_response)?))
121
  }
122
 
123
+ #[derive(Debug, Deserialize)]
124
+ pub struct MvParams {
125
+ pub uid:i64,
126
+ pub dids: Vec<i64>,
127
+ pub dest_did: i64,
128
+ }
129
+
130
  #[post("/v1.0/mv_docs")]
131
  async fn mv(params: web::Json<MvParams>, data: web::Data<AppState>) -> Result<HttpResponse, AppError> {
132
  Mutation::mv_doc_info(&data.conn, params.dest_did, &params.dids).await?;
 
141
  .content_type("application/json")
142
  .body(serde_json::to_string(&json_response)?))
143
  }
144
+
145
+ #[derive(Debug, Deserialize)]
146
+ pub struct NewFoldParams {
147
+ pub uid: i64,
148
+ pub parent_id: i64,
149
+ pub name: String
150
+ }
151
+
152
+ #[post("/v1.0/new_folder")]
153
+ async fn new_folder(params: web::Json<NewFoldParams>, data: web::Data<AppState>) -> Result<HttpResponse, AppError> {
154
+ let doc = Mutation::create_doc_info(&data.conn, Model {
155
+ did:Default::default(),
156
+ uid: params.uid,
157
+ doc_name: params.name.to_string(),
158
+ size:0,
159
+ r#type: "folder".to_string(),
160
+ location: "".to_owned(),
161
+ created_at: now(),
162
+ updated_at: now(),
163
+ is_deleted:Default::default(),
164
+ }).await?;
165
+ let _ = Mutation::place_doc(&data.conn, params.parent_id, doc.did.unwrap()).await?;
166
+
167
+ Ok(HttpResponse::Ok().body("Folder created successfully"))
168
+ }
169
+
170
+ #[derive(Debug, Deserialize)]
171
+ pub struct RenameParams {
172
+ pub uid: i64,
173
+ pub did: i64,
174
+ pub name: String
175
+ }
176
+
177
+ #[post("/v1.0/rename")]
178
+ async fn rename(params: web::Json<RenameParams>, data: web::Data<AppState>) -> Result<HttpResponse, AppError> {
179
+ let docs = Query::find_doc_infos_by_name(&data.conn, params.uid, &params.name, None).await?;
180
+ if docs.len()>0{
181
+ let json_response = JsonResponse {
182
+ code: 500,
183
+ err: "Name duplicated!".to_owned(),
184
+ data: (),
185
+ };
186
+ return Ok(HttpResponse::Ok()
187
+ .content_type("application/json")
188
+ .body(serde_json::to_string(&json_response)?));
189
+ }
190
+ let doc = Mutation::rename(&data.conn, params.did, &params.name).await?;
191
+
192
+ let json_response = JsonResponse {
193
+ code: 200,
194
+ err: "".to_owned(),
195
+ data: doc,
196
+ };
197
+
198
+ Ok(HttpResponse::Ok()
199
+ .content_type("application/json")
200
+ .body(serde_json::to_string(&json_response)?))
201
+ }
src/api/kb_info.rs CHANGED
@@ -60,6 +60,20 @@ async fn add_docs_to_kb(param: web::Json<AddDocs2KbParams>, data: web::Data<AppS
60
  .body(serde_json::to_string(&json_response)?))
61
  }
62
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
63
  #[get("/v1.0/kbs")]
64
  async fn list(model: web::Json<kb_info::Model>, data: web::Data<AppState>) -> Result<HttpResponse, AppError> {
65
  let kbs = Query::find_kb_infos_by_uid(&data.conn, model.uid).await?;
@@ -91,4 +105,28 @@ async fn delete(model: web::Json<kb_info::Model>, data: web::Data<AppState>) ->
91
  Ok(HttpResponse::Ok()
92
  .content_type("application/json")
93
  .body(serde_json::to_string(&json_response)?))
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
94
  }
 
60
  .body(serde_json::to_string(&json_response)?))
61
  }
62
 
63
+ #[post("/v1.0/anti_kb_docs")]
64
+ async fn anti_kb_docs(param: web::Json<AddDocs2KbParams>, data: web::Data<AppState>) -> Result<HttpResponse, AppError> {
65
+ let _ = Mutation::remove_docs(&data.conn, param.dids.to_owned(), Some(param.kb_id)).await?;
66
+
67
+ let json_response = JsonResponse {
68
+ code: 200,
69
+ err: "".to_owned(),
70
+ data: (),
71
+ };
72
+
73
+ Ok(HttpResponse::Ok()
74
+ .content_type("application/json")
75
+ .body(serde_json::to_string(&json_response)?))
76
+ }
77
  #[get("/v1.0/kbs")]
78
  async fn list(model: web::Json<kb_info::Model>, data: web::Data<AppState>) -> Result<HttpResponse, AppError> {
79
  let kbs = Query::find_kb_infos_by_uid(&data.conn, model.uid).await?;
 
105
  Ok(HttpResponse::Ok()
106
  .content_type("application/json")
107
  .body(serde_json::to_string(&json_response)?))
108
+ }
109
+
110
+ #[derive(Clone, Debug, Serialize, Deserialize)]
111
+ pub struct DocIdsParams {
112
+ pub uid: i64,
113
+ pub dids: Vec<i64>
114
+ }
115
+
116
+ #[post("/v1.0/all_relevents")]
117
+ async fn all_relevents(params: web::Json<DocIdsParams>, data: web::Data<AppState>) -> Result<HttpResponse, AppError> {
118
+ let dids = crate::service::doc_info::Query::all_descendent_ids(&data.conn, &params.dids).await?;
119
+ let mut result = HashMap::new();
120
+ let kbs = Query::find_kb_by_docs(&data.conn, dids).await?;
121
+ result.insert("kbs", kbs);
122
+ let json_response = JsonResponse {
123
+ code: 200,
124
+ err: "".to_owned(),
125
+ data: result,
126
+ };
127
+
128
+ Ok(HttpResponse::Ok()
129
+ .content_type("application/json")
130
+ .body(serde_json::to_string(&json_response)?))
131
+
132
  }
src/api/tag_info.rs CHANGED
@@ -1,6 +1,7 @@
1
  use std::collections::HashMap;
2
  use actix_web::{get, HttpResponse, post, web};
3
  use actix_web_httpauth::middleware::HttpAuthentication;
 
4
  use crate::validator;
5
  use crate::api::JsonResponse;
6
  use crate::AppState;
@@ -8,6 +9,11 @@ use crate::entity::tag_info;
8
  use crate::errors::AppError;
9
  use crate::service::tag_info::{Mutation, Query};
10
 
 
 
 
 
 
11
  #[post("/v1.0/create_tag")]
12
  async fn create(model: web::Json<tag_info::Model>, data: web::Data<AppState>) -> Result<HttpResponse, AppError> {
13
  let model = Mutation::create_tag(&data.conn, model.into_inner()).await?;
@@ -41,9 +47,12 @@ async fn delete(model: web::Json<tag_info::Model>, data: web::Data<AppState>) ->
41
  .body(serde_json::to_string(&json_response)?))
42
  }
43
 
44
- #[get("/v1.0/tags", wrap = "HttpAuthentication::bearer(validator)")]
45
- async fn list(data: web::Data<AppState>) -> Result<HttpResponse, AppError> {
46
- let tags = Query::find_tag_infos(&data.conn).await?;
 
 
 
47
 
48
  let mut result = HashMap::new();
49
  result.insert("tags", tags);
 
1
  use std::collections::HashMap;
2
  use actix_web::{get, HttpResponse, post, web};
3
  use actix_web_httpauth::middleware::HttpAuthentication;
4
+ use serde::Deserialize;
5
  use crate::validator;
6
  use crate::api::JsonResponse;
7
  use crate::AppState;
 
9
  use crate::errors::AppError;
10
  use crate::service::tag_info::{Mutation, Query};
11
 
12
+ #[derive(Debug, Deserialize)]
13
+ pub struct TagListParams {
14
+ pub uid: i64
15
+ }
16
+
17
  #[post("/v1.0/create_tag")]
18
  async fn create(model: web::Json<tag_info::Model>, data: web::Data<AppState>) -> Result<HttpResponse, AppError> {
19
  let model = Mutation::create_tag(&data.conn, model.into_inner()).await?;
 
47
  .body(serde_json::to_string(&json_response)?))
48
  }
49
 
50
+
51
+ //#[get("/v1.0/tags", wrap = "HttpAuthentication::bearer(validator)")]
52
+
53
+ #[post("/v1.0/tags")]
54
+ async fn list(param: web::Json<TagListParams>, data: web::Data<AppState>) -> Result<HttpResponse, AppError> {
55
+ let tags = Query::find_tags_by_uid(param.uid, &data.conn).await?;
56
 
57
  let mut result = HashMap::new();
58
  result.insert("tags", tags);
src/api/user_info.rs CHANGED
@@ -1,10 +1,13 @@
 
 
1
  use actix_identity::Identity;
2
- use actix_web::{get, HttpResponse, post, web};
3
  use serde::{Deserialize, Serialize};
4
  use crate::api::JsonResponse;
5
  use crate::AppState;
6
  use crate::entity::user_info::Model;
7
  use crate::errors::{AppError, UserError};
 
8
  use crate::service::user_info::Query;
9
 
10
  pub(crate) fn create_auth_token(user: &Model) -> u64 {
@@ -32,6 +35,7 @@ async fn login(
32
  ) -> Result<HttpResponse, AppError> {
33
  match Query::login(&data.conn, &input.email, &input.password).await? {
34
  Some(user) => {
 
35
  let token = create_auth_token(&user).to_string();
36
 
37
  identity.remember(token.clone());
@@ -49,4 +53,34 @@ async fn login(
49
  }
50
  None => Err(UserError::LoginFailed.into())
51
  }
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
52
  }
 
1
+ use std::collections::HashMap;
2
+
3
  use actix_identity::Identity;
4
+ use actix_web::{HttpResponse, post, web};
5
  use serde::{Deserialize, Serialize};
6
  use crate::api::JsonResponse;
7
  use crate::AppState;
8
  use crate::entity::user_info::Model;
9
  use crate::errors::{AppError, UserError};
10
+ use crate::service::user_info::Mutation;
11
  use crate::service::user_info::Query;
12
 
13
  pub(crate) fn create_auth_token(user: &Model) -> u64 {
 
35
  ) -> Result<HttpResponse, AppError> {
36
  match Query::login(&data.conn, &input.email, &input.password).await? {
37
  Some(user) => {
38
+ let _ = Mutation::update_login_status(user.uid,&data.conn).await?;
39
  let token = create_auth_token(&user).to_string();
40
 
41
  identity.remember(token.clone());
 
53
  }
54
  None => Err(UserError::LoginFailed.into())
55
  }
56
+ }
57
+
58
+ #[post("/v1.0/register")]
59
+ async fn register(model: web::Json<Model>, data: web::Data<AppState>) -> Result<HttpResponse, AppError> {
60
+ let mut result = HashMap::new();
61
+ let usr = Mutation::create_user(&data.conn, &model).await?;
62
+ result.insert("uid", usr.uid.unwrap());
63
+ let json_response = JsonResponse {
64
+ code: 200,
65
+ err: "".to_owned(),
66
+ data: result,
67
+ };
68
+
69
+ Ok(HttpResponse::Ok()
70
+ .content_type("application/json")
71
+ .body(serde_json::to_string(&json_response)?))
72
+ }
73
+
74
+ #[post("/v1.0/setting")]
75
+ async fn setting(model: web::Json<Model>, data: web::Data<AppState>) -> Result<HttpResponse, AppError> {
76
+ let _ = Mutation::update_user_by_id(&data.conn, &model).await?;
77
+ let json_response = JsonResponse {
78
+ code: 200,
79
+ err: "".to_owned(),
80
+ data: (),
81
+ };
82
+
83
+ Ok(HttpResponse::Ok()
84
+ .content_type("application/json")
85
+ .body(serde_json::to_string(&json_response)?))
86
  }
src/entity/dialog_info.rs CHANGED
@@ -1,3 +1,4 @@
 
1
  use sea_orm::entity::prelude::*;
2
  use serde::{Deserialize, Serialize};
3
 
@@ -8,13 +9,17 @@ pub struct Model {
8
  pub dialog_id: i64,
9
  #[sea_orm(index)]
10
  pub uid: i64,
 
 
11
  pub dialog_name: String,
12
  pub history: String,
13
 
14
  #[serde(skip_deserializing)]
15
- pub created_at: Date,
 
 
16
  #[serde(skip_deserializing)]
17
- pub updated_at: Date,
18
  }
19
 
20
  #[derive(Copy, Clone, Debug, EnumIter, DeriveRelation)]
 
1
+ use chrono::{DateTime, FixedOffset};
2
  use sea_orm::entity::prelude::*;
3
  use serde::{Deserialize, Serialize};
4
 
 
9
  pub dialog_id: i64,
10
  #[sea_orm(index)]
11
  pub uid: i64,
12
+ #[serde(skip_deserializing)]
13
+ pub kb_id: i64,
14
  pub dialog_name: String,
15
  pub history: String,
16
 
17
  #[serde(skip_deserializing)]
18
+ pub created_at: DateTime<FixedOffset>,
19
+ #[serde(skip_deserializing)]
20
+ pub updated_at: DateTime<FixedOffset>,
21
  #[serde(skip_deserializing)]
22
+ pub is_deleted: bool
23
  }
24
 
25
  #[derive(Copy, Clone, Debug, EnumIter, DeriveRelation)]
src/entity/doc_info.rs CHANGED
@@ -1,6 +1,7 @@
1
  use sea_orm::entity::prelude::*;
2
  use serde::{Deserialize, Serialize};
3
  use crate::entity::kb_info;
 
4
 
5
  #[derive(Clone, Debug, PartialEq, DeriveEntityModel, Deserialize, Serialize)]
6
  #[sea_orm(table_name = "doc_info")]
@@ -13,16 +14,14 @@ pub struct Model {
13
  pub size: i64,
14
  #[sea_orm(column_name = "type")]
15
  pub r#type: String,
16
- pub kb_progress: f32,
17
- pub kb_progress_msg: String,
18
  pub location: String,
19
- #[sea_orm(ignore)]
20
- pub kb_infos: Vec<kb_info::Model>,
21
-
22
  #[serde(skip_deserializing)]
23
- pub created_at: Date,
 
 
24
  #[serde(skip_deserializing)]
25
- pub updated_at: Date,
26
  }
27
 
28
  #[derive(Copy, Clone, Debug, EnumIter, DeriveRelation)]
@@ -48,7 +47,7 @@ impl Related<super::kb_info::Entity> for Entity {
48
  }
49
  }
50
 
51
- impl Related<Entity> for Entity {
52
  fn to() -> RelationDef {
53
  super::doc2_doc::Relation::Parent.def()
54
  }
 
1
  use sea_orm::entity::prelude::*;
2
  use serde::{Deserialize, Serialize};
3
  use crate::entity::kb_info;
4
+ use chrono::{DateTime, FixedOffset};
5
 
6
  #[derive(Clone, Debug, PartialEq, DeriveEntityModel, Deserialize, Serialize)]
7
  #[sea_orm(table_name = "doc_info")]
 
14
  pub size: i64,
15
  #[sea_orm(column_name = "type")]
16
  pub r#type: String,
17
+ #[serde(skip_deserializing)]
 
18
  pub location: String,
 
 
 
19
  #[serde(skip_deserializing)]
20
+ pub created_at: DateTime<FixedOffset>,
21
+ #[serde(skip_deserializing)]
22
+ pub updated_at: DateTime<FixedOffset>,
23
  #[serde(skip_deserializing)]
24
+ pub is_deleted: bool
25
  }
26
 
27
  #[derive(Copy, Clone, Debug, EnumIter, DeriveRelation)]
 
47
  }
48
  }
49
 
50
+ impl Related<super::doc2_doc::Entity> for Entity {
51
  fn to() -> RelationDef {
52
  super::doc2_doc::Relation::Parent.def()
53
  }
src/entity/kb2_doc.rs CHANGED
@@ -1,7 +1,8 @@
1
  use sea_orm::entity::prelude::*;
2
  use serde::{Deserialize, Serialize};
 
3
 
4
- #[derive(Clone, Debug, PartialEq, Eq, DeriveEntityModel, Deserialize, Serialize)]
5
  #[sea_orm(table_name = "kb2_doc")]
6
  pub struct Model {
7
  #[sea_orm(primary_key, auto_increment = true)]
@@ -10,6 +11,17 @@ pub struct Model {
10
  pub kb_id: i64,
11
  #[sea_orm(index)]
12
  pub did: i64,
 
 
 
 
 
 
 
 
 
 
 
13
  }
14
 
15
  #[derive(Debug, Clone, Copy, EnumIter)]
 
1
  use sea_orm::entity::prelude::*;
2
  use serde::{Deserialize, Serialize};
3
+ use chrono::{DateTime, FixedOffset};
4
 
5
+ #[derive(Clone, Debug, PartialEq, DeriveEntityModel, Deserialize, Serialize)]
6
  #[sea_orm(table_name = "kb2_doc")]
7
  pub struct Model {
8
  #[sea_orm(primary_key, auto_increment = true)]
 
11
  pub kb_id: i64,
12
  #[sea_orm(index)]
13
  pub did: i64,
14
+ <<<<<<< HEAD
15
+ #[serde(skip_deserializing)]
16
+ pub kb_progress: f32,
17
+ #[serde(skip_deserializing)]
18
+ pub kb_progress_msg: String,
19
+ #[serde(skip_deserializing)]
20
+ pub updated_at: DateTime<FixedOffset>,
21
+ #[serde(skip_deserializing)]
22
+ pub is_deleted: bool,
23
+ =======
24
+ >>>>>>> upstream/main
25
  }
26
 
27
  #[derive(Debug, Clone, Copy, EnumIter)]
src/entity/kb_info.rs CHANGED
@@ -1,10 +1,12 @@
1
  use sea_orm::entity::prelude::*;
2
  use serde::{Deserialize, Serialize};
 
3
 
4
  #[derive(Clone, Debug, PartialEq, Eq, DeriveEntityModel, Deserialize, Serialize)]
5
  #[sea_orm(table_name = "kb_info")]
6
  pub struct Model {
7
  #[sea_orm(primary_key, auto_increment = false)]
 
8
  pub kb_id: i64,
9
  #[sea_orm(index)]
10
  pub uid: i64,
@@ -12,9 +14,11 @@ pub struct Model {
12
  pub icon: i16,
13
 
14
  #[serde(skip_deserializing)]
15
- pub created_at: Date,
 
 
16
  #[serde(skip_deserializing)]
17
- pub updated_at: Date,
18
  }
19
 
20
  #[derive(Copy, Clone, Debug, EnumIter, DeriveRelation)]
 
1
  use sea_orm::entity::prelude::*;
2
  use serde::{Deserialize, Serialize};
3
+ use chrono::{DateTime, FixedOffset};
4
 
5
  #[derive(Clone, Debug, PartialEq, Eq, DeriveEntityModel, Deserialize, Serialize)]
6
  #[sea_orm(table_name = "kb_info")]
7
  pub struct Model {
8
  #[sea_orm(primary_key, auto_increment = false)]
9
+ #[serde(skip_deserializing)]
10
  pub kb_id: i64,
11
  #[sea_orm(index)]
12
  pub uid: i64,
 
14
  pub icon: i16,
15
 
16
  #[serde(skip_deserializing)]
17
+ pub created_at: DateTime<FixedOffset>,
18
+ #[serde(skip_deserializing)]
19
+ pub updated_at: DateTime<FixedOffset>,
20
  #[serde(skip_deserializing)]
21
+ pub is_deleted: bool,
22
  }
23
 
24
  #[derive(Copy, Clone, Debug, EnumIter, DeriveRelation)]
src/entity/tag_info.rs CHANGED
@@ -1,23 +1,34 @@
1
  use sea_orm::entity::prelude::*;
2
  use serde::{Deserialize, Serialize};
 
3
 
4
- #[derive(Clone, Debug, PartialEq, Eq, DeriveEntityModel, Deserialize, Serialize)]
5
  #[sea_orm(table_name = "tag_info")]
6
  pub struct Model {
7
  #[sea_orm(primary_key)]
8
  #[serde(skip_deserializing)]
9
  pub tid: i64,
 
10
  pub uid: i64,
11
  pub tag_name: String,
 
 
 
 
 
 
 
 
12
  pub regx: Option<String>,
13
  pub color: u16,
14
  pub icon: u16,
15
  pub dir: Option<String>,
 
16
 
17
  #[serde(skip_deserializing)]
18
- pub created_at: Date,
19
  #[serde(skip_deserializing)]
20
- pub updated_at: Date,
21
  }
22
 
23
  #[derive(Copy, Clone, Debug, EnumIter, DeriveRelation)]
 
1
  use sea_orm::entity::prelude::*;
2
  use serde::{Deserialize, Serialize};
3
+ use chrono::{DateTime, FixedOffset};
4
 
5
+ #[derive(Clone, Debug, PartialEq, DeriveEntityModel, Deserialize, Serialize)]
6
  #[sea_orm(table_name = "tag_info")]
7
  pub struct Model {
8
  #[sea_orm(primary_key)]
9
  #[serde(skip_deserializing)]
10
  pub tid: i64,
11
+ #[sea_orm(index)]
12
  pub uid: i64,
13
  pub tag_name: String,
14
+ <<<<<<< HEAD
15
+ #[serde(skip_deserializing)]
16
+ pub regx: String,
17
+ pub color: i16,
18
+ pub icon: i16,
19
+ #[serde(skip_deserializing)]
20
+ pub folder_id: i64,
21
+ =======
22
  pub regx: Option<String>,
23
  pub color: u16,
24
  pub icon: u16,
25
  pub dir: Option<String>,
26
+ >>>>>>> upstream/main
27
 
28
  #[serde(skip_deserializing)]
29
+ pub created_at: DateTime<FixedOffset>,
30
  #[serde(skip_deserializing)]
31
+ pub updated_at: DateTime<FixedOffset>,
32
  }
33
 
34
  #[derive(Copy, Clone, Debug, EnumIter, DeriveRelation)]
src/entity/user_info.rs CHANGED
@@ -1,5 +1,6 @@
1
  use sea_orm::entity::prelude::*;
2
  use serde::{Deserialize, Serialize};
 
3
 
4
  #[derive(Clone, Debug, PartialEq, Eq, Hash, DeriveEntityModel, Deserialize, Serialize)]
5
  #[sea_orm(table_name = "user_info")]
@@ -9,19 +10,22 @@ pub struct Model {
9
  pub uid: i64,
10
  pub email: String,
11
  pub nickname: String,
12
- pub avatar_url: Option<String>,
13
- pub color_schema: String,
14
  pub list_style: String,
15
  pub language: String,
16
  pub password: String,
17
 
18
  #[serde(skip_deserializing)]
19
- pub created_at: Date,
20
  #[serde(skip_deserializing)]
21
- pub updated_at: Date,
 
 
22
  }
23
 
24
  #[derive(Copy, Clone, Debug, EnumIter, DeriveRelation)]
25
  pub enum Relation {}
26
 
 
27
  impl ActiveModelBehavior for ActiveModel {}
 
1
  use sea_orm::entity::prelude::*;
2
  use serde::{Deserialize, Serialize};
3
+ use chrono::{DateTime, FixedOffset};
4
 
5
  #[derive(Clone, Debug, PartialEq, Eq, Hash, DeriveEntityModel, Deserialize, Serialize)]
6
  #[sea_orm(table_name = "user_info")]
 
10
  pub uid: i64,
11
  pub email: String,
12
  pub nickname: String,
13
+ pub avatar_base64: String,
14
+ pub color_scheme: String,
15
  pub list_style: String,
16
  pub language: String,
17
  pub password: String,
18
 
19
  #[serde(skip_deserializing)]
20
+ pub last_login_at: DateTime<FixedOffset>,
21
  #[serde(skip_deserializing)]
22
+ pub created_at: DateTime<FixedOffset>,
23
+ #[serde(skip_deserializing)]
24
+ pub updated_at: DateTime<FixedOffset>,
25
  }
26
 
27
  #[derive(Copy, Clone, Debug, EnumIter, DeriveRelation)]
28
  pub enum Relation {}
29
 
30
+
31
  impl ActiveModelBehavior for ActiveModel {}
src/main.rs CHANGED
@@ -98,16 +98,25 @@ fn init(cfg: &mut web::ServiceConfig) {
98
  cfg.service(api::kb_info::delete);
99
  cfg.service(api::kb_info::list);
100
  cfg.service(api::kb_info::add_docs_to_kb);
 
 
 
 
 
101
 
102
  cfg.service(api::doc_info::list);
103
  cfg.service(api::doc_info::delete);
104
  cfg.service(api::doc_info::mv);
105
  cfg.service(api::doc_info::upload);
 
 
106
 
107
  cfg.service(api::dialog_info::list);
108
  cfg.service(api::dialog_info::delete);
109
- cfg.service(api::dialog_info::detail);
110
  cfg.service(api::dialog_info::create);
 
111
 
112
  cfg.service(api::user_info::login);
 
 
113
  }
 
98
  cfg.service(api::kb_info::delete);
99
  cfg.service(api::kb_info::list);
100
  cfg.service(api::kb_info::add_docs_to_kb);
101
+ <<<<<<< HEAD
102
+ cfg.service(api::kb_info::anti_kb_docs);
103
+ cfg.service(api::kb_info::all_relevents);
104
+ =======
105
+ >>>>>>> upstream/main
106
 
107
  cfg.service(api::doc_info::list);
108
  cfg.service(api::doc_info::delete);
109
  cfg.service(api::doc_info::mv);
110
  cfg.service(api::doc_info::upload);
111
+ cfg.service(api::doc_info::new_folder);
112
+ cfg.service(api::doc_info::rename);
113
 
114
  cfg.service(api::dialog_info::list);
115
  cfg.service(api::dialog_info::delete);
 
116
  cfg.service(api::dialog_info::create);
117
+ cfg.service(api::dialog_info::update_history);
118
 
119
  cfg.service(api::user_info::login);
120
+ cfg.service(api::user_info::register);
121
+ cfg.service(api::user_info::setting);
122
  }
src/service/dialog_info.rs CHANGED
@@ -1,11 +1,15 @@
1
- use chrono::Local;
2
- use sea_orm::{ActiveModelTrait, DbConn, DbErr, DeleteResult, EntityTrait, PaginatorTrait, QueryOrder};
 
3
  use sea_orm::ActiveValue::Set;
4
  use sea_orm::QueryFilter;
5
  use sea_orm::ColumnTrait;
6
  use crate::entity::dialog_info;
7
  use crate::entity::dialog_info::Entity;
8
 
 
 
 
9
  pub struct Query;
10
 
11
  impl Query {
@@ -20,6 +24,7 @@ impl Query {
20
  pub async fn find_dialog_infos_by_uid(db: &DbConn, uid: i64) -> Result<Vec<dialog_info::Model>, DbErr> {
21
  Entity::find()
22
  .filter(dialog_info::Column::Uid.eq(uid))
 
23
  .all(db)
24
  .await
25
  }
@@ -45,15 +50,19 @@ pub struct Mutation;
45
  impl Mutation {
46
  pub async fn create_dialog_info(
47
  db: &DbConn,
48
- form_data: dialog_info::Model,
 
 
49
  ) -> Result<dialog_info::ActiveModel, DbErr> {
50
  dialog_info::ActiveModel {
51
  dialog_id: Default::default(),
52
- uid: Set(form_data.uid.to_owned()),
53
- dialog_name: Set(form_data.dialog_name.to_owned()),
54
- history: Set(form_data.history.to_owned()),
55
- created_at: Set(Local::now().date_naive()),
56
- updated_at: Set(Local::now().date_naive()),
 
 
57
  }
58
  .save(db)
59
  .await
@@ -61,35 +70,25 @@ impl Mutation {
61
 
62
  pub async fn update_dialog_info_by_id(
63
  db: &DbConn,
64
- id: i64,
65
- form_data: dialog_info::Model,
66
- ) -> Result<dialog_info::Model, DbErr> {
67
- let dialog_info: dialog_info::ActiveModel = Entity::find_by_id(id)
68
- .one(db)
69
- .await?
70
- .ok_or(DbErr::Custom("Cannot find.".to_owned()))
71
- .map(Into::into)?;
72
-
73
- dialog_info::ActiveModel {
74
- dialog_id: dialog_info.dialog_id,
75
- uid: dialog_info.uid,
76
- dialog_name: Set(form_data.dialog_name.to_owned()),
77
- history: Set(form_data.history.to_owned()),
78
- created_at: Default::default(),
79
- updated_at: Set(Local::now().date_naive()),
80
- }
81
- .update(db)
82
  .await
83
  }
84
 
85
- pub async fn delete_dialog_info(db: &DbConn, kb_id: i64) -> Result<DeleteResult, DbErr> {
86
- let tag: dialog_info::ActiveModel = Entity::find_by_id(kb_id)
87
- .one(db)
88
- .await?
89
- .ok_or(DbErr::Custom("Cannot find.".to_owned()))
90
- .map(Into::into)?;
91
-
92
- tag.delete(db).await
93
  }
94
 
95
  pub async fn delete_all_dialog_infos(db: &DbConn) -> Result<DeleteResult, DbErr> {
 
1
+ use chrono::{Local, FixedOffset, Utc};
2
+ use migration::Expr;
3
+ use sea_orm::{ActiveModelTrait, DbConn, DbErr, DeleteResult, EntityTrait, PaginatorTrait, QueryOrder, UpdateResult};
4
  use sea_orm::ActiveValue::Set;
5
  use sea_orm::QueryFilter;
6
  use sea_orm::ColumnTrait;
7
  use crate::entity::dialog_info;
8
  use crate::entity::dialog_info::Entity;
9
 
10
+ fn now()->chrono::DateTime<FixedOffset>{
11
+ Utc::now().with_timezone(&FixedOffset::east_opt(3600*8).unwrap())
12
+ }
13
  pub struct Query;
14
 
15
  impl Query {
 
24
  pub async fn find_dialog_infos_by_uid(db: &DbConn, uid: i64) -> Result<Vec<dialog_info::Model>, DbErr> {
25
  Entity::find()
26
  .filter(dialog_info::Column::Uid.eq(uid))
27
+ .filter(dialog_info::Column::IsDeleted.eq(false))
28
  .all(db)
29
  .await
30
  }
 
50
  impl Mutation {
51
  pub async fn create_dialog_info(
52
  db: &DbConn,
53
+ uid: i64,
54
+ kb_id: i64,
55
+ name: &String
56
  ) -> Result<dialog_info::ActiveModel, DbErr> {
57
  dialog_info::ActiveModel {
58
  dialog_id: Default::default(),
59
+ uid: Set(uid),
60
+ kb_id: Set(kb_id),
61
+ dialog_name: Set(name.to_owned()),
62
+ history: Set("".to_owned()),
63
+ created_at: Set(now()),
64
+ updated_at: Set(now()),
65
+ is_deleted: Default::default()
66
  }
67
  .save(db)
68
  .await
 
70
 
71
  pub async fn update_dialog_info_by_id(
72
  db: &DbConn,
73
+ dialog_id: i64,
74
+ dialog_name:&String,
75
+ history: &String
76
+ ) -> Result<UpdateResult, DbErr> {
77
+ Entity::update_many()
78
+ .col_expr(dialog_info::Column::DialogName, Expr::value(dialog_name))
79
+ .col_expr(dialog_info::Column::History, Expr::value(history))
80
+ .col_expr(dialog_info::Column::UpdatedAt, Expr::value(now()))
81
+ .filter(dialog_info::Column::DialogId.eq(dialog_id))
82
+ .exec(db)
 
 
 
 
 
 
 
 
83
  .await
84
  }
85
 
86
+ pub async fn delete_dialog_info(db: &DbConn, dialog_id: i64) -> Result<UpdateResult, DbErr> {
87
+ Entity::update_many()
88
+ .col_expr(dialog_info::Column::IsDeleted, Expr::value(true))
89
+ .filter(dialog_info::Column::DialogId.eq(dialog_id))
90
+ .exec(db)
91
+ .await
 
 
92
  }
93
 
94
  pub async fn delete_all_dialog_infos(db: &DbConn) -> Result<DeleteResult, DbErr> {
src/service/doc_info.rs CHANGED
@@ -1,10 +1,15 @@
1
- use chrono::Local;
2
- use sea_orm::{ActiveModelTrait, ColumnTrait, DbConn, DbErr, DeleteResult, EntityTrait, PaginatorTrait, QueryOrder, Unset, Unchanged, ConditionalStatement};
3
  use sea_orm::ActiveValue::Set;
4
  use sea_orm::QueryFilter;
5
- use crate::api::doc_info::Params;
6
- use crate::entity::{doc2_doc, doc_info, kb_info, tag_info};
7
  use crate::entity::doc_info::Entity;
 
 
 
 
 
8
 
9
  pub struct Query;
10
 
@@ -24,42 +29,121 @@ impl Query {
24
  .await
25
  }
26
 
27
- pub async fn find_doc_infos_by_name(db: &DbConn, uid: i64, name: String) -> Result<Vec<doc_info::Model>, DbErr> {
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
28
  Entity::find()
29
- .filter(doc_info::Column::DocName.eq(name))
30
  .filter(doc_info::Column::Uid.eq(uid))
 
 
31
  .all(db)
32
  .await
33
  }
34
 
35
- pub async fn find_doc_infos_by_params(db: &DbConn, params: Params) -> Result<Vec<doc_info::Model>, DbErr> {
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
36
  // Setup paginator
37
- let paginator = Entity::find();
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
38
 
39
- // Fetch paginated posts
40
- let mut query = paginator
41
- .find_with_related(kb_info::Entity);
42
  if let Some(kb_id) = params.filter.kb_id {
43
- query = query.filter(kb_info::Column::KbId.eq(kb_id));
44
  }
45
  if let Some(folder_id) = params.filter.folder_id {
46
-
47
  }
 
48
  if let Some(tag_id) = params.filter.tag_id {
49
- query = query.filter(tag_info::Column::Tid.eq(tag_id));
 
 
 
 
 
 
50
  }
51
- if let Some(keywords) = params.filter.keywords {
52
 
 
 
53
  }
54
- Ok(query.order_by_asc(doc_info::Column::Did)
55
- .all(db)
56
- .await?
57
- .into_iter()
58
- .map(|(mut doc_info, kb_infos)| {
59
- doc_info.kb_infos = kb_infos;
60
- doc_info
61
- })
62
- .collect())
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
63
  }
64
 
65
  pub async fn find_doc_infos_in_page(
@@ -126,11 +210,10 @@ impl Mutation {
126
  doc_name: Set(form_data.doc_name.to_owned()),
127
  size: Set(form_data.size.to_owned()),
128
  r#type: Set(form_data.r#type.to_owned()),
129
- kb_progress: Set(form_data.kb_progress.to_owned()),
130
- kb_progress_msg: Set(form_data.kb_progress_msg.to_owned()),
131
  location: Set(form_data.location.to_owned()),
132
- created_at: Set(Local::now().date_naive()),
133
- updated_at: Set(Local::now().date_naive()),
 
134
  }
135
  .save(db)
136
  .await
@@ -153,24 +236,50 @@ impl Mutation {
153
  doc_name: Set(form_data.doc_name.to_owned()),
154
  size: Set(form_data.size.to_owned()),
155
  r#type: Set(form_data.r#type.to_owned()),
156
- kb_progress: Set(form_data.kb_progress.to_owned()),
157
- kb_progress_msg: Set(form_data.kb_progress_msg.to_owned()),
158
  location: Set(form_data.location.to_owned()),
159
- created_at: Default::default(),
160
- updated_at: Set(Local::now().date_naive()),
 
161
  }
162
  .update(db)
163
  .await
164
  }
165
 
166
- pub async fn delete_doc_info(db: &DbConn, doc_id: i64) -> Result<DeleteResult, DbErr> {
167
- let tag: doc_info::ActiveModel = Entity::find_by_id(doc_id)
 
 
 
 
 
 
168
  .one(db)
169
  .await?
170
- .ok_or(DbErr::Custom("Cannot find.".to_owned()))
171
  .map(Into::into)?;
 
 
 
 
 
 
 
 
 
 
 
 
 
172
 
173
- tag.delete(db).await
 
 
 
 
 
 
 
 
174
  }
175
 
176
  pub async fn delete_all_doc_infos(db: &DbConn) -> Result<DeleteResult, DbErr> {
 
1
+ use chrono::{Utc, FixedOffset};
2
+ use sea_orm::{ActiveModelTrait, ColumnTrait, DbConn, DbErr, DeleteResult, EntityTrait, PaginatorTrait, QueryOrder, Unset, Unchanged, ConditionalStatement, QuerySelect, JoinType, RelationTrait, DbBackend, Statement, UpdateResult};
3
  use sea_orm::ActiveValue::Set;
4
  use sea_orm::QueryFilter;
5
+ use crate::api::doc_info::ListParams;
6
+ use crate::entity::{doc2_doc, doc_info};
7
  use crate::entity::doc_info::Entity;
8
+ use crate::service;
9
+
10
+ fn now()->chrono::DateTime<FixedOffset>{
11
+ Utc::now().with_timezone(&FixedOffset::east_opt(3600*8).unwrap())
12
+ }
13
 
14
  pub struct Query;
15
 
 
29
  .await
30
  }
31
 
32
+ pub async fn find_doc_infos_by_name(db: &DbConn, uid: i64, name: &String, parent_id:Option<i64>) -> Result<Vec<doc_info::Model>, DbErr> {
33
+ let mut dids = Vec::<i64>::new();
34
+ if let Some(pid) = parent_id {
35
+ for d2d in doc2_doc::Entity::find().filter(doc2_doc::Column::ParentId.eq(pid)).all(db).await?{
36
+ dids.push(d2d.did);
37
+ }
38
+ }
39
+ else{
40
+ let doc = Entity::find()
41
+ .filter(doc_info::Column::DocName.eq(name.clone()))
42
+ .filter(doc_info::Column::Uid.eq(uid))
43
+ .all(db)
44
+ .await?;
45
+ if doc.len() == 0{
46
+ return Ok(vec![]);
47
+ }
48
+ assert!(doc.len()>0);
49
+ let d2d = doc2_doc::Entity::find().filter(doc2_doc::Column::Did.eq(doc[0].did)).all(db).await?;
50
+ assert!(d2d.len() <= 1, "Did: {}->{}", doc[0].did, d2d.len());
51
+ if d2d.len()>0{
52
+ for d2d_ in doc2_doc::Entity::find().filter(doc2_doc::Column::ParentId.eq(d2d[0].parent_id)).all(db).await?{
53
+ dids.push(d2d_.did);
54
+ }
55
+ }
56
+ }
57
+
58
  Entity::find()
59
+ .filter(doc_info::Column::DocName.eq(name.clone()))
60
  .filter(doc_info::Column::Uid.eq(uid))
61
+ .filter(doc_info::Column::Did.is_in(dids))
62
+ .filter(doc_info::Column::IsDeleted.eq(false))
63
  .all(db)
64
  .await
65
  }
66
 
67
+ pub async fn all_descendent_ids(db: &DbConn, doc_ids: &Vec<i64>) -> Result<Vec<i64>, DbErr> {
68
+ let mut dids = doc_ids.clone();
69
+ let mut i:usize = 0;
70
+ loop {
71
+ if dids.len() == i {
72
+ break;
73
+ }
74
+
75
+ for d in doc2_doc::Entity::find().filter(doc2_doc::Column::ParentId.eq(dids[i])).all(db).await?{
76
+ dids.push(d.did);
77
+ }
78
+ i += 1;
79
+ }
80
+ Ok(dids)
81
+ }
82
+
83
+ pub async fn find_doc_infos_by_params(db: &DbConn, params: ListParams) -> Result<Vec<doc_info::Model>, DbErr> {
84
  // Setup paginator
85
+ let mut sql:String = "
86
+ select
87
+ a.did,
88
+ a.uid,
89
+ a.doc_name,
90
+ a.location,
91
+ a.size,
92
+ a.type,
93
+ a.created_at,
94
+ a.updated_at,
95
+ a.is_deleted
96
+ from
97
+ doc_info as a
98
+ ".to_owned();
99
+
100
+ let mut cond:String = format!(" a.uid={} and a.is_deleted=False ", params.uid);
101
 
 
 
 
102
  if let Some(kb_id) = params.filter.kb_id {
103
+ sql.push_str(&format!(" inner join kb2_doc on kb2_doc.did = a.did and kb2_doc.kb_id={}", kb_id));
104
  }
105
  if let Some(folder_id) = params.filter.folder_id {
106
+ sql.push_str(&format!(" inner join doc2_doc on a.did = doc2_doc.did and doc2_doc.parent_id={}", folder_id));
107
  }
108
+ // Fetch paginated posts
109
  if let Some(tag_id) = params.filter.tag_id {
110
+ let tag = service::tag_info::Query::find_tag_info_by_id(tag_id, &db).await.unwrap().unwrap();
111
+ if tag.folder_id > 0{
112
+ sql.push_str(&format!(" inner join doc2_doc on a.did = doc2_doc.did and doc2_doc.parent_id={}", tag.folder_id));
113
+ }
114
+ if tag.regx.len()>0{
115
+ cond.push_str(&format!(" and doc_name ~ '{}'", tag.regx));
116
+ }
117
  }
 
118
 
119
+ if let Some(keywords) = params.filter.keywords {
120
+ cond.push_str(&format!(" and doc_name like '%{}%'", keywords));
121
  }
122
+ if cond.len() > 0{
123
+ sql.push_str(&" where ");
124
+ sql.push_str(&cond);
125
+ }
126
+ let mut orderby = params.sortby.clone();
127
+ if orderby.len() == 0 {
128
+ orderby = "updated_at desc".to_owned();
129
+ }
130
+ sql.push_str(&format!(" order by {}", orderby));
131
+ let mut page_size:u32 = 30;
132
+ if let Some(pg_sz) = params.per_page {
133
+ page_size = pg_sz;
134
+ }
135
+ let mut page:u32 = 0;
136
+ if let Some(pg) = params.page {
137
+ page = pg;
138
+ }
139
+ sql.push_str(&format!(" limit {} offset {} ;", page_size, page*page_size));
140
+
141
+ print!("{}", sql);
142
+ Entity::find()
143
+ .from_raw_sql(
144
+ Statement::from_sql_and_values(DbBackend::Postgres,sql,vec![])
145
+ ).all(db).await
146
+
147
  }
148
 
149
  pub async fn find_doc_infos_in_page(
 
210
  doc_name: Set(form_data.doc_name.to_owned()),
211
  size: Set(form_data.size.to_owned()),
212
  r#type: Set(form_data.r#type.to_owned()),
 
 
213
  location: Set(form_data.location.to_owned()),
214
+ created_at: Set(form_data.created_at.to_owned()),
215
+ updated_at: Set(form_data.updated_at.to_owned()),
216
+ is_deleted:Default::default()
217
  }
218
  .save(db)
219
  .await
 
236
  doc_name: Set(form_data.doc_name.to_owned()),
237
  size: Set(form_data.size.to_owned()),
238
  r#type: Set(form_data.r#type.to_owned()),
 
 
239
  location: Set(form_data.location.to_owned()),
240
+ created_at: doc_info.created_at,
241
+ updated_at: Set(now()),
242
+ is_deleted: Default::default(),
243
  }
244
  .update(db)
245
  .await
246
  }
247
 
248
+ pub async fn delete_doc_info(db: &DbConn, doc_ids: &Vec<i64>) -> Result<UpdateResult, DbErr> {
249
+ let mut dids = doc_ids.clone();
250
+ let mut i:usize = 0;
251
+ loop {
252
+ if dids.len() == i {
253
+ break;
254
+ }
255
+ let mut doc: doc_info::ActiveModel = Entity::find_by_id(dids[i])
256
  .one(db)
257
  .await?
258
+ .ok_or(DbErr::Custom(format!("Can't find doc:{}", dids[i])))
259
  .map(Into::into)?;
260
+ doc.updated_at = Set(now());
261
+ doc.is_deleted = Set(true);
262
+ let _ = doc.update(db).await?;
263
+
264
+ for d in doc2_doc::Entity::find().filter(doc2_doc::Column::ParentId.eq(dids[i])).all(db).await?{
265
+ dids.push(d.did);
266
+ }
267
+ let _ = doc2_doc::Entity::delete_many().filter(doc2_doc::Column::ParentId.eq(dids[i])).exec(db).await?;
268
+ let _ = doc2_doc::Entity::delete_many().filter(doc2_doc::Column::Did.eq(dids[i])).exec(db).await?;
269
+ i += 1;
270
+ }
271
+ crate::service::kb_info::Mutation::remove_docs(&db, dids,None).await
272
+ }
273
 
274
+ pub async fn rename(db: &DbConn, doc_id: i64, name: &String) -> Result<doc_info::Model, DbErr> {
275
+ let mut doc: doc_info::ActiveModel = Entity::find_by_id(doc_id)
276
+ .one(db)
277
+ .await?
278
+ .ok_or(DbErr::Custom(format!("Can't find doc:{}", doc_id)))
279
+ .map(Into::into)?;
280
+ doc.updated_at = Set(now());
281
+ doc.doc_name = Set(name.clone());
282
+ doc.update(db).await
283
  }
284
 
285
  pub async fn delete_all_doc_infos(db: &DbConn) -> Result<DeleteResult, DbErr> {
src/service/kb_info.rs CHANGED
@@ -1,10 +1,14 @@
1
- use chrono::Local;
2
- use sea_orm::{ActiveModelTrait, ColumnTrait, DbConn, DbErr, DeleteResult, EntityTrait, PaginatorTrait, QueryFilter, QueryOrder};
 
3
  use sea_orm::ActiveValue::Set;
4
  use crate::entity::kb_info;
5
  use crate::entity::kb2_doc;
6
  use crate::entity::kb_info::Entity;
7
 
 
 
 
8
  pub struct Query;
9
 
10
  impl Query {
@@ -30,6 +34,14 @@ impl Query {
30
  .await
31
  }
32
 
 
 
 
 
 
 
 
 
33
  pub async fn find_kb_infos_in_page(
34
  db: &DbConn,
35
  page: u64,
@@ -58,8 +70,9 @@ impl Mutation {
58
  uid: Set(form_data.uid.to_owned()),
59
  kb_name: Set(form_data.kb_name.to_owned()),
60
  icon: Set(form_data.icon.to_owned()),
61
- created_at: Set(Local::now().date_naive()),
62
- updated_at: Set(Local::now().date_naive()),
 
63
  }
64
  .save(db)
65
  .await
@@ -71,10 +84,20 @@ impl Mutation {
71
  doc_ids: Vec<i64>
72
  )-> Result<(), DbErr> {
73
  for did in doc_ids{
 
 
 
 
 
 
74
  let _ = kb2_doc::ActiveModel {
75
  id: Default::default(),
76
  kb_id: Set(kb_id),
77
  did: Set(did),
 
 
 
 
78
  }
79
  .save(db)
80
  .await?;
@@ -83,6 +106,25 @@ impl Mutation {
83
  Ok(())
84
  }
85
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
86
  pub async fn update_kb_info_by_id(
87
  db: &DbConn,
88
  id: i64,
@@ -99,24 +141,25 @@ impl Mutation {
99
  uid: kb_info.uid,
100
  kb_name: Set(form_data.kb_name.to_owned()),
101
  icon: Set(form_data.icon.to_owned()),
102
- created_at: Default::default(),
103
- updated_at: Set(Local::now().date_naive()),
 
104
  }
105
  .update(db)
106
  .await
107
  }
108
 
109
  pub async fn delete_kb_info(db: &DbConn, kb_id: i64) -> Result<DeleteResult, DbErr> {
110
- let tag: kb_info::ActiveModel = Entity::find_by_id(kb_id)
111
  .one(db)
112
  .await?
113
  .ok_or(DbErr::Custom("Cannot find.".to_owned()))
114
  .map(Into::into)?;
115
 
116
- tag.delete(db).await
117
  }
118
 
119
  pub async fn delete_all_kb_infos(db: &DbConn) -> Result<DeleteResult, DbErr> {
120
  Entity::delete_many().exec(db).await
121
  }
122
- }
 
1
+ use chrono::{Local, FixedOffset, Utc};
2
+ use migration::Expr;
3
+ use sea_orm::{ActiveModelTrait, ColumnTrait, DbConn, DbErr, DeleteResult, EntityTrait, PaginatorTrait, QueryFilter, QueryOrder, UpdateResult};
4
  use sea_orm::ActiveValue::Set;
5
  use crate::entity::kb_info;
6
  use crate::entity::kb2_doc;
7
  use crate::entity::kb_info::Entity;
8
 
9
+ fn now()->chrono::DateTime<FixedOffset>{
10
+ Utc::now().with_timezone(&FixedOffset::east_opt(3600*8).unwrap())
11
+ }
12
  pub struct Query;
13
 
14
  impl Query {
 
34
  .await
35
  }
36
 
37
+ pub async fn find_kb_by_docs(db: &DbConn, doc_ids: Vec<i64>) -> Result<Vec<kb_info::Model>, DbErr> {
38
+ let mut kbids = Vec::<i64>::new();
39
+ for k in kb2_doc::Entity::find().filter(kb2_doc::Column::Did.is_in(doc_ids)).all(db).await?{
40
+ kbids.push(k.kb_id);
41
+ }
42
+ Entity::find().filter(kb_info::Column::KbId.is_in(kbids)).all(db).await
43
+ }
44
+
45
  pub async fn find_kb_infos_in_page(
46
  db: &DbConn,
47
  page: u64,
 
70
  uid: Set(form_data.uid.to_owned()),
71
  kb_name: Set(form_data.kb_name.to_owned()),
72
  icon: Set(form_data.icon.to_owned()),
73
+ created_at: Set(now()),
74
+ updated_at: Set(now()),
75
+ is_deleted:Default::default()
76
  }
77
  .save(db)
78
  .await
 
84
  doc_ids: Vec<i64>
85
  )-> Result<(), DbErr> {
86
  for did in doc_ids{
87
+ let res = kb2_doc::Entity::find()
88
+ .filter(kb2_doc::Column::KbId.eq(kb_id))
89
+ .filter(kb2_doc::Column::Did.eq(did))
90
+ .all(db)
91
+ .await?;
92
+ if res.len()>0{continue;}
93
  let _ = kb2_doc::ActiveModel {
94
  id: Default::default(),
95
  kb_id: Set(kb_id),
96
  did: Set(did),
97
+ kb_progress: Set(0.0),
98
+ kb_progress_msg: Set("".to_owned()),
99
+ updated_at: Set(now()),
100
+ is_deleted:Default::default()
101
  }
102
  .save(db)
103
  .await?;
 
106
  Ok(())
107
  }
108
 
109
+ pub async fn remove_docs(
110
+ db: &DbConn,
111
+ doc_ids: Vec<i64>,
112
+ kb_id: Option<i64>
113
+ )-> Result<UpdateResult, DbErr> {
114
+ let update = kb2_doc::Entity::update_many()
115
+ .col_expr(kb2_doc::Column::IsDeleted, Expr::value(true))
116
+ .col_expr(kb2_doc::Column::KbProgress, Expr::value(0))
117
+ .col_expr(kb2_doc::Column::KbProgressMsg, Expr::value(""))
118
+ .filter(kb2_doc::Column::Did.is_in(doc_ids));
119
+ if let Some(kbid) = kb_id{
120
+ update.filter(kb2_doc::Column::KbId.eq(kbid))
121
+ .exec(db)
122
+ .await
123
+ }else{
124
+ update.exec(db).await
125
+ }
126
+ }
127
+
128
  pub async fn update_kb_info_by_id(
129
  db: &DbConn,
130
  id: i64,
 
141
  uid: kb_info.uid,
142
  kb_name: Set(form_data.kb_name.to_owned()),
143
  icon: Set(form_data.icon.to_owned()),
144
+ created_at: kb_info.created_at,
145
+ updated_at: Set(now()),
146
+ is_deleted: Default::default()
147
  }
148
  .update(db)
149
  .await
150
  }
151
 
152
  pub async fn delete_kb_info(db: &DbConn, kb_id: i64) -> Result<DeleteResult, DbErr> {
153
+ let kb: kb_info::ActiveModel = Entity::find_by_id(kb_id)
154
  .one(db)
155
  .await?
156
  .ok_or(DbErr::Custom("Cannot find.".to_owned()))
157
  .map(Into::into)?;
158
 
159
+ kb.delete(db).await
160
  }
161
 
162
  pub async fn delete_all_kb_infos(db: &DbConn) -> Result<DeleteResult, DbErr> {
163
  Entity::delete_many().exec(db).await
164
  }
165
+ }
src/service/tag_info.rs CHANGED
@@ -1,18 +1,24 @@
1
- use chrono::Local;
2
- use sea_orm::{ActiveModelTrait, DbConn, DbErr, DeleteResult, EntityTrait, PaginatorTrait, QueryOrder};
3
  use sea_orm::ActiveValue::Set;
4
  use crate::entity::tag_info;
5
  use crate::entity::tag_info::Entity;
6
 
 
 
 
7
  pub struct Query;
8
 
9
  impl Query {
10
- pub async fn find_tag_info_by_id(db: &DbConn, id: i64) -> Result<Option<tag_info::Model>, DbErr> {
11
  Entity::find_by_id(id).one(db).await
12
  }
13
 
14
- pub async fn find_tag_infos(db: &DbConn) -> Result<Vec<tag_info::Model>, DbErr> {
15
- Entity::find().all(db).await
 
 
 
16
  }
17
 
18
  pub async fn find_tag_infos_in_page(
@@ -45,9 +51,9 @@ impl Mutation {
45
  regx: Set(form_data.regx.to_owned()),
46
  color: Set(form_data.color.to_owned()),
47
  icon: Set(form_data.icon.to_owned()),
48
- dir: Set(form_data.dir.to_owned()),
49
- created_at: Set(Local::now().date_naive()),
50
- updated_at: Set(Local::now().date_naive()),
51
  }
52
  .save(db)
53
  .await
@@ -71,9 +77,9 @@ impl Mutation {
71
  regx: Set(form_data.regx.to_owned()),
72
  color: Set(form_data.color.to_owned()),
73
  icon: Set(form_data.icon.to_owned()),
74
- dir: Set(form_data.dir.to_owned()),
75
  created_at: Default::default(),
76
- updated_at: Set(Local::now().date_naive()),
77
  }
78
  .update(db)
79
  .await
 
1
+ use chrono::{FixedOffset, Utc};
2
+ use sea_orm::{ActiveModelTrait, DbConn, DbErr, DeleteResult, EntityTrait, PaginatorTrait, QueryOrder, ColumnTrait, QueryFilter};
3
  use sea_orm::ActiveValue::Set;
4
  use crate::entity::tag_info;
5
  use crate::entity::tag_info::Entity;
6
 
7
+ fn now()->chrono::DateTime<FixedOffset>{
8
+ Utc::now().with_timezone(&FixedOffset::east_opt(3600*8).unwrap())
9
+ }
10
  pub struct Query;
11
 
12
  impl Query {
13
+ pub async fn find_tag_info_by_id(id: i64, db: &DbConn) -> Result<Option<tag_info::Model>, DbErr> {
14
  Entity::find_by_id(id).one(db).await
15
  }
16
 
17
+ pub async fn find_tags_by_uid(uid:i64, db: &DbConn) -> Result<Vec<tag_info::Model>, DbErr> {
18
+ Entity::find()
19
+ .filter(tag_info::Column::Uid.eq(uid))
20
+ .all(db)
21
+ .await
22
  }
23
 
24
  pub async fn find_tag_infos_in_page(
 
51
  regx: Set(form_data.regx.to_owned()),
52
  color: Set(form_data.color.to_owned()),
53
  icon: Set(form_data.icon.to_owned()),
54
+ folder_id: Set(form_data.folder_id.to_owned()),
55
+ created_at: Set(now()),
56
+ updated_at: Set(now()),
57
  }
58
  .save(db)
59
  .await
 
77
  regx: Set(form_data.regx.to_owned()),
78
  color: Set(form_data.color.to_owned()),
79
  icon: Set(form_data.icon.to_owned()),
80
+ folder_id: Set(form_data.folder_id.to_owned()),
81
  created_at: Default::default(),
82
+ updated_at: Set(now()),
83
  }
84
  .update(db)
85
  .await
src/service/user_info.rs CHANGED
@@ -1,9 +1,13 @@
1
- use chrono::Local;
2
- use sea_orm::{ActiveModelTrait, ColumnTrait, DbConn, DbErr, DeleteResult, EntityTrait, PaginatorTrait, QueryFilter, QueryOrder};
 
3
  use sea_orm::ActiveValue::Set;
4
  use crate::entity::user_info;
5
  use crate::entity::user_info::Entity;
6
 
 
 
 
7
  pub struct Query;
8
 
9
  impl Query {
@@ -44,52 +48,64 @@ pub struct Mutation;
44
  impl Mutation {
45
  pub async fn create_user(
46
  db: &DbConn,
47
- form_data: user_info::Model,
48
  ) -> Result<user_info::ActiveModel, DbErr> {
49
  user_info::ActiveModel {
50
  uid: Default::default(),
51
  email: Set(form_data.email.to_owned()),
52
  nickname: Set(form_data.nickname.to_owned()),
53
- avatar_url: Set(form_data.avatar_url.to_owned()),
54
- color_schema: Set(form_data.color_schema.to_owned()),
55
  list_style: Set(form_data.list_style.to_owned()),
56
  language: Set(form_data.language.to_owned()),
57
  password: Set(form_data.password.to_owned()),
58
- created_at: Set(Local::now().date_naive()),
59
- updated_at: Set(Local::now().date_naive()),
 
60
  }
61
  .save(db)
62
  .await
63
  }
64
 
65
- pub async fn update_tag_by_id(
66
  db: &DbConn,
67
- id: i64,
68
- form_data: user_info::Model,
69
  ) -> Result<user_info::Model, DbErr> {
70
- let user: user_info::ActiveModel = Entity::find_by_id(id)
71
  .one(db)
72
  .await?
73
- .ok_or(DbErr::Custom("Cannot find tag.".to_owned()))
74
  .map(Into::into)?;
75
 
76
  user_info::ActiveModel {
77
- uid: user.uid,
78
  email: Set(form_data.email.to_owned()),
79
  nickname: Set(form_data.nickname.to_owned()),
80
- avatar_url: Set(form_data.avatar_url.to_owned()),
81
- color_schema: Set(form_data.color_schema.to_owned()),
82
  list_style: Set(form_data.list_style.to_owned()),
83
  language: Set(form_data.language.to_owned()),
84
  password: Set(form_data.password.to_owned()),
85
- created_at: Default::default(),
86
- updated_at: Set(Local::now().date_naive()),
 
87
  }
88
  .update(db)
89
  .await
90
  }
91
 
92
- pub async fn delete_tag(db: &DbConn, tid: i64) -> Result<DeleteResult, DbErr> {
 
 
 
 
 
 
 
 
 
 
 
93
  let tag: user_info::ActiveModel = Entity::find_by_id(tid)
94
  .one(db)
95
  .await?
@@ -99,7 +115,7 @@ impl Mutation {
99
  tag.delete(db).await
100
  }
101
 
102
- pub async fn delete_all_tags(db: &DbConn) -> Result<DeleteResult, DbErr> {
103
  Entity::delete_many().exec(db).await
104
  }
105
  }
 
1
+ use chrono::{FixedOffset, Utc};
2
+ use migration::Expr;
3
+ use sea_orm::{ActiveModelTrait, ColumnTrait, DbConn, DbErr, DeleteResult, EntityTrait, PaginatorTrait, QueryFilter, QueryOrder, UpdateResult};
4
  use sea_orm::ActiveValue::Set;
5
  use crate::entity::user_info;
6
  use crate::entity::user_info::Entity;
7
 
8
+ fn now()->chrono::DateTime<FixedOffset>{
9
+ Utc::now().with_timezone(&FixedOffset::east_opt(3600*8).unwrap())
10
+ }
11
  pub struct Query;
12
 
13
  impl Query {
 
48
  impl Mutation {
49
  pub async fn create_user(
50
  db: &DbConn,
51
+ form_data: &user_info::Model,
52
  ) -> Result<user_info::ActiveModel, DbErr> {
53
  user_info::ActiveModel {
54
  uid: Default::default(),
55
  email: Set(form_data.email.to_owned()),
56
  nickname: Set(form_data.nickname.to_owned()),
57
+ avatar_base64: Set(form_data.avatar_base64.to_owned()),
58
+ color_scheme: Set(form_data.color_scheme.to_owned()),
59
  list_style: Set(form_data.list_style.to_owned()),
60
  language: Set(form_data.language.to_owned()),
61
  password: Set(form_data.password.to_owned()),
62
+ last_login_at: Set(now()),
63
+ created_at: Set(now()),
64
+ updated_at: Set(now()),
65
  }
66
  .save(db)
67
  .await
68
  }
69
 
70
+ pub async fn update_user_by_id(
71
  db: &DbConn,
72
+ form_data: &user_info::Model,
 
73
  ) -> Result<user_info::Model, DbErr> {
74
+ let usr: user_info::ActiveModel = Entity::find_by_id(form_data.uid)
75
  .one(db)
76
  .await?
77
+ .ok_or(DbErr::Custom("Cannot find user.".to_owned()))
78
  .map(Into::into)?;
79
 
80
  user_info::ActiveModel {
81
+ uid: Set(form_data.uid),
82
  email: Set(form_data.email.to_owned()),
83
  nickname: Set(form_data.nickname.to_owned()),
84
+ avatar_base64: Set(form_data.avatar_base64.to_owned()),
85
+ color_scheme: Set(form_data.color_scheme.to_owned()),
86
  list_style: Set(form_data.list_style.to_owned()),
87
  language: Set(form_data.language.to_owned()),
88
  password: Set(form_data.password.to_owned()),
89
+ updated_at: Set(now()),
90
+ last_login_at: usr.last_login_at,
91
+ created_at:usr.created_at,
92
  }
93
  .update(db)
94
  .await
95
  }
96
 
97
+ pub async fn update_login_status(
98
+ uid: i64,
99
+ db: &DbConn
100
+ ) -> Result<UpdateResult, DbErr> {
101
+ Entity::update_many()
102
+ .col_expr(user_info::Column::LastLoginAt, Expr::value(now()))
103
+ .filter(user_info::Column::Uid.eq(uid))
104
+ .exec(db)
105
+ .await
106
+ }
107
+
108
+ pub async fn delete_user(db: &DbConn, tid: i64) -> Result<DeleteResult, DbErr> {
109
  let tag: user_info::ActiveModel = Entity::find_by_id(tid)
110
  .one(db)
111
  .await?
 
115
  tag.delete(db).await
116
  }
117
 
118
+ pub async fn delete_all(db: &DbConn) -> Result<DeleteResult, DbErr> {
119
  Entity::delete_many().exec(db).await
120
  }
121
  }