yumikimi381 commited on
Commit
daf0288
·
verified ·
1 Parent(s): dfc97a6

Upload folder using huggingface_hub

Browse files
This view is limited to 50 files because it contains too many changes.   See raw diff
Files changed (50) hide show
  1. .gitattributes +5 -0
  2. .gitignore +18 -0
  3. .gitlab-ci.yml +33 -0
  4. .gitmodules +0 -0
  5. .vscode/settings.json +2 -0
  6. Dockerfile +55 -0
  7. README.md +113 -7
  8. abstractClass.py +59 -0
  9. app.py +406 -0
  10. cropped_table.png +0 -0
  11. cropped_table_0.png +0 -0
  12. cropped_table_1.png +0 -0
  13. deepdoc/README.md +122 -0
  14. deepdoc/__init__.py +8 -0
  15. deepdoc/models/.gitattributes +35 -0
  16. deepdoc/models/README.md +3 -0
  17. deepdoc/models/det.onnx +3 -0
  18. deepdoc/models/layout.laws.onnx +3 -0
  19. deepdoc/models/layout.manual.onnx +3 -0
  20. deepdoc/models/layout.onnx +3 -0
  21. deepdoc/models/layout.paper.onnx +3 -0
  22. deepdoc/models/ocr.res +6623 -0
  23. deepdoc/models/rec.onnx +3 -0
  24. deepdoc/models/tsr.onnx +3 -0
  25. deepdoc/vision/__init__.py +3 -0
  26. deepdoc/vision/ocr.res +6623 -0
  27. deepdoc/vision/operators.py +711 -0
  28. deepdoc/vision/postprocess.py +353 -0
  29. deepdoc/vision/ragFlow.py +313 -0
  30. detectionAndOcrTable1.py +425 -0
  31. detectionAndOcrTable2.py +306 -0
  32. detectionAndOcrTable3.py +267 -0
  33. detectionAndOcrTable4.py +112 -0
  34. doctrfiles/__init__.py +4 -0
  35. doctrfiles/doctr_recognizer.py +183 -0
  36. doctrfiles/models/config-multi2.json +21 -0
  37. doctrfiles/models/db_mobilenet_v3_large-81e9b152.pt +3 -0
  38. doctrfiles/models/db_resnet34-cb6aed9e.pt +3 -0
  39. doctrfiles/models/db_resnet50-79bd7d70.pt +3 -0
  40. doctrfiles/models/db_resnet50_config.json +20 -0
  41. doctrfiles/models/doctr-multilingual-parseq.bin +3 -0
  42. doctrfiles/models/master-fde31e4a.pt +3 -0
  43. doctrfiles/models/master.json +21 -0
  44. doctrfiles/models/multi2.bin +3 -0
  45. doctrfiles/models/multilingual-parseq-config.json +21 -0
  46. doctrfiles/word_detector.py +282 -0
  47. image-1.png +0 -0
  48. image-2.png +0 -0
  49. image.png +0 -0
  50. june11.jpg +0 -0
.gitattributes CHANGED
@@ -33,3 +33,8 @@ saved_model/**/* filter=lfs diff=lfs merge=lfs -text
33
  *.zip filter=lfs diff=lfs merge=lfs -text
34
  *.zst filter=lfs diff=lfs merge=lfs -text
35
  *tfevents* filter=lfs diff=lfs merge=lfs -text
 
 
 
 
 
 
33
  *.zip filter=lfs diff=lfs merge=lfs -text
34
  *.zst filter=lfs diff=lfs merge=lfs -text
35
  *tfevents* filter=lfs diff=lfs merge=lfs -text
36
+ res0.png filter=lfs diff=lfs merge=lfs -text
37
+ table_drawn_bbox_with_extra.png filter=lfs diff=lfs merge=lfs -text
38
+ unitable/website/unitable-demo.gif filter=lfs diff=lfs merge=lfs -text
39
+ unitable/website/unitable-demo.mp4 filter=lfs diff=lfs merge=lfs -text
40
+ unitable/website/wandb_screenshot.png filter=lfs diff=lfs merge=lfs -text
.gitignore ADDED
@@ -0,0 +1,18 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+
2
+ unitable/experiments/unitable_weights/**
3
+
4
+ res/**
5
+
6
+ TestingFiles/**
7
+ TestingFilesImages/**
8
+
9
+ # python generated files
10
+ __pycache__/
11
+ *.py[oc]
12
+ build/
13
+ dist/
14
+ wheels/
15
+ *.egg-info
16
+
17
+ # venv
18
+ .venv
.gitlab-ci.yml ADDED
@@ -0,0 +1,33 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ variables:
2
+ GIT_STRATEGY: fetch
3
+ GIT_SSL_NO_VERIFY: "true"
4
+ GIT_LFS_SKIP_SMUDGE: 1
5
+ DOCKER_BUILDKIT: 1
6
+
7
+ stages:
8
+ - build
9
+
10
+ image_build:
11
+ stage: build
12
+ image: docker:stable
13
+ before_script:
14
+ - docker login -u gitlab-ci-token -p $CI_JOB_TOKEN http://$CI_REGISTRY
15
+ script: |
16
+ CI_COMMIT_SHA_7=$(echo $CI_COMMIT_SHA | cut -c1-7)
17
+ DATE=$(date +%Y-%m-%d)
18
+ docker build --tag $CI_REGISTRY_IMAGE/$CI_COMMIT_BRANCH:latest \
19
+ --tag $CI_REGISTRY_IMAGE/$CI_COMMIT_BRANCH:$CI_COMMIT_SHA_7 \
20
+ --tag $CI_REGISTRY_IMAGE/$CI_COMMIT_BRANCH:$DATE \
21
+ -f Dockerfile .
22
+ docker push $CI_REGISTRY_IMAGE/$CI_COMMIT_BRANCH:latest
23
+ docker push $CI_REGISTRY_IMAGE/$CI_COMMIT_BRANCH:$CI_COMMIT_SHA_7
24
+ docker push $CI_REGISTRY_IMAGE/$CI_COMMIT_BRANCH:$DATE
25
+ # Run only when Dockerfile has changed
26
+ rules:
27
+ - if: $CI_PIPELINE_SOURCE == "push"
28
+ changes:
29
+ - Dockerfile
30
+ # Set to `on_success` to automatically rebuild
31
+ # Set to `manual` to trigger the build manually using Gitlab UI
32
+ when: on_success
33
+ allow_failure: true
.gitmodules ADDED
File without changes
.vscode/settings.json ADDED
@@ -0,0 +1,2 @@
 
 
 
1
+ {
2
+ }
Dockerfile ADDED
@@ -0,0 +1,55 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ ARG BASE_IMAGE="nvidia/cuda:12.2.2-devel-ubuntu22.04"
2
+
3
+ FROM ${BASE_IMAGE}
4
+ ARG HOMEDIRECTORY="/myhome"
5
+ ENV HOMEDIRECTORY=$HOMEDIRECTORY
6
+
7
+ USER root
8
+ RUN apt-get update && \
9
+ apt-get install -y --no-install-recommends \
10
+ curl \
11
+ python3 \
12
+ python3-pip \
13
+ python3-dev \
14
+ poppler-utils \
15
+ gcc \
16
+ git \
17
+ git-lfs \
18
+ htop \
19
+ libgl1 \
20
+ libglib2.0-0 \
21
+ ncdu \
22
+ openssh-client \
23
+ openssh-server \
24
+ psmisc \
25
+ rsync \
26
+ screen \
27
+ sudo \
28
+ tmux \
29
+ unzip \
30
+ vim \
31
+ wget && \
32
+ wget -q https://github.com/justjanne/powerline-go/releases/download/v1.24/powerline-go-linux-"$(dpkg --print-architecture)" -O /usr/local/bin/powerline-shell && \
33
+ chmod a+x /usr/local/bin/powerline-shell
34
+
35
+ RUN ln -s /usr/bin/python3 /usr/bin/python
36
+ COPY requirements.txt .
37
+ RUN pip install --no-cache-dir -r requirements.txt --extra-index-url https://download.pytorch.org/whl/cu117
38
+
39
+
40
+ # setup ssh
41
+ RUN ssh-keygen -A
42
+ RUN sed -i 's/#*PermitRootLogin prohibit-password/PermitRootLogin yes/' /etc/ssh/sshd_config
43
+ EXPOSE 22
44
+
45
+ # Make the root user's home directory /myhome (the default for run.ai),
46
+ # and allow to login with password 'root'.
47
+ RUN echo 'root:root' | chpasswd
48
+ RUN sed -i 's|:root:/root:|:root:/myhome:|' /etc/passwd
49
+
50
+ ENTRYPOINT sudo service ssh start && /bin/bash
51
+
52
+
53
+
54
+
55
+
README.md CHANGED
@@ -1,12 +1,118 @@
1
  ---
2
- title: Alps
3
- emoji: 👀
4
- colorFrom: purple
5
- colorTo: green
6
  sdk: gradio
7
  sdk_version: 4.44.0
8
- app_file: app.py
9
- pinned: false
10
  ---
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
11
 
12
- Check out the configuration reference at https://huggingface.co/docs/hub/spaces-config-reference
 
1
  ---
2
+ title: alps
3
+ app_file: app.py
 
 
4
  sdk: gradio
5
  sdk_version: 4.44.0
 
 
6
  ---
7
+ # Alps
8
+
9
+ Pipeline for OCRing PDFs and tables
10
+
11
+ This repository contains different OCR methods using various libraries/models.
12
+
13
+ ## Running gradio:
14
+ `python app.py` in terminal
15
+
16
+
17
+ ## Installation:
18
+ Build the docker image and run the container
19
+
20
+ Clone this repository and install the required dependencies:
21
+ ```
22
+ pip install -r requirements.txt --extra-index-url https://download.pytorch.org/whl/cu117
23
+
24
+ apt install weasyprint
25
+
26
+ ```
27
+ Note: You need a GPU to run this code.
28
+
29
+ ## Example Usage
30
+
31
+ Run python main.py inside the directory. Provide the path to the test file (the file must be placed inside the repository, and the file path should be relative to the repository (alps)). Next, provide the path to save intermediate outputs from the run (draw cell bounding boxes on the table, show table detection results in pdf), and specify which component to run.
32
+
33
+ Outputs are printed in the terminal.
34
+
35
+ ```
36
+ usage: main.py [-h] [--test_file TEST_FILE] [--debug_folder DEBUG_FOLDER] [--englishFlag ENGLISHFLAG] [--denoise DENOISE] ocr
37
+
38
+ ```
39
+ Description of the component:
40
+
41
+ ### ocr1
42
+
43
+ ocr1
44
+ Input: Path to a PDF file
45
+ Output: Dictionary of each page and list of line_annotations. List of LineAnnotations contains bboxes for each line and List of its children wordAnnotation. Each wordAnnotation contains bboxes and text inside.
46
+ What it does: Runs Ragflow textline detector + OCR with DocTR
47
+
48
+ Example:
49
+ ```
50
+ python main.py ocr1 --test_file TestingFiles/OCRTest1German.pdf --debug_folder ./res/ocrdebug1/
51
+ python main.py ocr1 --test_file TestingFiles/OCRTest3English.pdf --debug_folder ./res/ocrdebug1/ --englishFlag True
52
+ ```
53
+
54
+ ### table1
55
+ Input : file path to an image of a cropped table
56
+ Output: Parsed table in HTML form
57
+ What it does: Uses Unitable + DocTR
58
+
59
+ ```
60
+ python main.py table1 --test_file cropped_table.png --debug_folder ./res/table1/
61
+
62
+ ```
63
+
64
+ ### table2
65
+ Input: File path to an image of a cropped table
66
+ Output: Parsed table in HTML form
67
+ What it does: Uses Unitable
68
+
69
+ ```
70
+ python main.py table2 --test_file cropped_table.png --debug_folder ./res/table2/
71
+
72
+ ```
73
+ ### pdftable1
74
+ Input: PDF file path
75
+ Output: Parsed table in HTML form
76
+ What it does: Uses Unitable + DocTR
77
+
78
+
79
+ ```
80
+ python main.py pdftable1 --test_file TestingFiles/OCRTest5English.pdf --debug_folder ./res/table_debug1/
81
+
82
+ python main.py pdftable3 --test_file TestingFiles/TableOCRTestEnglish.pdf --debug_folder ./res/poor_relief2
83
+ ```
84
+
85
+
86
+ ### pdftable2 :
87
+ Input: PDF file path
88
+ Output: Parsed table in HTML form
89
+ What it does: Detects table and parses them, Runs Full Unitable Table detection
90
+
91
+ ```
92
+ python main.py pdftable2 --test_file TestingFiles/OCRTest5English.pdf --debug_folder ./res/table_debug2/
93
+ ```
94
+
95
+
96
+ ### pdftable3
97
+ Input: PDF file path
98
+ Output: Parsed table in HTML form
99
+ What it does: Detects table with YOLO, Unitable + DocTR
100
+
101
+
102
+
103
+ ### pdftable4
104
+ Input: PDF file path
105
+ Output: Parsed table in HTML form
106
+ What it does: Detects table with YOLO, Runs Full doctr Table detection
107
+
108
+ python main.py pdftable4 --test_file TestingFiles/TableOCRTestEasier.pdf --debug_folder ./res/table_debug3/
109
+
110
+
111
+ ## bbox
112
+ They are ordered as [xmin, ymin, xmax, ymax], because the coordinates start from (0,0) of the image, which is the upper-left corner.
113
+
114
+ xmin ymin - upper-left corner
115
+ xmax ymax - bottom-right corner
116
+
117
+ ![alt text](image-2.png)
118
 
 
abstractClass.py ADDED
@@ -0,0 +1,59 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+
2
+ from typing import Any, List, Literal, Mapping, Optional, Tuple
3
+ from abc import ABC, abstractmethod
4
+
5
+ import numpy as np
6
+ import cv2
7
+ from PIL import Image
8
+ from abc import ABC, abstractmethod
9
+
10
+ from utils import cropImage
11
+
12
+
13
+ class OCRComponent:
14
+ """
15
+ Wrapper class for cropping images and giving it to OCR Predictor
16
+ """
17
+ def predict_pdf(self, pdf_name:str="", page:int=None, bbx:List[List[float]]=None)-> List[List[float]]:
18
+ #TODO: Preprocessing to crop interest region
19
+ pass
20
+
21
+
22
+ class TextDetector(ABC):
23
+ """
24
+ Abstract base class for text detectors that takes in bounding boxes, pdf name, and page
25
+ and returns bounding boxes results on them.
26
+ """
27
+
28
+ def __init__(self):
29
+
30
+ pass
31
+
32
+ """
33
+ This is for predicting given an already cropped image
34
+ """
35
+ @abstractmethod
36
+ def predict_img(self, img:np.ndarray=None)-> List[List[float]]:
37
+ # do something with self.input and return bbx
38
+ pass
39
+
40
+ class textRecognizer(ABC):
41
+ """
42
+ class of textRecognizer that takes in bounding boxes, pdf name and page and returns
43
+ OCR results on them
44
+ """
45
+
46
+ def __init__(self):
47
+
48
+ pass
49
+
50
+
51
+ """
52
+ This is for predicting given text line detection result form text line detector
53
+ """
54
+ @abstractmethod
55
+ def predict_img(self, bxs:List[List[float]], img:Image.Image)-> List[List[float]]:
56
+ # do something with self.input and return bbx
57
+ pass
58
+
59
+
app.py ADDED
@@ -0,0 +1,406 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+
2
+ import os
3
+ import traceback
4
+ import argparse
5
+ from typing import List, Tuple, Set, Dict
6
+
7
+ import time
8
+ from PIL import Image
9
+ import numpy as np
10
+ from doctr.models import ocr_predictor
11
+ import logging
12
+ import pandas as pd
13
+ from bs4 import BeautifulSoup
14
+ import gradio
15
+
16
+ from utils import cropImages
17
+ from utils import draw_only_box,draw_box_with_text,getlogger,Annotation
18
+ from ocr_component1 import OCRComponent1
19
+ from detectionAndOcrTable1 import DetectionAndOcrTable1
20
+ from detectionAndOcrTable2 import DetectionAndOcrTable2
21
+ from detectionAndOcrTable3 import DetectionAndOcrTable3
22
+ from detectionAndOcrTable4 import DetectionAndOcrTable4
23
+ from ocrTable1 import OcrTable1
24
+ from ocrTable2 import OcrTable2
25
+ from pdf2image import convert_from_path
26
+
27
+
28
+ def convertHTMLToCSV(html:str,output_path:str)->str:
29
+
30
+ # empty list
31
+ data = []
32
+
33
+ # for getting the header from
34
+ # the HTML file
35
+ list_header = []
36
+ soup = BeautifulSoup(html,'html.parser')
37
+ header = soup.find_all("table")[0].find("tr")
38
+
39
+ for items in header:
40
+ try:
41
+ list_header.append(items.get_text())
42
+ except:
43
+ continue
44
+
45
+ # for getting the data
46
+ HTML_data = soup.find_all("table")[0].find_all("tr")[1:]
47
+
48
+ for element in HTML_data:
49
+ sub_data = []
50
+ for sub_element in element:
51
+ try:
52
+ sub_data.append(sub_element.get_text())
53
+ except:
54
+ continue
55
+ data.append(sub_data)
56
+
57
+ # Storing the data into Pandas
58
+ # DataFrame
59
+ dataFrame = pd.DataFrame(data = data, columns = list_header)
60
+
61
+ # Converting Pandas DataFrame
62
+ # into CSV file
63
+ dataFrame.to_csv(output_path)
64
+
65
+ def saveResults(image_list, results, labels, output_dir='output/', threshold=0.5):
66
+ if not os.path.exists(output_dir):
67
+ os.makedirs(output_dir)
68
+ for idx, im in enumerate(image_list):
69
+ im = draw_only_box(im, results[idx], labels, threshold=threshold)
70
+
71
+ out_path = os.path.join(output_dir, f"{idx}.jpg")
72
+ im.save(out_path, quality=95)
73
+ print("save result to: " + out_path)
74
+
75
+ def InputToImages(input_path:str,resolution=300)-> List[Image.Image]:
76
+ """
77
+ input is file location to image
78
+ return : List of Pillow image objects
79
+ """
80
+ images=[]
81
+ try:
82
+ img =Image.open(input_path)
83
+ if img.mode == 'RGBA':
84
+ img = img.convert('RGB')
85
+ images.append(img)
86
+ except Exception as e:
87
+ traceback.print_exc()
88
+ return images
89
+
90
+ def drawTextDetRes(bxs :List[List[float]],img:Image.Image,output_path:str):
91
+ """
92
+ draw layout analysis results
93
+ """
94
+ """bxs_draw is xmin, ymin, xmax, ymax"""
95
+ bxs_draw = [[b[0][0], b[0][1], b[1][0], b[-1][1]] for b in bxs if b[0][0] <= b[1][0] and b[0][1] <= b[-1][1]]
96
+
97
+ #images_to_recognizer = cropImage(bxs, img)
98
+ img_to_save = draw_only_box(img, bxs_draw)
99
+ img_to_save.save(output_path, quality=95)
100
+
101
+ def test_ocr_component1(test_file="TestingFiles/OCRTest1German.pdf", debug_folder = './res/table1/',englishFlag = False):
102
+ #Takes as input image of a single page and returns the detected lines and words
103
+
104
+ images = convert_from_path(test_file)
105
+ ocr = OCRComponent1(englishFlag)
106
+ ocr_results = {}
107
+
108
+ all_text_in_pages = {}
109
+ for page_number,img in enumerate(images):
110
+ text_in_page = ""
111
+
112
+ line_annotations= ocr.predict(img = np.array(img))
113
+ ocr_results[page_number] = line_annotations
114
+
115
+ """
116
+ boxes_to_draw =[]
117
+ for list_of_ann in word_annotations:
118
+ for ann in list_of_ann:
119
+ logger.info(ann.text)
120
+ b = ann.box
121
+ boxes_to_draw.append(b)
122
+
123
+ img_to_save = draw_only_box(img,boxes_to_draw)
124
+ img_to_save.save("res/12June_2_lines.png", quality=95)
125
+ """
126
+
127
+ line_boxes_to_draw =[]
128
+ #print("Detected lines are ")
129
+ #print(len(line_annotations.items()))
130
+ for index,ann in line_annotations.items():
131
+
132
+ b = ann.box
133
+ line_boxes_to_draw.append(b)
134
+ line_words = ""
135
+ #print("detected words per line")
136
+ #print(len(ann.words))
137
+ for wordann in ann.words:
138
+ line_words += wordann.text +" "
139
+ print(line_words)
140
+ text_in_page += line_words +"\n"
141
+
142
+ img_to_save1 = draw_only_box(img,line_boxes_to_draw)
143
+ imgname = test_file.split("/")[-1][:-4]
144
+ img_to_save1.save(debug_folder+imgname+"_"+str(page_number)+"_bbox_detection.png", quality=95)
145
+
146
+ all_text_in_pages[page_number] = text_in_page
147
+
148
+ return ocr_results, all_text_in_pages
149
+
150
+
151
+ def test_tableOcrOnly1(test_file :Image.Image , debug_folder = './res/table1/',denoise = False,englishFlag = False):
152
+ #Hybrid Unitable +DocTR
153
+ #Good at these kind of tables - with a lot of texts
154
+ table = OcrTable1(englishFlag)
155
+ image = test_file.convert("RGB")
156
+ """
157
+ parts = test_file.split("/")
158
+ filename = parts[-1][:-4]
159
+ debugfolder_filename_page_name= debug_folder+filename+"_"
160
+
161
+ table_code = table.predict([image],debugfolder_filename_page_name,denoise = denoise)
162
+ with open(debugfolder_filename_page_name+'output.txt', 'w') as file:
163
+ file.write(table_code)
164
+ """
165
+
166
+ table_code = table.predict([image],denoise = denoise)
167
+ return table_code
168
+
169
+
170
+ def test_tableOcrOnly2(test_file:Image.Image, debug_folder = './res/table2/'):
171
+ table = OcrTable2()
172
+ #FullUnitable
173
+ #Good at these kind of tables - with not much text
174
+
175
+ image = test_file.convert("RGB")
176
+ table.predict([image],debug_folder)
177
+
178
+ def test_table_component1(test_file = 'TestingFiles/TableOCRTestEnglish.pdf', debug_folder ='./res/table_debug2/',denoise = False,englishFlag = True):
179
+ table_predictor = DetectionAndOcrTable1(englishFlag)
180
+
181
+ images = convert_from_path(test_file)
182
+ for page_number,img in enumerate(images):
183
+
184
+ #print(img.mode)
185
+ print("Looking at page:")
186
+ print(page_number)
187
+ parts = test_file.split("/")
188
+ filename = parts[-1][:-4]
189
+ debugfolder_filename_page_name= debug_folder+filename+"_"+ str(page_number)+'_'
190
+ table_codes = table_predictor.predict(img,debugfolder_filename_page_name=debugfolder_filename_page_name,denoise = denoise)
191
+ for index, table_code in enumerate(table_codes):
192
+ with open(debugfolder_filename_page_name+str(index)+'output.xls', 'w') as file:
193
+ file.write(table_code)
194
+ return table_codes
195
+
196
+ def test_table_component2(test_file = 'TestingFiles/TableOCRTestEnglish.pdf', debug_folder ='./res/table_debug2/'):
197
+ #This components can take in entire pdf page as input , scan for tables and return the table in html format
198
+ #Uses the full unitable model
199
+
200
+ table_predictor = DetectionAndOcrTable2()
201
+
202
+ images = convert_from_path(test_file)
203
+ for page_number,img in enumerate(images):
204
+ print("Looking at page:")
205
+ print(page_number)
206
+ parts = test_file.split("/")
207
+ filename = parts[-1][:-4]
208
+ debugfolder_filename_page_name= debug_folder+filename+"_"+ str(page_number)+'_'
209
+ table_codes = table_predictor.predict(img,debugfolder_filename_page_name=debugfolder_filename_page_name)
210
+ for index, table_code in enumerate(table_codes):
211
+ with open(debugfolder_filename_page_name+str(index)+'output.xls', 'w') as file:
212
+ file.write(table_code)
213
+ return table_codes
214
+
215
+ def test_table_component3(test_file = 'TestingFiles/TableOCRTestEnglish.pdf',debug_folder ='./res/table_debug3/',denoise = False,englishFlag = True):
216
+ table_predictor = DetectionAndOcrTable3(englishFlag)
217
+
218
+ images = convert_from_path(test_file)
219
+ for page_number,img in enumerate(images):
220
+ #print(img.mode)
221
+ print("Looking at page:")
222
+ print(page_number)
223
+ parts = test_file.split("/")
224
+ filename = parts[-1][:-4]
225
+ debugfolder_filename_page_name= debug_folder+filename+"_"+ str(page_number)+'_'
226
+ table_codes = table_predictor.predict(img,debugfolder_filename_page_name=debugfolder_filename_page_name)
227
+ for index, table_code in enumerate(table_codes):
228
+ with open(debugfolder_filename_page_name+str(index)+'output.xls', 'w') as file:
229
+ file.write(table_code)
230
+ return table_codes
231
+
232
+
233
+
234
+ def test_table_component4(test_file = 'TestingFiles/TableOCRTestEnglish.pdf',debug_folder ='./res/table_debug3/'):
235
+ table_predictor = DetectionAndOcrTable4()
236
+
237
+ images = convert_from_path(test_file)
238
+ for page_number,img in enumerate(images):
239
+ #print(img.mode)
240
+ print("Looking at page:")
241
+ print(page_number)
242
+ parts = test_file.split("/")
243
+ filename = parts[-1][:-4]
244
+ debugfolder_filename_page_name= debug_folder+filename+"_"+ str(page_number)+'_'
245
+ table_codes = table_predictor.predict(img,debugfolder_filename_page_name=debugfolder_filename_page_name)
246
+ for index, table_code in enumerate(table_codes):
247
+ with open(debugfolder_filename_page_name+str(index)+'output.xls', 'w') as file:
248
+ file.write(table_code)
249
+ return table_codes
250
+
251
+
252
+ """
253
+ parser = argparse.ArgumentParser(description='Process some strings.')
254
+ parser.add_argument('ocr', type=str, help='type in id of the component to test')
255
+ parser.add_argument('--test_file',type=str, help='path to the testing file')
256
+ parser.add_argument('--debug_folder',type=str, help='path to the folder you want to save your results in')
257
+ parser.add_argument('--englishFlag',type=bool, help='Whether your pdf is in english => could lead to better results ')
258
+ parser.add_argument('--denoise',type=bool, help='preprocessing for not clean scans ')
259
+
260
+ args = parser.parse_args()
261
+ start = time.time()
262
+ if args.ocr == "ocr1":
263
+ test_ocr_component1(args.test_file,args.debug_folder, args.englishFlag)
264
+ elif args.ocr == "table1":
265
+ test_tableOcrOnly1(args.test_file,args.debug_folder,args.englishFlag,args.denoise)
266
+ elif args.ocr == "table2":
267
+ test_tableOcrOnly2(args.test_file,args.debug_folder)
268
+ elif args.ocr =="pdftable1":
269
+ test_table_component1(args.test_file,args.debug_folder,args.englishFlag,args.denoise)
270
+ elif args.ocr =="pdftable2":
271
+ test_table_component2(args.test_file,args.debug_folder)
272
+ elif args.ocr =="pdftable3":
273
+ test_table_component3(args.test_file,args.debug_folder,args.englishFlag,args.denoise)
274
+ elif args.ocr =="pdftable4":
275
+ test_table_component4(args.test_file,args.debug_folder)
276
+
277
+ """
278
+ import gradio as gr
279
+ from gradio_pdf import PDF
280
+
281
+ with gr.Blocks() as demo:
282
+ gr.Markdown("# OCR component")
283
+ inputs_for_ocr = [PDF(label="Document"), gr.Textbox(label="internal debug folder",placeholder = "./res/table1/"),gr.Checkbox(label ="English Document?",value =False)]
284
+ ocr_btn = gr.Button("Run ocr")
285
+
286
+ gr.Examples(
287
+ examples=[["TestingFiles/OCRTest1German.pdf",'./res/table1/',False]],
288
+ inputs=inputs_for_ocr
289
+ )
290
+
291
+ outputs_for_ocr = [gr.Textbox(label="List of annotation objects"), gr.Textbox("Text in page")]
292
+
293
+ ocr_btn.click(fn=test_ocr_component1,
294
+ inputs = inputs_for_ocr,
295
+ outputs = outputs_for_ocr,
296
+ api_name="OCR"
297
+ )
298
+
299
+ gr.Markdown("# Table OCR components that takes a pdf, extract table and return their html code ")
300
+ gr.Markdown("## Component 1 uses table transformer and doctr +Unitable")
301
+ inputs_for_pdftable1 = [PDF(label="Document"), gr.Textbox(label="internal debug folder",placeholder = "./res/table1/"),gr.Checkbox(label ="Denoise?",value =False),gr.Checkbox(label ="English Document?",value =False)]
302
+ table1_btn = gr.Button("Run pdftable1")
303
+
304
+ gr.Examples(
305
+ examples=[["TestingFiles/OCRTest5English.pdf",'./res/table1/',False]],
306
+ inputs=inputs_for_pdftable1
307
+ )
308
+ outputs_for_pdftable1 = [gr.Textbox(label="Table code")]
309
+
310
+ table1_btn.click(fn=test_table_component1,
311
+ inputs = inputs_for_pdftable1,
312
+ outputs = outputs_for_pdftable1,
313
+ api_name="pdfTable1"
314
+ )
315
+
316
+ gr.Markdown("## Component 2 uses table transformer and Unitable")
317
+ inputs_for_pdftable2 = [PDF(label="Document"), gr.Textbox(label="internal debug folder",placeholder = "./res/table1/")]
318
+ table2_btn = gr.Button("Run pdftable2")
319
+
320
+ gr.Examples(
321
+ examples=[["TestingFiles/OCRTest5English.pdf",'./res/table1/',False]],
322
+ inputs=inputs_for_pdftable1
323
+ )
324
+ outputs_for_pdftable2 = [gr.Textbox(label="Table code")]
325
+
326
+ table2_btn.click(fn=test_table_component2,
327
+ inputs = inputs_for_pdftable2,
328
+ outputs = outputs_for_pdftable2,
329
+ api_name="pdfTable2"
330
+ )
331
+
332
+ gr.Markdown("## Component 3 uses Yolo and Unitable+doctr")
333
+ inputs_for_pdftable3 = [PDF(label="Document"), gr.Textbox(label="internal debug folder",placeholder = "./res/table1/"),gr.Checkbox(label ="Denoise?",value =False),gr.Checkbox(label ="English Document?",value =False)]
334
+ table3_btn = gr.Button("Run pdftable3")
335
+
336
+
337
+ gr.Examples(
338
+ examples=[["TestingFiles/TableOCRTestEnglish.pdf",'./res/table1/',False]],
339
+ inputs=inputs_for_pdftable1
340
+ )
341
+ outputs_for_pdftable3 = [gr.Textbox(label="Table code")]
342
+
343
+ table3_btn.click(fn=test_table_component3,
344
+ inputs = inputs_for_pdftable3,
345
+ outputs = outputs_for_pdftable3,
346
+ api_name="pdfTable3"
347
+ )
348
+
349
+ gr.Markdown("## Component 4 uses Yolo and Unitable")
350
+ inputs_for_pdftable4 = [PDF(label="Document"), gr.Textbox(label="internal debug folder",placeholder = "./res/table1/")]
351
+ table4_btn = gr.Button("Run pdftable4")
352
+
353
+ gr.Examples(
354
+ examples=[["TestingFiles/TableOCRTestEasier.pdf",'./res/table1/',False]],
355
+ inputs=inputs_for_pdftable1
356
+ )
357
+ outputs_for_pdftable4 = [gr.Textbox(label="Table code")]
358
+
359
+
360
+ table4_btn.click(fn=test_table_component4,
361
+ inputs = inputs_for_pdftable4,
362
+ outputs = outputs_for_pdftable4,
363
+ api_name="pdfTable4"
364
+ )
365
+
366
+
367
+ gr.Markdown("# Table OCR component that takes image of an cropped tavle, extract table and return their html code ")
368
+
369
+ inputs_for_table1 = [gr.Image(label="Image of cropped table",type='pil'), gr.Textbox(label="internal debug folder",placeholder = "./res/table1/"),gr.Checkbox(label ="Denoise?",value =False),gr.Checkbox(label ="English Document?",value =False)]
370
+ onlytable1_btn = gr.Button("Run table1")
371
+
372
+ gr.Examples(
373
+ examples=[[Image.open("cropped_table.png"),'./res/table1/',False]],
374
+ inputs=inputs_for_table1
375
+ )
376
+ outputs_for_table1 = [gr.HTML(label="Table code")]
377
+
378
+
379
+ onlytable1_btn.click(fn=test_tableOcrOnly1,
380
+ inputs = inputs_for_table1,
381
+ outputs = outputs_for_table1,
382
+ api_name="table1"
383
+ )
384
+
385
+ gr.Markdown("## Another Table OCR component that takes image of an cropped table, extract table and return their html code ")
386
+
387
+ inputs_for_table2 = [gr.Image(label="Image of cropped table",type='pil'), gr.Textbox(label="internal debug folder",placeholder = "./res/table1/")]
388
+ onlytable2_btn = gr.Button("Run table2")
389
+
390
+
391
+ gr.Examples(
392
+ examples=[[Image.open("cropped_table.png"),'./res/table1/',False]],
393
+ inputs=inputs_for_table2
394
+ )
395
+ outputs_for_table2 = [gr.HTML(label="Table code")]
396
+
397
+ onlytable2_btn.click(fn=test_tableOcrOnly2,
398
+ inputs = inputs_for_table2,
399
+ outputs = outputs_for_table2,
400
+ api_name="table2"
401
+ )
402
+
403
+
404
+
405
+
406
+ demo.launch(share=True)
cropped_table.png ADDED
cropped_table_0.png ADDED
cropped_table_1.png ADDED
deepdoc/README.md ADDED
@@ -0,0 +1,122 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ English | [简体中文](./README_zh.md)
2
+
3
+ # *Deep*Doc
4
+
5
+ - [1. Introduction](#1)
6
+ - [2. Vision](#2)
7
+ - [3. Parser](#3)
8
+
9
+ <a name="1"></a>
10
+ ## 1. Introduction
11
+
12
+ With a bunch of documents from various domains, in various formats, and with diverse retrieval requirements,
13
+ accurate analysis becomes a very challenging task. *Deep*Doc is born for that purpose.
14
+ There are 2 parts in *Deep*Doc so far: vision and parser.
15
+ You can run the following test programs if you're interested in our results of OCR, layout recognition and TSR.
16
+ ```bash
17
+ python deepdoc/vision/t_ocr.py -h
18
+ usage: t_ocr.py [-h] --inputs INPUTS [--output_dir OUTPUT_DIR]
19
+
20
+ options:
21
+ -h, --help show this help message and exit
22
+ --inputs INPUTS Directory where to store images or PDFs, or a file path to a single image or PDF
23
+ --output_dir OUTPUT_DIR
24
+ Directory where to store the output images. Default: './ocr_outputs'
25
+ ```
26
+ ```bash
27
+ python deepdoc/vision/t_recognizer.py -h
28
+ usage: t_recognizer.py [-h] --inputs INPUTS [--output_dir OUTPUT_DIR] [--threshold THRESHOLD] [--mode {layout,tsr}]
29
+
30
+ options:
31
+ -h, --help show this help message and exit
32
+ --inputs INPUTS Directory where to store images or PDFs, or a file path to a single image or PDF
33
+ --output_dir OUTPUT_DIR
34
+ Directory where to store the output images. Default: './layouts_outputs'
35
+ --threshold THRESHOLD
36
+ A threshold to filter out detections. Default: 0.5
37
+ --mode {layout,tsr} Task mode: layout recognition or table structure recognition
38
+ ```
39
+
40
+ Our models are served on HuggingFace. If you have trouble downloading HuggingFace models, this might help!!
41
+ ```bash
42
+ export HF_ENDPOINT=https://hf-mirror.com
43
+ ```
44
+
45
+ <a name="2"></a>
46
+ ## 2. Vision
47
+
48
+ We use vision information to resolve problems as human beings do.
49
+ - OCR. Since a lot of documents are presented as images, or at least can be transformed into images,
50
+ OCR is a very essential and fundamental or even universal solution for text extraction.
51
+ ```bash
52
+ python deepdoc/vision/t_ocr.py --inputs=path_to_images_or_pdfs --output_dir=path_to_store_result
53
+ ```
54
+ The inputs could be a directory of images or PDFs, or a single image or PDF.
55
+ You can look into the folder 'path_to_store_result', which contains images demonstrating the positions of the results and
56
+ txt files which contain the OCR text.
57
+ <div align="center" style="margin-top:20px;margin-bottom:20px;">
58
+ <img src="https://github.com/infiniflow/ragflow/assets/12318111/f25bee3d-aaf7-4102-baf5-d5208361d110" width="900"/>
59
+ </div>
60
+
61
+ - Layout recognition. Documents from different domains may have various layouts,
62
+ like, newspaper, magazine, book and résumé are distinct in terms of layout.
63
+ Only when a machine has an accurate layout analysis can it decide whether these text parts are successive,
64
+ or this part needs Table Structure Recognition(TSR) to process, or this part is a figure and described with this caption.
65
+ We have 10 basic layout components which covers most cases:
66
+ - Text
67
+ - Title
68
+ - Figure
69
+ - Figure caption
70
+ - Table
71
+ - Table caption
72
+ - Header
73
+ - Footer
74
+ - Reference
75
+ - Equation
76
+
77
+ Have a try on the following command to see the layout detection results.
78
+ ```bash
79
+ python deepdoc/vision/t_recognizer.py --inputs=path_to_images_or_pdfs --threshold=0.2 --mode=layout --output_dir=path_to_store_result
80
+ ```
81
+ The inputs could be a directory of images or PDFs, or a single image or PDF.
82
+ You can look into the folder 'path_to_store_result', which contains images demonstrating the detection results, as follows:
83
+ <div align="center" style="margin-top:20px;margin-bottom:20px;">
84
+ <img src="https://github.com/infiniflow/ragflow/assets/12318111/07e0f625-9b28-43d0-9fbb-5bf586cd286f" width="1000"/>
85
+ </div>
86
+
87
+ - Table Structure Recognition(TSR). Data table is a frequently used structure to present data including numbers or text.
88
+ And the structure of a table might be very complex, like hierarchy headers, spanning cells and projected row headers.
89
+ Along with TSR, we also reassemble the content into sentences which could be well comprehended by LLM.
90
+ We have five labels for TSR task:
91
+ - Column
92
+ - Row
93
+ - Column header
94
+ - Projected row header
95
+ - Spanning cell
96
+
97
+ Have a try on the following command to see the layout detection results.
98
+ ```bash
99
+ python deepdoc/vision/t_recognizer.py --inputs=path_to_images_or_pdfs --threshold=0.2 --mode=tsr --output_dir=path_to_store_result
100
+ ```
101
+ The inputs could be a directory of images or PDFs, or a single image or PDF.
102
+ You can look into the folder 'path_to_store_result', which contains both images and HTML pages demonstrating the detection results, as follows:
103
+ <div align="center" style="margin-top:20px;margin-bottom:20px;">
104
+ <img src="https://github.com/infiniflow/ragflow/assets/12318111/cb24e81b-f2ba-49f3-ac09-883d75606f4c" width="1000"/>
105
+ </div>
106
+
107
+ <a name="3"></a>
108
+ ## 3. Parser
109
+
110
+ Four kinds of document formats as PDF, DOCX, EXCEL and PPT have their corresponding parser.
111
+ The most complex one is PDF parser since PDF's flexibility. The output of PDF parser includes:
112
+ - Text chunks with their own positions in PDF(page number and rectangular positions).
113
+ - Tables with cropped image from the PDF, and contents which has already translated into natural language sentences.
114
+ - Figures with caption and text in the figures.
115
+
116
+ ### Résumé
117
+
118
+ The résumé is a very complicated kind of document. A résumé which is composed of unstructured text
119
+ with various layouts could be resolved into structured data composed of nearly a hundred fields.
120
+ We haven't opened the parser yet, as we open the processing method after parsing procedure.
121
+
122
+
deepdoc/__init__.py ADDED
@@ -0,0 +1,8 @@
 
 
 
 
 
 
 
 
 
1
+
2
+ """
3
+ In deepdoc/__init__.py, import the class from ragFlow.py and make it available for import from the deepdoc package:
4
+ """
5
+
6
+ from .vision import RagFlow
7
+
8
+ __all__ = ['RagFlow']
deepdoc/models/.gitattributes ADDED
@@ -0,0 +1,35 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ *.7z filter=lfs diff=lfs merge=lfs -text
2
+ *.arrow filter=lfs diff=lfs merge=lfs -text
3
+ *.bin filter=lfs diff=lfs merge=lfs -text
4
+ *.bz2 filter=lfs diff=lfs merge=lfs -text
5
+ *.ckpt filter=lfs diff=lfs merge=lfs -text
6
+ *.ftz filter=lfs diff=lfs merge=lfs -text
7
+ *.gz filter=lfs diff=lfs merge=lfs -text
8
+ *.h5 filter=lfs diff=lfs merge=lfs -text
9
+ *.joblib filter=lfs diff=lfs merge=lfs -text
10
+ *.lfs.* filter=lfs diff=lfs merge=lfs -text
11
+ *.mlmodel filter=lfs diff=lfs merge=lfs -text
12
+ *.model filter=lfs diff=lfs merge=lfs -text
13
+ *.msgpack filter=lfs diff=lfs merge=lfs -text
14
+ *.npy filter=lfs diff=lfs merge=lfs -text
15
+ *.npz filter=lfs diff=lfs merge=lfs -text
16
+ *.onnx filter=lfs diff=lfs merge=lfs -text
17
+ *.ot filter=lfs diff=lfs merge=lfs -text
18
+ *.parquet filter=lfs diff=lfs merge=lfs -text
19
+ *.pb filter=lfs diff=lfs merge=lfs -text
20
+ *.pickle filter=lfs diff=lfs merge=lfs -text
21
+ *.pkl filter=lfs diff=lfs merge=lfs -text
22
+ *.pt filter=lfs diff=lfs merge=lfs -text
23
+ *.pth filter=lfs diff=lfs merge=lfs -text
24
+ *.rar filter=lfs diff=lfs merge=lfs -text
25
+ *.safetensors filter=lfs diff=lfs merge=lfs -text
26
+ saved_model/**/* filter=lfs diff=lfs merge=lfs -text
27
+ *.tar.* filter=lfs diff=lfs merge=lfs -text
28
+ *.tar filter=lfs diff=lfs merge=lfs -text
29
+ *.tflite filter=lfs diff=lfs merge=lfs -text
30
+ *.tgz filter=lfs diff=lfs merge=lfs -text
31
+ *.wasm filter=lfs diff=lfs merge=lfs -text
32
+ *.xz filter=lfs diff=lfs merge=lfs -text
33
+ *.zip filter=lfs diff=lfs merge=lfs -text
34
+ *.zst filter=lfs diff=lfs merge=lfs -text
35
+ *tfevents* filter=lfs diff=lfs merge=lfs -text
deepdoc/models/README.md ADDED
@@ -0,0 +1,3 @@
 
 
 
 
1
+ ---
2
+ license: apache-2.0
3
+ ---
deepdoc/models/det.onnx ADDED
@@ -0,0 +1,3 @@
 
 
 
 
1
+ version https://git-lfs.github.com/spec/v1
2
+ oid sha256:30a86f5731181461d08021402766601e4302a9b9b9666be8aff402696339cdff
3
+ size 4745517
deepdoc/models/layout.laws.onnx ADDED
@@ -0,0 +1,3 @@
 
 
 
 
1
+ version https://git-lfs.github.com/spec/v1
2
+ oid sha256:485a7ddf6889ef15a150bded7091ec1ea5467871f50a88f5f4297c66c1ecef1e
3
+ size 12246134
deepdoc/models/layout.manual.onnx ADDED
@@ -0,0 +1,3 @@
 
 
 
 
1
+ version https://git-lfs.github.com/spec/v1
2
+ oid sha256:485a7ddf6889ef15a150bded7091ec1ea5467871f50a88f5f4297c66c1ecef1e
3
+ size 12246134
deepdoc/models/layout.onnx ADDED
@@ -0,0 +1,3 @@
 
 
 
 
1
+ version https://git-lfs.github.com/spec/v1
2
+ oid sha256:485a7ddf6889ef15a150bded7091ec1ea5467871f50a88f5f4297c66c1ecef1e
3
+ size 12246134
deepdoc/models/layout.paper.onnx ADDED
@@ -0,0 +1,3 @@
 
 
 
 
1
+ version https://git-lfs.github.com/spec/v1
2
+ oid sha256:485a7ddf6889ef15a150bded7091ec1ea5467871f50a88f5f4297c66c1ecef1e
3
+ size 12246134
deepdoc/models/ocr.res ADDED
@@ -0,0 +1,6623 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ '
2
+
3
+
4
+
5
+
6
+
7
+
8
+ 贿
9
+
10
+
11
+
12
+
13
+
14
+
15
+
16
+
17
+
18
+
19
+
20
+
21
+
22
+
23
+
24
+
25
+ 2
26
+ 0
27
+ 8
28
+ -
29
+ 7
30
+
31
+ >
32
+ :
33
+ ]
34
+ ,
35
+
36
+
37
+
38
+
39
+
40
+
41
+
42
+
43
+
44
+
45
+
46
+
47
+
48
+
49
+
50
+
51
+
52
+
53
+
54
+
55
+
56
+
57
+
58
+
59
+
60
+
61
+
62
+
63
+
64
+
65
+
66
+
67
+
68
+
69
+
70
+
71
+
72
+
73
+
74
+
75
+
76
+
77
+
78
+
79
+
80
+
81
+
82
+ 蹿
83
+
84
+
85
+
86
+
87
+
88
+
89
+
90
+
91
+
92
+
93
+ 1
94
+ 3
95
+
96
+
97
+
98
+
99
+
100
+
101
+
102
+
103
+
104
+
105
+
106
+
107
+
108
+
109
+
110
+
111
+
112
+
113
+
114
+
115
+
116
+
117
+
118
+
119
+
120
+
121
+
122
+
123
+
124
+
125
+
126
+
127
+
128
+
129
+
130
+
131
+
132
+
133
+
134
+
135
+
136
+
137
+
138
+
139
+
140
+
141
+
142
+
143
+
144
+
145
+
146
+
147
+
148
+
149
+
150
+
151
+
152
+
153
+
154
+
155
+
156
+
157
+
158
+
159
+
160
+
161
+
162
+
163
+
164
+
165
+
166
+ !
167
+
168
+
169
+
170
+
171
+
172
+
173
+
174
+
175
+
176
+
177
+
178
+
179
+
180
+
181
+
182
+
183
+
184
+
185
+
186
+
187
+
188
+
189
+
190
+
191
+
192
+
193
+
194
+
195
+
196
+
197
+
198
+
199
+
200
+
201
+
202
+
203
+
204
+
205
+
206
+
207
+
208
+
209
+
210
+
211
+
212
+
213
+
214
+
215
+
216
+
217
+
218
+
219
+
220
+
221
+
222
+
223
+
224
+
225
+
226
+
227
+
228
+
229
+
230
+
231
+
232
+
233
+
234
+
235
+
236
+
237
+
238
+
239
+
240
+
241
+
242
+
243
+
244
+
245
+
246
+
247
+
248
+
249
+
250
+
251
+
252
+
253
+
254
+
255
+
256
+
257
+
258
+
259
+
260
+
261
+
262
+
263
+
264
+
265
+
266
+
267
+
268
+
269
+
270
+
271
+
272
+
273
+
274
+
275
+
276
+
277
+
278
+
279
+
280
+
281
+
282
+
283
+
284
+
285
+
286
+
287
+
288
+
289
+
290
+
291
+
292
+
293
+
294
+
295
+
296
+
297
+
298
+
299
+
300
+
301
+
302
+
303
+
304
+ 诿
305
+
306
+
307
+
308
+
309
+
310
+
311
+
312
+
313
+
314
+
315
+
316
+ 线
317
+
318
+
319
+
320
+
321
+
322
+
323
+
324
+
325
+
326
+
327
+
328
+
329
+
330
+
331
+
332
+
333
+
334
+
335
+
336
+
337
+
338
+
339
+
340
+
341
+
342
+
343
+
344
+
345
+
346
+
347
+
348
+
349
+
350
+
351
+
352
+
353
+
354
+
355
+
356
+
357
+
358
+
359
+
360
+
361
+
362
+
363
+
364
+
365
+
366
+
367
+
368
+
369
+
370
+
371
+
372
+
373
+
374
+
375
+
376
+
377
+
378
+
379
+
380
+
381
+
382
+
383
+
384
+
385
+
386
+
387
+
388
+
389
+
390
+
391
+
392
+ 尿
393
+
394
+
395
+
396
+
397
+
398
+
399
+
400
+
401
+ |
402
+ ;
403
+
404
+
405
+
406
+
407
+
408
+
409
+
410
+
411
+
412
+
413
+
414
+
415
+
416
+
417
+
418
+
419
+
420
+
421
+
422
+
423
+
424
+
425
+ H
426
+
427
+
428
+
429
+
430
+
431
+
432
+
433
+
434
+
435
+
436
+
437
+
438
+
439
+
440
+
441
+
442
+
443
+
444
+
445
+
446
+
447
+
448
+
449
+
450
+
451
+
452
+
453
+
454
+
455
+
456
+
457
+
458
+
459
+
460
+
461
+
462
+
463
+
464
+
465
+
466
+ .
467
+
468
+
469
+
470
+
471
+
472
+
473
+
474
+
475
+
476
+
477
+
478
+
479
+
480
+
481
+
482
+
483
+
484
+
485
+
486
+
487
+ /
488
+ *
489
+
490
+ 忿
491
+
492
+
493
+
494
+
495
+
496
+
497
+
498
+
499
+
500
+
501
+
502
+
503
+
504
+ 齿
505
+
506
+
507
+
508
+
509
+
510
+
511
+
512
+
513
+
514
+
515
+
516
+
517
+
518
+
519
+
520
+
521
+
522
+
523
+
524
+
525
+
526
+
527
+
528
+
529
+
530
+
531
+
532
+
533
+
534
+
535
+
536
+
537
+
538
+
539
+
540
+
541
+
542
+
543
+
544
+
545
+
546
+
547
+
548
+
549
+
550
+
551
+
552
+
553
+
554
+
555
+
556
+
557
+
558
+
559
+
560
+
561
+
562
+
563
+
564
+
565
+
566
+
567
+
568
+
569
+
570
+
571
+ 西
572
+
573
+
574
+
575
+
576
+
577
+
578
+
579
+
580
+
581
+
582
+
583
+
584
+
585
+
586
+
587
+
588
+
589
+
590
+
591
+
592
+
593
+
594
+
595
+
596
+
597
+
598
+
599
+
600
+
601
+
602
+
603
+
604
+
605
+
606
+
607
+
608
+
609
+
610
+
611
+
612
+
613
+
614
+
615
+
616
+
617
+
618
+
619
+
620
+
621
+
622
+
623
+
624
+
625
+
626
+
627
+
628
+
629
+
630
+
631
+ 5
632
+ 4
633
+
634
+
635
+
636
+
637
+
638
+
639
+
640
+
641
+
642
+
643
+
644
+
645
+
646
+
647
+
648
+
649
+
650
+
651
+
652
+
653
+
654
+
655
+
656
+ 亿
657
+
658
+
659
+
660
+
661
+
662
+
663
+
664
+
665
+
666
+
667
+
668
+
669
+
670
+
671
+
672
+
673
+
674
+
675
+
676
+
677
+
678
+
679
+
680
+
681
+
682
+
683
+
684
+
685
+
686
+
687
+
688
+
689
+
690
+
691
+
692
+
693
+
694
+
695
+
696
+
697
+
698
+
699
+
700
+
701
+
702
+
703
+
704
+
705
+
706
+
707
+
708
+
709
+
710
+
711
+
712
+
713
+
714
+
715
+
716
+
717
+
718
+
719
+
720
+
721
+ (
722
+
723
+
724
+
725
+
726
+
727
+
728
+
729
+
730
+
731
+
732
+
733
+
734
+
735
+
736
+
737
+
738
+
739
+
740
+
741
+
742
+
743
+
744
+
745
+
746
+
747
+
748
+
749
+
750
+
751
+
752
+
753
+
754
+ 访
755
+
756
+
757
+
758
+
759
+
760
+
761
+
762
+
763
+
764
+
765
+
766
+
767
+
768
+
769
+
770
+
771
+
772
+
773
+
774
+
775
+
776
+
777
+
778
+
779
+
780
+
781
+
782
+
783
+
784
+
785
+
786
+
787
+
788
+
789
+
790
+
791
+
792
+
793
+
794
+
795
+
796
+
797
+
798
+
799
+
800
+
801
+
802
+
803
+
804
+
805
+
806
+
807
+
808
+
809
+
810
+
811
+
812
+
813
+
814
+
815
+
816
+
817
+
818
+
819
+
820
+
821
+
822
+
823
+
824
+
825
+
826
+
827
+
828
+
829
+
830
+
831
+
832
+
833
+
834
+
835
+
836
+
837
+
838
+
839
+
840
+
841
+
842
+
843
+
844
+
845
+
846
+
847
+
848
+
849
+
850
+
851
+
852
+
853
+
854
+
855
+
856
+
857
+
858
+
859
+
860
+
861
+
862
+
863
+
864
+
865
+
866
+
867
+
868
+
869
+
870
+
871
+
872
+
873
+
874
+
875
+
876
+
877
+
878
+
879
+
880
+
881
+
882
+
883
+
884
+
885
+
886
+
887
+
888
+
889
+
890
+
891
+
892
+
893
+
894
+
895
+
896
+
897
+
898
+
899
+
900
+
901
+
902
+
903
+
904
+
905
+
906
+
907
+
908
+
909
+
910
+
911
+
912
+
913
+
914
+
915
+
916
+
917
+
918
+
919
+
920
+
921
+
922
+
923
+
924
+
925
+
926
+
927
+
928
+
929
+
930
+
931
+
932
+
933
+ 6
934
+
935
+
936
+
937
+
938
+
939
+
940
+
941
+
942
+
943
+
944
+
945
+
946
+
947
+
948
+
949
+
950
+
951
+
952
+
953
+
954
+
955
+
956
+
957
+
958
+
959
+
960
+
961
+
962
+
963
+
964
+
965
+ )
966
+
967
+
968
+
969
+
970
+
971
+
972
+
973
+
974
+
975
+
976
+
977
+
978
+
979
+
980
+
981
+
982
+
983
+
984
+
985
+
986
+
987
+
988
+
989
+ 稿
990
+
991
+
992
+
993
+
994
+
995
+
996
+
997
+
998
+
999
+
1000
+
1001
+
1002
+
1003
+
1004
+
1005
+
1006
+
1007
+
1008
+
1009
+
1010
+
1011
+
1012
+
1013
+
1014
+
1015
+
1016
+
1017
+
1018
+
1019
+
1020
+
1021
+
1022
+
1023
+
1024
+
1025
+
1026
+
1027
+
1028
+
1029
+
1030
+
1031
+
1032
+
1033
+ s
1034
+ u
1035
+
1036
+
1037
+
1038
+
1039
+
1040
+
1041
+
1042
+
1043
+
1044
+
1045
+
1046
+
1047
+
1048
+
1049
+
1050
+
1051
+
1052
+
1053
+
1054
+
1055
+
1056
+
1057
+
1058
+
1059
+
1060
+
1061
+
1062
+
1063
+
1064
+
1065
+
1066
+
1067
+
1068
+
1069
+
1070
+
1071
+
1072
+
1073
+
1074
+
1075
+
1076
+
1077
+
1078
+
1079
+
1080
+
1081
+
1082
+
1083
+
1084
+
1085
+
1086
+
1087
+
1088
+
1089
+
1090
+
1091
+
1092
+
1093
+
1094
+
1095
+
1096
+
1097
+
1098
+
1099
+
1100
+
1101
+
1102
+
1103
+
1104
+
1105
+ [
1106
+
1107
+
1108
+
1109
+ 9
1110
+
1111
+
1112
+
1113
+
1114
+
1115
+
1116
+
1117
+
1118
+
1119
+
1120
+
1121
+
1122
+
1123
+
1124
+
1125
+
1126
+
1127
+
1128
+
1129
+
1130
+
1131
+
1132
+
1133
+
1134
+
1135
+
1136
+
1137
+
1138
+
1139
+
1140
+
1141
+
1142
+
1143
+
1144
+
1145
+
1146
+
1147
+
1148
+
1149
+
1150
+
1151
+
1152
+
1153
+
1154
+
1155
+
1156
+
1157
+
1158
+
1159
+
1160
+
1161
+
1162
+
1163
+
1164
+
1165
+
1166
+
1167
+
1168
+
1169
+
1170
+
1171
+
1172
+
1173
+
1174
+
1175
+
1176
+
1177
+
1178
+
1179
+
1180
+
1181
+
1182
+ 岿
1183
+
1184
+
1185
+
1186
+
1187
+
1188
+
1189
+
1190
+
1191
+
1192
+
1193
+
1194
+
1195
+
1196
+
1197
+
1198
+
1199
+
1200
+ 广
1201
+
1202
+
1203
+
1204
+
1205
+
1206
+
1207
+
1208
+
1209
+
1210
+
1211
+
1212
+
1213
+
1214
+
1215
+
1216
+
1217
+ S
1218
+ Y
1219
+ F
1220
+ D
1221
+ A
1222
+
1223
+
1224
+
1225
+
1226
+
1227
+
1228
+
1229
+
1230
+
1231
+
1232
+
1233
+
1234
+
1235
+
1236
+
1237
+
1238
+
1239
+
1240
+
1241
+
1242
+
1243
+
1244
+
1245
+
1246
+
1247
+
1248
+
1249
+
1250
+
1251
+
1252
+
1253
+
1254
+
1255
+
1256
+
1257
+
1258
+
1259
+
1260
+
1261
+
1262
+
1263
+
1264
+
1265
+
1266
+
1267
+
1268
+
1269
+
1270
+
1271
+
1272
+
1273
+
1274
+
1275
+
1276
+
1277
+
1278
+
1279
+
1280
+
1281
+
1282
+
1283
+
1284
+
1285
+
1286
+
1287
+
1288
+
1289
+
1290
+
1291
+
1292
+
1293
+
1294
+
1295
+
1296
+
1297
+
1298
+
1299
+
1300
+
1301
+
1302
+
1303
+
1304
+
1305
+
1306
+
1307
+
1308
+
1309
+
1310
+ P
1311
+
1312
+
1313
+
1314
+
1315
+
1316
+
1317
+
1318
+
1319
+
1320
+
1321
+
1322
+
1323
+
1324
+
1325
+
1326
+
1327
+
1328
+
1329
+
1330
+
1331
+
1332
+
1333
+
1334
+
1335
+
1336
+
1337
+
1338
+
1339
+
1340
+
1341
+
1342
+
1343
+
1344
+
1345
+
1346
+
1347
+
1348
+
1349
+
1350
+
1351
+
1352
+
1353
+
1354
+
1355
+
1356
+
1357
+
1358
+
1359
+
1360
+
1361
+
1362
+
1363
+
1364
+
1365
+
1366
+
1367
+
1368
+
1369
+
1370
+
1371
+
1372
+
1373
+
1374
+
1375
+
1376
+
1377
+
1378
+
1379
+
1380
+
1381
+ T
1382
+
1383
+
1384
+
1385
+
1386
+ 湿
1387
+
1388
+
1389
+
1390
+
1391
+
1392
+
1393
+
1394
+
1395
+
1396
+
1397
+ 窿
1398
+
1399
+
1400
+
1401
+
1402
+
1403
+
1404
+
1405
+
1406
+
1407
+
1408
+
1409
+
1410
+
1411
+
1412
+
1413
+
1414
+
1415
+
1416
+
1417
+
1418
+
1419
+
1420
+
1421
+
1422
+
1423
+
1424
+
1425
+
1426
+
1427
+
1428
+
1429
+
1430
+
1431
+
1432
+
1433
+
1434
+
1435
+
1436
+
1437
+
1438
+
1439
+
1440
+
1441
+
1442
+
1443
+
1444
+
1445
+
1446
+
1447
+
1448
+
1449
+
1450
+
1451
+
1452
+
1453
+
1454
+
1455
+
1456
+
1457
+
1458
+
1459
+
1460
+
1461
+
1462
+
1463
+
1464
+
1465
+
1466
+
1467
+
1468
+
1469
+
1470
+
1471
+
1472
+
1473
+
1474
+
1475
+
1476
+
1477
+
1478
+
1479
+
1480
+
1481
+
1482
+
1483
+
1484
+
1485
+
1486
+
1487
+
1488
+
1489
+
1490
+
1491
+
1492
+
1493
+
1494
+
1495
+
1496
+
1497
+
1498
+
1499
+
1500
+
1501
+
1502
+
1503
+
1504
+
1505
+
1506
+
1507
+
1508
+
1509
+
1510
+
1511
+
1512
+
1513
+
1514
+
1515
+
1516
+
1517
+
1518
+
1519
+
1520
+
1521
+
1522
+
1523
+
1524
+
1525
+
1526
+
1527
+
1528
+
1529
+ @
1530
+
1531
+
1532
+
1533
+
1534
+
1535
+
1536
+
1537
+
1538
+
1539
+
1540
+
1541
+
1542
+
1543
+
1544
+
1545
+
1546
+
1547
+
1548
+
1549
+
1550
+
1551
+
1552
+
1553
+
1554
+
1555
+
1556
+
1557
+
1558
+
1559
+
1560
+
1561
+
1562
+
1563
+ 丿
1564
+
1565
+
1566
+
1567
+
1568
+
1569
+
1570
+
1571
+
1572
+
1573
+
1574
+
1575
+
1576
+
1577
+
1578
+
1579
+
1580
+
1581
+
1582
+
1583
+
1584
+
1585
+
1586
+
1587
+
1588
+
1589
+
1590
+
1591
+
1592
+
1593
+
1594
+
1595
+
1596
+
1597
+
1598
+
1599
+
1600
+
1601
+
1602
+
1603
+
1604
+
1605
+
1606
+
1607
+
1608
+
1609
+
1610
+
1611
+
1612
+
1613
+
1614
+
1615
+
1616
+
1617
+
1618
+
1619
+
1620
+
1621
+
1622
+
1623
+
1624
+
1625
+
1626
+
1627
+
1628
+
1629
+
1630
+
1631
+
1632
+
1633
+
1634
+
1635
+
1636
+
1637
+
1638
+
1639
+
1640
+
1641
+
1642
+
1643
+ 沿
1644
+
1645
+
1646
+
1647
+
1648
+
1649
+
1650
+
1651
+
1652
+
1653
+
1654
+
1655
+
1656
+
1657
+
1658
+
1659
+
1660
+
1661
+
1662
+
1663
+
1664
+
1665
+
1666
+
1667
+
1668
+
1669
+
1670
+
1671
+
1672
+
1673
+
1674
+
1675
+
1676
+
1677
+
1678
+
1679
+
1680
+
1681
+ 使
1682
+
1683
+
1684
+
1685
+
1686
+
1687
+
1688
+
1689
+
1690
+
1691
+
1692
+
1693
+
1694
+
1695
+ 绿
1696
+
1697
+
1698
+
1699
+
1700
+
1701
+
1702
+
1703
+
1704
+
1705
+
1706
+
1707
+
1708
+
1709
+
1710
+
1711
+
1712
+
1713
+
1714
+
1715
+
1716
+
1717
+
1718
+
1719
+
1720
+
1721
+
1722
+
1723
+
1724
+
1725
+
1726
+
1727
+
1728
+
1729
+
1730
+
1731
+
1732
+
1733
+
1734
+
1735
+
1736
+
1737
+
1738
+
1739
+
1740
+
1741
+
1742
+
1743
+
1744
+
1745
+
1746
+
1747
+
1748
+
1749
+
1750
+
1751
+
1752
+
1753
+
1754
+
1755
+
1756
+
1757
+
1758
+
1759
+
1760
+
1761
+
1762
+
1763
+
1764
+
1765
+
1766
+
1767
+
1768
+
1769
+
1770
+
1771
+
1772
+
1773
+
1774
+
1775
+
1776
+
1777
+
1778
+
1779
+
1780
+
1781
+
1782
+
1783
+
1784
+
1785
+
1786
+
1787
+
1788
+
1789
+
1790
+
1791
+
1792
+
1793
+
1794
+
1795
+
1796
+
1797
+
1798
+
1799
+
1800
+
1801
+
1802
+
1803
+
1804
+
1805
+
1806
+
1807
+
1808
+
1809
+
1810
+
1811
+ %
1812
+
1813
+
1814
+
1815
+
1816
+
1817
+
1818
+
1819
+
1820
+
1821
+
1822
+ "
1823
+
1824
+
1825
+
1826
+
1827
+
1828
+
1829
+
1830
+
1831
+
1832
+
1833
+
1834
+
1835
+
1836
+
1837
+ 婿
1838
+
1839
+
1840
+
1841
+
1842
+
1843
+
1844
+
1845
+
1846
+
1847
+
1848
+
1849
+
1850
+
1851
+
1852
+
1853
+
1854
+
1855
+
1856
+
1857
+
1858
+
1859
+
1860
+
1861
+
1862
+
1863
+
1864
+
1865
+
1866
+
1867
+
1868
+
1869
+
1870
+
1871
+
1872
+
1873
+
1874
+
1875
+
1876
+
1877
+
1878
+
1879
+
1880
+
1881
+
1882
+
1883
+
1884
+
1885
+
1886
+
1887
+
1888
+
1889
+
1890
+
1891
+
1892
+
1893
+
1894
+
1895
+
1896
+
1897
+
1898
+
1899
+
1900
+
1901
+
1902
+
1903
+
1904
+
1905
+
1906
+
1907
+
1908
+
1909
+
1910
+
1911
+
1912
+
1913
+
1914
+
1915
+
1916
+
1917
+
1918
+
1919
+
1920
+
1921
+
1922
+
1923
+
1924
+
1925
+
1926
+
1927
+
1928
+
1929
+
1930
+
1931
+
1932
+
1933
+
1934
+
1935
+
1936
+
1937
+
1938
+
1939
+
1940
+
1941
+
1942
+
1943
+
1944
+
1945
+
1946
+
1947
+
1948
+
1949
+
1950
+
1951
+
1952
+
1953
+
1954
+
1955
+
1956
+
1957
+
1958
+ r
1959
+
1960
+
1961
+
1962
+
1963
+
1964
+
1965
+
1966
+
1967
+
1968
+
1969
+
1970
+
1971
+
1972
+
1973
+
1974
+
1975
+
1976
+
1977
+
1978
+
1979
+
1980
+
1981
+
1982
+
1983
+
1984
+
1985
+
1986
+
1987
+
1988
+
1989
+ =
1990
+
1991
+
1992
+
1993
+
1994
+
1995
+
1996
+
1997
+
1998
+
1999
+
2000
+ 饿
2001
+
2002
+
2003
+
2004
+
2005
+
2006
+
2007
+
2008
+
2009
+
2010
+
2011
+
2012
+
2013
+
2014
+
2015
+
2016
+
2017
+
2018
+
2019
+
2020
+
2021
+
2022
+
2023
+
2024
+
2025
+
2026
+
2027
+
2028
+
2029
+
2030
+
2031
+
2032
+
2033
+
2034
+
2035
+
2036
+
2037
+
2038
+
2039
+
2040
+
2041
+
2042
+
2043
+
2044
+
2045
+
2046
+
2047
+
2048
+
2049
+
2050
+
2051
+
2052
+
2053
+
2054
+
2055
+
2056
+
2057
+
2058
+
2059
+
2060
+
2061
+
2062
+
2063
+
2064
+
2065
+
2066
+
2067
+
2068
+
2069
+
2070
+
2071
+
2072
+
2073
+
2074
+
2075
+
2076
+
2077
+
2078
+
2079
+
2080
+
2081
+
2082
+
2083
+
2084
+
2085
+
2086
+
2087
+
2088
+
2089
+
2090
+
2091
+
2092
+
2093
+
2094
+
2095
+
2096
+
2097
+
2098
+
2099
+
2100
+
2101
+
2102
+
2103
+
2104
+
2105
+
2106
+
2107
+
2108
+
2109
+
2110
+
2111
+
2112
+
2113
+
2114
+
2115
+ ˇ
2116
+
2117
+
2118
+
2119
+
2120
+
2121
+
2122
+
2123
+
2124
+
2125
+
2126
+
2127
+
2128
+
2129
+
2130
+
2131
+
2132
+
2133
+
2134
+
2135
+
2136
+
2137
+
2138
+
2139
+
2140
+
2141
+
2142
+
2143
+
2144
+
2145
+
2146
+
2147
+
2148
+
2149
+
2150
+
2151
+
2152
+
2153
+
2154
+
2155
+
2156
+ q
2157
+
2158
+
2159
+
2160
+
2161
+
2162
+
2163
+
2164
+
2165
+
2166
+
2167
+
2168
+
2169
+
2170
+
2171
+
2172
+
2173
+
2174
+
2175
+
2176
+
2177
+
2178
+
2179
+
2180
+
2181
+
2182
+
2183
+
2184
+
2185
+
2186
+
2187
+
2188
+
2189
+
2190
+
2191
+
2192
+
2193
+
2194
+
2195
+
2196
+
2197
+
2198
+
2199
+
2200
+
2201
+
2202
+
2203
+
2204
+
2205
+
2206
+
2207
+
2208
+
2209
+
2210
+
2211
+
2212
+
2213
+
2214
+
2215
+
2216
+
2217
+
2218
+
2219
+
2220
+
2221
+
2222
+
2223
+
2224
+
2225
+
2226
+
2227
+
2228
+
2229
+
2230
+
2231
+
2232
+
2233
+
2234
+
2235
+
2236
+
2237
+
2238
+
2239
+
2240
+
2241
+
2242
+
2243
+
2244
+
2245
+
2246
+
2247
+
2248
+
2249
+
2250
+
2251
+
2252
+
2253
+
2254
+
2255
+
2256
+
2257
+
2258
+
2259
+
2260
+
2261
+
2262
+
2263
+
2264
+
2265
+
2266
+
2267
+
2268
+
2269
+ ÷
2270
+
2271
+
2272
+
2273
+
2274
+
2275
+
2276
+
2277
+
2278
+
2279
+
2280
+
2281
+
2282
+
2283
+
2284
+
2285
+
2286
+
2287
+
2288
+
2289
+
2290
+
2291
+
2292
+
2293
+
2294
+
2295
+
2296
+
2297
+
2298
+
2299
+
2300
+
2301
+
2302
+
2303
+
2304
+
2305
+
2306
+
2307
+
2308
+
2309
+
2310
+
2311
+
2312
+
2313
+
2314
+
2315
+
2316
+
2317
+
2318
+
2319
+
2320
+
2321
+
2322
+
2323
+
2324
+
2325
+
2326
+
2327
+
2328
+
2329
+
2330
+
2331
+
2332
+
2333
+
2334
+
2335
+
2336
+
2337
+
2338
+
2339
+
2340
+
2341
+
2342
+
2343
+
2344
+
2345
+
2346
+
2347
+
2348
+
2349
+
2350
+
2351
+
2352
+
2353
+
2354
+
2355
+
2356
+
2357
+
2358
+
2359
+
2360
+
2361
+
2362
+
2363
+
2364
+
2365
+
2366
+
2367
+
2368
+
2369
+
2370
+
2371
+
2372
+
2373
+
2374
+
2375
+
2376
+
2377
+
2378
+
2379
+
2380
+
2381
+ 椿
2382
+
2383
+
2384
+
2385
+ 寿
2386
+
2387
+
2388
+
2389
+
2390
+
2391
+
2392
+
2393
+
2394
+
2395
+
2396
+
2397
+
2398
+
2399
+
2400
+
2401
+
2402
+
2403
+
2404
+
2405
+
2406
+
2407
+
2408
+
2409
+
2410
+
2411
+
2412
+
2413
+
2414
+
2415
+
2416
+
2417
+
2418
+
2419
+
2420
+
2421
+
2422
+
2423
+
2424
+
2425
+
2426
+
2427
+
2428
+
2429
+
2430
+
2431
+
2432
+
2433
+
2434
+
2435
+
2436
+
2437
+
2438
+
2439
+
2440
+
2441
+
2442
+
2443
+
2444
+
2445
+
2446
+
2447
+
2448
+
2449
+
2450
+
2451
+
2452
+
2453
+
2454
+
2455
+
2456
+ ?
2457
+
2458
+
2459
+
2460
+
2461
+
2462
+
2463
+
2464
+
2465
+
2466
+
2467
+
2468
+
2469
+
2470
+
2471
+
2472
+
2473
+
2474
+
2475
+
2476
+
2477
+
2478
+
2479
+
2480
+
2481
+
2482
+
2483
+
2484
+
2485
+
2486
+
2487
+
2488
+
2489
+
2490
+
2491
+
2492
+
2493
+
2494
+
2495
+
2496
+
2497
+
2498
+
2499
+
2500
+
2501
+
2502
+
2503
+
2504
+
2505
+
2506
+
2507
+
2508
+
2509
+
2510
+
2511
+
2512
+
2513
+
2514
+
2515
+
2516
+
2517
+
2518
+
2519
+
2520
+
2521
+
2522
+
2523
+
2524
+
2525
+
2526
+
2527
+
2528
+
2529
+
2530
+
2531
+
2532
+
2533
+
2534
+
2535
+
2536
+
2537
+
2538
+
2539
+
2540
+
2541
+
2542
+
2543
+
2544
+
2545
+
2546
+
2547
+
2548
+
2549
+
2550
+
2551
+
2552
+
2553
+
2554
+
2555
+
2556
+
2557
+
2558
+
2559
+
2560
+
2561
+
2562
+
2563
+
2564
+
2565
+
2566
+
2567
+
2568
+
2569
+
2570
+
2571
+
2572
+
2573
+
2574
+
2575
+
2576
+
2577
+
2578
+
2579
+
2580
+
2581
+
2582
+
2583
+
2584
+
2585
+
2586
+
2587
+
2588
+
2589
+
2590
+
2591
+
2592
+
2593
+
2594
+
2595
+
2596
+
2597
+
2598
+ 便
2599
+
2600
+
2601
+
2602
+
2603
+
2604
+
2605
+
2606
+
2607
+
2608
+
2609
+
2610
+
2611
+
2612
+
2613
+
2614
+
2615
+
2616
+
2617
+
2618
+
2619
+
2620
+
2621
+
2622
+
2623
+
2624
+
2625
+
2626
+
2627
+
2628
+
2629
+
2630
+
2631
+
2632
+
2633
+
2634
+
2635
+
2636
+
2637
+
2638
+
2639
+
2640
+
2641
+
2642
+
2643
+
2644
+
2645
+
2646
+
2647
+
2648
+
2649
+
2650
+
2651
+
2652
+
2653
+
2654
+
2655
+
2656
+
2657
+
2658
+
2659
+
2660
+
2661
+
2662
+
2663
+
2664
+
2665
+
2666
+
2667
+ 殿
2668
+
2669
+
2670
+
2671
+
2672
+
2673
+
2674
+
2675
+
2676
+
2677
+
2678
+
2679
+
2680
+
2681
+
2682
+
2683
+
2684
+
2685
+
2686
+
2687
+
2688
+
2689
+
2690
+
2691
+
2692
+
2693
+
2694
+
2695
+
2696
+
2697
+ J
2698
+
2699
+
2700
+
2701
+
2702
+
2703
+
2704
+
2705
+
2706
+
2707
+
2708
+
2709
+
2710
+ l
2711
+
2712
+
2713
+
2714
+
2715
+
2716
+
2717
+
2718
+
2719
+
2720
+
2721
+
2722
+
2723
+
2724
+
2725
+
2726
+
2727
+
2728
+
2729
+
2730
+
2731
+
2732
+
2733
+
2734
+
2735
+
2736
+
2737
+
2738
+
2739
+
2740
+
2741
+
2742
+
2743
+
2744
+
2745
+
2746
+
2747
+
2748
+
2749
+
2750
+
2751
+
2752
+
2753
+
2754
+
2755
+
2756
+
2757
+
2758
+
2759
+
2760
+
2761
+
2762
+
2763
+
2764
+
2765
+
2766
+
2767
+
2768
+
2769
+
2770
+
2771
+
2772
+
2773
+
2774
+
2775
+
2776
+
2777
+
2778
+
2779
+
2780
+
2781
+
2782
+
2783
+
2784
+
2785
+
2786
+
2787
+
2788
+
2789
+
2790
+
2791
+
2792
+
2793
+
2794
+
2795
+
2796
+
2797
+
2798
+
2799
+
2800
+
2801
+
2802
+
2803
+
2804
+
2805
+
2806
+
2807
+
2808
+
2809
+
2810
+
2811
+
2812
+
2813
+
2814
+
2815
+
2816
+
2817
+
2818
+
2819
+
2820
+
2821
+
2822
+
2823
+
2824
+
2825
+
2826
+
2827
+
2828
+
2829
+
2830
+
2831
+
2832
+
2833
+
2834
+
2835
+
2836
+
2837
+
2838
+
2839
+
2840
+
2841
+
2842
+
2843
+
2844
+
2845
+
2846
+
2847
+
2848
+
2849
+
2850
+
2851
+
2852
+
2853
+
2854
+ &
2855
+
2856
+
2857
+
2858
+
2859
+
2860
+
2861
+
2862
+
2863
+
2864
+
2865
+
2866
+
2867
+
2868
+
2869
+
2870
+
2871
+
2872
+
2873
+
2874
+
2875
+
2876
+
2877
+
2878
+
2879
+
2880
+
2881
+
2882
+
2883
+
2884
+
2885
+
2886
+
2887
+
2888
+
2889
+
2890
+
2891
+
2892
+
2893
+
2894
+
2895
+
2896
+
2897
+
2898
+
2899
+
2900
+
2901
+
2902
+
2903
+
2904
+
2905
+
2906
+
2907
+
2908
+
2909
+
2910
+
2911
+
2912
+
2913
+
2914
+
2915
+
2916
+
2917
+
2918
+
2919
+
2920
+
2921
+
2922
+
2923
+
2924
+
2925
+
2926
+
2927
+
2928
+
2929
+
2930
+
2931
+
2932
+
2933
+
2934
+
2935
+
2936
+
2937
+
2938
+
2939
+
2940
+
2941
+
2942
+
2943
+ 驿
2944
+
2945
+
2946
+
2947
+
2948
+
2949
+
2950
+
2951
+
2952
+
2953
+
2954
+
2955
+
2956
+
2957
+
2958
+
2959
+
2960
+
2961
+
2962
+
2963
+
2964
+
2965
+
2966
+
2967
+
2968
+
2969
+
2970
+
2971
+
2972
+
2973
+
2974
+
2975
+
2976
+
2977
+
2978
+
2979
+
2980
+
2981
+
2982
+
2983
+
2984
+
2985
+
2986
+
2987
+
2988
+
2989
+
2990
+
2991
+
2992
+
2993
+ x
2994
+
2995
+
2996
+
2997
+
2998
+
2999
+
3000
+
3001
+
3002
+
3003
+
3004
+
3005
+
3006
+
3007
+
3008
+
3009
+
3010
+
3011
+
3012
+
3013
+
3014
+
3015
+
3016
+
3017
+
3018
+
3019
+
3020
+
3021
+
3022
+ 耀
3023
+
3024
+
3025
+
3026
+
3027
+
3028
+
3029
+
3030
+
3031
+
3032
+
3033
+
3034
+
3035
+
3036
+
3037
+
3038
+
3039
+
3040
+
3041
+
3042
+
3043
+
3044
+
3045
+
3046
+
3047
+
3048
+
3049
+
3050
+
3051
+
3052
+
3053
+
3054
+
3055
+
3056
+
3057
+
3058
+
3059
+
3060
+
3061
+
3062
+
3063
+
3064
+
3065
+
3066
+
3067
+
3068
+
3069
+
3070
+
3071
+
3072
+ 仿
3073
+
3074
+
3075
+
3076
+
3077
+
3078
+
3079
+
3080
+
3081
+
3082
+
3083
+
3084
+
3085
+
3086
+
3087
+
3088
+
3089
+
3090
+
3091
+
3092
+
3093
+
3094
+
3095
+
3096
+
3097
+
3098
+
3099
+
3100
+
3101
+
3102
+
3103
+
3104
+
3105
+
3106
+
3107
+
3108
+
3109
+
3110
+
3111
+
3112
+
3113
+
3114
+
3115
+
3116
+
3117
+
3118
+
3119
+
3120
+
3121
+
3122
+
3123
+ 鸿
3124
+
3125
+
3126
+
3127
+
3128
+
3129
+
3130
+
3131
+
3132
+
3133
+
3134
+
3135
+
3136
+
3137
+
3138
+
3139
+
3140
+
3141
+
3142
+
3143
+
3144
+
3145
+
3146
+
3147
+
3148
+
3149
+
3150
+
3151
+
3152
+
3153
+
3154
+
3155
+
3156
+
3157
+
3158
+
3159
+
3160
+
3161
+
3162
+
3163
+
3164
+
3165
+
3166
+
3167
+
3168
+
3169
+
3170
+
3171
+
3172
+
3173
+
3174
+
3175
+
3176
+
3177
+
3178
+
3179
+
3180
+
3181
+
3182
+
3183
+
3184
+
3185
+
3186
+
3187
+
3188
+
3189
+
3190
+
3191
+
3192
+
3193
+
3194
+
3195
+
3196
+
3197
+
3198
+
3199
+
3200
+
3201
+
3202
+
3203
+
3204
+
3205
+
3206
+
3207
+
3208
+
3209
+
3210
+
3211
+
3212
+
3213
+
3214
+
3215
+
3216
+
3217
+
3218
+
3219
+
3220
+
3221
+
3222
+
3223
+
3224
+
3225
+
3226
+
3227
+
3228
+
3229
+
3230
+
3231
+
3232
+
3233
+
3234
+
3235
+
3236
+
3237
+
3238
+
3239
+ 廿
3240
+
3241
+
3242
+
3243
+
3244
+
3245
+
3246
+
3247
+
3248
+
3249
+
3250
+
3251
+
3252
+
3253
+
3254
+
3255
+
3256
+
3257
+
3258
+
3259
+
3260
+
3261
+
3262
+
3263
+
3264
+
3265
+
3266
+
3267
+
3268
+
3269
+
3270
+
3271
+
3272
+
3273
+
3274
+
3275
+
3276
+
3277
+
3278
+
3279
+
3280
+
3281
+
3282
+
3283
+
3284
+
3285
+
3286
+
3287
+
3288
+
3289
+
3290
+
3291
+
3292
+
3293
+
3294
+
3295
+
3296
+
3297
+
3298
+
3299
+
3300
+
3301
+
3302
+
3303
+
3304
+
3305
+
3306
+
3307
+
3308
+
3309
+
3310
+
3311
+
3312
+
3313
+
3314
+
3315
+
3316
+ z
3317
+
3318
+
3319
+ ±
3320
+
3321
+
3322
+
3323
+
3324
+
3325
+
3326
+
3327
+
3328
+
3329
+
3330
+
3331
+
3332
+ e
3333
+ t
3334
+
3335
+
3336
+
3337
+
3338
+
3339
+
3340
+
3341
+
3342
+
3343
+
3344
+
3345
+
3346
+
3347
+
3348
+
3349
+
3350
+
3351
+
3352
+
3353
+
3354
+
3355
+
3356
+
3357
+
3358
+
3359
+
3360
+
3361
+
3362
+
3363
+
3364
+
3365
+
3366
+
3367
+
3368
+
3369
+
3370
+
3371
+
3372
+
3373
+
3374
+
3375
+
3376
+
3377
+
3378
+
3379
+
3380
+ §
3381
+
3382
+
3383
+
3384
+
3385
+
3386
+
3387
+
3388
+
3389
+
3390
+
3391
+
3392
+
3393
+
3394
+
3395
+
3396
+
3397
+
3398
+
3399
+
3400
+ 姿
3401
+
3402
+
3403
+
3404
+
3405
+
3406
+
3407
+
3408
+
3409
+
3410
+
3411
+
3412
+
3413
+
3414
+
3415
+
3416
+
3417
+
3418
+
3419
+
3420
+
3421
+
3422
+
3423
+
3424
+
3425
+
3426
+
3427
+
3428
+
3429
+
3430
+
3431
+
3432
+
3433
+
3434
+
3435
+
3436
+
3437
+
3438
+
3439
+
3440
+
3441
+
3442
+
3443
+
3444
+
3445
+
3446
+
3447
+
3448
+
3449
+
3450
+
3451
+
3452
+
3453
+
3454
+
3455
+
3456
+
3457
+
3458
+
3459
+
3460
+
3461
+
3462
+
3463
+ b
3464
+
3465
+
3466
+
3467
+
3468
+
3469
+
3470
+
3471
+
3472
+
3473
+
3474
+
3475
+
3476
+
3477
+
3478
+
3479
+
3480
+
3481
+
3482
+
3483
+
3484
+
3485
+
3486
+
3487
+
3488
+
3489
+
3490
+ <
3491
+
3492
+
3493
+
3494
+
3495
+
3496
+
3497
+
3498
+
3499
+
3500
+
3501
+
3502
+
3503
+
3504
+
3505
+ 退
3506
+ L
3507
+
3508
+
3509
+
3510
+
3511
+
3512
+
3513
+
3514
+
3515
+
3516
+
3517
+ 鹿
3518
+
3519
+
3520
+
3521
+
3522
+
3523
+
3524
+
3525
+
3526
+
3527
+
3528
+
3529
+
3530
+
3531
+
3532
+
3533
+
3534
+
3535
+
3536
+
3537
+ w
3538
+ i
3539
+ h
3540
+
3541
+
3542
+
3543
+
3544
+
3545
+
3546
+
3547
+
3548
+
3549
+
3550
+
3551
+
3552
+
3553
+
3554
+
3555
+
3556
+
3557
+
3558
+
3559
+
3560
+
3561
+
3562
+
3563
+
3564
+
3565
+
3566
+
3567
+
3568
+
3569
+
3570
+
3571
+
3572
+
3573
+ +
3574
+
3575
+
3576
+
3577
+
3578
+
3579
+
3580
+
3581
+
3582
+
3583
+
3584
+
3585
+
3586
+
3587
+ I
3588
+ B
3589
+ N
3590
+
3591
+
3592
+
3593
+
3594
+
3595
+
3596
+
3597
+
3598
+
3599
+
3600
+
3601
+
3602
+
3603
+
3604
+
3605
+
3606
+
3607
+
3608
+
3609
+
3610
+
3611
+
3612
+
3613
+
3614
+
3615
+
3616
+
3617
+
3618
+
3619
+
3620
+
3621
+
3622
+
3623
+
3624
+
3625
+
3626
+ ^
3627
+ _
3628
+
3629
+
3630
+
3631
+
3632
+
3633
+
3634
+
3635
+
3636
+
3637
+
3638
+
3639
+ M
3640
+
3641
+
3642
+
3643
+
3644
+
3645
+
3646
+
3647
+
3648
+
3649
+
3650
+
3651
+
3652
+
3653
+
3654
+
3655
+
3656
+
3657
+
3658
+
3659
+
3660
+
3661
+
3662
+
3663
+
3664
+
3665
+
3666
+
3667
+
3668
+
3669
+
3670
+
3671
+
3672
+
3673
+ 鱿
3674
+
3675
+
3676
+
3677
+
3678
+
3679
+
3680
+
3681
+
3682
+
3683
+
3684
+
3685
+
3686
+
3687
+
3688
+
3689
+
3690
+
3691
+
3692
+
3693
+
3694
+
3695
+
3696
+
3697
+
3698
+
3699
+
3700
+
3701
+
3702
+
3703
+
3704
+
3705
+
3706
+
3707
+
3708
+
3709
+
3710
+
3711
+
3712
+
3713
+
3714
+
3715
+
3716
+
3717
+
3718
+
3719
+
3720
+
3721
+
3722
+
3723
+
3724
+
3725
+
3726
+
3727
+
3728
+
3729
+
3730
+
3731
+
3732
+
3733
+
3734
+
3735
+
3736
+
3737
+
3738
+
3739
+
3740
+
3741
+
3742
+
3743
+
3744
+
3745
+
3746
+
3747
+
3748
+
3749
+
3750
+
3751
+
3752
+
3753
+
3754
+
3755
+
3756
+
3757
+
3758
+
3759
+
3760
+
3761
+
3762
+
3763
+
3764
+
3765
+
3766
+
3767
+
3768
+
3769
+
3770
+
3771
+
3772
+
3773
+
3774
+
3775
+
3776
+
3777
+
3778
+
3779
+
3780
+
3781
+
3782
+
3783
+
3784
+
3785
+
3786
+
3787
+
3788
+
3789
+
3790
+
3791
+
3792
+
3793
+
3794
+
3795
+
3796
+
3797
+
3798
+
3799
+
3800
+
3801
+
3802
+
3803
+
3804
+
3805
+
3806
+
3807
+
3808
+
3809
+
3810
+
3811
+
3812
+
3813
+
3814
+
3815
+
3816
+
3817
+
3818
+
3819
+
3820
+
3821
+
3822
+
3823
+
3824
+
3825
+
3826
+
3827
+
3828
+
3829
+
3830
+
3831
+
3832
+
3833
+
3834
+
3835
+
3836
+
3837
+
3838
+
3839
+
3840
+
3841
+
3842
+
3843
+
3844
+
3845
+
3846
+
3847
+
3848
+
3849
+
3850
+
3851
+
3852
+
3853
+
3854
+
3855
+
3856
+
3857
+
3858
+
3859
+
3860
+
3861
+
3862
+
3863
+
3864
+
3865
+
3866
+
3867
+
3868
+
3869
+
3870
+
3871
+
3872
+
3873
+
3874
+
3875
+
3876
+
3877
+ 怀
3878
+
3879
+
3880
+
3881
+
3882
+
3883
+
3884
+
3885
+
3886
+
3887
+
3888
+
3889
+
3890
+
3891
+
3892
+
3893
+
3894
+
3895
+
3896
+
3897
+
3898
+
3899
+
3900
+
3901
+
3902
+
3903
+
3904
+
3905
+
3906
+
3907
+
3908
+
3909
+
3910
+
3911
+
3912
+
3913
+
3914
+
3915
+
3916
+
3917
+
3918
+
3919
+
3920
+
3921
+
3922
+
3923
+
3924
+
3925
+
3926
+
3927
+
3928
+
3929
+
3930
+
3931
+
3932
+
3933
+
3934
+
3935
+
3936
+
3937
+
3938
+
3939
+
3940
+
3941
+
3942
+
3943
+
3944
+
3945
+
3946
+
3947
+
3948
+
3949
+
3950
+
3951
+
3952
+
3953
+
3954
+
3955
+
3956
+
3957
+
3958
+
3959
+
3960
+
3961
+
3962
+
3963
+
3964
+
3965
+
3966
+
3967
+
3968
+
3969
+
3970
+
3971
+
3972
+
3973
+
3974
+
3975
+
3976
+
3977
+
3978
+
3979
+
3980
+
3981
+
3982
+
3983
+
3984
+
3985
+
3986
+
3987
+
3988
+
3989
+
3990
+
3991
+
3992
+
3993
+
3994
+
3995
+
3996
+
3997
+
3998
+
3999
+
4000
+
4001
+
4002
+
4003
+
4004
+
4005
+
4006
+
4007
+
4008
+
4009
+
4010
+
4011
+
4012
+
4013
+
4014
+
4015
+
4016
+
4017
+
4018
+
4019
+
4020
+
4021
+
4022
+
4023
+
4024
+
4025
+
4026
+
4027
+
4028
+
4029
+
4030
+
4031
+
4032
+
4033
+
4034
+
4035
+
4036
+
4037
+
4038
+
4039
+
4040
+
4041
+
4042
+
4043
+
4044
+
4045
+
4046
+
4047
+
4048
+
4049
+
4050
+
4051
+
4052
+
4053
+
4054
+
4055
+
4056
+
4057
+
4058
+
4059
+
4060
+
4061
+
4062
+
4063
+
4064
+
4065
+
4066
+
4067
+
4068
+
4069
+
4070
+
4071
+
4072
+
4073
+
4074
+
4075
+
4076
+
4077
+
4078
+ }
4079
+
4080
+
4081
+
4082
+
4083
+
4084
+
4085
+
4086
+
4087
+
4088
+
4089
+
4090
+
4091
+
4092
+
4093
+
4094
+
4095
+
4096
+
4097
+
4098
+
4099
+
4100
+
4101
+
4102
+
4103
+
4104
+
4105
+
4106
+
4107
+
4108
+
4109
+
4110
+
4111
+
4112
+
4113
+
4114
+
4115
+
4116
+
4117
+
4118
+
4119
+
4120
+
4121
+
4122
+
4123
+
4124
+
4125
+
4126
+
4127
+
4128
+ ~
4129
+
4130
+
4131
+
4132
+
4133
+
4134
+
4135
+
4136
+ Z
4137
+
4138
+
4139
+
4140
+
4141
+
4142
+
4143
+
4144
+
4145
+
4146
+
4147
+
4148
+
4149
+
4150
+
4151
+
4152
+
4153
+
4154
+
4155
+
4156
+
4157
+
4158
+
4159
+
4160
+
4161
+
4162
+
4163
+
4164
+
4165
+
4166
+
4167
+
4168
+
4169
+
4170
+
4171
+
4172
+
4173
+
4174
+
4175
+
4176
+
4177
+
4178
+
4179
+
4180
+
4181
+
4182
+
4183
+
4184
+
4185
+
4186
+
4187
+
4188
+
4189
+
4190
+
4191
+
4192
+
4193
+
4194
+
4195
+
4196
+
4197
+
4198
+
4199
+
4200
+
4201
+
4202
+
4203
+
4204
+
4205
+
4206
+
4207
+
4208
+
4209
+
4210
+
4211
+
4212
+
4213
+
4214
+
4215
+
4216
+
4217
+
4218
+
4219
+
4220
+
4221
+
4222
+
4223
+
4224
+
4225
+
4226
+
4227
+
4228
+
4229
+
4230
+
4231
+
4232
+
4233
+
4234
+
4235
+
4236
+ 槿
4237
+
4238
+
4239
+
4240
+
4241
+
4242
+
4243
+
4244
+ C
4245
+ o
4246
+
4247
+
4248
+
4249
+
4250
+
4251
+
4252
+
4253
+
4254
+
4255
+
4256
+
4257
+
4258
+
4259
+
4260
+
4261
+
4262
+
4263
+
4264
+
4265
+
4266
+
4267
+
4268
+
4269
+
4270
+
4271
+
4272
+
4273
+
4274
+
4275
+
4276
+
4277
+
4278
+
4279
+
4280
+
4281
+
4282
+
4283
+
4284
+
4285
+
4286
+
4287
+
4288
+
4289
+
4290
+
4291
+
4292
+
4293
+
4294
+
4295
+
4296
+
4297
+
4298
+
4299
+
4300
+
4301
+
4302
+
4303
+
4304
+
4305
+
4306
+
4307
+
4308
+
4309
+
4310
+
4311
+
4312
+
4313
+
4314
+
4315
+
4316
+
4317
+
4318
+
4319
+
4320
+
4321
+
4322
+
4323
+
4324
+
4325
+
4326
+
4327
+
4328
+
4329
+
4330
+
4331
+
4332
+
4333
+
4334
+
4335
+
4336
+
4337
+
4338
+
4339
+
4340
+
4341
+
4342
+
4343
+
4344
+
4345
+
4346
+
4347
+
4348
+
4349
+
4350
+
4351
+
4352
+
4353
+
4354
+
4355
+
4356
+
4357
+
4358
+
4359
+
4360
+
4361
+
4362
+
4363
+
4364
+
4365
+
4366
+
4367
+
4368
+
4369
+
4370
+
4371
+
4372
+
4373
+
4374
+
4375
+
4376
+
4377
+
4378
+
4379
+
4380
+
4381
+ E
4382
+
4383
+
4384
+
4385
+
4386
+
4387
+
4388
+
4389
+ f
4390
+
4391
+
4392
+
4393
+
4394
+
4395
+
4396
+
4397
+
4398
+
4399
+
4400
+
4401
+
4402
+
4403
+
4404
+
4405
+
4406
+
4407
+
4408
+
4409
+
4410
+
4411
+
4412
+
4413
+
4414
+
4415
+
4416
+
4417
+
4418
+
4419
+
4420
+
4421
+
4422
+
4423
+
4424
+
4425
+
4426
+
4427
+
4428
+ \
4429
+
4430
+
4431
+
4432
+
4433
+
4434
+
4435
+
4436
+
4437
+
4438
+
4439
+
4440
+
4441
+
4442
+
4443
+
4444
+
4445
+
4446
+
4447
+
4448
+
4449
+
4450
+
4451
+
4452
+
4453
+
4454
+
4455
+
4456
+
4457
+
4458
+
4459
+
4460
+
4461
+
4462
+
4463
+
4464
+
4465
+
4466
+
4467
+
4468
+
4469
+
4470
+
4471
+
4472
+
4473
+ 屿
4474
+
4475
+
4476
+
4477
+
4478
+
4479
+
4480
+
4481
+
4482
+
4483
+
4484
+
4485
+
4486
+
4487
+
4488
+
4489
+
4490
+
4491
+
4492
+
4493
+
4494
+
4495
+
4496
+
4497
+ U
4498
+
4499
+
4500
+
4501
+
4502
+
4503
+
4504
+
4505
+
4506
+
4507
+
4508
+
4509
+
4510
+
4511
+
4512
+
4513
+
4514
+
4515
+
4516
+
4517
+
4518
+
4519
+
4520
+
4521
+
4522
+
4523
+
4524
+
4525
+
4526
+
4527
+
4528
+
4529
+
4530
+
4531
+
4532
+
4533
+
4534
+
4535
+
4536
+
4537
+
4538
+
4539
+
4540
+
4541
+
4542
+
4543
+
4544
+ a
4545
+ p
4546
+ y
4547
+ n
4548
+ g
4549
+
4550
+
4551
+
4552
+
4553
+
4554
+
4555
+
4556
+
4557
+
4558
+
4559
+
4560
+
4561
+
4562
+
4563
+
4564
+
4565
+
4566
+
4567
+
4568
+
4569
+
4570
+
4571
+
4572
+
4573
+
4574
+
4575
+
4576
+
4577
+
4578
+
4579
+
4580
+
4581
+
4582
+
4583
+
4584
+
4585
+
4586
+
4587
+
4588
+
4589
+
4590
+
4591
+
4592
+
4593
+
4594
+
4595
+
4596
+
4597
+
4598
+
4599
+
4600
+
4601
+
4602
+
4603
+
4604
+
4605
+
4606
+
4607
+
4608
+
4609
+
4610
+
4611
+
4612
+
4613
+
4614
+
4615
+
4616
+
4617
+
4618
+
4619
+
4620
+
4621
+
4622
+
4623
+
4624
+
4625
+
4626
+
4627
+
4628
+
4629
+
4630
+
4631
+
4632
+
4633
+
4634
+
4635
+
4636
+
4637
+
4638
+
4639
+
4640
+
4641
+
4642
+
4643
+
4644
+
4645
+
4646
+
4647
+
4648
+
4649
+
4650
+
4651
+
4652
+
4653
+
4654
+
4655
+
4656
+
4657
+
4658
+
4659
+
4660
+
4661
+
4662
+
4663
+
4664
+
4665
+
4666
+
4667
+
4668
+
4669
+
4670
+
4671
+
4672
+
4673
+
4674
+
4675
+
4676
+
4677
+
4678
+
4679
+
4680
+
4681
+
4682
+
4683
+
4684
+
4685
+
4686
+
4687
+
4688
+
4689
+
4690
+
4691
+
4692
+
4693
+
4694
+
4695
+
4696
+
4697
+
4698
+
4699
+
4700
+
4701
+
4702
+
4703
+
4704
+
4705
+
4706
+
4707
+ 竿
4708
+
4709
+
4710
+
4711
+
4712
+
4713
+
4714
+
4715
+
4716
+
4717
+
4718
+
4719
+
4720
+
4721
+
4722
+
4723
+
4724
+
4725
+
4726
+
4727
+
4728
+
4729
+
4730
+
4731
+ Q
4732
+
4733
+
4734
+
4735
+
4736
+
4737
+
4738
+
4739
+ 羿
4740
+
4741
+ O
4742
+
4743
+
4744
+
4745
+
4746
+
4747
+
4748
+
4749
+
4750
+
4751
+
4752
+
4753
+
4754
+
4755
+
4756
+
4757
+
4758
+
4759
+ 宿
4760
+
4761
+
4762
+
4763
+
4764
+
4765
+
4766
+
4767
+
4768
+
4769
+
4770
+
4771
+
4772
+
4773
+
4774
+
4775
+
4776
+
4777
+
4778
+
4779
+
4780
+
4781
+
4782
+
4783
+
4784
+
4785
+
4786
+
4787
+
4788
+
4789
+
4790
+
4791
+
4792
+
4793
+
4794
+
4795
+
4796
+
4797
+
4798
+
4799
+
4800
+
4801
+
4802
+
4803
+
4804
+
4805
+
4806
+
4807
+
4808
+
4809
+
4810
+
4811
+
4812
+
4813
+
4814
+
4815
+
4816
+
4817
+
4818
+
4819
+
4820
+
4821
+
4822
+
4823
+
4824
+
4825
+
4826
+
4827
+
4828
+
4829
+
4830
+
4831
+
4832
+
4833
+
4834
+
4835
+
4836
+
4837
+
4838
+
4839
+
4840
+
4841
+
4842
+
4843
+
4844
+
4845
+
4846
+
4847
+
4848
+
4849
+ k
4850
+
4851
+
4852
+
4853
+
4854
+
4855
+
4856
+
4857
+
4858
+
4859
+
4860
+
4861
+
4862
+
4863
+
4864
+
4865
+
4866
+
4867
+
4868
+
4869
+
4870
+
4871
+
4872
+
4873
+
4874
+
4875
+
4876
+
4877
+
4878
+
4879
+
4880
+
4881
+
4882
+
4883
+
4884
+
4885
+ $
4886
+
4887
+
4888
+
4889
+
4890
+
4891
+
4892
+
4893
+
4894
+
4895
+
4896
+
4897
+
4898
+
4899
+
4900
+
4901
+
4902
+ c
4903
+
4904
+
4905
+
4906
+
4907
+
4908
+
4909
+
4910
+
4911
+
4912
+
4913
+
4914
+
4915
+
4916
+
4917
+
4918
+
4919
+
4920
+
4921
+
4922
+ v
4923
+
4924
+
4925
+
4926
+
4927
+
4928
+
4929
+
4930
+
4931
+
4932
+
4933
+
4934
+
4935
+
4936
+
4937
+
4938
+
4939
+
4940
+
4941
+
4942
+
4943
+
4944
+
4945
+
4946
+
4947
+
4948
+
4949
+
4950
+
4951
+
4952
+
4953
+
4954
+
4955
+
4956
+
4957
+
4958
+
4959
+
4960
+
4961
+
4962
+
4963
+
4964
+
4965
+
4966
+
4967
+
4968
+
4969
+
4970
+
4971
+
4972
+
4973
+
4974
+
4975
+
4976
+
4977
+
4978
+
4979
+
4980
+
4981
+
4982
+
4983
+
4984
+
4985
+
4986
+
4987
+
4988
+
4989
+
4990
+
4991
+
4992
+
4993
+
4994
+
4995
+
4996
+
4997
+
4998
+
4999
+
5000
+
5001
+
5002
+
5003
+
5004
+
5005
+
5006
+
5007
+
5008
+
5009
+
5010
+
5011
+
5012
+
5013
+
5014
+
5015
+
5016
+
5017
+
5018
+
5019
+
5020
+
5021
+
5022
+
5023
+
5024
+
5025
+
5026
+
5027
+
5028
+
5029
+
5030
+
5031
+
5032
+
5033
+ W
5034
+
5035
+
5036
+
5037
+
5038
+
5039
+
5040
+
5041
+
5042
+
5043
+
5044
+
5045
+ 穿
5046
+
5047
+
5048
+
5049
+
5050
+
5051
+
5052
+
5053
+
5054
+
5055
+
5056
+
5057
+
5058
+
5059
+
5060
+
5061
+
5062
+
5063
+
5064
+
5065
+
5066
+
5067
+
5068
+
5069
+
5070
+
5071
+
5072
+
5073
+
5074
+
5075
+
5076
+
5077
+
5078
+
5079
+
5080
+
5081
+
5082
+
5083
+
5084
+
5085
+
5086
+ ×
5087
+
5088
+
5089
+
5090
+
5091
+
5092
+
5093
+
5094
+
5095
+
5096
+
5097
+
5098
+
5099
+ 轿
5100
+
5101
+
5102
+
5103
+
5104
+
5105
+
5106
+
5107
+
5108
+
5109
+
5110
+
5111
+
5112
+
5113
+
5114
+
5115
+
5116
+
5117
+
5118
+
5119
+
5120
+
5121
+
5122
+
5123
+
5124
+
5125
+
5126
+
5127
+ R
5128
+ G
5129
+
5130
+
5131
+
5132
+
5133
+
5134
+
5135
+
5136
+
5137
+
5138
+
5139
+
5140
+
5141
+
5142
+
5143
+
5144
+
5145
+
5146
+
5147
+
5148
+
5149
+
5150
+
5151
+
5152
+
5153
+
5154
+
5155
+
5156
+
5157
+
5158
+
5159
+
5160
+
5161
+
5162
+
5163
+
5164
+
5165
+
5166
+
5167
+
5168
+
5169
+ ˉ
5170
+
5171
+ d
5172
+ °
5173
+
5174
+
5175
+
5176
+
5177
+
5178
+
5179
+
5180
+
5181
+
5182
+
5183
+
5184
+
5185
+
5186
+
5187
+
5188
+
5189
+
5190
+
5191
+
5192
+
5193
+ K
5194
+
5195
+
5196
+
5197
+
5198
+
5199
+
5200
+ X
5201
+
5202
+
5203
+
5204
+
5205
+
5206
+
5207
+
5208
+
5209
+
5210
+
5211
+
5212
+
5213
+
5214
+
5215
+
5216
+
5217
+
5218
+
5219
+
5220
+
5221
+
5222
+
5223
+
5224
+
5225
+
5226
+
5227
+
5228
+
5229
+
5230
+
5231
+
5232
+
5233
+ m
5234
+
5235
+
5236
+
5237
+
5238
+
5239
+
5240
+
5241
+
5242
+
5243
+
5244
+ 涿
5245
+
5246
+
5247
+
5248
+
5249
+
5250
+
5251
+
5252
+
5253
+
5254
+
5255
+
5256
+
5257
+
5258
+
5259
+
5260
+
5261
+
5262
+
5263
+
5264
+
5265
+
5266
+
5267
+
5268
+
5269
+
5270
+
5271
+
5272
+
5273
+
5274
+
5275
+
5276
+
5277
+
5278
+
5279
+
5280
+
5281
+
5282
+
5283
+
5284
+
5285
+
5286
+
5287
+
5288
+
5289
+
5290
+
5291
+
5292
+
5293
+
5294
+
5295
+
5296
+
5297
+
5298
+
5299
+
5300
+
5301
+
5302
+
5303
+
5304
+
5305
+
5306
+
5307
+
5308
+
5309
+
5310
+
5311
+
5312
+
5313
+
5314
+
5315
+
5316
+
5317
+
5318
+
5319
+
5320
+
5321
+
5322
+
5323
+
5324
+
5325
+
5326
+
5327
+
5328
+
5329
+
5330
+
5331
+
5332
+
5333
+
5334
+
5335
+ `
5336
+
5337
+
5338
+
5339
+
5340
+
5341
+
5342
+
5343
+
5344
+
5345
+
5346
+
5347
+
5348
+
5349
+
5350
+
5351
+
5352
+
5353
+
5354
+
5355
+
5356
+
5357
+
5358
+
5359
+
5360
+
5361
+
5362
+
5363
+
5364
+
5365
+
5366
+
5367
+
5368
+
5369
+
5370
+
5371
+
5372
+
5373
+
5374
+
5375
+
5376
+
5377
+
5378
+
5379
+
5380
+
5381
+
5382
+
5383
+
5384
+
5385
+
5386
+
5387
+
5388
+
5389
+
5390
+
5391
+
5392
+
5393
+
5394
+
5395
+
5396
+
5397
+
5398
+
5399
+
5400
+
5401
+
5402
+
5403
+
5404
+
5405
+ V
5406
+
5407
+
5408
+
5409
+
5410
+
5411
+
5412
+
5413
+
5414
+
5415
+
5416
+
5417
+
5418
+
5419
+
5420
+
5421
+
5422
+
5423
+
5424
+
5425
+
5426
+
5427
+
5428
+
5429
+
5430
+
5431
+
5432
+
5433
+
5434
+
5435
+
5436
+
5437
+
5438
+
5439
+
5440
+
5441
+
5442
+
5443
+
5444
+
5445
+
5446
+
5447
+
5448
+
5449
+
5450
+
5451
+
5452
+
5453
+
5454
+
5455
+
5456
+
5457
+
5458
+
5459
+
5460
+
5461
+ #
5462
+
5463
+
5464
+
5465
+
5466
+
5467
+
5468
+
5469
+
5470
+
5471
+
5472
+
5473
+
5474
+
5475
+
5476
+
5477
+
5478
+
5479
+
5480
+
5481
+
5482
+
5483
+ 簿
5484
+
5485
+
5486
+
5487
+
5488
+
5489
+ {
5490
+
5491
+
5492
+
5493
+ j
5494
+
5495
+
5496
+
5497
+
5498
+
5499
+
5500
+
5501
+
5502
+
5503
+
5504
+
5505
+
5506
+
5507
+
5508
+
5509
+
5510
+
5511
+
5512
+
5513
+
5514
+
5515
+
5516
+
5517
+
5518
+
5519
+
5520
+
5521
+
5522
+
5523
+
5524
+
5525
+
5526
+
5527
+
5528
+
5529
+ ·
5530
+
5531
+
5532
+
5533
+ Ë
5534
+
5535
+
5536
+
5537
+
5538
+
5539
+
5540
+
5541
+
5542
+
5543
+
5544
+
5545
+
5546
+ ¥
5547
+
5548
+
5549
+
5550
+
5551
+
5552
+
5553
+
5554
+
5555
+
5556
+
5557
+
5558
+
5559
+
5560
+ π
5561
+
5562
+
5563
+
5564
+ é
5565
+
5566
+
5567
+ Λ
5568
+
5569
+
5570
+
5571
+
5572
+
5573
+
5574
+
5575
+
5576
+
5577
+
5578
+
5579
+
5580
+
5581
+
5582
+
5583
+
5584
+
5585
+
5586
+
5587
+
5588
+
5589
+
5590
+
5591
+
5592
+
5593
+
5594
+
5595
+
5596
+
5597
+
5598
+
5599
+
5600
+
5601
+
5602
+
5603
+
5604
+
5605
+ Ο
5606
+
5607
+
5608
+
5609
+
5610
+
5611
+
5612
+
5613
+
5614
+
5615
+
5616
+
5617
+
5618
+
5619
+
5620
+
5621
+
5622
+
5623
+
5624
+
5625
+
5626
+
5627
+
5628
+
5629
+
5630
+
5631
+
5632
+
5633
+
5634
+
5635
+
5636
+
5637
+
5638
+
5639
+
5640
+
5641
+
5642
+
5643
+
5644
+
5645
+
5646
+
5647
+
5648
+
5649
+
5650
+
5651
+
5652
+
5653
+
5654
+
5655
+
5656
+
5657
+
5658
+
5659
+
5660
+
5661
+
5662
+
5663
+
5664
+
5665
+
5666
+
5667
+
5668
+
5669
+
5670
+
5671
+
5672
+
5673
+
5674
+ α
5675
+
5676
+
5677
+
5678
+
5679
+
5680
+
5681
+
5682
+
5683
+
5684
+
5685
+
5686
+
5687
+
5688
+
5689
+
5690
+
5691
+
5692
+
5693
+
5694
+
5695
+
5696
+
5697
+
5698
+
5699
+
5700
+
5701
+
5702
+
5703
+
5704
+
5705
+
5706
+
5707
+
5708
+
5709
+
5710
+  
5711
+
5712
+
5713
+
5714
+
5715
+
5716
+
5717
+
5718
+
5719
+
5720
+
5721
+
5722
+
5723
+
5724
+
5725
+
5726
+
5727
+
5728
+ 鴿
5729
+
5730
+
5731
+
5732
+
5733
+
5734
+
5735
+
5736
+
5737
+
5738
+
5739
+
5740
+
5741
+
5742
+
5743
+
5744
+
5745
+
5746
+
5747
+
5748
+
5749
+
5750
+
5751
+
5752
+
5753
+
5754
+
5755
+
5756
+
5757
+
5758
+
5759
+
5760
+
5761
+
5762
+
5763
+
5764
+
5765
+
5766
+
5767
+
5768
+
5769
+
5770
+
5771
+
5772
+
5773
+
5774
+
5775
+
5776
+
5777
+
5778
+
5779
+
5780
+
5781
+
5782
+
5783
+
5784
+
5785
+
5786
+
5787
+
5788
+
5789
+
5790
+
5791
+
5792
+
5793
+
5794
+
5795
+
5796
+
5797
+
5798
+
5799
+
5800
+ è
5801
+
5802
+
5803
+
5804
+
5805
+
5806
+ Ü
5807
+
5808
+
5809
+
5810
+
5811
+
5812
+
5813
+
5814
+
5815
+
5816
+
5817
+ И
5818
+
5819
+
5820
+
5821
+
5822
+
5823
+
5824
+
5825
+
5826
+
5827
+
5828
+
5829
+
5830
+
5831
+
5832
+
5833
+
5834
+
5835
+
5836
+
5837
+
5838
+ »
5839
+
5840
+
5841
+ ä
5842
+
5843
+
5844
+
5845
+
5846
+
5847
+
5848
+
5849
+
5850
+
5851
+
5852
+
5853
+
5854
+
5855
+
5856
+
5857
+
5858
+
5859
+
5860
+
5861
+
5862
+
5863
+
5864
+
5865
+
5866
+
5867
+
5868
+
5869
+
5870
+
5871
+
5872
+
5873
+
5874
+
5875
+
5876
+ ɔ
5877
+
5878
+
5879
+
5880
+
5881
+
5882
+
5883
+ ´
5884
+
5885
+
5886
+
5887
+
5888
+ í
5889
+
5890
+
5891
+
5892
+
5893
+
5894
+
5895
+
5896
+
5897
+
5898
+
5899
+
5900
+
5901
+
5902
+
5903
+
5904
+
5905
+
5906
+
5907
+
5908
+
5909
+ É
5910
+
5911
+
5912
+
5913
+
5914
+ ʌ
5915
+
5916
+
5917
+
5918
+
5919
+
5920
+
5921
+
5922
+
5923
+
5924
+
5925
+ Я
5926
+ Й
5927
+
5928
+
5929
+
5930
+
5931
+
5932
+
5933
+
5934
+
5935
+
5936
+
5937
+
5938
+
5939
+
5940
+
5941
+
5942
+
5943
+
5944
+
5945
+
5946
+
5947
+
5948
+ 粿
5949
+
5950
+
5951
+
5952
+
5953
+ ®
5954
+
5955
+
5956
+
5957
+
5958
+
5959
+
5960
+
5961
+
5962
+
5963
+
5964
+
5965
+
5966
+ З
5967
+
5968
+
5969
+
5970
+
5971
+
5972
+
5973
+
5974
+
5975
+
5976
+ β
5977
+
5978
+ á
5979
+
5980
+
5981
+
5982
+
5983
+
5984
+
5985
+
5986
+
5987
+
5988
+
5989
+
5990
+
5991
+
5992
+
5993
+
5994
+
5995
+
5996
+
5997
+
5998
+
5999
+
6000
+
6001
+
6002
+
6003
+
6004
+
6005
+
6006
+
6007
+
6008
+
6009
+
6010
+
6011
+
6012
+
6013
+
6014
+
6015
+
6016
+
6017
+
6018
+
6019
+
6020
+
6021
+
6022
+
6023
+
6024
+
6025
+
6026
+
6027
+
6028
+
6029
+
6030
+
6031
+
6032
+
6033
+
6034
+
6035
+
6036
+
6037
+
6038
+
6039
+
6040
+
6041
+
6042
+
6043
+
6044
+
6045
+
6046
+
6047
+
6048
+
6049
+
6050
+
6051
+
6052
+
6053
+
6054
+
6055
+
6056
+
6057
+
6058
+
6059
+
6060
+
6061
+
6062
+
6063
+
6064
+
6065
+
6066
+ Ó
6067
+
6068
+
6069
+
6070
+
6071
+
6072
+
6073
+
6074
+
6075
+
6076
+
6077
+
6078
+
6079
+
6080
+
6081
+
6082
+
6083
+
6084
+
6085
+
6086
+
6087
+
6088
+
6089
+
6090
+
6091
+
6092
+
6093
+
6094
+
6095
+
6096
+ ò
6097
+
6098
+
6099
+
6100
+
6101
+
6102
+
6103
+
6104
+
6105
+
6106
+
6107
+
6108
+
6109
+
6110
+
6111
+
6112
+
6113
+
6114
+
6115
+
6116
+
6117
+
6118
+
6119
+
6120
+
6121
+
6122
+
6123
+
6124
+
6125
+ 貿
6126
+
6127
+
6128
+
6129
+
6130
+
6131
+
6132
+
6133
+
6134
+
6135
+
6136
+
6137
+
6138
+ 𣇉
6139
+
6140
+
6141
+
6142
+
6143
+
6144
+
6145
+
6146
+
6147
+
6148
+
6149
+
6150
+
6151
+
6152
+
6153
+
6154
+
6155
+
6156
+
6157
+
6158
+
6159
+
6160
+
6161
+
6162
+
6163
+
6164
+
6165
+
6166
+
6167
+ г
6168
+
6169
+
6170
+
6171
+
6172
+
6173
+
6174
+
6175
+
6176
+
6177
+
6178
+
6179
+
6180
+
6181
+
6182
+
6183
+
6184
+
6185
+ ���
6186
+
6187
+
6188
+
6189
+
6190
+
6191
+ 楿
6192
+
6193
+
6194
+
6195
+
6196
+
6197
+
6198
+ 滿
6199
+
6200
+
6201
+
6202
+
6203
+
6204
+
6205
+
6206
+
6207
+
6208
+
6209
+
6210
+
6211
+
6212
+
6213
+
6214
+
6215
+
6216
+
6217
+
6218
+
6219
+
6220
+
6221
+
6222
+
6223
+
6224
+
6225
+
6226
+
6227
+
6228
+
6229
+
6230
+
6231
+
6232
+
6233
+
6234
+
6235
+
6236
+
6237
+
6238
+
6239
+
6240
+
6241
+
6242
+
6243
+
6244
+
6245
+
6246
+
6247
+
6248
+
6249
+
6250
+
6251
+
6252
+
6253
+
6254
+ Φ
6255
+
6256
+
6257
+
6258
+
6259
+
6260
+
6261
+ ε
6262
+
6263
+
6264
+
6265
+
6266
+
6267
+
6268
+
6269
+
6270
+
6271
+
6272
+
6273
+
6274
+ ü
6275
+
6276
+
6277
+
6278
+
6279
+ 調
6280
+
6281
+
6282
+
6283
+
6284
+
6285
+
6286
+
6287
+
6288
+
6289
+
6290
+
6291
+
6292
+
6293
+
6294
+
6295
+
6296
+
6297
+
6298
+
6299
+
6300
+
6301
+
6302
+
6303
+
6304
+
6305
+
6306
+
6307
+
6308
+
6309
+
6310
+
6311
+
6312
+
6313
+
6314
+
6315
+
6316
+
6317
+
6318
+
6319
+
6320
+
6321
+
6322
+
6323
+
6324
+
6325
+
6326
+ ˋ
6327
+
6328
+
6329
+ ā
6330
+
6331
+
6332
+
6333
+
6334
+
6335
+
6336
+
6337
+
6338
+
6339
+
6340
+
6341
+
6342
+
6343
+
6344
+
6345
+
6346
+
6347
+
6348
+
6349
+
6350
+
6351
+
6352
+
6353
+
6354
+
6355
+
6356
+
6357
+
6358
+
6359
+
6360
+
6361
+
6362
+
6363
+
6364
+
6365
+
6366
+
6367
+
6368
+
6369
+ ú
6370
+ ó
6371
+
6372
+
6373
+
6374
+
6375
+
6376
+
6377
+
6378
+
6379
+
6380
+
6381
+
6382
+
6383
+
6384
+
6385
+
6386
+
6387
+
6388
+
6389
+
6390
+ ē
6391
+
6392
+
6393
+
6394
+
6395
+
6396
+
6397
+
6398
+
6399
+
6400
+
6401
+
6402
+
6403
+
6404
+
6405
+
6406
+
6407
+
6408
+
6409
+
6410
+
6411
+
6412
+ Ω
6413
+
6414
+
6415
+
6416
+
6417
+
6418
+
6419
+
6420
+
6421
+
6422
+
6423
+
6424
+
6425
+
6426
+
6427
+
6428
+
6429
+
6430
+
6431
+
6432
+
6433
+
6434
+
6435
+
6436
+
6437
+ П
6438
+
6439
+
6440
+
6441
+
6442
+
6443
+
6444
+
6445
+
6446
+
6447
+
6448
+
6449
+
6450
+
6451
+
6452
+
6453
+
6454
+
6455
+
6456
+
6457
+
6458
+
6459
+
6460
+ ǐ
6461
+ ō
6462
+ ǒ
6463
+
6464
+
6465
+
6466
+ μ
6467
+
6468
+
6469
+
6470
+
6471
+
6472
+
6473
+
6474
+
6475
+ à
6476
+ ɡ
6477
+
6478
+
6479
+
6480
+
6481
+
6482
+
6483
+
6484
+
6485
+ ī
6486
+
6487
+
6488
+
6489
+
6490
+
6491
+
6492
+
6493
+
6494
+
6495
+
6496
+
6497
+
6498
+
6499
+
6500
+
6501
+
6502
+
6503
+
6504
+
6505
+
6506
+
6507
+
6508
+
6509
+
6510
+
6511
+
6512
+
6513
+
6514
+
6515
+
6516
+
6517
+
6518
+
6519
+
6520
+
6521
+
6522
+
6523
+
6524
+
6525
+
6526
+
6527
+
6528
+
6529
+
6530
+
6531
+
6532
+
6533
+
6534
+
6535
+
6536
+
6537
+
6538
+
6539
+
6540
+
6541
+ ²
6542
+
6543
+
6544
+
6545
+
6546
+
6547
+
6548
+
6549
+
6550
+
6551
+
6552
+
6553
+
6554
+
6555
+
6556
+
6557
+
6558
+
6559
+
6560
+
6561
+
6562
+
6563
+
6564
+
6565
+
6566
+
6567
+
6568
+
6569
+
6570
+
6571
+
6572
+
6573
+
6574
+
6575
+
6576
+
6577
+
6578
+
6579
+
6580
+
6581
+
6582
+ 駿
6583
+
6584
+
6585
+
6586
+
6587
+
6588
+
6589
+
6590
+
6591
+
6592
+
6593
+
6594
+
6595
+
6596
+
6597
+
6598
+
6599
+
6600
+
6601
+
6602
+
6603
+
6604
+
6605
+
6606
+
6607
+
6608
+
6609
+ θ
6610
+
6611
+
6612
+
6613
+ ū
6614
+ ì
6615
+
6616
+
6617
+
6618
+
6619
+
6620
+
6621
+
6622
+
6623
+
deepdoc/models/rec.onnx ADDED
@@ -0,0 +1,3 @@
 
 
 
 
1
+ version https://git-lfs.github.com/spec/v1
2
+ oid sha256:1c7cf60de2afd728d512f4190cf37455092b45f06175365c6fc58d8cd7e2a68b
3
+ size 10826336
deepdoc/models/tsr.onnx ADDED
@@ -0,0 +1,3 @@
 
 
 
 
1
+ version https://git-lfs.github.com/spec/v1
2
+ oid sha256:04c14c3e41802450a1f437a3865ce1a3186046262ea4d75c8975289687a43223
3
+ size 12243020
deepdoc/vision/__init__.py ADDED
@@ -0,0 +1,3 @@
 
 
 
 
1
+ from .ragFlow import RagFlow
2
+
3
+ __all__ = ['ragFlow']
deepdoc/vision/ocr.res ADDED
@@ -0,0 +1,6623 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ '
2
+
3
+
4
+
5
+
6
+
7
+
8
+ 贿
9
+
10
+
11
+
12
+
13
+
14
+
15
+
16
+
17
+
18
+
19
+
20
+
21
+
22
+
23
+
24
+
25
+ 2
26
+ 0
27
+ 8
28
+ -
29
+ 7
30
+
31
+ >
32
+ :
33
+ ]
34
+ ,
35
+
36
+
37
+
38
+
39
+
40
+
41
+
42
+
43
+
44
+
45
+
46
+
47
+
48
+
49
+
50
+
51
+
52
+
53
+
54
+
55
+
56
+
57
+
58
+
59
+
60
+
61
+
62
+
63
+
64
+
65
+
66
+
67
+
68
+
69
+
70
+
71
+
72
+
73
+
74
+
75
+
76
+
77
+
78
+
79
+
80
+
81
+
82
+ 蹿
83
+
84
+
85
+
86
+
87
+
88
+
89
+
90
+
91
+
92
+
93
+ 1
94
+ 3
95
+
96
+
97
+
98
+
99
+
100
+
101
+
102
+
103
+
104
+
105
+
106
+
107
+
108
+
109
+
110
+
111
+
112
+
113
+
114
+
115
+
116
+
117
+
118
+
119
+
120
+
121
+
122
+
123
+
124
+
125
+
126
+
127
+
128
+
129
+
130
+
131
+
132
+
133
+
134
+
135
+
136
+
137
+
138
+
139
+
140
+
141
+
142
+
143
+
144
+
145
+
146
+
147
+
148
+
149
+
150
+
151
+
152
+
153
+
154
+
155
+
156
+
157
+
158
+
159
+
160
+
161
+
162
+
163
+
164
+
165
+
166
+ !
167
+
168
+
169
+
170
+
171
+
172
+
173
+
174
+
175
+
176
+
177
+
178
+
179
+
180
+
181
+
182
+
183
+
184
+
185
+
186
+
187
+
188
+
189
+
190
+
191
+
192
+
193
+
194
+
195
+
196
+
197
+
198
+
199
+
200
+
201
+
202
+
203
+
204
+
205
+
206
+
207
+
208
+
209
+
210
+
211
+
212
+
213
+
214
+
215
+
216
+
217
+
218
+
219
+
220
+
221
+
222
+
223
+
224
+
225
+
226
+
227
+
228
+
229
+
230
+
231
+
232
+
233
+
234
+
235
+
236
+
237
+
238
+
239
+
240
+
241
+
242
+
243
+
244
+
245
+
246
+
247
+
248
+
249
+
250
+
251
+
252
+
253
+
254
+
255
+
256
+
257
+
258
+
259
+
260
+
261
+
262
+
263
+
264
+
265
+
266
+
267
+
268
+
269
+
270
+
271
+
272
+
273
+
274
+
275
+
276
+
277
+
278
+
279
+
280
+
281
+
282
+
283
+
284
+
285
+
286
+
287
+
288
+
289
+
290
+
291
+
292
+
293
+
294
+
295
+
296
+
297
+
298
+
299
+
300
+
301
+
302
+
303
+
304
+ 诿
305
+
306
+
307
+
308
+
309
+
310
+
311
+
312
+
313
+
314
+
315
+
316
+ 线
317
+
318
+
319
+
320
+
321
+
322
+
323
+
324
+
325
+
326
+
327
+
328
+
329
+
330
+
331
+
332
+
333
+
334
+
335
+
336
+
337
+
338
+
339
+
340
+
341
+
342
+
343
+
344
+
345
+
346
+
347
+
348
+
349
+
350
+
351
+
352
+
353
+
354
+
355
+
356
+
357
+
358
+
359
+
360
+
361
+
362
+
363
+
364
+
365
+
366
+
367
+
368
+
369
+
370
+
371
+
372
+
373
+
374
+
375
+
376
+
377
+
378
+
379
+
380
+
381
+
382
+
383
+
384
+
385
+
386
+
387
+
388
+
389
+
390
+
391
+
392
+ 尿
393
+
394
+
395
+
396
+
397
+
398
+
399
+
400
+
401
+ |
402
+ ;
403
+
404
+
405
+
406
+
407
+
408
+
409
+
410
+
411
+
412
+
413
+
414
+
415
+
416
+
417
+
418
+
419
+
420
+
421
+
422
+
423
+
424
+
425
+ H
426
+
427
+
428
+
429
+
430
+
431
+
432
+
433
+
434
+
435
+
436
+
437
+
438
+
439
+
440
+
441
+
442
+
443
+
444
+
445
+
446
+
447
+
448
+
449
+
450
+
451
+
452
+
453
+
454
+
455
+
456
+
457
+
458
+
459
+
460
+
461
+
462
+
463
+
464
+
465
+
466
+ .
467
+
468
+
469
+
470
+
471
+
472
+
473
+
474
+
475
+
476
+
477
+
478
+
479
+
480
+
481
+
482
+
483
+
484
+
485
+
486
+
487
+ /
488
+ *
489
+
490
+ 忿
491
+
492
+
493
+
494
+
495
+
496
+
497
+
498
+
499
+
500
+
501
+
502
+
503
+
504
+ 齿
505
+
506
+
507
+
508
+
509
+
510
+
511
+
512
+
513
+
514
+
515
+
516
+
517
+
518
+
519
+
520
+
521
+
522
+
523
+
524
+
525
+
526
+
527
+
528
+
529
+
530
+
531
+
532
+
533
+
534
+
535
+
536
+
537
+
538
+
539
+
540
+
541
+
542
+
543
+
544
+
545
+
546
+
547
+
548
+
549
+
550
+
551
+
552
+
553
+
554
+
555
+
556
+
557
+
558
+
559
+
560
+
561
+
562
+
563
+
564
+
565
+
566
+
567
+
568
+
569
+
570
+
571
+ 西
572
+
573
+
574
+
575
+
576
+
577
+
578
+
579
+
580
+
581
+
582
+
583
+
584
+
585
+
586
+
587
+
588
+
589
+
590
+
591
+
592
+
593
+
594
+
595
+
596
+
597
+
598
+
599
+
600
+
601
+
602
+
603
+
604
+
605
+
606
+
607
+
608
+
609
+
610
+
611
+
612
+
613
+
614
+
615
+
616
+
617
+
618
+
619
+
620
+
621
+
622
+
623
+
624
+
625
+
626
+
627
+
628
+
629
+
630
+
631
+ 5
632
+ 4
633
+
634
+
635
+
636
+
637
+
638
+
639
+
640
+
641
+
642
+
643
+
644
+
645
+
646
+
647
+
648
+
649
+
650
+
651
+
652
+
653
+
654
+
655
+
656
+ 亿
657
+
658
+
659
+
660
+
661
+
662
+
663
+
664
+
665
+
666
+
667
+
668
+
669
+
670
+
671
+
672
+
673
+
674
+
675
+
676
+
677
+
678
+
679
+
680
+
681
+
682
+
683
+
684
+
685
+
686
+
687
+
688
+
689
+
690
+
691
+
692
+
693
+
694
+
695
+
696
+
697
+
698
+
699
+
700
+
701
+
702
+
703
+
704
+
705
+
706
+
707
+
708
+
709
+
710
+
711
+
712
+
713
+
714
+
715
+
716
+
717
+
718
+
719
+
720
+
721
+ (
722
+
723
+
724
+
725
+
726
+
727
+
728
+
729
+
730
+
731
+
732
+
733
+
734
+
735
+
736
+
737
+
738
+
739
+
740
+
741
+
742
+
743
+
744
+
745
+
746
+
747
+
748
+
749
+
750
+
751
+
752
+
753
+
754
+ 访
755
+
756
+
757
+
758
+
759
+
760
+
761
+
762
+
763
+
764
+
765
+
766
+
767
+
768
+
769
+
770
+
771
+
772
+
773
+
774
+
775
+
776
+
777
+
778
+
779
+
780
+
781
+
782
+
783
+
784
+
785
+
786
+
787
+
788
+
789
+
790
+
791
+
792
+
793
+
794
+
795
+
796
+
797
+
798
+
799
+
800
+
801
+
802
+
803
+
804
+
805
+
806
+
807
+
808
+
809
+
810
+
811
+
812
+
813
+
814
+
815
+
816
+
817
+
818
+
819
+
820
+
821
+
822
+
823
+
824
+
825
+
826
+
827
+
828
+
829
+
830
+
831
+
832
+
833
+
834
+
835
+
836
+
837
+
838
+
839
+
840
+
841
+
842
+
843
+
844
+
845
+
846
+
847
+
848
+
849
+
850
+
851
+
852
+
853
+
854
+
855
+
856
+
857
+
858
+ ��
859
+
860
+
861
+
862
+
863
+
864
+
865
+
866
+
867
+
868
+
869
+
870
+
871
+
872
+
873
+
874
+
875
+
876
+
877
+
878
+
879
+
880
+
881
+
882
+
883
+
884
+
885
+
886
+
887
+
888
+
889
+
890
+
891
+
892
+
893
+
894
+
895
+
896
+
897
+
898
+
899
+
900
+
901
+
902
+
903
+
904
+
905
+
906
+
907
+
908
+
909
+
910
+
911
+
912
+
913
+
914
+
915
+
916
+
917
+
918
+
919
+
920
+
921
+
922
+
923
+
924
+
925
+
926
+
927
+
928
+
929
+
930
+
931
+
932
+
933
+ 6
934
+
935
+
936
+
937
+
938
+
939
+
940
+
941
+
942
+
943
+
944
+
945
+
946
+
947
+
948
+
949
+
950
+
951
+
952
+
953
+
954
+
955
+
956
+
957
+
958
+
959
+
960
+
961
+
962
+
963
+
964
+
965
+ )
966
+
967
+
968
+
969
+
970
+
971
+
972
+
973
+
974
+
975
+
976
+
977
+
978
+
979
+
980
+
981
+
982
+
983
+
984
+
985
+
986
+
987
+
988
+
989
+ 稿
990
+
991
+
992
+
993
+
994
+
995
+
996
+
997
+
998
+
999
+
1000
+
1001
+
1002
+
1003
+
1004
+
1005
+
1006
+
1007
+
1008
+
1009
+
1010
+
1011
+
1012
+
1013
+
1014
+
1015
+
1016
+
1017
+
1018
+
1019
+
1020
+
1021
+
1022
+
1023
+
1024
+
1025
+
1026
+
1027
+
1028
+
1029
+
1030
+
1031
+
1032
+
1033
+ s
1034
+ u
1035
+
1036
+
1037
+
1038
+
1039
+
1040
+
1041
+
1042
+
1043
+
1044
+
1045
+
1046
+
1047
+
1048
+
1049
+
1050
+
1051
+
1052
+
1053
+
1054
+
1055
+
1056
+
1057
+
1058
+
1059
+
1060
+
1061
+
1062
+
1063
+
1064
+
1065
+
1066
+
1067
+
1068
+
1069
+
1070
+
1071
+
1072
+
1073
+
1074
+
1075
+
1076
+
1077
+
1078
+
1079
+
1080
+
1081
+
1082
+
1083
+
1084
+
1085
+
1086
+
1087
+
1088
+
1089
+
1090
+
1091
+
1092
+
1093
+
1094
+
1095
+
1096
+
1097
+
1098
+
1099
+
1100
+
1101
+
1102
+
1103
+
1104
+
1105
+ [
1106
+
1107
+
1108
+
1109
+ 9
1110
+
1111
+
1112
+
1113
+
1114
+
1115
+
1116
+
1117
+
1118
+
1119
+
1120
+
1121
+
1122
+
1123
+
1124
+
1125
+
1126
+
1127
+
1128
+
1129
+
1130
+
1131
+
1132
+
1133
+
1134
+
1135
+
1136
+
1137
+
1138
+
1139
+
1140
+
1141
+
1142
+
1143
+
1144
+
1145
+
1146
+
1147
+
1148
+
1149
+
1150
+
1151
+
1152
+
1153
+
1154
+
1155
+
1156
+
1157
+
1158
+
1159
+
1160
+
1161
+
1162
+
1163
+
1164
+
1165
+
1166
+
1167
+
1168
+
1169
+
1170
+
1171
+
1172
+
1173
+
1174
+
1175
+
1176
+
1177
+
1178
+
1179
+
1180
+
1181
+
1182
+ 岿
1183
+
1184
+
1185
+
1186
+
1187
+
1188
+
1189
+
1190
+
1191
+
1192
+
1193
+
1194
+
1195
+
1196
+
1197
+
1198
+
1199
+
1200
+ 广
1201
+
1202
+
1203
+
1204
+
1205
+
1206
+
1207
+
1208
+
1209
+
1210
+
1211
+
1212
+
1213
+
1214
+
1215
+
1216
+
1217
+ S
1218
+ Y
1219
+ F
1220
+ D
1221
+ A
1222
+
1223
+
1224
+
1225
+
1226
+
1227
+
1228
+
1229
+
1230
+
1231
+
1232
+
1233
+
1234
+
1235
+
1236
+
1237
+
1238
+
1239
+
1240
+
1241
+
1242
+
1243
+
1244
+
1245
+
1246
+
1247
+
1248
+
1249
+
1250
+
1251
+
1252
+
1253
+
1254
+
1255
+
1256
+
1257
+
1258
+
1259
+
1260
+
1261
+
1262
+
1263
+
1264
+
1265
+
1266
+
1267
+
1268
+
1269
+
1270
+
1271
+
1272
+
1273
+
1274
+
1275
+
1276
+
1277
+
1278
+
1279
+
1280
+
1281
+
1282
+
1283
+
1284
+
1285
+
1286
+
1287
+
1288
+
1289
+
1290
+
1291
+
1292
+
1293
+
1294
+
1295
+
1296
+
1297
+
1298
+
1299
+
1300
+
1301
+
1302
+
1303
+
1304
+
1305
+
1306
+
1307
+
1308
+
1309
+
1310
+ P
1311
+
1312
+
1313
+
1314
+
1315
+
1316
+
1317
+
1318
+
1319
+
1320
+
1321
+
1322
+
1323
+
1324
+
1325
+
1326
+
1327
+
1328
+
1329
+
1330
+
1331
+
1332
+
1333
+
1334
+
1335
+
1336
+
1337
+
1338
+
1339
+
1340
+
1341
+
1342
+
1343
+
1344
+
1345
+
1346
+
1347
+
1348
+
1349
+
1350
+
1351
+
1352
+
1353
+
1354
+
1355
+
1356
+
1357
+
1358
+
1359
+
1360
+
1361
+
1362
+
1363
+
1364
+
1365
+
1366
+
1367
+
1368
+
1369
+
1370
+
1371
+
1372
+
1373
+
1374
+
1375
+
1376
+
1377
+
1378
+
1379
+
1380
+
1381
+ T
1382
+
1383
+
1384
+
1385
+
1386
+ 湿
1387
+
1388
+
1389
+
1390
+
1391
+
1392
+
1393
+
1394
+
1395
+
1396
+
1397
+ 窿
1398
+
1399
+
1400
+
1401
+
1402
+
1403
+
1404
+
1405
+
1406
+
1407
+
1408
+
1409
+
1410
+
1411
+
1412
+
1413
+
1414
+
1415
+
1416
+
1417
+
1418
+
1419
+
1420
+
1421
+
1422
+
1423
+
1424
+
1425
+
1426
+
1427
+
1428
+
1429
+
1430
+
1431
+
1432
+
1433
+
1434
+
1435
+
1436
+
1437
+
1438
+
1439
+
1440
+
1441
+
1442
+
1443
+
1444
+
1445
+
1446
+
1447
+
1448
+
1449
+
1450
+
1451
+
1452
+
1453
+
1454
+
1455
+
1456
+
1457
+
1458
+
1459
+
1460
+
1461
+
1462
+
1463
+
1464
+
1465
+
1466
+
1467
+
1468
+
1469
+
1470
+
1471
+
1472
+
1473
+
1474
+
1475
+
1476
+
1477
+
1478
+
1479
+
1480
+
1481
+
1482
+
1483
+
1484
+
1485
+
1486
+
1487
+
1488
+
1489
+
1490
+
1491
+
1492
+
1493
+
1494
+
1495
+
1496
+
1497
+
1498
+
1499
+
1500
+
1501
+
1502
+
1503
+
1504
+
1505
+
1506
+
1507
+
1508
+
1509
+
1510
+
1511
+
1512
+
1513
+
1514
+
1515
+
1516
+
1517
+
1518
+
1519
+
1520
+
1521
+
1522
+
1523
+
1524
+
1525
+
1526
+
1527
+
1528
+
1529
+ @
1530
+
1531
+
1532
+
1533
+
1534
+
1535
+
1536
+
1537
+
1538
+
1539
+
1540
+
1541
+
1542
+
1543
+
1544
+
1545
+
1546
+
1547
+
1548
+
1549
+
1550
+
1551
+
1552
+
1553
+
1554
+
1555
+
1556
+
1557
+
1558
+
1559
+
1560
+
1561
+
1562
+
1563
+ 丿
1564
+
1565
+
1566
+
1567
+
1568
+
1569
+
1570
+
1571
+
1572
+
1573
+
1574
+
1575
+
1576
+
1577
+
1578
+
1579
+
1580
+
1581
+
1582
+
1583
+
1584
+
1585
+
1586
+
1587
+
1588
+
1589
+
1590
+
1591
+
1592
+
1593
+
1594
+
1595
+
1596
+
1597
+
1598
+
1599
+
1600
+
1601
+
1602
+
1603
+
1604
+
1605
+
1606
+
1607
+
1608
+
1609
+
1610
+
1611
+
1612
+
1613
+
1614
+
1615
+
1616
+
1617
+
1618
+
1619
+
1620
+
1621
+
1622
+
1623
+
1624
+
1625
+
1626
+
1627
+
1628
+
1629
+
1630
+
1631
+
1632
+
1633
+
1634
+
1635
+
1636
+
1637
+
1638
+
1639
+
1640
+
1641
+
1642
+
1643
+ 沿
1644
+
1645
+
1646
+
1647
+
1648
+
1649
+
1650
+
1651
+
1652
+
1653
+
1654
+
1655
+
1656
+
1657
+
1658
+
1659
+
1660
+
1661
+
1662
+
1663
+
1664
+
1665
+
1666
+
1667
+
1668
+
1669
+
1670
+
1671
+
1672
+
1673
+
1674
+
1675
+
1676
+
1677
+
1678
+
1679
+
1680
+
1681
+ 使
1682
+
1683
+
1684
+
1685
+
1686
+
1687
+
1688
+
1689
+
1690
+
1691
+
1692
+
1693
+
1694
+
1695
+ 绿
1696
+
1697
+
1698
+
1699
+
1700
+
1701
+
1702
+
1703
+
1704
+
1705
+
1706
+
1707
+
1708
+
1709
+
1710
+
1711
+
1712
+
1713
+
1714
+
1715
+
1716
+ ��
1717
+
1718
+
1719
+
1720
+
1721
+
1722
+
1723
+
1724
+
1725
+
1726
+
1727
+
1728
+
1729
+
1730
+
1731
+
1732
+
1733
+
1734
+
1735
+
1736
+
1737
+
1738
+
1739
+
1740
+
1741
+
1742
+
1743
+
1744
+
1745
+
1746
+
1747
+
1748
+
1749
+
1750
+
1751
+
1752
+
1753
+
1754
+
1755
+
1756
+
1757
+
1758
+
1759
+
1760
+
1761
+
1762
+
1763
+
1764
+
1765
+
1766
+
1767
+
1768
+
1769
+
1770
+
1771
+
1772
+
1773
+
1774
+
1775
+
1776
+
1777
+
1778
+
1779
+
1780
+
1781
+
1782
+
1783
+
1784
+
1785
+
1786
+
1787
+
1788
+
1789
+
1790
+
1791
+
1792
+
1793
+
1794
+
1795
+
1796
+
1797
+
1798
+
1799
+
1800
+
1801
+
1802
+
1803
+
1804
+
1805
+
1806
+
1807
+
1808
+
1809
+
1810
+
1811
+ %
1812
+
1813
+
1814
+
1815
+
1816
+
1817
+
1818
+
1819
+
1820
+
1821
+
1822
+ "
1823
+
1824
+
1825
+
1826
+
1827
+
1828
+
1829
+
1830
+
1831
+
1832
+
1833
+
1834
+
1835
+
1836
+
1837
+ 婿
1838
+
1839
+
1840
+
1841
+
1842
+
1843
+
1844
+
1845
+
1846
+
1847
+
1848
+
1849
+
1850
+
1851
+
1852
+
1853
+
1854
+
1855
+
1856
+
1857
+
1858
+
1859
+
1860
+
1861
+
1862
+
1863
+
1864
+
1865
+
1866
+
1867
+
1868
+
1869
+
1870
+
1871
+
1872
+
1873
+
1874
+
1875
+
1876
+
1877
+
1878
+
1879
+
1880
+
1881
+
1882
+
1883
+
1884
+
1885
+
1886
+
1887
+
1888
+
1889
+
1890
+
1891
+
1892
+
1893
+
1894
+
1895
+
1896
+
1897
+
1898
+
1899
+
1900
+
1901
+
1902
+
1903
+
1904
+
1905
+
1906
+
1907
+
1908
+
1909
+
1910
+
1911
+
1912
+
1913
+
1914
+
1915
+
1916
+
1917
+
1918
+
1919
+
1920
+
1921
+
1922
+
1923
+
1924
+
1925
+
1926
+
1927
+
1928
+
1929
+
1930
+
1931
+
1932
+
1933
+
1934
+
1935
+
1936
+
1937
+
1938
+
1939
+
1940
+
1941
+
1942
+
1943
+
1944
+
1945
+
1946
+
1947
+
1948
+
1949
+
1950
+
1951
+
1952
+
1953
+
1954
+
1955
+
1956
+
1957
+
1958
+ r
1959
+
1960
+
1961
+
1962
+
1963
+
1964
+
1965
+
1966
+
1967
+
1968
+
1969
+
1970
+
1971
+
1972
+
1973
+
1974
+
1975
+
1976
+
1977
+
1978
+
1979
+
1980
+
1981
+
1982
+
1983
+
1984
+
1985
+
1986
+
1987
+
1988
+
1989
+ =
1990
+
1991
+
1992
+
1993
+
1994
+
1995
+
1996
+
1997
+
1998
+
1999
+
2000
+ 饿
2001
+
2002
+
2003
+
2004
+
2005
+
2006
+
2007
+
2008
+
2009
+
2010
+
2011
+
2012
+
2013
+
2014
+
2015
+
2016
+
2017
+
2018
+
2019
+
2020
+
2021
+
2022
+
2023
+
2024
+
2025
+
2026
+
2027
+
2028
+
2029
+
2030
+
2031
+
2032
+
2033
+
2034
+
2035
+
2036
+
2037
+
2038
+
2039
+
2040
+
2041
+
2042
+
2043
+
2044
+
2045
+
2046
+
2047
+
2048
+
2049
+
2050
+
2051
+
2052
+
2053
+
2054
+
2055
+
2056
+
2057
+
2058
+
2059
+
2060
+
2061
+
2062
+
2063
+
2064
+
2065
+
2066
+
2067
+
2068
+
2069
+
2070
+
2071
+
2072
+
2073
+
2074
+
2075
+
2076
+
2077
+
2078
+
2079
+
2080
+
2081
+
2082
+
2083
+
2084
+
2085
+
2086
+
2087
+
2088
+
2089
+
2090
+
2091
+
2092
+
2093
+
2094
+
2095
+
2096
+
2097
+
2098
+
2099
+
2100
+
2101
+
2102
+
2103
+
2104
+
2105
+
2106
+
2107
+
2108
+
2109
+
2110
+
2111
+
2112
+
2113
+
2114
+
2115
+ ˇ
2116
+
2117
+
2118
+
2119
+
2120
+
2121
+
2122
+
2123
+
2124
+
2125
+
2126
+
2127
+
2128
+
2129
+
2130
+
2131
+
2132
+
2133
+
2134
+
2135
+
2136
+
2137
+
2138
+
2139
+
2140
+
2141
+
2142
+
2143
+
2144
+
2145
+
2146
+
2147
+
2148
+
2149
+
2150
+
2151
+
2152
+
2153
+
2154
+
2155
+
2156
+ q
2157
+
2158
+
2159
+
2160
+
2161
+
2162
+
2163
+
2164
+
2165
+
2166
+
2167
+
2168
+
2169
+
2170
+
2171
+
2172
+
2173
+
2174
+
2175
+
2176
+
2177
+
2178
+
2179
+
2180
+
2181
+
2182
+
2183
+
2184
+
2185
+
2186
+
2187
+
2188
+
2189
+
2190
+
2191
+
2192
+
2193
+
2194
+
2195
+
2196
+
2197
+
2198
+
2199
+
2200
+
2201
+
2202
+
2203
+
2204
+
2205
+
2206
+
2207
+
2208
+
2209
+
2210
+
2211
+
2212
+
2213
+
2214
+
2215
+
2216
+
2217
+
2218
+
2219
+
2220
+
2221
+
2222
+
2223
+
2224
+
2225
+
2226
+
2227
+
2228
+
2229
+
2230
+
2231
+
2232
+
2233
+
2234
+
2235
+
2236
+
2237
+
2238
+
2239
+
2240
+
2241
+
2242
+
2243
+
2244
+
2245
+
2246
+
2247
+
2248
+
2249
+
2250
+
2251
+
2252
+
2253
+
2254
+
2255
+
2256
+
2257
+
2258
+
2259
+
2260
+
2261
+
2262
+
2263
+
2264
+
2265
+
2266
+
2267
+
2268
+
2269
+ ÷
2270
+
2271
+
2272
+
2273
+
2274
+
2275
+
2276
+
2277
+
2278
+
2279
+
2280
+
2281
+
2282
+
2283
+
2284
+
2285
+
2286
+
2287
+
2288
+
2289
+
2290
+
2291
+
2292
+
2293
+
2294
+
2295
+
2296
+
2297
+
2298
+
2299
+
2300
+
2301
+
2302
+
2303
+
2304
+
2305
+
2306
+
2307
+
2308
+
2309
+
2310
+
2311
+
2312
+
2313
+
2314
+
2315
+
2316
+
2317
+
2318
+
2319
+
2320
+
2321
+
2322
+
2323
+
2324
+
2325
+
2326
+
2327
+
2328
+
2329
+
2330
+
2331
+
2332
+
2333
+
2334
+
2335
+
2336
+
2337
+
2338
+
2339
+
2340
+
2341
+
2342
+
2343
+
2344
+
2345
+
2346
+
2347
+
2348
+
2349
+
2350
+
2351
+
2352
+
2353
+
2354
+
2355
+
2356
+
2357
+
2358
+
2359
+
2360
+
2361
+
2362
+
2363
+
2364
+
2365
+
2366
+
2367
+
2368
+
2369
+
2370
+
2371
+
2372
+
2373
+
2374
+
2375
+
2376
+
2377
+
2378
+
2379
+
2380
+
2381
+ 椿
2382
+
2383
+
2384
+
2385
+ 寿
2386
+
2387
+
2388
+
2389
+
2390
+
2391
+
2392
+
2393
+
2394
+
2395
+
2396
+
2397
+
2398
+
2399
+
2400
+
2401
+
2402
+
2403
+
2404
+
2405
+
2406
+
2407
+
2408
+
2409
+
2410
+
2411
+
2412
+
2413
+
2414
+
2415
+
2416
+
2417
+
2418
+
2419
+
2420
+
2421
+
2422
+
2423
+
2424
+
2425
+
2426
+
2427
+
2428
+
2429
+
2430
+
2431
+
2432
+
2433
+
2434
+
2435
+
2436
+
2437
+
2438
+
2439
+
2440
+
2441
+
2442
+
2443
+
2444
+
2445
+
2446
+
2447
+
2448
+
2449
+
2450
+
2451
+
2452
+
2453
+
2454
+
2455
+
2456
+ ?
2457
+
2458
+
2459
+
2460
+
2461
+
2462
+
2463
+
2464
+
2465
+
2466
+
2467
+
2468
+
2469
+
2470
+
2471
+
2472
+
2473
+
2474
+
2475
+
2476
+
2477
+
2478
+
2479
+
2480
+
2481
+
2482
+
2483
+
2484
+
2485
+
2486
+
2487
+
2488
+
2489
+
2490
+
2491
+
2492
+
2493
+
2494
+
2495
+
2496
+
2497
+
2498
+
2499
+
2500
+
2501
+
2502
+
2503
+
2504
+
2505
+
2506
+
2507
+
2508
+
2509
+
2510
+
2511
+
2512
+
2513
+
2514
+
2515
+
2516
+
2517
+
2518
+
2519
+
2520
+
2521
+
2522
+
2523
+
2524
+
2525
+
2526
+
2527
+
2528
+
2529
+
2530
+
2531
+
2532
+
2533
+
2534
+
2535
+
2536
+
2537
+
2538
+
2539
+
2540
+
2541
+
2542
+
2543
+
2544
+
2545
+
2546
+
2547
+
2548
+
2549
+
2550
+
2551
+
2552
+
2553
+
2554
+
2555
+
2556
+
2557
+
2558
+
2559
+
2560
+
2561
+
2562
+
2563
+
2564
+
2565
+
2566
+
2567
+
2568
+
2569
+
2570
+
2571
+
2572
+
2573
+
2574
+
2575
+
2576
+
2577
+
2578
+
2579
+
2580
+
2581
+
2582
+
2583
+
2584
+
2585
+
2586
+
2587
+
2588
+
2589
+
2590
+
2591
+
2592
+
2593
+
2594
+
2595
+
2596
+
2597
+
2598
+ 便
2599
+
2600
+
2601
+
2602
+
2603
+
2604
+
2605
+
2606
+
2607
+
2608
+
2609
+
2610
+
2611
+
2612
+
2613
+
2614
+
2615
+
2616
+
2617
+
2618
+
2619
+
2620
+
2621
+
2622
+
2623
+
2624
+
2625
+
2626
+
2627
+
2628
+
2629
+
2630
+
2631
+
2632
+
2633
+
2634
+
2635
+
2636
+
2637
+
2638
+
2639
+
2640
+
2641
+
2642
+
2643
+
2644
+
2645
+
2646
+
2647
+
2648
+
2649
+
2650
+
2651
+
2652
+
2653
+
2654
+
2655
+
2656
+
2657
+
2658
+
2659
+
2660
+
2661
+
2662
+
2663
+
2664
+
2665
+
2666
+
2667
+ 殿
2668
+
2669
+
2670
+
2671
+
2672
+
2673
+
2674
+
2675
+
2676
+
2677
+
2678
+
2679
+
2680
+
2681
+
2682
+
2683
+
2684
+
2685
+
2686
+
2687
+
2688
+
2689
+
2690
+
2691
+
2692
+
2693
+
2694
+
2695
+
2696
+
2697
+ J
2698
+
2699
+
2700
+
2701
+
2702
+
2703
+
2704
+
2705
+
2706
+
2707
+
2708
+
2709
+
2710
+ l
2711
+
2712
+
2713
+
2714
+
2715
+
2716
+
2717
+
2718
+
2719
+
2720
+
2721
+
2722
+
2723
+
2724
+
2725
+
2726
+
2727
+
2728
+
2729
+
2730
+
2731
+
2732
+
2733
+
2734
+
2735
+
2736
+
2737
+
2738
+
2739
+
2740
+
2741
+
2742
+
2743
+
2744
+
2745
+
2746
+
2747
+
2748
+
2749
+
2750
+
2751
+
2752
+
2753
+
2754
+
2755
+
2756
+
2757
+
2758
+
2759
+
2760
+
2761
+
2762
+
2763
+
2764
+
2765
+
2766
+
2767
+
2768
+
2769
+
2770
+
2771
+
2772
+
2773
+
2774
+
2775
+
2776
+
2777
+
2778
+
2779
+
2780
+
2781
+
2782
+
2783
+
2784
+
2785
+
2786
+
2787
+
2788
+
2789
+
2790
+
2791
+
2792
+
2793
+
2794
+
2795
+
2796
+
2797
+
2798
+
2799
+
2800
+
2801
+
2802
+
2803
+
2804
+
2805
+
2806
+
2807
+
2808
+
2809
+
2810
+
2811
+
2812
+
2813
+
2814
+
2815
+
2816
+
2817
+
2818
+
2819
+
2820
+
2821
+
2822
+
2823
+
2824
+
2825
+
2826
+
2827
+
2828
+
2829
+
2830
+
2831
+
2832
+
2833
+
2834
+
2835
+
2836
+
2837
+
2838
+
2839
+
2840
+
2841
+
2842
+
2843
+
2844
+
2845
+
2846
+
2847
+
2848
+
2849
+
2850
+
2851
+
2852
+
2853
+
2854
+ &
2855
+
2856
+
2857
+
2858
+
2859
+
2860
+
2861
+
2862
+
2863
+
2864
+
2865
+
2866
+
2867
+
2868
+
2869
+
2870
+
2871
+
2872
+
2873
+
2874
+
2875
+
2876
+
2877
+
2878
+
2879
+
2880
+
2881
+
2882
+
2883
+
2884
+
2885
+
2886
+
2887
+
2888
+
2889
+
2890
+
2891
+
2892
+
2893
+
2894
+
2895
+
2896
+
2897
+
2898
+
2899
+
2900
+
2901
+
2902
+
2903
+
2904
+
2905
+
2906
+
2907
+
2908
+
2909
+
2910
+
2911
+
2912
+
2913
+
2914
+
2915
+
2916
+
2917
+
2918
+
2919
+
2920
+
2921
+
2922
+
2923
+
2924
+
2925
+
2926
+
2927
+
2928
+
2929
+
2930
+
2931
+
2932
+
2933
+
2934
+
2935
+
2936
+
2937
+
2938
+
2939
+
2940
+
2941
+
2942
+
2943
+ 驿
2944
+
2945
+
2946
+
2947
+
2948
+
2949
+
2950
+
2951
+
2952
+
2953
+
2954
+
2955
+
2956
+
2957
+
2958
+
2959
+
2960
+
2961
+
2962
+
2963
+
2964
+
2965
+
2966
+
2967
+
2968
+
2969
+
2970
+
2971
+
2972
+
2973
+
2974
+
2975
+
2976
+
2977
+
2978
+
2979
+
2980
+
2981
+
2982
+
2983
+
2984
+
2985
+
2986
+
2987
+
2988
+
2989
+
2990
+
2991
+
2992
+
2993
+ x
2994
+
2995
+
2996
+
2997
+
2998
+
2999
+
3000
+
3001
+
3002
+
3003
+
3004
+
3005
+
3006
+
3007
+
3008
+
3009
+
3010
+
3011
+
3012
+
3013
+
3014
+
3015
+
3016
+
3017
+
3018
+
3019
+
3020
+
3021
+
3022
+ 耀
3023
+
3024
+
3025
+
3026
+
3027
+
3028
+
3029
+
3030
+
3031
+
3032
+
3033
+
3034
+
3035
+
3036
+
3037
+
3038
+
3039
+
3040
+
3041
+
3042
+
3043
+
3044
+
3045
+
3046
+
3047
+
3048
+
3049
+
3050
+
3051
+
3052
+
3053
+
3054
+
3055
+
3056
+
3057
+
3058
+
3059
+
3060
+
3061
+
3062
+
3063
+
3064
+
3065
+
3066
+
3067
+
3068
+
3069
+
3070
+
3071
+
3072
+ 仿
3073
+
3074
+
3075
+
3076
+
3077
+
3078
+
3079
+
3080
+
3081
+
3082
+
3083
+
3084
+
3085
+
3086
+
3087
+
3088
+
3089
+
3090
+
3091
+
3092
+
3093
+
3094
+
3095
+
3096
+
3097
+
3098
+
3099
+
3100
+
3101
+
3102
+
3103
+
3104
+
3105
+
3106
+
3107
+
3108
+
3109
+
3110
+
3111
+
3112
+
3113
+
3114
+
3115
+
3116
+
3117
+
3118
+
3119
+
3120
+
3121
+
3122
+
3123
+ 鸿
3124
+
3125
+
3126
+
3127
+
3128
+
3129
+
3130
+
3131
+
3132
+
3133
+
3134
+
3135
+
3136
+
3137
+
3138
+
3139
+
3140
+
3141
+
3142
+
3143
+
3144
+
3145
+
3146
+
3147
+
3148
+
3149
+
3150
+
3151
+
3152
+
3153
+
3154
+
3155
+
3156
+
3157
+
3158
+
3159
+
3160
+
3161
+
3162
+
3163
+
3164
+
3165
+
3166
+
3167
+
3168
+
3169
+
3170
+
3171
+
3172
+
3173
+
3174
+
3175
+
3176
+
3177
+
3178
+
3179
+
3180
+
3181
+
3182
+
3183
+
3184
+
3185
+
3186
+
3187
+
3188
+
3189
+
3190
+
3191
+
3192
+
3193
+
3194
+
3195
+
3196
+
3197
+
3198
+
3199
+
3200
+
3201
+
3202
+
3203
+
3204
+
3205
+
3206
+
3207
+
3208
+
3209
+
3210
+
3211
+
3212
+
3213
+
3214
+
3215
+
3216
+
3217
+
3218
+
3219
+
3220
+
3221
+
3222
+
3223
+
3224
+
3225
+
3226
+
3227
+
3228
+
3229
+
3230
+
3231
+
3232
+
3233
+
3234
+
3235
+
3236
+
3237
+
3238
+
3239
+ 廿
3240
+
3241
+
3242
+
3243
+
3244
+
3245
+
3246
+
3247
+
3248
+
3249
+
3250
+
3251
+
3252
+
3253
+
3254
+
3255
+
3256
+
3257
+
3258
+
3259
+
3260
+
3261
+
3262
+
3263
+
3264
+
3265
+
3266
+
3267
+
3268
+
3269
+
3270
+
3271
+
3272
+
3273
+
3274
+
3275
+
3276
+
3277
+
3278
+
3279
+
3280
+
3281
+
3282
+
3283
+
3284
+
3285
+
3286
+
3287
+
3288
+
3289
+
3290
+
3291
+
3292
+
3293
+
3294
+
3295
+
3296
+
3297
+
3298
+
3299
+
3300
+
3301
+
3302
+
3303
+
3304
+
3305
+
3306
+
3307
+
3308
+
3309
+
3310
+
3311
+
3312
+
3313
+
3314
+
3315
+
3316
+ z
3317
+
3318
+
3319
+ ±
3320
+
3321
+
3322
+
3323
+
3324
+
3325
+
3326
+
3327
+
3328
+
3329
+
3330
+
3331
+
3332
+ e
3333
+ t
3334
+
3335
+
3336
+
3337
+
3338
+
3339
+
3340
+
3341
+
3342
+
3343
+
3344
+
3345
+
3346
+
3347
+
3348
+
3349
+
3350
+
3351
+
3352
+
3353
+
3354
+
3355
+
3356
+
3357
+
3358
+
3359
+
3360
+
3361
+
3362
+
3363
+
3364
+
3365
+
3366
+
3367
+
3368
+
3369
+
3370
+
3371
+
3372
+
3373
+
3374
+
3375
+
3376
+
3377
+
3378
+
3379
+
3380
+ §
3381
+
3382
+
3383
+
3384
+
3385
+
3386
+
3387
+
3388
+
3389
+
3390
+
3391
+
3392
+
3393
+
3394
+
3395
+
3396
+
3397
+
3398
+
3399
+
3400
+ 姿
3401
+
3402
+
3403
+
3404
+
3405
+
3406
+
3407
+
3408
+
3409
+
3410
+
3411
+
3412
+
3413
+
3414
+
3415
+
3416
+
3417
+
3418
+
3419
+
3420
+
3421
+
3422
+
3423
+
3424
+
3425
+
3426
+
3427
+
3428
+
3429
+
3430
+
3431
+
3432
+
3433
+
3434
+
3435
+
3436
+
3437
+
3438
+
3439
+
3440
+
3441
+
3442
+
3443
+
3444
+
3445
+
3446
+
3447
+
3448
+
3449
+
3450
+
3451
+
3452
+
3453
+
3454
+
3455
+
3456
+
3457
+
3458
+
3459
+
3460
+
3461
+
3462
+
3463
+ b
3464
+
3465
+
3466
+
3467
+
3468
+
3469
+
3470
+
3471
+
3472
+
3473
+
3474
+
3475
+
3476
+
3477
+
3478
+
3479
+
3480
+
3481
+
3482
+
3483
+
3484
+
3485
+
3486
+
3487
+
3488
+
3489
+
3490
+ <
3491
+
3492
+
3493
+
3494
+
3495
+
3496
+
3497
+
3498
+
3499
+
3500
+
3501
+
3502
+
3503
+
3504
+
3505
+ 退
3506
+ L
3507
+
3508
+
3509
+
3510
+
3511
+
3512
+
3513
+
3514
+
3515
+
3516
+
3517
+ 鹿
3518
+
3519
+
3520
+
3521
+
3522
+
3523
+
3524
+
3525
+
3526
+
3527
+
3528
+
3529
+
3530
+
3531
+
3532
+
3533
+
3534
+
3535
+
3536
+
3537
+ w
3538
+ i
3539
+ h
3540
+
3541
+
3542
+
3543
+
3544
+
3545
+
3546
+
3547
+
3548
+
3549
+
3550
+
3551
+
3552
+
3553
+
3554
+
3555
+
3556
+
3557
+
3558
+
3559
+
3560
+
3561
+
3562
+
3563
+
3564
+
3565
+
3566
+
3567
+
3568
+
3569
+
3570
+
3571
+
3572
+
3573
+ +
3574
+
3575
+
3576
+
3577
+
3578
+
3579
+
3580
+
3581
+
3582
+
3583
+
3584
+
3585
+
3586
+
3587
+ I
3588
+ B
3589
+ N
3590
+
3591
+
3592
+
3593
+
3594
+
3595
+
3596
+
3597
+
3598
+
3599
+
3600
+
3601
+
3602
+
3603
+
3604
+
3605
+
3606
+
3607
+
3608
+
3609
+
3610
+
3611
+
3612
+
3613
+
3614
+
3615
+
3616
+
3617
+
3618
+
3619
+
3620
+
3621
+
3622
+
3623
+
3624
+
3625
+
3626
+ ^
3627
+ _
3628
+
3629
+
3630
+
3631
+
3632
+
3633
+
3634
+
3635
+
3636
+
3637
+
3638
+
3639
+ M
3640
+
3641
+
3642
+
3643
+
3644
+
3645
+
3646
+
3647
+
3648
+
3649
+
3650
+
3651
+
3652
+
3653
+
3654
+
3655
+
3656
+
3657
+
3658
+
3659
+
3660
+
3661
+
3662
+
3663
+
3664
+
3665
+
3666
+
3667
+
3668
+
3669
+
3670
+
3671
+
3672
+
3673
+ 鱿
3674
+
3675
+
3676
+
3677
+
3678
+
3679
+
3680
+
3681
+
3682
+
3683
+
3684
+
3685
+
3686
+
3687
+
3688
+
3689
+
3690
+
3691
+
3692
+
3693
+
3694
+
3695
+
3696
+
3697
+
3698
+
3699
+
3700
+
3701
+
3702
+
3703
+
3704
+
3705
+
3706
+
3707
+
3708
+
3709
+
3710
+
3711
+
3712
+
3713
+
3714
+
3715
+
3716
+
3717
+
3718
+
3719
+
3720
+
3721
+
3722
+
3723
+
3724
+
3725
+
3726
+
3727
+
3728
+
3729
+
3730
+
3731
+
3732
+
3733
+
3734
+
3735
+
3736
+
3737
+
3738
+
3739
+
3740
+
3741
+
3742
+
3743
+
3744
+
3745
+
3746
+
3747
+
3748
+
3749
+
3750
+
3751
+
3752
+
3753
+
3754
+
3755
+
3756
+
3757
+
3758
+
3759
+
3760
+
3761
+
3762
+
3763
+
3764
+
3765
+
3766
+
3767
+
3768
+
3769
+
3770
+
3771
+
3772
+
3773
+
3774
+
3775
+
3776
+
3777
+
3778
+
3779
+
3780
+
3781
+
3782
+
3783
+
3784
+
3785
+
3786
+
3787
+
3788
+
3789
+
3790
+
3791
+
3792
+
3793
+
3794
+
3795
+
3796
+
3797
+
3798
+
3799
+
3800
+
3801
+
3802
+
3803
+
3804
+
3805
+
3806
+
3807
+
3808
+
3809
+
3810
+
3811
+
3812
+
3813
+
3814
+
3815
+
3816
+
3817
+
3818
+
3819
+
3820
+
3821
+
3822
+
3823
+
3824
+
3825
+
3826
+
3827
+
3828
+
3829
+
3830
+
3831
+
3832
+
3833
+
3834
+
3835
+
3836
+
3837
+
3838
+
3839
+
3840
+
3841
+
3842
+
3843
+
3844
+
3845
+
3846
+
3847
+
3848
+
3849
+
3850
+
3851
+
3852
+
3853
+
3854
+
3855
+
3856
+
3857
+
3858
+
3859
+
3860
+
3861
+
3862
+
3863
+
3864
+
3865
+
3866
+
3867
+
3868
+
3869
+
3870
+
3871
+
3872
+
3873
+
3874
+
3875
+
3876
+
3877
+ 怀
3878
+
3879
+
3880
+
3881
+
3882
+
3883
+
3884
+
3885
+
3886
+
3887
+
3888
+
3889
+
3890
+
3891
+
3892
+
3893
+
3894
+
3895
+
3896
+
3897
+
3898
+
3899
+
3900
+
3901
+
3902
+
3903
+
3904
+
3905
+
3906
+
3907
+
3908
+
3909
+
3910
+
3911
+
3912
+
3913
+
3914
+
3915
+
3916
+
3917
+
3918
+
3919
+
3920
+
3921
+
3922
+
3923
+
3924
+
3925
+
3926
+
3927
+
3928
+
3929
+
3930
+
3931
+
3932
+
3933
+
3934
+
3935
+
3936
+
3937
+
3938
+
3939
+
3940
+
3941
+
3942
+
3943
+
3944
+
3945
+
3946
+
3947
+
3948
+
3949
+
3950
+
3951
+
3952
+
3953
+
3954
+
3955
+
3956
+
3957
+
3958
+
3959
+
3960
+
3961
+
3962
+
3963
+
3964
+
3965
+
3966
+
3967
+
3968
+
3969
+
3970
+
3971
+
3972
+
3973
+
3974
+
3975
+
3976
+
3977
+
3978
+
3979
+
3980
+
3981
+
3982
+
3983
+
3984
+
3985
+
3986
+
3987
+
3988
+
3989
+
3990
+
3991
+
3992
+
3993
+
3994
+
3995
+
3996
+
3997
+
3998
+
3999
+
4000
+
4001
+
4002
+
4003
+
4004
+
4005
+
4006
+
4007
+
4008
+
4009
+
4010
+
4011
+
4012
+
4013
+
4014
+
4015
+
4016
+
4017
+
4018
+
4019
+
4020
+
4021
+
4022
+
4023
+
4024
+
4025
+
4026
+
4027
+
4028
+
4029
+
4030
+
4031
+
4032
+
4033
+
4034
+
4035
+
4036
+
4037
+
4038
+
4039
+
4040
+
4041
+
4042
+
4043
+
4044
+
4045
+
4046
+
4047
+
4048
+
4049
+
4050
+
4051
+
4052
+
4053
+
4054
+
4055
+
4056
+
4057
+
4058
+
4059
+
4060
+
4061
+
4062
+
4063
+
4064
+
4065
+
4066
+
4067
+
4068
+
4069
+
4070
+
4071
+
4072
+
4073
+
4074
+
4075
+
4076
+
4077
+
4078
+ }
4079
+
4080
+
4081
+
4082
+
4083
+
4084
+
4085
+
4086
+
4087
+
4088
+
4089
+
4090
+
4091
+
4092
+
4093
+
4094
+
4095
+
4096
+
4097
+
4098
+
4099
+
4100
+
4101
+
4102
+
4103
+
4104
+
4105
+
4106
+
4107
+
4108
+
4109
+
4110
+
4111
+
4112
+
4113
+
4114
+
4115
+
4116
+
4117
+
4118
+
4119
+
4120
+
4121
+
4122
+
4123
+
4124
+
4125
+
4126
+
4127
+
4128
+ ~
4129
+
4130
+
4131
+
4132
+
4133
+
4134
+
4135
+
4136
+ Z
4137
+
4138
+
4139
+
4140
+
4141
+
4142
+
4143
+
4144
+
4145
+
4146
+
4147
+
4148
+
4149
+
4150
+
4151
+
4152
+
4153
+
4154
+
4155
+
4156
+
4157
+
4158
+
4159
+
4160
+
4161
+
4162
+
4163
+
4164
+
4165
+
4166
+
4167
+
4168
+
4169
+
4170
+
4171
+
4172
+
4173
+
4174
+
4175
+
4176
+
4177
+
4178
+
4179
+
4180
+
4181
+
4182
+
4183
+
4184
+
4185
+
4186
+
4187
+
4188
+
4189
+
4190
+
4191
+
4192
+
4193
+
4194
+
4195
+
4196
+
4197
+
4198
+
4199
+
4200
+
4201
+
4202
+
4203
+
4204
+
4205
+
4206
+
4207
+
4208
+
4209
+
4210
+
4211
+
4212
+
4213
+
4214
+
4215
+
4216
+
4217
+
4218
+
4219
+
4220
+
4221
+
4222
+
4223
+
4224
+
4225
+
4226
+
4227
+
4228
+
4229
+
4230
+
4231
+
4232
+
4233
+
4234
+
4235
+
4236
+ 槿
4237
+
4238
+
4239
+
4240
+
4241
+
4242
+
4243
+
4244
+ C
4245
+ o
4246
+
4247
+
4248
+
4249
+
4250
+
4251
+
4252
+
4253
+
4254
+
4255
+
4256
+
4257
+
4258
+
4259
+
4260
+
4261
+
4262
+
4263
+
4264
+
4265
+
4266
+
4267
+
4268
+
4269
+
4270
+
4271
+
4272
+
4273
+
4274
+
4275
+
4276
+
4277
+
4278
+
4279
+
4280
+
4281
+
4282
+
4283
+
4284
+
4285
+
4286
+
4287
+ ��
4288
+
4289
+
4290
+
4291
+
4292
+
4293
+
4294
+
4295
+
4296
+
4297
+
4298
+
4299
+
4300
+
4301
+
4302
+
4303
+
4304
+
4305
+
4306
+
4307
+
4308
+
4309
+
4310
+
4311
+
4312
+
4313
+
4314
+
4315
+
4316
+
4317
+
4318
+
4319
+
4320
+
4321
+
4322
+
4323
+
4324
+
4325
+
4326
+
4327
+
4328
+
4329
+
4330
+
4331
+
4332
+
4333
+
4334
+
4335
+
4336
+
4337
+
4338
+
4339
+
4340
+
4341
+
4342
+
4343
+
4344
+
4345
+
4346
+
4347
+
4348
+
4349
+
4350
+
4351
+
4352
+
4353
+
4354
+
4355
+
4356
+
4357
+
4358
+
4359
+
4360
+
4361
+
4362
+
4363
+
4364
+
4365
+
4366
+
4367
+
4368
+
4369
+
4370
+
4371
+
4372
+
4373
+
4374
+
4375
+
4376
+
4377
+
4378
+
4379
+
4380
+
4381
+ E
4382
+
4383
+
4384
+
4385
+
4386
+
4387
+
4388
+
4389
+ f
4390
+
4391
+
4392
+
4393
+
4394
+
4395
+
4396
+
4397
+
4398
+
4399
+
4400
+
4401
+
4402
+
4403
+
4404
+
4405
+
4406
+
4407
+
4408
+
4409
+
4410
+
4411
+
4412
+
4413
+
4414
+
4415
+
4416
+
4417
+
4418
+
4419
+
4420
+
4421
+
4422
+
4423
+
4424
+
4425
+
4426
+
4427
+
4428
+ \
4429
+
4430
+
4431
+
4432
+
4433
+
4434
+
4435
+
4436
+
4437
+
4438
+
4439
+
4440
+
4441
+
4442
+
4443
+
4444
+
4445
+
4446
+
4447
+
4448
+
4449
+
4450
+
4451
+
4452
+
4453
+
4454
+
4455
+
4456
+
4457
+
4458
+
4459
+
4460
+
4461
+
4462
+
4463
+
4464
+
4465
+
4466
+
4467
+
4468
+
4469
+
4470
+
4471
+
4472
+
4473
+ 屿
4474
+
4475
+
4476
+
4477
+
4478
+
4479
+
4480
+
4481
+
4482
+
4483
+
4484
+
4485
+
4486
+
4487
+
4488
+
4489
+
4490
+
4491
+
4492
+
4493
+
4494
+
4495
+
4496
+
4497
+ U
4498
+
4499
+
4500
+
4501
+
4502
+
4503
+
4504
+
4505
+
4506
+
4507
+
4508
+
4509
+
4510
+
4511
+
4512
+
4513
+
4514
+
4515
+
4516
+
4517
+
4518
+
4519
+
4520
+
4521
+
4522
+
4523
+
4524
+
4525
+
4526
+
4527
+
4528
+
4529
+
4530
+
4531
+
4532
+
4533
+
4534
+
4535
+
4536
+
4537
+
4538
+
4539
+
4540
+
4541
+
4542
+
4543
+
4544
+ a
4545
+ p
4546
+ y
4547
+ n
4548
+ g
4549
+
4550
+
4551
+
4552
+
4553
+
4554
+
4555
+
4556
+
4557
+
4558
+
4559
+
4560
+
4561
+
4562
+
4563
+
4564
+
4565
+
4566
+
4567
+
4568
+
4569
+
4570
+
4571
+
4572
+
4573
+
4574
+
4575
+
4576
+
4577
+
4578
+
4579
+
4580
+
4581
+
4582
+
4583
+
4584
+
4585
+
4586
+
4587
+
4588
+
4589
+
4590
+
4591
+
4592
+
4593
+
4594
+
4595
+
4596
+
4597
+
4598
+
4599
+
4600
+
4601
+
4602
+
4603
+
4604
+
4605
+
4606
+
4607
+
4608
+
4609
+
4610
+
4611
+
4612
+
4613
+
4614
+
4615
+
4616
+
4617
+
4618
+
4619
+
4620
+
4621
+
4622
+
4623
+
4624
+
4625
+
4626
+
4627
+
4628
+
4629
+
4630
+
4631
+
4632
+
4633
+
4634
+
4635
+
4636
+
4637
+
4638
+
4639
+
4640
+
4641
+
4642
+
4643
+
4644
+
4645
+
4646
+
4647
+
4648
+
4649
+
4650
+
4651
+
4652
+
4653
+
4654
+
4655
+
4656
+
4657
+
4658
+
4659
+
4660
+
4661
+
4662
+
4663
+
4664
+
4665
+
4666
+
4667
+
4668
+
4669
+
4670
+
4671
+
4672
+
4673
+
4674
+
4675
+
4676
+
4677
+
4678
+
4679
+
4680
+
4681
+
4682
+
4683
+
4684
+
4685
+
4686
+
4687
+
4688
+
4689
+
4690
+
4691
+
4692
+
4693
+
4694
+
4695
+
4696
+
4697
+
4698
+
4699
+
4700
+
4701
+
4702
+
4703
+
4704
+
4705
+
4706
+
4707
+ 竿
4708
+
4709
+
4710
+
4711
+
4712
+
4713
+
4714
+
4715
+
4716
+
4717
+
4718
+
4719
+
4720
+
4721
+
4722
+
4723
+
4724
+
4725
+
4726
+
4727
+
4728
+
4729
+
4730
+
4731
+ Q
4732
+
4733
+
4734
+
4735
+
4736
+
4737
+
4738
+
4739
+ 羿
4740
+
4741
+ O
4742
+
4743
+
4744
+
4745
+
4746
+
4747
+
4748
+
4749
+
4750
+
4751
+
4752
+
4753
+
4754
+
4755
+
4756
+
4757
+
4758
+
4759
+ 宿
4760
+
4761
+
4762
+
4763
+
4764
+
4765
+
4766
+
4767
+
4768
+
4769
+
4770
+
4771
+
4772
+
4773
+
4774
+
4775
+
4776
+
4777
+
4778
+
4779
+
4780
+
4781
+
4782
+
4783
+
4784
+
4785
+
4786
+
4787
+
4788
+
4789
+
4790
+
4791
+
4792
+
4793
+
4794
+
4795
+
4796
+
4797
+
4798
+
4799
+
4800
+
4801
+
4802
+
4803
+
4804
+
4805
+
4806
+
4807
+
4808
+
4809
+
4810
+
4811
+
4812
+
4813
+
4814
+
4815
+
4816
+
4817
+
4818
+
4819
+
4820
+
4821
+
4822
+
4823
+
4824
+
4825
+
4826
+
4827
+
4828
+
4829
+
4830
+
4831
+
4832
+
4833
+
4834
+
4835
+
4836
+
4837
+
4838
+
4839
+
4840
+
4841
+
4842
+
4843
+
4844
+
4845
+
4846
+
4847
+
4848
+
4849
+ k
4850
+
4851
+
4852
+
4853
+
4854
+
4855
+
4856
+
4857
+
4858
+
4859
+
4860
+
4861
+
4862
+
4863
+
4864
+
4865
+
4866
+
4867
+
4868
+
4869
+
4870
+
4871
+
4872
+
4873
+
4874
+
4875
+
4876
+
4877
+
4878
+
4879
+
4880
+
4881
+
4882
+
4883
+
4884
+
4885
+ $
4886
+
4887
+
4888
+
4889
+
4890
+
4891
+
4892
+
4893
+
4894
+
4895
+
4896
+
4897
+
4898
+
4899
+
4900
+
4901
+
4902
+ c
4903
+
4904
+
4905
+
4906
+
4907
+
4908
+
4909
+
4910
+
4911
+
4912
+
4913
+
4914
+
4915
+
4916
+
4917
+
4918
+
4919
+
4920
+
4921
+
4922
+ v
4923
+
4924
+
4925
+
4926
+
4927
+
4928
+
4929
+
4930
+
4931
+
4932
+
4933
+
4934
+
4935
+
4936
+
4937
+
4938
+
4939
+
4940
+
4941
+
4942
+
4943
+
4944
+
4945
+
4946
+
4947
+
4948
+
4949
+
4950
+
4951
+
4952
+
4953
+
4954
+
4955
+
4956
+
4957
+
4958
+
4959
+
4960
+
4961
+
4962
+
4963
+
4964
+
4965
+
4966
+
4967
+
4968
+
4969
+
4970
+
4971
+
4972
+
4973
+
4974
+
4975
+
4976
+
4977
+
4978
+
4979
+
4980
+
4981
+
4982
+
4983
+
4984
+
4985
+
4986
+
4987
+
4988
+
4989
+
4990
+
4991
+
4992
+
4993
+
4994
+
4995
+
4996
+
4997
+
4998
+
4999
+
5000
+
5001
+
5002
+
5003
+
5004
+
5005
+
5006
+
5007
+
5008
+
5009
+
5010
+
5011
+
5012
+
5013
+
5014
+
5015
+
5016
+
5017
+
5018
+
5019
+
5020
+
5021
+
5022
+
5023
+
5024
+
5025
+
5026
+
5027
+
5028
+
5029
+
5030
+
5031
+
5032
+
5033
+ W
5034
+
5035
+
5036
+
5037
+
5038
+
5039
+
5040
+
5041
+
5042
+
5043
+
5044
+
5045
+ 穿
5046
+
5047
+
5048
+
5049
+
5050
+
5051
+
5052
+
5053
+
5054
+
5055
+
5056
+
5057
+
5058
+
5059
+
5060
+
5061
+
5062
+
5063
+
5064
+
5065
+
5066
+
5067
+
5068
+
5069
+
5070
+
5071
+
5072
+
5073
+
5074
+
5075
+
5076
+
5077
+
5078
+
5079
+
5080
+
5081
+
5082
+
5083
+
5084
+
5085
+
5086
+ ×
5087
+
5088
+
5089
+
5090
+
5091
+
5092
+
5093
+
5094
+
5095
+
5096
+
5097
+
5098
+
5099
+ 轿
5100
+
5101
+
5102
+
5103
+
5104
+
5105
+
5106
+
5107
+
5108
+
5109
+
5110
+
5111
+
5112
+
5113
+
5114
+
5115
+
5116
+
5117
+
5118
+
5119
+
5120
+
5121
+
5122
+
5123
+
5124
+
5125
+
5126
+
5127
+ R
5128
+ G
5129
+
5130
+
5131
+
5132
+
5133
+
5134
+
5135
+
5136
+
5137
+
5138
+
5139
+
5140
+
5141
+
5142
+
5143
+
5144
+
5145
+
5146
+
5147
+
5148
+
5149
+
5150
+
5151
+
5152
+
5153
+
5154
+
5155
+
5156
+
5157
+
5158
+
5159
+
5160
+
5161
+
5162
+
5163
+
5164
+
5165
+
5166
+
5167
+
5168
+
5169
+ ˉ
5170
+
5171
+ d
5172
+ °
5173
+
5174
+
5175
+
5176
+
5177
+
5178
+
5179
+
5180
+
5181
+
5182
+
5183
+
5184
+
5185
+
5186
+
5187
+
5188
+
5189
+
5190
+
5191
+
5192
+
5193
+ K
5194
+
5195
+
5196
+
5197
+
5198
+
5199
+
5200
+ X
5201
+
5202
+
5203
+
5204
+
5205
+
5206
+
5207
+
5208
+
5209
+
5210
+
5211
+
5212
+
5213
+
5214
+
5215
+
5216
+
5217
+
5218
+
5219
+
5220
+
5221
+
5222
+
5223
+
5224
+
5225
+
5226
+
5227
+
5228
+
5229
+
5230
+
5231
+
5232
+
5233
+ m
5234
+
5235
+
5236
+
5237
+
5238
+
5239
+
5240
+
5241
+
5242
+
5243
+
5244
+ 涿
5245
+
5246
+
5247
+
5248
+
5249
+
5250
+
5251
+
5252
+
5253
+
5254
+
5255
+
5256
+
5257
+
5258
+
5259
+
5260
+
5261
+
5262
+
5263
+
5264
+
5265
+
5266
+
5267
+
5268
+
5269
+
5270
+
5271
+
5272
+
5273
+
5274
+
5275
+
5276
+
5277
+
5278
+
5279
+
5280
+
5281
+
5282
+
5283
+
5284
+
5285
+
5286
+
5287
+
5288
+
5289
+
5290
+
5291
+
5292
+
5293
+
5294
+
5295
+
5296
+
5297
+
5298
+
5299
+
5300
+
5301
+
5302
+
5303
+
5304
+
5305
+
5306
+
5307
+
5308
+
5309
+
5310
+
5311
+
5312
+
5313
+
5314
+
5315
+
5316
+
5317
+
5318
+
5319
+
5320
+
5321
+
5322
+
5323
+
5324
+
5325
+
5326
+
5327
+
5328
+
5329
+
5330
+
5331
+
5332
+
5333
+
5334
+
5335
+ `
5336
+
5337
+
5338
+
5339
+
5340
+
5341
+
5342
+
5343
+
5344
+
5345
+
5346
+
5347
+
5348
+
5349
+
5350
+
5351
+
5352
+
5353
+
5354
+
5355
+
5356
+
5357
+
5358
+
5359
+
5360
+
5361
+
5362
+
5363
+
5364
+
5365
+
5366
+
5367
+
5368
+
5369
+
5370
+
5371
+
5372
+
5373
+
5374
+
5375
+
5376
+
5377
+
5378
+
5379
+
5380
+
5381
+
5382
+
5383
+
5384
+
5385
+
5386
+
5387
+
5388
+
5389
+
5390
+
5391
+
5392
+
5393
+
5394
+
5395
+
5396
+
5397
+
5398
+
5399
+
5400
+
5401
+
5402
+
5403
+
5404
+
5405
+ V
5406
+
5407
+
5408
+
5409
+
5410
+
5411
+
5412
+
5413
+
5414
+
5415
+
5416
+
5417
+
5418
+
5419
+
5420
+
5421
+
5422
+
5423
+
5424
+
5425
+
5426
+
5427
+
5428
+
5429
+
5430
+
5431
+
5432
+
5433
+
5434
+
5435
+
5436
+
5437
+
5438
+
5439
+
5440
+
5441
+
5442
+
5443
+
5444
+
5445
+
5446
+
5447
+
5448
+
5449
+
5450
+
5451
+
5452
+
5453
+
5454
+
5455
+
5456
+
5457
+
5458
+
5459
+
5460
+
5461
+ #
5462
+
5463
+
5464
+
5465
+
5466
+
5467
+
5468
+
5469
+
5470
+
5471
+
5472
+
5473
+
5474
+
5475
+
5476
+
5477
+
5478
+
5479
+
5480
+
5481
+
5482
+
5483
+ 簿
5484
+
5485
+
5486
+
5487
+
5488
+
5489
+ {
5490
+
5491
+
5492
+
5493
+ j
5494
+
5495
+
5496
+
5497
+
5498
+
5499
+
5500
+
5501
+
5502
+
5503
+
5504
+
5505
+
5506
+
5507
+
5508
+
5509
+
5510
+
5511
+
5512
+
5513
+
5514
+
5515
+
5516
+
5517
+
5518
+
5519
+
5520
+
5521
+
5522
+
5523
+
5524
+
5525
+
5526
+
5527
+
5528
+
5529
+ ·
5530
+
5531
+
5532
+
5533
+ Ë
5534
+
5535
+
5536
+
5537
+
5538
+
5539
+
5540
+
5541
+
5542
+
5543
+
5544
+
5545
+
5546
+ ¥
5547
+
5548
+
5549
+
5550
+
5551
+
5552
+
5553
+
5554
+
5555
+
5556
+
5557
+
5558
+
5559
+
5560
+ π
5561
+
5562
+
5563
+
5564
+ é
5565
+
5566
+
5567
+ Λ
5568
+
5569
+
5570
+
5571
+
5572
+
5573
+
5574
+
5575
+
5576
+
5577
+
5578
+
5579
+
5580
+
5581
+
5582
+
5583
+
5584
+
5585
+
5586
+
5587
+
5588
+
5589
+
5590
+
5591
+
5592
+
5593
+
5594
+
5595
+
5596
+
5597
+
5598
+
5599
+
5600
+
5601
+
5602
+
5603
+
5604
+
5605
+ Ο
5606
+
5607
+
5608
+
5609
+
5610
+
5611
+
5612
+
5613
+
5614
+
5615
+
5616
+
5617
+
5618
+
5619
+
5620
+
5621
+
5622
+
5623
+
5624
+
5625
+
5626
+
5627
+
5628
+
5629
+
5630
+
5631
+
5632
+
5633
+
5634
+
5635
+
5636
+
5637
+
5638
+
5639
+
5640
+
5641
+
5642
+
5643
+
5644
+
5645
+
5646
+
5647
+
5648
+
5649
+
5650
+
5651
+
5652
+
5653
+
5654
+
5655
+
5656
+
5657
+
5658
+
5659
+
5660
+
5661
+
5662
+
5663
+
5664
+
5665
+
5666
+
5667
+
5668
+
5669
+
5670
+
5671
+
5672
+
5673
+
5674
+ α
5675
+
5676
+
5677
+
5678
+
5679
+
5680
+
5681
+
5682
+
5683
+
5684
+
5685
+
5686
+
5687
+
5688
+
5689
+
5690
+
5691
+
5692
+
5693
+
5694
+
5695
+
5696
+
5697
+
5698
+
5699
+
5700
+
5701
+
5702
+
5703
+
5704
+
5705
+
5706
+
5707
+
5708
+
5709
+
5710
+  
5711
+
5712
+
5713
+
5714
+
5715
+
5716
+
5717
+
5718
+
5719
+
5720
+
5721
+
5722
+
5723
+
5724
+
5725
+
5726
+
5727
+
5728
+ 鴿
5729
+
5730
+
5731
+
5732
+
5733
+
5734
+
5735
+
5736
+
5737
+
5738
+
5739
+
5740
+
5741
+
5742
+
5743
+
5744
+
5745
+
5746
+
5747
+
5748
+
5749
+
5750
+
5751
+
5752
+
5753
+
5754
+
5755
+
5756
+
5757
+
5758
+
5759
+
5760
+
5761
+
5762
+
5763
+
5764
+
5765
+
5766
+
5767
+
5768
+
5769
+
5770
+
5771
+
5772
+
5773
+
5774
+
5775
+
5776
+
5777
+
5778
+
5779
+
5780
+
5781
+
5782
+
5783
+
5784
+
5785
+
5786
+
5787
+
5788
+
5789
+
5790
+
5791
+
5792
+
5793
+
5794
+
5795
+
5796
+
5797
+
5798
+
5799
+
5800
+ è
5801
+
5802
+
5803
+
5804
+
5805
+
5806
+ Ü
5807
+
5808
+
5809
+
5810
+
5811
+
5812
+
5813
+
5814
+
5815
+
5816
+
5817
+ И
5818
+
5819
+
5820
+
5821
+
5822
+
5823
+
5824
+
5825
+
5826
+
5827
+
5828
+
5829
+
5830
+
5831
+
5832
+
5833
+
5834
+
5835
+
5836
+
5837
+
5838
+ »
5839
+
5840
+
5841
+ ä
5842
+
5843
+
5844
+
5845
+
5846
+
5847
+
5848
+
5849
+
5850
+
5851
+
5852
+
5853
+
5854
+
5855
+
5856
+
5857
+
5858
+
5859
+
5860
+
5861
+
5862
+
5863
+
5864
+
5865
+
5866
+
5867
+
5868
+
5869
+
5870
+
5871
+
5872
+
5873
+
5874
+
5875
+
5876
+ ɔ
5877
+
5878
+
5879
+
5880
+
5881
+
5882
+
5883
+ ´
5884
+
5885
+
5886
+
5887
+
5888
+ í
5889
+
5890
+
5891
+
5892
+
5893
+
5894
+
5895
+
5896
+
5897
+
5898
+
5899
+
5900
+
5901
+
5902
+
5903
+
5904
+
5905
+
5906
+
5907
+
5908
+
5909
+ É
5910
+
5911
+
5912
+
5913
+
5914
+ ʌ
5915
+
5916
+
5917
+
5918
+
5919
+
5920
+
5921
+
5922
+
5923
+
5924
+
5925
+ Я
5926
+ Й
5927
+
5928
+
5929
+
5930
+
5931
+
5932
+
5933
+
5934
+
5935
+
5936
+
5937
+
5938
+
5939
+
5940
+
5941
+
5942
+
5943
+
5944
+
5945
+
5946
+
5947
+
5948
+ 粿
5949
+
5950
+
5951
+
5952
+
5953
+ ®
5954
+
5955
+
5956
+
5957
+
5958
+
5959
+
5960
+
5961
+
5962
+
5963
+
5964
+
5965
+
5966
+ З
5967
+
5968
+
5969
+
5970
+
5971
+
5972
+
5973
+
5974
+
5975
+
5976
+ β
5977
+
5978
+ á
5979
+
5980
+
5981
+
5982
+
5983
+
5984
+
5985
+
5986
+
5987
+
5988
+
5989
+
5990
+
5991
+
5992
+
5993
+
5994
+
5995
+
5996
+
5997
+
5998
+
5999
+
6000
+
6001
+
6002
+
6003
+
6004
+
6005
+
6006
+
6007
+
6008
+
6009
+
6010
+
6011
+
6012
+
6013
+
6014
+
6015
+
6016
+
6017
+
6018
+
6019
+
6020
+
6021
+
6022
+
6023
+
6024
+
6025
+
6026
+
6027
+
6028
+
6029
+
6030
+
6031
+
6032
+
6033
+
6034
+
6035
+
6036
+
6037
+
6038
+
6039
+
6040
+
6041
+
6042
+
6043
+
6044
+
6045
+
6046
+
6047
+
6048
+
6049
+
6050
+
6051
+
6052
+
6053
+
6054
+
6055
+
6056
+
6057
+
6058
+
6059
+
6060
+
6061
+
6062
+
6063
+
6064
+
6065
+
6066
+ Ó
6067
+
6068
+
6069
+
6070
+
6071
+
6072
+
6073
+
6074
+
6075
+
6076
+
6077
+
6078
+
6079
+
6080
+
6081
+
6082
+
6083
+
6084
+
6085
+
6086
+
6087
+
6088
+
6089
+
6090
+
6091
+
6092
+
6093
+
6094
+
6095
+
6096
+ ò
6097
+
6098
+
6099
+
6100
+
6101
+
6102
+
6103
+
6104
+
6105
+
6106
+
6107
+
6108
+
6109
+
6110
+
6111
+
6112
+
6113
+
6114
+
6115
+
6116
+
6117
+
6118
+
6119
+
6120
+
6121
+
6122
+
6123
+
6124
+
6125
+ 貿
6126
+
6127
+
6128
+
6129
+
6130
+
6131
+
6132
+
6133
+
6134
+
6135
+
6136
+
6137
+
6138
+ 𣇉
6139
+
6140
+
6141
+
6142
+
6143
+
6144
+
6145
+
6146
+
6147
+
6148
+
6149
+
6150
+
6151
+
6152
+
6153
+
6154
+
6155
+
6156
+
6157
+
6158
+
6159
+
6160
+
6161
+
6162
+
6163
+
6164
+
6165
+
6166
+
6167
+ г
6168
+
6169
+
6170
+
6171
+
6172
+
6173
+
6174
+
6175
+
6176
+
6177
+
6178
+
6179
+
6180
+
6181
+
6182
+
6183
+
6184
+
6185
+
6186
+
6187
+
6188
+
6189
+
6190
+
6191
+ 楿
6192
+
6193
+
6194
+
6195
+
6196
+
6197
+
6198
+ 滿
6199
+
6200
+
6201
+
6202
+
6203
+
6204
+
6205
+
6206
+
6207
+
6208
+
6209
+
6210
+
6211
+
6212
+
6213
+
6214
+
6215
+
6216
+
6217
+
6218
+
6219
+
6220
+
6221
+
6222
+
6223
+
6224
+
6225
+
6226
+
6227
+
6228
+
6229
+
6230
+
6231
+
6232
+
6233
+
6234
+
6235
+
6236
+
6237
+
6238
+
6239
+
6240
+
6241
+
6242
+
6243
+
6244
+
6245
+
6246
+
6247
+
6248
+
6249
+
6250
+
6251
+
6252
+
6253
+
6254
+ Φ
6255
+
6256
+
6257
+
6258
+
6259
+
6260
+
6261
+ ε
6262
+
6263
+
6264
+
6265
+
6266
+
6267
+
6268
+
6269
+
6270
+
6271
+
6272
+
6273
+
6274
+ ü
6275
+
6276
+
6277
+
6278
+
6279
+ 調
6280
+
6281
+
6282
+
6283
+
6284
+
6285
+
6286
+
6287
+
6288
+
6289
+
6290
+
6291
+
6292
+
6293
+
6294
+
6295
+
6296
+
6297
+
6298
+
6299
+
6300
+
6301
+
6302
+
6303
+
6304
+
6305
+
6306
+
6307
+
6308
+
6309
+
6310
+
6311
+
6312
+
6313
+
6314
+
6315
+
6316
+
6317
+
6318
+
6319
+
6320
+
6321
+
6322
+
6323
+
6324
+
6325
+
6326
+ ˋ
6327
+
6328
+
6329
+ ā
6330
+
6331
+
6332
+
6333
+
6334
+
6335
+
6336
+
6337
+
6338
+
6339
+
6340
+
6341
+
6342
+
6343
+
6344
+
6345
+
6346
+
6347
+
6348
+
6349
+
6350
+
6351
+
6352
+
6353
+
6354
+
6355
+
6356
+
6357
+
6358
+
6359
+
6360
+
6361
+
6362
+
6363
+
6364
+
6365
+
6366
+
6367
+
6368
+
6369
+ ú
6370
+ ó
6371
+
6372
+
6373
+
6374
+
6375
+
6376
+
6377
+
6378
+
6379
+
6380
+
6381
+
6382
+
6383
+
6384
+
6385
+
6386
+
6387
+
6388
+
6389
+
6390
+ ē
6391
+
6392
+
6393
+
6394
+
6395
+
6396
+
6397
+
6398
+
6399
+
6400
+
6401
+
6402
+
6403
+
6404
+
6405
+
6406
+
6407
+
6408
+
6409
+
6410
+
6411
+
6412
+ Ω
6413
+
6414
+
6415
+
6416
+
6417
+
6418
+
6419
+
6420
+
6421
+
6422
+
6423
+
6424
+
6425
+
6426
+
6427
+
6428
+
6429
+
6430
+
6431
+
6432
+
6433
+
6434
+
6435
+
6436
+
6437
+ П
6438
+
6439
+
6440
+
6441
+
6442
+
6443
+
6444
+
6445
+
6446
+
6447
+
6448
+
6449
+
6450
+
6451
+
6452
+
6453
+
6454
+
6455
+
6456
+
6457
+
6458
+
6459
+
6460
+ ǐ
6461
+ ō
6462
+ ǒ
6463
+
6464
+
6465
+
6466
+ μ
6467
+
6468
+
6469
+
6470
+
6471
+
6472
+
6473
+
6474
+
6475
+ à
6476
+ ɡ
6477
+
6478
+
6479
+
6480
+
6481
+
6482
+
6483
+
6484
+
6485
+ ī
6486
+
6487
+
6488
+
6489
+
6490
+
6491
+
6492
+
6493
+
6494
+
6495
+
6496
+
6497
+
6498
+
6499
+
6500
+
6501
+
6502
+
6503
+
6504
+
6505
+
6506
+
6507
+
6508
+
6509
+
6510
+
6511
+
6512
+
6513
+
6514
+
6515
+
6516
+
6517
+
6518
+
6519
+
6520
+
6521
+
6522
+
6523
+
6524
+
6525
+
6526
+
6527
+
6528
+
6529
+
6530
+
6531
+
6532
+
6533
+
6534
+
6535
+
6536
+
6537
+
6538
+
6539
+
6540
+
6541
+ ²
6542
+
6543
+
6544
+
6545
+
6546
+
6547
+
6548
+
6549
+
6550
+
6551
+
6552
+
6553
+
6554
+
6555
+
6556
+
6557
+
6558
+
6559
+
6560
+
6561
+
6562
+
6563
+
6564
+
6565
+
6566
+
6567
+
6568
+
6569
+
6570
+
6571
+
6572
+
6573
+
6574
+
6575
+
6576
+
6577
+
6578
+
6579
+
6580
+
6581
+
6582
+ 駿
6583
+
6584
+
6585
+
6586
+
6587
+
6588
+
6589
+
6590
+
6591
+
6592
+
6593
+
6594
+
6595
+
6596
+
6597
+
6598
+
6599
+
6600
+
6601
+
6602
+
6603
+
6604
+
6605
+
6606
+
6607
+
6608
+
6609
+ θ
6610
+
6611
+
6612
+
6613
+ ū
6614
+ ì
6615
+
6616
+
6617
+
6618
+
6619
+
6620
+
6621
+
6622
+
6623
+
deepdoc/vision/operators.py ADDED
@@ -0,0 +1,711 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ #
2
+ # Copyright 2024 The InfiniFlow Authors. All Rights Reserved.
3
+ #
4
+ # Licensed under the Apache License, Version 2.0 (the "License");
5
+ # you may not use this file except in compliance with the License.
6
+ # You may obtain a copy of the License at
7
+ #
8
+ # http://www.apache.org/licenses/LICENSE-2.0
9
+ #
10
+ # Unless required by applicable law or agreed to in writing, software
11
+ # distributed under the License is distributed on an "AS IS" BASIS,
12
+ # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
13
+ # See the License for the specific language governing permissions and
14
+ # limitations under the License.
15
+ #
16
+
17
+ import sys
18
+ import six
19
+ import cv2
20
+ import numpy as np
21
+ import math
22
+ from PIL import Image
23
+
24
+
25
+ class DecodeImage(object):
26
+ """ decode image """
27
+
28
+ def __init__(self,
29
+ img_mode='RGB',
30
+ channel_first=False,
31
+ ignore_orientation=False,
32
+ **kwargs):
33
+ self.img_mode = img_mode
34
+ self.channel_first = channel_first
35
+ self.ignore_orientation = ignore_orientation
36
+
37
+ def __call__(self, data):
38
+ img = data['image']
39
+ if six.PY2:
40
+ assert isinstance(img, str) and len(
41
+ img) > 0, "invalid input 'img' in DecodeImage"
42
+ else:
43
+ assert isinstance(img, bytes) and len(
44
+ img) > 0, "invalid input 'img' in DecodeImage"
45
+ img = np.frombuffer(img, dtype='uint8')
46
+ if self.ignore_orientation:
47
+ img = cv2.imdecode(img, cv2.IMREAD_IGNORE_ORIENTATION |
48
+ cv2.IMREAD_COLOR)
49
+ else:
50
+ img = cv2.imdecode(img, 1)
51
+ if img is None:
52
+ return None
53
+ if self.img_mode == 'GRAY':
54
+ img = cv2.cvtColor(img, cv2.COLOR_GRAY2BGR)
55
+ elif self.img_mode == 'RGB':
56
+ assert img.shape[2] == 3, 'invalid shape of image[%s]' % (
57
+ img.shape)
58
+ img = img[:, :, ::-1]
59
+
60
+ if self.channel_first:
61
+ img = img.transpose((2, 0, 1))
62
+
63
+ data['image'] = img
64
+ return data
65
+
66
+
67
+ class StandardizeImage(object):
68
+ """normalize image
69
+ Args:
70
+ mean (list): im - mean
71
+ std (list): im / std
72
+ is_scale (bool): whether need im / 255
73
+ norm_type (str): type in ['mean_std', 'none']
74
+ """
75
+
76
+ def __init__(self, mean, std, is_scale=True, norm_type='mean_std'):
77
+ self.mean = mean
78
+ self.std = std
79
+ self.is_scale = is_scale
80
+ self.norm_type = norm_type
81
+
82
+ def __call__(self, im, im_info):
83
+ """
84
+ Args:
85
+ im (np.ndarray): image (np.ndarray)
86
+ im_info (dict): info of image
87
+ Returns:
88
+ im (np.ndarray): processed image (np.ndarray)
89
+ im_info (dict): info of processed image
90
+ """
91
+ im = im.astype(np.float32, copy=False)
92
+ if self.is_scale:
93
+ scale = 1.0 / 255.0
94
+ im *= scale
95
+
96
+ if self.norm_type == 'mean_std':
97
+ mean = np.array(self.mean)[np.newaxis, np.newaxis, :]
98
+ std = np.array(self.std)[np.newaxis, np.newaxis, :]
99
+ im -= mean
100
+ im /= std
101
+ return im, im_info
102
+
103
+
104
+ class NormalizeImage(object):
105
+ """ normalize image such as substract mean, divide std
106
+ """
107
+
108
+ def __init__(self, scale=None, mean=None, std=None, order='chw', **kwargs):
109
+ if isinstance(scale, str):
110
+ scale = eval(scale)
111
+ self.scale = np.float32(scale if scale is not None else 1.0 / 255.0)
112
+ mean = mean if mean is not None else [0.485, 0.456, 0.406]
113
+ std = std if std is not None else [0.229, 0.224, 0.225]
114
+
115
+ shape = (3, 1, 1) if order == 'chw' else (1, 1, 3)
116
+ self.mean = np.array(mean).reshape(shape).astype('float32')
117
+ self.std = np.array(std).reshape(shape).astype('float32')
118
+
119
+ def __call__(self, data):
120
+ img = data['image']
121
+ from PIL import Image
122
+ if isinstance(img, Image.Image):
123
+ img = np.array(img)
124
+ assert isinstance(img,
125
+ np.ndarray), "invalid input 'img' in NormalizeImage"
126
+ data['image'] = (
127
+ img.astype('float32') * self.scale - self.mean) / self.std
128
+ return data
129
+
130
+
131
+ class ToCHWImage(object):
132
+ """ convert hwc image to chw image
133
+ """
134
+
135
+ def __init__(self, **kwargs):
136
+ pass
137
+
138
+ def __call__(self, data):
139
+ img = data['image']
140
+ from PIL import Image
141
+ if isinstance(img, Image.Image):
142
+ img = np.array(img)
143
+ data['image'] = img.transpose((2, 0, 1))
144
+ return data
145
+
146
+
147
+ class Fasttext(object):
148
+ def __init__(self, path="None", **kwargs):
149
+ import fasttext
150
+ self.fast_model = fasttext.load_model(path)
151
+
152
+ def __call__(self, data):
153
+ label = data['label']
154
+ fast_label = self.fast_model[label]
155
+ data['fast_label'] = fast_label
156
+ return data
157
+
158
+
159
+ class KeepKeys(object):
160
+ def __init__(self, keep_keys, **kwargs):
161
+ self.keep_keys = keep_keys
162
+
163
+ def __call__(self, data):
164
+ data_list = []
165
+ for key in self.keep_keys:
166
+ data_list.append(data[key])
167
+ return data_list
168
+
169
+
170
+ class Pad(object):
171
+ def __init__(self, size=None, size_div=32, **kwargs):
172
+ if size is not None and not isinstance(size, (int, list, tuple)):
173
+ raise TypeError("Type of target_size is invalid. Now is {}".format(
174
+ type(size)))
175
+ if isinstance(size, int):
176
+ size = [size, size]
177
+ self.size = size
178
+ self.size_div = size_div
179
+
180
+ def __call__(self, data):
181
+
182
+ img = data['image']
183
+ img_h, img_w = img.shape[0], img.shape[1]
184
+ if self.size:
185
+ resize_h2, resize_w2 = self.size
186
+ assert (
187
+ img_h < resize_h2 and img_w < resize_w2
188
+ ), '(h, w) of target size should be greater than (img_h, img_w)'
189
+ else:
190
+ resize_h2 = max(
191
+ int(math.ceil(img.shape[0] / self.size_div) * self.size_div),
192
+ self.size_div)
193
+ resize_w2 = max(
194
+ int(math.ceil(img.shape[1] / self.size_div) * self.size_div),
195
+ self.size_div)
196
+ img = cv2.copyMakeBorder(
197
+ img,
198
+ 0,
199
+ resize_h2 - img_h,
200
+ 0,
201
+ resize_w2 - img_w,
202
+ cv2.BORDER_CONSTANT,
203
+ value=0)
204
+ data['image'] = img
205
+ return data
206
+
207
+
208
+ class LinearResize(object):
209
+ """resize image by target_size and max_size
210
+ Args:
211
+ target_size (int): the target size of image
212
+ keep_ratio (bool): whether keep_ratio or not, default true
213
+ interp (int): method of resize
214
+ """
215
+
216
+ def __init__(self, target_size, keep_ratio=True, interp=cv2.INTER_LINEAR):
217
+ if isinstance(target_size, int):
218
+ target_size = [target_size, target_size]
219
+ self.target_size = target_size
220
+ self.keep_ratio = keep_ratio
221
+ self.interp = interp
222
+
223
+ def __call__(self, im, im_info):
224
+ """
225
+ Args:
226
+ im (np.ndarray): image (np.ndarray)
227
+ im_info (dict): info of image
228
+ Returns:
229
+ im (np.ndarray): processed image (np.ndarray)
230
+ im_info (dict): info of processed image
231
+ """
232
+ assert len(self.target_size) == 2
233
+ assert self.target_size[0] > 0 and self.target_size[1] > 0
234
+ im_channel = im.shape[2]
235
+ im_scale_y, im_scale_x = self.generate_scale(im)
236
+ im = cv2.resize(
237
+ im,
238
+ None,
239
+ None,
240
+ fx=im_scale_x,
241
+ fy=im_scale_y,
242
+ interpolation=self.interp)
243
+ im_info['im_shape'] = np.array(im.shape[:2]).astype('float32')
244
+ im_info['scale_factor'] = np.array(
245
+ [im_scale_y, im_scale_x]).astype('float32')
246
+ return im, im_info
247
+
248
+ def generate_scale(self, im):
249
+ """
250
+ Args:
251
+ im (np.ndarray): image (np.ndarray)
252
+ Returns:
253
+ im_scale_x: the resize ratio of X
254
+ im_scale_y: the resize ratio of Y
255
+ """
256
+ origin_shape = im.shape[:2]
257
+ im_c = im.shape[2]
258
+ if self.keep_ratio:
259
+ im_size_min = np.min(origin_shape)
260
+ im_size_max = np.max(origin_shape)
261
+ target_size_min = np.min(self.target_size)
262
+ target_size_max = np.max(self.target_size)
263
+ im_scale = float(target_size_min) / float(im_size_min)
264
+ if np.round(im_scale * im_size_max) > target_size_max:
265
+ im_scale = float(target_size_max) / float(im_size_max)
266
+ im_scale_x = im_scale
267
+ im_scale_y = im_scale
268
+ else:
269
+ resize_h, resize_w = self.target_size
270
+ im_scale_y = resize_h / float(origin_shape[0])
271
+ im_scale_x = resize_w / float(origin_shape[1])
272
+ return im_scale_y, im_scale_x
273
+
274
+
275
+ class Resize(object):
276
+ def __init__(self, size=(640, 640), **kwargs):
277
+ self.size = size
278
+
279
+ def resize_image(self, img):
280
+ resize_h, resize_w = self.size
281
+ ori_h, ori_w = img.shape[:2] # (h, w, c)
282
+ ratio_h = float(resize_h) / ori_h
283
+ ratio_w = float(resize_w) / ori_w
284
+ img = cv2.resize(img, (int(resize_w), int(resize_h)))
285
+ return img, [ratio_h, ratio_w]
286
+
287
+ def __call__(self, data):
288
+ img = data['image']
289
+ if 'polys' in data:
290
+ text_polys = data['polys']
291
+
292
+ img_resize, [ratio_h, ratio_w] = self.resize_image(img)
293
+ if 'polys' in data:
294
+ new_boxes = []
295
+ for box in text_polys:
296
+ new_box = []
297
+ for cord in box:
298
+ new_box.append([cord[0] * ratio_w, cord[1] * ratio_h])
299
+ new_boxes.append(new_box)
300
+ data['polys'] = np.array(new_boxes, dtype=np.float32)
301
+ data['image'] = img_resize
302
+ return data
303
+
304
+
305
+ class DetResizeForTest(object):
306
+ def __init__(self, **kwargs):
307
+ super(DetResizeForTest, self).__init__()
308
+ self.resize_type = 0
309
+ self.keep_ratio = False
310
+ if 'image_shape' in kwargs:
311
+ self.image_shape = kwargs['image_shape']
312
+ self.resize_type = 1
313
+ if 'keep_ratio' in kwargs:
314
+ self.keep_ratio = kwargs['keep_ratio']
315
+ elif 'limit_side_len' in kwargs:
316
+ self.limit_side_len = kwargs['limit_side_len']
317
+ self.limit_type = kwargs.get('limit_type', 'min')
318
+ elif 'resize_long' in kwargs:
319
+ self.resize_type = 2
320
+ self.resize_long = kwargs.get('resize_long', 960)
321
+ else:
322
+ self.limit_side_len = 736
323
+ self.limit_type = 'min'
324
+
325
+ def __call__(self, data):
326
+ img = data['image']
327
+ src_h, src_w, _ = img.shape
328
+ if sum([src_h, src_w]) < 64:
329
+ img = self.image_padding(img)
330
+
331
+ if self.resize_type == 0:
332
+ # img, shape = self.resize_image_type0(img)
333
+ img, [ratio_h, ratio_w] = self.resize_image_type0(img)
334
+ elif self.resize_type == 2:
335
+ img, [ratio_h, ratio_w] = self.resize_image_type2(img)
336
+ else:
337
+ # img, shape = self.resize_image_type1(img)
338
+ img, [ratio_h, ratio_w] = self.resize_image_type1(img)
339
+ data['image'] = img
340
+ data['shape'] = np.array([src_h, src_w, ratio_h, ratio_w])
341
+ return data
342
+
343
+ def image_padding(self, im, value=0):
344
+ h, w, c = im.shape
345
+ im_pad = np.zeros((max(32, h), max(32, w), c), np.uint8) + value
346
+ im_pad[:h, :w, :] = im
347
+ return im_pad
348
+
349
+ def resize_image_type1(self, img):
350
+ resize_h, resize_w = self.image_shape
351
+ ori_h, ori_w = img.shape[:2] # (h, w, c)
352
+ if self.keep_ratio is True:
353
+ resize_w = ori_w * resize_h / ori_h
354
+ N = math.ceil(resize_w / 32)
355
+ resize_w = N * 32
356
+ ratio_h = float(resize_h) / ori_h
357
+ ratio_w = float(resize_w) / ori_w
358
+ img = cv2.resize(img, (int(resize_w), int(resize_h)))
359
+ # return img, np.array([ori_h, ori_w])
360
+ return img, [ratio_h, ratio_w]
361
+
362
+ def resize_image_type0(self, img):
363
+ """
364
+ resize image to a size multiple of 32 which is required by the network
365
+ args:
366
+ img(array): array with shape [h, w, c]
367
+ return(tuple):
368
+ img, (ratio_h, ratio_w)
369
+ """
370
+ limit_side_len = self.limit_side_len
371
+ h, w, c = img.shape
372
+
373
+ # limit the max side
374
+ if self.limit_type == 'max':
375
+ if max(h, w) > limit_side_len:
376
+ if h > w:
377
+ ratio = float(limit_side_len) / h
378
+ else:
379
+ ratio = float(limit_side_len) / w
380
+ else:
381
+ ratio = 1.
382
+ elif self.limit_type == 'min':
383
+ if min(h, w) < limit_side_len:
384
+ if h < w:
385
+ ratio = float(limit_side_len) / h
386
+ else:
387
+ ratio = float(limit_side_len) / w
388
+ else:
389
+ ratio = 1.
390
+ elif self.limit_type == 'resize_long':
391
+ ratio = float(limit_side_len) / max(h, w)
392
+ else:
393
+ raise Exception('not support limit type, image ')
394
+ resize_h = int(h * ratio)
395
+ resize_w = int(w * ratio)
396
+
397
+ resize_h = max(int(round(resize_h / 32) * 32), 32)
398
+ resize_w = max(int(round(resize_w / 32) * 32), 32)
399
+
400
+ try:
401
+ if int(resize_w) <= 0 or int(resize_h) <= 0:
402
+ return None, (None, None)
403
+ img = cv2.resize(img, (int(resize_w), int(resize_h)))
404
+ except BaseException:
405
+ print(img.shape, resize_w, resize_h)
406
+ sys.exit(0)
407
+ ratio_h = resize_h / float(h)
408
+ ratio_w = resize_w / float(w)
409
+ return img, [ratio_h, ratio_w]
410
+
411
+ def resize_image_type2(self, img):
412
+ h, w, _ = img.shape
413
+
414
+ resize_w = w
415
+ resize_h = h
416
+
417
+ if resize_h > resize_w:
418
+ ratio = float(self.resize_long) / resize_h
419
+ else:
420
+ ratio = float(self.resize_long) / resize_w
421
+
422
+ resize_h = int(resize_h * ratio)
423
+ resize_w = int(resize_w * ratio)
424
+
425
+ max_stride = 128
426
+ resize_h = (resize_h + max_stride - 1) // max_stride * max_stride
427
+ resize_w = (resize_w + max_stride - 1) // max_stride * max_stride
428
+ img = cv2.resize(img, (int(resize_w), int(resize_h)))
429
+ ratio_h = resize_h / float(h)
430
+ ratio_w = resize_w / float(w)
431
+
432
+ return img, [ratio_h, ratio_w]
433
+
434
+
435
+ class E2EResizeForTest(object):
436
+ def __init__(self, **kwargs):
437
+ super(E2EResizeForTest, self).__init__()
438
+ self.max_side_len = kwargs['max_side_len']
439
+ self.valid_set = kwargs['valid_set']
440
+
441
+ def __call__(self, data):
442
+ img = data['image']
443
+ src_h, src_w, _ = img.shape
444
+ if self.valid_set == 'totaltext':
445
+ im_resized, [ratio_h, ratio_w] = self.resize_image_for_totaltext(
446
+ img, max_side_len=self.max_side_len)
447
+ else:
448
+ im_resized, (ratio_h, ratio_w) = self.resize_image(
449
+ img, max_side_len=self.max_side_len)
450
+ data['image'] = im_resized
451
+ data['shape'] = np.array([src_h, src_w, ratio_h, ratio_w])
452
+ return data
453
+
454
+ def resize_image_for_totaltext(self, im, max_side_len=512):
455
+
456
+ h, w, _ = im.shape
457
+ resize_w = w
458
+ resize_h = h
459
+ ratio = 1.25
460
+ if h * ratio > max_side_len:
461
+ ratio = float(max_side_len) / resize_h
462
+ resize_h = int(resize_h * ratio)
463
+ resize_w = int(resize_w * ratio)
464
+
465
+ max_stride = 128
466
+ resize_h = (resize_h + max_stride - 1) // max_stride * max_stride
467
+ resize_w = (resize_w + max_stride - 1) // max_stride * max_stride
468
+ im = cv2.resize(im, (int(resize_w), int(resize_h)))
469
+ ratio_h = resize_h / float(h)
470
+ ratio_w = resize_w / float(w)
471
+ return im, (ratio_h, ratio_w)
472
+
473
+ def resize_image(self, im, max_side_len=512):
474
+ """
475
+ resize image to a size multiple of max_stride which is required by the network
476
+ :param im: the resized image
477
+ :param max_side_len: limit of max image size to avoid out of memory in gpu
478
+ :return: the resized image and the resize ratio
479
+ """
480
+ h, w, _ = im.shape
481
+
482
+ resize_w = w
483
+ resize_h = h
484
+
485
+ # Fix the longer side
486
+ if resize_h > resize_w:
487
+ ratio = float(max_side_len) / resize_h
488
+ else:
489
+ ratio = float(max_side_len) / resize_w
490
+
491
+ resize_h = int(resize_h * ratio)
492
+ resize_w = int(resize_w * ratio)
493
+
494
+ max_stride = 128
495
+ resize_h = (resize_h + max_stride - 1) // max_stride * max_stride
496
+ resize_w = (resize_w + max_stride - 1) // max_stride * max_stride
497
+ im = cv2.resize(im, (int(resize_w), int(resize_h)))
498
+ ratio_h = resize_h / float(h)
499
+ ratio_w = resize_w / float(w)
500
+
501
+ return im, (ratio_h, ratio_w)
502
+
503
+
504
+ class KieResize(object):
505
+ def __init__(self, **kwargs):
506
+ super(KieResize, self).__init__()
507
+ self.max_side, self.min_side = kwargs['img_scale'][0], kwargs[
508
+ 'img_scale'][1]
509
+
510
+ def __call__(self, data):
511
+ img = data['image']
512
+ points = data['points']
513
+ src_h, src_w, _ = img.shape
514
+ im_resized, scale_factor, [ratio_h, ratio_w
515
+ ], [new_h, new_w] = self.resize_image(img)
516
+ resize_points = self.resize_boxes(img, points, scale_factor)
517
+ data['ori_image'] = img
518
+ data['ori_boxes'] = points
519
+ data['points'] = resize_points
520
+ data['image'] = im_resized
521
+ data['shape'] = np.array([new_h, new_w])
522
+ return data
523
+
524
+ def resize_image(self, img):
525
+ norm_img = np.zeros([1024, 1024, 3], dtype='float32')
526
+ scale = [512, 1024]
527
+ h, w = img.shape[:2]
528
+ max_long_edge = max(scale)
529
+ max_short_edge = min(scale)
530
+ scale_factor = min(max_long_edge / max(h, w),
531
+ max_short_edge / min(h, w))
532
+ resize_w, resize_h = int(w * float(scale_factor) + 0.5), int(h * float(
533
+ scale_factor) + 0.5)
534
+ max_stride = 32
535
+ resize_h = (resize_h + max_stride - 1) // max_stride * max_stride
536
+ resize_w = (resize_w + max_stride - 1) // max_stride * max_stride
537
+ im = cv2.resize(img, (resize_w, resize_h))
538
+ new_h, new_w = im.shape[:2]
539
+ w_scale = new_w / w
540
+ h_scale = new_h / h
541
+ scale_factor = np.array(
542
+ [w_scale, h_scale, w_scale, h_scale], dtype=np.float32)
543
+ norm_img[:new_h, :new_w, :] = im
544
+ return norm_img, scale_factor, [h_scale, w_scale], [new_h, new_w]
545
+
546
+ def resize_boxes(self, im, points, scale_factor):
547
+ points = points * scale_factor
548
+ img_shape = im.shape[:2]
549
+ points[:, 0::2] = np.clip(points[:, 0::2], 0, img_shape[1])
550
+ points[:, 1::2] = np.clip(points[:, 1::2], 0, img_shape[0])
551
+ return points
552
+
553
+
554
+ class SRResize(object):
555
+ def __init__(self,
556
+ imgH=32,
557
+ imgW=128,
558
+ down_sample_scale=4,
559
+ keep_ratio=False,
560
+ min_ratio=1,
561
+ mask=False,
562
+ infer_mode=False,
563
+ **kwargs):
564
+ self.imgH = imgH
565
+ self.imgW = imgW
566
+ self.keep_ratio = keep_ratio
567
+ self.min_ratio = min_ratio
568
+ self.down_sample_scale = down_sample_scale
569
+ self.mask = mask
570
+ self.infer_mode = infer_mode
571
+
572
+ def __call__(self, data):
573
+ imgH = self.imgH
574
+ imgW = self.imgW
575
+ images_lr = data["image_lr"]
576
+ transform2 = ResizeNormalize(
577
+ (imgW // self.down_sample_scale, imgH // self.down_sample_scale))
578
+ images_lr = transform2(images_lr)
579
+ data["img_lr"] = images_lr
580
+ if self.infer_mode:
581
+ return data
582
+
583
+ images_HR = data["image_hr"]
584
+ label_strs = data["label"]
585
+ transform = ResizeNormalize((imgW, imgH))
586
+ images_HR = transform(images_HR)
587
+ data["img_hr"] = images_HR
588
+ return data
589
+
590
+
591
+ class ResizeNormalize(object):
592
+ def __init__(self, size, interpolation=Image.BICUBIC):
593
+ self.size = size
594
+ self.interpolation = interpolation
595
+
596
+ def __call__(self, img):
597
+ img = img.resize(self.size, self.interpolation)
598
+ img_numpy = np.array(img).astype("float32")
599
+ img_numpy = img_numpy.transpose((2, 0, 1)) / 255
600
+ return img_numpy
601
+
602
+
603
+ class GrayImageChannelFormat(object):
604
+ """
605
+ format gray scale image's channel: (3,h,w) -> (1,h,w)
606
+ Args:
607
+ inverse: inverse gray image
608
+ """
609
+
610
+ def __init__(self, inverse=False, **kwargs):
611
+ self.inverse = inverse
612
+
613
+ def __call__(self, data):
614
+ img = data['image']
615
+ img_single_channel = cv2.cvtColor(img, cv2.COLOR_BGR2GRAY)
616
+ img_expanded = np.expand_dims(img_single_channel, 0)
617
+
618
+ if self.inverse:
619
+ data['image'] = np.abs(img_expanded - 1)
620
+ else:
621
+ data['image'] = img_expanded
622
+
623
+ data['src_image'] = img
624
+ return data
625
+
626
+
627
+ class Permute(object):
628
+ """permute image
629
+ Args:
630
+ to_bgr (bool): whether convert RGB to BGR
631
+ channel_first (bool): whether convert HWC to CHW
632
+ """
633
+
634
+ def __init__(self, ):
635
+ super(Permute, self).__init__()
636
+
637
+ def __call__(self, im, im_info):
638
+ """
639
+ Args:
640
+ im (np.ndarray): image (np.ndarray)
641
+ im_info (dict): info of image
642
+ Returns:
643
+ im (np.ndarray): processed image (np.ndarray)
644
+ im_info (dict): info of processed image
645
+ """
646
+ im = im.transpose((2, 0, 1)).copy()
647
+ return im, im_info
648
+
649
+
650
+ class PadStride(object):
651
+ """ padding image for model with FPN, instead PadBatch(pad_to_stride) in original config
652
+ Args:
653
+ stride (bool): model with FPN need image shape % stride == 0
654
+ """
655
+
656
+ def __init__(self, stride=0):
657
+ self.coarsest_stride = stride
658
+
659
+ def __call__(self, im, im_info):
660
+ """
661
+ Args:
662
+ im (np.ndarray): image (np.ndarray)
663
+ im_info (dict): info of image
664
+ Returns:
665
+ im (np.ndarray): processed image (np.ndarray)
666
+ im_info (dict): info of processed image
667
+ """
668
+ coarsest_stride = self.coarsest_stride
669
+ if coarsest_stride <= 0:
670
+ return im, im_info
671
+ im_c, im_h, im_w = im.shape
672
+ pad_h = int(np.ceil(float(im_h) / coarsest_stride) * coarsest_stride)
673
+ pad_w = int(np.ceil(float(im_w) / coarsest_stride) * coarsest_stride)
674
+ padding_im = np.zeros((im_c, pad_h, pad_w), dtype=np.float32)
675
+ padding_im[:, :im_h, :im_w] = im
676
+ return padding_im, im_info
677
+
678
+
679
+ def decode_image(im_file, im_info):
680
+ """read rgb image
681
+ Args:
682
+ im_file (str|np.ndarray): input can be image path or np.ndarray
683
+ im_info (dict): info of image
684
+ Returns:
685
+ im (np.ndarray): processed image (np.ndarray)
686
+ im_info (dict): info of processed image
687
+ """
688
+ if isinstance(im_file, str):
689
+ with open(im_file, 'rb') as f:
690
+ im_read = f.read()
691
+ data = np.frombuffer(im_read, dtype='uint8')
692
+ im = cv2.imdecode(data, 1) # BGR mode, but need RGB mode
693
+ im = cv2.cvtColor(im, cv2.COLOR_BGR2RGB)
694
+ else:
695
+ im = im_file
696
+ im_info['im_shape'] = np.array(im.shape[:2], dtype=np.float32)
697
+ im_info['scale_factor'] = np.array([1., 1.], dtype=np.float32)
698
+ return im, im_info
699
+
700
+
701
+ def preprocess(im, preprocess_ops):
702
+ # process image by preprocess_ops
703
+ im_info = {
704
+ 'scale_factor': np.array(
705
+ [1., 1.], dtype=np.float32),
706
+ 'im_shape': None,
707
+ }
708
+ im, im_info = decode_image(im, im_info)
709
+ for operator in preprocess_ops:
710
+ im, im_info = operator(im, im_info)
711
+ return im, im_info
deepdoc/vision/postprocess.py ADDED
@@ -0,0 +1,353 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ import copy
2
+
3
+ import numpy as np
4
+ import cv2
5
+ from shapely.geometry import Polygon
6
+ import pyclipper
7
+
8
+
9
+ def build_post_process(config, global_config=None):
10
+ support_dict = ['DBPostProcess', 'CTCLabelDecode']
11
+
12
+ config = copy.deepcopy(config)
13
+ module_name = config.pop('name')
14
+ if module_name == "None":
15
+ return
16
+ if global_config is not None:
17
+ config.update(global_config)
18
+ assert module_name in support_dict, Exception(
19
+ 'post process only support {}'.format(support_dict))
20
+ module_class = eval(module_name)(**config)
21
+ return module_class
22
+
23
+
24
+ class DBPostProcess(object):
25
+ """
26
+ The post process for Differentiable Binarization (DB).
27
+ """
28
+
29
+ def __init__(self,
30
+ thresh=0.3,
31
+ box_thresh=0.7,
32
+ max_candidates=1000,
33
+ unclip_ratio=2.0,
34
+ use_dilation=False,
35
+ score_mode="fast",
36
+ box_type='quad',
37
+ **kwargs):
38
+ self.thresh = thresh
39
+ self.box_thresh = box_thresh
40
+ self.max_candidates = max_candidates
41
+ self.unclip_ratio = unclip_ratio
42
+ self.min_size = 3
43
+ self.score_mode = score_mode
44
+ self.box_type = box_type
45
+ assert score_mode in [
46
+ "slow", "fast"
47
+ ], "Score mode must be in [slow, fast] but got: {}".format(score_mode)
48
+
49
+ self.dilation_kernel = None if not use_dilation else np.array(
50
+ [[1, 1], [1, 1]])
51
+
52
+ def polygons_from_bitmap(self, pred, _bitmap, dest_width, dest_height):
53
+ '''
54
+ _bitmap: single map with shape (1, H, W),
55
+ whose values are binarized as {0, 1}
56
+ '''
57
+
58
+ bitmap = _bitmap
59
+ height, width = bitmap.shape
60
+
61
+ boxes = []
62
+ scores = []
63
+
64
+ contours, _ = cv2.findContours((bitmap * 255).astype(np.uint8),
65
+ cv2.RETR_LIST, cv2.CHAIN_APPROX_SIMPLE)
66
+
67
+ for contour in contours[:self.max_candidates]:
68
+ epsilon = 0.002 * cv2.arcLength(contour, True)
69
+ approx = cv2.approxPolyDP(contour, epsilon, True)
70
+ points = approx.reshape((-1, 2))
71
+ if points.shape[0] < 4:
72
+ continue
73
+
74
+ score = self.box_score_fast(pred, points.reshape(-1, 2))
75
+ if self.box_thresh > score:
76
+ continue
77
+
78
+ if points.shape[0] > 2:
79
+ box = self.unclip(points, self.unclip_ratio)
80
+ if len(box) > 1:
81
+ continue
82
+ else:
83
+ continue
84
+ box = box.reshape(-1, 2)
85
+
86
+ _, sside = self.get_mini_boxes(box.reshape((-1, 1, 2)))
87
+ if sside < self.min_size + 2:
88
+ continue
89
+
90
+ box = np.array(box)
91
+ box[:, 0] = np.clip(
92
+ np.round(box[:, 0] / width * dest_width), 0, dest_width)
93
+ box[:, 1] = np.clip(
94
+ np.round(box[:, 1] / height * dest_height), 0, dest_height)
95
+ boxes.append(box.tolist())
96
+ scores.append(score)
97
+ return boxes, scores
98
+
99
+ def boxes_from_bitmap(self, pred, _bitmap, dest_width, dest_height):
100
+ '''
101
+ _bitmap: single map with shape (1, H, W),
102
+ whose values are binarized as {0, 1}
103
+ '''
104
+
105
+ bitmap = _bitmap
106
+ height, width = bitmap.shape
107
+
108
+ outs = cv2.findContours((bitmap * 255).astype(np.uint8), cv2.RETR_LIST,
109
+ cv2.CHAIN_APPROX_SIMPLE)
110
+ if len(outs) == 3:
111
+ img, contours, _ = outs[0], outs[1], outs[2]
112
+ elif len(outs) == 2:
113
+ contours, _ = outs[0], outs[1]
114
+
115
+ num_contours = min(len(contours), self.max_candidates)
116
+
117
+ boxes = []
118
+ scores = []
119
+ for index in range(num_contours):
120
+ contour = contours[index]
121
+ points, sside = self.get_mini_boxes(contour)
122
+ if sside < self.min_size:
123
+ continue
124
+ points = np.array(points)
125
+ if self.score_mode == "fast":
126
+ score = self.box_score_fast(pred, points.reshape(-1, 2))
127
+ else:
128
+ score = self.box_score_slow(pred, contour)
129
+ if self.box_thresh > score:
130
+ continue
131
+
132
+ box = self.unclip(points, self.unclip_ratio).reshape(-1, 1, 2)
133
+ box, sside = self.get_mini_boxes(box)
134
+ if sside < self.min_size + 2:
135
+ continue
136
+ box = np.array(box)
137
+
138
+ box[:, 0] = np.clip(
139
+ np.round(box[:, 0] / width * dest_width), 0, dest_width)
140
+ box[:, 1] = np.clip(
141
+ np.round(box[:, 1] / height * dest_height), 0, dest_height)
142
+ boxes.append(box.astype("int32"))
143
+ scores.append(score)
144
+ return np.array(boxes, dtype="int32"), scores
145
+
146
+ def unclip(self, box, unclip_ratio):
147
+ poly = Polygon(box)
148
+ distance = poly.area * unclip_ratio / poly.length
149
+ offset = pyclipper.PyclipperOffset()
150
+ offset.AddPath(box, pyclipper.JT_ROUND, pyclipper.ET_CLOSEDPOLYGON)
151
+ expanded = np.array(offset.Execute(distance))
152
+ return expanded
153
+
154
+ def get_mini_boxes(self, contour):
155
+ bounding_box = cv2.minAreaRect(contour)
156
+ points = sorted(list(cv2.boxPoints(bounding_box)), key=lambda x: x[0])
157
+
158
+ index_1, index_2, index_3, index_4 = 0, 1, 2, 3
159
+ if points[1][1] > points[0][1]:
160
+ index_1 = 0
161
+ index_4 = 1
162
+ else:
163
+ index_1 = 1
164
+ index_4 = 0
165
+ if points[3][1] > points[2][1]:
166
+ index_2 = 2
167
+ index_3 = 3
168
+ else:
169
+ index_2 = 3
170
+ index_3 = 2
171
+
172
+ box = [
173
+ points[index_1], points[index_2], points[index_3], points[index_4]
174
+ ]
175
+ return box, min(bounding_box[1])
176
+
177
+ def box_score_fast(self, bitmap, _box):
178
+ '''
179
+ box_score_fast: use bbox mean score as the mean score
180
+ '''
181
+ h, w = bitmap.shape[:2]
182
+ box = _box.copy()
183
+ xmin = np.clip(np.floor(box[:, 0].min()).astype("int32"), 0, w - 1)
184
+ xmax = np.clip(np.ceil(box[:, 0].max()).astype("int32"), 0, w - 1)
185
+ ymin = np.clip(np.floor(box[:, 1].min()).astype("int32"), 0, h - 1)
186
+ ymax = np.clip(np.ceil(box[:, 1].max()).astype("int32"), 0, h - 1)
187
+
188
+ mask = np.zeros((ymax - ymin + 1, xmax - xmin + 1), dtype=np.uint8)
189
+ box[:, 0] = box[:, 0] - xmin
190
+ box[:, 1] = box[:, 1] - ymin
191
+ cv2.fillPoly(mask, box.reshape(1, -1, 2).astype("int32"), 1)
192
+ return cv2.mean(bitmap[ymin:ymax + 1, xmin:xmax + 1], mask)[0]
193
+
194
+ def box_score_slow(self, bitmap, contour):
195
+ '''
196
+ box_score_slow: use polyon mean score as the mean score
197
+ '''
198
+ h, w = bitmap.shape[:2]
199
+ contour = contour.copy()
200
+ contour = np.reshape(contour, (-1, 2))
201
+
202
+ xmin = np.clip(np.min(contour[:, 0]), 0, w - 1)
203
+ xmax = np.clip(np.max(contour[:, 0]), 0, w - 1)
204
+ ymin = np.clip(np.min(contour[:, 1]), 0, h - 1)
205
+ ymax = np.clip(np.max(contour[:, 1]), 0, h - 1)
206
+
207
+ mask = np.zeros((ymax - ymin + 1, xmax - xmin + 1), dtype=np.uint8)
208
+
209
+ contour[:, 0] = contour[:, 0] - xmin
210
+ contour[:, 1] = contour[:, 1] - ymin
211
+
212
+ cv2.fillPoly(mask, contour.reshape(1, -1, 2).astype("int32"), 1)
213
+ return cv2.mean(bitmap[ymin:ymax + 1, xmin:xmax + 1], mask)[0]
214
+
215
    def __call__(self, outs_dict, shape_list):
        """Turn raw DB probability maps into per-image text boxes.

        outs_dict['maps'] is the network output; shape_list holds
        (src_h, src_w, ratio_h, ratio_w) per image so boxes can be mapped
        back to source coordinates. Returns a list of {'points': boxes}
        dicts, one per image in the batch.
        """
        pred = outs_dict['maps']
        if not isinstance(pred, np.ndarray):
            pred = pred.numpy()
        # Channel 0 carries the text probability map.
        pred = pred[:, 0, :, :]
        # Binarise at self.thresh before extracting contours.
        segmentation = pred > self.thresh

        boxes_batch = []
        for batch_index in range(pred.shape[0]):
            src_h, src_w, ratio_h, ratio_w = shape_list[batch_index]
            if self.dilation_kernel is not None:
                # Optional dilation joins fragmented character regions.
                mask = cv2.dilate(
                    np.array(segmentation[batch_index]).astype(np.uint8),
                    self.dilation_kernel)
            else:
                mask = segmentation[batch_index]
            # 'poly' yields free-form polygons, 'quad' four-point boxes.
            if self.box_type == 'poly':
                boxes, scores = self.polygons_from_bitmap(pred[batch_index],
                                                          mask, src_w, src_h)
            elif self.box_type == 'quad':
                boxes, scores = self.boxes_from_bitmap(pred[batch_index], mask,
                                                       src_w, src_h)
            else:
                raise ValueError(
                    "box_type can only be one of ['quad', 'poly']")

            # Scores are computed but intentionally not returned here.
            boxes_batch.append({'points': boxes})
        return boxes_batch
243
+
244
+
245
class BaseRecLabelDecode(object):
    """Map recognizer output indices back to text strings.

    Holds the character table (the built-in digits+lowercase fallback or
    one loaded from a dictionary file) and turns index sequences plus
    optional probabilities into ``(text, mean_confidence)`` pairs.
    """

    def __init__(self, character_dict_path=None, use_space_char=False):
        self.beg_str = "sos"
        self.end_str = "eos"
        # Arabic dictionaries trigger right-to-left reordering of output.
        self.reverse = False
        self.character_str = []

        if character_dict_path is None:
            # Built-in fallback charset: digits + lowercase ASCII letters.
            self.character_str = "0123456789abcdefghijklmnopqrstuvwxyz"
            dict_character = list(self.character_str)
        else:
            with open(character_dict_path, "rb") as fin:
                for raw in fin.readlines():
                    self.character_str.append(
                        raw.decode('utf-8').strip("\n").strip("\r\n"))
            if use_space_char:
                self.character_str.append(" ")
            dict_character = list(self.character_str)
            if 'arabic' in character_dict_path:
                self.reverse = True

        dict_character = self.add_special_char(dict_character)
        # char -> index lookup; index -> char via self.character.
        self.dict = {ch: idx for idx, ch in enumerate(dict_character)}
        self.character = dict_character

    def pred_reverse(self, pred):
        """Reorder a mixed RTL/LTR string for Arabic output: runs of
        latin/digit/punctuation characters stay intact while the overall
        sequence of segments is reversed."""
        segments = []
        run = ''
        for ch in pred:
            if re.search('[a-zA-Z0-9 :*./%+-]', ch):
                run += ch
            else:
                if run != '':
                    segments.append(run)
                    run = ''
                segments.append(ch)
        if run != '':
            segments.append(run)
        return ''.join(reversed(segments))

    def add_special_char(self, dict_character):
        """Hook for subclasses to insert special tokens (e.g. CTC blank)."""
        return dict_character

    def decode(self, text_index, text_prob=None, is_remove_duplicate=False):
        """Convert per-sample index sequences into (text, mean_confidence)."""
        results = []
        ignored = self.get_ignored_tokens()
        for sample_idx in range(len(text_index)):
            indices = text_index[sample_idx]
            keep = np.ones(len(indices), dtype=bool)
            if is_remove_duplicate:
                # CTC-style collapse: drop an index equal to its predecessor.
                keep[1:] = indices[1:] != indices[:-1]
            for token in ignored:
                keep &= indices != token

            chars = [self.character[i] for i in indices[keep]]
            if text_prob is not None:
                confidences = text_prob[sample_idx][keep]
            else:
                confidences = [1] * len(keep)
            if len(confidences) == 0:
                confidences = [0]

            text = ''.join(chars)
            if self.reverse:  # Arabic: restore right-to-left ordering
                text = self.pred_reverse(text)

            results.append((text, np.mean(confidences).tolist()))
        return results

    def get_ignored_tokens(self):
        """Indices excluded from decoded output (0 is the CTC blank)."""
        return [0]  # for ctc blank
328
+
329
+
330
class CTCLabelDecode(BaseRecLabelDecode):
    """CTC decoder: greedy argmax per timestep, collapse repeats, drop the
    blank token, then map indices to characters."""

    def __init__(self, character_dict_path=None, use_space_char=False,
                 **kwargs):
        super(CTCLabelDecode, self).__init__(character_dict_path,
                                             use_space_char)

    def __call__(self, preds, label=None, *args, **kwargs):
        # Some networks return several heads; the last one is the CTC map.
        if isinstance(preds, (tuple, list)):
            preds = preds[-1]
        if not isinstance(preds, np.ndarray):
            preds = preds.numpy()
        best_idx = preds.argmax(axis=2)
        best_prob = preds.max(axis=2)
        decoded = self.decode(best_idx, best_prob, is_remove_duplicate=True)
        if label is None:
            return decoded
        return decoded, self.decode(label)

    def add_special_char(self, dict_character):
        # Reserve index 0 for the CTC blank token.
        return ['blank'] + dict_character
deepdoc/vision/ragFlow.py ADDED
@@ -0,0 +1,313 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ import copy
2
+ import time
3
+ import os
4
+
5
+ from huggingface_hub import snapshot_download
6
+
7
+ from .operators import *
8
+ import numpy as np
9
+ import onnxruntime as ort
10
+ import logging
11
+ from .postprocess import build_post_process
12
+
13
+ from typing import List
14
+
15
def get_deepdoc_directory():
    """Return the absolute path of the deepdoc package root, i.e. the
    parent of the directory containing this module."""
    module_dir = os.path.dirname(os.path.realpath(__file__))
    return os.path.abspath(os.path.join(module_dir, os.pardir))
23
def transform(data, ops=None):
    """Run *data* through a pipeline of callables in order, short-circuiting
    to None as soon as any stage returns None."""
    for op in (ops or []):
        data = op(data)
        if data is None:
            return None
    return data
32
+
33
+
34
def create_operators(op_param_list, global_config=None):
    """Instantiate preprocessing operators from a config list.

    Each entry is a single-key dict ``{OpName: params}``; the name is
    resolved to a class in the surrounding namespace and instantiated
    with params (merged with *global_config* when given).
    """
    assert isinstance(
        op_param_list, list), ('operator config should be a list')
    built = []
    for entry in op_param_list:
        assert isinstance(entry, dict) and len(entry) == 1, "yaml format error"
        name = next(iter(entry))
        params = {} if entry[name] is None else entry[name]
        if global_config is not None:
            params.update(global_config)
        # NOTE(review): eval() resolves the operator class by name from the
        # enclosing namespace; operator configs must come from trusted
        # sources only.
        built.append(eval(name)(**params))
    return built
54
+
55
+
56
def load_model(model_dir, nm):
    """Create an ONNX Runtime CPU inference session for ``<model_dir>/<nm>.onnx``.

    Args:
        model_dir: directory holding the .onnx model files.
        nm: model base name without extension (e.g. 'det', 'rec').

    Returns:
        (session, first_input): the ``InferenceSession`` and its first
        input descriptor (used later for the feed name and static shape).

    Raises:
        ValueError: if the model file does not exist.
    """
    model_file_path = os.path.join(model_dir, nm + ".onnx")
    if not os.path.exists(model_file_path):
        raise ValueError("not find model file path {}".format(
            model_file_path))

    options = ort.SessionOptions()
    options.enable_cpu_mem_arena = False
    options.execution_mode = ort.ExecutionMode.ORT_SEQUENTIAL
    # Keep thread usage modest so several models can coexist in one process.
    options.intra_op_num_threads = 2
    options.inter_op_num_threads = 2
    # The original code carried a GPU branch guarded by a constant
    # ``if False`` and therefore unreachable; inference is CPU-only here.
    sess = ort.InferenceSession(
        model_file_path,
        options=options,
        providers=['CPUExecutionProvider'])
    print(model_file_path)
    print(sess.get_modelmeta().description)
    return sess, sess.get_inputs()[0]
80
+
81
+
82
class RagFlowTextDetector:
    """DBNet-style text detector backed by an ONNX model.

    Bundles the preprocessing pipeline (resize / normalize / CHW), the
    ONNX session and DB post-processing; calling an instance on an image
    returns quadrilateral text boxes plus the elapsed time.
    """

    def __init__(self, model_dir):
        # Default preprocessing pipeline; the resize op is replaced below
        # when the model declares a concrete static input shape.
        pre_process_list = [{
            'DetResizeForTest': {
                'limit_side_len': 960,
                'limit_type': "max",
            }
        }, {
            'NormalizeImage': {
                'std': [0.229, 0.224, 0.225],
                'mean': [0.485, 0.456, 0.406],
                'scale': '1./255.',
                'order': 'hwc'
            }
        }, {
            'ToCHWImage': None
        }, {
            'KeepKeys': {
                'keep_keys': ['image', 'shape']
            }
        }]
        postprocess_params = {"name": "DBPostProcess", "thresh": 0.3,
                              "box_thresh": 0.5, "max_candidates": 1000,
                              "unclip_ratio": 1.5, "use_dilation": False,
                              "score_mode": "fast", "box_type": "quad"}

        self.postprocess_op = build_post_process(postprocess_params)
        self.predictor, self.input_tensor = load_model(model_dir, 'det')

        # Symbolic dims come back as strings (dynamic shape) -> keep the
        # default resize; concrete positive ints -> fixed-size resize.
        img_h, img_w = self.input_tensor.shape[2:]
        if isinstance(img_h, str) or isinstance(img_w, str):
            pass
        elif img_h is not None and img_w is not None and img_h > 0 and img_w > 0:
            pre_process_list[0] = {
                'DetResizeForTest': {
                    'image_shape': [img_h, img_w]
                }
            }
        self.preprocess_op = create_operators(pre_process_list)

    def order_points_clockwise(self, pts):
        """Order 4 points as top-left, top-right, bottom-right, bottom-left."""
        rect = np.zeros((4, 2), dtype="float32")
        # Top-left has the smallest x+y sum, bottom-right the largest.
        s = pts.sum(axis=1)
        rect[0] = pts[np.argmin(s)]
        rect[2] = pts[np.argmax(s)]
        # Of the remaining two, top-right has the smallest (y - x) diff.
        tmp = np.delete(pts, (np.argmin(s), np.argmax(s)), axis=0)
        diff = np.diff(np.array(tmp), axis=1)
        rect[1] = tmp[np.argmin(diff)]
        rect[3] = tmp[np.argmax(diff)]
        return rect

    def clip_det_res(self, points, img_height, img_width):
        """Clamp box corners (in place) to the image bounds."""
        for pno in range(points.shape[0]):
            points[pno, 0] = int(min(max(points[pno, 0], 0), img_width - 1))
            points[pno, 1] = int(min(max(points[pno, 1], 0), img_height - 1))
        return points

    def filter_tag_det_res(self, dt_boxes, image_shape):
        """Normalize point order, clip to the image, and drop tiny boxes."""
        img_height, img_width = image_shape[0:2]
        dt_boxes_new = []
        for box in dt_boxes:
            if isinstance(box, list):
                box = np.array(box)
            box = self.order_points_clockwise(box)
            box = self.clip_det_res(box, img_height, img_width)
            # Discard degenerate detections 3 px wide/high or smaller.
            rect_width = int(np.linalg.norm(box[0] - box[1]))
            rect_height = int(np.linalg.norm(box[0] - box[3]))
            if rect_width <= 3 or rect_height <= 3:
                continue
            dt_boxes_new.append(box)
        return np.array(dt_boxes_new)

    def filter_tag_det_res_only_clip(self, dt_boxes, image_shape):
        """Clip boxes to the image without reordering or size filtering."""
        img_height, img_width = image_shape[0:2]
        dt_boxes_new = []
        for box in dt_boxes:
            if isinstance(box, list):
                box = np.array(box)
            dt_boxes_new.append(self.clip_det_res(box, img_height, img_width))
        return np.array(dt_boxes_new)

    def __call__(self, img):
        """Detect text boxes in *img*.

        Returns:
            (boxes, elapsed): boxes as an (N, 4, 2) array in source-image
            coordinates, or (None, 0) when preprocessing rejects the image.
        """
        ori_im = img.copy()
        data = {'image': img}

        st = time.time()
        data = transform(data, self.preprocess_op)
        img, shape_list = data
        if img is None:
            return None, 0
        img = np.expand_dims(img, axis=0)
        shape_list = np.expand_dims(shape_list, axis=0)
        img = img.copy()
        input_dict = {self.input_tensor.name: img}
        # Retry transient runtime failures, 4 attempts in total with a 5 s
        # pause between them. (The original looped over range(100000) but
        # re-raised at i >= 3 — the same bound, stated explicitly here.)
        for attempt in range(4):
            try:
                outputs = self.predictor.run(None, input_dict)
                break
            except Exception:
                if attempt >= 3:
                    raise
                time.sleep(5)

        post_result = self.postprocess_op({"maps": outputs[0]}, shape_list)
        dt_boxes = post_result[0]['points']
        dt_boxes = self.filter_tag_det_res(dt_boxes, ori_im.shape)

        return dt_boxes, time.time() - st
195
+
196
+
197
class RagFlow():
    """Text-detection front end built on :class:`RagFlowTextDetector`.

    Loads the bundled ONNX models — downloading them from the
    ``InfiniFlow/deepdoc`` Hugging Face repo when the local copies are
    missing — and exposes box detection plus crop/sort helpers.
    """

    def __init__(self, model_dir=None):
        if not model_dir:
            try:
                model_dir = os.path.join(
                    get_deepdoc_directory(),
                    "models")
                self.text_detector = RagFlowTextDetector(model_dir)
            except Exception:
                # Local models missing or unreadable: fetch them from the
                # hub into the same location and retry once.
                model_dir = snapshot_download(
                    repo_id="InfiniFlow/deepdoc",
                    local_dir=os.path.join(get_deepdoc_directory(), "models"),
                    local_dir_use_symlinks=False)
                self.text_detector = RagFlowTextDetector(model_dir)
        else:
            # BUG FIX: an explicitly supplied model_dir previously left
            # self.text_detector undefined, breaking every later call.
            self.text_detector = RagFlowTextDetector(model_dir)

        # Recognition results below this confidence are discarded.
        self.drop_score = 0.5
        self.crop_image_res_index = 0

    def get_rotate_crop_image(self, img, points):
        """Perspective-crop a quadrilateral region to an axis-aligned patch.

        *points* is expected to be a 4x2 float32 array ordered tl, tr, br,
        bl (cv2.getPerspectiveTransform requires float32 — TODO confirm at
        call sites). Tall patches (h/w >= 1.5) are rotated 90 degrees so
        text lies horizontally.
        """
        assert len(points) == 4, "shape of points must be 4*2"
        img_crop_width = int(
            max(
                np.linalg.norm(points[0] - points[1]),
                np.linalg.norm(points[2] - points[3])))
        img_crop_height = int(
            max(
                np.linalg.norm(points[0] - points[3]),
                np.linalg.norm(points[1] - points[2])))
        pts_std = np.float32([[0, 0], [img_crop_width, 0],
                              [img_crop_width, img_crop_height],
                              [0, img_crop_height]])
        M = cv2.getPerspectiveTransform(points, pts_std)
        dst_img = cv2.warpPerspective(
            img,
            M, (img_crop_width, img_crop_height),
            borderMode=cv2.BORDER_REPLICATE,
            flags=cv2.INTER_CUBIC)
        dst_img_height, dst_img_width = dst_img.shape[0:2]
        if dst_img_height * 1.0 / dst_img_width >= 1.5:
            dst_img = np.rot90(dst_img)
        return dst_img

    def sorted_boxes(self, dt_boxes):
        """Sort text boxes top-to-bottom, then left-to-right within a line.

        Boxes whose top-left y coordinates differ by less than 10 px are
        treated as the same text line and bubbled into left-to-right order.

        args:
            dt_boxes(array): detected text boxes with shape [4, 2]
        return:
            sorted boxes(array) with shape [4, 2]
        """
        num_boxes = dt_boxes.shape[0]
        _boxes = list(sorted(dt_boxes, key=lambda x: (x[0][1], x[0][0])))

        for i in range(num_boxes - 1):
            for j in range(i, -1, -1):
                if abs(_boxes[j + 1][0][1] - _boxes[j][0][1]) < 10 and \
                        (_boxes[j + 1][0][0] < _boxes[j][0][0]):
                    _boxes[j], _boxes[j + 1] = _boxes[j + 1], _boxes[j]
                else:
                    break
        return _boxes

    def detect(self, img):
        """Detect boxes and pair each with an empty ("", 0) recognition stub.

        NOTE(review): returns ``(None, None, time_dict)`` for a None image
        but a single zip object otherwise — callers must handle both shapes.
        """
        time_dict = {'det': 0, 'rec': 0, 'cls': 0, 'all': 0}

        if img is None:
            return None, None, time_dict

        dt_boxes, elapse = self.text_detector(img)
        time_dict['det'] = elapse

        return zip(self.sorted_boxes(dt_boxes), [
            ("", 0) for _ in range(len(dt_boxes))])

    def recognize(self, ori_im, box):
        """OCR a single box; returns "" when confidence < drop_score.

        NOTE(review): relies on ``self.text_recognizer``, which is never
        assigned in ``__init__`` — calling this raises AttributeError
        unless a recognizer is attached to the instance externally.
        """
        img_crop = self.get_rotate_crop_image(ori_im, box)

        rec_res, elapse = self.text_recognizer([img_crop])
        text, score = rec_res[0]
        if score < self.drop_score:
            return ""
        return text

    def predict(self, img: np.ndarray = None) -> List[List[float]]:
        """Return detected bounding boxes in reading order — for each box,
        4 points of 2 coordinates."""
        dt_boxes, elapse = self.text_detector(img)
        return self.sorted_boxes(dt_boxes)
312
+
313
+
detectionAndOcrTable1.py ADDED
@@ -0,0 +1,425 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ from typing import Tuple, List, Sequence, Optional, Union
2
+ from torchvision import transforms
3
+ from torch import nn, Tensor
4
+ from PIL import Image
5
+ from pathlib import Path
6
+ from bs4 import BeautifulSoup as bs
7
+
8
+ import numpy as np
9
+ import numpy.typing as npt
10
+ from numpy import uint8
11
+ ImageType = npt.NDArray[uint8]
12
+ from transformers import AutoModelForObjectDetection
13
+ import torch
14
+ import matplotlib.pyplot as plt
15
+ import matplotlib.patches as patches
16
+ from matplotlib.patches import Patch
17
+
18
+ from unitable import UnitablePredictor
19
+ from doctrfiles import DoctrWordDetector,DoctrTextRecognizer
20
+ from utils import crop_an_Image,cropImageExtraMargin
21
+ from utils import denoisingAndSharpening
22
+
23
+ #based on this notebook:https://github.com/NielsRogge/Transformers-Tutorials/blob/master/Table%20Transformer/Inference_with_Table_Transformer_(TATR)_for_parsing_tables.ipynb
24
class MaxResize(object):
    """Resize transform that scales an image so its longer side equals
    ``max_size`` while preserving the aspect ratio."""

    def __init__(self, max_size=800):
        self.max_size = max_size

    def __call__(self, image):
        width, height = image.size
        longest = max(width, height)
        factor = self.max_size / longest
        new_size = (int(round(factor * width)), int(round(factor * height)))
        return image.resize(new_size)
35
+
36
+
37
def html_table_template(table):
    """Wrap rendered table rows in a minimal standalone HTML document with
    a small bordered-table stylesheet."""
    return f"""<html>
                 <head> <meta charset="UTF-8">
                 <style>
                 table, th, td {{
                     border: 1px solid black;
                     font-size: 10px;
                 }}
                 </style> </head>
                 <body>
                 <table frame="hsides" rules="groups" width="100%%">
                     {table}
                 </table> </body> </html>"""
52
+
53
class DetectionAndOcrTable1():
    """Hybrid table pipeline: Table Transformer locates tables on a page,
    Unitable predicts structure + cell boxes, and doctr detects/recognizes
    the text inside each cell. ``predict`` returns prettified HTML per table.
    """

    def __init__(self,englishFlag=True):
        # Structure predictor (HTML tokens + cell bboxes).
        self.unitablePredictor = UnitablePredictor()
        # Word-level detector used for multi-line header cells.
        self.wordDetector = DoctrWordDetector(architecture="db_resnet50",
                                    path_weights="doctrfiles/models/db_resnet50-79bd7d70.pt",
                                    path_config_json ="doctrfiles/models/db_resnet50_config.json")

        # englishFlag selects the recognizer checkpoint: MASTER (English)
        # vs multilingual PARSeq.
        if englishFlag:
            self.textRecognizer = DoctrTextRecognizer(architecture="master", path_weights="./doctrfiles/models/master-fde31e4a.pt",
                                                      path_config_json="./doctrfiles/models/master.json")
        else:
            self.textRecognizer = DoctrTextRecognizer(architecture="parseq", path_weights="./doctrfiles/models/doctr-multilingual-parseq.bin",
                                                      path_config_json="./doctrfiles/models/multilingual-parseq-config.json")

    @staticmethod
    def build_table_from_html_and_cell(
        structure: List[str], content: List[str] = None
    ) -> List[str]:
        """Build table from html and cell token list.

        Replaces each ``[]`` placeholder token in *structure* with the next
        cell text from *content*, leaving all other tags untouched.
        """
        assert structure is not None
        html_code = list()

        # deal with empty table
        if content is None:
            content = ["placeholder"] * len(structure)

        for tag in structure:
            if tag in ("<td>[]</td>", ">[]</td>"):
                # Consume one cell text per placeholder; extra placeholders
                # are silently dropped when content runs out.
                if len(content) == 0:
                    continue
                cell = content.pop(0)
                html_code.append(tag.replace("[]", cell))
            else:
                html_code.append(tag)

        return html_code

    @staticmethod
    def save_detection(detected_lines_images:List[ImageType], prefix = './res/test1/res_'):
        """Dump each detected line image to ``<prefix><i>.png`` (debugging)."""
        i = 0
        for img in detected_lines_images:
            pilimg = Image.fromarray(img)
            pilimg.save(prefix+str(i)+'.png')
            i=i+1

    @staticmethod
    # for output bounding box post-processing
    def box_cxcywh_to_xyxy(x):
        # Convert (cx, cy, w, h) boxes to (xmin, ymin, xmax, ymax).
        x_c, y_c, w, h = x.unbind(-1)
        b = [(x_c - 0.5 * w), (y_c - 0.5 * h), (x_c + 0.5 * w), (y_c + 0.5 * h)]
        return torch.stack(b, dim=1)

    @staticmethod
    def rescale_bboxes(out_bbox, size):
        # Scale normalized (0..1) model boxes to absolute pixel coordinates.
        img_w, img_h = size
        b = DetectionAndOcrTable1.box_cxcywh_to_xyxy(out_bbox)
        b = b * torch.tensor([img_w, img_h, img_w, img_h], dtype=torch.float32)
        return b

    @staticmethod
    def outputs_to_objects(outputs, img_size, id2label):
        """Convert Table Transformer outputs to a list of
        ``{'label', 'score', 'bbox'}`` dicts, dropping 'no object' hits."""
        m = outputs.logits.softmax(-1).max(-1)
        pred_labels = list(m.indices.detach().cpu().numpy())[0]
        pred_scores = list(m.values.detach().cpu().numpy())[0]
        pred_bboxes = outputs['pred_boxes'].detach().cpu()[0]
        pred_bboxes = [elem.tolist() for elem in DetectionAndOcrTable1.rescale_bboxes(pred_bboxes, img_size)]

        objects = []
        for label, score, bbox in zip(pred_labels, pred_scores, pred_bboxes):
            class_label = id2label[int(label)]
            if not class_label == 'no object':
                objects.append({'label': class_label, 'score': float(score),
                                'bbox': [float(elem) for elem in bbox]})

        return objects

    @staticmethod
    def fig2img(fig):
        """Convert a Matplotlib figure to a PIL Image and return it"""
        import io
        buf = io.BytesIO()
        fig.savefig(buf)
        buf.seek(0)
        img = Image.open(buf)
        return img
    #For that, the TATR authors employ some padding to make sure the borders of the table are included.

    @staticmethod
    def objects_to_crops(img, tokens, objects, class_thresholds, padding=10):
        """
        Process the bounding boxes produced by the table detection model into
        cropped table images and cropped tokens.
        """

        table_crops = []
        for obj in objects:
            # abit unecessary here cause i crop them anywyas
            if obj['score'] < class_thresholds[obj['label']]:
                continue

            cropped_table = {}

            bbox = obj['bbox']
            bbox = [bbox[0]-padding, bbox[1]-padding, bbox[2]+padding, bbox[3]+padding]

            cropped_img = img.crop(bbox)

            # Add padding to the cropped image
            # (centered on a white canvas 40 px larger in each dimension).
            padded_width = cropped_img.width + 40
            padded_height = cropped_img.height +40

            new_img_np = np.full((padded_height, padded_width, 3), fill_value=255, dtype=np.uint8)
            y_offset = (padded_height - cropped_img.height) // 2
            x_offset = (padded_width - cropped_img.width) // 2
            new_img_np[y_offset:y_offset + cropped_img.height, x_offset:x_offset+cropped_img.width] = np.array(cropped_img)

            padded_img = Image.fromarray(new_img_np,'RGB')

            # NOTE(review): `iob` is not defined or imported in this module
            # (it lives in detectionAndOcrTable2.py) — this line raises
            # NameError whenever `tokens` is non-empty. predict() always
            # passes tokens=[], so the bug is currently latent.
            table_tokens = [token for token in tokens if iob(token['bbox'], bbox) >= 0.5]
            for token in table_tokens:
                token['bbox'] = [token['bbox'][0]-bbox[0] + padding,
                                 token['bbox'][1]-bbox[1] + padding,
                                 token['bbox'][2]-bbox[0] + padding,
                                 token['bbox'][3]-bbox[1] + padding]

            # If table is predicted to be rotated, rotate cropped image and tokens/words:
            if obj['label'] == 'table rotated':
                padded_img = padded_img.rotate(270, expand=True)
                for token in table_tokens:
                    bbox = token['bbox']
                    bbox = [padded_img.size[0]-bbox[3]-1,
                            bbox[0],
                            padded_img.size[0]-bbox[1]-1,
                            bbox[2]]
                    token['bbox'] = bbox

            cropped_table['image'] = padded_img
            cropped_table['tokens'] = table_tokens

            table_crops.append(cropped_table)

        return table_crops

    @staticmethod
    def visualize_detected_tables(img, det_tables, out_path=None):
        """Overlay detected table/rotated-table boxes on *img* with
        matplotlib; optionally save to *out_path*. Returns the figure."""
        plt.imshow(img, interpolation="lanczos")
        fig = plt.gcf()
        fig.set_size_inches(20, 20)
        ax = plt.gca()

        for det_table in det_tables:
            bbox = det_table['bbox']

            if det_table['label'] == 'table':
                facecolor = (1, 0, 0.45)
                edgecolor = (1, 0, 0.45)
                alpha = 0.3
                linewidth = 2
                hatch='//////'
            elif det_table['label'] == 'table rotated':
                facecolor = (0.95, 0.6, 0.1)
                edgecolor = (0.95, 0.6, 0.1)
                alpha = 0.3
                linewidth = 2
                hatch='//////'
            else:
                continue

            # Three stacked patches: translucent fill, outline, hatching.
            rect = patches.Rectangle(bbox[:2], bbox[2]-bbox[0], bbox[3]-bbox[1], linewidth=linewidth,
                                     edgecolor='none',facecolor=facecolor, alpha=0.1)
            ax.add_patch(rect)
            rect = patches.Rectangle(bbox[:2], bbox[2]-bbox[0], bbox[3]-bbox[1], linewidth=linewidth,
                                     edgecolor=edgecolor,facecolor='none',linestyle='-', alpha=alpha)
            ax.add_patch(rect)
            rect = patches.Rectangle(bbox[:2], bbox[2]-bbox[0], bbox[3]-bbox[1], linewidth=0,
                                     edgecolor=edgecolor,facecolor='none',linestyle='-', hatch=hatch, alpha=0.2)
            ax.add_patch(rect)

        plt.xticks([], [])
        plt.yticks([], [])

        legend_elements = [Patch(facecolor=(1, 0, 0.45), edgecolor=(1, 0, 0.45),
                                 label='Table', hatch='//////', alpha=0.3),
                           Patch(facecolor=(0.95, 0.6, 0.1), edgecolor=(0.95, 0.6, 0.1),
                                 label='Table (rotated)', hatch='//////', alpha=0.3)]
        plt.legend(handles=legend_elements, bbox_to_anchor=(0.5, -0.02), loc='upper center', borderaxespad=0,
                   fontsize=10, ncol=2)
        plt.gcf().set_size_inches(10, 10)
        plt.axis('off')

        if out_path is not None:
            plt.savefig(out_path, bbox_inches='tight', dpi=150)

        return fig


    def predict(self,image:Image.Image,debugfolder_filename_page_name,denoise=False):


        """
        0. Locate the table using Table detection
        1. Unitable
        """
        print("Running table transformer + Unitable Hybrid Model")

        # Step 0 : Locate the table using Table detection TODO

        #First we load a Table Transformer pre-trained for table detection. We use the "no_timm" version here to load the checkpoint with a Transformers-native backbone.
        # NOTE(review): the detection model is re-downloaded/re-loaded on
        # every predict() call — consider caching it on the instance.
        model = AutoModelForObjectDetection.from_pretrained("microsoft/table-transformer-detection", revision="no_timm")
        device = "cuda" if torch.cuda.is_available() else "cpu"
        model.to(device)

        #Preparing the image for the model
        detection_transform = transforms.Compose([
            MaxResize(800),
            transforms.ToTensor(),
            transforms.Normalize([0.485, 0.456, 0.406], [0.229, 0.224, 0.225])
        ])
        pixel_values = detection_transform(image).unsqueeze(0)
        pixel_values = pixel_values.to(device)

        # Next, we forward the pixel values through the model.
        # The model outputs logits of shape (batch_size, num_queries, num_labels + 1). The +1 is for the "no object" class.
        with torch.no_grad():
            outputs = model(pixel_values)

        # update id2label to include "no object"
        id2label = model.config.id2label
        id2label[len(model.config.id2label)] = "no object"

        #[{'label': 'table', 'score': 0.9999570846557617, 'bbox': [110.24547576904297, 73.31171417236328, 1024.609130859375, 308.7159423828125]}]
        objects = DetectionAndOcrTable1.outputs_to_objects(outputs, image.size, id2label)

        #Only do these for objects with score greater than 0.8
        objects = [obj for obj in objects if obj['score'] > 0.95]

        print("detected object from the table transformers are")
        print(objects)
        # NOTE(review): when no table clears the threshold, the method falls
        # through and implicitly returns None (not an empty list) — callers
        # should guard for that.
        if objects:

            #Next, we crop the table out of the image. For that, the TATR authors employ some padding to make sure the borders of the table are included.


            tokens = []
            detection_class_thresholds = {
                "table": 0.95, #this is a bit double cause we do up there another filtering but didn't want to modify too much from original code
                "table rotated": 0.95,
                "no object": 10
            }
            crop_padding = 10


            tables_crops = DetectionAndOcrTable1.objects_to_crops(image, tokens, objects, detection_class_thresholds, padding=crop_padding)

            cropped_tables =[]
            for i in range (len(tables_crops)):
                cropped_table = tables_crops[i]['image'].convert("RGB")
                cropped_table.save(debugfolder_filename_page_name+"cropped_table_"+str(i)+".png")
                cropped_tables.append(cropped_table)

            # Step 1: Unitable
            #This take PIL Images as input
            if denoise:
                cropped_tables =denoisingAndSharpening(cropped_tables)
            pred_htmls, pred_bboxs = self.unitablePredictor.predict(cropped_tables,debugfolder_filename_page_name)

            table_codes = []
            for k in range(len(cropped_tables)):
                pred_html =pred_htmls[k]
                pred_bbox = pred_bboxs[k]

                # Some tabless have a lot of words in their header
                # So for the headers, give doctr word ddetector doesn't work when the images aren't square
                # Count placeholder cells up to </thead> — these are header
                # cells that may need word-level detection.
                table_header_cells = 0
                header_exists = False
                for cell in pred_html:
                    if cell=='>[]</td>' or cell == '<td>[]</td>':
                        table_header_cells += 1
                    if cell =='</thead>':
                        header_exists = True
                        break
                if not header_exists:
                    table_header_cells = 0
                pred_cell = []
                cell_imgs_to_viz = []
                cell_img_num=0

                # Find what one line should be if there is a cell with a single line
                one_line_height = 100000
                for i in range(table_header_cells):
                    box = pred_bbox[i]
                    xmin, ymin, xmax, ymax = box
                    current_box_height = abs(ymax-ymin)
                    if current_box_height<one_line_height:
                        one_line_height = current_box_height

                for box in pred_bbox:
                    xmin, ymin, xmax, ymax = box
                    fourbytwo = np.array([
                        [xmin, ymin],
                        [xmax, ymin],
                        [xmax, ymax],
                        [xmin, ymax]
                    ], dtype=np.float32)
                    current_box_height = abs(ymax-ymin)

                    # Those are for header cells with more than one line
                    if table_header_cells > 0 and current_box_height>one_line_height+5:

                        cell_img= cropImageExtraMargin([fourbytwo],cropped_tables[k],margin=1.4)[0]
                        table_header_cells -= 1

                        #List of 4 x 2
                        detection_results = self.wordDetector.predict(cell_img,sort_vertical=True)

                        input_to_recog = []
                        if detection_results == []:
                            input_to_recog.append(cell_img)
                        else:

                            for wordbox in detection_results:

                                cropped_image= crop_an_Image(wordbox.box,cell_img)
                                if cropped_image.shape[0] >0 and cropped_image.shape[1]>0:
                                    input_to_recog.append(cropped_image)
                                else:
                                    print("Empty image")
                    else:
                        cell_img = crop_an_Image(fourbytwo,cropped_tables[k])
                        if table_header_cells>0:
                            table_header_cells -= 1
                        # NOTE(review): when this crop is degenerate,
                        # input_to_recog is NOT reset and still holds the
                        # previous cell's images — that stale content is
                        # recognized again for this cell.
                        if cell_img.shape[0] >0 and cell_img.shape[1]>0:
                            input_to_recog =[cell_img]

                    cell_imgs_to_viz.append(cell_img)


                    if input_to_recog != []:
                        words = self.textRecognizer.predict_for_tables(input_to_recog)
                        cell_output = " ".join(words)
                        pred_cell.append(cell_output)
                    else:
                        #Don't lose empty cell
                        pred_cell.append("")


                print(pred_cell)
                #Step3 :
                pred_code = self.build_table_from_html_and_cell(pred_html, pred_cell)
                pred_code = "".join(pred_code)
                pred_code = html_table_template(pred_code)


                soup = bs(pred_code)
                #formatted and indented) string representation of the HTML document
                table_code = soup.prettify()
                print(table_code)

                # Append extracted table to table_codes
                table_codes.append(table_code)

            return table_codes
418
+
419
+
420
+
421
+
422
+
423
+
424
+
425
+
detectionAndOcrTable2.py ADDED
@@ -0,0 +1,306 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ from typing import Tuple, List, Sequence, Optional, Union
2
+ from torchvision import transforms
3
+ from torch import nn, Tensor
4
+ from PIL import Image
5
+ from pathlib import Path
6
+ from bs4 import BeautifulSoup as bs
7
+
8
+ import numpy as np
9
+ import numpy.typing as npt
10
+ from numpy import uint8
11
+ ImageType = npt.NDArray[uint8]
12
+ from transformers import AutoModelForObjectDetection
13
+ import torch
14
+ import matplotlib.pyplot as plt
15
+ import matplotlib.patches as patches
16
+ from matplotlib.patches import Patch
17
+
18
+ from unitable import UnitableFullPredictor
19
+
20
+ #based on this notebook:https://github.com/NielsRogge/Transformers-Tutorials/blob/master/Table%20Transformer/Inference_with_Table_Transformer_(TATR)_for_parsing_tables.ipynb
21
class MaxResize(object):
    """Callable transform that rescales a PIL image so that its longest
    side equals ``max_size`` while preserving the aspect ratio."""

    def __init__(self, max_size=800):
        # Target length (in pixels) for the image's longest edge.
        self.max_size = max_size

    def __call__(self, image):
        width, height = image.size
        # Factor mapping the longest edge onto self.max_size
        # (upscales images that are smaller than max_size).
        ratio = self.max_size / max(width, height)
        new_size = (int(round(ratio * width)), int(round(ratio * height)))
        return image.resize(new_size)
32
+
33
def iob(boxA, boxB):
    """
    Calculate the Intersection over Bounding Box (IoB) of two bounding boxes,
    i.e. how much of ``boxB`` is covered by ``boxA``.

    Parameters:
    - boxA: list or tuple with [xmin, ymin, xmax, ymax] of the first box
    - boxB: list or tuple with [xmin, ymin, xmax, ymax] of the second box

    Returns:
    - iob: float in [0, 1]; 0.0 when boxB is degenerate (zero area), which
      previously raised ZeroDivisionError.
    """
    # Coordinates of the intersection rectangle.
    xA = max(boxA[0], boxB[0])
    yA = max(boxA[1], boxB[1])
    xB = min(boxA[2], boxB[2])
    yB = min(boxA[3], boxB[3])

    # Clamp negative extents to zero so disjoint boxes yield zero overlap.
    interArea = max(0, xB - xA) * max(0, yB - yA)

    # Area of boxB (the reference box of the ratio).
    boxBArea = (boxB[2] - boxB[0]) * (boxB[3] - boxB[1])

    # Bug fix: guard against a degenerate boxB instead of dividing by zero.
    if boxBArea <= 0:
        return 0.0

    return interArea / float(boxBArea)
62
+
63
class DetectionAndOcrTable2():
    """Scan a whole PDF page image for tables and return each one as HTML.

    Step 0: table detection with Microsoft's Table Transformer (TATR).
    Step 1: table structure + cell recognition with the full UniTable model
    (this is what distinguishes this component from DetectionAndOcrTable1).

    Based on:
    https://github.com/NielsRogge/Transformers-Tutorials/blob/master/Table%20Transformer/Inference_with_Table_Transformer_(TATR)_for_parsing_tables.ipynb
    """

    def __init__(self):
        self.unitableFullPredictor = UnitableFullPredictor()
        # The TATR detection model is loaded lazily on the first predict()
        # call and cached here, so later pages do not reload the checkpoint
        # (previously it was re-downloaded/re-built on every call).
        self._detection_model = None
        self._detection_device = "cuda" if torch.cuda.is_available() else "cpu"

    @staticmethod
    def save_detection(detected_lines_images: List[ImageType], prefix='./res/test1/res_'):
        """Debug helper: save every crop as '<prefix><index>.png'."""
        for i, img in enumerate(detected_lines_images):
            Image.fromarray(img).save(prefix + str(i) + '.png')

    @staticmethod
    def box_cxcywh_to_xyxy(x):
        """Convert boxes from (cx, cy, w, h) to (xmin, ymin, xmax, ymax).

        :param x: tensor of shape (N, 4) in center format
        :return: tensor of shape (N, 4) in corner format
        """
        x_c, y_c, w, h = x.unbind(-1)
        corners = [x_c - 0.5 * w, y_c - 0.5 * h, x_c + 0.5 * w, y_c + 0.5 * h]
        return torch.stack(corners, dim=1)

    @staticmethod
    def rescale_bboxes(out_bbox, size):
        """Scale normalised (cx, cy, w, h) boxes to absolute xyxy pixel coords.

        :param out_bbox: (N, 4) tensor of boxes normalised to [0, 1]
        :param size: (width, height) of the original image
        """
        img_w, img_h = size
        b = DetectionAndOcrTable2.box_cxcywh_to_xyxy(out_bbox)
        return b * torch.tensor([img_w, img_h, img_w, img_h], dtype=torch.float32)

    @staticmethod
    def outputs_to_objects(outputs, img_size, id2label):
        """Turn raw TATR outputs into a list of labelled, scored pixel boxes.

        :return: list of {'label', 'score', 'bbox'} dicts; 'no object'
                 detections are dropped.
        """
        m = outputs.logits.softmax(-1).max(-1)
        pred_labels = list(m.indices.detach().cpu().numpy())[0]
        pred_scores = list(m.values.detach().cpu().numpy())[0]
        pred_bboxes = outputs['pred_boxes'].detach().cpu()[0]
        pred_bboxes = [elem.tolist()
                       for elem in DetectionAndOcrTable2.rescale_bboxes(pred_bboxes, img_size)]

        objects = []
        for label, score, bbox in zip(pred_labels, pred_scores, pred_bboxes):
            class_label = id2label[int(label)]
            if class_label != 'no object':
                objects.append({'label': class_label,
                                'score': float(score),
                                'bbox': [float(elem) for elem in bbox]})
        return objects

    @staticmethod
    def visualize_detected_tables(img, det_tables, out_path=None):
        """Render detected table boxes over the page image.

        :param det_tables: objects as produced by outputs_to_objects
        :param out_path: optional path to save the figure to
        :return: the matplotlib figure
        """
        plt.imshow(img, interpolation="lanczos")
        fig = plt.gcf()
        fig.set_size_inches(20, 20)
        ax = plt.gca()

        for det_table in det_tables:
            bbox = det_table['bbox']

            # Pink for upright tables, orange for rotated ones.
            if det_table['label'] == 'table':
                facecolor = (1, 0, 0.45)
                edgecolor = (1, 0, 0.45)
                alpha = 0.3
                linewidth = 2
                hatch = '//////'
            elif det_table['label'] == 'table rotated':
                facecolor = (0.95, 0.6, 0.1)
                edgecolor = (0.95, 0.6, 0.1)
                alpha = 0.3
                linewidth = 2
                hatch = '//////'
            else:
                continue

            # Three stacked patches: translucent fill, solid edge, hatching.
            rect = patches.Rectangle(bbox[:2], bbox[2]-bbox[0], bbox[3]-bbox[1],
                                     linewidth=linewidth, edgecolor='none',
                                     facecolor=facecolor, alpha=0.1)
            ax.add_patch(rect)
            rect = patches.Rectangle(bbox[:2], bbox[2]-bbox[0], bbox[3]-bbox[1],
                                     linewidth=linewidth, edgecolor=edgecolor,
                                     facecolor='none', linestyle='-', alpha=alpha)
            ax.add_patch(rect)
            rect = patches.Rectangle(bbox[:2], bbox[2]-bbox[0], bbox[3]-bbox[1],
                                     linewidth=0, edgecolor=edgecolor,
                                     facecolor='none', linestyle='-', hatch=hatch, alpha=0.2)
            ax.add_patch(rect)

        plt.xticks([], [])
        plt.yticks([], [])

        legend_elements = [Patch(facecolor=(1, 0, 0.45), edgecolor=(1, 0, 0.45),
                                 label='Table', hatch='//////', alpha=0.3),
                           Patch(facecolor=(0.95, 0.6, 0.1), edgecolor=(0.95, 0.6, 0.1),
                                 label='Table (rotated)', hatch='//////', alpha=0.3)]
        plt.legend(handles=legend_elements, bbox_to_anchor=(0.5, -0.02),
                   loc='upper center', borderaxespad=0, fontsize=10, ncol=2)
        plt.gcf().set_size_inches(10, 10)
        plt.axis('off')

        if out_path is not None:
            plt.savefig(out_path, bbox_inches='tight', dpi=150)

        return fig

    @staticmethod
    def objects_to_crops(img, tokens, objects, class_thresholds, padding=10):
        """
        Process the bounding boxes produced by the table detection model into
        cropped table images and cropped tokens.

        The TATR authors employ some padding to make sure the borders of the
        table are included; on top of that we centre each crop on a white
        canvas with a 20 px border on every side.
        """
        table_crops = []
        for obj in objects:
            # Somewhat redundant here since callers pre-filter by score,
            # but kept as a safety net.
            if obj['score'] < class_thresholds[obj['label']]:
                print('skipping object with score', obj['score'])
                continue

            cropped_table = {}

            bbox = obj['bbox']
            bbox = [bbox[0]-padding, bbox[1]-padding, bbox[2]+padding, bbox[3]+padding]

            cropped_img = img.crop(bbox)

            # Centre the crop on a slightly larger white canvas.
            padded_width = cropped_img.width + 40
            padded_height = cropped_img.height + 40

            new_img_np = np.full((padded_height, padded_width, 3), fill_value=255, dtype=np.uint8)
            y_offset = (padded_height - cropped_img.height) // 2
            x_offset = (padded_width - cropped_img.width) // 2
            new_img_np[y_offset:y_offset + cropped_img.height,
                       x_offset:x_offset + cropped_img.width] = np.array(cropped_img)

            padded_img = Image.fromarray(new_img_np, 'RGB')

            # Keep only word tokens mostly inside the table and shift them
            # into the crop's coordinate system.
            table_tokens = [token for token in tokens if iob(token['bbox'], bbox) >= 0.5]
            for token in table_tokens:
                token['bbox'] = [token['bbox'][0]-bbox[0] + padding,
                                 token['bbox'][1]-bbox[1] + padding,
                                 token['bbox'][2]-bbox[0] + padding,
                                 token['bbox'][3]-bbox[1] + padding]

            # If table is predicted to be rotated, rotate cropped image and tokens/words:
            if obj['label'] == 'table rotated':
                padded_img = padded_img.rotate(270, expand=True)
                for token in table_tokens:
                    bbox = token['bbox']
                    bbox = [padded_img.size[0]-bbox[3]-1,
                            bbox[0],
                            padded_img.size[0]-bbox[1]-1,
                            bbox[2]]
                    token['bbox'] = bbox

            cropped_table['image'] = padded_img
            cropped_table['tokens'] = table_tokens

            table_crops.append(cropped_table)

        return table_crops

    def _get_detection_model(self):
        """Lazily load and cache the TATR table-detection model.

        The "no_timm" revision uses a Transformers-native backbone.
        """
        if self._detection_model is None:
            model = AutoModelForObjectDetection.from_pretrained(
                "microsoft/table-transformer-detection", revision="no_timm")
            model.to(self._detection_device)
            self._detection_model = model
        return self._detection_model

    def predict(self, image: Image.Image, debugfolder_filename_page_name):
        """Detect every table on *image* and convert it to HTML with UniTable.

        :param image: full page as a PIL image
        :param debugfolder_filename_page_name: path prefix for debug crops
        :return: list of HTML strings, one per detected table; [] when no
                 sufficiently confident table is found.  (Bug fix: the
                 computed table codes were previously never returned.)
        """
        # Step 0: locate the tables with the Table Transformer.
        model = self._get_detection_model()
        device = self._detection_device

        # Preparing the image for the model.
        detection_transform = transforms.Compose([
            MaxResize(800),
            transforms.ToTensor(),
            transforms.Normalize([0.485, 0.456, 0.406], [0.229, 0.224, 0.225]),
        ])
        pixel_values = detection_transform(image).unsqueeze(0).to(device)

        # Logits have shape (batch_size, num_queries, num_labels + 1);
        # the +1 is the "no object" class.
        with torch.no_grad():
            outputs = model(pixel_values)

        # Copy id2label before extending it so the cached model's config is
        # not mutated on every call.
        id2label = dict(model.config.id2label)
        id2label[len(id2label)] = "no object"

        # e.g. [{'label': 'table', 'score': 0.99, 'bbox': [x0, y0, x1, y1]}]
        objects = DetectionAndOcrTable2.outputs_to_objects(outputs, image.size, id2label)

        # Only keep confident detections.
        objects = [obj for obj in objects if obj['score'] > 0.95]
        print(objects)

        if not objects:
            return []

        DetectionAndOcrTable2.visualize_detected_tables(
            image, objects, out_path="./res/table_debug/table_former_detection.jpg")

        # Crop each table out of the page (with padding so borders survive).
        tokens = []
        detection_class_thresholds = {
            "table": 0.95,
            "table rotated": 0.95,
            "no object": 10,
        }
        crop_padding = 10

        tables_crops = DetectionAndOcrTable2.objects_to_crops(
            image, tokens, objects, detection_class_thresholds, padding=crop_padding)

        # TODO: revisit handling when a page holds very many tables.
        cropped_tables = []
        for i, crop in enumerate(tables_crops):
            cropped_table = crop['image'].convert("RGB")
            cropped_table.save(debugfolder_filename_page_name + "cropped_table_" + str(i) + ".png")
            cropped_tables.append(cropped_table)

        print("number of cropped tables found: " + str(len(cropped_tables)))

        # Step 1: UniTable (takes PIL images) turns each crop into HTML.
        table_codes = self.unitableFullPredictor.predict(
            cropped_tables, debugfolder_filename_page_name)
        # Bug fix: return the extracted tables instead of discarding them.
        return table_codes
297
+
298
+
299
+
300
+
301
+
302
+
303
+
304
+
305
+
306
+
detectionAndOcrTable3.py ADDED
@@ -0,0 +1,267 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ from typing import Tuple, List, Sequence, Optional, Union
2
+ from torchvision import transforms
3
+ from torch import nn, Tensor
4
+ from PIL import Image
5
+ from pathlib import Path
6
+ from bs4 import BeautifulSoup as bs
7
+
8
+ import numpy as np
9
+ import numpy.typing as npt
10
+ from numpy import uint8
11
+ ImageType = npt.NDArray[uint8]
12
+ from transformers import AutoModelForObjectDetection
13
+ import torch
14
+ import matplotlib.pyplot as plt
15
+ import matplotlib.patches as patches
16
+ from matplotlib.patches import Patch
17
+ from utils import draw_only_box
18
+
19
+ from unitable import UnitablePredictor
20
+ from ultralyticsplus import YOLO, render_result
21
+ from doctrfiles import DoctrWordDetector,DoctrTextRecognizer
22
+ from utils import crop_an_Image,cropImageExtraMargin
23
+ from utils import denoisingAndSharpening
24
+ """
25
+ USES YOLO FOR DETECTION INSTEAD OF TABLE TRANSFORMER
26
+ (Table Transformer)
27
+ """
28
+
29
+
30
# HTML scaffold wrapped around a predicted <table> body so the extracted
# table can be viewed/saved as a standalone document.
# Bug fix: the width attribute used "100%%" — a leftover %-format escape.
# Inside an f-string it was emitted literally, producing invalid HTML.
def html_table_template(table: str) -> str:
    """Return a complete HTML document embedding *table* (inner table markup)."""
    return f"""<html>
<head> <meta charset="UTF-8">
<style>
table, th, td {{
border: 1px solid black;
font-size: 10px;
}}
</style> </head>
<body>
<table frame="hsides" rules="groups" width="100%">
{table}
</table> </body> </html>"""
45
+
46
class DetectionAndOcrTable3():
    """Scan a whole PDF page image for tables and return each as prettified HTML.

    Uses YOLO ('foduucom/table-detection-and-extraction') for table detection
    (instead of the Table Transformer), UniTable for the table structure and
    cell boxes, and DocTR for word detection + text recognition inside cells.
    """

    def __init__(self, englishFlag=True):
        self.unitablePredictor = UnitablePredictor()

        self.detector = YOLO('foduucom/table-detection-and-extraction')
        # YOLO inference parameters.
        self.detector.overrides['conf'] = 0.25            # NMS confidence threshold
        self.detector.overrides['iou'] = 0.45             # NMS IoU threshold
        self.detector.overrides['agnostic_nms'] = False   # NMS class-agnostic
        self.detector.overrides['max_det'] = 1000         # max detections per image

        self.wordDetector = DoctrWordDetector(
            architecture="db_resnet50",
            path_weights="doctrfiles/models/db_resnet50-79bd7d70.pt",
            path_config_json="doctrfiles/models/db_resnet50_config.json")

        # English pages use MASTER; everything else the multilingual PARSeq.
        if englishFlag:
            self.textRecognizer = DoctrTextRecognizer(
                architecture="master",
                path_weights="./doctrfiles/models/master-fde31e4a.pt",
                path_config_json="./doctrfiles/models/master.json")
        else:
            self.textRecognizer = DoctrTextRecognizer(
                architecture="parseq",
                path_weights="./doctrfiles/models/doctr-multilingual-parseq.bin",
                path_config_json="./doctrfiles/models/multilingual-parseq-config.json")

    @staticmethod
    def save_detection(detected_lines_images: List[ImageType], prefix='./res/test1/res_'):
        """Debug helper: save every crop as '<prefix><index>.png'."""
        for i, img in enumerate(detected_lines_images):
            Image.fromarray(img).save(prefix + str(i) + '.png')

    @staticmethod
    def build_table_from_html_and_cell(
        structure: List[str], content: List[str] = None
    ) -> List[str]:
        """Build table from html and cell token list.

        Each '<td>[]</td>' / '>[]</td>' placeholder in *structure* consumes
        the next entry of *content*; surplus placeholders are dropped.  When
        *content* is None (empty table) every placeholder gets "placeholder".
        """
        assert structure is not None
        html_code = list()

        # Deal with an empty table.
        if content is None:
            content = ["placeholder"] * len(structure)

        for tag in structure:
            if tag in ("<td>[]</td>", ">[]</td>"):
                if len(content) == 0:
                    continue
                cell = content.pop(0)
                html_code.append(tag.replace("[]", cell))
            else:
                html_code.append(tag)

        return html_code

    # Image is the full page image.
    def predict(self, image: Image.Image, debugfolder_filename_page_name=None, denoise=False):
        """Detect tables with YOLO, read structure with UniTable, OCR with DocTR.

        :param image: full page as a PIL image
        :param debugfolder_filename_page_name: prefix for debug images
        :param denoise: denoise/sharpen the table crops before UniTable
        :return: list of prettified HTML strings, one per table; [] if none.

        YOLO result access (results[0].boxes): .xyxy are absolute corner
        boxes, .conf the per-box confidences.
        """
        results = self.detector.predict(image)

        # Boxes in absolute xyxy pixel coordinates plus their confidences.
        bbxs = results[0].boxes.xyxy.int().tolist()
        conf = results[0].boxes.conf.float().tolist()
        print(bbxs)
        print(conf)

        # Draw the detections for debugging.
        img_to_save = draw_only_box(image, bbxs)
        img_to_save.save(debugfolder_filename_page_name + "detectionBoxRes.png", quality=95)

        cropped_tables = []
        for i in range(len(bbxs)):
            # TODO: find the right confidence and padding values
            if conf[i] < 0.65:
                continue

            padded = [bbxs[i][0] - 10, bbxs[i][1] - 10, bbxs[i][2] + 10, bbxs[i][3] + 10]
            cropped_table = image.convert("RGB").crop(padded)
            cropped_table.save(debugfolder_filename_page_name + "yolo_cropped_table_" + str(i) + ".png")
            cropped_tables.append(cropped_table)

        print("number of cropped tables found: " + str(len(cropped_tables)))

        if not cropped_tables:
            return []

        if denoise:
            cropped_tables = denoisingAndSharpening(cropped_tables)

        # Step 1: UniTable (takes PIL images) predicts the HTML structure
        # tokens and one bbox per cell for every table crop.
        pred_htmls, pred_bboxs = self.unitablePredictor.predict(
            cropped_tables, debugfolder_filename_page_name)

        table_codes = []
        for k in range(len(cropped_tables)):
            pred_html = pred_htmls[k]
            pred_bbox = pred_bboxs[k]

            # Some tables have a lot of words in their header, and the DocTR
            # word detector struggles when crops are far from square — count
            # the header cells (everything before </thead>) so multi-line
            # header cells can be handled specially below.
            table_header_cells = 0
            header_exists = False
            for cell in pred_html:
                if cell == '>[]</td>' or cell == '<td>[]</td>':
                    table_header_cells += 1
                if cell == '</thead>':
                    header_exists = True
                    break
            if not header_exists:
                table_header_cells = 0

            # Estimate one text line's height from the flattest header cell.
            one_line_height = 100000
            for i in range(table_header_cells):
                xmin, ymin, xmax, ymax = pred_bbox[i]
                current_box_height = abs(ymax - ymin)
                if current_box_height < one_line_height:
                    one_line_height = current_box_height

            pred_cell = []
            cell_imgs_to_viz = []
            for box in pred_bbox:
                xmin, ymin, xmax, ymax = box
                fourbytwo = np.array([
                    [xmin, ymin],
                    [xmax, ymin],
                    [xmax, ymax],
                    [xmin, ymax]
                ], dtype=np.float32)
                if ymax - ymin == 0:
                    continue
                current_box_height = abs(ymax - ymin)

                # Bug fix: reset per cell.  Previously this was initialised
                # only in the header branch, so a cell whose crop came out
                # empty silently reused the previous cell's images.
                input_to_recog = []

                # Header cells with more than one line of text.
                if table_header_cells > 0 and current_box_height > one_line_height + 5:
                    cell_img = cropImageExtraMargin([fourbytwo], cropped_tables[k], margin=1.4)[0]
                    table_header_cells -= 1

                    # List of 4 x 2 word boxes.
                    detection_results = self.wordDetector.predict(cell_img, sort_vertical=True)

                    if detection_results == []:
                        input_to_recog.append(cell_img)
                    else:
                        for wordbox in detection_results:
                            cropped_image = crop_an_Image(wordbox.box, cell_img)
                            if cropped_image.shape[0] > 0 and cropped_image.shape[1] > 0:
                                input_to_recog.append(cropped_image)
                            else:
                                print("Empty image")
                else:
                    cell_img = crop_an_Image(fourbytwo, cropped_tables[k])
                    if table_header_cells > 0:
                        table_header_cells -= 1
                    if cell_img.shape[0] > 0 and cell_img.shape[1] > 0:
                        input_to_recog = [cell_img]

                cell_imgs_to_viz.append(cell_img)

                if input_to_recog != []:
                    words = self.textRecognizer.predict_for_tables(input_to_recog)
                    cell_output = " ".join(words)
                    pred_cell.append(cell_output)
                else:
                    # Don't lose empty cells: keep alignment with structure tokens.
                    pred_cell.append("")

            #self.save_detection(cell_imgs_to_viz, prefix='./res/test4/cell_imgs_')
            print(pred_cell)

            # Step 3: merge structure and cell texts, then pretty-print.
            pred_code = self.build_table_from_html_and_cell(pred_html, pred_cell)
            pred_code = "".join(pred_code)
            pred_code = html_table_template(pred_code)

            soup = bs(pred_code)
            # Formatted and indented string representation of the HTML document.
            table_code = soup.prettify()
            print(table_code)
            table_codes.append(table_code)

        return table_codes
255
+
256
+
257
+
258
+
259
+
260
+
261
+
262
+
263
+
264
+
265
+
266
+
267
+
detectionAndOcrTable4.py ADDED
@@ -0,0 +1,112 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ from typing import Tuple, List, Sequence, Optional, Union
2
+ from torchvision import transforms
3
+ from torch import nn, Tensor
4
+ from PIL import Image
5
+ from pathlib import Path
6
+ from bs4 import BeautifulSoup as bs
7
+
8
+ import numpy as np
9
+ import numpy.typing as npt
10
+ from numpy import uint8
11
+ ImageType = npt.NDArray[uint8]
12
+ from transformers import AutoModelForObjectDetection
13
+ import torch
14
+ import matplotlib.pyplot as plt
15
+ import matplotlib.patches as patches
16
+ from matplotlib.patches import Patch
17
+ from utils import draw_only_box
18
+
19
+ from unitable import UnitableFullPredictor
20
+ from ultralyticsplus import YOLO, render_result
21
+ """
22
+ USES YOLO FOR DETECTION INSTEAD OF TABLE TRANSFORMER
23
+ (Table Transformer)
24
+ """
25
+
26
class DetectionAndOcrTable4():
    """Scan a whole PDF page image for tables and return each one as HTML.

    Uses YOLO ('foduucom/table-detection-and-extraction') for table detection
    (instead of the Table Transformer) and the full UniTable model for the
    structure + cell recognition (different to DetectionAndOcrTable1).
    """

    def __init__(self):
        self.unitableFullPredictor = UnitableFullPredictor()

        self.detector = YOLO('foduucom/table-detection-and-extraction')
        # YOLO inference parameters.
        self.detector.overrides['conf'] = 0.25            # NMS confidence threshold
        self.detector.overrides['iou'] = 0.45             # NMS IoU threshold
        self.detector.overrides['agnostic_nms'] = False   # NMS class-agnostic
        self.detector.overrides['max_det'] = 1000         # max detections per image

    @staticmethod
    def save_detection(detected_lines_images: List[ImageType], prefix='./res/test1/res_'):
        """Debug helper: save every crop as '<prefix><index>.png'."""
        for i, img in enumerate(detected_lines_images):
            Image.fromarray(img).save(prefix + str(i) + '.png')

    # Image is the full page image.
    def predict(self, image: Image.Image, debugfolder_filename_page_name=None):
        """Detect tables with YOLO and convert each to HTML with UniTable.

        :param image: full page as a PIL image
        :param debugfolder_filename_page_name: prefix for debug images
        :return: list of HTML table strings; [] when no confident table is
                 found.  (Bug fix: previously fell off the end and returned
                 None in that case.)

        YOLO result access (results[0].boxes): .xyxy are absolute corner
        boxes, .conf the per-box confidences.
        """
        results = self.detector.predict(image)

        # Boxes in absolute xyxy pixel coordinates plus their confidences.
        bbxs = results[0].boxes.xyxy.int().tolist()
        conf = results[0].boxes.conf.float().tolist()
        print(bbxs)
        print(conf)

        # Draw the detections for debugging.
        img_to_save = draw_only_box(image, bbxs)
        img_to_save.save(debugfolder_filename_page_name + "detectionBoxRes.png", quality=95)

        cropped_tables = []
        for i in range(len(bbxs)):
            # TODO: find the right confidence and padding values
            if conf[i] < 0.65:
                continue

            padded = [bbxs[i][0] - 10, bbxs[i][1] - 10, bbxs[i][2] + 10, bbxs[i][3] + 10]
            cropped_table = image.convert("RGB").crop(padded)
            cropped_table.save(debugfolder_filename_page_name + "yolo_cropped_table_" + str(i) + ".png")
            cropped_tables.append(cropped_table)

        print("number of cropped tables found: " + str(len(cropped_tables)))

        if not cropped_tables:
            # Bug fix: always return a list so callers can iterate safely.
            return []

        # Step 1: UniTable (takes PIL images) turns each crop into HTML.
        table_codes = self.unitableFullPredictor.predict(
            cropped_tables, debugfolder_filename_page_name)
        return table_codes
101
+
102
+
103
+
104
+
105
+
106
+
107
+
108
+
109
+
110
+
111
+
112
+
doctrfiles/__init__.py ADDED
@@ -0,0 +1,4 @@
 
 
 
 
 
1
+ from .doctr_recognizer import DoctrTextRecognizer
2
+ from .word_detector import Wordboxes,DoctrWordDetector
3
+
4
+ __all__ = ['DoctrTextRecognizer','DoctrWordDetector','Wordboxes']
doctrfiles/doctr_recognizer.py ADDED
@@ -0,0 +1,183 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ import os
2
+ from abc import ABC
3
+ from pathlib import Path
4
+ from typing import Any, List, Literal, Mapping, Optional, Tuple
5
+ from zipfile import ZipFile
6
+ import json
7
+ from typing import Any, List, Literal, Mapping, Optional,Dict
8
+ import uuid
9
+ from doctr.models.preprocessor import PreProcessor
10
+ from doctr.models.recognition.predictor import RecognitionPredictor # pylint: disable=W0611
11
+ from doctr.models.recognition.zoo import ARCHS, recognition
12
+ import torch
13
+ # Numpy image type
14
+ import numpy.typing as npt
15
+ from numpy import uint8
16
+ ImageType = npt.NDArray[uint8]
17
+
18
+ from utils import WordAnnotation,getlogger
19
+
20
class DoctrTextRecognizer():
    """Text recognition (OCR of pre-cropped word/cell images) via DocTR."""

    def __init__(
        self,
        architecture: str,
        path_weights: str,
        path_config_json: str = None,
    ) -> None:
        """
        :param architecture: DocTR supports various text recognition models,
            e.g. "crnn_vgg16_bn", "crnn_mobilenet_v3_small". The full list:
            https://github.com/mindee/doctr/blob/main/doctr/models/recognition/zoo.py#L16.
        :param path_weights: Path to the weights of the model
        :param path_config_json: Path to a json file containing the
            configuration of the model. Useful, if you have a model trained
            on custom vocab.
        """
        self.architecture = architecture
        self.path_weights = path_weights

        self.name = self.get_name(self.path_weights, self.architecture)

        self.device = torch.device("cuda" if torch.cuda.is_available() else "cpu")
        self.path_config_json = path_config_json

        # Build the predictor once and load its weights; get_wrapped_model()
        # then hands out this ready instance.  (Previously the model was
        # built and its weights loaded a second time inside
        # get_wrapped_model, doubling start-up work for no benefit.)
        self.built_model = self.build_model(self.architecture, self.path_config_json)
        self.load_model(self.path_weights, self.built_model, self.device)
        self.doctr_predictor = self.get_wrapped_model()

    def predict(self, inputs: Dict[uuid.UUID, Tuple[ImageType, WordAnnotation]]) -> List[WordAnnotation]:
        """
        Prediction on a batch of text lines.

        :param inputs: Dictionary where the key is a word's object id and the
            value is a tuple of (cropped image, word annotation).
        :return: The annotations with their .text filled in from recognition.
        """
        if not inputs:
            return []

        word_ids = list(inputs.keys())
        cropped_images = [value[0] for value in inputs.values()]

        raw_output = self.doctr_predictor(cropped_images)
        det_results = []
        # NOTE: loop variable renamed from `uuid`, which shadowed the module.
        for word_id, output in zip(word_ids, raw_output):
            annotation = inputs[word_id][1]
            annotation.text = output[0]  # output is a (text, confidence) pair
            det_results.append(annotation)
        return det_results

    def predict_for_tables(self, inputs: List[ImageType]) -> List[str]:
        """Recognise a batch of table-cell crops; returns one string per crop."""
        if not inputs:
            return []
        raw_output = self.doctr_predictor(list(inputs))
        # Each raw output is a (text, confidence) pair; keep only the text.
        return [output[0] for output in raw_output]

    @staticmethod
    def load_model(path_weights: str, doctr_predictor: Any, device: torch.device) -> None:
        """Load checkpoint weights into *doctr_predictor* and move it to *device*.

        1. Load the state dictionary mapped onto *device*.
        2. Prefix every key with "model." — the predictor wraps the bare
           recognition network under that attribute, so the checkpoint keys
           must be rewritten to match.
        3. Load the modified state dictionary into the predictor.
        4. Move the predictor to *device*.
        """
        state_dict = torch.load(path_weights, map_location=device)
        for key in list(state_dict.keys()):
            state_dict["model." + key] = state_dict.pop(key)
        doctr_predictor.load_state_dict(state_dict)
        doctr_predictor.to(device)

    @staticmethod
    def build_model(architecture: str, path_config_json: Optional[str] = None) -> "RecognitionPredictor":
        """Build a RecognitionPredictor for *architecture*.

        Inspired and adapted from
        https://github.com/mindee/doctr/blob/main/doctr/models/recognition/zoo.py

        1. The optional json config supplies custom vocab/mean/std; the
           arch/url/task entries are dropped and mean/std are routed to the
           PreProcessor configuration.
        2. *architecture* may be either a known architecture name (looked up
           in ARCHS) or an already-instantiated recognition model.
        3. The model's configured input shape feeds the PreProcessor, which
           performs casting, resizing, batching and normalisation.
        """
        custom_configs = {}
        batch_size = 1024
        recognition_configs = {}
        if path_config_json:
            with open(path_config_json, "r", encoding="utf-8") as f:
                custom_configs = json.load(f)
            custom_configs.pop("arch", None)
            custom_configs.pop("url", None)
            custom_configs.pop("task", None)
            recognition_configs["mean"] = custom_configs.pop("mean")
            recognition_configs["std"] = custom_configs.pop("std")
            #batch_size = custom_configs.pop("batch_size")
            recognition_configs["batch_size"] = batch_size

        if isinstance(architecture, str):
            if architecture not in ARCHS:
                raise ValueError(f"unknown architecture '{architecture}'")
            model = recognition.__dict__[architecture](
                pretrained=True, pretrained_backbone=True, **custom_configs)
        else:
            # Accept a pre-built instance of a recognised model class.
            if not isinstance(
                architecture,
                (recognition.CRNN, recognition.SAR, recognition.MASTER,
                 recognition.ViTSTR, recognition.PARSeq),
            ):
                raise ValueError(f"unknown architecture: {type(architecture)}")
            model = architecture

        input_shape = model.cfg["input_shape"][-2:]
        # PreProcessor(output_size, batch_size, mean, std) performs casting,
        # resizing, batching and normalisation of each page.
        return RecognitionPredictor(
            PreProcessor(input_shape, preserve_aspect_ratio=True, **recognition_configs),
            model)

    def get_wrapped_model(self) -> Any:
        """
        Get the inner (wrapped) model.

        Returns the predictor already built and weight-loaded in __init__
        instead of constructing and loading a second copy.
        """
        return self.built_model

    @staticmethod
    def get_name(path_weights: str, architecture: str) -> str:
        """Returns the name of the model.

        NOTE(review): there is no separator between the architecture and the
        joined path parts (e.g. "doctr_mastermodels_x.pt"); kept unchanged
        for compatibility with existing identifiers.
        """
        return f"doctr_{architecture}" + "_".join(Path(path_weights).parts[-2:])
doctrfiles/models/config-multi2.json ADDED
@@ -0,0 +1,21 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ {
2
+ "mean": [
3
+ 0.694,
4
+ 0.695,
5
+ 0.693
6
+ ],
7
+ "std": [
8
+ 0.299,
9
+ 0.296,
10
+ 0.301
11
+ ],
12
+ "input_shape": [
13
+ 3,
14
+ 32,
15
+ 128
16
+ ],
17
+ "vocab": "0123456789abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ!\"#$%&'()*+,-./:;<=>?@[\\]^_`{|}~°£€¥¢฿äöüßÄÖÜẞàâéèêëîïôùûçÀÂÉÈÊËÎÏÔÙÛǧ",
18
+ "url": "https://doctr-static.mindee.com/models?id=v0.3.1/crnn_vgg16_bn-9762b0b0.pt&src=0",
19
+ "arch": "crnn_vgg16_bn",
20
+ "task": "recognition"
21
+ }
doctrfiles/models/db_mobilenet_v3_large-81e9b152.pt ADDED
@@ -0,0 +1,3 @@
 
 
 
 
1
+ version https://git-lfs.github.com/spec/v1
2
+ oid sha256:81e9b152c11e9681f5eb4a2ec72e5f5d67df8ab860a846e1004756badfa5d37a
3
+ size 16987510
doctrfiles/models/db_resnet34-cb6aed9e.pt ADDED
@@ -0,0 +1,3 @@
 
 
 
 
1
+ version https://git-lfs.github.com/spec/v1
2
+ oid sha256:cb6aed9e4252c8a92d381de1b15e1e75461f7a125a4262ef16768a4b9f797347
3
+ size 89991042
doctrfiles/models/db_resnet50-79bd7d70.pt ADDED
@@ -0,0 +1,3 @@
 
 
 
 
1
+ version https://git-lfs.github.com/spec/v1
2
+ oid sha256:79bd7d702506703b89cf11afa42d12aebf5cf25c3618e6ffd5f85772240ca483
3
+ size 102021912
doctrfiles/models/db_resnet50_config.json ADDED
@@ -0,0 +1,20 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ {
2
+ "mean": [
3
+ 0.798,
4
+ 0.785,
5
+ 0.772
6
+ ],
7
+ "std": [
8
+ 0.264,
9
+ 0.2749,
10
+ 0.287
11
+ ],
12
+ "input_shape": [
13
+ 3,
14
+ 1024,
15
+ 1024
16
+ ],
17
+ "url": "https://doctr-static.mindee.com/models?id=v0.7.0/parseq-56125471.pt&src=0",
18
+ "arch": "db_resnet50",
19
+ "task": "detection"
20
+ }
doctrfiles/models/doctr-multilingual-parseq.bin ADDED
@@ -0,0 +1,3 @@
 
 
 
 
1
+ version https://git-lfs.github.com/spec/v1
2
+ oid sha256:d5b1d3f3b9d8ab994e94c671c47828b9a4079f20b4288eb5f0ba3c6dacf6c237
3
+ size 47872130
doctrfiles/models/master-fde31e4a.pt ADDED
@@ -0,0 +1,3 @@
 
 
 
 
1
+ version https://git-lfs.github.com/spec/v1
2
+ oid sha256:fde31e4a9612670af83daf4b730dd9c56216806589546b09290abc347ca3a49d
3
+ size 243889428
doctrfiles/models/master.json ADDED
@@ -0,0 +1,21 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ {
2
+ "mean": [
3
+ 0.694,
4
+ 0.695,
5
+ 0.693
6
+ ],
7
+ "std": [
8
+ 0.299,
9
+ 0.296,
10
+ 0.301
11
+ ],
12
+ "input_shape": [
13
+ 3,
14
+ 32,
15
+ 128
16
+ ],
17
+ "vocab": "0123456789abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ!\"#$%&'()*+,-./:;<=>?@[\\]^_`{|}~°£€¥¢฿àâéèêëîïôùûüçÀÂÉÈÊËÎÏÔÙÛÜÇ",
18
+ "url": null,
19
+ "arch": "master",
20
+ "task": "recognition"
21
+ }
doctrfiles/models/multi2.bin ADDED
@@ -0,0 +1,3 @@
 
 
 
 
1
+ version https://git-lfs.github.com/spec/v1
2
+ oid sha256:0bdc3c6922cad527714504b84a9d0efaa6b679d8ca8050a003611076eb514757
3
+ size 63310142
doctrfiles/models/multilingual-parseq-config.json ADDED
@@ -0,0 +1,21 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ {
2
+ "mean": [
3
+ 0.694,
4
+ 0.695,
5
+ 0.693
6
+ ],
7
+ "std": [
8
+ 0.299,
9
+ 0.296,
10
+ 0.301
11
+ ],
12
+ "input_shape": [
13
+ 3,
14
+ 32,
15
+ 128
16
+ ],
17
+ "vocab": "0123456789abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ!\"#$%&'()*+,-./:;<=>?@[\\]^_`{|}~°£€¥¢฿àâéèêëîïôùûüçÀÂÉÈÊËÎÏÔÙÛÜÇáãíóõúÁÃÍÓÕÚñÑ¡¿äößÄÖẞčďěňřšťůýžČĎĚŇŘŠŤŮÝŽąćęłńśźżĄĆĘŁŃŚŹŻìòÌÒæøåÆØŧ",
18
+ "url": "https://doctr-static.mindee.com/models?id=v0.7.0/parseq-56125471.pt&src=0",
19
+ "arch": "parseq",
20
+ "task": "recognition"
21
+ }
doctrfiles/word_detector.py ADDED
@@ -0,0 +1,282 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+
2
+ import os
3
+ from abc import ABC
4
+ from pathlib import Path
5
+ from typing import Any, List, Literal, Mapping, Optional, Tuple, Union, Dict, Type, Sequence
6
+ import json
7
+ import logging
8
+ import torch
9
+ from doctr.models.preprocessor import PreProcessor
10
+ from doctr.models.detection.predictor import DetectionPredictor # pylint: disable=W0611
11
+ from doctr.models.detection.zoo import detection_predictor,detection
12
+
13
+ import numpy.typing as npt
14
+ import numpy as np
15
+ from numpy import uint8
16
+ ImageType = npt.NDArray[uint8]
17
+
18
+
19
+ from utils import Annotation,getlogger,group_words_into_lines
20
+
21
+ ARCHS = [
22
+ "db_resnet34",
23
+ "db_resnet50",
24
+ "db_mobilenet_v3_large",
25
+ "linknet_resnet18",
26
+ "linknet_resnet34",
27
+ "linknet_resnet50",
28
+ "fast_tiny",
29
+ "fast_small",
30
+ "fast_base",
31
+ ]
32
class Wordboxes:
    """Container for one detected word: its bounding box and confidence score."""

    def __init__(self, score, box):
        # Detection confidence reported by the model for this word.
        self.score = score
        # Corner coordinates of the detected word (a 4x2 float array downstream).
        self.box = box
36
+
37
class DoctrWordDetector():
    """
    A wrapper around the DocTR text/word detection models (e.g. DBNet with a
    ResNet backbone, LinkNet, FAST).

    The detector is built once at construction time: the architecture named in
    ``architecture`` is instantiated, optionally configured from a JSON file
    (normalisation mean/std, input shape), and the local checkpoint at
    ``path_weights`` is loaded onto CUDA when available, otherwise CPU.

    Use :meth:`predict` to obtain a list of :class:`Wordboxes` (4x2 corner
    arrays in absolute pixel coordinates plus a confidence score) for a single
    image given as a numpy array.
    """

    def __init__(
        self,
        architecture: str,
        path_weights: str,
        path_config_json: str,
    ) -> None:
        """
        :param architecture: DocTR text detection architecture name, e.g.
            "db_resnet50" or "db_mobilenet_v3_large". The supported names are
            listed in ``ARCHS``; see also
            https://github.com/mindee/doctr/blob/main/doctr/models/detection/zoo.py
        :param path_weights: Path to the local checkpoint (state dict) of the model.
        :param path_config_json: Path to a JSON config carrying pre-processing
            values ("mean", "std", "input_shape", ...).
        """
        self.architecture = architecture
        self.path_weights = path_weights
        self.path_config_json = path_config_json

        # Run on GPU when available, otherwise fall back to CPU.
        self.device = torch.device("cuda" if torch.cuda.is_available() else "cpu")

        # Build the predictor and load the weights once, at construction time.
        self.doctr_predictor = self.get_wrapped_model()

    # build_model and load_model are static methods: they need no instance
    # state and can be called without creating an instance of the class.

    def get_wrapped_model(self) -> Any:
        """
        Build the inner (wrapped) DocTR detection predictor and load its weights.

        :return: a ``DetectionPredictor`` (a ``nn.Module`` in PyTorch) moved to
            ``self.device``.
        """
        # BUGFIX: the config path used to be passed positionally, which bound it
        # to build_model's ``pretrained`` parameter — the JSON config was never
        # loaded and ``pretrained`` became a truthy string. Pass it by keyword.
        doctr_predictor = self.build_model(
            self.architecture, path_config_json=self.path_config_json
        )
        self.load_model(self.path_weights, doctr_predictor, self.device)
        return doctr_predictor

    @staticmethod
    def build_model(
        arch: str,
        pretrained=False,
        assume_straight_pages=True,
        path_config_json: Optional[str] = None,
    ) -> "DetectionPredictor":
        """Build a DocTR ``DetectionPredictor``.

        1. If ``path_config_json`` is given, load it and split it up: keys that
           describe the model itself ("arch", "url", "task") are discarded,
           while the pre-processing values ("mean", "std") are routed to the
           ``PreProcessor``.
        2. Create the model:
           - if ``arch`` is a string it must be one of ``ARCHS`` and is
             instantiated from ``doctr.models.detection``;
           - otherwise ``arch`` must already be an instance of a recognized
             detection model class (DBNet, LinkNet, FAST) and is used as-is.
        3. Read the expected input shape from the model config and wrap model
           plus pre-processor into a ``DetectionPredictor``.

        :param arch: architecture name or an already-built detection model.
        :param pretrained: whether to let DocTR download pretrained weights
            (normally False here, because local weights are loaded afterwards).
        :param assume_straight_pages: passed through to the model; axis-aligned
            box prediction when True.
        :param path_config_json: optional path to the JSON pre-processing config.
        :raises ValueError: if ``arch`` is an unknown name or unsupported type.
        """
        custom_configs = {}
        detection_configs = {}
        # Fixed pre-processor batch size; a "batch_size" entry in the JSON
        # config is currently not honoured.
        batch_size = 4
        detection_configs["batch_size"] = batch_size
        if path_config_json:
            with open(path_config_json, "r", encoding="utf-8") as f:
                custom_configs = json.load(f)
            # Model-description keys are not pre-processor arguments.
            custom_configs.pop("arch", None)
            custom_configs.pop("url", None)
            custom_configs.pop("task", None)
            detection_configs["mean"] = custom_configs.pop("mean")
            detection_configs["std"] = custom_configs.pop("std")

        if isinstance(arch, str):
            if arch not in ARCHS:
                raise ValueError(f"unknown architecture '{arch}'")
            model = detection.__dict__[arch](
                pretrained=pretrained,
                assume_straight_pages=assume_straight_pages,
            )
        else:
            if not isinstance(arch, (detection.DBNet, detection.LinkNet, detection.FAST)):
                raise ValueError(f"unknown architecture: {type(arch)}")
            model = arch
            model.assume_straight_pages = assume_straight_pages

        # (H, W) expected by the pre-processor, taken from the model config.
        input_shape = model.cfg["input_shape"][-2:]

        # BUGFIX: batch_size used to be passed both explicitly and inside
        # **detection_configs, which raised "got multiple values for keyword
        # argument 'batch_size'" whenever a config JSON was supplied. It now
        # travels only inside detection_configs.
        predictor = DetectionPredictor(
            PreProcessor(input_shape, **detection_configs),
            model,
        )
        return predictor

    @staticmethod
    def load_model(path_weights: str, doctr_predictor: Any, device: torch.device) -> None:
        """Load a local checkpoint into the predictor, in place.

        The checkpoint stores the bare model's weights, while the predictor
        wraps the model under its ``model`` attribute — so every key is
        prefixed with ``"model."`` before loading. Finally the predictor is
        moved to ``device``.

        :param path_weights: path to the checkpoint file.
        :param doctr_predictor: the ``DetectionPredictor`` to load into.
        :param device: target device for the loaded model.
        """
        state_dict = torch.load(path_weights, map_location=device)
        for key in list(state_dict.keys()):
            state_dict["model." + key] = state_dict.pop(key)
        doctr_predictor.load_state_dict(state_dict)
        doctr_predictor.to(device)

    def predict(self, np_img: ImageType, sort_vertical=False) -> List[Wordboxes]:
        """
        Run word detection on a single image.

        :param np_img: image as a numpy array of shape (H, W, ...).
        :param sort_vertical: when False, boxes are sorted left-to-right
            (single-line input); when True, words are first grouped into lines
            via ``group_words_into_lines`` (multi-line input).
        :return: a list of :class:`Wordboxes`, each holding a (4, 2) float32
            array of corner points in absolute pixel coordinates, ordered
            top-left, top-right, bottom-right, bottom-left, plus the score.
        """
        raw_output = self.doctr_predictor([np_img])
        height, width = np_img.shape[:2]

        # raw_output is a list with one dict per image, keyed by "words":
        # a numpy array of shape (n, 5) with relative coordinates
        # (xmin, ymin, xmax, ymax) followed by the confidence score.

        logger = getlogger("array")
        # Reset handlers so repeated predict() calls do not duplicate output.
        if logger.hasHandlers():
            logger.handlers.clear()
        handler = logging.StreamHandler()
        formatter = logging.Formatter('%(levelname)s:%(message)s')
        handler.setFormatter(formatter)
        logger.addHandler(handler)

        array = raw_output[0]["words"]
        if not sort_vertical:
            # Single-line input: sort left-to-right by xmin.
            sorted_array = array[array[:, 0].argsort()]
        else:
            # Multi-line input: group words into reading-order lines.
            sorted_array = group_words_into_lines(array)

        detection_results = []
        for box in sorted_array:
            xmin, ymin, xmax, ymax = box[:4]
            # Scale relative coordinates to absolute pixels.
            xmin = xmin * width
            ymin = ymin * height
            xmax = xmax * width
            ymax = ymax * height
            newb = np.array([
                [xmin, ymin],
                [xmax, ymin],
                [xmax, ymax],
                [xmin, ymax],
            ], dtype=np.float32)
            # BUGFIX: the failure messages used to report box.shape/box.dtype
            # (the (5,) input row) instead of the array actually being checked.
            assert newb.shape == (4, 2), f"Points array must be of shape (4, 2), but got {newb.shape}"
            assert newb.dtype == np.float32, f"Points array must be of dtype float32, but got {newb.dtype}"

            w = Wordboxes(
                score=box[4],
                box=newb,
            )
            detection_results.append(w)

        return detection_results
image-1.png ADDED
image-2.png ADDED
image.png ADDED
june11.jpg ADDED