mayurasandakalum commited on
Commit
41c5642
1 Parent(s): f52e560

Upload 10 files

Browse files
__pycache__/app.cpython-36.pyc ADDED
Binary file (2.84 kB). View file
 
data.json ADDED
@@ -0,0 +1,159 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ {
2
+ "intents": [
3
+ {
4
+ "tag": "greeting",
5
+ "patterns": [
6
+ "Hi there",
7
+ "How are you",
8
+ "Is anyone there?",
9
+ "Hey",
10
+ "Hola",
11
+ "Hello",
12
+ "Good day"
13
+ ],
14
+ "responses": [
15
+ "Hello, thanks for asking",
16
+ "Good to see you again",
17
+ "Hi there, how can I help?"
18
+ ],
19
+ "context": [""]
20
+ },
21
+ {
22
+ "tag": "goodbye",
23
+ "patterns": [
24
+ "Bye",
25
+ "See you later",
26
+ "Goodbye",
27
+ "Nice chatting to you, bye",
28
+ "Till next time"
29
+ ],
30
+ "responses": [
31
+ "See you!",
32
+ "Have a nice day",
33
+ "Bye! Come back again soon."
34
+ ],
35
+ "context": [""]
36
+ },
37
+ {
38
+ "tag": "thanks",
39
+ "patterns": [
40
+ "Thanks",
41
+ "Thank you",
42
+ "That's helpful",
43
+ "Awesome, thanks",
44
+ "Thanks for helping me"
45
+ ],
46
+ "responses": ["Happy to help!", "Any time!", "My pleasure"],
47
+ "context": [""]
48
+ },
49
+ {
50
+ "tag": "noanswer",
51
+ "patterns": [],
52
+ "responses": [
53
+ "Sorry, can't understand you",
54
+ "Please give me more info",
55
+ "Not sure I understand"
56
+ ],
57
+ "context": [""]
58
+ },
59
+ {
60
+ "tag": "options",
61
+ "patterns": [
62
+ "How you could help me?",
63
+ "What you can do?",
64
+ "What help you provide?",
65
+ "How you can be helpful?",
66
+ "What support is offered"
67
+ ],
68
+ "responses": [
69
+ "I can guide you through Adverse drug reaction list, Blood pressure tracking, Hospitals and Pharmacies",
70
+ "Offering support for Adverse drug reaction, Blood pressure, Hospitals and Pharmacies"
71
+ ],
72
+ "context": [""]
73
+ },
74
+ {
75
+ "tag": "adverse_drug",
76
+ "patterns": [
77
+ "How to check Adverse drug reaction?",
78
+ "Open adverse drugs module",
79
+ "Give me a list of drugs causing adverse behavior",
80
+ "List all drugs suitable for patient with adverse reaction",
81
+ "Which drugs dont have adverse reaction?"
82
+ ],
83
+ "responses": ["Navigating to Adverse drug reaction module"],
84
+ "context": [""]
85
+ },
86
+ {
87
+ "tag": "blood_pressure",
88
+ "patterns": [
89
+ "Open blood pressure module",
90
+ "Task related to blood pressure",
91
+ "Blood pressure data entry",
92
+ "I want to log blood pressure results",
93
+ "Blood pressure data management"
94
+ ],
95
+ "responses": ["Navigating to Blood Pressure module"],
96
+ "context": [""]
97
+ },
98
+ {
99
+ "tag": "blood_pressure_search",
100
+ "patterns": [
101
+ "I want to search for blood pressure result history",
102
+ "Blood pressure for patient",
103
+ "Load patient blood pressure result",
104
+ "Show blood pressure results for patient",
105
+ "Find blood pressure results by ID"
106
+ ],
107
+ "responses": ["Please provide Patient ID", "Patient ID?"],
108
+ "context": ["search_blood_pressure_by_patient_id"]
109
+ },
110
+ {
111
+ "tag": "search_blood_pressure_by_patient_id",
112
+ "patterns": [],
113
+ "responses": ["Loading Blood pressure result for Patient"],
114
+ "context": [""]
115
+ },
116
+ {
117
+ "tag": "pharmacy_search",
118
+ "patterns": [
119
+ "Find me a pharmacy",
120
+ "Find pharmacy",
121
+ "List of pharmacies nearby",
122
+ "Locate pharmacy",
123
+ "Search pharmacy"
124
+ ],
125
+ "responses": ["Please provide pharmacy name"],
126
+ "context": ["search_pharmacy_by_name"]
127
+ },
128
+ {
129
+ "tag": "search_pharmacy_by_name",
130
+ "patterns": [],
131
+ "responses": ["Loading pharmacy details"],
132
+ "context": [""]
133
+ },
134
+ {
135
+ "tag": "hospital_search",
136
+ "patterns": [
137
+ "Lookup for hospital",
138
+ "Searching for hospital to transfer patient",
139
+ "I want to search hospital data",
140
+ "Hospital lookup for patient",
141
+ "Looking up hospital details"
142
+ ],
143
+ "responses": ["Please provide hospital name or location"],
144
+ "context": ["search_hospital_by_params"]
145
+ },
146
+ {
147
+ "tag": "search_hospital_by_params",
148
+ "patterns": [],
149
+ "responses": ["Please provide hospital type"],
150
+ "context": ["search_hospital_by_type"]
151
+ },
152
+ {
153
+ "tag": "search_hospital_by_type",
154
+ "patterns": [],
155
+ "responses": ["Loading hospital details"],
156
+ "context": [""]
157
+ }
158
+ ]
159
+ }
labels.pkl ADDED
@@ -0,0 +1,3 @@
 
 
 
 
1
+ version https://git-lfs.github.com/spec/v1
2
+ oid sha256:5d26d7e2c44fbb50d869f9d5c79e647353af3b0c10a82736a930032deb8cbeb0
3
+ size 176
main.py ADDED
@@ -0,0 +1,90 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ from flask import Flask, render_template, request
2
+ import random
3
+ import json
4
+ from keras.models import load_model
5
+ import numpy as np
6
+ import pickle
7
+ from nltk.stem import WordNetLemmatizer
8
+ import nltk
9
+ nltk.download('popular')
10
+ lemmatizer = WordNetLemmatizer()
11
+
12
# Load the trained artifacts once at module import time.
# (Original used `open(...).read()` / `pickle.load(open(...))`, leaking the
# file handles; `with` closes them deterministically.)
model = load_model('model.h5')  # trained Keras intent-classification model
with open('data.json') as f:
    intents = json.load(f)  # intent definitions: tags, patterns, responses
with open('texts.pkl', 'rb') as f:
    words = pickle.load(f)  # vocabulary used for the bag-of-words encoding
with open('labels.pkl', 'rb') as f:
    classes = pickle.load(f)  # intent tags in model-output order
16
+
17
+
18
def clean_up_sentence(sentence):
    """Split *sentence* into tokens and reduce each to its lowercase lemma."""
    tokens = nltk.word_tokenize(sentence)
    return [lemmatizer.lemmatize(token.lower()) for token in tokens]
25
+
26
+ # return bag of words array: 0 or 1 for each word in the bag that exists in the sentence
27
+
28
+
29
def bow(sentence, words, show_details=True):
    """Encode *sentence* as a 0/1 bag-of-words vector over vocabulary *words*.

    Parameters
    ----------
    sentence : str
        Raw user input; tokenized and lemmatized via clean_up_sentence.
    words : list
        Vocabulary, one slot per entry in the returned vector.
    show_details : bool
        When True, print each token found in the vocabulary.

    Returns a numpy array of len(words) flags.

    The original scanned the whole vocabulary once per token
    (O(tokens * vocab)); a prebuilt index map makes each lookup O(1).
    Assumes vocabulary entries are unique (training builds it with
    sorted(set(...))) — TODO confirm against training pipeline.
    """
    sentence_words = clean_up_sentence(sentence)
    index_of = {w: i for i, w in enumerate(words)}
    bag = [0] * len(words)
    for s in sentence_words:
        i = index_of.get(s)
        if i is not None:
            bag[i] = 1
            if show_details:
                print("found in bag: %s" % s)
    return np.array(bag)
42
+
43
+
44
def predict_class(sentence, model):
    """Classify *sentence* and return candidate intents, strongest first.

    Each element is {"intent": tag, "probability": str}; predictions at or
    below the threshold are dropped.
    """
    ERROR_THRESHOLD = 0.25
    features = bow(sentence, words, show_details=False)
    probabilities = model.predict(np.array([features]))[0]
    # keep only confident predictions, ordered by descending probability
    candidates = sorted(
        ((i, p) for i, p in enumerate(probabilities) if p > ERROR_THRESHOLD),
        key=lambda pair: pair[1],
        reverse=True,
    )
    return [{"intent": classes[i], "probability": str(p)} for i, p in candidates]
56
+
57
+
58
def getResponse(ints, intents_json):
    """Pick a random canned response for the top predicted intent.

    Parameters
    ----------
    ints : list
        Predictions as produced by predict_class — dicts with an "intent"
        key — ordered strongest first.
    intents_json : dict
        Parsed data.json containing an "intents" list of
        {"tag": ..., "responses": [...]} entries.

    Returns one response string. The original raised IndexError on an empty
    prediction list and UnboundLocalError when no tag matched; both now fall
    back to a generic reply instead of crashing the request.
    """
    if not ints:
        # nothing scored above the classifier threshold
        return "Sorry, can't understand you"
    tag = ints[0]['intent']
    for intent in intents_json['intents']:
        if intent['tag'] == tag:
            return random.choice(intent['responses'])
    # predicted tag has no entry in data.json
    return "Sorry, can't understand you"
66
+
67
+
68
def chatbot_response(msg):
    """End-to-end helper: classify *msg* and return a matching reply string."""
    predicted = predict_class(msg, model)
    return getResponse(predicted, intents)
72
+
73
+
74
app = Flask(__name__)
app.static_folder = 'static'


@app.route("/")
def home():
    """Serve the chat UI (templates/index.html)."""
    return render_template("index.html")
81
+
82
+
83
@app.route("/get")
def get_bot_response():
    """AJAX endpoint: return the bot's reply for the 'msg' query parameter.

    Called from the page's jQuery `$.get("/get", { msg: ... })`; returns the
    reply as a plain string.
    """
    user_text = request.args.get('msg')
    if not user_text:
        # a missing/empty 'msg' would otherwise reach nltk.word_tokenize(None)
        # and crash the request with a 500
        return "Please give me more info"
    return chatbot_response(user_text)
87
+
88
+
89
+ # if __name__ == "__main__":
90
+ # app.run()
model.h5 ADDED
@@ -0,0 +1,3 @@
 
 
 
 
1
+ version https://git-lfs.github.com/spec/v1
2
+ oid sha256:37e6e59d3a8573d4a76cd77d3eeb0037b93cb618d57bcf52c1dd22bf154d4d90
3
+ size 192816
requirements.txt ADDED
@@ -0,0 +1,6 @@
 
 
 
 
 
 
 
1
+ gunicorn
2
+ flask
3
+ tensorflow
4
+ keras
5
+ # NOTE: 'pickle' is part of the Python standard library; it is not a pip package (listing it breaks `pip install -r requirements.txt`)
6
+ nltk
static/styles/style.css ADDED
@@ -0,0 +1,152 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ :root {
2
+ --body-bg: linear-gradient(135deg, #f5f7fa 0%, #c3cfe2 100%);
3
+ --msger-bg: #fff;
4
+ --border: 2px solid #ddd;
5
+ --left-msg-bg: #ececec;
6
+ --right-msg-bg: #579ffb;
7
+ }
8
+
9
+ html {
10
+ box-sizing: border-box;
11
+ }
12
+
13
+ *,
14
+ *:before,
15
+ *:after {
16
+ margin: 0;
17
+ padding: 0;
18
+ box-sizing: inherit;
19
+ }
20
+
21
+ body {
22
+ display: flex;
23
+ justify-content: center;
24
+ align-items: center;
25
+ height: 100vh;
26
+ background-image: var(--body-bg);
27
+ font-family: Helvetica, sans-serif;
28
+ }
29
+
30
+ .msger {
31
+ display: flex;
32
+ flex-flow: column wrap;
33
+ justify-content: space-between;
34
+ width: 100%;
35
+ max-width: 867px;
36
+ margin: 25px 10px;
37
+ height: calc(100% - 50px);
38
+ border: var(--border);
39
+ border-radius: 5px;
40
+ background: var(--msger-bg);
41
+ box-shadow: 0 15px 15px -5px rgba(0, 0, 0, 0.2);
42
+ }
43
+
44
+ .msger-header {
45
+ /* display: flex; */
46
+ font-size: medium;
47
+ justify-content: space-between;
48
+ padding: 10px;
49
+ text-align: center;
50
+ border-bottom: var(--border);
51
+ background: #eee;
52
+ color: #666;
53
+ }
54
+
55
+ .msger-chat {
56
+ flex: 1;
57
+ overflow-y: auto;
58
+ padding: 10px;
59
+ }
60
+ .msger-chat::-webkit-scrollbar {
61
+ width: 6px;
62
+ }
63
+ .msger-chat::-webkit-scrollbar-track {
64
+ background: #ddd;
65
+ }
66
+ .msger-chat::-webkit-scrollbar-thumb {
67
+ background: #bdbdbd;
68
+ }
69
+ .msg {
70
+ display: flex;
71
+ align-items: flex-end;
72
+ margin-bottom: 10px;
73
+ }
74
+
75
+ .msg-img {
76
+ width: 50px;
77
+ height: 50px;
78
+ margin-right: 10px;
79
+ background: #ddd;
80
+ background-repeat: no-repeat;
81
+ background-position: center;
82
+ background-size: cover;
83
+ border-radius: 50%;
84
+ }
85
+ .msg-bubble {
86
+ max-width: 450px;
87
+ padding: 15px;
88
+ border-radius: 15px;
89
+ background: var(--left-msg-bg);
90
+ }
91
+ .msg-info {
92
+ display: flex;
93
+ justify-content: space-between;
94
+ align-items: center;
95
+ margin-bottom: 10px;
96
+ }
97
+ .msg-info-name {
98
+ margin-right: 10px;
99
+ font-weight: bold;
100
+ }
101
+ .msg-info-time {
102
+ font-size: 0.85em;
103
+ }
104
+
105
+ .left-msg .msg-bubble {
106
+ border-bottom-left-radius: 0;
107
+ }
108
+
109
+ .right-msg {
110
+ flex-direction: row-reverse;
111
+ }
112
+ .right-msg .msg-bubble {
113
+ background: var(--right-msg-bg);
114
+ color: #fff;
115
+ border-bottom-right-radius: 0;
116
+ }
117
+ .right-msg .msg-img {
118
+ margin: 0 0 0 10px;
119
+ }
120
+
121
+ .msger-inputarea {
122
+ display: flex;
123
+ padding: 10px;
124
+ border-top: var(--border);
125
+ background: #eee;
126
+ }
127
+ .msger-inputarea * {
128
+ padding: 10px;
129
+ border: none;
130
+ border-radius: 3px;
131
+ font-size: 1em;
132
+ }
133
+ .msger-input {
134
+ flex: 1;
135
+ background: #ddd;
136
+ }
137
+ .msger-send-btn {
138
+ margin-left: 10px;
139
+ background: rgb(0, 196, 65);
140
+ color: #fff;
141
+ font-weight: bold;
142
+ cursor: pointer;
143
+ transition: background 0.23s;
144
+ }
145
+ .msger-send-btn:hover {
146
+ background: rgb(0, 180, 50);
147
+ }
148
+
149
+ .msger-chat {
150
+ background-color: #fcfcfe;
151
+ background-image: url("data:image/svg+xml,%3Csvg xmlns='http://www.w3.org/2000/svg' width='260' height='260' viewBox='0 0 260 260'%3E%3Cg fill-rule='evenodd'%3E%3Cg fill='%23dddddd' fill-opacity='0.4'%3E%3Cpath d='M24.37 16c.2.65.39 1.32.54 2H21.17l1.17 2.34.45.9-.24.11V28a5 5 0 0 1-2.23 8.94l-.02.06a8 8 0 0 1-7.75 6h-20a8 8 0 0 1-7.74-6l-.02-.06A5 5 0 0 1-17.45 28v-6.76l-.79-1.58-.44-.9.9-.44.63-.32H-20a23.01 23.01 0 0 1 44.37-2zm-36.82 2a1 1 0 0 0-.44.1l-3.1 1.56.89 1.79 1.31-.66a3 3 0 0 1 2.69 0l2.2 1.1a1 1 0 0 0 .9 0l2.21-1.1a3 3 0 0 1 2.69 0l2.2 1.1a1 1 0 0 0 .9 0l2.21-1.1a3 3 0 0 1 2.69 0l2.2 1.1a1 1 0 0 0 .86.02l2.88-1.27a3 3 0 0 1 2.43 0l2.88 1.27a1 1 0 0 0 .85-.02l3.1-1.55-.89-1.79-1.42.71a3 3 0 0 1-2.56.06l-2.77-1.23a1 1 0 0 0-.4-.09h-.01a1 1 0 0 0-.4.09l-2.78 1.23a3 3 0 0 1-2.56-.06l-2.3-1.15a1 1 0 0 0-.45-.11h-.01a1 1 0 0 0-.44.1L.9 19.22a3 3 0 0 1-2.69 0l-2.2-1.1a1 1 0 0 0-.45-.11h-.01a1 1 0 0 0-.44.1l-2.21 1.11a3 3 0 0 1-2.69 0l-2.2-1.1a1 1 0 0 0-.45-.11h-.01zm0-2h-4.9a21.01 21.01 0 0 1 39.61 0h-2.09l-.06-.13-.26.13h-32.31zm30.35 7.68l1.36-.68h1.3v2h-36v-1.15l.34-.17 1.36-.68h2.59l1.36.68a3 3 0 0 0 2.69 0l1.36-.68h2.59l1.36.68a3 3 0 0 0 2.69 0L2.26 23h2.59l1.36.68a3 3 0 0 0 2.56.06l1.67-.74h3.23l1.67.74a3 3 0 0 0 2.56-.06zM-13.82 27l16.37 4.91L18.93 27h-32.75zm-.63 2h.34l16.66 5 16.67-5h.33a3 3 0 1 1 0 6h-34a3 3 0 1 1 0-6zm1.35 8a6 6 0 0 0 5.65 4h20a6 6 0 0 0 5.66-4H-13.1z'/%3E%3Cpath id='path6_fill-copy' d='M284.37 16c.2.65.39 1.32.54 2H281.17l1.17 2.34.45.9-.24.11V28a5 5 0 0 1-2.23 8.94l-.02.06a8 8 0 0 1-7.75 6h-20a8 8 0 0 1-7.74-6l-.02-.06a5 5 0 0 1-2.24-8.94v-6.76l-.79-1.58-.44-.9.9-.44.63-.32H240a23.01 23.01 0 0 1 44.37-2zm-36.82 2a1 1 0 0 0-.44.1l-3.1 1.56.89 1.79 1.31-.66a3 3 0 0 1 2.69 0l2.2 1.1a1 1 0 0 0 .9 0l2.21-1.1a3 3 0 0 1 2.69 0l2.2 1.1a1 1 0 0 0 .9 0l2.21-1.1a3 3 0 0 1 2.69 0l2.2 1.1a1 1 0 0 0 .86.02l2.88-1.27a3 3 0 0 1 2.43 0l2.88 1.27a1 1 0 0 0 .85-.02l3.1-1.55-.89-1.79-1.42.71a3 3 0 0 1-2.56.06l-2.77-1.23a1 1 0 0 0-.4-.09h-.01a1 1 
0 0 0-.4.09l-2.78 1.23a3 3 0 0 1-2.56-.06l-2.3-1.15a1 1 0 0 0-.45-.11h-.01a1 1 0 0 0-.44.1l-2.21 1.11a3 3 0 0 1-2.69 0l-2.2-1.1a1 1 0 0 0-.45-.11h-.01a1 1 0 0 0-.44.1l-2.21 1.11a3 3 0 0 1-2.69 0l-2.2-1.1a1 1 0 0 0-.45-.11h-.01zm0-2h-4.9a21.01 21.01 0 0 1 39.61 0h-2.09l-.06-.13-.26.13h-32.31zm30.35 7.68l1.36-.68h1.3v2h-36v-1.15l.34-.17 1.36-.68h2.59l1.36.68a3 3 0 0 0 2.69 0l1.36-.68h2.59l1.36.68a3 3 0 0 0 2.69 0l1.36-.68h2.59l1.36.68a3 3 0 0 0 2.56.06l1.67-.74h3.23l1.67.74a3 3 0 0 0 2.56-.06zM246.18 27l16.37 4.91L278.93 27h-32.75zm-.63 2h.34l16.66 5 16.67-5h.33a3 3 0 1 1 0 6h-34a3 3 0 1 1 0-6zm1.35 8a6 6 0 0 0 5.65 4h20a6 6 0 0 0 5.66-4H246.9z'/%3E%3Cpath d='M159.5 21.02A9 9 0 0 0 151 15h-42a9 9 0 0 0-8.5 6.02 6 6 0 0 0 .02 11.96A8.99 8.99 0 0 0 109 45h42a9 9 0 0 0 8.48-12.02 6 6 0 0 0 .02-11.96zM151 17h-42a7 7 0 0 0-6.33 4h54.66a7 7 0 0 0-6.33-4zm-9.34 26a8.98 8.98 0 0 0 3.34-7h-2a7 7 0 0 1-7 7h-4.34a8.98 8.98 0 0 0 3.34-7h-2a7 7 0 0 1-7 7h-4.34a8.98 8.98 0 0 0 3.34-7h-2a7 7 0 0 1-7 7h-7a7 7 0 1 1 0-14h42a7 7 0 1 1 0 14h-9.34zM109 27a9 9 0 0 0-7.48 4H101a4 4 0 1 1 0-8h58a4 4 0 0 1 0 8h-.52a9 9 0 0 0-7.48-4h-42z'/%3E%3Cpath d='M39 115a8 8 0 1 0 0-16 8 8 0 0 0 0 16zm6-8a6 6 0 1 1-12 0 6 6 0 0 1 12 0zm-3-29v-2h8v-6H40a4 4 0 0 0-4 4v10H22l-1.33 4-.67 2h2.19L26 130h26l3.81-40H58l-.67-2L56 84H42v-6zm-4-4v10h2V74h8v-2h-8a2 2 0 0 0-2 2zm2 12h14.56l.67 2H22.77l.67-2H40zm13.8 4H24.2l3.62 38h22.36l3.62-38z'/%3E%3Cpath d='M129 92h-6v4h-6v4h-6v14h-3l.24 2 3.76 32h36l3.76-32 .24-2h-3v-14h-6v-4h-6v-4h-8zm18 22v-12h-4v4h3v8h1zm-3 0v-6h-4v6h4zm-6 6v-16h-4v19.17c1.6-.7 2.97-1.8 4-3.17zm-6 3.8V100h-4v23.8a10.04 10.04 0 0 0 4 0zm-6-.63V104h-4v16a10.04 10.04 0 0 0 4 3.17zm-6-9.17v-6h-4v6h4zm-6 0v-8h3v-4h-4v12h1zm27-12v-4h-4v4h3v4h1v-4zm-6 0v-8h-4v4h3v4h1zm-6-4v-4h-4v8h1v-4h3zm-6 4v-4h-4v8h1v-4h3zm7 24a12 12 0 0 0 11.83-10h7.92l-3.53 30h-32.44l-3.53-30h7.92A12 12 0 0 0 130 126z'/%3E%3Cpath d='M212 86v2h-4v-2h4zm4 0h-2v2h2v-2zm-20 0v.1a5 5 0 0 0-.56 9.65l.06.25 1.12 4.48a2 2 0 0 0 1.94 
1.52h.01l7.02 24.55a2 2 0 0 0 1.92 1.45h4.98a2 2 0 0 0 1.92-1.45l7.02-24.55a2 2 0 0 0 1.95-1.52L224.5 96l.06-.25a5 5 0 0 0-.56-9.65V86a14 14 0 0 0-28 0zm4 0h6v2h-9a3 3 0 1 0 0 6H223a3 3 0 1 0 0-6H220v-2h2a12 12 0 1 0-24 0h2zm-1.44 14l-1-4h24.88l-1 4h-22.88zm8.95 26l-6.86-24h18.7l-6.86 24h-4.98zM150 242a22 22 0 1 0 0-44 22 22 0 0 0 0 44zm24-22a24 24 0 1 1-48 0 24 24 0 0 1 48 0zm-28.38 17.73l2.04-.87a6 6 0 0 1 4.68 0l2.04.87a2 2 0 0 0 2.5-.82l1.14-1.9a6 6 0 0 1 3.79-2.75l2.15-.5a2 2 0 0 0 1.54-2.12l-.19-2.2a6 6 0 0 1 1.45-4.46l1.45-1.67a2 2 0 0 0 0-2.62l-1.45-1.67a6 6 0 0 1-1.45-4.46l.2-2.2a2 2 0 0 0-1.55-2.13l-2.15-.5a6 6 0 0 1-3.8-2.75l-1.13-1.9a2 2 0 0 0-2.5-.8l-2.04.86a6 6 0 0 1-4.68 0l-2.04-.87a2 2 0 0 0-2.5.82l-1.14 1.9a6 6 0 0 1-3.79 2.75l-2.15.5a2 2 0 0 0-1.54 2.12l.19 2.2a6 6 0 0 1-1.45 4.46l-1.45 1.67a2 2 0 0 0 0 2.62l1.45 1.67a6 6 0 0 1 1.45 4.46l-.2 2.2a2 2 0 0 0 1.55 2.13l2.15.5a6 6 0 0 1 3.8 2.75l1.13 1.9a2 2 0 0 0 2.5.8zm2.82.97a4 4 0 0 1 3.12 0l2.04.87a4 4 0 0 0 4.99-1.62l1.14-1.9a4 4 0 0 1 2.53-1.84l2.15-.5a4 4 0 0 0 3.09-4.24l-.2-2.2a4 4 0 0 1 .97-2.98l1.45-1.67a4 4 0 0 0 0-5.24l-1.45-1.67a4 4 0 0 1-.97-2.97l.2-2.2a4 4 0 0 0-3.09-4.25l-2.15-.5a4 4 0 0 1-2.53-1.84l-1.14-1.9a4 4 0 0 0-5-1.62l-2.03.87a4 4 0 0 1-3.12 0l-2.04-.87a4 4 0 0 0-4.99 1.62l-1.14 1.9a4 4 0 0 1-2.53 1.84l-2.15.5a4 4 0 0 0-3.09 4.24l.2 2.2a4 4 0 0 1-.97 2.98l-1.45 1.67a4 4 0 0 0 0 5.24l1.45 1.67a4 4 0 0 1 .97 2.97l-.2 2.2a4 4 0 0 0 3.09 4.25l2.15.5a4 4 0 0 1 2.53 1.84l1.14 1.9a4 4 0 0 0 5 1.62l2.03-.87zM152 207a1 1 0 1 1 2 0 1 1 0 0 1-2 0zm6 2a1 1 0 1 1 2 0 1 1 0 0 1-2 0zm-11 1a1 1 0 1 1 2 0 1 1 0 0 1-2 0zm-6 0a1 1 0 1 1 2 0 1 1 0 0 1-2 0zm3-5a1 1 0 1 1 2 0 1 1 0 0 1-2 0zm-8 8a1 1 0 1 1 2 0 1 1 0 0 1-2 0zm3 6a1 1 0 1 1 2 0 1 1 0 0 1-2 0zm0 6a1 1 0 1 1 2 0 1 1 0 0 1-2 0zm4 7a1 1 0 1 1 2 0 1 1 0 0 1-2 0zm5-2a1 1 0 1 1 2 0 1 1 0 0 1-2 0zm5 4a1 1 0 1 1 2 0 1 1 0 0 1-2 0zm4-6a1 1 0 1 1 2 0 1 1 0 0 1-2 0zm6-4a1 1 0 1 1 2 0 1 1 0 0 1-2 0zm-4-3a1 1 0 1 1 2 0 1 1 0 0 1-2 0zm4-3a1 1 0 1 1 2 
0 1 1 0 0 1-2 0zm-5-4a1 1 0 1 1 2 0 1 1 0 0 1-2 0zm-24 6a1 1 0 1 1 2 0 1 1 0 0 1-2 0zm16 5a5 5 0 1 0 0-10 5 5 0 0 0 0 10zm7-5a7 7 0 1 1-14 0 7 7 0 0 1 14 0zm86-29a1 1 0 0 0 0 2h2a1 1 0 0 0 0-2h-2zm19 9a1 1 0 0 1 1-1h2a1 1 0 0 1 0 2h-2a1 1 0 0 1-1-1zm-14 5a1 1 0 0 0 0 2h2a1 1 0 0 0 0-2h-2zm-25 1a1 1 0 0 0 0 2h2a1 1 0 0 0 0-2h-2zm5 4a1 1 0 0 0 0 2h2a1 1 0 0 0 0-2h-2zm9 0a1 1 0 0 1 1-1h2a1 1 0 0 1 0 2h-2a1 1 0 0 1-1-1zm15 1a1 1 0 0 1 1-1h2a1 1 0 0 1 0 2h-2a1 1 0 0 1-1-1zm12-2a1 1 0 0 0 0 2h2a1 1 0 0 0 0-2h-2zm-11-14a1 1 0 0 1 1-1h2a1 1 0 0 1 0 2h-2a1 1 0 0 1-1-1zm-19 0a1 1 0 0 0 0 2h2a1 1 0 0 0 0-2h-2zm6 5a1 1 0 0 1 1-1h2a1 1 0 0 1 0 2h-2a1 1 0 0 1-1-1zm-25 15c0-.47.01-.94.03-1.4a5 5 0 0 1-1.7-8 3.99 3.99 0 0 1 1.88-5.18 5 5 0 0 1 3.4-6.22 3 3 0 0 1 1.46-1.05 5 5 0 0 1 7.76-3.27A30.86 30.86 0 0 1 246 184c6.79 0 13.06 2.18 18.17 5.88a5 5 0 0 1 7.76 3.27 3 3 0 0 1 1.47 1.05 5 5 0 0 1 3.4 6.22 4 4 0 0 1 1.87 5.18 4.98 4.98 0 0 1-1.7 8c.02.46.03.93.03 1.4v1h-62v-1zm.83-7.17a30.9 30.9 0 0 0-.62 3.57 3 3 0 0 1-.61-4.2c.37.28.78.49 1.23.63zm1.49-4.61c-.36.87-.68 1.76-.96 2.68a2 2 0 0 1-.21-3.71c.33.4.73.75 1.17 1.03zm2.32-4.54c-.54.86-1.03 1.76-1.49 2.68a3 3 0 0 1-.07-4.67 3 3 0 0 0 1.56 1.99zm1.14-1.7c.35-.5.72-.98 1.1-1.46a1 1 0 1 0-1.1 1.45zm5.34-5.77c-1.03.86-2 1.79-2.9 2.77a3 3 0 0 0-1.11-.77 3 3 0 0 1 4-2zm42.66 2.77c-.9-.98-1.87-1.9-2.9-2.77a3 3 0 0 1 4.01 2 3 3 0 0 0-1.1.77zm1.34 1.54c.38.48.75.96 1.1 1.45a1 1 0 1 0-1.1-1.45zm3.73 5.84c-.46-.92-.95-1.82-1.5-2.68a3 3 0 0 0 1.57-1.99 3 3 0 0 1-.07 4.67zm1.8 4.53c-.29-.9-.6-1.8-.97-2.67.44-.28.84-.63 1.17-1.03a2 2 0 0 1-.2 3.7zm1.14 5.51c-.14-1.21-.35-2.4-.62-3.57.45-.14.86-.35 1.23-.63a2.99 2.99 0 0 1-.6 4.2zM275 214a29 29 0 0 0-57.97 0h57.96zM72.33 198.12c-.21-.32-.34-.7-.34-1.12v-12h-2v12a4.01 4.01 0 0 0 7.09 2.54c.57-.69.91-1.57.91-2.54v-12h-2v12a1.99 1.99 0 0 1-2 2 2 2 0 0 1-1.66-.88zM75 176c.38 0 .74-.04 1.1-.12a4 4 0 0 0 6.19 2.4A13.94 13.94 0 0 1 84 185v24a6 6 0 0 1-6 6h-3v9a5 5 0 1 1-10 0v-9h-3a6 6 0 0 
1-6-6v-24a14 14 0 0 1 14-14 5 5 0 0 0 5 5zm-17 15v12a1.99 1.99 0 0 0 1.22 1.84 2 2 0 0 0 2.44-.72c.21-.32.34-.7.34-1.12v-12h2v12a3.98 3.98 0 0 1-5.35 3.77 3.98 3.98 0 0 1-.65-.3V209a4 4 0 0 0 4 4h16a4 4 0 0 0 4-4v-24c.01-1.53-.23-2.88-.72-4.17-.43.1-.87.16-1.28.17a6 6 0 0 1-5.2-3 7 7 0 0 1-6.47-4.88A12 12 0 0 0 58 185v6zm9 24v9a3 3 0 1 0 6 0v-9h-6z'/%3E%3Cpath d='M-17 191a1 1 0 0 0 0 2h2a1 1 0 0 0 0-2h-2zm19 9a1 1 0 0 1 1-1h2a1 1 0 0 1 0 2H3a1 1 0 0 1-1-1zm-14 5a1 1 0 0 0 0 2h2a1 1 0 0 0 0-2h-2zm-25 1a1 1 0 0 0 0 2h2a1 1 0 0 0 0-2h-2zm5 4a1 1 0 0 0 0 2h2a1 1 0 0 0 0-2h-2zm9 0a1 1 0 0 1 1-1h2a1 1 0 0 1 0 2h-2a1 1 0 0 1-1-1zm15 1a1 1 0 0 1 1-1h2a1 1 0 0 1 0 2h-2a1 1 0 0 1-1-1zm12-2a1 1 0 0 0 0 2h2a1 1 0 0 0 0-2H4zm-11-14a1 1 0 0 1 1-1h2a1 1 0 0 1 0 2h-2a1 1 0 0 1-1-1zm-19 0a1 1 0 0 0 0 2h2a1 1 0 0 0 0-2h-2zm6 5a1 1 0 0 1 1-1h2a1 1 0 0 1 0 2h-2a1 1 0 0 1-1-1zm-25 15c0-.47.01-.94.03-1.4a5 5 0 0 1-1.7-8 3.99 3.99 0 0 1 1.88-5.18 5 5 0 0 1 3.4-6.22 3 3 0 0 1 1.46-1.05 5 5 0 0 1 7.76-3.27A30.86 30.86 0 0 1-14 184c6.79 0 13.06 2.18 18.17 5.88a5 5 0 0 1 7.76 3.27 3 3 0 0 1 1.47 1.05 5 5 0 0 1 3.4 6.22 4 4 0 0 1 1.87 5.18 4.98 4.98 0 0 1-1.7 8c.02.46.03.93.03 1.4v1h-62v-1zm.83-7.17a30.9 30.9 0 0 0-.62 3.57 3 3 0 0 1-.61-4.2c.37.28.78.49 1.23.63zm1.49-4.61c-.36.87-.68 1.76-.96 2.68a2 2 0 0 1-.21-3.71c.33.4.73.75 1.17 1.03zm2.32-4.54c-.54.86-1.03 1.76-1.49 2.68a3 3 0 0 1-.07-4.67 3 3 0 0 0 1.56 1.99zm1.14-1.7c.35-.5.72-.98 1.1-1.46a1 1 0 1 0-1.1 1.45zm5.34-5.77c-1.03.86-2 1.79-2.9 2.77a3 3 0 0 0-1.11-.77 3 3 0 0 1 4-2zm42.66 2.77c-.9-.98-1.87-1.9-2.9-2.77a3 3 0 0 1 4.01 2 3 3 0 0 0-1.1.77zm1.34 1.54c.38.48.75.96 1.1 1.45a1 1 0 1 0-1.1-1.45zm3.73 5.84c-.46-.92-.95-1.82-1.5-2.68a3 3 0 0 0 1.57-1.99 3 3 0 0 1-.07 4.67zm1.8 4.53c-.29-.9-.6-1.8-.97-2.67.44-.28.84-.63 1.17-1.03a2 2 0 0 1-.2 3.7zm1.14 5.51c-.14-1.21-.35-2.4-.62-3.57.45-.14.86-.35 1.23-.63a2.99 2.99 0 0 1-.6 4.2zM15 214a29 29 0 0 0-57.97 0h57.96z'/%3E%3C/g%3E%3C/g%3E%3C/svg%3E");
152
+ }
templates/index.html ADDED
@@ -0,0 +1,126 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ <!DOCTYPE html>
2
+ <html lang="en">
3
+
4
+ <head>
5
+ <meta charset="UTF-8">
6
+ <title>Chatbot</title>
7
+ <!-- duplicate charset declaration removed; charset is already set in the first meta tag above -->
8
+ <meta name="viewport" content="width=device-width, initial-scale=1.0">
9
+ <meta http-equiv="X-UA-Compatible" content="ie=edge">
10
+ <link rel="stylesheet" href="{{ url_for('static', filename='styles/style.css') }}">
11
+ <script src="https://ajax.googleapis.com/ajax/libs/jquery/3.2.1/jquery.min.js"></script>
12
+ </head>
13
+
14
+ <body>
15
+ <!-- partial:index.partial.html -->
16
+ <section class="msger">
17
+ <header class="msger-header">
18
+ <div class="msger-header-title">
19
+ <i class="fas fa-bug"></i> Chatbot <i class="fas fa-bug"></i>
20
+ </div>
21
+ </header>
22
+
23
+ <main class="msger-chat">
24
+ <div class="msg left-msg">
25
+ <div class="msg-img" style="background-image: url(https://image.flaticon.com/icons/svg/327/327779.svg)">
26
+ </div>
27
+
28
+ <div class="msg-bubble">
29
+ <div class="msg-info">
30
+ <div class="msg-info-name">Chatbot</div>
31
+ <div class="msg-info-time">12:45</div>
32
+ </div>
33
+
34
+ <div class="msg-text">
35
+ Hi, welcome to ChatBot! Go ahead and send me a message. 😄
36
+ </div>
37
+ </div>
38
+ </div>
39
+
40
+ </main>
41
+
42
+ <form class="msger-inputarea">
43
+ <input type="text" class="msger-input" id="textInput" placeholder="Enter your message...">
44
+ <button type="submit" class="msger-send-btn">Send</button>
45
+ </form>
46
+ </section>
47
+ <!-- partial -->
48
+ <script src='https://use.fontawesome.com/releases/v5.0.13/js/all.js'></script>
49
+ <script>
50
+
51
+ const msgerForm = get(".msger-inputarea");
52
+ const msgerInput = get(".msger-input");
53
+ const msgerChat = get(".msger-chat");
54
+
55
+
56
+ // Icons made by Freepik from www.flaticon.com
57
+ const BOT_IMG = "https://image.flaticon.com/icons/svg/327/327779.svg";
58
+ const PERSON_IMG = "https://image.flaticon.com/icons/svg/145/145867.svg";
59
+ const BOT_NAME = " ChatBot";
60
+ const PERSON_NAME = "You";
61
+
62
+ msgerForm.addEventListener("submit", event => {
63
+ event.preventDefault();
64
+
65
+ const msgText = msgerInput.value;
66
+ if (!msgText) return;
67
+
68
+ appendMessage(PERSON_NAME, PERSON_IMG, "right", msgText);
69
+ msgerInput.value = "";
70
+ botResponse(msgText);
71
+ });
72
+
73
+ function appendMessage(name, img, side, text) {
74
+ // Simple solution for small apps
75
+ const msgHTML = `
76
+ <div class="msg ${side}-msg">
77
+ <div class="msg-img" style="background-image: url(${img})"></div>
78
+
79
+ <div class="msg-bubble">
80
+ <div class="msg-info">
81
+ <div class="msg-info-name">${name}</div>
82
+ <div class="msg-info-time">${formatDate(new Date())}</div>
83
+ </div>
84
+
85
+ <div class="msg-text">${text}</div>
86
+ </div>
87
+ </div>
88
+ `;
89
+
90
+ msgerChat.insertAdjacentHTML("beforeend", msgHTML);
91
+ msgerChat.scrollTop += 500;
92
+ }
93
+
94
+ function botResponse(rawText) {
95
+
96
+ // Bot Response
97
+ $.get("/get", { msg: rawText }).done(function (data) {
98
+ console.log(rawText);
99
+ console.log(data);
100
+ const msgText = data;
101
+ appendMessage(BOT_NAME, BOT_IMG, "left", msgText);
102
+
103
+ });
104
+
105
+ }
106
+
107
+
108
+ // Utils
109
+ function get(selector, root = document) {
110
+ return root.querySelector(selector);
111
+ }
112
+
113
+ function formatDate(date) {
114
+ const h = "0" + date.getHours();
115
+ const m = "0" + date.getMinutes();
116
+
117
+ return `${h.slice(-2)}:${m.slice(-2)}`;
118
+ }
119
+
120
+
121
+
122
+ </script>
123
+
124
+ </body>
125
+
126
+ </html>
texts.pkl ADDED
@@ -0,0 +1,3 @@
 
 
 
 
1
+ version https://git-lfs.github.com/spec/v1
2
+ oid sha256:672df62425bc41198ca7671f9af837184b3670273e684de2aa188c1817a9045d
3
+ size 1043
training.ipynb ADDED
@@ -0,0 +1,755 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ {
2
+ "cells": [
3
+ {
4
+ "cell_type": "code",
5
+ "execution_count": 20,
6
+ "metadata": {
7
+ "id": "wUWIn56C2EVy"
8
+ },
9
+ "outputs": [],
10
+ "source": [
11
+ "import nltk\n",
12
+ "from nltk.stem import WordNetLemmatizer\n",
13
+ "lemmatizer = WordNetLemmatizer()\n",
14
+ "import json\n",
15
+ "import pickle\n",
16
+ "import numpy as np\n",
17
+ "from keras.models import Sequential\n",
18
+ "from keras.layers import Dense, Activation, Dropout\n",
19
+ "# from keras.optimizers import SGD\n",
20
+ "from tensorflow.keras.optimizers import SGD\n",
21
+ "import random"
22
+ ]
23
+ },
24
+ {
25
+ "cell_type": "code",
26
+ "execution_count": 2,
27
+ "metadata": {
28
+ "colab": {
29
+ "base_uri": "https://localhost:8080/"
30
+ },
31
+ "id": "hBb-ddKr2zlg",
32
+ "outputId": "d216a15f-5142-4cad-a214-cc911a214394"
33
+ },
34
+ "outputs": [
35
+ {
36
+ "name": "stderr",
37
+ "output_type": "stream",
38
+ "text": [
39
+ "[nltk_data] Downloading package punkt to C:\\Users\\Makara\n",
40
+ "[nltk_data] PC\\AppData\\Roaming\\nltk_data...\n",
41
+ "[nltk_data] Unzipping tokenizers\\punkt.zip.\n"
42
+ ]
43
+ },
44
+ {
45
+ "data": {
46
+ "text/plain": [
47
+ "True"
48
+ ]
49
+ },
50
+ "execution_count": 2,
51
+ "metadata": {},
52
+ "output_type": "execute_result"
53
+ }
54
+ ],
55
+ "source": [
56
+ "nltk.download('punkt')"
57
+ ]
58
+ },
59
+ {
60
+ "cell_type": "code",
61
+ "execution_count": 3,
62
+ "metadata": {
63
+ "colab": {
64
+ "base_uri": "https://localhost:8080/"
65
+ },
66
+ "id": "WJNKSOig29LD",
67
+ "outputId": "4a6505c1-4080-4097-d661-95275788348f"
68
+ },
69
+ "outputs": [
70
+ {
71
+ "name": "stderr",
72
+ "output_type": "stream",
73
+ "text": [
74
+ "[nltk_data] Downloading package wordnet to C:\\Users\\Makara\n",
75
+ "[nltk_data] PC\\AppData\\Roaming\\nltk_data...\n"
76
+ ]
77
+ },
78
+ {
79
+ "data": {
80
+ "text/plain": [
81
+ "True"
82
+ ]
83
+ },
84
+ "execution_count": 3,
85
+ "metadata": {},
86
+ "output_type": "execute_result"
87
+ }
88
+ ],
89
+ "source": [
90
+ "nltk.download('wordnet')"
91
+ ]
92
+ },
93
+ {
94
+ "cell_type": "code",
95
+ "execution_count": 7,
96
+ "metadata": {},
97
+ "outputs": [
98
+ {
99
+ "name": "stderr",
100
+ "output_type": "stream",
101
+ "text": [
102
+ "[nltk_data] Downloading package omw-1.4 to C:\\Users\\Makara\n",
103
+ "[nltk_data] PC\\AppData\\Roaming\\nltk_data...\n"
104
+ ]
105
+ },
106
+ {
107
+ "data": {
108
+ "text/plain": [
109
+ "True"
110
+ ]
111
+ },
112
+ "execution_count": 7,
113
+ "metadata": {},
114
+ "output_type": "execute_result"
115
+ }
116
+ ],
117
+ "source": [
118
+ "nltk.download('omw-1.4')"
119
+ ]
120
+ },
121
+ {
122
+ "cell_type": "code",
123
+ "execution_count": 4,
124
+ "metadata": {
125
+ "id": "CcRMqaqy2aXK"
126
+ },
127
+ "outputs": [],
128
+ "source": [
129
+ "words=[]\n",
130
+ "classes = []\n",
131
+ "documents = []\n",
132
+ "ignore_words = ['?', '!']\n",
133
+ "data_file = open('data.json').read()\n",
134
+ "intents = json.loads(data_file)"
135
+ ]
136
+ },
137
+ {
138
+ "cell_type": "code",
139
+ "execution_count": 5,
140
+ "metadata": {
141
+ "id": "85GcOWiP2iWf"
142
+ },
143
+ "outputs": [],
144
+ "source": [
145
+ "for intent in intents['intents']:\n",
146
+ " for pattern in intent['patterns']:\n",
147
+ " #tokenize each word\n",
148
+ " w = nltk.word_tokenize(pattern)\n",
149
+ " words.extend(w)\n",
150
+ " #add documents in the corpus\n",
151
+ " documents.append((w, intent['tag']))\n",
152
+ " # add to our classes list\n",
153
+ " if intent['tag'] not in classes:\n",
154
+ " classes.append(intent['tag'])"
155
+ ]
156
+ },
157
+ {
158
+ "cell_type": "code",
159
+ "execution_count": 8,
160
+ "metadata": {
161
+ "colab": {
162
+ "base_uri": "https://localhost:8080/"
163
+ },
164
+ "id": "p1iYlVBm2i8v",
165
+ "outputId": "a0696f92-8558-484d-fab1-8287685658cc"
166
+ },
167
+ "outputs": [
168
+ {
169
+ "name": "stdout",
170
+ "output_type": "stream",
171
+ "text": [
172
+ "47 documents\n",
173
+ "9 classes ['adverse_drug', 'blood_pressure', 'blood_pressure_search', 'goodbye', 'greeting', 'hospital_search', 'options', 'pharmacy_search', 'thanks']\n",
174
+ "88 unique lemmatized words [\"'s\", ',', 'a', 'adverse', 'all', 'anyone', 'are', 'awesome', 'be', 'behavior', 'blood', 'by', 'bye', 'can', 'causing', 'chatting', 'check', 'could', 'data', 'day', 'detail', 'do', 'dont', 'drug', 'entry', 'find', 'for', 'give', 'good', 'goodbye', 'have', 'hello', 'help', 'helpful', 'helping', 'hey', 'hi', 'history', 'hola', 'hospital', 'how', 'i', 'id', 'is', 'later', 'list', 'load', 'locate', 'log', 'looking', 'lookup', 'management', 'me', 'module', 'nearby', 'next', 'nice', 'of', 'offered', 'open', 'patient', 'pharmacy', 'pressure', 'provide', 'reaction', 'related', 'result', 'search', 'searching', 'see', 'show', 'suitable', 'support', 'task', 'thank', 'thanks', 'that', 'there', 'till', 'time', 'to', 'transfer', 'up', 'want', 'what', 'which', 'with', 'you']\n"
175
+ ]
176
+ }
177
+ ],
178
+ "source": [
179
+ "# lemmatize and lower each word and remove duplicates\n",
180
+ "words = [lemmatizer.lemmatize(w.lower()) for w in words if w not in ignore_words]\n",
181
+ "words = sorted(list(set(words)))\n",
182
+ "# sort classes\n",
183
+ "classes = sorted(list(set(classes)))\n",
184
+ "# documents = combination between patterns and intents\n",
185
+ "print (len(documents), \"documents\")\n",
186
+ "# classes = intents\n",
187
+ "print (len(classes), \"classes\", classes)\n",
188
+ "# words = all words, vocabulary\n",
189
+ "print (len(words), \"unique lemmatized words\", words)"
190
+ ]
191
+ },
192
+ {
193
+ "cell_type": "code",
194
+ "execution_count": 9,
195
+ "metadata": {
196
+ "id": "H5EZ1wf325dH"
197
+ },
198
+ "outputs": [],
199
+ "source": [
200
+ "pickle.dump(words,open('texts.pkl','wb'))\n",
201
+ "pickle.dump(classes,open('labels.pkl','wb'))"
202
+ ]
203
+ },
204
+ {
205
+ "cell_type": "code",
206
+ "execution_count": 10,
207
+ "metadata": {
208
+ "id": "oTj9egGz3CMZ"
209
+ },
210
+ "outputs": [],
211
+ "source": [
212
+ "# create our training data\n",
213
+ "training = []\n",
214
+ "# create an empty array for our output\n",
215
+ "output_empty = [0] * len(classes)\n",
216
+ "# training set, bag of words for each sentence\n",
217
+ "for doc in documents:\n",
218
+ " # initialize our bag of words\n",
219
+ " bag = []\n",
220
+ " # list of tokenized words for the pattern\n",
221
+ " pattern_words = doc[0]\n",
222
+ " # lemmatize each word - create base word, in attempt to represent related words\n",
223
+ " pattern_words = [lemmatizer.lemmatize(word.lower()) for word in pattern_words]\n",
224
+ " # create our bag of words array with 1, if word match found in current pattern\n",
225
+ " for w in words:\n",
226
+ " bag.append(1) if w in pattern_words else bag.append(0)\n",
227
+ "\n",
228
+ " # output is a '0' for each tag and '1' for current tag (for each pattern)\n",
229
+ " output_row = list(output_empty)\n",
230
+ " output_row[classes.index(doc[1])] = 1\n",
231
+ "\n",
232
+ " training.append([bag, output_row])"
233
+ ]
234
+ },
235
+ {
236
+ "cell_type": "code",
237
+ "execution_count": 11,
238
+ "metadata": {
239
+ "colab": {
240
+ "base_uri": "https://localhost:8080/"
241
+ },
242
+ "id": "TWZKpn-43KaH",
243
+ "outputId": "c2b89f6a-d1e8-4e25-908f-5b8f1a5bb84a"
244
+ },
245
+ "outputs": [
246
+ {
247
+ "name": "stdout",
248
+ "output_type": "stream",
249
+ "text": [
250
+ "Training data created\n"
251
+ ]
252
+ },
253
+ {
254
+ "name": "stderr",
255
+ "output_type": "stream",
256
+ "text": [
257
+ "c:\\Users\\Makara PC\\.conda\\envs\\chat-bot-app\\lib\\site-packages\\ipykernel_launcher.py:3: VisibleDeprecationWarning: Creating an ndarray from ragged nested sequences (which is a list-or-tuple of lists-or-tuples-or ndarrays with different lengths or shapes) is deprecated. If you meant to do this, you must specify 'dtype=object' when creating the ndarray\n",
258
+ " This is separate from the ipykernel package so we can avoid doing imports until\n"
259
+ ]
260
+ }
261
+ ],
262
+ "source": [
263
+ "# shuffle our features and turn into np.array\n",
264
+ "random.shuffle(training)\n",
265
+ "training = np.array(training)\n",
266
+ "# create train and test lists. X - patterns, Y - intents\n",
267
+ "train_x = list(training[:,0])\n",
268
+ "train_y = list(training[:,1])\n",
269
+ "print(\"Training data created\")"
270
+ ]
271
+ },
272
+ {
273
+ "cell_type": "code",
274
+ "execution_count": 12,
275
+ "metadata": {
276
+ "id": "c4rbUrWB3MAX"
277
+ },
278
+ "outputs": [],
279
+ "source": [
280
+ "# Create model - 3 layers. First layer 128 neurons, second layer 64 neurons and 3rd output layer contains number of neurons\n",
281
+ "# equal to number of intents to predict output intent with softmax\n",
282
+ "model = Sequential()\n",
283
+ "model.add(Dense(128, input_shape=(len(train_x[0]),), activation='relu'))\n",
284
+ "model.add(Dropout(0.5))\n",
285
+ "model.add(Dense(64, activation='relu'))\n",
286
+ "model.add(Dropout(0.5))\n",
287
+ "model.add(Dense(len(train_y[0]), activation='softmax'))"
288
+ ]
289
+ },
290
+ {
291
+ "cell_type": "code",
292
+ "execution_count": 21,
293
+ "metadata": {
294
+ "colab": {
295
+ "base_uri": "https://localhost:8080/"
296
+ },
297
+ "id": "fRmg-rBd3OnQ",
298
+ "outputId": "5369506c-da45-4dd5-8773-59f52875bc68"
299
+ },
300
+ "outputs": [],
301
+ "source": [
302
+ "# Compile model. Stochastic gradient descent with Nesterov accelerated gradient gives good results for this model\n",
303
+ "sgd = SGD(lr=0.01, decay=1e-6, momentum=0.9, nesterov=True)\n",
304
+ "model.compile(loss='categorical_crossentropy', optimizer=sgd, metrics=['accuracy'])"
305
+ ]
306
+ },
307
+ {
308
+ "cell_type": "code",
309
+ "execution_count": 22,
310
+ "metadata": {
311
+ "colab": {
312
+ "base_uri": "https://localhost:8080/"
313
+ },
314
+ "id": "DeD0fV0c3RBn",
315
+ "outputId": "392059e2-dfe7-46a0-b2c8-db2f3702a483"
316
+ },
317
+ "outputs": [
318
+ {
319
+ "name": "stdout",
320
+ "output_type": "stream",
321
+ "text": [
322
+ "Epoch 1/200\n",
323
+ "10/10 [==============================] - 1s 2ms/step - loss: 2.2413 - accuracy: 0.1064\n",
324
+ "Epoch 2/200\n",
325
+ "10/10 [==============================] - 0s 3ms/step - loss: 2.1823 - accuracy: 0.2340\n",
326
+ "Epoch 3/200\n",
327
+ "10/10 [==============================] - 0s 2ms/step - loss: 2.1345 - accuracy: 0.2128\n",
328
+ "Epoch 4/200\n",
329
+ "10/10 [==============================] - 0s 2ms/step - loss: 1.9794 - accuracy: 0.3191\n",
330
+ "Epoch 5/200\n",
331
+ "10/10 [==============================] - 0s 3ms/step - loss: 1.8818 - accuracy: 0.3191\n",
332
+ "Epoch 6/200\n",
333
+ "10/10 [==============================] - 0s 2ms/step - loss: 1.7872 - accuracy: 0.4043\n",
334
+ "Epoch 7/200\n",
335
+ "10/10 [==============================] - 0s 2ms/step - loss: 1.6584 - accuracy: 0.5106\n",
336
+ "Epoch 8/200\n",
337
+ "10/10 [==============================] - 0s 1ms/step - loss: 1.5289 - accuracy: 0.5319\n",
338
+ "Epoch 9/200\n",
339
+ "10/10 [==============================] - 0s 1ms/step - loss: 1.4448 - accuracy: 0.5957\n",
340
+ "Epoch 10/200\n",
341
+ "10/10 [==============================] - 0s 2ms/step - loss: 1.2668 - accuracy: 0.5957\n",
342
+ "Epoch 11/200\n",
343
+ "10/10 [==============================] - 0s 2ms/step - loss: 1.2086 - accuracy: 0.6809\n",
344
+ "Epoch 12/200\n",
345
+ "10/10 [==============================] - 0s 1ms/step - loss: 0.9905 - accuracy: 0.8085\n",
346
+ "Epoch 13/200\n",
347
+ "10/10 [==============================] - 0s 2ms/step - loss: 1.0099 - accuracy: 0.7872\n",
348
+ "Epoch 14/200\n",
349
+ "10/10 [==============================] - 0s 2ms/step - loss: 0.9804 - accuracy: 0.7234\n",
350
+ "Epoch 15/200\n",
351
+ "10/10 [==============================] - 0s 2ms/step - loss: 0.8112 - accuracy: 0.8298\n",
352
+ "Epoch 16/200\n",
353
+ "10/10 [==============================] - 0s 7ms/step - loss: 0.7849 - accuracy: 0.7447\n",
354
+ "Epoch 17/200\n",
355
+ "10/10 [==============================] - 0s 3ms/step - loss: 0.6714 - accuracy: 0.7872\n",
356
+ "Epoch 18/200\n",
357
+ "10/10 [==============================] - 0s 3ms/step - loss: 0.6601 - accuracy: 0.7872\n",
358
+ "Epoch 19/200\n",
359
+ "10/10 [==============================] - 0s 3ms/step - loss: 0.4989 - accuracy: 0.8936\n",
360
+ "Epoch 20/200\n",
361
+ "10/10 [==============================] - 0s 2ms/step - loss: 0.7604 - accuracy: 0.7447\n",
362
+ "Epoch 21/200\n",
363
+ "10/10 [==============================] - 0s 3ms/step - loss: 0.7019 - accuracy: 0.7872\n",
364
+ "Epoch 22/200\n",
365
+ "10/10 [==============================] - 0s 8ms/step - loss: 0.5007 - accuracy: 0.8936\n",
366
+ "Epoch 23/200\n",
367
+ "10/10 [==============================] - 0s 3ms/step - loss: 0.4494 - accuracy: 0.8723\n",
368
+ "Epoch 24/200\n",
369
+ "10/10 [==============================] - 0s 3ms/step - loss: 0.3297 - accuracy: 0.9362\n",
370
+ "Epoch 25/200\n",
371
+ "10/10 [==============================] - 0s 3ms/step - loss: 0.3112 - accuracy: 0.9362\n",
372
+ "Epoch 26/200\n",
373
+ "10/10 [==============================] - 0s 3ms/step - loss: 0.3624 - accuracy: 0.9362\n",
374
+ "Epoch 27/200\n",
375
+ "10/10 [==============================] - 0s 2ms/step - loss: 0.2498 - accuracy: 0.9362\n",
376
+ "Epoch 28/200\n",
377
+ "10/10 [==============================] - 0s 3ms/step - loss: 0.2607 - accuracy: 0.9362\n",
378
+ "Epoch 29/200\n",
379
+ "10/10 [==============================] - 0s 3ms/step - loss: 0.2573 - accuracy: 0.9362\n",
380
+ "Epoch 30/200\n",
381
+ "10/10 [==============================] - 0s 3ms/step - loss: 0.1811 - accuracy: 0.9787\n",
382
+ "Epoch 31/200\n",
383
+ "10/10 [==============================] - 0s 3ms/step - loss: 0.3079 - accuracy: 0.8936\n",
384
+ "Epoch 32/200\n",
385
+ "10/10 [==============================] - 0s 3ms/step - loss: 0.2232 - accuracy: 0.9574\n",
386
+ "Epoch 33/200\n",
387
+ "10/10 [==============================] - 0s 3ms/step - loss: 0.1904 - accuracy: 0.9787\n",
388
+ "Epoch 34/200\n",
389
+ "10/10 [==============================] - 0s 3ms/step - loss: 0.2117 - accuracy: 0.9149\n",
390
+ "Epoch 35/200\n",
391
+ "10/10 [==============================] - 0s 3ms/step - loss: 0.1476 - accuracy: 0.9787\n",
392
+ "Epoch 36/200\n",
393
+ "10/10 [==============================] - 0s 3ms/step - loss: 0.1317 - accuracy: 1.0000\n",
394
+ "Epoch 37/200\n",
395
+ "10/10 [==============================] - 0s 3ms/step - loss: 0.0762 - accuracy: 1.0000\n",
396
+ "Epoch 38/200\n",
397
+ "10/10 [==============================] - 0s 4ms/step - loss: 0.1502 - accuracy: 0.9149\n",
398
+ "Epoch 39/200\n",
399
+ "10/10 [==============================] - 0s 4ms/step - loss: 0.1191 - accuracy: 0.9574\n",
400
+ "Epoch 40/200\n",
401
+ "10/10 [==============================] - 0s 4ms/step - loss: 0.1490 - accuracy: 0.9787\n",
402
+ "Epoch 41/200\n",
403
+ "10/10 [==============================] - 0s 4ms/step - loss: 0.2177 - accuracy: 0.9574\n",
404
+ "Epoch 42/200\n",
405
+ "10/10 [==============================] - 0s 5ms/step - loss: 0.1596 - accuracy: 0.9574\n",
406
+ "Epoch 43/200\n",
407
+ "10/10 [==============================] - 0s 4ms/step - loss: 0.1574 - accuracy: 0.9574\n",
408
+ "Epoch 44/200\n",
409
+ "10/10 [==============================] - 0s 4ms/step - loss: 0.2133 - accuracy: 0.9149\n",
410
+ "Epoch 45/200\n",
411
+ "10/10 [==============================] - 0s 2ms/step - loss: 0.1228 - accuracy: 0.9787\n",
412
+ "Epoch 46/200\n",
413
+ "10/10 [==============================] - 0s 7ms/step - loss: 0.1345 - accuracy: 0.9574\n",
414
+ "Epoch 47/200\n",
415
+ "10/10 [==============================] - 0s 4ms/step - loss: 0.1022 - accuracy: 0.9787\n",
416
+ "Epoch 48/200\n",
417
+ "10/10 [==============================] - 0s 3ms/step - loss: 0.1116 - accuracy: 0.9574\n",
418
+ "Epoch 49/200\n",
419
+ "10/10 [==============================] - 0s 4ms/step - loss: 0.1366 - accuracy: 0.9362\n",
420
+ "Epoch 50/200\n",
421
+ "10/10 [==============================] - 0s 4ms/step - loss: 0.1418 - accuracy: 0.9574\n",
422
+ "Epoch 51/200\n",
423
+ "10/10 [==============================] - 0s 3ms/step - loss: 0.1272 - accuracy: 1.0000\n",
424
+ "Epoch 52/200\n",
425
+ "10/10 [==============================] - 0s 3ms/step - loss: 0.0773 - accuracy: 1.0000\n",
426
+ "Epoch 53/200\n",
427
+ "10/10 [==============================] - 0s 3ms/step - loss: 0.0499 - accuracy: 1.0000\n",
428
+ "Epoch 54/200\n",
429
+ "10/10 [==============================] - 0s 3ms/step - loss: 0.1199 - accuracy: 0.9574\n",
430
+ "Epoch 55/200\n",
431
+ "10/10 [==============================] - 0s 5ms/step - loss: 0.1400 - accuracy: 1.0000\n",
432
+ "Epoch 56/200\n",
433
+ "10/10 [==============================] - 0s 5ms/step - loss: 0.0842 - accuracy: 0.9787\n",
434
+ "Epoch 57/200\n",
435
+ "10/10 [==============================] - 0s 4ms/step - loss: 0.1037 - accuracy: 0.9787\n",
436
+ "Epoch 58/200\n",
437
+ "10/10 [==============================] - 0s 2ms/step - loss: 0.1494 - accuracy: 0.9362\n",
438
+ "Epoch 59/200\n",
439
+ "10/10 [==============================] - 0s 2ms/step - loss: 0.0432 - accuracy: 1.0000\n",
440
+ "Epoch 60/200\n",
441
+ "10/10 [==============================] - 0s 6ms/step - loss: 0.0823 - accuracy: 0.9787\n",
442
+ "Epoch 61/200\n",
443
+ "10/10 [==============================] - 0s 3ms/step - loss: 0.0534 - accuracy: 1.0000\n",
444
+ "Epoch 62/200\n",
445
+ "10/10 [==============================] - 0s 3ms/step - loss: 0.0671 - accuracy: 1.0000\n",
446
+ "Epoch 63/200\n",
447
+ "10/10 [==============================] - 0s 4ms/step - loss: 0.0628 - accuracy: 1.0000\n",
448
+ "Epoch 64/200\n",
449
+ "10/10 [==============================] - 0s 3ms/step - loss: 0.0229 - accuracy: 1.0000\n",
450
+ "Epoch 65/200\n",
451
+ "10/10 [==============================] - 0s 3ms/step - loss: 0.0617 - accuracy: 1.0000\n",
452
+ "Epoch 66/200\n",
453
+ "10/10 [==============================] - 0s 4ms/step - loss: 0.0603 - accuracy: 0.9787\n",
454
+ "Epoch 67/200\n",
455
+ "10/10 [==============================] - 0s 4ms/step - loss: 0.0239 - accuracy: 1.0000\n",
456
+ "Epoch 68/200\n",
457
+ "10/10 [==============================] - 0s 4ms/step - loss: 0.1004 - accuracy: 0.9574\n",
458
+ "Epoch 69/200\n",
459
+ "10/10 [==============================] - 0s 4ms/step - loss: 0.0582 - accuracy: 0.9787\n",
460
+ "Epoch 70/200\n",
461
+ "10/10 [==============================] - 0s 3ms/step - loss: 0.1214 - accuracy: 0.9362\n",
462
+ "Epoch 71/200\n",
463
+ "10/10 [==============================] - 0s 3ms/step - loss: 0.0270 - accuracy: 1.0000\n",
464
+ "Epoch 72/200\n",
465
+ "10/10 [==============================] - 0s 3ms/step - loss: 0.0916 - accuracy: 0.9787\n",
466
+ "Epoch 73/200\n",
467
+ "10/10 [==============================] - 0s 3ms/step - loss: 0.0413 - accuracy: 1.0000\n",
468
+ "Epoch 74/200\n",
469
+ "10/10 [==============================] - 0s 3ms/step - loss: 0.0502 - accuracy: 1.0000\n",
470
+ "Epoch 75/200\n",
471
+ "10/10 [==============================] - 0s 3ms/step - loss: 0.0618 - accuracy: 0.9787\n",
472
+ "Epoch 76/200\n",
473
+ "10/10 [==============================] - 0s 3ms/step - loss: 0.0723 - accuracy: 1.0000\n",
474
+ "Epoch 77/200\n",
475
+ "10/10 [==============================] - 0s 2ms/step - loss: 0.1292 - accuracy: 0.9574\n",
476
+ "Epoch 78/200\n",
477
+ "10/10 [==============================] - ETA: 0s - loss: 0.0038 - accuracy: 1.00 - 0s 1ms/step - loss: 0.0298 - accuracy: 1.0000\n",
478
+ "Epoch 79/200\n",
479
+ "10/10 [==============================] - 0s 2ms/step - loss: 0.0176 - accuracy: 1.0000\n",
480
+ "Epoch 80/200\n",
481
+ "10/10 [==============================] - 0s 3ms/step - loss: 0.1068 - accuracy: 0.9574\n",
482
+ "Epoch 81/200\n",
483
+ "10/10 [==============================] - 0s 3ms/step - loss: 0.0200 - accuracy: 1.0000\n",
484
+ "Epoch 82/200\n",
485
+ "10/10 [==============================] - 0s 1ms/step - loss: 0.0183 - accuracy: 1.0000\n",
486
+ "Epoch 83/200\n",
487
+ "10/10 [==============================] - 0s 3ms/step - loss: 0.0467 - accuracy: 1.0000\n",
488
+ "Epoch 84/200\n",
489
+ "10/10 [==============================] - 0s 2ms/step - loss: 0.0539 - accuracy: 1.0000\n",
490
+ "Epoch 85/200\n",
491
+ "10/10 [==============================] - 0s 5ms/step - loss: 0.0998 - accuracy: 0.9574\n",
492
+ "Epoch 86/200\n",
493
+ "10/10 [==============================] - 0s 4ms/step - loss: 0.1305 - accuracy: 0.9574\n",
494
+ "Epoch 87/200\n",
495
+ "10/10 [==============================] - 0s 4ms/step - loss: 0.0236 - accuracy: 1.0000\n",
496
+ "Epoch 88/200\n",
497
+ "10/10 [==============================] - 0s 3ms/step - loss: 0.0365 - accuracy: 1.0000\n",
498
+ "Epoch 89/200\n",
499
+ "10/10 [==============================] - 0s 4ms/step - loss: 0.0752 - accuracy: 0.9787\n",
500
+ "Epoch 90/200\n",
501
+ "10/10 [==============================] - 0s 2ms/step - loss: 0.0443 - accuracy: 1.0000\n",
502
+ "Epoch 91/200\n",
503
+ "10/10 [==============================] - 0s 3ms/step - loss: 0.0955 - accuracy: 0.9787\n",
504
+ "Epoch 92/200\n",
505
+ "10/10 [==============================] - 0s 3ms/step - loss: 0.0447 - accuracy: 1.0000\n",
506
+ "Epoch 93/200\n",
507
+ "10/10 [==============================] - 0s 2ms/step - loss: 0.0775 - accuracy: 0.9787\n",
508
+ "Epoch 94/200\n",
509
+ "10/10 [==============================] - 0s 8ms/step - loss: 0.0479 - accuracy: 1.0000\n",
510
+ "Epoch 95/200\n",
511
+ "10/10 [==============================] - 0s 3ms/step - loss: 0.0529 - accuracy: 1.0000\n",
512
+ "Epoch 96/200\n",
513
+ "10/10 [==============================] - 0s 2ms/step - loss: 0.0087 - accuracy: 1.0000\n",
514
+ "Epoch 97/200\n",
515
+ "10/10 [==============================] - 0s 2ms/step - loss: 0.0415 - accuracy: 1.0000\n",
516
+ "Epoch 98/200\n",
517
+ "10/10 [==============================] - 0s 4ms/step - loss: 0.0348 - accuracy: 1.0000\n",
518
+ "Epoch 99/200\n",
519
+ "10/10 [==============================] - 0s 3ms/step - loss: 0.0130 - accuracy: 1.0000\n",
520
+ "Epoch 100/200\n",
521
+ "10/10 [==============================] - 0s 4ms/step - loss: 0.0250 - accuracy: 1.0000\n",
522
+ "Epoch 101/200\n",
523
+ "10/10 [==============================] - 0s 6ms/step - loss: 0.0513 - accuracy: 0.9787\n",
524
+ "Epoch 102/200\n",
525
+ "10/10 [==============================] - 0s 4ms/step - loss: 0.0326 - accuracy: 0.9787\n",
526
+ "Epoch 103/200\n",
527
+ "10/10 [==============================] - 0s 3ms/step - loss: 0.0516 - accuracy: 0.9787\n",
528
+ "Epoch 104/200\n",
529
+ "10/10 [==============================] - 0s 3ms/step - loss: 0.0376 - accuracy: 1.0000\n",
530
+ "Epoch 105/200\n",
531
+ "10/10 [==============================] - 0s 5ms/step - loss: 0.0236 - accuracy: 1.0000\n",
532
+ "Epoch 106/200\n",
533
+ "10/10 [==============================] - 0s 3ms/step - loss: 0.0146 - accuracy: 1.0000\n",
534
+ "Epoch 107/200\n",
535
+ "10/10 [==============================] - 0s 3ms/step - loss: 0.0327 - accuracy: 1.0000\n",
536
+ "Epoch 108/200\n",
537
+ "10/10 [==============================] - 0s 2ms/step - loss: 0.0333 - accuracy: 1.0000\n",
538
+ "Epoch 109/200\n",
539
+ "10/10 [==============================] - 0s 2ms/step - loss: 0.0046 - accuracy: 1.0000\n",
540
+ "Epoch 110/200\n",
541
+ "10/10 [==============================] - 0s 3ms/step - loss: 0.0920 - accuracy: 0.9574\n",
542
+ "Epoch 111/200\n",
543
+ "10/10 [==============================] - 0s 2ms/step - loss: 0.0155 - accuracy: 1.0000\n",
544
+ "Epoch 112/200\n",
545
+ "10/10 [==============================] - 0s 3ms/step - loss: 0.0148 - accuracy: 1.0000\n",
546
+ "Epoch 113/200\n",
547
+ "10/10 [==============================] - 0s 4ms/step - loss: 0.0248 - accuracy: 1.0000\n",
548
+ "Epoch 114/200\n",
549
+ "10/10 [==============================] - 0s 3ms/step - loss: 0.0260 - accuracy: 1.0000\n",
550
+ "Epoch 115/200\n",
551
+ "10/10 [==============================] - 0s 1ms/step - loss: 0.0162 - accuracy: 1.0000\n",
552
+ "Epoch 116/200\n",
553
+ "10/10 [==============================] - 0s 3ms/step - loss: 0.0659 - accuracy: 0.9787\n",
554
+ "Epoch 117/200\n",
555
+ "10/10 [==============================] - 0s 3ms/step - loss: 0.0618 - accuracy: 0.9787\n",
556
+ "Epoch 118/200\n",
557
+ "10/10 [==============================] - 0s 4ms/step - loss: 0.0301 - accuracy: 1.0000\n",
558
+ "Epoch 119/200\n",
559
+ "10/10 [==============================] - 0s 4ms/step - loss: 0.0334 - accuracy: 1.0000\n",
560
+ "Epoch 120/200\n",
561
+ "10/10 [==============================] - 0s 3ms/step - loss: 0.0224 - accuracy: 1.0000\n",
562
+ "Epoch 121/200\n",
563
+ "10/10 [==============================] - 0s 3ms/step - loss: 0.1810 - accuracy: 0.9574\n",
564
+ "Epoch 122/200\n",
565
+ "10/10 [==============================] - 0s 3ms/step - loss: 0.0677 - accuracy: 1.0000\n",
566
+ "Epoch 123/200\n",
567
+ "10/10 [==============================] - 0s 3ms/step - loss: 0.0693 - accuracy: 0.9787\n",
568
+ "Epoch 124/200\n",
569
+ "10/10 [==============================] - 0s 2ms/step - loss: 0.0523 - accuracy: 0.9787\n",
570
+ "Epoch 125/200\n",
571
+ "10/10 [==============================] - 0s 4ms/step - loss: 0.0281 - accuracy: 1.0000\n",
572
+ "Epoch 126/200\n",
573
+ "10/10 [==============================] - 0s 2ms/step - loss: 0.0209 - accuracy: 1.0000\n",
574
+ "Epoch 127/200\n",
575
+ "10/10 [==============================] - 0s 1ms/step - loss: 0.0405 - accuracy: 0.9787\n",
576
+ "Epoch 128/200\n",
577
+ "10/10 [==============================] - 0s 2ms/step - loss: 0.0093 - accuracy: 1.0000\n",
578
+ "Epoch 129/200\n",
579
+ "10/10 [==============================] - ETA: 0s - loss: 0.0834 - accuracy: 1.00 - 0s 2ms/step - loss: 0.0413 - accuracy: 1.0000\n",
580
+ "Epoch 130/200\n",
581
+ "10/10 [==============================] - 0s 3ms/step - loss: 0.0122 - accuracy: 1.0000\n",
582
+ "Epoch 131/200\n",
583
+ "10/10 [==============================] - 0s 3ms/step - loss: 0.0125 - accuracy: 1.0000\n",
584
+ "Epoch 132/200\n",
585
+ "10/10 [==============================] - 0s 3ms/step - loss: 0.0099 - accuracy: 1.0000\n",
586
+ "Epoch 133/200\n",
587
+ "10/10 [==============================] - 0s 3ms/step - loss: 0.0281 - accuracy: 1.0000\n",
588
+ "Epoch 134/200\n",
589
+ "10/10 [==============================] - 0s 3ms/step - loss: 0.0179 - accuracy: 1.0000\n",
590
+ "Epoch 135/200\n",
591
+ "10/10 [==============================] - 0s 3ms/step - loss: 0.0070 - accuracy: 1.0000\n",
592
+ "Epoch 136/200\n",
593
+ "10/10 [==============================] - 0s 3ms/step - loss: 0.0456 - accuracy: 1.0000\n",
594
+ "Epoch 137/200\n",
595
+ "10/10 [==============================] - 0s 2ms/step - loss: 0.0493 - accuracy: 0.9787\n",
596
+ "Epoch 138/200\n",
597
+ "10/10 [==============================] - 0s 3ms/step - loss: 0.0211 - accuracy: 1.0000\n",
598
+ "Epoch 139/200\n",
599
+ "10/10 [==============================] - 0s 2ms/step - loss: 0.0098 - accuracy: 1.0000\n",
600
+ "Epoch 140/200\n",
601
+ "10/10 [==============================] - 0s 2ms/step - loss: 0.0306 - accuracy: 1.0000\n",
602
+ "Epoch 141/200\n",
603
+ "10/10 [==============================] - 0s 3ms/step - loss: 0.0076 - accuracy: 1.0000\n",
604
+ "Epoch 142/200\n",
605
+ "10/10 [==============================] - 0s 3ms/step - loss: 0.0605 - accuracy: 0.9787\n",
606
+ "Epoch 143/200\n",
607
+ "10/10 [==============================] - 0s 3ms/step - loss: 0.0273 - accuracy: 1.0000\n",
608
+ "Epoch 144/200\n",
609
+ "10/10 [==============================] - 0s 3ms/step - loss: 0.0450 - accuracy: 1.0000\n",
610
+ "Epoch 145/200\n",
611
+ "10/10 [==============================] - 0s 1ms/step - loss: 0.0090 - accuracy: 1.0000\n",
612
+ "Epoch 146/200\n",
613
+ "10/10 [==============================] - 0s 2ms/step - loss: 0.0230 - accuracy: 1.0000\n",
614
+ "Epoch 147/200\n",
615
+ "10/10 [==============================] - 0s 3ms/step - loss: 0.0096 - accuracy: 1.0000\n",
616
+ "Epoch 148/200\n",
617
+ "10/10 [==============================] - 0s 3ms/step - loss: 0.0137 - accuracy: 1.0000\n",
618
+ "Epoch 149/200\n",
619
+ "10/10 [==============================] - 0s 2ms/step - loss: 0.0288 - accuracy: 1.0000\n",
620
+ "Epoch 150/200\n",
621
+ "10/10 [==============================] - 0s 2ms/step - loss: 0.0313 - accuracy: 1.0000\n",
622
+ "Epoch 151/200\n",
623
+ "10/10 [==============================] - 0s 3ms/step - loss: 0.0315 - accuracy: 1.0000\n",
624
+ "Epoch 152/200\n",
625
+ "10/10 [==============================] - ETA: 0s - loss: 4.1381e-04 - accuracy: 1.00 - 0s 2ms/step - loss: 0.0146 - accuracy: 1.0000\n",
626
+ "Epoch 153/200\n",
627
+ "10/10 [==============================] - 0s 3ms/step - loss: 0.0198 - accuracy: 1.0000\n",
628
+ "Epoch 154/200\n",
629
+ "10/10 [==============================] - 0s 2ms/step - loss: 0.0291 - accuracy: 0.9787\n",
630
+ "Epoch 155/200\n",
631
+ "10/10 [==============================] - 0s 1ms/step - loss: 0.0294 - accuracy: 0.9787\n",
632
+ "Epoch 156/200\n",
633
+ "10/10 [==============================] - 0s 2ms/step - loss: 0.0085 - accuracy: 1.0000\n",
634
+ "Epoch 157/200\n",
635
+ "10/10 [==============================] - 0s 998us/step - loss: 0.0434 - accuracy: 0.9787\n",
636
+ "Epoch 158/200\n",
637
+ "10/10 [==============================] - 0s 3ms/step - loss: 0.0236 - accuracy: 1.0000\n",
638
+ "Epoch 159/200\n",
639
+ "10/10 [==============================] - 0s 3ms/step - loss: 0.0070 - accuracy: 1.0000\n",
640
+ "Epoch 160/200\n",
641
+ "10/10 [==============================] - 0s 3ms/step - loss: 0.0170 - accuracy: 1.0000\n",
642
+ "Epoch 161/200\n",
643
+ "10/10 [==============================] - 0s 4ms/step - loss: 0.0199 - accuracy: 1.0000\n",
644
+ "Epoch 162/200\n",
645
+ "10/10 [==============================] - 0s 3ms/step - loss: 0.0073 - accuracy: 1.0000\n",
646
+ "Epoch 163/200\n",
647
+ "10/10 [==============================] - 0s 3ms/step - loss: 0.0289 - accuracy: 1.0000\n",
648
+ "Epoch 164/200\n",
649
+ "10/10 [==============================] - 0s 3ms/step - loss: 0.0165 - accuracy: 1.0000\n",
650
+ "Epoch 165/200\n",
651
+ "10/10 [==============================] - 0s 3ms/step - loss: 0.0180 - accuracy: 1.0000\n",
652
+ "Epoch 166/200\n",
653
+ "10/10 [==============================] - 0s 3ms/step - loss: 0.0083 - accuracy: 1.0000\n",
654
+ "Epoch 167/200\n",
655
+ "10/10 [==============================] - 0s 2ms/step - loss: 0.0038 - accuracy: 1.0000\n",
656
+ "Epoch 168/200\n",
657
+ "10/10 [==============================] - 0s 6ms/step - loss: 0.0112 - accuracy: 1.0000\n",
658
+ "Epoch 169/200\n",
659
+ "10/10 [==============================] - 0s 15ms/step - loss: 0.0166 - accuracy: 1.0000\n",
660
+ "Epoch 170/200\n",
661
+ "10/10 [==============================] - 0s 2ms/step - loss: 0.0041 - accuracy: 1.0000\n",
662
+ "Epoch 171/200\n",
663
+ "10/10 [==============================] - 0s 5ms/step - loss: 0.0424 - accuracy: 0.9787\n",
664
+ "Epoch 172/200\n",
665
+ "10/10 [==============================] - 0s 5ms/step - loss: 0.0393 - accuracy: 0.9787\n",
666
+ "Epoch 173/200\n",
667
+ "10/10 [==============================] - 0s 4ms/step - loss: 0.0543 - accuracy: 0.9787\n",
668
+ "Epoch 174/200\n",
669
+ "10/10 [==============================] - 0s 3ms/step - loss: 0.0177 - accuracy: 1.0000\n",
670
+ "Epoch 175/200\n",
671
+ "10/10 [==============================] - 0s 2ms/step - loss: 0.0305 - accuracy: 0.9787\n",
672
+ "Epoch 176/200\n",
673
+ "10/10 [==============================] - 0s 3ms/step - loss: 0.0069 - accuracy: 1.0000\n",
674
+ "Epoch 177/200\n",
675
+ "10/10 [==============================] - 0s 3ms/step - loss: 0.0440 - accuracy: 0.9787\n",
676
+ "Epoch 178/200\n",
677
+ "10/10 [==============================] - 0s 4ms/step - loss: 0.0337 - accuracy: 1.0000\n",
678
+ "Epoch 179/200\n",
679
+ "10/10 [==============================] - 0s 3ms/step - loss: 0.0526 - accuracy: 0.9787\n",
680
+ "Epoch 180/200\n",
681
+ "10/10 [==============================] - 0s 3ms/step - loss: 0.0137 - accuracy: 1.0000\n",
682
+ "Epoch 181/200\n",
683
+ "10/10 [==============================] - 0s 3ms/step - loss: 0.0091 - accuracy: 1.0000\n",
684
+ "Epoch 182/200\n",
685
+ "10/10 [==============================] - 0s 3ms/step - loss: 0.0177 - accuracy: 1.0000\n",
686
+ "Epoch 183/200\n",
687
+ "10/10 [==============================] - 0s 3ms/step - loss: 0.0137 - accuracy: 1.0000\n",
688
+ "Epoch 184/200\n",
689
+ "10/10 [==============================] - 0s 2ms/step - loss: 0.0099 - accuracy: 1.0000\n",
690
+ "Epoch 185/200\n",
691
+ "10/10 [==============================] - 0s 2ms/step - loss: 0.1220 - accuracy: 0.9574\n",
692
+ "Epoch 186/200\n",
693
+ "10/10 [==============================] - 0s 740us/step - loss: 0.0532 - accuracy: 0.9787\n",
694
+ "Epoch 187/200\n",
695
+ "10/10 [==============================] - ETA: 0s - loss: 5.3321e-04 - accuracy: 1.00 - 0s 3ms/step - loss: 0.0055 - accuracy: 1.0000\n",
696
+ "Epoch 188/200\n",
697
+ "10/10 [==============================] - 0s 2ms/step - loss: 0.0095 - accuracy: 1.0000\n",
698
+ "Epoch 189/200\n",
699
+ "10/10 [==============================] - 0s 3ms/step - loss: 0.0052 - accuracy: 1.0000\n",
700
+ "Epoch 190/200\n",
701
+ "10/10 [==============================] - 0s 1ms/step - loss: 0.0429 - accuracy: 1.0000\n",
702
+ "Epoch 191/200\n",
703
+ "10/10 [==============================] - 0s 4ms/step - loss: 0.0163 - accuracy: 1.0000\n",
704
+ "Epoch 192/200\n",
705
+ "10/10 [==============================] - 0s 3ms/step - loss: 0.0160 - accuracy: 1.0000\n",
706
+ "Epoch 193/200\n",
707
+ "10/10 [==============================] - 0s 2ms/step - loss: 0.0227 - accuracy: 1.0000\n",
708
+ "Epoch 194/200\n",
709
+ "10/10 [==============================] - 0s 4ms/step - loss: 0.0065 - accuracy: 1.0000\n",
710
+ "Epoch 195/200\n",
711
+ "10/10 [==============================] - 0s 3ms/step - loss: 0.0052 - accuracy: 1.0000\n",
712
+ "Epoch 196/200\n",
713
+ "10/10 [==============================] - 0s 2ms/step - loss: 9.3289e-04 - accuracy: 1.0000\n",
714
+ "Epoch 197/200\n",
715
+ "10/10 [==============================] - 0s 2ms/step - loss: 0.0115 - accuracy: 1.0000\n",
716
+ "Epoch 198/200\n",
717
+ "10/10 [==============================] - 0s 1ms/step - loss: 0.0444 - accuracy: 0.9787\n",
718
+ "Epoch 199/200\n",
719
+ "10/10 [==============================] - 0s 2ms/step - loss: 0.0101 - accuracy: 1.0000\n",
720
+ "Epoch 200/200\n",
721
+ "10/10 [==============================] - 0s 2ms/step - loss: 0.0175 - accuracy: 1.0000\n"
722
+ ]
723
+ }
724
+ ],
725
+ "source": [
726
+ "# Fitting and saving the model\n",
727
+ "hist = model.fit(np.array(train_x), np.array(train_y), epochs=200, batch_size=5, verbose=1)\n",
728
+ "model.save('model.h5', hist)"
729
+ ]
730
+ }
731
+ ],
732
+ "metadata": {
733
+ "colab": {
734
+ "provenance": []
735
+ },
736
+ "kernelspec": {
737
+ "display_name": "Python 3",
738
+ "name": "python3"
739
+ },
740
+ "language_info": {
741
+ "codemirror_mode": {
742
+ "name": "ipython",
743
+ "version": 3
744
+ },
745
+ "file_extension": ".py",
746
+ "mimetype": "text/x-python",
747
+ "name": "python",
748
+ "nbconvert_exporter": "python",
749
+ "pygments_lexer": "ipython3",
750
+ "version": "3.6.13"
751
+ }
752
+ },
753
+ "nbformat": 4,
754
+ "nbformat_minor": 0
755
+ }