app.py
from flask import Flask, render_template
from form import ReusableForm
from keras.preprocessing import sequence
from keras.models import load_model
import nltk
import pickle
import numpy as np

app = Flask(__name__)
app.config['SECRET_KEY'] = '5791628bb0b13ce0c676dfde280ba245'

# Vocabulary mappings produced at training time
word2index = pickle.load(open('word2index.p', 'rb'))
index2word = pickle.load(open('index2word.p', 'rb'))

model_filename = "model.h5"
MAX_SENTENCE_LENGTH = 500


# Home page
@app.route("/", methods=['GET', 'POST'])
def home():
    """Home page of app with form"""
    # Create form
    form = ReusableForm()
    isHappy = True
    if form.is_submitted():
        isHappy = predict(form.text.data)
    # Send template information to index.html
    return render_template('index.html', form=form, isHappy=isHappy)


def predict(text):
    """Load the trained model and classify a single piece of text."""
    model = load_model(model_filename)
    ntext = normalize(np.array([text]))
    predictions = model.predict(ntext)
    # Threshold the sigmoid output and return the result for the single input
    return denormalize_response(predictions)[0]


def denormalize_response(predictions):
    """Map model outputs to booleans using a 0.5 threshold."""
    return [True if x > 0.5 else False for x in predictions]


def normalize(train_description):
    """Tokenize sentences, map words to vocabulary indices, and pad to a fixed length."""
    X = np.empty((train_description.size,), dtype=list)
    i = 0
    for sentence in train_description:
        words = nltk.word_tokenize(sentence.lower())
        seqs = []
        for word in words:
            if word in word2index:
                seqs.append(word2index[word])
            else:
                # Out-of-vocabulary words map to the UNK token
                seqs.append(word2index["UNK"])
        X[i] = seqs
        i += 1
    return sequence.pad_sequences(X, maxlen=MAX_SENTENCE_LENGTH)


if __name__ == '__main__':
    app.run(host='0.0.0.0', port=50000, debug=True)
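app.py imports ReusableForm from a form module that is not shown on this page. Below is a minimal sketch of what that module might contain, assuming the app uses Flask-WTF/WTForms; only the field name text is required by home(), and the labels and validators are assumptions rather than code from the repository.

# form.py -- hypothetical sketch, assuming Flask-WTF is installed.
# Only the `text` field name is required by app.py; labels and validators are assumptions.
from flask_wtf import FlaskForm
from wtforms import StringField, SubmitField
from wtforms.validators import DataRequired

class ReusableForm(FlaskForm):
    """Form with a single text field, consumed by home() in app.py."""
    text = StringField('Text to classify', validators=[DataRequired()])
    submit = SubmitField('Predict')

With a form along these lines and an index.html template that renders it, running python app.py starts the development server on port 50000 and the home page accepts text submissions for prediction.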