Goal: I want to create a web link that other users on my team can open to interact with the bot.
Here is the code I am running in Google Cloud Shell:
from flask import Flask, Response
import pandas as pd
import gspread
from oauth2client.service_account import ServiceAccountCredentials
from sklearn.svm import SVC
from sklearn.model_selection import train_test_split as tts
from sklearn.feature_extraction.text import TfidfVectorizer
from sklearn.preprocessing import LabelEncoder as LE
from sklearn.metrics.pairwise import cosine_similarity
import nltk
nltk.download('punkt')
from nltk.stem.lancaster import LancasterStemmer

# Load the FAQ data from Google Sheets through a service account.
scope = ['https://spreadsheets.google.com/feeds', 'https://www.googleapis.com/auth/drive']
credentials = ServiceAccountCredentials.from_json_keyfile_name('bot-1234.json', scope)
gc = gspread.authorize(credentials)
faq = pd.DataFrame(gc.open('Data Gathering').worksheet('Sheet1').get_all_records())

stemmer = LancasterStemmer()

def cleanup(sentence):
    # Tokenize and stem the sentence before vectorizing it.
    word_tok = nltk.word_tokenize(sentence)
    stemmed_words = [stemmer.stem(w) for w in word_tok]
    return ' '.join(stemmed_words)

# Vectorize the FAQ questions and train an SVM to classify them.
le = LE()
tfv = TfidfVectorizer(min_df=1, stop_words='english')
questions = faq['Questions'].values
X = []
for question in questions:
    X.append(cleanup(question))
tfv.fit(X)
le.fit(faq['Class'])
X = tfv.transform(X)
y = le.transform(faq['Class'])

trainx, testx, trainy, testy = tts(X, y, test_size=.25, random_state=42)
model = SVC(kernel='linear')
model.fit(trainx, trainy)
print("SVC:", model.score(testx, testy))

def get_max5(arr):
    # Return the indices of the five highest similarity scores, best first.
    ixarr = []
    for ix, el in enumerate(arr):
        ixarr.append((el, ix))
    ixarr.sort()
    ixs = []
    for i in ixarr[-5:]:
        ixs.append(i[1])
    return ixs[::-1]
app = Flask(__name__)

@app.route('/', methods=['GET', 'POST'])
def test():
    def chat():
        # Console-style chat loop: reads from stdin and prints to stdout.
        cnt = 0
        print("TYPE \"Q\" or \"END\" or \"Quit\" or \"E\" and hit ENTER to QUIT")
        print()
        print()
        DEBUG = False
        TOP5 = False
        print("BOT: Hi, Welcome to BOT - The Assistant!")
        while True:
            usr = input("You:")
            if usr.lower() == 'yes':
                print("BOT: Yes!")
                continue
            if usr.lower() == 'no':
                print("BOT: No?")
                continue
            if usr == 'DEBUG':
                DEBUG = True
                print("Debugging mode on")
                continue
            if usr == 'STOP':
                DEBUG = False
                print("Debugging mode off")
                continue
            if usr.lower() in ('q', 'quit', 'end', 'e'):
                print("BOT: It was good to be of help.")
                break
            if usr == 'TOP5':
                TOP5 = True
                print("Will display 5 most relevant results now")
                continue
            if usr == 'CONF':
                TOP5 = False
                print("Only the most relevant result will be displayed")
                continue
            # Classify the question and rank the FAQ entries of that class
            # by cosine similarity to the user's input.
            t_usr = tfv.transform([cleanup(usr.strip().lower())])
            class_ = le.inverse_transform(model.predict(t_usr))
            questionset = faq[faq['Class'] == class_[0]]
            if DEBUG:
                print("Question classified under category:", class_)
                print("{} Questions belong to this class".format(len(questionset)))
            cos_sims = []
            for question in questionset['Questions']:
                sims = cosine_similarity(tfv.transform([question]), t_usr)
                cos_sims.append(sims)
            ind = cos_sims.index(max(cos_sims))
            if DEBUG:
                question = questionset["Questions"][questionset.index[ind]]
                print("Assuming you asked: {}".format(question))
            if not TOP5:
                print("BOT:", faq['Answer'][questionset.index[ind]])
            else:
                inds = get_max5(cos_sims)
                for ix in inds:
                    print("Question: " + faq['Questions'][questionset.index[ix]])
                    print("Answer: " + faq['Answer'][questionset.index[ix]])
                    print('-' * 50)
            print("\n" * 2)
            outcome = input("Was this answer helpful? Yes/No: ").lower().strip()
            if outcome == 'yes':
                cnt = 0
            elif outcome == 'no':
                inds = get_max5(cos_sims)
                sugg_choice = input("BOT: Do you want me to suggest you questions? Yes/No: ").lower()
                if sugg_choice == 'yes':
                    q_cnt = 1
                    for ix in inds:
                        print(q_cnt, "Question: " + faq['Questions'][questionset.index[ix]])
                        # print("Answer> " + faq['Answer'][questionset.index[ix]])
                        print('-' * 50)
                        q_cnt += 1
                    num = int(input("Please enter the question number you find most relevant: "))
                    print("BOT: ", faq['Answer'][questionset.index[inds[num - 1]]])
    return Response(chat(), mimetype='text/plain')

if __name__ == '__main__':
    app.run(host='127.0.0.1', port=8080, debug=True)
Here is the app.yaml file:
runtime: python
# vm: true has been deprecated
# check how env:flex may affect your billing
env: flex
entrypoint: gunicorn -b :$PORT main:app
runtime_config:
  python_version: 3.7
Here is the requirements.txt file:
Flask==0.11.1
gunicorn==19.6.0
gspread==3.1.0
oauth2client==4.1.3
PyOpenSSL==18.0.0
numpy==1.15.4
scikit-image==0.14.1
scikit-learn==0.20.1
scipy==1.1.0
nltk==3.4
pandas==0.23.4
Error:
It runs smoothly in Cloud Shell without any errors, but the error logs show this:
Traceback (most recent call last):
File "/env/lib/python3.7/site-packages/flask/app.py", line 1988, in wsgi_app
response = self.full_dispatch_request()
File "/env/lib/python3.7/site-packages/flask/app.py", line 1641, in full_dispatch_request
rv = self.handle_user_exception(e)
File "/env/lib/python3.7/site-packages/flask/app.py", line 1544, in handle_user_exception
reraise(exc_type, exc_value, tb)
File "/env/lib/python3.7/site-packages/flask/_compat.py", line 33, in reraise
raise value
File "/env/lib/python3.7/site-packages/flask/app.py", line 1639, in full_dispatch_request
rv = self.dispatch_request()
File "/env/lib/python3.7/site-packages/flask/app.py", line 1625, in dispatch_request
return self.view_functions[rule.endpoint](**req.view_args)
File "/home/vmagent/app/main.py", line 170, in test
return Response(chat(), mimetype='text/plain')
File "/home/vmagent/app/main.py", line 90, in chat
usr = input("You:")
EOFError: EOF when reading a line
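If I understand the traceback correctly, input() fails because the gunicorn worker has no interactive stdin, so the very first read hits end-of-file. I can reproduce the same exception locally with this small check (this is just my own test, not something from the App Engine docs):

import io
import sys

# Simulate a process with no interactive stdin, like a gunicorn worker:
# readline() returns '' (end-of-file) right away, so input() raises EOFError.
sys.stdin = io.StringIO("")

try:
    input("You:")
except EOFError as exc:
    print("Reproduced:", exc)  # EOF when reading a line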
I have done a lot of research, but I cannot find a way to get my bot working. I know the problem is in the part of the code where I make it interact with the app through Flask, and I cannot fix it.
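For what it is worth, this is roughly the direction I have been experimenting with: taking the question from the HTTP request instead of from input(). It would live in main.py next to the existing code so it can reuse faq, tfv, le, model and cleanup; the /ask path and the msg parameter are just names I made up, and I am not sure this is the right way to adapt chat():

from flask import request

@app.route('/ask', methods=['GET', 'POST'])
def ask():
    # Take the question from a query parameter or form field instead of stdin.
    usr = request.values.get('msg', '').strip().lower()
    if not usr:
        return "Please pass your question in the 'msg' parameter.", 400
    # Same classification and similarity ranking as in chat().
    t_usr = tfv.transform([cleanup(usr)])
    class_ = le.inverse_transform(model.predict(t_usr))
    questionset = faq[faq['Class'] == class_[0]]
    cos_sims = [cosine_similarity(tfv.transform([q]), t_usr)
                for q in questionset['Questions']]
    ind = cos_sims.index(max(cos_sims))
    return "BOT: " + str(faq['Answer'][questionset.index[ind]])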
Please help!