I have a problem deploying this Integrated_app.py on a server. Have a look at the code below: as a plain Flask app it works fine, but after the changes I made to get it running under gunicorn it throws errors.
```python
import sys
import os
import json
from glob import glob
import string
from text_segregation.dl_text_seg import dl_text_seg
from text_segregation.utils.read_input import text_seg_read_docx_and_pdf
from json_to_txt import *
from education_details import *
from flask import Flask, jsonify
from work_experience_extraction.comparision import *
from work_experience_extraction.Work_Experience import *
from work_experience_extraction.work_exp_new.testing import *

class text_seg:
    def __init__(self):
        sys.path.append(os.path.dirname(__file__))
        current_dir = os.path.dirname(__file__)
        current_dir = current_dir if current_dir != '' else '.'
        # directory to scan for any pdf and docx files
        self.data_dir_path = current_dir + '/text_segregation/data/resume_to_parse/resumeparser/'
        self.parser = dl_text_seg()
        self.parser.load_model('./text_segregation/models')

    def main(self):
        collected = text_seg_read_docx_and_pdf(
            self.data_dir_path, verbose=False,
            callback=lambda index, file_path, file_content:
                self.parse_resume(index, file_path, file_content))
        print(collected)
        print('count: ', len(collected))

    def parse_resume(self, index, file_path, file_content):
        print('parsing file: ', file_path)
        self.parser.parse(file_content)
        # print(self.parser.raw_text)  # raw contents extracted from pdf or docx files
        parsed_info = self.parser.return_parsed_resume()
        # =========================== saving json file =============================
        pre, ext = os.path.splitext(file_path)
        with open(pre + '.json', "w") as json_file:
            json.dump(parsed_info, json_file, indent=2, sort_keys=False)
        print("Parsed data saved in: ", pre + '.json')

###########************----------________per_info_extraction__________----------************############
import warnings, os
warnings.filterwarnings('ignore', '.*do not.*',)
warnings.filterwarnings("ignore", category=DeprecationWarning)
os.environ['TF_CPP_MIN_LOG_LEVEL'] = '3'
import cv2
import re
from rasa.nlu.training_data import load_data
from rasa.nlu.config import RasaNLUModelConfig
from rasa.nlu.model import Trainer
from rasa.nlu.model import Metadata, Interpreter
from rasa.nlu import config
from rasa.nlu.components import ComponentBuilder
import json
from parsing_data import *
from format_work_exp import *
from work_json_to_txt import *

class per_info:
    def __init__(self):
        self.allowed_extensions = ['.txt']
        self.builder = ComponentBuilder(use_cache=True)
        self.interpreter = Interpreter.load(
            './per_info_extraction/Rasa_NLU/models/nlu/per_info_extaction', self.builder)

    def parse_resume(self, file_path):
        pre, ext = os.path.splitext(file_path)
        print("Reading file: ", file_path)
        if ext in self.allowed_extensions:
            with open(pre + '.txt', 'r') as f:
                text = f.read()
            # =================== entities extraction ============================
            parsed_by_rules = all_field(text)
            new_text = Punctuation(text)
            js = self.interpreter.parse(new_text)
            js.update(parsed_by_rules)
            list_per = js["entities"]
            first_name = ""
            last_name = ""
            cur_ads = ""
            curr_city = ""
            curr_pin = ""
            per_add = ""
            per_pin = ""
            per_city = ""
            for i in list_per:
                if i["entity"] == "first_name":
                    first_name = i["value"]
                elif i["entity"] == "last_name":
                    last_name = i["value"]
                elif i["entity"] == "current_address":
                    cur_ads = i["value"]
                elif i["entity"] == "current_city":
                    curr_city = i["value"]
                elif i["entity"] == "current_pincode":
                    curr_pin = i["value"]
                elif i["entity"] == "permanent_address":
                    per_add = i["value"]
                elif i["entity"] == "permanent_pincode":
                    per_pin = i["value"]
                elif i["entity"] == "permanent_city":
                    per_city = i["value"]
                else:
                    first_name = ""
                    last_name = ""
                    cur_ads = ""
                    curr_city = ""
                    curr_pin = ""
                    per_add = ""
                    per_pin = ""
                    per_city = ""
            alter_email = js['alternate_email']
            dob = js['dob']
            email = js['email']
            gender = js['gender']
            mari_status = js['marital_status']
            if js['mobile_or_phone'] is not None:
                if len(js['mobile_or_phone']) > 1:
                    Phone = js['mobile_or_phone'][0]
                    mobile = js['mobile_or_phone'][1]
                else:
                    Phone = js['mobile_or_phone'][0]
                    mobile = None
            else:
                Phone = None
                mobile = None
            text_per = js['text']
            # =========================== saving json file =============================
            with open(pre + '.json', "w") as json_file:
                json.dump(js, json_file, indent=2, sort_keys=False)
            print("Entities and Intents extracted and saved in: ", pre + '.json')
            return {"FirstName": first_name, "LastName": last_name,
                    "DateOfBirth": dob, "Gender": gender,
                    "MaritalStatus": mari_status, "Email": email,
                    "AlternateEmail": alter_email, "Phone": Phone,
                    "Mobile": mobile, "Address": cur_ads, "City": curr_city,
                    "ZipCode": curr_pin, "PermanentAddress": per_add,
                    "PermanentCity": per_city, "PermanentZipCode": per_pin,
                    "Text": text_per}
###########************----------________per_info_extraction__________----------************############

class work_ex:
    def __init__(self):
        self.allowed_extensions = ['.txt']
        self.builder = ComponentBuilder(use_cache=True)
        # self.interpreter = Interpreter.load('./work_experience_extraction/models/nlu/Invoice_details_extaction_backup', self.builder)

    def parse_work_data(self, file_path):
        pre, ext = os.path.splitext(file_path)
        print("Reading file: ", file_path)
        if ext in self.allowed_extensions:
            with open(pre + '.txt', 'r') as f:
                text = f.read()
            work_exp_text = {}
            final_work_dict = {}
            work_exp_text['Experience'] = text
            js = line_spliting_function(text)
            work_exp_text.update(js)
            final_work_dict["work_experience"] = work_exp_text
            with open(pre + '.json', "w") as json_file:
                json.dump(final_work_dict["work_experience"], json_file, indent=2, sort_keys=False)
            return work_exp_text['Experience'], js['SegregatedExperience']

def education_parse(file_path):
    pre, ext = os.path.splitext(file_path)
    print("Reading file: ", file_path)
    allowed_extensions = ['.txt']
    if ext in allowed_extensions:
        with open(pre + '.txt', 'r') as f:
            text = f.read()
        result_edu = extract_education_details(text)
        with open(pre + '.json', "w") as json_file:
            json.dump(result_edu, json_file, indent=2, sort_keys=False)
        return result_edu

import time
start = time.time()
print("hello there")
end = time.time()
print("Initial time:", end - start)

global graph
import tensorflow as tf
# graph = tf.get_default_graph()
from flask import Flask, request
from flask_restful import Resource, Api
from flask_jsonpify import jsonify
import random
import operator
import os
from flask_cors import CORS
from werkzeug.utils import secure_filename

app = Flask(__name__)
# CORS(app)
# api = Api(app)
# app.config['JSON_SORT_KEYS'] = False
import keras
# The three models used to be created here at module level; I moved them
# into process.__init__ while trying to make gunicorn work:
# seg_my_text = text_seg()
# personal_info = per_info()
# work_exp = work_ex()

class process(Resource):
    @app.route('/process', methods=['POST', 'PUT'])
    def __init__(self):
        print("enjoy!")
        self.start = time.time()
        self.seg_my_text = text_seg()
        self.personal_info = per_info()
        self.work_exp = work_ex()
        self.end = time.time()
        print("Model loading time:", self.end - self.start)

    def post():
        print("Step 1_" * 5, "Entered app")
        file = request.files['file']
        print("Step 2_" * 5, "file name taken")
        filename = secure_filename(file.filename)
        print(filename)
        file.save(os.path.join(r"/home/nextjob/database/text_seg/", filename))
        file_path = os.path.join(r"/home/nextjob/database/text_seg/", filename)
        pre_name, ext = os.path.splitext(file_path)
        seg_text_extracted = text_seg_read_docx_and_pdf(file_path)
        print(seg_text_extracted)
        self.seg_my_text.parse_resume(None, file_path, seg_text_extracted)
        # ----------------------------- personal info -----------------------------
        obj_class = Json_data_clean(pre_name + '.json')
        obj_class.json_clean()
        pre_name_2 = pre_name.replace('/home/nextjob/database/text_seg/', '')
        parse_file = "/home/nextjob/database/per_info/" + pre_name_2 + ".txt"
        per_info_dict = self.personal_info.parse_resume(parse_file)
        # ------------------------------- education --------------------------------
        obj_class = Json_data_education(pre_name + '.json')
        obj_class.json_clean_education()
        parse_file_edu = "/home/nextjob/database/education/" + pre_name_2 + ".txt"
        education_dict = education_parse(parse_file_edu)
        # ---------------------------- work experience -----------------------------
        work_txt_obj = Json_data_work(pre_name + '.json')
        work_txt_obj.json_clean_work()
        parse_file_work = "/home/nextjob/database/work_experience/" + pre_name_2 + ".txt"
        work_dict = self.work_exp.parse_work_data(parse_file_work)
        # -------------------------- professional summary --------------------------
        professional_json = {}
        skill_json = {}
        profe = " "
        main_skill = " "
        with open(pre_name + '.json') as json_file:
            data = json.load(json_file)
        professional_json['professional_info'] = [None]
        skill_json['skills'] = {"SkillSet": " "}
        for key in data.keys():
            if key == 'professional_info':
                if data['professional_info'] is not None:
                    profe = " ".join(data['professional_info'])
                else:
                    profe = " "
            elif key == 'skills':
                main_skill = ",".join(data['skills'])
                skills_list = [{"Skill": skill} for skill in data['skills']]
                skill_json['skills'] = {"SkillSet": skills_list}
        if per_info_dict["FirstName"] and per_info_dict["LastName"]:
            FUN = per_info_dict["FirstName"] + " " + per_info_dict["LastName"]
        else:
            FUN = per_info_dict["FirstName"] or per_info_dict["LastName"]
        d = {"ResumeParserData": {
            "FullName": FUN or " ",
            "FirstName": per_info_dict["FirstName"] or " ",
            "LastName": per_info_dict["LastName"] or " ",
            "DateOfBirth": per_info_dict["DateOfBirth"] or " ",
            "Gender": per_info_dict["Gender"] or " ",
            "MaritalStatus": per_info_dict["MaritalStatus"] or " ",
            "Email": per_info_dict["Email"] or " ",
            "AlternateEmail": per_info_dict["AlternateEmail"] or " ",
            "Phone": per_info_dict["Phone"] or " ",
            "Mobile": per_info_dict["Mobile"] or " ",
            "Address": per_info_dict["Address"] or " ",
            "City": per_info_dict["City"] or " ",
            "ZipCode": per_info_dict["ZipCode"] or " ",
            "PermanentAddress": per_info_dict["PermanentAddress"] or " ",
            "PermanentCity": per_info_dict["PermanentCity"] or " ",
            "PermanentZipCode": per_info_dict["PermanentZipCode"],
            "CurrentSalary": " ",
            "ExpectedSalary": " ",
            "Qualification": education_dict['Qualification'],
            "SegregatedQualification": education_dict['SegregatedQualification'],
            "Skills": main_skill,
            "SkillSet": skill_json['skills']["SkillSet"],
            "Experience": work_dict[0],
            "SegregatedExperience": work_dict[1],
            "Summary": profe or " ",
        }}
        with open("/home/nextjob/database/final_json/" + pre_name_2 + '.json', "w") as json_file:
            json.dump(d, json_file, indent=2, sort_keys=False)
        keras.backend.clear_session()
        return jsonify(d)

# api.add_resource(process, '/process')
if __name__ == '__main__':
    app.run(host="0.0.0.0", port=5000, debug=True)
```
After running `python Integrated_app.py` and sending a POST request to `/process`, I get this error:

```
Initial time: 1.6689300537109375e-05
WARNING:werkzeug: * Debugger is active!
INFO:werkzeug: * Debugger PIN: 339-043-829
INFO:werkzeug:127.0.0.1 - - [10/Jul/2020 20:30:57] "POST /process HTTP/1.1" 500 -
Traceback (most recent call last):
File "/home/nextjob/anaconda3/envs/irp/lib/python3.7/site-packages/flask/app.py", line 2464, in __call__
return self.wsgi_app(environ, start_response)
File "/home/nextjob/anaconda3/envs/irp/lib/python3.7/site-packages/flask/app.py", line 2450, in wsgi_app
response = self.handle_exception(e)
File "/home/nextjob/anaconda3/envs/irp/lib/python3.7/site-packages/flask/app.py", line 1867, in handle_exception
reraise(exc_type, exc_value, tb)
File "/home/nextjob/anaconda3/envs/irp/lib/python3.7/site-packages/flask/_compat.py", line 39, in reraise
raise value
File "/home/nextjob/anaconda3/envs/irp/lib/python3.7/site-packages/flask/app.py", line 2447, in wsgi_app
response = self.full_dispatch_request()
File "/home/nextjob/anaconda3/envs/irp/lib/python3.7/site-packages/flask/app.py", line 1952, in full_dispatch_request
rv = self.handle_user_exception(e)
File "/home/nextjob/anaconda3/envs/irp/lib/python3.7/site-packages/flask/app.py", line 1821, in handle_user_exception
reraise(exc_type, exc_value, tb)
File "/home/nextjob/anaconda3/envs/irp/lib/python3.7/site-packages/flask/_compat.py", line 39, in reraise
raise value
File "/home/nextjob/anaconda3/envs/irp/lib/python3.7/site-packages/flask/app.py", line 1950, in full_dispatch_request
rv = self.dispatch_request()
File "/home/nextjob/anaconda3/envs/irp/lib/python3.7/site-packages/flask/app.py", line 1936, in dispatch_request
return self.view_functions[rule.endpoint](**req.view_args)
TypeError: __init__() missing 1 required positional argument: 'self'
INFO:werkzeug:127.0.0.1 - - [10/Jul/2020 20:31:00] "POST /process HTTP/1.1" 500 -
```

(The same traceback, ending in the same `TypeError`, is printed again for the second request.)
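If I read this traceback correctly, the `@app.route` decorator on `__init__` makes Flask register the unbound `process.__init__` itself as the view function for `/process`, so it gets called without an instance, which is exactly the missing `'self'` error; my `post()` is also missing `self`. As far as I understand, with flask-restful the route is supposed to be registered through `api.add_resource` (which I have commented out), with the HTTP-verb methods taking `self`. A minimal sketch of the pattern I believe is intended, not my full app:

```python
from flask import Flask, request
from flask_restful import Resource, Api

app = Flask(__name__)
api = Api(app)

class Process(Resource):
    # No @app.route here: flask-restful dispatches POST requests to post() itself.
    def post(self):  # instance method; a fresh Process is created per request
        file = request.files['file']
        return {"filename": file.filename}

# Replaces both the decorator on __init__ and the commented-out call in my file.
api.add_resource(Process, '/process')
```

If that is the right pattern, the heavy models presumably cannot be loaded in `__init__` either, because flask-restful builds a new Resource instance for every request; they would have to live at module level.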
After setting `export FLASK_APP="Integrated_app:app"` and running `flask run`, I get this error:

```
* Serving Flask app "Integrated_app:app" (lazy loading)
* Environment: development
* Debug mode: on
* Running on http://127.0.0.1:5000/ (Press CTRL+C to quit)
* Restarting with stat
* Debugger is active!
* Debugger PIN: 158-271-172
Using TensorFlow backend.
Using TensorFlow backend.
Traceback (most recent call last):
File "/home/nextjob/anaconda3/envs/irp/bin/flask", line 8, in <module>
sys.exit(main())
File "/home/nextjob/anaconda3/envs/irp/lib/python3.7/site-packages/flask/cli.py", line 967, in main
cli.main(args=sys.argv[1:], prog_name="python -m flask" if as_module else None)
File "/home/nextjob/anaconda3/envs/irp/lib/python3.7/site-packages/flask/cli.py", line 586, in main
return super(FlaskGroup, self).main(*args, **kwargs)
File "/home/nextjob/anaconda3/envs/irp/lib/python3.7/site-packages/click/core.py", line 782, in main
rv = self.invoke(ctx)
File "/home/nextjob/anaconda3/envs/irp/lib/python3.7/site-packages/click/core.py", line 1259, in invoke
return _process_result(sub_ctx.command.invoke(sub_ctx))
File "/home/nextjob/anaconda3/envs/irp/lib/python3.7/site-packages/click/core.py", line 1066, in invoke
return ctx.invoke(self.callback, **ctx.params)
File "/home/nextjob/anaconda3/envs/irp/lib/python3.7/site-packages/click/core.py", line 610, in invoke
return callback(*args, **kwargs)
File "/home/nextjob/anaconda3/envs/irp/lib/python3.7/site-packages/click/decorators.py", line 73, in new_func
return ctx.invoke(f, obj, *args, **kwargs)
File "/home/nextjob/anaconda3/envs/irp/lib/python3.7/site-packages/click/core.py", line 610, in invoke
return callback(*args, **kwargs)
File "/home/nextjob/anaconda3/envs/irp/lib/python3.7/site-packages/flask/cli.py", line 860, in run_command
extra_files=extra_files,
File "/home/nextjob/anaconda3/envs/irp/lib/python3.7/site-packages/werkzeug/serving.py", line 1050, in run_simple
run_with_reloader(inner, extra_files, reloader_interval, reloader_type)
File "/home/nextjob/anaconda3/envs/irp/lib/python3.7/site-packages/werkzeug/_reloader.py", line 337, in run_with_reloader
reloader.run()
File "/home/nextjob/anaconda3/envs/irp/lib/python3.7/site-packages/werkzeug/_reloader.py", line 202, in run
for filename in chain(_iter_module_files(), self.extra_files):
File "/home/nextjob/anaconda3/envs/irp/lib/python3.7/site-packages/werkzeug/_reloader.py", line 24, in _iter_module_files
filename = getattr(module, "__file__", None)
File "/home/nextjob/.local/lib/python3.7/site-packages/tensorflow/__init__.py", line 50, in __getattr__
module = self._load()
File "/home/nextjob/.local/lib/python3.7/site-packages/tensorflow/__init__.py", line 44, in _load
module = _importlib.import_module(self.__name__)
File "/home/nextjob/anaconda3/envs/irp/lib/python3.7/importlib/__init__.py", line 127, in import_module
return _bootstrap._gcd_import(name[level:], package, level)
File "<frozen importlib._bootstrap>", line 1006, in _gcd_import
File "<frozen importlib._bootstrap>", line 983, in _find_and_load
File "<frozen importlib._bootstrap>", line 965, in _find_and_load_unlocked
ModuleNotFoundError: No module named 'tensorflow_core.keras'
WARNING:tensorflow:
The TensorFlow contrib module will not be included in TensorFlow 2.0.
For more information, please see:
* https://github.com/tensorflow/community/blob/master/rfcs/20180907-contrib-sunset.md
* https://github.com/tensorflow/addons
* https://github.com/tensorflow/io (for I/O related ops)
If you depend on functionality not listed there, please file an issue.
WARNING:tensorflow:
The TensorFlow contrib module will not be included in TensorFlow 2.0.
For more information, please see:
* https://github.com/tensorflow/community/blob/master/rfcs/20180907-contrib-sunset.md
* https://github.com/tensorflow/addons
* https://github.com/tensorflow/io (for I/O related ops)
If you depend on functionality not listed there, please file an issue.
--------------------------Model Loaded-------------------------------------
Total Time Taken To Load the Model -------------------------> 0:00:03.914416
Total Time Taken For Output of all Files -------------------------> 0:00:00.000083
--------------------------Model Loaded-------------------------------------
Total Time Taken To Load the Model -------------------------> 0:00:03.848447
Total Time Taken For Output of all Files -------------------------> 0:00:00.000088
```
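What strikes me in this traceback is that TensorFlow is imported from `/home/nextjob/.local/lib/python3.7/site-packages/` while Flask and everything else come from the conda env, so I suspect two conflicting TensorFlow installations (the failing `tensorflow_core` lazy-loader belongs to the user-site copy). A small diagnostic snippet I use to check which copies actually get imported, assuming both packages import at all:

```python
# Print the version and on-disk location of the TensorFlow and Keras
# that this interpreter actually picks up.
import tensorflow as tf
import keras

print("tensorflow", tf.__version__, "loaded from", tf.__file__)
print("keras", keras.__version__, "loaded from", keras.__file__)
```

If `tf.__file__` points into `~/.local` rather than the conda env, would removing the user-site copy and reinstalling TensorFlow inside the env be the right fix?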
And with `gunicorn -w 2 'Integrated_app:app'`, I get this error:

```
[2020-07-10 20:39:23 +0530] [18644] [INFO] Starting gunicorn 20.0.4
[2020-07-10 20:39:23 +0530] [18644] [INFO] Listening at: http://127.0.0.1:8000 (18644)
[2020-07-10 20:39:23 +0530] [18644] [INFO] Using worker: sync
[2020-07-10 20:39:23 +0530] [18647] [INFO] Booting worker with pid: 18647
[2020-07-10 20:39:23 +0530] [18651] [INFO] Booting worker with pid: 18651
Using TensorFlow backend.
Using TensorFlow backend.
WARNING:tensorflow:
The TensorFlow contrib module will not be included in TensorFlow 2.0.
For more information, please see:
* https://github.com/tensorflow/community/blob/master/rfcs/20180907-contrib-sunset.md
* https://github.com/tensorflow/addons
* https://github.com/tensorflow/io (for I/O related ops)
If you depend on functionality not listed there, please file an issue.
WARNING:tensorflow:
The TensorFlow contrib module will not be included in TensorFlow 2.0.
For more information, please see:
* https://github.com/tensorflow/community/blob/master/rfcs/20180907-contrib-sunset.md
* https://github.com/tensorflow/addons
* https://github.com/tensorflow/io (for I/O related ops)
If you depend on functionality not listed there, please file an issue.
--------------------------Model Loaded-------------------------------------
Total Time Taken To Load the Model -------------------------> 0:00:03.871782
Total Time Taken For Output of all Files -------------------------> 0:00:00.000069
Failed to find attribute 'app' in 'Integrated_app'.
[2020-07-10 20:39:30 +0530] [18647] [INFO] Worker exiting (pid: 18647)
--------------------------Model Loaded-------------------------------------
Total Time Taken To Load the Model -------------------------> 0:00:04.019945
Total Time Taken For Output of all Files -------------------------> 0:00:00.000088
Failed to find attribute 'app' in 'Integrated_app'.
[2020-07-10 20:39:30 +0530] [18651] [INFO] Worker exiting (pid: 18651)
[2020-07-10 20:39:31 +0530] [18644] [INFO] Shutting down: Master
[2020-07-10 20:39:31 +0530] [18644] [INFO] Reason: App failed to load.
```
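As far as I understand, gunicorn simply imports the module and looks up the module-level attribute named after the colon; nothing under `if __name__ == '__main__':` runs. Since my file does define `app = Flask(__name__)` at module level, I'm not sure why the lookup fails, but here is the minimal layout I'm comparing against (a sketch with stubs in place of my real model-loading code):

```python
# Integrated_app.py -- minimal layout that `gunicorn Integrated_app:app` expects
from flask import Flask, request, jsonify

app = Flask(__name__)  # must exist at import time; this is the 'app' gunicorn looks up

# Heavy models would be loaded once per worker here, at import time,
# rather than inside __init__ or a request handler:
# seg_my_text = text_seg()
# personal_info = per_info()
# work_exp = work_ex()

@app.route('/process', methods=['POST'])
def process_resume():
    file = request.files['file']
    return jsonify({"filename": file.filename})

if __name__ == '__main__':
    # Used only by `python Integrated_app.py`; gunicorn ignores this block.
    app.run(host="0.0.0.0", port=5000, debug=True)
```

With this shape the command would just be `gunicorn -w 2 Integrated_app:app`; I have also seen `--preload` suggested so the models load once in the master before forking, but I don't know how well that plays with TensorFlow.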
Please point out my mistakes and tell me how to get this running under gunicorn. Thanks.