Question

Broken Pipe Error in gunicorn server

I have been trying to deploy chat bot that uses a pre trained transformer model, DistilBertForQuestionAnswering and using flask, flask_socketio, tensorflow. Here is my nginx file,

upstream socketio_nodes {
    ip_hash;
#    keepalive 60;
    #server http://unix:/home/web_chatbot/web_chatbot.sock/socket.io:/socket.io/;
    server 127.0.0.1:5000;
    server 127.0.0.1:5001;
    server 127.0.0.1:5002;
    # to scale the app, just add more nodes here!
}

server {
    listen 80;
    server_name xxx.xxx.xxx.194;

location / {
  include proxy_params;
  proxy_pass http://127.0.0.1:5000;
#  proxy_read_timeout     1200;
    }

location /static {
        alias /home/web_chatbot/web_chatbot/static;
        expires 30d;
    }

    location /socket.io {
        include proxy_params;
        proxy_http_version 1.1;
        proxy_buffering off;
        proxy_set_header Upgrade $http_upgrade;
        proxy_set_header Connection "";
        proxy_pass http://socketio_nodes/socket.io;
 #       proxy_read_timeout     1200;
 #       proxy_connect_timeout  1200;
   }

}

my flask app file

from flask import Flask, request, render_template, flash, url_for
from flask_socketio import SocketIO
from web_chatbot.toweb import Conversation
# import redis
from flask_cors import CORS

app = Flask(__name__)
# cache = redis.Redis(host='redis', port=6379)
CORS(app, resources={
    r"/*": {
        "origins": "*"
    }
})
# app.config['SECRET_KEY'] = 'xxxxxxx'
socketio = SocketIO(app)


@app.route("/")
def home():

    return render_template('index.html')


def messageReceived(message, methods=['GET', 'POST']):
    msg_ = str(message['message'])
    if (msg_ != 'User Connected'):

        botReply(str(msg_))
        print('message was received! ')
    else:
        pass

@socketio.on('chat-event')
def handleEvents(json_, methods=['GET', 'POST']):
    dict_object = json_
    print('received my event tada: ' + str(dict_object))
    socketio.emit('msg', json_, callback=messageReceived(dict_object))


def botReply(msg):
    print('BOT INPUT: ' + msg)
    chat_engine = Conversation()
    bot_reply = chat_engine.model_ans(msg)
    print('Bot Reply: ' + bot_reply)
    socketio.emit('reply', bot_reply)


@socketio.on('connected')
def connectedGuest(methods=['GET', 'POST']):
    print('Connected')


if __name__ == '__main__':
    app.debug = True
    socketio.run(app)

my toweb.py file is

from transformers import pipeline
from functools import lru_cache

import multiprocessing
import codecs

class Model():

    context = "./sw_merge.txt"

    @lru_cache(maxsize=10000)
    def __init__(self):
        print('processing - iniit in model')
        self.model = pipeline('question-answering')
        with codecs.open(self.context, 'rb', errors = 'ignore', encoding='utf-8') as f:
            self.lines = f.read()

    def run_qa(self, qn):
        print('run_qa - on proccessing')
        ans = self.model(context = self.lines, question = qn)
        return ans


class Conversation():
    #incoming messages - receives an input from the user
    def incoming(self, question):
        usr_qn = []
        usr_qn.append(question)
        return usr_qn

    #model prediction
    def model_ans(self, input_qn):
        y = Model()
        ans = y.run_qa(input_qn)
        ans_text = ans.get("answer")
        print('model_ans - running')
        return ans_text




if __name__ == '__main__':

    p1 = multiprocessing.Process(target=Conversation)
    p2 = multiprocessing.Process(target=Model)

    p1.start()
    p2.start()

    p1.join()
    p2.join()


    # check if processes are alive 
    print("Process p1 is alive: {}".format(p1.is_alive())) 
    print("Process p2 is alive: {}".format(p2.is_alive()))

    # question = ''
    # chat_conv = Conversation()

    # incoming_text = chat_conv.incoming(question)
    # outgoing_text = chat_conv.model_ans(incoming_text)

So when i type something on my chart bubble the text start being processed but the processes is terminetated with Broken Pipe Error

my gunicorn has been configured to have 3 workers and use gevelent.

My chart app works fine on local, I’m not sure if the problem is package version or there is something wrong on my configurations. I will rely appreciate for some help please!!!

here is my requirements env

absl-py==0.9.0
astor==0.8.1
astunparse==1.6.3
cachetools==4.1.1
certifi==2020.6.20
chardet==3.0.4
click==7.1.2
dataclasses==0.7
dnspython==1.16.0
filelock==3.0.12
Flask==1.1.2
Flask-Cors==3.0.8
Flask-SocketIO==4.3.1
gast==0.3.3
gevent==20.6.2
gevent-websocket==0.10.1
google-auth==1.20.1
google-auth-oauthlib==0.4.1
google-pasta==0.2.0
greenlet==0.4.16
grpcio==1.31.0
gunicorn==20.0.4
h5py==2.10.0
idna==2.10
importlib-metadata==1.7.0
itsdangerous==1.1.0
Jinja2==2.11.2
joblib==0.16.0
Keras-Applications==1.0.8
Keras-Preprocessing==1.1.2
Markdown==3.2.2
MarkupSafe==1.1.1
monotonic==1.5
numpy==1.18.5
oauthlib==3.1.0
opt-einsum==3.3.0
packaging==20.4
protobuf==3.13.0
pyasn1==0.4.8
pyasn1-modules==0.2.8
pyparsing==2.4.7
python-engineio==3.13.1
python-socketio==4.6.0
regex==2020.7.14
requests==2.24.0
requests-oauthlib==1.3.0
rsa==4.6
sacremoses==0.0.43
scipy==1.4.1
sentencepiece==0.1.91
six==1.15.0
tb-nightly==1.14.0a20190603
tensorboard==2.3.0
tensorboard-plugin-wit==1.7.0
tensorflow==2.3.0
tensorflow-estimator==2.3.0
termcolor==1.1.0
tf-estimator-nightly==1.14.0.dev2019060501
tokenizers==0.8.1rc1
tqdm==4.48.2
transformers==3.0.2
urllib3==1.25.10
Werkzeug==1.0.1
wrapt==1.12.1
zipp==3.1.0
zope.event==4.4
zope.interface==5.1.0

Show comments

Submit an answer

This textbox defaults to using Markdown to format your answer.

You can type !ref in this text area to quickly search our full set of tutorials, documentation & marketplace offerings and insert the link!

Sign In or Sign Up to Answer