123456789101112131415161718192021222324252627282930313233343536373839404142434445464748495051525354555657585960616263646566676869707172737475767778798081828384858687888990919293949596979899100101102103104105106107108109110111112113114115116117118119120121 |
- import requests
- from flask import Flask, request, jsonify,render_template
- from flask_cors import CORS
-
# Flask application object for the chatbot HTTP front-end.
app = Flask(__name__)


# Allow cross-origin requests so a browser UI served from another
# origin/port can call this API.
CORS(app)
-
- # import nltk
- # nltk.download('popular')
- # from nltk.stem import WordNetLemmatizer
- # lemmatizer = WordNetLemmatizer()
- # import pickle
- # import numpy as np
-
- # from keras.models import load_model
- # model = load_model('model.h5')
- # import json
- # import random
- # intents = json.loads(open('data.json').read())
- # words = pickle.load(open('texts.pkl','rb'))
- # classes = pickle.load(open('labels.pkl','rb'))
-
- # def clean_up_sentence(sentence):
- # # tokenize the pattern - split words into array
- # sentence_words = nltk.word_tokenize(sentence)
- # # stem each word - create short form for word
- # sentence_words = [lemmatizer.lemmatize(word.lower()) for word in sentence_words]
- # return sentence_words
-
- # # return bag of words array: 0 or 1 for each word in the bag that exists in the sentence
-
- # def bow(sentence, words, show_details=True):
- # # tokenize the pattern
- # sentence_words = clean_up_sentence(sentence)
- # # bag of words - matrix of N words, vocabulary matrix
- # bag = [0]*len(words)
- # for s in sentence_words:
- # for i,w in enumerate(words):
- # if w == s:
- # # assign 1 if current word is in the vocabulary position
- # bag[i] = 1
- # if show_details:
- # print ("found in bag: %s" % w)
- # return(np.array(bag))
-
- # def predict_class(sentence, model):
- # # filter out predictions below a threshold
- # p = bow(sentence, words,show_details=False)
- # res = model.predict(np.array([p]))[0]
- # ERROR_THRESHOLD = 0.25
- # results = [[i,r] for i,r in enumerate(res) if r>ERROR_THRESHOLD]
- # # sort by strength of probability
- # results.sort(key=lambda x: x[1], reverse=True)
- # return_list = []
- # for r in results:
- # return_list.append({"intent": classes[r[0]], "probability": str(r[1])})
- # return return_list
-
- # def getResponse(ints, intents_json):
- # tag = ints[0]['intent']
- # list_of_intents = intents_json['intents']
- # for i in list_of_intents:
- # if(i['tag']== tag):
- # result = random.choice(i['responses'])
- # break
- # return result
-
- # def chatbot_response(msg):
- # ints = predict_class(msg, model)
- # res = getResponse(ints, intents)
- # return res
-
-
-
# Serve static assets (chat widget JS/CSS) from the ./static directory.
app.static_folder = 'static'


# Define the Rasa server URL
# REST webhook endpoint of the Rasa server this app proxies messages to.
# NOTE(review): assumes Rasa runs locally on port 5005 — confirm for deployment.
rasa_server_url = "http://localhost:5005/webhooks/rest/webhook"
-
@app.route("/")
def home():
    """Render and return the chat front-end landing page."""
    page = render_template("index.html")
    return page
-
-
@app.route('/webhook', methods=['POST', 'GET'])
def webhook():
    """Proxy a chat message to the Rasa REST webhook and return its reply.

    Expects a JSON body like ``{"message": "..."}``.  Returns the Rasa
    response list as JSON, or a canned fallback reply when Rasa returns
    nothing (or is unreachable).
    """
    # request.json raises on GET requests and non-JSON bodies even though the
    # route accepts GET; get_json(silent=True) tolerates both.
    payload = request.get_json(silent=True) or {}
    message = payload.get('message')
    # message =request.args.get('msg')
    if message is None:
        # No usable message supplied — reject instead of crashing with KeyError.
        return jsonify([
            {
                "recipient_id": "default",
                "text": "I'm sorry, I didn't understand that. Can you please rephrase?"
            }]), 400

    # Send the message to the Rasa server.  A timeout and exception guard keep
    # a down/slow Rasa server from turning into an unhandled 500 here.
    try:
        rasa_response = requests.post(
            rasa_server_url, json={"message": message}, timeout=10
        ).json()
    except (requests.exceptions.RequestException, ValueError):
        # Unreachable server or non-JSON body: fall through to the fallback.
        rasa_response = []

    # Return the Rasa response as a JSON object
    print(rasa_response)
    if not rasa_response:
        return jsonify([
            {
                "recipient_id": "default",
                "text": "I'm sorry, I didn't understand that. Can you please rephrase?"
            }])
    else:
        return jsonify(rasa_response)
-
-
-
@app.route("/get")
def get_bot_response():
    """Legacy GET endpoint (``/get?msg=...``) answered via the Rasa server.

    The original implementation called ``chatbot_response``, which only
    exists inside the commented-out Keras pipeline above, so every request
    raised ``NameError``.  Forward the message to the Rasa REST webhook
    instead, mirroring ``/webhook``, and return the reply as plain text.
    """
    user_text = request.args.get('msg', '')
    try:
        replies = requests.post(
            rasa_server_url, json={"message": user_text}, timeout=10
        ).json()
    except (requests.exceptions.RequestException, ValueError):
        replies = []
    if replies:
        # Join the text of every Rasa reply into a single response string.
        return " ".join(r.get("text", "") for r in replies if isinstance(r, dict))
    return "I'm sorry, I didn't understand that. Can you please rephrase?"
-
-
if __name__ == '__main__':
    # Bind to all interfaces so the app is reachable from other hosts;
    # port 5020 keeps clear of Rasa's default 5005.
    app.run(host='0.0.0.0',port=5020)
|