Train a chatbot with Keras
The original post describes step by step what needs to be done to train a chatbot, and what is cool is that it is language independent: all you need is a good enough dataset.
sudo apt install python3-pip
pip install tensorflow nltk numpy scikit-learn
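To quickly check that the install worked, an import smoke test is enough:
python3 -c "import tensorflow, nltk, numpy, sklearn; print(tensorflow.__version__)"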
Here is the combined training script:
import json
import numpy as np
import tensorflow as tf
from tensorflow import keras
from tensorflow.keras.models import Sequential
from tensorflow.keras.layers import Dense, Embedding, GlobalAveragePooling1D
from tensorflow.keras.preprocessing.text import Tokenizer
from tensorflow.keras.preprocessing.sequence import pad_sequences
from sklearn.preprocessing import LabelEncoder
# load intents
with open('intents.json') as file:
    data = json.load(file)
# fill variables from json
# training inputs
training_sentences = []
# training labels
training_labels = []
labels = []
# training responses
responses = []
for intent in data['intents']:
    for pattern in intent['patterns']:
        training_sentences.append(pattern)
        training_labels.append(intent['tag'])
    responses.append(intent['responses'])
    if intent['tag'] not in labels:
        labels.append(intent['tag'])
num_classes = len(labels)
# use LabelEncoder from scikit-learn to convert human readable labels into machine understandable
lbl_encoder = LabelEncoder()
lbl_encoder.fit(training_labels)
training_labels = lbl_encoder.transform(training_labels)
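# e.g. ['greeting', 'greeting', 'goodbye', ...] becomes array([3, 3, 2, ...]);
# LabelEncoder assigns the integers based on the sorted order of the tag names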
# "vectorize" sentences
vocab_size = 1000
embedding_dim = 16
max_len = 20
oov_token = "<OOV>"
tokenizer = Tokenizer(num_words=vocab_size, oov_token=oov_token) # adding out of vocabulary token
tokenizer.fit_on_texts(training_sentences)
word_index = tokenizer.word_index
sequences = tokenizer.texts_to_sequences(training_sentences)
padded_sequences = pad_sequences(sequences, truncating='post', maxlen=max_len)
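# padded_sequences is now a (number of patterns, max_len) integer matrix:
# shorter sentences are zero-padded (default padding='pre'), longer ones are cut at max_len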
# define model
model = Sequential()
model.add(Embedding(vocab_size, embedding_dim, input_length=max_len))
model.add(GlobalAveragePooling1D())
model.add(Dense(16, activation='relu'))
model.add(Dense(16, activation='relu'))
model.add(Dense(num_classes, activation='softmax'))
model.compile(loss='sparse_categorical_crossentropy', optimizer='adam', metrics=['accuracy'])
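# sparse_categorical_crossentropy fits here because the labels are plain integers
# from LabelEncoder, not one-hot vectors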
# print
model.summary()
# train
epochs = 550
history = model.fit(padded_sequences, np.array(training_labels), epochs=epochs)
# saving model
model.save("chat_model")
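# note: with older TF 2.x this creates a SavedModel directory named chat_model;
# Keras 3 expects an explicit extension such as "chat_model.keras"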
import pickle
# saving tokenizer
with open('tokenizer.pickle', 'wb') as handle:
    pickle.dump(tokenizer, handle, protocol=pickle.HIGHEST_PROTOCOL)
# saving label encoder
with open('label_encoder.pickle', 'wb') as ecn_file:
    pickle.dump(lbl_encoder, ecn_file, protocol=pickle.HIGHEST_PROTOCOL)
And the demo script:
embed:chat.py
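The embedded chat.py is not reproduced here, so below is a minimal sketch of what such a demo script can look like, assuming the artifacts saved by the training script above (chat_model, tokenizer.pickle, label_encoder.pickle) and the same intents.json:

import json
import pickle
import random
import numpy as np
from tensorflow import keras
from tensorflow.keras.preprocessing.sequence import pad_sequences

max_len = 20  # must match the value used during training

# load the trained model and the fitted tokenizer / label encoder
model = keras.models.load_model("chat_model")
with open("tokenizer.pickle", "rb") as handle:
    tokenizer = pickle.load(handle)
with open("label_encoder.pickle", "rb") as enc_file:
    lbl_encoder = pickle.load(enc_file)

# intents.json is reused to pick a random response for the predicted tag
with open("intents.json") as file:
    data = json.load(file)

print("Type a message (or 'quit' to exit)")
while True:
    text = input("You: ")
    if text.lower() == "quit":
        break
    # same preprocessing as in training: tokenize, convert to a sequence, pad
    seq = pad_sequences(tokenizer.texts_to_sequences([text]),
                        truncating='post', maxlen=max_len)
    result = model.predict(seq)
    tag = lbl_encoder.inverse_transform([np.argmax(result)])[0]
    for intent in data['intents']:
        if intent['tag'] == tag:
            print("Bot:", random.choice(intent['responses']))

The preprocessing has to mirror the training step exactly (same tokenizer, same max_len, same padding), otherwise the predictions will be off.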
The bot can answer a few simple phrases defined in intents.json:
{
  "intents": [
    {
      "tag": "greeting",
      "patterns": ["Hi", "Hey", "Is anyone there?", "Hello", "Hay"],
      "responses": ["Hello", "Hi", "Hi there"]
    },
    {
      "tag": "goodbye",
      "patterns": ["Bye", "See you later", "Goodbye"],
      "responses": ["See you later", "Have a nice day", "Bye! Come back again"]
    },
    {
      "tag": "thanks",
      "patterns": ["Thanks", "Thank you", "That's helpful", "Thanks for the help"],
      "responses": ["Happy to help!", "Any time!", "My pleasure", "You're most welcome!"]
    },
    {
      "tag": "about",
      "patterns": ["Who are you?", "What are you?", "Who you are?"],
      "responses": ["I'm Joana, your bot assistant", "I'm Joana, an Artificial Intelligent bot"]
    },
    {
      "tag": "name",
      "patterns": ["what is your name", "what should I call you", "whats your name?"],
      "responses": ["You can call me Joana.", "I'm Joana!", "Just call me Joana"]
    },
    {
      "tag": "help",
      "patterns": [
        "Could you help me?",
        "give me a hand please",
        "Can you help?",
        "What can you do for me?",
        "I need a support",
        "I need a help",
        "support me please"
      ],
      "responses": ["Tell me how I can assist you", "Tell me your problem so I can assist you", "Yes, sure. How can I support you?"]
    },
    {
      "tag": "createaccount",
      "patterns": [
        "I need to create a new account",
        "how to open a new account",
        "I want to create an account",
        "can you create an account for me",
        "how to open a new account"
      ],
      "responses": [
        "You can just easily create a new account from our web site",
        "Just go to our web site and follow the guidelines to create a new account"
      ]
    },
    {
      "tag": "complaint",
      "patterns": ["have a complaint", "I want to raise a complaint", "there is a complaint about a service"],
      "responses": [
        "Please provide us your complaint so we can assist you",
        "Please mention your complaint, we will reach out to you, and sorry for any inconvenience caused"
      ]
    }
  ]
}
Unfortunately there is no way to use it in Slack; the original idea was to answer some messages on my behalf.
But still, after a bit of Googling, it seems there are some datasets available for such projects, like:
And it is so cool: there is an archive of Ubuntu support chats. Imagine a bot which can answer how to write a bash script :)
Or this one with phrases from movies.