reorganization of pickles and intents to clean up directory

This commit is contained in:
snbenge
2020-04-20 13:53:05 -04:00
parent cba991bfbe
commit 1c855f1afc
16 changed files with 12 additions and 12 deletions

Binary file not shown.

Binary file not shown.
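Taken together with the path changes in the diffs below, the binary moves suggest a target layout along these lines (reconstructed from the new paths; this page does not show the moved filenames directly):

pickles/
    words.pkl
    classes.pkl
    building_words.pkl
    buildings.pkl
intents/
    intents.json
    buildingIntents.json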


@@ -12,12 +12,12 @@ model = load_model('chatbot_model.h5')
 modelBuilding = load_model('buildings_model.h5')
 import json
 import random
-intents = json.loads(open('intents.json').read())
-words = pickle.load(open('words.pkl','rb'))
-classes = pickle.load(open('classes.pkl','rb'))
-buildingsIntents = json.loads(open('buildingIntents.json').read())
-building_words = pickle.load(open('building_words.pkl','rb'))
-buildings = pickle.load(open('buildings.pkl','rb'))
+intents = json.loads(open('intents/intents.json').read())
+words = pickle.load(open('pickles/words.pkl','rb'))
+classes = pickle.load(open('pickles/classes.pkl','rb'))
+buildingsIntents = json.loads(open('intents/buildingIntents.json').read())
+building_words = pickle.load(open('pickles/building_words.pkl','rb'))
+buildings = pickle.load(open('pickles/buildings.pkl','rb'))
 confirmation = 0
 def clean_up_sentence(sentence):
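A side note on the retargeted loads above: the relative paths ('pickles/...', 'intents/...') still resolve against the current working directory, so the script has to be launched from the repo root. A minimal sketch of a location-independent variant, assuming pickles/ and intents/ sit next to the script file (the pathlib approach is an illustration, not part of this commit):

import json
import pickle
from pathlib import Path

# Assumption: this file lives in the directory that contains pickles/ and intents/.
BASE = Path(__file__).resolve().parent

intents = json.loads((BASE / 'intents' / 'intents.json').read_text())
with (BASE / 'pickles' / 'words.pkl').open('rb') as f:
    words = pickle.load(f)
with (BASE / 'pickles' / 'classes.pkl').open('rb') as f:
    classes = pickle.load(f)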


@@ -14,7 +14,7 @@ building_words=[]
 buildings = []
 documents = []
 ignore_letters = ['!', '?', ',', '.']
-buildingIntents_file = open('buildingIntents.json').read()
+buildingIntents_file = open('intents/buildingIntents.json').read()
 buildingIntents = json.loads(buildingIntents_file)
 # download nltk resources
@@ -44,8 +44,8 @@ print (len(buildings), "buildings", buildings)
 # building_words = all building_words, vocabulary
 print (len(building_words), "unique lemmatized building_words", building_words)
-pickle.dump(building_words,open('building_words.pkl','wb'))
-pickle.dump(buildings,open('buildings.pkl','wb'))
+pickle.dump(building_words,open('pickles/building_words.pkl','wb'))
+pickle.dump(buildings,open('pickles/buildings.pkl','wb'))
 # create our training data
 training = []


@@ -14,7 +14,7 @@ words=[]
 classes = []
 documents = []
 ignore_letters = ['!', '?', ',', '.']
-intents_file = open('intents.json').read()
+intents_file = open('intents/intents.json').read()
 intents = json.loads(intents_file)
 # download nltk resources
@@ -44,8 +44,8 @@ print (len(classes), "classes", classes)
 # words = all words, vocabulary
 print (len(words), "unique lemmatized words", words)
-pickle.dump(words,open('words.pkl','wb'))
-pickle.dump(classes,open('classes.pkl','wb'))
+pickle.dump(words,open('pickles/words.pkl','wb'))
+pickle.dump(classes,open('pickles/classes.pkl','wb'))
 # create our training data
 training = []
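One caveat that applies to the retargeted dump calls in both training scripts: open('pickles/words.pkl', 'wb') raises FileNotFoundError when the pickles/ directory does not exist, since open() will not create intermediate directories. A small guard, shown as a sketch rather than something this commit adds:

import os
import pickle

words = ['hello', 'hi']   # placeholders; the real lists are built earlier in the script
classes = ['greeting']

os.makedirs('pickles', exist_ok=True)  # create the target directory if missing
with open('pickles/words.pkl', 'wb') as f:
    pickle.dump(words, f)
with open('pickles/classes.pkl', 'wb') as f:
    pickle.dump(classes, f)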