Mirror of https://github.com/Mobile-Robotics-W20-Team-9/UMICH-NCLT-SLAP.git (synced 2025-09-08 20:13:13 +00:00)
reorganization of pickles and intents to clean up directory
Binary file not shown.
Binary file not shown.
@@ -12,12 +12,12 @@ model = load_model('chatbot_model.h5')
 modelBuilding = load_model('buildings_model.h5')
 import json
 import random
-intents = json.loads(open('intents.json').read())
-words = pickle.load(open('words.pkl','rb'))
-classes = pickle.load(open('classes.pkl','rb'))
-buildingsIntents = json.loads(open('buildingIntents.json').read())
-building_words = pickle.load(open('building_words.pkl','rb'))
-buildings = pickle.load(open('buildings.pkl','rb'))
+intents = json.loads(open('intents/intents.json').read())
+words = pickle.load(open('pickles/words.pkl','rb'))
+classes = pickle.load(open('pickles/classes.pkl','rb'))
+buildingsIntents = json.loads(open('intents/buildingIntents.json').read())
+building_words = pickle.load(open('pickles/building_words.pkl','rb'))
+buildings = pickle.load(open('pickles/buildings.pkl','rb'))
 confirmation = 0

 def clean_up_sentence(sentence):
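Note: the new load paths from the hunk above, rewritten as a self-contained sketch. The pathlib constants and the with-blocks are illustrative only and are not part of the commit; the keras import is assumed as the source of the load_model call shown in the hunk context.

import json
import pickle
from pathlib import Path

from keras.models import load_model  # assumed; load_model appears in the hunk context

INTENTS_DIR = Path('intents')   # new home of the intent JSON files after this commit
PICKLES_DIR = Path('pickles')   # new home of the vocabulary/label pickles

model = load_model('chatbot_model.h5')
modelBuilding = load_model('buildings_model.h5')

with open(INTENTS_DIR / 'intents.json') as f:
    intents = json.load(f)
with open(INTENTS_DIR / 'buildingIntents.json') as f:
    buildingsIntents = json.load(f)

with open(PICKLES_DIR / 'words.pkl', 'rb') as f:
    words = pickle.load(f)
with open(PICKLES_DIR / 'classes.pkl', 'rb') as f:
    classes = pickle.load(f)
with open(PICKLES_DIR / 'building_words.pkl', 'rb') as f:
    building_words = pickle.load(f)
with open(PICKLES_DIR / 'buildings.pkl', 'rb') as f:
    buildings = pickle.load(f)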
@@ -14,7 +14,7 @@ building_words=[]
 buildings = []
 documents = []
 ignore_letters = ['!', '?', ',', '.']
-buildingIntents_file = open('buildingIntents.json').read()
+buildingIntents_file = open('intents/buildingIntents.json').read()
 buildingIntents = json.loads(buildingIntents_file)

 # download nltk resources
@@ -44,8 +44,8 @@ print (len(buildings), "buildings", buildings)
 # building_words = all building_words, vocabulary
 print (len(building_words), "unique lemmatized building_words", building_words)

-pickle.dump(building_words,open('building_words.pkl','wb'))
-pickle.dump(buildings,open('buildings.pkl','wb'))
+pickle.dump(building_words,open('pickles/building_words.pkl','wb'))
+pickle.dump(buildings,open('pickles/buildings.pkl','wb'))

 # create our training data
 training = []
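Note: on the training side, the building classifier now writes its pickles into pickles/. A minimal sketch of that write path follows; the placeholder lists and the os.makedirs guard are illustrative additions, not part of the commit.

import os
import pickle

# Placeholder data; the real script builds these from intents/buildingIntents.json.
building_words = ['library', 'hall', 'museum']
buildings = ['building_a', 'building_b']

os.makedirs('pickles', exist_ok=True)  # not in the diff; guarantees the target directory exists

with open('pickles/building_words.pkl', 'wb') as f:
    pickle.dump(building_words, f)
with open('pickles/buildings.pkl', 'wb') as f:
    pickle.dump(buildings, f)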
@@ -14,7 +14,7 @@ words=[]
 classes = []
 documents = []
 ignore_letters = ['!', '?', ',', '.']
-intents_file = open('intents.json').read()
+intents_file = open('intents/intents.json').read()
 intents = json.loads(intents_file)

 # download nltk resources
@@ -44,8 +44,8 @@ print (len(classes), "classes", classes)
 # words = all words, vocabulary
 print (len(words), "unique lemmatized words", words)

-pickle.dump(words,open('words.pkl','wb'))
-pickle.dump(classes,open('classes.pkl','wb'))
+pickle.dump(words,open('pickles/words.pkl','wb'))
+pickle.dump(classes,open('pickles/classes.pkl','wb'))

 # create our training data
 training = []
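Note: once both training scripts have run, the chatbot expects the layout introduced by this commit. A small, hypothetical check of that layout (not part of the repository):

from pathlib import Path

# Hypothetical sanity check for the directory layout introduced by this commit.
expected = [
    Path('intents/intents.json'),
    Path('intents/buildingIntents.json'),
    Path('pickles/words.pkl'),
    Path('pickles/classes.pkl'),
    Path('pickles/building_words.pkl'),
    Path('pickles/buildings.pkl'),
]

missing = [p for p in expected if not p.exists()]
if missing:
    raise SystemExit(f'Missing reorganized assets: {missing}')
print('All intents/ and pickles/ assets are in place.')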