米修儿:
Model training code:

import os
import time

import h5py
import numpy as np
from sklearn.model_selection import train_test_split
import tensorflow as tf
from tensorflow.keras import layers, models
from tensorflow.keras.utils import to_categorical


# Load dataset
def load_dataset():
    with h5py.File("dataset/data.h5", "r") as data:
        X_data = np.array(data['X'])
        Y_data = np.array(data['Y'])
    X_train, X_test, y_train, y_test = train_test_split(
        X_data, Y_data, train_size=0.9, test_size=0.1, random_state=22)
    X_train = X_train / 255.  # Normalize pixel values to [0, 1]
    X_test = X_test / 255.
    y_train = to_categorical(y_train, num_classes=11)  # One-hot encode the 11 classes
    y_test = to_categorical(y_test, num_classes=11)
    return X_train, X_test, y_train, y_test


# Define CNN model using Keras
def cnn_model(input_shape, num_classes):
    model = models.Sequential()
    # Convolutional layers
    model.add(layers.Conv2D(32, (5, 5), activation='relu', input_shape=input_shape))
    model.add(layers.MaxPooling2D(pool_size=(2, 2)))
    model.add(layers.Conv2D(64, (5, 5), activation='relu'))
    model.add(layers.MaxPooling2D(pool_size=(2, 2)))
    # Flatten and fully connected layers
    model.add(layers.Flatten())
    model.add(layers.Dense(200, activation='relu'))
    model.add(layers.Dropout(0.2))  # Dropout layer for regularization
    model.add(layers.Dense(num_classes, activation='softmax'))
    # Compile model
    model.compile(optimizer='adam', loss='categorical_crossentropy', metrics=['accuracy'])
    return model


if __name__ == "__main__":
    print("Loading dataset: " + time.strftime('%Y-%m-%d %H:%M:%S'))
    X_train, X_test, y_train, y_test = load_dataset()
    print("Starting training: " + time.strftime('%Y-%m-%d %H:%M:%S'))
    # Define model
    model = cnn_model(input_shape=(64, 64, 3), num_classes=11)
    # Train model
    model.fit(X_train, y_train, epochs=500, batch_size=16,
              validation_data=(X_test, y_test))
    # Save model (create the output directory first so save() does not fail)
    os.makedirs("model_500_200_c3", exist_ok=True)
    model.save("model_500_200_c3/cnn_model.keras")
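For completeness, here is a minimal inference sketch showing how the saved model could be loaded back and used on a single image. It mirrors the training script's assumptions (64x64 RGB input normalized to [0, 1], 11 classes); the image path "example.jpg" is illustrative only and not part of the original script:

import numpy as np
from tensorflow.keras import models
from tensorflow.keras.preprocessing import image

# Load the model saved by the training script
model = models.load_model("model_500_200_c3/cnn_model.keras")

# Load and preprocess one image the same way the training data was prepared
img = image.load_img("example.jpg", target_size=(64, 64))  # hypothetical path
x = image.img_to_array(img) / 255.                         # normalize like training
x = np.expand_dims(x, axis=0)                              # add batch dimension

# Predict the class with the highest softmax probability
probs = model.predict(x)
print("Predicted class:", int(np.argmax(probs, axis=1)[0]))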