# MNIST_Example/inference.py

"""
######################## single-dataset inference lenet example ########################
This example is a single-dataset inference tutorial.
######################## Instructions for using the inference environment ########################
The image of the debugging environment and the image of the inference environment are two different images,
and their local working directories are different. In an inference task, pay attention to the following points.
1. (1) The structure of the dataset uploaded for single-dataset inference in this example:
   MNISTData.zip
   ├── test
   │   ├── t10k-images-idx3-ubyte
   │   └── t10k-labels-idx1-ubyte
   └── train
       ├── train-images-idx3-ubyte
       └── train-labels-idx1-ubyte
   (2) The structure of the single dataset inside the inference image in this example:
   workroot
   ├── data
   │   ├── test
   │   └── train
2. The inference task requires the following predefined functions.
   (1) Determine whether the task runs in the inference environment or the debugging environment.
   def WorkEnvironment(environment):
       if environment == 'train':
           workroot = '/home/work/user-job-dir'   # the local path of the inference image
       elif environment == 'debug':
           workroot = '/home/ma-user/work'        # the local path of the debug image
       print('current work mode:' + environment + ', workroot:' + workroot)
       return workroot
   (2) Copy the single dataset from OBS to the inference image.
   def ObsToEnv(obs_data_url, data_dir):
       try:
           mox.file.copy_parallel(obs_data_url, data_dir)
           print("Successfully Download {} to {}".format(obs_data_url, data_dir))
       except Exception as e:
           print('moxing download {} to {} failed: '.format(obs_data_url, data_dir) + str(e))
       return
   (3) Copy the ckpt file from OBS to the inference image.
   def ObsUrlToEnv(obs_ckpt_url, ckpt_url):
       try:
           mox.file.copy(obs_ckpt_url, ckpt_url)
           print("Successfully Download {} to {}".format(obs_ckpt_url, ckpt_url))
       except Exception as e:
           print('moxing download {} to {} failed: '.format(obs_ckpt_url, ckpt_url) + str(e))
       return
   (4) Copy the output result to OBS.
   def EnvToObs(train_dir, obs_train_url):
       try:
           mox.file.copy_parallel(train_dir, obs_train_url)
           print("Successfully Upload {} to {}".format(train_dir, obs_train_url))
       except Exception as e:
           print('moxing upload {} to {} failed: '.format(train_dir, obs_train_url) + str(e))
       return
3. Four parameters need to be defined:
   --data_url is the dataset you selected on the Qizhi platform.
   --ckpt_url is the weight file you selected on the Qizhi platform.
   --data_url, --ckpt_url, --result_url and --device_target must all be defined first in a single-dataset
   inference task, otherwise an error will be reported.
   There is no need to add these parameters to the running parameters of the Qizhi platform,
   because they are predefined in the background; you only need to define them in your code.
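   For example, a minimal sketch of these definitions (the full argparse setup used by this script
   appears in the code below):
   parser = argparse.ArgumentParser(description='MindSpore Lenet Example')
   parser.add_argument('--data_url', type=str, default=WorkEnvironment('train') + '/data/')
   parser.add_argument('--ckpt_url', type=str, default=WorkEnvironment('train') + '/checkpoint.ckpt')
   parser.add_argument('--result_url', type=str, default=WorkEnvironment('train') + '/result/')
   parser.add_argument('--device_target', type=str, default="Ascend", choices=['Ascend', 'GPU', 'CPU'])
   args, unknown = parser.parse_known_args()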
4. How the dataset is used:
   The inference task takes data_url as the input, and data_dir (i.e. workroot + '/data') is the local path
   used to access the dataset inside the image.
   For details, please refer to the sample code below.
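   For example, a minimal sketch of reading the copied test split (create_dataset is the helper imported
   from dataset.py in this example):
   ds_test = create_dataset(os.path.join(data_dir, "test"), batch_size=1)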
"""
import os
import argparse
import moxing as mox
import mindspore.nn as nn
from mindspore import context
from mindspore.train.serialization import load_checkpoint, load_param_into_net
from mindspore.train import Model
from mindspore.nn.metrics import Accuracy
from mindspore import Tensor
import numpy as np
from glob import glob
from dataset import create_dataset
from config import mnist_cfg as cfg
from lenet import LeNet5
### Determine whether the task runs in the inference environment or the debugging environment ###
def WorkEnvironment(environment):
    if environment == 'train':
        workroot = '/home/work/user-job-dir'
    elif environment == 'debug':
        workroot = '/home/ma-user/work'
    print('current work mode:' + environment + ', workroot:' + workroot)
    return workroot
### Copy the single dataset from OBS to the inference image ###
def ObsToEnv(obs_data_url, data_dir):
    try:
        mox.file.copy_parallel(obs_data_url, data_dir)
        print("Successfully Download {} to {}".format(obs_data_url, data_dir))
    except Exception as e:
        print('moxing download {} to {} failed: '.format(obs_data_url, data_dir) + str(e))
    return
### Copy the ckpt file from OBS to the inference image ###
### Use mox.file.copy_parallel to operate on folders; to copy a single file,
### use mox.file.copy, which is what this function does.
def ObsUrlToEnv(obs_ckpt_url, ckpt_url):
    try:
        mox.file.copy(obs_ckpt_url, ckpt_url)
        print("Successfully Download {} to {}".format(obs_ckpt_url, ckpt_url))
    except Exception as e:
        print('moxing download {} to {} failed: '.format(obs_ckpt_url, ckpt_url) + str(e))
    return
### Copy the output result to OBS ###
def EnvToObs(train_dir, obs_train_url):
    try:
        mox.file.copy_parallel(train_dir, obs_train_url)
        print("Successfully Upload {} to {}".format(train_dir, obs_train_url))
    except Exception as e:
        print('moxing upload {} to {} failed: '.format(train_dir, obs_train_url) + str(e))
    return
### --data_url, --ckpt_url, --result_url and --device_target must be defined first in an inference task,
### otherwise an error will be reported.
### There is no need to add these parameters to the running parameters of the Qizhi platform,
### because they are predefined in the background; you only need to define them in your code.
parser = argparse.ArgumentParser(description='MindSpore Lenet Example')
parser.add_argument('--data_url',
                    type=str,
                    default=WorkEnvironment('train') + '/data/',
                    help='path where the dataset is saved')
parser.add_argument('--ckpt_url',
                    type=str,
                    default=WorkEnvironment('train') + '/checkpoint.ckpt',
                    help='model to save/load')
parser.add_argument('--result_url',
                    type=str,
                    default=WorkEnvironment('train') + '/result/',
                    help='result folder to save/load')
parser.add_argument('--device_target', type=str, default="Ascend", choices=['Ascend', 'GPU', 'CPU'],
                    help='device where the code will be implemented (default: Ascend)')
if __name__ == "__main__":
    args, unknown = parser.parse_known_args()
    ### Define the working environment (inference tasks use the 'train' workroot)
    environment = 'train'
    workroot = WorkEnvironment(environment)

    ### Initialize the data and result directories in the inference image ###
    data_dir = workroot + '/data'
    result_dir = workroot + '/result'
    ckpt_url = workroot + '/checkpoint.ckpt'
    if not os.path.exists(data_dir):
        os.makedirs(data_dir)
    if not os.path.exists(result_dir):
        os.makedirs(result_dir)

    ### Copy the dataset from OBS to the inference image
    obs_data_url = args.data_url
    ObsToEnv(obs_data_url, data_dir)
    ### Copy the ckpt file from OBS to the inference image
    obs_ckpt_url = args.ckpt_url
    ObsUrlToEnv(obs_ckpt_url, ckpt_url)
    ### Set the output path result_url
    obs_result_url = args.result_url

    context.set_context(mode=context.GRAPH_MODE, device_target=args.device_target)
    network = LeNet5(cfg.num_classes)
    net_loss = nn.SoftmaxCrossEntropyWithLogits(sparse=True, reduction="mean")
    repeat_size = cfg.epoch_size
    net_opt = nn.Momentum(network.trainable_params(), cfg.lr, cfg.momentum)
    model = Model(network, net_loss, net_opt, metrics={"Accuracy": Accuracy()})

    print("============== Starting Testing ==============")
    ### Load the checkpoint weights into the network
    param_dict = load_checkpoint(os.path.join(ckpt_url))
    load_param_into_net(network, param_dict)

    ### Take one sample from the test set and run prediction on it
    ds_test = create_dataset(os.path.join(data_dir, "test"), batch_size=1).create_dict_iterator()
    data = next(ds_test)
    images = data["image"].asnumpy()
    labels = data["label"].asnumpy()
    print('Tensor:', Tensor(data['image']))
    output = model.predict(Tensor(data['image']))
    predicted = np.argmax(output.asnumpy(), axis=1)
    print('predicted:', predicted)
    print(f'Predicted: "{predicted[0]}", Actual: "{labels[0]}"')

    ### Write the prediction to a result file
    filename = 'result.txt'
    file_path = os.path.join(result_dir, filename)
    with open(file_path, 'a+') as file:
        file.write("{}: {}\n".format("Predicted", predicted[0]))

    ### Copy the result from the local running environment back to OBS,
    ### so it can be downloaded from the corresponding inference task on the Qizhi platform
    EnvToObs(result_dir, obs_result_url)