Dataset
http://host.robots.ox.ac.uk:8080/pascal/VOC/voc2011/index.html
Note: this article uses the 2012 version of the dataset (VOC2012).
Project layout
datasets
|____commodity
        |____Annotations
        |____JPEGImages
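
Before running the preprocessing script, a quick sanity check of this layout can help. The sketch below is a minimal example and not part of the original code: it assumes the directory names shown above (the same './commodity' path used in the script further down) and that each image shares its basename with the corresponding XML annotation and uses a .jpg extension.

import os

# Hypothetical check that every annotation has a matching image.
ann_dir = './commodity/Annotations'
img_dir = './commodity/JPEGImages'

xml_files = [f for f in os.listdir(ann_dir) if f.endswith('.xml')]
missing = [f for f in xml_files
           # Assumes images are named <basename>.jpg
           if not os.path.exists(os.path.join(img_dir, f[:-4] + '.jpg'))]

print('annotation files:', len(xml_files))
print('annotations without a matching image:', len(missing))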
Data preprocessing
from xml.etree import ElementTree as ET
from collections import OrderedDict as Dict
import numpy as np
import pickle
import os


class xmlProcess(object):
    """Parse VOC-style annotation XML files into training-ready arrays."""

    def __init__(self, file_path):
        self.xml_path = file_path
        self.num_class = 11
        self.data = Dict()

    def process_xml(self):
        """For each XML file, store rows of [xmin, ymin, xmax, ymax, one-hot label]."""
        for f in os.listdir(self.xml_path):
            et = ET.parse(os.path.join(self.xml_path, f))
            root = et.getroot()
            # Image size, used to normalize box coordinates to [0, 1]
            size = root.find('size')
            width = float(size.find('width').text)
            height = float(size.find('height').text)
            depth = float(size.find('depth').text)  # channel count; parsed but not used below
            bounding_boxes = []
            one_hots = []
            for obj in root.findall('object'):
                # Normalized bounding-box coordinates
                for res in obj.iter('bndbox'):
                    xmin = float(res.find('xmin').text) / width
                    ymin = float(res.find('ymin').text) / height
                    xmax = float(res.find('xmax').text) / width
                    ymax = float(res.find('ymax').text) / height
                    bounding_boxes.append([xmin, ymin, xmax, ymax])
                # One-hot encode the object class name
                obj_name = obj.find('name').text
                one_hot_name = self.one_hot(obj_name)
                one_hots.append(one_hot_name)
            bounding_boxes = np.asarray(bounding_boxes)
            one_hots = np.asarray(one_hots)
            # Each row: 4 normalized coordinates followed by the 11-dim one-hot label
            image_data = np.hstack((bounding_boxes, one_hots))
            self.data[f] = image_data
        return None

    def one_hot(self, name):
        """Return an 11-dim one-hot vector for the class name (all zeros if unknown)."""
        one_hot_vector = [0] * self.num_class
        if name == 'sheep':
            one_hot_vector[0] = 1
        elif name == 'aeroplane':
            one_hot_vector[1] = 1
        elif name == 'boat':
            one_hot_vector[2] = 1
        elif name == 'tvmonitor':
            one_hot_vector[3] = 1
        elif name == 'train':
            one_hot_vector[4] = 1
        elif name == 'bird':
            one_hot_vector[5] = 1
        elif name == 'dog':
            one_hot_vector[6] = 1
        elif name == 'chair':
            one_hot_vector[7] = 1
        elif name == 'bicycle':
            one_hot_vector[8] = 1
        elif name == 'bottle':
            one_hot_vector[9] = 1
        elif name == 'person':
            one_hot_vector[10] = 1
        return one_hot_vector


if __name__ == '__main__':
    pro = xmlProcess('./commodity/Annotations')
    pro.process_xml()
    # Persist the parsed annotations for later use during model training
    pickle.dump(pro.data, open('./image_data.pkl', 'wb'))
Notes
The code above reads every annotated object from all the XML files: the object position (normalized by the image width and height) and the object class name (one-hot encoded). The results are then pickled to prepare the data for model training.
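
As a quick way to verify the stored result, the sketch below (a minimal example, not part of the original pipeline) reloads image_data.pkl and inspects one entry; each row should have 15 columns: 4 normalized coordinates plus the 11-dim one-hot label.

import pickle

# Reload the preprocessed annotations written by xmlProcess above
with open('./image_data.pkl', 'rb') as f:
    data = pickle.load(f)

# Each value is an array whose rows are objects and whose columns are
# [xmin, ymin, xmax, ymax] followed by the 11-dim one-hot class vector.
name, boxes = next(iter(data.items()))
print(name, boxes.shape)  # e.g. an XML filename and a shape like (3, 15)
print(boxes[0])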