How do I load a JSON dataset into TensorFlow?

Asked: 2019-04-02 23:49:39

Tags: json python-3.x tensorflow keras

I'm new to TensorFlow, and I'm trying to feed it my own dataset instead of MNIST. However, I don't know how to do this. Currently, I iterate over a directory, take each JSON file, load it as JSON, and store it in a list. How do I go about processing this so I can use the data to build a model?

Code so far:

import tensorflow as tf
import numpy as np
import matplotlib.pyplot as plt
import os
import json
import cv2


data_directory = ''
path = os.path.join(data_directory, 'MiniDataSet')

training_data = []
def create_training_data():
    # Walk the dataset directory, parse every JSON file, and collect the
    # decoded pixel grids in training_data.
    for file in os.listdir(path):
        if file.endswith(".json"):
            with open(os.path.join(path, file)) as json_data:
                data = json.load(json_data)
                training_data.append(data)
            # no explicit close() needed: the with-block already closes the file
create_training_data()
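
A minimal sketch of one way to turn the collected list into something TensorFlow can consume, assuming every JSON file decodes to a 32x32 grid of pixel values like the example below:

import numpy as np
import tensorflow as tf

# Stack the list of 32x32 pixel grids into one array of shape (num_samples, 32, 32)
# and rescale the 0-255 values to the 0-1 range, as is common for MNIST-style data.
images = np.array(training_data, dtype=np.float32) / 255.0

# Wrap the array in a tf.data.Dataset so it can be shuffled and batched for training.
dataset = tf.data.Dataset.from_tensor_slices(images).shuffle(1000).batch(32)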

Example data from one of the JSON files:

[[255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 254, 254, 255, 253, 251, 252, 254, 113, 118, 228, 255, 255, 251, 254, 252, 254, 255, 255, 255, 255, 255], [255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 254, 253, 253, 251, 255, 255, 255, 136, 46, 95, 156, 236, 255, 251, 252, 254, 255, 255, 255, 255, 255], [255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 254, 255, 255, 189, 175, 255, 161, 42, 55, 54, 95, 208, 255, 252, 255, 255, 255, 255, 255, 255], [255, 255, 255, 255, 255, 255, 255, 255, 254, 254, 254, 252, 255, 245, 130, 69, 110, 106, 66, 120, 126, 65, 128, 104, 179, 255, 253, 255, 255, 255, 255, 255], [255, 255, 255, 255, 255, 255, 255, 255, 254, 253, 254, 254, 243, 108, 101, 162, 140, 118, 87, 225, 255, 224, 255, 212, 68, 214, 254, 253, 255, 255, 255, 255], [255, 255, 255, 255, 255, 255, 255, 255, 255, 253, 254, 244, 118, 74, 211, 250, 239, 255, 252, 253, 250, 255, 249, 255, 136, 111, 255, 253, 255, 255, 255, 255], [255, 255, 255, 255, 255, 255, 255, 255, 254, 251, 255, 131, 43, 170, 241, 255, 255, 253, 255, 254, 254, 253, 252, 254, 213, 77, 238, 255, 253, 255, 255, 255], [255, 255, 255, 255, 255, 255, 255, 255, 251, 255, 183, 30, 154, 255, 255, 254, 255, 255, 255, 255, 255, 255, 255, 255, 254, 83, 186, 255, 252, 254, 255, 255], [255, 255, 255, 255, 255, 255, 255, 254, 254, 242, 62, 94, 255, 250, 252, 255, 255, 255, 255, 255, 255, 255, 255, 253, 255, 110, 145, 255, 250, 254, 255, 255], [255, 255, 255, 255, 255, 254, 253, 251, 255, 117, 47, 229, 255, 251, 254, 255, 255, 255, 255, 255, 255, 255, 255, 251, 255, 140, 102, 255, 251, 254, 255, 255], [255, 255, 255, 255, 255, 254, 251, 255, 195, 28, 184, 255, 253, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 251, 255, 164, 74, 255, 253, 254, 255, 255], [255, 255, 255, 255, 255, 253, 254, 232, 60, 78, 251, 253, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 252, 255, 184, 63, 247, 254, 254, 255, 255], [255, 255, 254, 254, 255, 252, 255, 117, 31, 203, 255, 253, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 251, 255, 185, 58, 242, 255, 253, 255, 255], [255, 255, 255, 255, 253, 255, 192, 25, 169, 255, 250, 253, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 251, 255, 169, 82, 255, 254, 255, 255, 255], [255, 255, 255, 253, 254, 246, 72, 84, 254, 251, 252, 254, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 252, 255, 144, 68, 253, 254, 254, 255, 255], [255, 255, 254, 251, 255, 149, 19, 198, 255, 252, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 251, 255, 124, 108, 254, 252, 254, 255, 255], [255, 255, 252, 255, 240, 47, 109, 254, 252, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 252, 255, 90, 145, 255, 250, 255, 255, 255], [255, 255, 252, 255, 185, 37, 218, 255, 252, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 249, 71, 177, 255, 249, 254, 255, 255], [255, 255, 252, 255, 124, 48, 232, 255, 252, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 253, 255, 213, 37, 207, 255, 252, 255, 255, 255], [255, 255, 252, 255, 138, 23, 186, 255, 252, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 252, 255, 150, 52, 242, 255, 254, 255, 255, 255], [255, 255, 252, 255, 180, 29, 206, 255, 253, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 254, 254, 253, 71, 102, 255, 253, 255, 255, 255, 255], [255, 255, 252, 255, 195, 27, 194, 255, 252, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 254, 251, 254, 233, 39, 160, 255, 252, 255, 255, 255, 255], [255, 255, 252, 255, 227, 29, 
124, 255, 252, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 254, 250, 255, 158, 52, 231, 255, 254, 255, 255, 255, 255], [255, 255, 254, 255, 252, 59, 110, 255, 251, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 251, 255, 236, 43, 113, 255, 253, 255, 255, 255, 255, 255], [255, 255, 254, 252, 255, 92, 75, 253, 253, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 252, 253, 117, 54, 228, 253, 252, 255, 255, 255, 255, 255], [255, 255, 254, 251, 255, 173, 32, 199, 255, 253, 255, 255, 255, 255, 255, 255, 255, 255, 255, 253, 255, 214, 21, 145, 255, 250, 253, 255, 255, 255, 255, 255], [255, 255, 254, 251, 255, 239, 51, 95, 255, 253, 254, 255, 255, 255, 255, 255, 255, 255, 254, 253, 253, 100, 57, 226, 253, 251, 254, 255, 255, 255, 255, 255], [255, 255, 254, 254, 253, 255, 153, 28, 166, 255, 251, 251, 253, 254, 253, 254, 252, 249, 248, 255, 143, 30, 198, 255, 253, 255, 255, 255, 255, 255, 255, 255], [255, 255, 255, 255, 255, 253, 247, 109, 41, 168, 255, 255, 252, 252, 251, 253, 252, 255, 255, 166, 30, 152, 255, 251, 255, 255, 255, 255, 255, 255, 255, 255], [255, 255, 255, 255, 255, 253, 254, 245, 112, 34, 129, 220, 255, 255, 255, 255, 252, 213, 125, 40, 116, 253, 251, 253, 255, 255, 255, 255, 255, 255, 255, 255], [255, 255, 255, 255, 255, 255, 253, 255, 242, 143, 64, 64, 100, 117, 128, 131, 102, 59, 52, 159, 253, 255, 254, 255, 255, 255, 255, 255, 255, 255, 255, 255], [255, 255, 255, 255, 255, 255, 255, 254, 255, 255, 224, 163, 118, 84, 76, 79, 103, 149, 218, 255, 255, 254, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255]]

If it helps, each JSON file corresponds to a handwritten image of a digit 0-9. What I want to do is feed the list of all these JSON files to a neural network, train on it, and build a model from that. Again, I'm new to TensorFlow, so thanks in advance to anyone who answers.
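
One possible way to attach labels and train a small classifier, sketched under the assumption that the digit for each image can be read from its file name (the naming scheme used here is hypothetical):

import os
import json
import numpy as np
import tensorflow as tf

images, labels = [], []
for file in os.listdir(path):
    if file.endswith(".json"):
        with open(os.path.join(path, file)) as json_data:
            images.append(json.load(json_data))
        # Hypothetical naming scheme: the digit is the first character of the file name.
        labels.append(int(file[0]))

x = np.array(images, dtype=np.float32) / 255.0   # shape (num_samples, 32, 32)
y = np.array(labels, dtype=np.int32)

# A small fully connected classifier, similar to the usual Keras MNIST example.
model = tf.keras.Sequential([
    tf.keras.layers.Flatten(input_shape=(32, 32)),
    tf.keras.layers.Dense(128, activation='relu'),
    tf.keras.layers.Dense(10, activation='softmax'),
])
model.compile(optimizer='adam',
              loss='sparse_categorical_crossentropy',
              metrics=['accuracy'])
model.fit(x, y, epochs=5)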

0 answers:

No answers yet.