TensorFlow Dataset API
import matplotlib as mpl
import matplotlib.pyplot as plt
import numpy as np
import sklearn
import pandas as pd
import os
import sys
import time
import tensorflow as tf
from tensorflow import keras
#
# (x,y), (x_test, y_test) = keras.datasets.cifar100.load_data()
# y = tf.squeeze(y, axis=1)
# y_test = tf.squeeze(y_test, axis=1)
# print(x.shape, y.shape, x_test.shape, y_test.shape)
# train_db = tf.data.Dataset.from_tensor_slices((x,y))
dataset = tf.data.Dataset.from_tensor_slices(np.arange(5))
dataset = dataset.repeat(3)
for i in dataset:
    print(i)
'''
tf.Tensor(0, shape=(), dtype=int32)
tf.Tensor(1, shape=(), dtype=int32)
tf.Tensor(2, shape=(), dtype=int32)
tf.Tensor(3, shape=(), dtype=int32)
tf.Tensor(4, shape=(), dtype=int32)
tf.Tensor(0, shape=(), dtype=int32)
tf.Tensor(1, shape=(), dtype=int32)
tf.Tensor(2, shape=(), dtype=int32)
tf.Tensor(3, shape=(), dtype=int32)
tf.Tensor(4, shape=(), dtype=int32)
tf.Tensor(0, shape=(), dtype=int32)
tf.Tensor(1, shape=(), dtype=int32)
tf.Tensor(2, shape=(), dtype=int32)
tf.Tensor(3, shape=(), dtype=int32)
tf.Tensor(4, shape=(), dtype=int32)
'''
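# repeat() with no argument repeats the dataset indefinitely; take() is one way
# to bound iteration. (Added sketch for illustration, not in the original post.)
infinite = tf.data.Dataset.from_tensor_slices(np.arange(5)).repeat()
for i in infinite.take(7):
    print(i.numpy(), end=' ')   # 0 1 2 3 4 0 1
print()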
print("*"*30)
dataset = dataset.shuffle(100).batch(4)
print(type(dataset))
for i in dataset:
    print(i)
'''
dataset=dataset.shuffle(100).batch(4)
tf.Tensor([3 0 3 1], shape=(4,), dtype=int32)
tf.Tensor([3 1 2 4], shape=(4,), dtype=int32)
tf.Tensor([0 4 4 2], shape=(4,), dtype=int32)
tf.Tensor([0 1 2], shape=(3,), dtype=int32)
dataset=dataset.batch(4).shuffle(100)
tf.Tensor([3 4 0 1], shape=(4,), dtype=int32)
tf.Tensor([2 3 4], shape=(3,), dtype=int32)
tf.Tensor([0 1 2 3], shape=(4,), dtype=int32)
tf.Tensor([4 0 1 2], shape=(4,), dtype=int32)
'''
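# The two orderings above differ: shuffle().batch() shuffles individual elements
# and then groups them, so values mix across batch boundaries, while
# batch().shuffle() keeps each batch intact and only shuffles the order of whole
# batches. A common pipeline ordering is sketched below (illustrative only; the
# buffer and batch sizes are just the ones used above):
pipeline = (tf.data.Dataset.from_tensor_slices(np.arange(5))
            .repeat(3)
            .shuffle(buffer_size=100)                    # shuffle elements first
            .batch(4)                                    # then group into batches
            .prefetch(tf.data.experimental.AUTOTUNE))    # overlap input prep with training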
x = np.array([[1, 2], [3, 4], [5, 6]])
y = np.array(['cat', 'dog', 'fox'])
dataset3 = tf.data.Dataset.from_tensor_slices((x, y))
print(dataset3)
dataset4 = tf.data.Dataset.from_tensor_slices({"feature": x,
"label": y})
for item in dataset4:
    print(item["feature"].numpy(), item["label"].numpy())
'''
[1 2] b'cat'
[3 4] b'dog'
[5 6] b'fox'
'''
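# A dict-structured dataset works naturally with map(); this small sketch (not
# part of the original example) casts the features to float32 and scales them,
# leaving the label untouched.
dataset5 = dataset4.map(
    lambda item: {"feature": tf.cast(item["feature"], tf.float32) / 10.0,
                  "label": item["label"]})
for item in dataset5:
    print(item["feature"].numpy(), item["label"].numpy())   # e.g. [0.1 0.2] b'cat'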
a = tf.data.Dataset.range(1, 6) # ==> [ 1, 2, 3, 4, 5 ]
# cycle_length=2 draws from two input elements at a time; block_length=4 takes
# four consecutive outputs from each before moving on to the next.
b = a.interleave(lambda x: tf.data.Dataset.from_tensors(x).repeat(6),
                 cycle_length=2, block_length=4)
for item in b:
    print(item.numpy(), end=', ')
'''
1, 1, 1, 1, 2, 2, 2, 2, 1, 1, 2, 2, 3, 3, 3, 3, 4, 4, 4, 4, 3, 3, 4, 4, 5, 5, 5, 5, 5, 5,
'''
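# In practice interleave is mostly used to mix records from several files in
# parallel. The sketch below writes three small temporary text files just so the
# example runs end to end (the file names and contents are made up here).
import tempfile, pathlib
tmpdir = pathlib.Path(tempfile.mkdtemp())
for i in range(3):
    (tmpdir / "part_{}.txt".format(i)).write_text(
        "\n".join("file{}-line{}".format(i, j) for j in range(4)))
filenames = sorted(str(p) for p in tmpdir.glob("part_*.txt"))
file_ds = tf.data.Dataset.from_tensor_slices(filenames)
records = file_ds.interleave(
    lambda path: tf.data.TextLineDataset(path),
    cycle_length=3,    # read three files concurrently
    block_length=1)    # take one line from each file per cycle
for line in records.take(6):
    print(line.numpy())   # alternates: file0-line0, file1-line0, file2-line0, ...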
# Reference: tf.data.Dataset.interleave