Cat vs. non-cat classifier

import tensorflow as tf

height = 64
width = 64
channels = 1
n_inputs = height * width

conv1_fmaps = 32
conv1_ksize = 3
conv1_stride = 1
conv1_pad = "SAME"

conv2_fmaps = 64
conv2_ksize = 3
conv2_stride = 2
conv2_pad = "SAME"

pool3_fmaps = conv2_fmaps

n_fc1 = 64
n_outputs = 2

reset_graph()  # helper defined elsewhere in the notebook; assumed to clear the default graph

with tf.name_scope("inputs"):
    X = tf.placeholder(tf.float32, shape=[None, n_inputs], name="X")  # (None, 4096)
    X_reshaped = tf.reshape(X, shape=[-1, height, width, channels])   # (None, 64, 64, 1)
    y = tf.placeholder(tf.int32, shape=[None], name="y")              # (None,)

conv1 = tf.layers.conv2d(X_reshaped, filters=conv1_fmaps, kernel_size=conv1_ksize,
                         strides=conv1_stride, padding=conv1_pad,
                         activation=tf.nn.relu, name="conv1")

# conv1 -> (None, 64, 64, 32)

conv2 = tf.layers.conv2d(conv1, filters=conv2_fmaps, kernel_size=conv2_ksize,
                         strides=conv2_stride, padding=conv2_pad,
                         activation=tf.nn.relu, name="conv2")

# conv2 -> (None, 32, 32, 64)

with tf.name_scope("pool3"):
    pool3 = tf.nn.max_pool(conv2, ksize=[1, 2, 2, 1], strides=[1, 2, 2, 1], padding="VALID")
    # pool3 -> (None, 16, 16, 64)
    pool3_flat = tf.reshape(pool3, shape=[-1, pool3_fmaps * 160 * 160])
    # pool3_flat -> (None, 16384) expected; see the shape arithmetic sketch after the graph definition

with tf.name_scope("fc1"):
    fc1 = tf.layers.dense(pool3_flat, n_fc1, activation=tf.nn.relu, name="fc1")

with tf.name_scope("output"):
    logits = tf.layers.dense(fc1, n_outputs, name="output")
    Y_proba = tf.nn.softmax(logits, name="Y_proba")

with tf.name_scope("train"):
    xentropy = tf.nn.sparse_softmax_cross_entropy_with_logits(logits=logits, labels=y)
    loss = tf.reduce_mean(xentropy)
    optimizer = tf.train.AdamOptimizer()
    training_op = optimizer.minimize(loss)

with tf.name_scope("eval"):
    correct = tf.nn.in_top_k(logits, y, 1)
    accuracy = tf.reduce_mean(tf.cast(correct, tf.float32))

with tf.name_scope("init_and_save"):
    init = tf.global_variables_initializer()
    saver = tf.train.Saver()
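
For reference, here is a minimal sketch of the expected shape arithmetic for the layers above, assuming the 64 x 64 single-channel input defined at the top (the helper names are only for illustration):

import math

def same_out(size, stride):
    # "SAME" padding: output size = ceil(input size / stride)
    return math.ceil(size / stride)

def valid_pool_out(size, window, stride):
    # "VALID" pooling: output size = floor((input size - window) / stride) + 1
    return (size - window) // stride + 1

side = 64                          # height == width == 64
side = same_out(side, 1)           # conv1, stride 1 -> 64
side = same_out(side, 2)           # conv2, stride 2 -> 32
side = valid_pool_out(side, 2, 2)  # 2x2 max pool    -> 16
flat = side * side * 64            # 16 * 16 * 64 = 16384
print(side, flat)                  # 16 16384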

I am getting the error below and don't know where the mistake is:

logits and labels must have the same first dimension, got logits shape [1,2] and labels shape [100]
[[{{node train/SparseSoftmaxCrossEntropyWithLogits/SparseSoftmaxCrossEntropyWithLogits}}]]

Please compare the dimensions of logits and labels.
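
One way to compare them is to print the static shapes right after the graph above is built; a minimal sketch, assuming the construction code has already run:

print("pool3:     ", pool3.shape)       # (?, 16, 16, 64)
print("pool3_flat:", pool3_flat.shape)  # second dimension should be 16 * 16 * 64 = 16384
print("logits:    ", logits.shape)      # (?, 2)
print("y:         ", y.shape)           # (?,)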