# Simple linear regression: fit y_hat = m*x + b by gradient descent (TF1 graph API).
m = tf.Variable(0.44)  # slope, arbitrary initial guess
b = tf.Variable(0.87)  # intercept, arbitrary initial guess

# Accumulate the sum-of-squared-errors loss as a TensorFlow graph tensor.
error = 0
for x, y in zip(x_data, y_label):
    y_hat = m * x + b  # model prediction for this sample
    # BUG FIX: square the residual against the *prediction* (y_hat), not the whole
    # label array. The original `(y - y_label)**2` never used y_hat and made `error`
    # a plain numpy array, so optimizer.minimize(error) failed in _assert_valid_dtypes
    # with "AttributeError: 'numpy.dtype' object has no attribute 'base_dtype'"
    # (see the traceback below). With y_hat here, `error` stays a TF tensor.
    error += (y - y_hat) ** 2

optimizer = tf.train.GradientDescentOptimizer(learning_rate=0.001)
train = optimizer.minimize(error)  # op that takes one gradient-descent step on m, b
AttributeError Traceback (most recent call last) in () 1 optimizer = tf.train.GradientDescentOptimizer(learning_rate = 0.001) ----> 2 train = optimizer.minimize(error)
C:\Users\ASUS PC\Anaconda3\lib\site-packages\tensorflow\python\training\optimizer.py in minimize(self, loss, global_step, var_list, gate_gradients, aggregation_method, colocate_gradients_with_ops, name, grad_loss) 341 aggregation_method=aggregation_method, 342 colocate_gradients_with_ops=colocate_gradients_with_ops, --> 343 grad_loss=grad_loss) 344 345 vars_with_grad = [v for g, v in grads_and_vars if g is not None]
C:\Users\ASUS PC\Anaconda3\lib\site-packages\tensorflow\python\training\optimizer.py in compute_gradients(self, loss, var_list, gate_gradients, aggregation_method, colocate_gradients_with_ops, grad_loss) 392 "Optimizer.GATE_OP, Optimizer.GATE_GRAPH. Not %s" % 393 gate_gradients) --> 394 self._assert_valid_dtypes([loss]) 395 if grad_loss is not None: 396 self._assert_valid_dtypes([grad_loss])
C:\Users\ASUS PC\Anaconda3\lib\site-packages\tensorflow\python\training\optimizer.py in _assert_valid_dtypes(self, tensors) 541 valid_dtypes = self._valid_dtypes() 542 for t in tensors: --> 543 dtype = t.dtype.base_dtype 544 if dtype not in valid_dtypes: 545 raise ValueError(
AttributeError: 'numpy.dtype' object has no attribute 'base_dtype'