Tensorflow vs Pytorch Command Comparison - (6)
tf.zeros(shape, dtype=tf.dtypes.float32, name=None)

with tf.Session() as sess:
    print(sess.run(tf.zeros([3, 4], tf.int32)))
    print(sess.run(tf.zeros([3, 4], tf.float32)))

[[0 0 0 0]
 [0 0 0 0]
 [0 0 0 0]]
[[0. 0. 0. 0.]
 [0. 0. 0. 0.]
 [0. 0. 0. 0.]]

torch.zeros(*size, *, out=None, dtype=None, layout=torch.strided, device=None, requires_grad=False) → Tensor

torch.zeros([3, 4], dtype=torch.int32)
torch..

(A runnable sketch follows below.)
2021. 6. 3.
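A minimal runnable sketch of the tf.zeros / torch.zeros comparison previewed above. It assumes a TensorFlow 1.x environment (for tf.Session, as in the post) and PyTorch; the exact print statements are mine, not necessarily those of the full post.

import tensorflow as tf
import torch

# TensorFlow 1.x: shape first, dtype second; values only materialize via sess.run
with tf.Session() as sess:
    print(sess.run(tf.zeros([3, 4], tf.int32)))    # 3x4 integer zeros
    print(sess.run(tf.zeros([3, 4], tf.float32)))  # 3x4 float zeros

# PyTorch: sizes can be given directly or as a list, dtype is a keyword argument,
# and the tensor is returned immediately (eager execution, no session needed)
print(torch.zeros(3, 4, dtype=torch.int32))
print(torch.zeros([3, 4], dtype=torch.float32))

The practical difference is that the TF1 graph value only appears after sess.run, while torch.zeros returns a concrete tensor right away.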
Tensorflow vs Pytorch Command Comparison - (5)
tf.gather(params, indices, validate_indices=None, name=None, axis=None, batch_dims=0)

v1 = tf.constant([1, 3, 5, 7, 9, 0, 2, 4, 6, 8])
v2 = tf.constant([[1, 2, 3, 4, 5, 6], [7, 8, 9, 10, 11, 12]])
with tf.Session() as sess:
    print(sess.run(tf.gather(v1, [2, 5, 2, 5], axis=0)))
    print(sess.run(tf.gather(v2, [0, 1], axis=0)))
    print(sess.run(tf.gather(v2, [0, 1], axis=1)))

[5 0 5 0]
[[ 1  2  3  4  5  6]
 [..

(A runnable sketch follows below.)
2021. 6. 2.
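The PyTorch half of this preview is cut off above. As one possible counterpart to tf.gather, the sketch below uses torch.index_select; that choice is my assumption, not necessarily the one made in the full post. Assumes TensorFlow 1.x and PyTorch.

import tensorflow as tf
import torch

v1 = tf.constant([1, 3, 5, 7, 9, 0, 2, 4, 6, 8])
v2 = tf.constant([[1, 2, 3, 4, 5, 6], [7, 8, 9, 10, 11, 12]])
with tf.Session() as sess:
    print(sess.run(tf.gather(v1, [2, 5, 2, 5], axis=0)))  # [5 0 5 0]
    print(sess.run(tf.gather(v2, [0, 1], axis=1)))        # first two columns

t1 = torch.tensor([1, 3, 5, 7, 9, 0, 2, 4, 6, 8])
t2 = torch.tensor([[1, 2, 3, 4, 5, 6], [7, 8, 9, 10, 11, 12]])
# torch.index_select(input, dim, index) picks the given indices along one dim,
# much like tf.gather with axis=dim; the index must be a LongTensor
print(torch.index_select(t1, 0, torch.tensor([2, 5, 2, 5])))  # tensor([5, 0, 5, 0])
print(torch.index_select(t2, 1, torch.tensor([0, 1])))        # first two columns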
Tensorflow vs Pytorch - (4)
tf.transpose(a)

x = tf.constant([[1, 2, 3], [4, 5, 6]])
x2 = tf.transpose(x)
with tf.Session() as sess:
    print(sess.run(x))
    print(sess.run(x2))

[[1 2 3]
 [4 5 6]]
[[1 4]
 [2 5]
 [3 6]]

torch.transpose(input, dim0, dim1)

x = torch.tensor([[1, 2, 3], [4, 5, 6]])
torch.transpose(x, 0, 1)

tensor([[1, 2, 3],
        [4, 5, 6]])
tensor([[1, 4],
        [2, 5],
        [3, 6]])

Swaps dim0 and dim1.

Loss function # mean square..

(A runnable sketch follows below.)
2021. 5. 19.
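A small sketch of the transpose comparison previewed above: tf.transpose without a perm argument reverses the dimensions, while torch.transpose swaps exactly the two dims you name. Assumes TensorFlow 1.x (tf.Session) and PyTorch.

import tensorflow as tf
import torch

x_tf = tf.constant([[1, 2, 3], [4, 5, 6]])
with tf.Session() as sess:
    print(sess.run(tf.transpose(x_tf)))  # [[1 4] [2 5] [3 6]]

x_pt = torch.tensor([[1, 2, 3], [4, 5, 6]])
print(torch.transpose(x_pt, 0, 1))       # tensor([[1, 4], [2, 5], [3, 6]])
print(x_pt.t())                          # .t() is the 2-D shorthand for the same swap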
Tensorflow vs Pytorch Command Comparison - (3)
tf.tile(input, multiples, name=None)

w = tf.constant([[1], [2]])
v = tf.tile(w, [3, 4])
with tf.Session() as sess:
    print(sess.run(w))
    print(sess.run(w).shape)
    print(sess.run(v))
    print(sess.run(v).shape)

[[1]
 [2]]
(2, 1)
[[1 1 1 1]
 [2 2 2 2]
 [1 1 1 1]
 [2 2 2 2]
 [1 1 1 1]
 [2 2 2 2]]
(6, 4)

torch_tensor.repeat((num, ...))

w = torch.tensor([[1], [2]])
w.repeat((3, 4))

tensor([[1, 1, 1, 1],
        [2, 2, 2, ..

(A runnable sketch follows below.)
2021. 5. 17.
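A runnable sketch of the tiling comparison previewed above: tf.tile(input, multiples) and torch.Tensor.repeat(*sizes) both replicate a tensor along each dimension. Assumes TensorFlow 1.x (tf.Session) and PyTorch; the shape prints are mine.

import tensorflow as tf
import torch

w_tf = tf.constant([[1], [2]])           # shape (2, 1)
with tf.Session() as sess:
    tiled = sess.run(tf.tile(w_tf, [3, 4]))
    print(tiled)                          # the (6, 4) pattern shown in the post
    print(tiled.shape)                    # (6, 4)

w_pt = torch.tensor([[1], [2]])          # shape (2, 1)
# repeat multiplies each dimension: (2*3, 1*4) -> (6, 4), matching tf.tile
print(w_pt.repeat(3, 4))
print(w_pt.repeat(3, 4).shape)            # torch.Size([6, 4])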