[PyTorch Basics] Fundamentals Notes 1

舍予兄 · 2022-05-01
# Creating tensors
import numpy as np
import torch

a = np.array([2, 2.3])
b = torch.from_numpy(a)
print(b)

tensor([2.0000, 2.3000], dtype=torch.float64)

c = np.ones([2,3])
d = torch.from_numpy(c)
print(d)

 tensor([[1., 1., 1.],
         [1., 1., 1.]], dtype=torch.float64)
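
# Note: torch.from_numpy shares memory with the source array, so in-place changes
# to the NumPy array show up in the tensor as well (a minimal sketch):

e = np.ones(3)
f = torch.from_numpy(e)
e[0] = 5.0        # in-place change to the NumPy array...
print(f)          # ...is reflected in the tensor
# tensor([5., 1., 1.], dtype=torch.float64)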

# Importing from a Python list

# lowercase torch.tensor takes actual data; uppercase torch.Tensor takes shape dimensions

a = torch.tensor([2.,3.2])
print(a)
tensor([2.0000, 3.2000])

# The uppercase form can also take actual data, but then it must be a list; to pass dimensions, just write (2, 3) without brackets

a = torch.FloatTensor([2.,3.2])
print(a)
tensor([2.0000, 3.2000])
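
# To make the lowercase/uppercase contrast concrete (a small sketch; the values in
# the shape form are uninitialized and will differ from run to run):

x = torch.FloatTensor([2., 3.2])   # a list is taken as data
y = torch.FloatTensor(2, 3)        # bare dimensions give a 2x3 tensor of uninitialized values
print(x.shape, y.shape)
# torch.Size([2]) torch.Size([2, 3])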

# Allocate an empty container for a tensor (its values are uninitialized; fill them in later)
# torch.empty(1)            # uninitialized memory, so the values can be garbage (huge or inf)
# torch.FloatTensor(1,2,3)  # pass one number per dimension
# torch.IntTensor(1,2,3)
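
# A quick sketch of the uninitialized behaviour described above (the printed values
# are whatever happens to be in memory, so do not rely on them):

e = torch.empty(2, 3)   # allocates memory without initializing it
print(e)                # arbitrary values, possibly huge or inf
e.fill_(0.)             # fill in-place before using the tensor
print(e)
# tensor([[0., 0., 0.],
#         [0., 0., 0.]])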

# Without any configuration, both tensor and Tensor default to float (torch.FloatTensor)
a = torch.tensor([1.2,3]).type()
print(a)
# Change the default tensor type
torch.set_default_tensor_type(torch.DoubleTensor)
a = torch.tensor([1.2,3]).type()
print(a)
torch.FloatTensor
torch.DoubleTensor
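
# If you change the default, remember to restore it afterwards; recent PyTorch
# versions also provide torch.set_default_dtype for the same purpose (a sketch):

torch.set_default_tensor_type(torch.FloatTensor)   # restore the float32 default
# torch.set_default_dtype(torch.float32)           # equivalent on recent versions
print(torch.tensor([1.2, 3]).type())
# torch.FloatTensor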

 

# Commonly used random initialization functions
a = torch.rand(3, 3)              # uniform distribution on [0, 1)
print(a)

b = torch.rand_like(a)            # takes an existing tensor and matches its shape
print(b)

c = torch.randint(1, 10, [3, 3])  # low=1 (inclusive), high=10 (exclusive), then the shape
print(c)

# tensor([[0.5065, 0.8315, 0.4257],
#         [0.4295, 0.2938, 0.5987],
#         [0.1072, 0.3201, 0.0917]])
# tensor([[0.1203, 0.8148, 0.8335],
#         [0.3040, 0.6993, 0.0636],
#         [0.3232, 0.0061, 0.6630]])
# tensor([[6, 9, 5],
#         [3, 5, 5],
#         [1, 6, 6]])




# Sample from the standard normal distribution N(0, 1)


a = torch.randn(3,3)
print(a)




# Set the mean and standard deviation yourself (note: std is a standard deviation, not a variance)



b = torch.normal(mean=torch.full([10], 0.),     # full creates a length-10 vector of zeros
                 std=torch.arange(1, 0, -0.1))  # per-element standard deviations from 1.0 down to 0.1
print(b)



# tensor([[-0.2573, -0.9736, -1.0613],
#         [-0.1246,  0.1025, -0.0975],
#         [-1.3035,  0.2722, -0.3027]])
# tensor([-0.0322, -0.0394,  0.6556, -0.0090, -0.4209, -0.3295,  0.3497,  0.1770,
#          0.0176, -0.1186])
# The result is 1-D with 10 elements; reshape it yourself afterwards if needed
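
# If you just want a normally distributed tensor of a given shape, the scalar
# overload of torch.normal avoids the reshape step (a minimal sketch):

b2 = torch.normal(0., 1., size=(3, 3))   # scalar mean and std plus an explicit shape
print(b2.shape)
# torch.Size([3, 3])
print(b.reshape(2, 5).shape)             # or reshape the per-element result from above
# torch.Size([2, 5])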





# Define a tensor filled with a single value


a = torch.full([2,3],7)
print(a)


# Create the scalar 7, i.e. a 0-dimensional tensor with no shape


b = torch.full([],7)
print(b)


# Create a 1-D tensor


c = torch.full([1],7)


# With [2] it is also 1-D, but holds two values: 7, 7

# tensor([[7, 7, 7],
#         [7, 7, 7]])
# tensor(7)
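
# For completeness, printing the 1-D cases mentioned above:

print(torch.full([1], 7))
# tensor([7])
print(torch.full([2], 7))
# tensor([7, 7])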



# Arithmetic sequences (torch.arange)



a = torch.arange(0,10)
print(a)
# tensor([0, 1, 2, 3, 4, 5, 6, 7, 8, 9])
a = torch.arange(0, 10, 2)  # the third argument is the step size
print(a)
# tensor([0, 2, 4, 6, 8])
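
# Like Python's range, arange excludes the stop value, but the step may be
# fractional (a small sketch):

a = torch.arange(0, 1, 0.25)
print(a)
# tensor([0.0000, 0.2500, 0.5000, 0.7500])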

# Evenly spaced values (torch.linspace)



a = torch.linspace(0,10, steps=4)
print(a)
# tensor([ 0.0000,  3.3333,  6.6667, 10.0000])

a = torch.linspace(0,10, steps=11)
print(a)
# tensor([ 0.,  1.,  2.,  3.,  4.,  5.,  6.,  7.,  8.,  9., 10.])




# With exponents from 0 down to -1, logspace returns 10 raised to each evenly spaced exponent



a = torch.logspace(0,-1, steps=10)
print(a)
# tensor([1.0000, 0.7743, 0.5995, 0.4642, 0.3594, 0.2783, 0.2154, 0.1668, 0.1292,
#         0.1000])
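
# logspace also takes a base argument if base 10 is not what you want (a sketch):

a = torch.logspace(0, 3, steps=4, base=2)   # 2**0, 2**1, 2**2, 2**3
print(a)
# tensor([1., 2., 4., 8.])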




# Generate an identity (diagonal) matrix



a = torch.eye(3,4)
b = torch.eye(3)
print(a,b)
# tensor([[1., 0., 0., 0.],
#         [0., 1., 0., 0.],
#         [0., 0., 1., 0.]]) tensor([[1., 0., 0.],
#         [0., 1., 0.],
#         [0., 0., 1.]])

a = torch.zeros(3,3)
b = torch.ones_like(a)
print(b)
# tensor([[1., 1., 1.],
#         [1., 1., 1.],
#         [1., 1., 1.]])
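
# ones_like is one of a family of *_like helpers that copy the shape (and by default
# the dtype/device) of an existing tensor; a small sketch reusing a from above:

print(torch.zeros_like(a).shape)   # torch.Size([3, 3])
print(torch.full_like(a, 7.))      # a 3x3 tensor filled with 7.
print(torch.rand_like(a).shape)    # torch.Size([3, 3]), uniform on [0, 1)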



# Shuffle rows randomly while keeping two tensors aligned (a shared index)


a = torch.rand(2, 3)
b = torch.rand(2, 2)
idx = torch.randperm(2)   # a random permutation of the row indices
print(idx)

print(a[idx], b[idx])     # the same permutation keeps the rows of a and b paired


# tensor([1, 0])
# tensor([[0.6402, 0.4217, 0.0593],
#         [0.8788, 0.5715, 0.6452]]) tensor([[0.9691, 0.8659],
#         [0.1530, 0.5879]])
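
# This pattern is handy for shuffling a dataset's features and labels together.
# A minimal sketch with hypothetical tensors features/labels:

features = torch.rand(5, 3)                       # hypothetical: 5 samples, 3 features each
labels = torch.randint(0, 2, [5])                 # hypothetical: 5 binary labels
perm = torch.randperm(features.size(0))
features, labels = features[perm], labels[perm]   # rows stay matched after shuffling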
