Recode
Contents
- 2020 - 1022
- 2020 - 0806
- 2020 - 0628
- 2020 - 0517
- 2020 - 0506
- 2020 - 0429
- 2020 - 0423
- 2020 - 0421
- 2020 - 0414
- 2020 - 0325
- 2020 - 0322
- 2020 - 0321
2020 - 1022
alert
dos: alert.bat
@echo off
:: wait the number of seconds given as the first argument, then open the alert window
timeout /t %1
python alert.py
python: alert.py
import tkinter

window = tkinter.Tk()
window.title("Ailever")
window.geometry("640x400+100+100")   # width x height + x offset + y offset
window.resizable(False, False)       # fixed window size
label = tkinter.Label(window, text="Go to graduation!!!!!!!!!!!")
label.pack()
window.mainloop()
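Usage: for example, "alert.bat 3600" waits 3600 seconds (the first argument to timeout /t) and then pops up the reminder window.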
2020 - 0806
pypi package build
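The linked build notes are not reproduced here; as a minimal sketch (the package name and metadata below are placeholders), a distribution is typically built with setuptools and uploaded with twine:
# setup.py -- minimal placeholder package metadata
from setuptools import setup, find_packages

setup(
    name="mypackage",          # placeholder name
    version="0.0.1",
    packages=find_packages(),
)

# build and upload (run in a shell):
#   python setup.py sdist bdist_wheel
#   twine upload dist/*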
2020 - 0628
interactive barh
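The linked snippet is not reproduced here; below is a minimal sketch of an interactive horizontal bar chart using the same plt.ion()/plt.pause() pattern as the other entries on this page (categories and values are placeholders):
import numpy as np
import matplotlib.pyplot as plt

labels = ['a', 'b', 'c']             # placeholder categories
rng = np.random.RandomState(0)
plt.ion()
for step in range(20):
    plt.clf()                        # redraw the bars with new values each frame
    plt.barh(labels, rng.rand(3))
    plt.xlim(0, 1)
    plt.pause(0.1)
plt.show()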
2020 - 0517
2020 - 0506
regression.py
import numpy as np
import matplotlib.pyplot as plt
x = np.loadtxt('data.txt')             # two columns: inputs x[:,0], targets x[:,1]
a = 1; b = 1
y = lambda x : a*x + b                 # linear model; a and b are read at call time
lr = 0.1
epochs = 100
LOSS = list()
for epoch in range(epochs):
    loss = np.sum(np.power(y(x[:,0]) - x[:,1], 2))/len(x); LOSS.append(loss)
    grad_a = np.sum(2 * (y(x[:,0]) - x[:,1]) * x[:,0])/len(x)
    grad_b = np.sum(2 * (y(x[:,0]) - x[:,1]) * 1)/len(x)
    a -= lr*grad_a
    b -= lr*grad_b
plt.scatter(x[:,0], x[:,1])
plt.plot(x[:,0], y(x[:,0]), c='red')
#plt.plot(LOSS)
plt.show()
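For reference, the loop above is batch gradient descent on the mean squared error loss = (1/n) * sum_i (a*x_i + b - y_i)^2; differentiating gives grad_a = (2/n) * sum_i (a*x_i + b - y_i) * x_i and grad_b = (2/n) * sum_i (a*x_i + b - y_i), matching the expressions computed in the code.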
autorun.bat
@ECHO OFF
doskey gitlog=git log --all --graph --oneline
doskey workd=mkdir %date%
:: %time% contains colons, which are not valid in file names, so replace them with hyphens
doskey workt=type NUL > %time::=-%.py
doskey ls=dir
doskey cat=type $1
doskey grep=find $1 $2
doskey mv=ren $*
doskey rm=del $*
::doskey = c:\Users\userd\batch_file\dyk_.bat
doskey dyk_cd=c:\Users\userd\batch_file\dyk_cd.bat
doskey dyk_paper=c:\Users\userd\batch_file\dyk_paper.bat
::doskey = start firefox
::doskey = c:\Users\userd\batch_file\fdyk_.bat
doskey fdyk_autorun=c:\Users\userd\batch_file\fdyk_autorun.bat
doskey fdyk_paper=c:\Users\userd\batch_file\fdyk_paper.bat
@echo activate customized commands.
autorun.bat
@ECHO OFF
doskey gitlog=git log --all --graph --oneline
doskey workd=mkdir %date%
:: %time% contains colons, which are not valid in file names, so replace them with hyphens
doskey workt=type NUL > %time::=-%.py
doskey ls=dir
doskey cat=type $1
doskey grep=find $1 $2
doskey mv=ren $*
doskey rm=del $*
::doskey = c:\Users\userd\batch_file\dyk_.bat
doskey dyk_cd=c:\Users\userd\batch_file\dyk_cd.bat
doskey dyk_paper=c:\Users\userd\batch_file\dyk_paper.bat
::doskey = start firefox
::doskey = c:\Users\userd\batch_file\fdyk_.bat
doskey fdyk_autorun=start firefox https://github.com/userdyk-github/userdyk-github.github.io/edit/master/_posts/2019-08-13-Recode.md
doskey fdyk_paper=c:\Users\userd\batch_file\fdyk_paper.bat
@echo activate customized commands.
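Note that doskey macros only last for the current console session; presumably autorun.bat is registered through cmd's AutoRun registry value (under HKCU\Software\Microsoft\Command Processor) so that it runs for every new prompt, but that registration step is not shown above.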
2020 - 0429
- self-attention
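Only the link label survives here; as a reference, a minimal scaled dot-product self-attention sketch in PyTorch (shapes and layer names are illustrative, not the linked code):
import torch
import torch.nn.functional as F

d_model = 8                                       # embedding size (illustrative)
x = torch.randn(1, 5, d_model)                    # (batch, seq_len, d_model)
W_q = torch.nn.Linear(d_model, d_model)           # query projection
W_k = torch.nn.Linear(d_model, d_model)           # key projection
W_v = torch.nn.Linear(d_model, d_model)           # value projection
Q, K, V = W_q(x), W_k(x), W_v(x)
scores = Q @ K.transpose(-2, -1) / d_model**0.5   # scaled dot-product scores
attn = F.softmax(scores, dim=-1)                  # attention weights over the sequence
out = attn @ V                                    # each position: weighted sum of values
print(out.shape)                                  # torch.Size([1, 5, 8])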
2020 - 0423
random walk.py
import matplotlib.pyplot as plt
import numpy as np
rng = np.random.RandomState(10)      # fixed seed for reproducibility
plt.xlim(-1000,1000)
plt.ylim(-1000,1000)
plt.grid(True)
l = []
rv = np.array([[0,0]])
for _ in range(100):
    l.append(rv)
    #rv = rng.beta(1,10, size=(1,2))
    #rv = rng.chisquare(300, size=(1,2))
    rv = rng.normal(500,40, size=(1,2))
    #rv = rng.gamma(1,10, size=(1,2))
    plt.ion()
    # draw a segment from the previous point to the newly sampled one
    plt.plot([l[0][:,0], rv[:,0]], [l[0][:,1], rv[:,1]], marker='x', c='black'); l.pop()
    plt.pause(0.001)
plt.show()
2020 - 0421
2D analytic - numerical.py
from sympy import symbols, exp, lambdify
import numpy as np
import matplotlib.pyplot as plt
from mpl_toolkits.mplot3d import Axes3D
x, y = symbols("x,y")
f = exp(-x-y**2)
F = lambdify((x,y), f)   # compile the symbolic expression into a NumPy-callable function
domain = np.mgrid[-3:3:100j,-3:3:100j]
X = domain[1]; Y = domain[0]
fig = plt.figure()
axes = fig.add_subplot(111, projection='3d')
axes.plot_surface(X,Y,F(X,Y))
plt.show()
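The point of this entry: lambdify bridges the analytic and numerical sides, turning the symbolic SymPy expression f into a vectorized function that can be evaluated over the whole mgrid for plotting.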
2020 - 0414
contourplot.py
import numpy as np
import matplotlib.pyplot as plt
domain = np.mgrid[-3:3:100j, -3:3:100j]
x = domain[1]; y = domain[0]
f = lambda x,y : (1 - x / 2 + x ** 5 + y ** 3) * np.exp(-x ** 2 - y ** 2)
t = np.mgrid[-1:1:20j]
for i in range(len(t)):
    plt.clf()
    plt.contour(x,y,f(x,y))
    plt.scatter(i/20, f(i/20,i/20))   # marker tracing the point (i/20, f(i/20, i/20))
    plt.ion()
    plt.show()
    plt.pause(0.01)
trajectory for optimization.py
import numpy as np
import matplotlib.pyplot as plt
import matplotlib.animation as animation
import scipy.optimize as so

fig = plt.figure()
ax = plt.axes(xlim=(-0.5, 3.5), ylim=(-10, 100))
line, = ax.plot([], [], 'o')

def F(x):
    return (x**3-x**2-9.)**2

# record the optimizer's progress via the callback
res_x = []
so.fmin(F, -9, callback=res_x.append)
res_x = np.array(res_x).ravel()
res_y = F(res_x)

def init():
    line.set_data([], [])
    return line,

# animation function; called sequentially, one frame per optimizer step
def animate(i):
    line.set_data([res_x[i]], [res_y[i]])
    return line,

ax.plot(np.linspace(0,10,100), F(np.linspace(0,10,100)), 'g')
# frames is the length of res_x
anim = animation.FuncAnimation(fig, animate, init_func=init,
                               frames=len(res_x), interval=200, blit=True)
plt.show()
2020 - 0325
nn.linear.py
import torch
import torch.nn as nn
import numpy as np
x_train = np.array([[3.3], [4.4], [5.5], [6.71], [6.93], [4.168],
[9.779], [6.182], [7.59], [2.167], [7.042],
[10.791], [5.313], [7.997], [3.1]], dtype=np.float32)
y_train = np.array([[1.7], [2.76], [2.09], [3.19], [1.694], [1.573],
[3.366], [2.596], [2.53], [1.221], [2.827],
[3.465], [1.65], [2.904], [1.3]], dtype=np.float32)
inputs = torch.from_numpy(x_train)
targets = torch.from_numpy(y_train)
model = nn.Linear(1,1)
model.weight.data.fill_(1.0)   # fix the weight at 1.0
model.bias.data.fill_(1.0)     # fix the bias at 1.0
print(model.weight)
print(model.bias)
print(model(inputs))
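With both parameters fixed at 1.0 the model is simply y = x + 1, so the printed outputs can be sanity-checked directly against x_train.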
2020 - 0322
reshape.py
import torch
import numpy as np
t = np.array([[[0,1,2],
[3,4,5]],
[[6,7,8],
[9,10,11]]])
ft = torch.FloatTensor(t)
print(ft.shape) # 2 2 3
print(ft.view([-1,3]))
print(ft.view([-1,3]).shape) # 4 3
print()
print(ft.view([-1,2]))
print(ft.view([-1,2]).shape)
print()
print(ft.view([-1,1,3]))
print(ft.view([-1,1,3]).shape)
max, argmax.py
import torch
t = torch.FloatTensor([[1,2],
[3,4]])
print(t)
print(t.max())
print()
print(t.max(dim=0)) # (max values, argmax indices)
print(t.max(dim=0)[0]) # max values
print(t.max(dim=0)[1]) # argmax indices
print()
print(t.max(dim=1))
print(t.max(dim=-1))
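Note that t.max(dim=0) returns a (values, indices) pair, so it can also be unpacked directly as values, indices = t.max(dim=0).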
sum
import torch
t = torch.FloatTensor([[1,2],
[3,4]])
print(t)
print(t.sum())
print(t.sum(dim=0))
print(t.sum(dim=1))
print(t.sum(dim=-1))
mean.py
import torch
t = torch.FloatTensor([1,2])
print(t)
print(t.mean())
print()
# Can't use .mean() for Integers
t = torch.LongTensor([1,2])
try:
    print(t.mean())
except Exception as exc:
    print(exc)
print()
# for higher rank tensors
t = torch.FloatTensor([[1,2],
[3,4]])
print(t)
print(t.mean())
print(t.mean(dim=0))
print(t.mean(dim=1))
print(t.mean(dim=-1))
broadcasting(2).py
import torch
m1 = torch.FloatTensor([[1,2],[3,4]])
m2 = torch.FloatTensor([[1],[2]])
print("m1 : ",m1.shape) # 2*2
print("m2 : ",m2.shape) # 2*1
print(m1.matmul(m2)) # 2*1
print(m1@m2) # 2*1
print(m1 * m2) # 2*2
print(m1.mul(m2)) # 2*2
broadcasting(1)
import torch
# same shape
m1 = torch.FloatTensor([[3,3]])
m2 = torch.FloatTensor([[2,2]])
print(m1+m2)
# matrix + matrix
m1 = torch.FloatTensor([[1,2]])
m2 = torch.FloatTensor([[3]])
print(m1 + m2)
# matrix + vector
m1 = torch.FloatTensor([[1,2]])
m2 = torch.FloatTensor([3])
print(m1 + m2)
# 1*2 + 2*1 matrix (broadcast to 2*2)
m1 = torch.FloatTensor([[1,2]])
m2 = torch.FloatTensor([[3],[10]])
print(m1 + m2)
tensor operation
import torch
t = torch.FloatTensor([0,1,2,3,4,5,6])
print(t)
print(t.dim())
print(t.shape)
print(t.size())
print(t[0],t[-1])
print(t[2:3])
print(t[4:-1])
print(t[2:],t[:5])
print()
t = torch.FloatTensor([[1,2,3],
[4,5,6],
[7,8,9],
[10,11,12]])
print(t)
print(t.dim())
print(t.size())
print(t[:,1])
print(t[:,1].size())
print(t[:,:-1])
2020 - 0321
mnist example
import torch
import torchvision
import matplotlib.pyplot as plt
import numpy as np
# data
batch_size = 1000
mnist_train = torchvision.datasets.MNIST(root="MNIST_data/", train=True, transform=torchvision.transforms.ToTensor(), download=True)
mnist_test = torchvision.datasets.MNIST(root="MNIST_data/", train=False, transform=torchvision.transforms.ToTensor(), download=True)
data_loader = torch.utils.data.DataLoader(mnist_train, batch_size=batch_size, shuffle=True, drop_last=True)
# model
device = torch.device("cuda:0" if torch.cuda.is_available() else "cpu")   # fall back to CPU if no GPU
linear = torch.nn.Linear(784, 10, bias=True).to(device)
loss = torch.nn.CrossEntropyLoss().to(device)
SGD = torch.optim.SGD(linear.parameters(), lr=0.1)
# hyper-parameters
total_batch = len(data_loader)
training_epochs = 5
# training
AVG_COST = []   # collected across epochs, so initialized before the loop
for epoch in range(training_epochs):
    total_cost = 0
    for X, Y in data_loader:
        X = X.view(-1, 28*28).to(device)   # flatten each 28x28 image to 784
        Y = Y.to(device)
        hypothesis = linear(X)
        cost = loss(hypothesis, Y)
        SGD.zero_grad()
        cost.backward()
        SGD.step()
        total_cost += cost
    avg_cost = total_cost / total_batch
    AVG_COST.append(avg_cost.detach().cpu().clone().numpy()) # https://discuss.pytorch.org/t/cant-convert-cuda-tensor-to-numpy-use-tensor-cpu-to-copy-the-tensor-to-host-memory-first/38301
    print("Epoch", "%03d"%(epoch+1), "cost =", "{:.9f}".format(avg_cost.item()))
print(AVG_COST)
plt.plot(AVG_COST)
plt.savefig('result.png')
activation
import numpy as np
import matplotlib.pyplot as plt
def sigmoid(x):
    return 1/(1+np.exp(-x))
def step(x):
    return np.array(x>0, dtype=int)
def relu(x):
    return np.maximum(x,0)
def softmax(x):
    return np.exp(x-np.max(x))/np.sum(np.exp(x-np.max(x)))   # max-shifted for numerical stability
def tanh(x):
    return np.tanh(x)
x_range = np.arange(-10,1000, 0.1)
plt.plot(x_range, sigmoid(x_range), label='sigmoid')
plt.plot(x_range, step(x_range), label='step')
plt.plot(x_range, relu(x_range), label='relu')
plt.plot(x_range, softmax(x_range), label='softmax')
plt.plot(x_range, tanh(x_range), label='tanh')
plt.legend()
plt.savefig('result.png')
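Unlike the other activations, softmax normalizes over the entire input vector, so the curve plotted here shows each point's share of the total rather than a pointwise function of x.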