auto detect globals()
parent 3583f02c5b
commit 1b4eb16d51
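
With this change callers no longer pass their namespace in by hand: @Commentor(_globals=globals()) becomes plain @Commentor(), and Commentor.__init__ looks up the decorating module's globals on its own (a sketch of the pattern follows the last hunk).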

example.py (new file, 55 lines added)
@@ -0,0 +1,55 @@
+import torch
+import torch.nn as nn
+import torch.nn.functional as F
+
+from trace_commentor import Commentor
+
+
+class Net(nn.Module):
+
+    def __init__(self):
+        super(Net, self).__init__()
+        # 1 input image channel, 6 output channels, 5x5 square convolution
+        # kernel
+        self.conv1 = nn.Conv2d(1, 6, 5)
+        self.conv2 = nn.Conv2d(6, 16, 5)
+        # an affine operation: y = Wx + b
+        self.fc1 = nn.Linear(16 * 5 * 5, 120)  # 5*5 from image dimension
+        self.fc2 = nn.Linear(120, 84)
+        self.fc3 = nn.Linear(84, 10)
+
+    @Commentor()
+    def forward(self, input):
+        # Convolution layer C1: 1 input image channel, 6 output channels,
+        # 5x5 square convolution, it uses RELU activation function, and
+        # outputs a Tensor with size (N, 6, 28, 28), where N is the size of the batch
+        c1 = F.relu(self.conv1(input))
+        # Subsampling layer S2: 2x2 grid, purely functional,
+        # this layer does not have any parameter, and outputs a (N, 6, 14, 14) Tensor
+        s2 = F.max_pool2d(c1, (2, 2))
+        # Convolution layer C3: 6 input channels, 16 output channels,
+        # 5x5 square convolution, it uses RELU activation function, and
+        # outputs a (N, 16, 10, 10) Tensor
+        c3 = F.relu(self.conv2(s2))
+        # Subsampling layer S4: 2x2 grid, purely functional,
+        # this layer does not have any parameter, and outputs a (N, 16, 5, 5) Tensor
+        s4 = F.max_pool2d(c3, 2)
+        # Flatten operation: purely functional, outputs a (N, 400) Tensor
+        s4 = torch.flatten(s4, 1)
+        # Fully connected layer F5: (N, 400) Tensor input,
+        # and outputs a (N, 120) Tensor, it uses RELU activation function
+        f5 = F.relu(self.fc1(s4))
+        # Fully connected layer F6: (N, 120) Tensor input,
+        # and outputs a (N, 84) Tensor, it uses RELU activation function
+        f6 = F.relu(self.fc2(f5))
+        # Gaussian layer OUTPUT: (N, 84) Tensor input, and
+        # outputs a (N, 10) Tensor
+        output = self.fc3(f6)
+        return output
+
+
+net = Net()
+
+input = torch.randn(1, 1, 32, 32)
+out = net(input)
+print(out)

(deleted file)
@@ -1,23 +0,0 @@
-import torch
-import torch.nn as nn
-from tests.test_utils import *
-
-def test():
-
-    @Commentor(_globals=globals())
-    def target():
-        x = torch.ones(4, 5)
-        for i in range(3):
-            x = x[..., None, :]
-
-        a = torch.randn(309, 110, 3)[:100]
-        f = nn.Linear(3, 128)
-        b = f(a.reshape(-1, 3)).reshape(-1, 110, 128)
-        c = torch.concat((a, b), dim=-1)
-
-        return c.flatten()
-
-    target()
-
-
-test()

@@ -6,7 +6,7 @@ from test_utils import *
 
 def test_torch():
 
-    @Commentor("<return>", _globals=globals())
+    @Commentor("<return>")
     def target():
 
         x = torch.ones(4, 5)

@@ -1,7 +1,5 @@
 import ast
 import inspect
-import sys
-import rich
 
 from inspect import getfullargspec
 from functools import wraps

@@ -14,9 +12,9 @@ from .utils import sign, to_source, comment_to_file
 
 class Commentor(object):
 
-    def __init__(self, output="<stderr>", _globals=dict(), fmt=[], check=True, _exit=True) -> None:
+    def __init__(self, output="<stderr>", fmt=[], check=True, _exit=True) -> None:
         self._locals = dict()
-        self._globals = _globals
+        self._globals = inspect.stack()[1][0].f_globals
         self._return = None
         self._formatters = fmt + formatters.LIST
         self._lines = []
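
The change that gives the commit its name is in this last hunk: Commentor.__init__ drops the _globals parameter and instead reads the caller's module globals from the interpreter stack. Below is a minimal standalone sketch of that pattern; the AutoGlobals name is illustrative, not part of trace_commentor.

import inspect


class AutoGlobals:
    # Hypothetical stand-in for Commentor, showing only the
    # globals auto-detection added in this commit.

    def __init__(self):
        # inspect.stack()[1] is the FrameInfo of whoever is executing the
        # decorator line, i.e. the module that wrote @AutoGlobals();
        # [0] is its frame object, and f_globals is that module's
        # globals() dict. inspect.currentframe().f_back.f_globals is an
        # equivalent spelling that avoids building the whole stack list.
        self._globals = inspect.stack()[1][0].f_globals

    def __call__(self, func):
        # The real Commentor presumably wraps func to trace it;
        # passing it through unchanged is enough for this sketch.
        return func


@AutoGlobals()
def target():
    pass


# The caller's globals were picked up without _globals=globals():
assert AutoGlobals()._globals is globals()

One consequence of doing the lookup in __init__: the captured namespace belongs to whoever instantiates the decorator, so constructing a Commentor inside a helper function and applying it elsewhere would capture the helper's module globals, which is exactly the case that _globals=globals() used to make explicit.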