@AbsolutelySaurabh
Created December 22, 2018 09:09
import torch
from torch import nn
import torch.nn.functional as F
from torchvision import datasets, transforms
# Define a transform to normalize the data (MNIST images have a single channel,
# so the mean and std each need only one value)
transform = transforms.Compose([transforms.ToTensor(),
                                transforms.Normalize((0.5,), (0.5,)),
                                ])
# Download and load the training data
trainset = datasets.MNIST('~/.pytorch/MNIST_data/', download=True, train=True, transform=transform)
trainloader = torch.utils.data.DataLoader(trainset, batch_size=64, shuffle=True)
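# Each batch from trainloader is a tuple of images (shape [batch_size, 1, 28, 28])
# and integer class labels (shape [batch_size])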
# Build a feed-forward network
model = nn.Sequential(nn.Linear(784, 128),
                      nn.ReLU(),
                      nn.Linear(128, 64),
                      nn.ReLU(),
                      nn.Linear(64, 10))
# Define the loss
criterion = nn.CrossEntropyLoss()
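# Note: nn.CrossEntropyLoss expects raw, unnormalized logits (it applies
# log-softmax internally), which is why the model's final layer has no activation.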
# Grab one batch of images and labels
dataiter = iter(trainloader)
images, labels = next(dataiter)
# Flatten images
images = images.view(images.shape[0], -1)
# Forward pass, get our logits
logits = model(images)
# Calculate the loss with the logits and the labels
loss = criterion(logits, labels)
print(loss)
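
# A minimal sketch of the equivalent formulation (not part of the original gist):
# nn.CrossEntropyLoss combines nn.LogSoftmax and nn.NLLLoss, so the same loss can
# be computed by adding LogSoftmax as the model's final layer and using NLLLoss as
# the criterion. With a freshly initialized model the numerical value will differ,
# but the computation is equivalent.
model_logsoftmax = nn.Sequential(nn.Linear(784, 128),
                                 nn.ReLU(),
                                 nn.Linear(128, 64),
                                 nn.ReLU(),
                                 nn.Linear(64, 10),
                                 nn.LogSoftmax(dim=1))
criterion_nll = nn.NLLLoss()

# The forward pass now produces log-probabilities instead of raw logits
log_probs = model_logsoftmax(images)
loss_nll = criterion_nll(log_probs, labels)
print(loss_nll)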