
xor

A multi-layer perceptron (MLP) that computes the logical XOR function. It reproduces the following truth table (C = A XOR B):

A B C
0 0 0
0 1 1
1 0 1
1 1 0

It takes as input two column vectors of zeros and ones (one for A and one for B) and outputs a single column vector of zeros and ones (C).
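
To make the architecture concrete: with Heaviside step units, XOR(A, B) can be decomposed as OR(A, B) AND NOT AND(A, B), which is what the hard-coded weights in the Usage section below implement. Here is a minimal standalone sketch of that decomposition (for illustration only; it does not use the model on the Hub):

import torch

a = torch.tensor([[0.], [0.], [1.], [1.]])
b = torch.tensor([[0.], [1.], [0.], [1.]])

and_ab = ((a + b - 1.5) >= 0).float()           # 1 only when both inputs are 1
or_ab = ((a + b - 0.5) >= 0).float()            # 1 when at least one input is 1
xor_ab = ((or_ab - and_ab - 0.5) >= 0).float()  # 1 when exactly one input is 1
print(xor_ab)  # tensor([[0.], [1.], [1.], [0.]])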

Code: https://github.com/sambitmukherjee/handson-ml3-pytorch/blob/main/chapter10/xor.ipynb

Usage

import torch
import torch.nn as nn
from huggingface_hub import PyTorchModelHubMixin

# Let's create two column vectors containing `0`s and `1`s.
batch = {'a': torch.tensor([[0.], [0.], [1.], [1.]]), 'b': torch.tensor([[0.], [1.], [0.], [1.]])}

class XOR(nn.Module, PyTorchModelHubMixin):
    def __init__(self):
        super().__init__()
        # Hidden layer: neuron 0 computes AND(A, B), neuron 1 computes OR(A, B).
        self.layer0_weight = torch.tensor([[1., 1.], [1., 1.]])
        self.layer0_bias = torch.tensor([-1.5, -0.5])
        # Output layer: fires when OR(A, B) is 1 and AND(A, B) is 0, i.e. XOR(A, B).
        self.layer1_weight = torch.tensor([[-1.], [1.]])
        self.layer1_bias = torch.tensor([-0.5])

    def heaviside(self, x):
        # Step activation: 1 where x >= 0, else 0.
        return (x >= 0).float()

    def forward(self, x):
        inputs = torch.cat([x['a'], x['b']], dim=1)  # shape: (batch_size, 2)
        out = self.heaviside(inputs @ self.layer0_weight + self.layer0_bias)  # hidden layer: [AND, OR]
        out = self.heaviside(out @ self.layer1_weight + self.layer1_bias)  # output layer: XOR
        return out

# Instantiate:
logical_xor = XOR.from_pretrained("sadhaklal/xor")

# Forward pass:
output = logical_xor(batch)
print(output)
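
The forward pass should reproduce the C column of the truth table above. A quick sanity check (added here for illustration, assuming `output` is a float tensor of shape (4, 1)):

# Expected C column from the truth table.
expected = torch.tensor([[0.], [1.], [1.], [0.]])
assert torch.equal(output, expected)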