Python mnist.MNIST Examples

The following are 5 code examples of mnist.MNIST(), drawn from open-source projects. Each example is shown with its source file, the project it comes from, and that project's license. You may also want to check out the other available functions and classes of the mnist module.
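Most of the examples below use the python-mnist package, whose MNIST class is pointed at a directory of raw IDX files and then queried with load_training() or load_testing(); Examples #2 and #3 instead construct a project-specific mnist.MNIST benchmark class from the CANDLE Benchmarks repository. A minimal sketch of the python-mnist pattern (the 'data/raw' path is an assumption):

import numpy as np
from mnist import MNIST

mndata = MNIST('data/raw')               # directory holding the raw MNIST IDX files (assumed path)
images, labels = mndata.load_training()  # parallel lists: flattened 784-pixel rows and integer labels
X = np.array(images, dtype=np.float32)
y = np.array(labels, dtype=np.int64)
print(X.shape, y.shape)                  # expected: (60000, 784) (60000,)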
Example #1
Source File: data_loader.py    From bayes_nn with MIT License
def __init__(self, loc='data/raw'):
    """
    Dataloader for the MNIST data. Relies on the python-mnist library:
    https://pypi.python.org/pypi/python-mnist/0.3
    :param loc: path to the directory holding the raw MNIST files
    """
    # MNIST (python-mnist) and np (numpy) are assumed to be imported elsewhere in data_loader.py
    mndata = MNIST(loc)
    self.data = {}

    # train data
    images, labels = mndata.load_training()
    images = np.array(images)
    labels = np.array(labels).astype(np.int64)

    self.data['X_train'] = self.normalize(images)
    self.data['y_train'] = labels

    # test data
    images, labels = mndata.load_testing()
    images = np.array(images)
    labels = np.array(labels).astype(np.int64)

    self.data['X_test'] = self.normalize(images)
    self.data['y_test'] = labels
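The excerpt above calls a self.normalize() helper that is not shown. A hedged sketch of what such a method's body might look like, written here as a standalone function (the divide-by-255 scaling is an assumption, not code taken from bayes_nn):

import numpy as np

def normalize(images):
    # assumption: scale raw 0-255 pixel intensities to floats in [0, 1]
    return np.asarray(images, dtype=np.float32) / 255.0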
Example #2
Source File: mnist_mlp_candle.py    From Benchmarks with MIT License
def initialize_parameters():
    mnist_common = mnist.MNIST(mnist.file_path,
                               'mnist_params.txt',
                               'keras',
                               prog='mnist_mlp',
                               desc='MNIST example')

    # Initialize parameters
    gParameters = candle.finalize_parameters(mnist_common)
    csv_logger = CSVLogger('{}/params.log'.format(gParameters))

    return gParameters 
Example #3
Source File: mnist_cnn_candle.py    From Benchmarks with MIT License
def initialize_parameters():
    mnist_common = mnist.MNIST(mnist.file_path,
                               'mnist_params.txt',
                               'keras',
                               prog='mnist_cnn',
                               desc='MNIST CNN example')

    # Initialize parameters
    gParameters = candle.finalize_parameters(mnist_common)
    csv_logger = CSVLogger('{}/params.log'.format(gParameters))

    return gParameters 
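Examples #2 and #3 differ only in the prog and desc strings passed to the benchmark class. In CANDLE benchmark scripts, an initializer like this is typically called from a main entry point that hands the returned hyperparameter dictionary to the training code; a hedged sketch of that wiring (the run() name and the 'epochs' key are assumptions about the surrounding script, not shown in the excerpts):

def main():
    gParameters = initialize_parameters()       # hyperparameter dictionary from CANDLE
    # hypothetical: pull a training setting out of the returned dictionary
    epochs = gParameters.get('epochs', 10)
    print('training for {} epochs'.format(epochs))
    # run(gParameters)                          # hypothetical training entry point

if __name__ == '__main__':
    main()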
Example #4
Source File: mnist_dataset.py    From pathnet-pytorch with BSD 3-Clause "New" or "Revised" License
def __init__(self, prob):
    """Load the MNIST dataset, group it by label, and add salt-and-pepper noise."""
    # MNIST (python-mnist), np (numpy), and the helpers normalize() and
    # salt_and_pepper() are assumed to be imported elsewhere in mnist_dataset.py
    print("Loading MNIST dataset...")
    mndata = MNIST('./data/mnist/')

    mnist_train_images, mnist_train_labels = mndata.load_training()

    mnist_train_images = np.asarray(mnist_train_images)
    mnist_train_images = normalize(mnist_train_images)
    mnist_train_labels = np.asarray(mnist_train_labels)

    # divide the dataset by label
    print("Dividing dataset...")
    sorted_train_images = []
    sorted_train_labels = []

    for label in range(0, 10):
        train_index = np.where(mnist_train_labels == label)
        sorted_train_images.append(mnist_train_images[train_index[0]])
        sorted_train_labels.append(np.asarray([label] * len(train_index[0])))

    # add salt-and-pepper noise
    print("Adding salt and pepper noise...")
    shape = 28 * 28  # number of pixels in a flattened MNIST image
    self.train_images = []
    self.train_labels = sorted_train_labels
    for images in sorted_train_images:
        noise_images = []
        for image in images:
            noise_image = salt_and_pepper(image, prob, shape)
            noise_images.append(noise_image)
        self.train_images.append(noise_images)
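The salt_and_pepper() helper is imported from elsewhere in pathnet-pytorch and is not shown here. A minimal sketch of what a function with this signature commonly does (an illustrative implementation, not the project's code):

import numpy as np

def salt_and_pepper(image, prob, shape):
    # corrupt a flattened image of `shape` pixels: with total probability `prob`,
    # a pixel is set to the minimum ("pepper") or maximum ("salt") intensity
    image = np.asarray(image, dtype=np.float32).reshape(shape)
    noisy = image.copy()
    rnd = np.random.rand(shape)
    noisy[rnd < prob / 2.0] = image.min()                      # pepper
    noisy[(rnd >= prob / 2.0) & (rnd < prob)] = image.max()    # salt
    return noisy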
Example #5
Source File: runner.py    From pytorch with BSD 2-Clause "Simplified" License
def run():
  TorchModel = PyTorchHelpers.load_lua_class('torch_model.lua', 'TorchModel')
  torchModel = TorchModel(backend, 28, 10)

  mndata = MNIST('../../data/mnist')
  imagesList, labelsList = mndata.load_training()
  labels = np.array(labelsList, dtype=np.uint8)
  images = np.array(imagesList, dtype=np.float32)
  labels += 1  # since torch/lua labels are 1-based
  N = labels.shape[0]
  print('loaded mnist training data')

  if numTrain > 0:
    N = min(N, numTrain)
  print('numExamples N', N)
  numBatches = N // batchSize
  for epoch in range(numEpochs):
    epochLoss = 0
    epochNumRight = 0
    for b in range(numBatches):
      res = torchModel.trainBatch(
        learningRate,
        images[b * batchSize:(b+1) * batchSize],
        labels[b * batchSize:(b+1) * batchSize])
#      print('res', res)
      numRight = res['numRight']
      loss = res['loss']
      epochNumRight += numRight
      epochLoss += loss
      print('epoch ' + str(epoch) + ' batch ' + str(b) + ' accuracy: ' + str(numRight * 100.0 / batchSize) + '%')
    print('epoch ' + str(epoch) + ' accuracy: ' + str(epochNumRight * 100.0 / N) + '%')

  print('finished training')
  print('loading test data...')
  imagesList, labelsList = mndata.load_testing()
  labels = np.array(labelsList, dtype=np.uint8)
  images = np.array(imagesList, dtype=np.float32)
  labels += 1  # since torch/lua labels are 1-based
  N = labels.shape[0]
  print('loaded mnist testing data')

  numBatches = N // batchSize
  epochLoss = 0
  epochNumRight = 0
  for b in range(numBatches):
    predictions = torchModel.predict(images[b * batchSize:(b+1) * batchSize]).asNumpyTensor().reshape(batchSize)
    labelsBatch = labels[b * batchSize:(b+1) * batchSize]
    numRight = (predictions == labelsBatch).sum()
    epochNumRight += numRight
  print('test results: accuracy: ' + str(epochNumRight * 100.0 / N) + '%')
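run() refers to several module-level names (backend, numTrain, batchSize, numEpochs, learningRate) that are defined elsewhere in runner.py, typically from command-line options. A hedged sketch of the kind of configuration the snippet assumes (the values are illustrative, not the project's defaults):

backend = 'cpu'       # Torch backend handed to TorchModel ('cpu' or 'cuda'; assumed)
batchSize = 128       # examples per training batch
numEpochs = 5         # passes over the training set
numTrain = 0          # <= 0 means "use the full training set" in run()
learningRate = 0.02   # step size passed to torchModel.trainBatch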