This repository has been archived by the owner on Sep 25, 2022. It is now read-only.
Commit acb099a by scotter7902, committed Jan 9, 2019 (0 parents).
Showing 27 changed files with 3,590 additions and 0 deletions.
.gitattributes
@@ -0,0 +1,2 @@
# Auto detect text files and perform LF normalization
* text=auto
displayData.m
@@ -0,0 +1,59 @@
function [h, display_array] = displayData(X, example_width)
%DISPLAYDATA Display 2D data in a nice grid
%   [h, display_array] = DISPLAYDATA(X, example_width) displays 2D data
%   stored in X in a nice grid. It returns the figure handle h and the
%   displayed array if requested.

% Set example_width automatically if not passed in
if ~exist('example_width', 'var') || isempty(example_width)
    example_width = round(sqrt(size(X, 2)));
end

% Gray Image
colormap(gray);

% Compute rows, cols
[m, n] = size(X);
example_height = (n / example_width);

% Compute number of items to display
display_rows = floor(sqrt(m));
display_cols = ceil(m / display_rows);

% Between images padding
pad = 1;

% Setup blank display
display_array = - ones(pad + display_rows * (example_height + pad), ...
                       pad + display_cols * (example_width + pad));

% Copy each example into a patch on the display array
curr_ex = 1;
for j = 1:display_rows
    for i = 1:display_cols
        if curr_ex > m
            break;
        end
        % Copy the patch

        % Get the max value of the patch
        max_val = max(abs(X(curr_ex, :)));
        display_array(pad + (j - 1) * (example_height + pad) + (1:example_height), ...
                      pad + (i - 1) * (example_width + pad) + (1:example_width)) = ...
            reshape(X(curr_ex, :), example_height, example_width) / max_val;
        curr_ex = curr_ex + 1;
    end
    if curr_ex > m
        break;
    end
end

% Display Image
h = imagesc(display_array, [-1 1]);

% Do not show axis
axis image off

drawnow;

end
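To make the grid arithmetic concrete: with 100 rows of 20x20 digit images, X is 100x400, so example_width defaults to round(sqrt(400)) = 20, example_height is 400 / 20 = 20, and the patches are laid out on a 10 by 10 grid with 1 pixel of padding between them. A standalone call, purely illustrative and assuming the course data file ex3data1.mat is on the path, would be:

% Illustrative usage only; X (5000x400) and y come from the course data file
load('ex3data1.mat');
idx = randperm(size(X, 1));          % shuffle example indices
h = displayData(X(idx(1:100), :));   % draw 100 random digits in a 10x10 grid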
ex3.m
@@ -0,0 +1,88 @@
%% Machine Learning Online Class - Exercise 3 | Part 1: One-vs-all

%  Instructions
%  ------------
%
%  This file contains code that helps you get started on the
%  exercise. You will need to complete the following functions
%  in this exercise:
%
%     lrCostFunction.m (logistic regression cost function)
%     oneVsAll.m
%     predictOneVsAll.m
%     predict.m
%
%  For this exercise, you will not need to change any code in this file,
%  or any other files other than those mentioned above.
%

%% Initialization
clear; close all; clc

%% Setup the parameters you will use for this part of the exercise
input_layer_size = 400;   % 20x20 input images of digits
num_labels = 10;          % 10 labels, from 1 to 10
                          % (note that we have mapped "0" to label 10)

%% =========== Part 1: Loading and Visualizing Data =============
%  We start the exercise by first loading and visualizing the dataset.
%  You will be working with a dataset that contains handwritten digits.
%

% Load Training Data
fprintf('Loading and Visualizing Data ...\n')

load('ex3data1.mat'); % training data stored in arrays X, y
m = size(X, 1);

% Randomly select 100 data points to display
rand_indices = randperm(m);
sel = X(rand_indices(1:100), :);

displayData(sel);

fprintf('Program paused. Press enter to continue.\n');
pause;

%% ============ Part 2a: Vectorize Logistic Regression ============
%  In this part of the exercise, you will reuse your logistic regression
%  code from the last exercise. Your task here is to make sure that your
%  regularized logistic regression implementation is vectorized. After
%  that, you will implement one-vs-all classification for the handwritten
%  digit dataset.
%

% Test case for lrCostFunction
fprintf('\nTesting lrCostFunction() with regularization');

theta_t = [-2; -1; 1; 2];
X_t = [ones(5,1) reshape(1:15,5,3)/10];
y_t = ([1;0;1;0;1] >= 0.5);
lambda_t = 3;
[J, grad] = lrCostFunction(theta_t, X_t, y_t, lambda_t);

fprintf('\nCost: %f\n', J);
fprintf('Expected cost: 2.534819\n');
fprintf('Gradients:\n');
fprintf(' %f \n', grad);
fprintf('Expected gradients:\n');
fprintf(' 0.146561\n -0.548558\n 0.724722\n 1.398003\n');

fprintf('Program paused. Press enter to continue.\n');
pause;

%% ============ Part 2b: One-vs-All Training ============
fprintf('\nTraining One-vs-All Logistic Regression...\n')

lambda = 0.1;
[all_theta] = oneVsAll(X, y, num_labels, lambda);

fprintf('Program paused. Press enter to continue.\n');
pause;

%% ================ Part 3: Predict for One-Vs-All ================

pred = predictOneVsAll(all_theta, X);

fprintf('\nTraining Set Accuracy: %f\n', mean(double(pred == y)) * 100);
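The script above calls lrCostFunction, oneVsAll and predictOneVsAll, which are among the files the learner is asked to complete and are not shown in this commit view. Below is a minimal sketch of how those three functions, plus a sigmoid helper, are commonly written for this exercise; each would live in its own .m file, fmincg is assumed to be provided with the course materials (fminunc would also work), and none of this should be read as the exact code committed here.

% sigmoid.m -- logistic function, assumed helper
function g = sigmoid(z)
    g = 1 ./ (1 + exp(-z));
end

% lrCostFunction.m -- regularized, vectorized logistic regression cost and gradient
function [J, grad] = lrCostFunction(theta, X, y, lambda)
    m = length(y);                          % number of training examples
    h = sigmoid(X * theta);                 % hypothesis for all examples at once
    reg = (lambda / (2 * m)) * sum(theta(2:end) .^ 2);   % do not regularize theta(1)
    J = (1 / m) * (-y' * log(h) - (1 - y)' * log(1 - h)) + reg;
    grad = (1 / m) * (X' * (h - y));
    grad(2:end) = grad(2:end) + (lambda / m) * theta(2:end);
end

% oneVsAll.m -- train num_labels regularized logistic classifiers
function all_theta = oneVsAll(X, y, num_labels, lambda)
    [m, n] = size(X);
    all_theta = zeros(num_labels, n + 1);
    X = [ones(m, 1) X];                     % add intercept term
    options = optimset('GradObj', 'on', 'MaxIter', 50);
    for c = 1:num_labels
        initial_theta = zeros(n + 1, 1);
        % fmincg is assumed to ship with the course code
        all_theta(c, :) = fmincg(@(t) lrCostFunction(t, X, (y == c), lambda), ...
                                 initial_theta, options)';
    end
end

% predictOneVsAll.m -- pick the class whose classifier is most confident
function p = predictOneVsAll(all_theta, X)
    m = size(X, 1);
    X = [ones(m, 1) X];                     % add intercept term
    [~, p] = max(X * all_theta', [], 2);    % row-wise argmax over class scores
end

With definitions along these lines on the path, the Part 2a test case above should report the cost of 2.534819 and the gradients listed in the script.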
ex3_nn.m
@@ -0,0 +1,90 @@
%% Machine Learning Online Class - Exercise 3 | Part 2: Neural Networks

%  Instructions
%  ------------
%
%  This file contains code that helps you get started on the
%  exercise. You will need to complete the following functions
%  in this exercise:
%
%     lrCostFunction.m (logistic regression cost function)
%     oneVsAll.m
%     predictOneVsAll.m
%     predict.m
%
%  For this exercise, you will not need to change any code in this file,
%  or any other files other than those mentioned above.
%

%% Initialization
clear; close all; clc

%% Setup the parameters you will use for this exercise
input_layer_size = 400;   % 20x20 input images of digits
hidden_layer_size = 25;   % 25 hidden units
num_labels = 10;          % 10 labels, from 1 to 10
                          % (note that we have mapped "0" to label 10)

%% =========== Part 1: Loading and Visualizing Data =============
%  We start the exercise by first loading and visualizing the dataset.
%  You will be working with a dataset that contains handwritten digits.
%

% Load Training Data
fprintf('Loading and Visualizing Data ...\n')

load('ex3data1.mat');
m = size(X, 1);

% Randomly select 100 data points to display
sel = randperm(size(X, 1));
sel = sel(1:100);

displayData(X(sel, :));

fprintf('Program paused. Press enter to continue.\n');
pause;

%% ================ Part 2: Loading Parameters ================
% In this part of the exercise, we load some pre-initialized
% neural network parameters.

fprintf('\nLoading Saved Neural Network Parameters ...\n')

% Load the weights into variables Theta1 and Theta2
load('ex3weights.mat');

%% ================= Part 3: Implement Predict =================
%  After training the neural network, we would like to use it to predict
%  the labels. You will now implement the "predict" function to use the
%  neural network to predict the labels of the training set. This lets
%  you compute the training set accuracy.

pred = predict(Theta1, Theta2, X);

fprintf('\nTraining Set Accuracy: %f\n', mean(double(pred == y)) * 100);

fprintf('Program paused. Press enter to continue.\n');
pause;

%  To give you an idea of the network's output, you can also run
%  through the examples one at a time to see what it is predicting.

% Randomly permute examples
rp = randperm(m);

for i = 1:m
    % Display
    fprintf('\nDisplaying Example Image\n');
    displayData(X(rp(i), :));

    pred = predict(Theta1, Theta2, X(rp(i),:));
    fprintf('\nNeural Network Prediction: %d (digit %d)\n', pred, mod(pred, 10));

    % Pause with quit option
    s = input('Paused - press enter to continue, q to exit:','s');
    if s == 'q'
        break
    end
end
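predict.m, used throughout this neural-network script, is likewise not shown in the commit view. A minimal feedforward sketch for the 400-25-10 network whose weights are loaded from ex3weights.mat could look like the following; it reuses the sigmoid helper sketched earlier and is an illustrative implementation, not necessarily the committed one.

% predict.m -- forward propagation through one hidden layer, sigmoid activations
function p = predict(Theta1, Theta2, X)
    m = size(X, 1);
    a1 = [ones(m, 1) X];                   % input layer plus bias unit (m x 401)
    a2 = sigmoid(a1 * Theta1');            % hidden layer activations (m x 25)
    a2 = [ones(m, 1) a2];                  % add bias unit to hidden layer (m x 26)
    a3 = sigmoid(a2 * Theta2');            % output layer scores (m x num_labels)
    [~, p] = max(a3, [], 2);               % predicted label = index of largest output
end

The design follows the exercise's weight layout: Theta1 (25x401) and Theta2 (10x26) already include the bias columns, which is why a column of ones is prepended at each layer before the matrix multiplication.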
Binary file not shown.
Binary file not shown.