stared at the screen for 2h

parent  a73d88737e
commit  0798236e26
@@ -208,7 +208,7 @@ def main():
         # Cap the frame rate
         # tick_speed = 100
-        tick_speed = 5 if outer_iter % 20 == 0 else 100
+        tick_speed = 5 if outer_iter % 20 == 0 else 50
         clock.tick(tick_speed)

     pygame.quit()

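Note on this hunk: the fast-path frame cap drops from 100 to 50 FPS, while every 20th outer iteration still runs at 5 FPS. A minimal sketch of how this is assumed to sit inside the game loop; only outer_iter, tick_speed, clock and pygame.quit() come from the hunk, the loop bound and the update step are hypothetical:

import pygame

def main():
    pygame.init()
    clock = pygame.time.Clock()
    for outer_iter in range(1000):                      # hypothetical number of iterations
        # ... update and draw the game state here ...
        # Cap the frame rate: slow every 20th iteration down to 5 FPS
        tick_speed = 5 if outer_iter % 20 == 0 else 50
        clock.tick(tick_speed)                          # at most tick_speed frames per second
    pygame.quit()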
@@ -125,7 +125,7 @@ def take_action(s, a, labyrinth):
     # Check if action caused gameover (Pacman caught by ghost)
     if s_new[0] == s_new[2] and s_new[1] == s_new[3]:
         r = -100.0
-        print("Invalid action type shit")
+        # print("Invalid action")
     else:
         r = calc_reward(tuple(s_new), labyrinth)

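Note on this hunk: the check assumes s_new packs Pacman's position in its first two entries and the ghost's in the next two; a collision gives a fixed reward of -100.0, otherwise calc_reward scores the new state. A small sketch of that convention; the calc_reward stub below is hypothetical, only the collision test and the -100.0 penalty come from the hunk:

def reward_for(s_new, labyrinth):
    # assumed state layout: s_new = (pac_row, pac_col, ghost_row, ghost_col)
    if s_new[0] == s_new[2] and s_new[1] == s_new[3]:
        return -100.0                                   # Pacman caught by the ghost
    return calc_reward(tuple(s_new), labyrinth)

def calc_reward(s, labyrinth):
    # hypothetical stand-in: reward food tiles, small penalty for empty steps
    return 10.0 if labyrinth[s[0]][s[1]] == "food" else -1.0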
@@ -1,11 +1,21 @@
 import numpy as np
 from keras.datasets import mnist
 import matplotlib.pyplot as plt

 # Load MNIST
 print("Loading MNIST...") # debugging
 (X_train_raw, y_train), (X_test_raw, y_test) = mnist.load_data()

-# Flatten images from (samples, 28, 28) to (samples, 784)
+# print("X_train_raw[0]:")
+# print(X_train_raw[0]) # output: 28x28 vector spelling 5
+# print("y_train[0]:")
+# print(y_train[0]) # output: 5
+# print("X_test_raw[0]:")
+# print(X_test_raw[0]) # output: 28x28 vector spelling 7
+# print("y_test[0]:")
+# print(y_test[0]) # output: 7
+
+# Flatten images from (samples, 28, 28) to (samples, 784) -> one dimensional
 X_train = X_train_raw.reshape(X_train_raw.shape[0], -1).astype(np.float32)
 X_test = X_test_raw.reshape(X_test_raw.shape[0], -1).astype(np.float32)

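Note on this hunk: reshape(n, -1) flattens each 28x28 image into a 784-dimensional row and the cast to float32 avoids uint8 overflow in later distance computations. A quick self-contained shape check, assuming the standard Keras MNIST split sizes:

import numpy as np
from keras.datasets import mnist

(X_train_raw, y_train), (X_test_raw, y_test) = mnist.load_data()
X_train = X_train_raw.reshape(X_train_raw.shape[0], -1).astype(np.float32)

assert X_train_raw.shape == (60000, 28, 28)   # standard Keras split: 60000 train images
assert X_train.shape == (60000, 784)          # flattened to one row per image
assert X_train_raw.dtype == np.uint8          # raw pixels are 0..255 integers
print(X_train.dtype)                          # float32 after the cast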
@@ -17,46 +27,65 @@ prototype_labels = y_train[:1000]

 print("Using", len(prototypes), "prototype vectors.") # debugging

-# Fully vectorized kNN function
+# kNN function with explicit loops for readability
 def knn_predict_batch(X_batch, k=3):
     """
-    Predicts labels for a batch of test vectors using fully vectorized kNN.
+    Predicts labels for a batch of test vectors using kNN.
     X_batch: shape (batch_size, 784)
     returns: shape (batch_size,)
     """
-
-    # distance[i, j] = || X_batch[i] - prototypes[j] ||
-    # Efficient: (a - b)^2 = a^2 + b^2 - 2ab
-    a2 = np.sum(X_batch**2, axis=1, keepdims=True) # shape (N, 1)
-    b2 = np.sum(prototypes**2, axis=1) # shape (1000,)
-    ab = X_batch @ prototypes.T # shape (N, 1000)
-
-    distances = np.sqrt(a2 - 2*ab + b2) # shape (N, 1000)
-
-    # Get k nearest neighbors for each test vector
-    knn_idx = np.argpartition(distances, k, axis=1)[:, :k]
-
-    # Get labels of those neighbors
-    knn_labels = prototype_labels[knn_idx]
-
-    # Majority vote (vectorized)
-    preds = np.array([np.bincount(row, minlength=10).argmax()
-                      for row in knn_labels])
-
-    return preds
+    preds = []
+
+    # For each test image
+    for test_img in X_batch:
+        distances = []
+
+        # Euclidean distance to each prototype
+        for prototype in prototypes:
+            # distance = sqrt(sum((test_img - prototype)^2))
+            diff = test_img - prototype
+            distance = np.sqrt(np.sum(diff ** 2))
+            distances.append(distance)
+
+        # Find indices of k nearest neighbors (smallest distances)
+        distances = np.array(distances)
+        nearest_k_indices = np.argsort(distances)[:k] # returns indices of array with sorted distances
+
+        # Get labels of the k nearest neighbors
+        nearest_k_labels = prototype_labels[nearest_k_indices]
+
+        # Majority vote
+        prediction = np.bincount(nearest_k_labels, minlength=10).argmax()
+        preds.append(prediction)
+
+    return np.array(preds)


-# 4. Evaluate on first N_TEST test samples
+# Evaluate on first N_TEST test samples
 N_TEST = 1000
 print(f"Evaluating on {N_TEST} test samples...") # debugging

 X_eval = X_test[:N_TEST]
 y_eval = y_test[:N_TEST]

-preds = knn_predict_batch(X_eval, k=3)
+preds = knn_predict_batch(X_eval, k=5)

 accuracy = np.mean(preds == y_eval)

 print("Predictions:", preds[:20])
 print("True labels:", y_eval[:20])
 print("Accuracy:", accuracy)

 # Visualize first 20 predictions
 fig, axes = plt.subplots(4, 5, figsize=(12, 10))
 axes = axes.flatten()

 for i in range(0, 20):
     # Reshape flattened image back to 28x28
     img = X_eval[i].reshape(28, 28)
     axes[i].imshow(img, cmap='gray')
     axes[i].set_title(f"Pred: {preds[i]}, True: {y_eval[i]}")
     axes[i].axis('off')

 plt.tight_layout()
 plt.show()

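Note on this hunk: the removed vectorized body expands the squared distance as ||a - b||^2 = a^2 - 2ab + b^2 and takes the k smallest entries with np.argpartition; the loop version that replaces it computes the same Euclidean distances explicitly and selects neighbors with np.argsort. A small self-contained sketch on random stand-in data (not MNIST) checking that the two formulations agree:

import numpy as np

rng = np.random.default_rng(0)
X_batch = rng.random((8, 784)).astype(np.float32)        # stand-in "test" vectors
prototypes = rng.random((100, 784)).astype(np.float32)   # stand-in prototype vectors

# Vectorized pairwise distances via ||a - b||^2 = a^2 - 2ab + b^2 (as in the removed code)
a2 = np.sum(X_batch**2, axis=1, keepdims=True)    # shape (8, 1)
b2 = np.sum(prototypes**2, axis=1)                # shape (100,)
ab = X_batch @ prototypes.T                       # shape (8, 100)
d_vec = np.sqrt(np.maximum(a2 - 2 * ab + b2, 0))  # clamp tiny negatives from rounding

# Explicit loops, as in the new function body
d_loop = np.array([[np.sqrt(np.sum((x - p) ** 2)) for p in prototypes] for x in X_batch])

assert np.allclose(d_vec, d_loop, atol=1e-2)

# argpartition (removed) and argsort (added) pick the same k nearest indices
k = 5
top_part = np.sort(np.argpartition(d_vec, k, axis=1)[:, :k], axis=1)
top_sort = np.sort(np.argsort(d_vec, axis=1)[:, :k], axis=1)
assert np.array_equal(top_part, top_sort)

The partial sort via argpartition is the cheaper choice for large prototype sets, so the readability gain of the loop version comes at a noticeable speed cost; the k=3 to k=5 change in the call site only affects how many neighbors vote in np.bincount.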