I am creating an multi-class classifier to classify stars based on their effective temperatures and absolute magnitudes, but when my model is trained, it classifies all of the stars as one type. Any help or tips would be appreciated.
This is how I want the classifier to classify stars:
Here is my code:
import csv
import numpy
from sklearn.model_selection import train_test_split
import torch
from torch import nn
from torch.utils.data import Dataset, DataLoader
def convertStarToInt(arr):
output = []
for item in arr:
if ("Iab" in item[2]):
output.append([item[0], item[1], 1])
elif ("D" in item[2]):
output.append([item[0], item[1], 8])
elif ("Ia" in item[2]):
output.append([item[0], item[1], 0])
elif ("Ib" in item[2]):
output.append([item[0], item[1], 2])
elif ("III" in item[2]):
output.append([item[0], item[1], 4])
elif ("IV" in item[2]):
output.append([item[0], item[1], 5])
elif ("VII" in item[2]):
output.append([item[0], item[1], 8])
elif ("VI" in item[2]):
output.append([item[0], item[1], 7])
elif ("sd" in item[2]):
output.append([item[0], item[1], 7])
elif ("V" in item[2]):
output.append([item[0], item[1], 6])
elif ("II" in item[2]):
output.append([item[0], item[1], 3])
return output
def countStars(arr):
a = 0
ab = 0
b = 0
ii = 0
wd = 0
iii = 0
iv = 0
v = 0
vi = 0
for item in arr:
if ("Iab" in item[2]):
ab += 1
elif ("D" in item[2]):
wd += 1
elif ("Ia" in item[2]):
a += 1
elif ("Ib" in item[2]):
b += 1
elif ("III" in item[2]):
iii += 1
elif ("IV" in item[2]):
iv += 1
elif ("VII" in item[2]):
wd += 1
elif ("VI" in item[2]):
vi += 1
elif ("sd" in item[2]):
vi += 1
elif ("V" in item[2]):
v += 1
elif ("II" in item[2]):
ii += 1
output = [a, ab, b, ii, iii, iv, v, vi, wd]
return output
def starToInt(arr, dictionary):
output = []
for item in arr:
output.append(dictionary.index(item))
return output
with open("Train&TestData.csv") as OutData:
TrainingTestData = list(csv.reader(OutData))
del TrainingTestData[0]
Data = convertStarToInt(TrainingTestData)
StarTypes = []
StarProperties = []
for row in Data:
StarTypes.append(row[2])
StarProperties.append([row[0], row[1]])
StarCount = countStars(TrainingTestData)
print(StarCount)
weights = []
for item in StarCount:
weights.append(sum(StarCount)/(item))
weight = torch.FloatTensor(weights)
TrainData, TestData, TrainOutput, TestOutput = train_test_split(
StarProperties, StarTypes, test_size=0.5, random_state=356)
TrainData = torch.from_numpy(numpy.array(TrainData).astype(dtype = "float32"))
TestData = torch.from_numpy(numpy.array(TestData).astype(dtype = "float32"))
TrainOutput = torch.from_numpy(numpy.array(TrainOutput).astype(dtype = "int64"))
TestOutput = (numpy.array(TestOutput).astype(dtype = "int64"))
class Data(Dataset):
def __init__(self):
self.x = (TrainData)
self.y = (TrainOutput)
self.len = self.x.shape[0]
def __getitem__(self, index):
return self.x[index], self.y[index]
def __len__(self):
return self.len
class Net(nn.Module):
def __init__(self, D_in, H1, H2, D_out):
super(Net, self).__init__()
self.linear1 = nn.Linear(D_in, H1)
self.linear2 = nn.Linear(H1, H2)
self.linear3 = nn.Linear(H2, D_out)
self.softmax = nn.Softmax(dim = 0)
def forward(self, x):
x = (self.linear1(x))
x = (self.linear2(x))
x = (self.linear3(x))
x = (self.softmax(x))
return x
input_dim = 2
hidden_layer1 = 25
hidden_layer2 = 20
output_classes = 9
model = Net(input_dim, hidden_layer1, hidden_layer2, output_classes)
trainloader = DataLoader(dataset = Data(), batch_size = 50)
CrossELoss = nn.CrossEntropyLoss()
learning_rate = 0.1
optimizer1 = torch.optim.SGD(model.parameters(), lr=learning_rate)
optimizer2 = torch.optim.Adam(model.parameters(), lr=3e-4)
n_epochs = 25
loss_list = []
for epoch in range(n_epochs):
for x, y in trainloader:
optimizer1.zero_grad()
z = model(x)
loss = CrossELoss(z, y)
loss.backward()
optimizer1.step()
loss_list.append(loss.data)
print("Epoch is", epoch)
z = model(TestData)
maximum, prediction = (torch.max(z.data,1))
classes = z.data.tolist()
prediction = prediction.tolist()
a = 0
b = 0.0
for index, item in enumerate(prediction):
if item == TestOutput[index]:
a += 1
b += 1
print("Accuracy is", (a / b))