Skip to content
Closed
Changes from 2 commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
107 changes: 107 additions & 0 deletions machine_learning/naive_bayes.py
Original file line number Diff line number Diff line change
@@ -0,0 +1,107 @@
"""
Naive Bayes Classifier implementation.

This module implements Gaussian Naive Bayes from scratch without using
external machine learning libraries.

References:
https://en.wikipedia.org/wiki/Naive_Bayes_classifier
"""

import math
from typing import Dict, List, Tuple

Check failure on line 12 in machine_learning/naive_bayes.py

View workflow job for this annotation

GitHub Actions / ruff

Ruff (I001)

machine_learning/naive_bayes.py:11:1: I001 Import block is un-sorted or un-formatted


def gaussian_probability(x: float, mean: float, variance: float) -> float:
    """
    Calculate Gaussian probability density.

    >>> round(gaussian_probability(1.0, 1.0, 1.0), 3)
    0.399
    >>> gaussian_probability(1.0, 1.0, 0.0)
    0.0
    """
    # A degenerate (zero-variance) distribution has no usable density.
    if variance == 0.0:
        return 0.0

    deviation = x - mean
    normaliser = math.sqrt(2.0 * math.pi * variance)
    return math.exp(-(deviation * deviation) / (2.0 * variance)) / normaliser


class GaussianNaiveBayes:
    """
    Gaussian Naive Bayes classifier.

    Fits one Gaussian per (class, feature) pair during :meth:`fit`, then
    predicts by maximum log-posterior under the naive (feature-independence)
    assumption in :meth:`predict`.
    """

    def __init__(self) -> None:
        # Prior probability P(class) for each label seen in fit().
        self.class_priors: dict[int, float] = {}
        # Per-class feature means, keyed by label.
        self.means: dict[int, list[float]] = {}
        # Per-class feature variances (population variance), keyed by label.
        self.variances: dict[int, list[float]] = {}

    def fit(self, features: list[list[float]], labels: list[int]) -> None:
        """
        Train the Gaussian Naive Bayes classifier.

        :param features: Feature matrix
        :param labels: Class labels
        :raises ValueError: If input sizes mismatch or the training set is empty

        >>> model = GaussianNaiveBayes()
        >>> model.fit([[1.0], [2.0], [3.0]], [0, 0, 1])
        """
        if len(features) != len(labels):
            raise ValueError("Features and labels must have the same length")
        # Guard against an empty training set; without this the prior
        # computation below would raise an opaque ZeroDivisionError.
        if not features:
            raise ValueError("Training data must not be empty")

        separated: dict[int, list[list[float]]] = {}
        for feature_vector, label in zip(features, labels):
            separated.setdefault(label, []).append(feature_vector)

        total_samples = len(labels)

        for label, rows in separated.items():
            self.class_priors[label] = len(rows) / total_samples

            # Transpose the rows so each entry is one feature column.
            columns = list(zip(*rows))
            self.means[label] = [sum(col) / len(col) for col in columns]
            # Population variance (divide by n, not n - 1).
            self.variances[label] = [
                sum((x - mean) ** 2 for x in col) / len(col)
                for col, mean in zip(columns, self.means[label])
            ]

    def predict(self, features: list[list[float]]) -> list[int]:
        """
        Predict class labels for input features.

        :param features: Feature matrix
        :return: Predicted labels

        >>> model = GaussianNaiveBayes()
        >>> X = [[1.0], [2.0], [3.0], [4.0]]
        >>> y = [0, 0, 1, 1]
        >>> model.fit(X, y)
        >>> model.predict([[1.5], [3.5]])
        [0, 1]
        """
        predictions: list[int] = []

        for row in features:
            scores: list[tuple[int, float]] = []

            for label in self.class_priors:
                # Work in log space to avoid floating-point underflow when
                # multiplying many small densities together.
                log_likelihood = math.log(self.class_priors[label])

                for index, value in enumerate(row):
                    probability = gaussian_probability(
                        value,
                        self.means[label][index],
                        self.variances[label][index],
                    )
                    # Skip zero densities (e.g. zero variance) rather than
                    # taking log(0).
                    if probability > 0.0:
                        log_likelihood += math.log(probability)

                scores.append((label, log_likelihood))

            # Pick the label with the highest log-posterior score.
            predictions.append(max(scores, key=lambda pair: pair[1])[0])

        return predictions