Skip to content
Draft
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension


Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
Loading
Sorry, something went wrong. Reload?
Sorry, we cannot display this file.
Sorry, this file is invalid so it cannot be displayed.
Binary file added docs/examples/elu_variants/isrlu_example.pdf
Binary file not shown.
Binary file added docs/examples/elu_variants/isrlu_example.png
Loading
Sorry, something went wrong. Reload?
Sorry, we cannot display this file.
Sorry, this file is invalid so it cannot be displayed.
14 changes: 14 additions & 0 deletions docs/examples/elu_variants/isrlu_example.py
Original file line number Diff line number Diff line change
@@ -0,0 +1,14 @@
"""Plot the ISRLU activation over a sample input range."""

import matplotlib.pyplot as plt
import torch

from activations_plus.simple import isrlu

# Evaluate the activation on an evenly spaced grid.
inputs = torch.linspace(-3, 3, 200)
outputs = isrlu(inputs)

# Render the curve with labelled axes and a light grid.
fig, axis = plt.subplots()
axis.plot(inputs.numpy(), outputs.numpy())
axis.set(
    title="ISRLU (Inverse Square Root Linear Unit)",
    xlabel="Input",
    ylabel="Output",
)
axis.grid(alpha=0.3)
fig.show()  # This will be mocked in tests
Loading
Sorry, something went wrong. Reload?
Sorry, we cannot display this file.
Sorry, this file is invalid so it cannot be displayed.
Binary file added docs/examples/elu_variants/pelu_example.pdf
Binary file not shown.
Binary file added docs/examples/elu_variants/pelu_example.png
Loading
Sorry, something went wrong. Reload?
Sorry, we cannot display this file.
Sorry, this file is invalid so it cannot be displayed.
45 changes: 45 additions & 0 deletions docs/examples/elu_variants/pelu_example.py
Original file line number Diff line number Diff line change
@@ -0,0 +1,45 @@
"""Example demonstrating the PELU activation function."""

import matplotlib.pyplot as plt
import torch

from activations_plus.simple import pelu


def main() -> None:
"""Plot the PELU activation function with different parameter values."""
x = torch.linspace(-5, 5, 1000)

# Different parameter combinations
params = [
{"alpha": 1.0, "beta": 1.0, "label": "α=1.0, β=1.0 (default)"},
{"alpha": 1.5, "beta": 1.0, "label": "α=1.5, β=1.0"},
{"alpha": 1.0, "beta": 1.5, "label": "α=1.0, β=1.5"},
{"alpha": 1.5, "beta": 1.5, "label": "α=1.5, β=1.5"},
]

# Create the plot
plt.figure(figsize=(10, 6))

# Plot PELU with different parameter combinations
for param in params:
y_pelu = pelu(x, alpha=param["alpha"], beta=param["beta"])
plt.plot(x.numpy(), y_pelu.numpy(), label=param["label"], linewidth=2)

# Add vertical and horizontal lines at origin
plt.axhline(y=0, color="k", linestyle="-", alpha=0.3)
plt.axvline(x=0, color="k", linestyle="-", alpha=0.3)

# Configure the plot
plt.grid(True, alpha=0.3)
plt.xlabel("x")
plt.ylabel("f(x)")
plt.title("PELU Activation Function with Different Parameters")
plt.legend()
plt.tight_layout()

plt.show()


if __name__ == "__main__":
main()
Loading
Sorry, something went wrong. Reload?
Sorry, we cannot display this file.
Sorry, this file is invalid so it cannot be displayed.
Binary file added docs/examples/relu_variants/dual_line_example.pdf
Binary file not shown.
Loading
Sorry, something went wrong. Reload?
Sorry, we cannot display this file.
Sorry, this file is invalid so it cannot be displayed.
14 changes: 14 additions & 0 deletions docs/examples/relu_variants/dual_line_example.py
Original file line number Diff line number Diff line change
@@ -0,0 +1,14 @@
"""Plot the Dual Line activation for a fixed (a, b) setting."""

import matplotlib.pyplot as plt
import torch

from activations_plus.simple import dual_line

# Evaluate the activation on an evenly spaced grid.
inputs = torch.linspace(-3, 3, 200)
outputs = dual_line(inputs, a=1.0, b=0.01)

# Render the curve with labelled axes and a light grid.
fig, axis = plt.subplots()
axis.plot(inputs.numpy(), outputs.numpy())
axis.set(title="Dual Line (a=1.0, b=0.01)", xlabel="Input", ylabel="Output")
axis.grid(alpha=0.3)
fig.show()  # This will be mocked in tests
Loading
Sorry, something went wrong. Reload?
Sorry, we cannot display this file.
Sorry, this file is invalid so it cannot be displayed.
Binary file not shown.
Loading
Sorry, something went wrong. Reload?
Sorry, we cannot display this file.
Sorry, this file is invalid so it cannot be displayed.
42 changes: 42 additions & 0 deletions docs/examples/sigmoid_tanh_variants/aria2_example.py
Original file line number Diff line number Diff line change
@@ -0,0 +1,42 @@
"""Compare ARiA2 activation curves across alpha and beta settings."""

import matplotlib.pyplot as plt
import torch

from activations_plus.simple import aria2

xs = torch.linspace(-5, 5, 200)

# Baseline curve with the default parameters (alpha=1.5, beta=0.5).
baseline = aria2(xs)

fig, (axis_alpha, axis_beta) = plt.subplots(1, 2, figsize=(12, 5))

# Left panel: vary alpha while beta stays at 0.5.
axis_alpha.plot(xs.numpy(), baseline.numpy(), label="Default (α=1.5, β=0.5)")
axis_alpha.plot(xs.numpy(), aria2(xs, alpha=1.0, beta=0.5).numpy(), label="α=1.0, β=0.5")
axis_alpha.plot(xs.numpy(), aria2(xs, alpha=2.0, beta=0.5).numpy(), label="α=2.0, β=0.5")
axis_alpha.set(title="ARiA2 with Different Alpha Values", xlabel="Input", ylabel="Output")
axis_alpha.grid(alpha=0.3)
axis_alpha.legend()

# Right panel: vary beta while alpha stays at 1.5.
axis_beta.plot(xs.numpy(), baseline.numpy(), label="Default (α=1.5, β=0.5)")
axis_beta.plot(xs.numpy(), aria2(xs, alpha=1.5, beta=0.2).numpy(), label="α=1.5, β=0.2")
axis_beta.plot(xs.numpy(), aria2(xs, alpha=1.5, beta=1.0).numpy(), label="α=1.5, β=1.0")
axis_beta.set(title="ARiA2 with Different Beta Values", xlabel="Input", ylabel="Output")
axis_beta.grid(alpha=0.3)
axis_beta.legend()

plt.tight_layout()
fig.show()
Loading
Sorry, something went wrong. Reload?
Sorry, we cannot display this file.
Sorry, this file is invalid so it cannot be displayed.
Binary file not shown.
Loading
Sorry, something went wrong. Reload?
Sorry, we cannot display this file.
Sorry, this file is invalid so it cannot be displayed.
14 changes: 14 additions & 0 deletions docs/examples/sigmoid_tanh_variants/isru_example.py
Original file line number Diff line number Diff line change
@@ -0,0 +1,14 @@
"""Plot the ISRU activation with alpha fixed at 1.0."""

import matplotlib.pyplot as plt
import torch

from activations_plus.simple import isru

# Evaluate the activation on an evenly spaced grid.
inputs = torch.linspace(-3, 3, 200)
outputs = isru(inputs, alpha=1.0)

# Render the curve with labelled axes and a light grid.
fig, axis = plt.subplots()
axis.plot(inputs.numpy(), outputs.numpy())
axis.set(title="ISRU (alpha=1.0)", xlabel="Input", ylabel="Output")
axis.grid(alpha=0.3)
fig.show()  # This will be mocked in tests
Loading
Sorry, something went wrong. Reload?
Sorry, we cannot display this file.
Sorry, this file is invalid so it cannot be displayed.
Binary file not shown.
Loading
Sorry, something went wrong. Reload?
Sorry, we cannot display this file.
Sorry, this file is invalid so it cannot be displayed.
14 changes: 14 additions & 0 deletions docs/examples/sigmoid_tanh_variants/tanh_exp_example.py
Original file line number Diff line number Diff line change
@@ -0,0 +1,14 @@
"""Plot the TanhExp activation over a sample input range."""

import matplotlib.pyplot as plt
import torch

from activations_plus.simple import tanh_exp

# Evaluate the activation on an evenly spaced grid.
inputs = torch.linspace(-3, 3, 200)
outputs = tanh_exp(inputs)

# Render the curve with labelled axes and a light grid.
fig, axis = plt.subplots()
axis.plot(inputs.numpy(), outputs.numpy())
axis.set(title="TanhExp", xlabel="Input", ylabel="Output")
axis.grid(alpha=0.3)
fig.show()
Loading
Sorry, something went wrong. Reload?
Sorry, we cannot display this file.
Sorry, this file is invalid so it cannot be displayed.
Binary file not shown.
Loading
Sorry, something went wrong. Reload?
Sorry, we cannot display this file.
Sorry, this file is invalid so it cannot be displayed.
14 changes: 14 additions & 0 deletions docs/examples/specialized_variants/erf_act_example.py
Original file line number Diff line number Diff line change
@@ -0,0 +1,14 @@
"""Plot the ErfAct activation over a sample input range."""

import matplotlib.pyplot as plt
import torch

from activations_plus.simple import erf_act

# Evaluate the activation on an evenly spaced grid.
inputs = torch.linspace(-3, 3, 200)
outputs = erf_act(inputs)

# Render the curve with labelled axes and a light grid.
fig, axis = plt.subplots()
axis.plot(inputs.numpy(), outputs.numpy())
axis.set(title="ErfAct", xlabel="Input", ylabel="Output")
axis.grid(alpha=0.3)
fig.show()  # This will be mocked in tests
Loading
Sorry, something went wrong. Reload?
Sorry, we cannot display this file.
Sorry, this file is invalid so it cannot be displayed.
Binary file not shown.
Loading
Sorry, something went wrong. Reload?
Sorry, we cannot display this file.
Sorry, this file is invalid so it cannot be displayed.
14 changes: 14 additions & 0 deletions docs/examples/specialized_variants/hat_example.py
Original file line number Diff line number Diff line change
@@ -0,0 +1,14 @@
"""Plot the Hat activation over a sample input range."""

import matplotlib.pyplot as plt
import torch

from activations_plus.simple import hat

# Evaluate the activation on an evenly spaced grid.
inputs = torch.linspace(-3, 3, 200)
outputs = hat(inputs)

# Render the curve with labelled axes and a light grid.
fig, axis = plt.subplots()
axis.plot(inputs.numpy(), outputs.numpy())
axis.set(title="Hat", xlabel="Input", ylabel="Output")
axis.grid(alpha=0.3)
fig.show()  # This will be mocked in tests
Loading
Sorry, something went wrong. Reload?
Sorry, we cannot display this file.
Sorry, this file is invalid so it cannot be displayed.
Binary file not shown.
Loading
Sorry, something went wrong. Reload?
Sorry, we cannot display this file.
Sorry, this file is invalid so it cannot be displayed.
14 changes: 14 additions & 0 deletions docs/examples/specialized_variants/pserf_example.py
Original file line number Diff line number Diff line change
@@ -0,0 +1,14 @@
"""Plot the Pserf activation over a sample input range."""

import matplotlib.pyplot as plt
import torch

from activations_plus.simple import pserf

# Evaluate the activation on an evenly spaced grid.
inputs = torch.linspace(-3, 3, 200)
outputs = pserf(inputs)

# Render the curve with labelled axes and a light grid.
fig, axis = plt.subplots()
axis.plot(inputs.numpy(), outputs.numpy())
axis.set(title="Pserf", xlabel="Input", ylabel="Output")
axis.grid(alpha=0.3)
fig.show()  # This will be mocked in tests
Loading
Sorry, something went wrong. Reload?
Sorry, we cannot display this file.
Sorry, this file is invalid so it cannot be displayed.
Binary file not shown.
Loading
Sorry, something went wrong. Reload?
Sorry, we cannot display this file.
Sorry, this file is invalid so it cannot be displayed.
14 changes: 14 additions & 0 deletions docs/examples/specialized_variants/resp_example.py
Original file line number Diff line number Diff line change
@@ -0,0 +1,14 @@
"""Plot the Rectified Softplus (ReSP) activation over a sample input range."""

import matplotlib.pyplot as plt
import torch

from activations_plus.simple import resp

# Evaluate the activation on an evenly spaced grid.
inputs = torch.linspace(-3, 3, 200)
outputs = resp(inputs)

# Render the curve with labelled axes and a light grid.
fig, axis = plt.subplots()
axis.plot(inputs.numpy(), outputs.numpy())
axis.set(title="Rectified Softplus (ReSP)", xlabel="Input", ylabel="Output")
axis.grid(alpha=0.3)
fig.show()  # This will be mocked in tests
Loading
Sorry, something went wrong. Reload?
Sorry, we cannot display this file.
Sorry, this file is invalid so it cannot be displayed.
Binary file not shown.
Loading
Sorry, something went wrong. Reload?
Sorry, we cannot display this file.
Sorry, this file is invalid so it cannot be displayed.
14 changes: 14 additions & 0 deletions docs/examples/tanh_variants/penalized_tanh_example.py
Original file line number Diff line number Diff line change
@@ -0,0 +1,14 @@
"""Plot the Penalized Tanh activation over a sample input range."""

import matplotlib.pyplot as plt
import torch

from activations_plus.simple import penalized_tanh

# Evaluate the activation on an evenly spaced grid.
inputs = torch.linspace(-3, 3, 200)
outputs = penalized_tanh(inputs)

# Render the curve with labelled axes and a light grid.
fig, axis = plt.subplots()
axis.plot(inputs.numpy(), outputs.numpy())
axis.set(title="Penalized Tanh", xlabel="Input", ylabel="Output")
axis.grid(alpha=0.3)
fig.show()  # This will be mocked in tests
17 changes: 16 additions & 1 deletion docs/source/conf.py
Original file line number Diff line number Diff line change
Expand Up @@ -42,6 +42,21 @@
# -- Options for HTML output -------------------------------------------------
# https://www.sphinx-doc.org/en/master/usage/configuration.html#options-for-html-output

html_theme = "sphinx_rtd_theme"
html_theme = "pydata_sphinx_theme"
master_doc = "index"
html_static_path = ["_static"]

# Theme options
html_theme_options = {
"github_url": "https://github.com/DanielAvdar/activations-plus",
"use_edit_page_button": True,
"show_toc_level": 2,
"navbar_align": "left",
}

html_context = {
"github_user": "DanielAvdar",
"github_repo": "activations-plus",
"github_version": "main",
"doc_path": "docs/source",
}
2 changes: 1 addition & 1 deletion pyproject.toml
Original file line number Diff line number Diff line change
Expand Up @@ -63,7 +63,7 @@ dev = [
docs = [
"sphinx>=8.2.0; python_version >= '3.11'",
"sphinx>=7.0.0; python_version < '3.11'",
"sphinx-rtd-theme>=3.0.2",
"pydata-sphinx-theme>=0.13.0",
"matplotlib",

]
Expand Down
Loading