Commit
Showing 12 changed files with 384 additions and 30 deletions.
@@ -161,7 +161,7 @@ This project is licensed under the [Creative Commons Attribution-ShareAlike 4.0
For collaborations, press inquiries, or questions:
- Email: [[email protected]](mailto:[email protected]) or [[email protected]](mailto:[email protected])
- Discord: soul_syrup
- TikTok: [@soul.syrup](https://www.tiktok.com/@soul.syrup)

## 📚 Library Testing Invitation

We invite you to test our PyPI library for human brain cortical organoid/spheroid, EEG, ECoG, and other signal analyses:
...Backend/desktop_browser_app/system/nightly/.ipynb_checkpoints/betti_numbers-checkpoint.py (45 additions, 0 deletions)
@@ -0,0 +1,45 @@
import numpy as np
import gudhi as gd
import matplotlib.pyplot as plt

# Assuming `reduced_data_umap` is your dimensionality-reduced data from the previous steps

def compute_persistence_diagrams(data):
    """
    Compute the persistence diagram for a given dataset using a Vietoris-Rips complex.
    :param data: Input dataset, assumed to be the output of a dimensionality reduction method.
    :return: Persistence diagram as a list of (dimension, (birth, death)) pairs.
    """
    rips_complex = gd.RipsComplex(points=data, max_edge_length=2)
    simplex_tree = rips_complex.create_simplex_tree(max_dimension=2)
    persistence = simplex_tree.persistence()
    return persistence

def plot_persistence_diagrams(persistence):
    """
    Plot the persistence diagram.
    :param persistence: Persistence diagram returned by GUDHI.
    """
    gd.plot_persistence_diagram(persistence)
    plt.show()

def calculate_betti_numbers(persistence):
    """
    Count persistence intervals per homology dimension.
    :param persistence: Persistence diagram as (dimension, (birth, death)) pairs.
    :return: Counts (b0, b1, b2) of connected components, loops, and voids respectively.
    """
    betti_numbers = {i: 0 for i in range(3)}  # Only dimensions 0, 1, and 2 are of interest
    for dimension, _ in persistence:
        if dimension < 3:  # Ignore features of dimension 3 or higher
            betti_numbers[dimension] += 1
    return betti_numbers[0], betti_numbers[1], betti_numbers[2]

# Compute the persistence diagram
persistence = compute_persistence_diagrams(reduced_data_umap)

# Plot the persistence diagram
plot_persistence_diagrams(persistence)

# Calculate Betti numbers
betti_numbers = calculate_betti_numbers(persistence)
print("Betti Numbers:", betti_numbers)
..._checkpoints/dimensionality_reduction_manifold_operations_symbolic dynamics-checkpoint.py (70 additions, 0 deletions)
@@ -0,0 +1,70 @@
import numpy as np
import matplotlib.pyplot as plt
from sklearn.manifold import TSNE
import umap.umap_ as umap
from geomstats.geometry.special_orthogonal import SpecialOrthogonal
import geomstats.geometry.hypersphere as hypersphere
from geomstats.learning.frechet_mean import FrechetMean

# Assuming the file contains continuous data recorded at 500 Hz from 32 channels
file_path = 'ecog_data_last_24h.npy'
ECoG_data = np.load(file_path)

# Preprocessing steps (filtering, detrending, artifact removal) are assumed to be done prior;
# this example focuses on the analysis part.
# Apply UMAP for dimensionality reduction to 3D for visualization and further analysis
reduced_data_umap = umap.UMAP(n_components=3).fit_transform(ECoG_data)
# Apply t-SNE for comparison
reduced_data_tsne = TSNE(n_components=3).fit_transform(ECoG_data)

# Compare the two embeddings side by side (first two components of each)
plt.figure(figsize=(12, 6))
plt.subplot(1, 2, 1)
plt.scatter(reduced_data_umap[:, 0], reduced_data_umap[:, 1], s=1)
plt.title('UMAP Reduction')
plt.subplot(1, 2, 2)
plt.scatter(reduced_data_tsne[:, 0], reduced_data_tsne[:, 1], s=1)
plt.title('t-SNE Reduction')
plt.show()

# Geodesic distance between two random points on the 2-sphere
sphere = hypersphere.Hypersphere(dim=2)
point_a, point_b = sphere.random_point(), sphere.random_point()
distance = sphere.metric.dist(point_a, point_b)

# Frechet mean on the hypersphere
points = sphere.random_point(n_samples=10)
frechet_mean_sphere = FrechetMean(metric=sphere.metric)
frechet_mean_sphere.fit(points)
mean_sphere = frechet_mean_sphere.estimate_

# Frechet mean on SO(3)
so3 = SpecialOrthogonal(n=3, point_type='vector')
points_so3 = so3.random_point(n_samples=10)
frechet_mean_so3 = FrechetMean(metric=so3.metric)
frechet_mean_so3.fit(points_so3)
mean_so3 = frechet_mean_so3.estimate_

# Partition the reduced space into 4 regions along the first UMAP component (as an example)
quantiles = np.quantile(reduced_data_umap, [0.25, 0.5, 0.75], axis=0)

def partition_phase_space(data, quantiles):
    symbols = np.zeros(data.shape[0], dtype=int)
    for i, point in enumerate(data):
        if point[0] < quantiles[0][0]:
            symbols[i] = 0
        elif point[0] < quantiles[1][0]:
            symbols[i] = 1
        elif point[0] < quantiles[2][0]:
            symbols[i] = 2
        else:
            symbols[i] = 3
    return symbols

symbols = partition_phase_space(reduced_data_umap, quantiles)

# Visualize the symbolic partitioning in the UMAP embedding
plt.figure(figsize=(6, 6))
for i in range(4):
    plt.scatter(reduced_data_umap[symbols == i, 0], reduced_data_umap[symbols == i, 1], s=1, label=f'Partition {i}')
plt.title('Symbolic Dynamics Partitioning (UMAP)')
plt.legend()
plt.show()
Empty file.
Empty file.
Software/PC/Backend/desktop_browser_app/system/nightly/betti_numbers.py (45 additions, 0 deletions)
@@ -0,0 +1,45 @@
import numpy as np
import gudhi as gd
import matplotlib.pyplot as plt

# Assuming `reduced_data_umap` is your dimensionality-reduced data from the previous steps

def compute_persistence_diagrams(data):
    """
    Compute the persistence diagram for a given dataset using a Vietoris-Rips complex.
    :param data: Input dataset, assumed to be the output of a dimensionality reduction method.
    :return: Persistence diagram as a list of (dimension, (birth, death)) pairs.
    """
    rips_complex = gd.RipsComplex(points=data, max_edge_length=2)
    simplex_tree = rips_complex.create_simplex_tree(max_dimension=2)
    persistence = simplex_tree.persistence()
    return persistence

def plot_persistence_diagrams(persistence):
    """
    Plot the persistence diagram.
    :param persistence: Persistence diagram returned by GUDHI.
    """
    gd.plot_persistence_diagram(persistence)
    plt.show()

def calculate_betti_numbers(persistence):
    """
    Count persistence intervals per homology dimension.
    :param persistence: Persistence diagram as (dimension, (birth, death)) pairs.
    :return: Counts (b0, b1, b2) of connected components, loops, and voids respectively.
    """
    betti_numbers = {i: 0 for i in range(3)}  # Only dimensions 0, 1, and 2 are of interest
    for dimension, _ in persistence:
        if dimension < 3:  # Ignore features of dimension 3 or higher
            betti_numbers[dimension] += 1
    return betti_numbers[0], betti_numbers[1], betti_numbers[2]

# Compute the persistence diagram
persistence = compute_persistence_diagrams(reduced_data_umap)

# Plot the persistence diagram
plot_persistence_diagrams(persistence)

# Calculate Betti numbers
betti_numbers = calculate_betti_numbers(persistence)
print("Betti Numbers:", betti_numbers)
...wser_app/system/nightly/dimensionality_reduction_manifold_operations_symbolic dynamics.py (70 additions, 0 deletions)
@@ -0,0 +1,70 @@
import numpy as np
import matplotlib.pyplot as plt
from sklearn.manifold import TSNE
import umap.umap_ as umap
from geomstats.geometry.special_orthogonal import SpecialOrthogonal
import geomstats.geometry.hypersphere as hypersphere
from geomstats.learning.frechet_mean import FrechetMean

# Assuming the file contains continuous data recorded at 500 Hz from 32 channels
file_path = 'ecog_data_last_24h.npy'
ECoG_data = np.load(file_path)

# Preprocessing steps (filtering, detrending, artifact removal) are assumed to be done prior;
# this example focuses on the analysis part.
# Apply UMAP for dimensionality reduction to 3D for visualization and further analysis
reduced_data_umap = umap.UMAP(n_components=3).fit_transform(ECoG_data)
# Apply t-SNE for comparison
reduced_data_tsne = TSNE(n_components=3).fit_transform(ECoG_data)

# Compare the two embeddings side by side (first two components of each)
plt.figure(figsize=(12, 6))
plt.subplot(1, 2, 1)
plt.scatter(reduced_data_umap[:, 0], reduced_data_umap[:, 1], s=1)
plt.title('UMAP Reduction')
plt.subplot(1, 2, 2)
plt.scatter(reduced_data_tsne[:, 0], reduced_data_tsne[:, 1], s=1)
plt.title('t-SNE Reduction')
plt.show()

# Geodesic distance between two random points on the 2-sphere
sphere = hypersphere.Hypersphere(dim=2)
point_a, point_b = sphere.random_point(), sphere.random_point()
distance = sphere.metric.dist(point_a, point_b)

# Frechet mean on the hypersphere
points = sphere.random_point(n_samples=10)
frechet_mean_sphere = FrechetMean(metric=sphere.metric)
frechet_mean_sphere.fit(points)
mean_sphere = frechet_mean_sphere.estimate_

# Frechet mean on SO(3)
so3 = SpecialOrthogonal(n=3, point_type='vector')
points_so3 = so3.random_point(n_samples=10)
frechet_mean_so3 = FrechetMean(metric=so3.metric)
frechet_mean_so3.fit(points_so3)
mean_so3 = frechet_mean_so3.estimate_

# Partition the reduced space into 4 regions along the first UMAP component (as an example)
quantiles = np.quantile(reduced_data_umap, [0.25, 0.5, 0.75], axis=0)

def partition_phase_space(data, quantiles):
    symbols = np.zeros(data.shape[0], dtype=int)
    for i, point in enumerate(data):
        if point[0] < quantiles[0][0]:
            symbols[i] = 0
        elif point[0] < quantiles[1][0]:
            symbols[i] = 1
        elif point[0] < quantiles[2][0]:
            symbols[i] = 2
        else:
            symbols[i] = 3
    return symbols

symbols = partition_phase_space(reduced_data_umap, quantiles)

# Visualize the symbolic partitioning in the UMAP embedding
plt.figure(figsize=(6, 6))
for i in range(4):
    plt.scatter(reduced_data_umap[symbols == i, 0], reduced_data_umap[symbols == i, 1], s=1, label=f'Partition {i}')
plt.title('Symbolic Dynamics Partitioning (UMAP)')
plt.legend()
plt.show()
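Note: the partitioning above turns the embedded trajectory into a symbol sequence; a common next step in a symbolic-dynamics analysis is to estimate first-order transition probabilities between symbols. A minimal sketch under that assumption, reusing the `symbols` array from the script above (the helper `estimate_transition_matrix` is illustrative, not part of this commit):

import numpy as np

def estimate_transition_matrix(symbols, n_symbols=4):
    """Estimate first-order transition probabilities between consecutive symbols (sketch)."""
    counts = np.zeros((n_symbols, n_symbols))
    for current, nxt in zip(symbols[:-1], symbols[1:]):
        counts[current, nxt] += 1              # count observed symbol-to-symbol transitions
    row_sums = counts.sum(axis=1, keepdims=True)
    row_sums[row_sums == 0] = 1                # avoid division by zero for unused symbols
    return counts / row_sums                   # each row sums to 1 (or 0 if the symbol never occurs)

# Example usage:
# transition_matrix = estimate_transition_matrix(symbols)
# print("Symbol transition matrix:\n", transition_matrix)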