plot tool updates
@@ -1,40 +0,0 @@
-{
-  description = "Python 3.13 devshell with tensorflow-datasets, matplotlib, scikit-learn and numpy";
-
-  inputs = {
-    nixpkgs.url = "github:NixOS/nixpkgs/nixos-unstable";
-    flake-utils.url = "github:numtide/flake-utils";
-  };
-
-  outputs =
-    {
-      self,
-      nixpkgs,
-      flake-utils,
-      ...
-    }:
-    flake-utils.lib.eachDefaultSystem (
-      system:
-      let
-        pkgs = import nixpkgs {
-          inherit system;
-          # optional: config = { allowUnfree = true; };
-        };
-      in
-      {
-        devShells.default = pkgs.mkShell {
-          name = "py313-devshell";
-          # bring in the Python 3.13 packages
-          buildInputs =
-            with pkgs;
-            [ python313 ]
-            ++ (with pkgs.python313Packages; [
-              tensorflow-datasets
-              matplotlib
-              scikit-learn
-              numpy
-            ]);
-        };
-      }
-    );
-}
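The removed flake only pinned a Python 3.13 environment providing the packages named in its description. As a quick way to confirm that whatever environment replaces it still exposes the same packages, a throwaway check along the following lines could be run inside that shell; the script name and the attribute-to-import-name mapping are illustrative and not part of the repository.

# check_env.py -- hypothetical helper, not part of the repository.
# Sanity check that the active interpreter exposes the packages the
# removed flake pulled in, printing the version of each one found.
import importlib
import sys

# Import names mapped from the nixpkgs attribute names used in the flake.
PACKAGES = {
    "tensorflow_datasets": "tensorflow-datasets",
    "matplotlib": "matplotlib",
    "sklearn": "scikit-learn",
    "numpy": "numpy",
}

def main() -> int:
    print(f"python {sys.version.split()[0]}")
    missing = []
    for module_name, nix_name in PACKAGES.items():
        try:
            module = importlib.import_module(module_name)
            print(f"{nix_name}: {getattr(module, '__version__', 'unknown')}")
        except ImportError:
            missing.append(nix_name)
    if missing:
        print("missing packages:", ", ".join(missing))
    return 1 if missing else 0

if __name__ == "__main__":
    raise SystemExit(main())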
@@ -150,7 +150,7 @@ def plot_data_points(normal_experiment_paths, anomaly_experiment_paths, title):
     plt.savefig(output_datetime_path / "missing_points_density.png")
 
     # create another density version which does not plot number of missing points but percentage of measurements that are missing (total number of points is 32*2048)
-    bins = np.linspace(0, 1, 100)
+    bins = np.linspace(0, 0.6, 100)
     plt.clf()
     plt.figure(figsize=(10, 5))
     plt.hist(
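For context, here is a self-contained sketch of what this second density plot computes: the fraction of measurements missing per sample rather than the raw count, with the bin range narrowed to [0, 0.6]. Only the 32*2048 total and the new bin edges come from the diff; the synthetic missing_counts data, the density=True argument, the axis labels, and the output file name are assumptions for illustration, since the real plt.hist call is cut off in the hunk.

# Hypothetical standalone sketch of the adjusted density plot.
import matplotlib
matplotlib.use("Agg")  # render without a display
import matplotlib.pyplot as plt
import numpy as np

TOTAL_POINTS = 32 * 2048  # expected number of measurements per sample

# Stand-in data: counts of missing points per sample (the real code derives
# these from the experiment files).
rng = np.random.default_rng(0)
missing_counts = rng.integers(0, int(0.6 * TOTAL_POINTS), size=500)

# Convert counts to fractions so the x axis reads as "share of measurements missing".
missing_fraction = missing_counts / TOTAL_POINTS

# Narrowed bin range matching the change above: fractions above 0.6 do not
# occur in practice, so [0, 0.6] keeps the distribution readable.
bins = np.linspace(0, 0.6, 100)

plt.figure(figsize=(10, 5))
plt.hist(missing_fraction, bins=bins, density=True)
plt.xlabel("fraction of measurements missing")
plt.ylabel("density")
plt.title("Missing measurements per sample")
plt.savefig("missing_points_density_fraction.png")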
@@ -78,11 +78,19 @@ def plot_tsne_latent_space(normal_data, anomaly_data, title="TSNE of Latent Spac
     Plot the TSNE representation of the latent space.
     This function first applies a PCA-based dimensionality reduction for efficiency.
     """
+    # Hardcoded variables to choose every nth normal sample and mth anomaly sample
+    n = 10  # Change this value to select every nth normal sample
+    m = 2  # Change this value to select every mth anomaly sample
+
+    # Select every nth normal sample and mth anomaly sample
+    normal_data = normal_data[::n]
+    anomaly_data = anomaly_data[::m]
+
     # Combine normal and anomaly data
     combined_data = np.vstack((normal_data, anomaly_data))
 
     # Initial dimensionality reduction with PCA
-    reduced_data = reduce_dimensionality(combined_data, n_components=50)
+    reduced_data = reduce_dimensionality(combined_data, n_components=100)
 
     # Apply TSNE transformation on the PCA-reduced data
     tsne = TSNE(n_components=2, random_state=42)
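The hunk above subsamples the latent vectors before embedding and raises the intermediate PCA dimension from 50 to 100 components. Below is a self-contained sketch of the resulting subsample -> PCA -> TSNE pipeline; reduce_dimensionality is assumed here to be a thin PCA wrapper (the project's actual helper is defined elsewhere and may differ), and the synthetic latent vectors are placeholders for the encoder outputs the real code loads.

# Hedged sketch of the pipeline the hunk modifies.
import numpy as np
from sklearn.decomposition import PCA
from sklearn.manifold import TSNE


def reduce_dimensionality(data: np.ndarray, n_components: int) -> np.ndarray:
    """Stand-in for the project's helper: plain PCA projection."""
    # PCA cannot produce more components than samples or features.
    n_components = min(n_components, *data.shape)
    return PCA(n_components=n_components).fit_transform(data)


# Synthetic stand-ins for the latent vectors.
rng = np.random.default_rng(42)
normal_data = rng.normal(size=(2000, 256))
anomaly_data = rng.normal(loc=1.5, size=(400, 256))

# Thin out the data exactly as the new code does: every 10th normal sample,
# every 2nd anomaly sample.
normal_data = normal_data[::10]
anomaly_data = anomaly_data[::2]

# Combine, reduce to 100 PCA components, then embed in 2D with TSNE.
combined_data = np.vstack((normal_data, anomaly_data))
reduced_data = reduce_dimensionality(combined_data, n_components=100)
embedded = TSNE(n_components=2, random_state=42).fit_transform(reduced_data)

print(embedded.shape)  # (number of kept samples, 2)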