diff --git a/README.md b/README.md
index 9cd7ead..a8ee86a 100644
--- a/README.md
+++ b/README.md
@@ -47,6 +47,10 @@ Follow these steps to install Shake&Tune on your printer:
 #    RAM, and should work for everyone. However, if you are using a powerful computer, you may
 #    wish to increase this value to keep more measurements in memory (e.g., 15-20) before writing
 #    the chunk and avoid stressing the filesystem too much.
+#  max_freq: 200
+#    This setting defines the maximum frequency at which the power spectral density calculation
+#    is cut off. The default value should be fine for most machine and accelerometer combinations;
+#    avoid changing it unless you know what you're doing.
 #  dpi: 300
 #    Controls the resolution of the generated graphs. The default value of 300 dpi was optimized
 #    and strikes a balance between performance and readability, ensuring that graphs are clear
diff --git a/my_graph.png b/my_graph.png
new file mode 100644
index 0000000..c13608b
Binary files /dev/null and b/my_graph.png differ
diff --git a/shaketune/cli.py b/shaketune/cli.py
new file mode 100644
index 0000000..d8d51b0
--- /dev/null
+++ b/shaketune/cli.py
@@ -0,0 +1,140 @@
+import argparse
+import os
+import sys
+from importlib import import_module
+from pathlib import Path
+
+from .graph_creators.graph_creator_factory import GraphCreatorFactory
+from .helpers.accelerometer import MeasurementsManager
+from .shaketune_config import ShakeTuneConfig
+
+
+def add_common_arguments(parser):
+    """Helper function to add common arguments to all subparsers."""
+    parser.add_argument('-o', '--output', required=True, help='Output filename')
+    parser.add_argument('files', nargs='+', help='Input data files (.csv or .stdata)')
+    parser.add_argument('--max_freq', type=float, help='Maximum frequency to graph')
+    parser.add_argument('--dpi', type=int, help='DPI value to use for the graph')
+
+
+def configure_graph_creator(graph_type, args, dummy_config):
+    """Helper function to get and configure a graph creator based on graph type and args."""
+    graph_creator = GraphCreatorFactory.create_graph_creator(graph_type, dummy_config)
+    config_kwargs = {}
+
+    # Dynamically configure the graph creator based on graph type
+    if graph_type == 'axes map':
+        config_kwargs.update({'accel': args.accel, 'segment_length': args.length})
+    elif graph_type == 'static frequency':
+        config_kwargs.update({'accel_per_hz': args.accel_per_hz, 'freq': args.frequency, 'duration': args.duration})
+    elif graph_type == 'belts comparison':
+        config_kwargs.update({'kinematics': args.kinematics, 'accel_per_hz': args.accel_per_hz})
+    elif graph_type == 'input shaper':
+        config_kwargs.update({'scv': args.scv, 'max_smoothing': args.max_smoothing, 'accel_per_hz': args.accel_per_hz})
+    elif graph_type == 'vibrations profile':
+        config_kwargs.update({'kinematics': args.kinematics, 'accel': args.accel})
+
+    graph_creator.configure(**config_kwargs)
+    return graph_creator
+
+
+def load_klipper_module(args):
+    """Helper function to load the shaper_calibrate module from the specified Klipper folder."""
+    if hasattr(args, 'klipper_dir') and args.klipper_dir:
+        kdir = os.path.expanduser(args.klipper_dir)
+        sys.path.append(os.path.join(kdir, 'klippy'))
+        sys.modules['shaper_calibrate'] = import_module('.shaper_calibrate', 'extras')
+
+
+def main():
+    parser = argparse.ArgumentParser(description='Shake&Tune command line interface')
+    subparsers = parser.add_subparsers(dest='graph_type', help='Type of graph to create')
+
+    # Static frequency graph parser
+    static_freq_parser = subparsers.add_parser('static_freq', help='Create static frequency graph')
+    add_common_arguments(static_freq_parser)
+    static_freq_parser.add_argument('--accel_per_hz', type=float, help='Accel per Hz used during the measurement')
+    static_freq_parser.add_argument('--frequency', type=float, help='Maintained frequency of the measurement')
+    static_freq_parser.add_argument('--duration', type=float, help='Duration of the measurement')
+
+    # Axes map detection graph parser
+    axes_map_parser = subparsers.add_parser('axes_map', help='Create axes map detection graph')
+    add_common_arguments(axes_map_parser)
+    axes_map_parser.add_argument('--accel', required=True, type=float, help='Accel value used for the measurement')
+    axes_map_parser.add_argument('--length', required=True, type=float, help='Recorded length for each segment')
+
+    # Belts graph parser
+    belts_parser = subparsers.add_parser('belts', help='Create belts comparison graph')
+    add_common_arguments(belts_parser)
+    belts_parser.add_argument('-k', '--klipper_dir', default='~/klipper', help='Main klipper directory')
+    belts_parser.add_argument('--kinematics', help='Machine kinematics configuration')
+    belts_parser.add_argument('--accel_per_hz', type=float, help='Accel per Hz used during the measurement')
+
+    # Input Shaper graph parser
+    shaper_parser = subparsers.add_parser('input_shaper', help='Create input shaper graph')
+    add_common_arguments(shaper_parser)
+    shaper_parser.add_argument('-k', '--klipper_dir', default='~/klipper', help='Main klipper directory')
+    shaper_parser.add_argument('--scv', type=float, default=5.0, help='Square corner velocity')
+    shaper_parser.add_argument('--max_smoothing', type=float, help='Maximum shaper smoothing to allow')
+    shaper_parser.add_argument('--accel_per_hz', type=float, help='Accel per Hz used during the measurement')
+
+    # Vibrations graph parser
+    vibrations_parser = subparsers.add_parser('vibrations', help='Create vibrations profile graph')
+    add_common_arguments(vibrations_parser)
+    vibrations_parser.add_argument('-k', '--klipper_dir', default='~/klipper', help='Main klipper directory')
+    vibrations_parser.add_argument('--kinematics', required=True, default='cartesian', help='Used kinematics')
+    vibrations_parser.add_argument('--accel', type=int, help='Accel value to be printed on the graph')
+
+    args = parser.parse_args()
+
+    if args.graph_type is None:
+        parser.print_help()
+        sys.exit(1)
+
+    graph_type_map = {
+        'static_freq': 'static frequency',
+        'axes_map': 'axes map',
+        'belts': 'belts comparison',
+        'input_shaper': 'input shaper',
+        'vibrations': 'vibrations profile',
+    }
+    graph_type = graph_type_map[args.graph_type]
+
+    # Load configuration
+    dummy_config = ShakeTuneConfig()
+    if args.dpi is not None:
+        dummy_config.dpi = args.dpi
+    if args.max_freq is not None:
+        if graph_type == 'vibrations profile':
+            dummy_config.max_freq_vibrations = args.max_freq
+        else:
+            dummy_config.max_freq = args.max_freq
+
+    # Load shaper_calibrate module if needed
+    load_klipper_module(args)
+
+    # Create the graph creator and configure it
+    graph_creator = configure_graph_creator(graph_type, args, dummy_config)
+    graph_creator.override_output_target(args.output)
+
+    print(f'Creating {graph_type} graph...')
+
+    # Load measurements
+    measurements_manager = MeasurementsManager(10)
+    args.files = [Path(f) for f in args.files]
+    if args.files[0].suffix == '.csv':
+        measurements_manager.load_from_csvs(args.files)
+    elif args.files[0].suffix == '.stdata':
+        measurements_manager.load_from_stdata(args.files[0])
+    else:
+        raise ValueError('Only .stdata or legacy Klipper raw accelerometer CSV files are supported!')
+
+    # Create graph
+    graph_creator.create_graph(measurements_manager)
+
+    print('...done!')
+
+
+if __name__ == '__main__':
+    os.environ['SHAKETUNE_IN_CLI'] = '1'
+    main()
diff --git a/shaketune/graph_creators/__init__.py b/shaketune/graph_creators/__init__.py
index e8370f8..dd175b2 100644
--- a/shaketune/graph_creators/__init__.py
+++ b/shaketune/graph_creators/__init__.py
@@ -6,11 +6,22 @@
 # File: __init__.py
 # Description: Imports various graph creator classes for the Shake&Tune package.
+import os
+import sys
-from .axes_map_graph_creator import AxesMapGraphCreator as AxesMapGraphCreator
-from .belts_graph_creator import BeltsGraphCreator as BeltsGraphCreator
-from .graph_creator import GraphCreator as GraphCreator
-from .graph_creator_factory import GraphCreatorFactory as GraphCreatorFactory
-from .shaper_graph_creator import ShaperGraphCreator as ShaperGraphCreator
-from .static_graph_creator import StaticGraphCreator as StaticGraphCreator
-from .vibrations_graph_creator import VibrationsGraphCreator as VibrationsGraphCreator
+
+
+def get_shaper_calibrate_module():
+    if os.environ.get('SHAKETUNE_IN_CLI') != '1':
+        from ... import shaper_calibrate
+    else:
+        shaper_calibrate = sys.modules['shaper_calibrate']
+    return shaper_calibrate
+
+
+from .axes_map_graph_creator import AxesMapGraphCreator as AxesMapGraphCreator  # noqa: E402
+from .belts_graph_creator import BeltsGraphCreator as BeltsGraphCreator  # noqa: E402
+from .graph_creator import GraphCreator as GraphCreator  # noqa: E402
+from .graph_creator_factory import GraphCreatorFactory as GraphCreatorFactory  # noqa: E402
+from .shaper_graph_creator import ShaperGraphCreator as ShaperGraphCreator  # noqa: E402
+from .static_graph_creator import StaticGraphCreator as StaticGraphCreator  # noqa: E402
+from .vibrations_graph_creator import VibrationsGraphCreator as VibrationsGraphCreator  # noqa: E402
diff --git a/shaketune/graph_creators/axes_map_graph_creator.py b/shaketune/graph_creators/axes_map_graph_creator.py
index 28641a0..a468217 100644
--- a/shaketune/graph_creators/axes_map_graph_creator.py
+++ b/shaketune/graph_creators/axes_map_graph_creator.py
@@ -7,34 +7,17 @@
 # Description: Implements the axes map detection script for Shake&Tune, including
 #   calibration tools and graph creation for 3D printer vibration analysis.
-import optparse -import os -from datetime import datetime from typing import List, Optional, Tuple -import matplotlib -import matplotlib.colors -import matplotlib.font_manager -import matplotlib.pyplot as plt -import matplotlib.ticker import numpy as np import pywt from scipy import stats -matplotlib.use('Agg') - from ..helpers.accelerometer import Measurement, MeasurementsManager from ..helpers.console_output import ConsoleOutput from ..shaketune_config import ShakeTuneConfig from .graph_creator import GraphCreator -KLIPPAIN_COLORS = { - 'purple': '#70088C', - 'orange': '#FF8D32', - 'dark_purple': '#150140', - 'dark_orange': '#F24130', - 'red_pink': '#F2055C', -} MACHINE_AXES = ['x', 'y', 'z'] @@ -50,12 +33,14 @@ def configure(self, accel: int, segment_length: float) -> None: self._segment_length = segment_length def create_graph(self, measurements_manager: MeasurementsManager) -> None: - fig = axesmap_calibration( + computer = AxesMapComputation( measurements=measurements_manager.get_measurements(), accel=self._accel, fixed_length=self._segment_length, st_version=self._version, ) + computation = computer.compute() + fig = self._plotter.plot_axes_map_detection_graph(computation) self._save_figure(fig, measurements_manager) @@ -64,445 +49,232 @@ def create_graph(self, measurements_manager: MeasurementsManager) -> None: ###################################################################### -def wavelet_denoise(data: np.ndarray, wavelet: str = 'db1', level: int = 1) -> Tuple[np.ndarray, np.ndarray]: - coeffs = pywt.wavedec(data, wavelet, mode='smooth') - threshold = np.median(np.abs(coeffs[-level])) / 0.6745 * np.sqrt(2 * np.log(len(data))) - new_coeffs = [pywt.threshold(c, threshold, mode='soft') for c in coeffs] - denoised_data = pywt.waverec(new_coeffs, wavelet) - - # Compute noise by subtracting denoised data from original data - noise = data - denoised_data[: len(data)] - return denoised_data, noise - - -def integrate_trapz(accel: np.ndarray, time: np.ndarray) -> np.ndarray: - return np.array([np.trapz(accel[:i], time[:i]) for i in range(2, len(time) + 1)]) - - -def process_acceleration_data( - time: np.ndarray, accel_x: np.ndarray, accel_y: np.ndarray, accel_z: np.ndarray -) -> Tuple[float, float, float, np.ndarray, np.ndarray, np.ndarray, float]: - # Calculate the constant offset (gravity component) - offset_x = np.mean(accel_x) - offset_y = np.mean(accel_y) - offset_z = np.mean(accel_z) - - # Remove the constant offset from acceleration data - accel_x -= offset_x - accel_y -= offset_y - accel_z -= offset_z - - # Apply wavelet denoising - accel_x, noise_x = wavelet_denoise(accel_x) - accel_y, noise_y = wavelet_denoise(accel_y) - accel_z, noise_z = wavelet_denoise(accel_z) - - # Integrate acceleration to get velocity using trapezoidal rule - velocity_x = integrate_trapz(accel_x, time) - velocity_y = integrate_trapz(accel_y, time) - velocity_z = integrate_trapz(accel_z, time) - - # Correct drift in velocity by resetting to zero at the beginning and end - velocity_x -= np.linspace(velocity_x[0], velocity_x[-1], len(velocity_x)) - velocity_y -= np.linspace(velocity_y[0], velocity_y[-1], len(velocity_y)) - velocity_z -= np.linspace(velocity_z[0], velocity_z[-1], len(velocity_z)) - - # Integrate velocity to get position using trapezoidal rule - position_x = integrate_trapz(velocity_x, time[1:]) - position_y = integrate_trapz(velocity_y, time[1:]) - position_z = integrate_trapz(velocity_z, time[1:]) - - noise_intensity = np.mean([np.std(noise_x), np.std(noise_y), np.std(noise_z)]) - - 
return offset_x, offset_y, offset_z, position_x, position_y, position_z, noise_intensity - - -def scale_positions_to_fixed_length( - position_x: np.ndarray, position_y: np.ndarray, position_z: np.ndarray, fixed_length: float -) -> Tuple[np.ndarray, np.ndarray, np.ndarray]: - # Calculate the total distance traveled in 3D space - total_distance = np.sqrt(np.diff(position_x) ** 2 + np.diff(position_y) ** 2 + np.diff(position_z) ** 2).sum() - scale_factor = fixed_length / total_distance - - # Apply the scale factor to the positions - position_x *= scale_factor - position_y *= scale_factor - position_z *= scale_factor - - return position_x, position_y, position_z - - -def find_nearest_perfect_vector(average_direction_vector: np.ndarray) -> Tuple[np.ndarray, float]: - # Define the perfect vectors - perfect_vectors = np.array([[1, 0, 0], [0, 1, 0], [0, 0, 1], [-1, 0, 0], [0, -1, 0], [0, 0, -1]]) - - # Find the nearest perfect vector - dot_products = perfect_vectors @ average_direction_vector - nearest_vector_idx = np.argmax(dot_products) - nearest_vector = perfect_vectors[nearest_vector_idx] - - # Calculate the angle error - angle_error = np.arccos(dot_products[nearest_vector_idx]) * 180 / np.pi - - return nearest_vector, angle_error - - -def linear_regression_direction( - position_x: np.ndarray, position_y: np.ndarray, position_z: np.ndarray, trim_length: float = 0.25 -) -> np.ndarray: - # Trim the start and end of the position data to keep only the center of the segment - # as the start and stop positions are not always perfectly aligned and can be a bit noisy - t = len(position_x) - trim_start = int(t * trim_length) - trim_end = int(t * (1 - trim_length)) - position_x = position_x[trim_start:trim_end] - position_y = position_y[trim_start:trim_end] - position_z = position_z[trim_start:trim_end] - - # Compute the direction vector using linear regression over the position data - time = np.arange(len(position_x)) - slope_x, intercept_x, _, _, _ = stats.linregress(time, position_x) - slope_y, intercept_y, _, _, _ = stats.linregress(time, position_y) - slope_z, intercept_z, _, _, _ = stats.linregress(time, position_z) - end_position = np.array( - [slope_x * time[-1] + intercept_x, slope_y * time[-1] + intercept_y, slope_z * time[-1] + intercept_z] - ) - direction_vector = end_position - np.array([intercept_x, intercept_y, intercept_z]) - direction_vector = direction_vector / np.linalg.norm(direction_vector) - return direction_vector - - -###################################################################### -# Graphing -###################################################################### - - -def plot_compare_frequency( - ax: plt.Axes, - time_data: List[np.ndarray], - accel_data: List[Tuple[np.ndarray, np.ndarray, np.ndarray]], - offset: float, - noise_level: str, -) -> None: - # Plot acceleration data - for i, (time, (accel_x, accel_y, accel_z)) in enumerate(zip(time_data, accel_data)): - ax.plot( - time, - accel_x, - label='X' if i == 0 else '', - color=KLIPPAIN_COLORS['purple'], - linewidth=0.5, - zorder=50 if i == 0 else 10, - ) - ax.plot( - time, - accel_y, - label='Y' if i == 0 else '', - color=KLIPPAIN_COLORS['orange'], - linewidth=0.5, - zorder=50 if i == 1 else 10, - ) - ax.plot( - time, - accel_z, - label='Z' if i == 0 else '', - color=KLIPPAIN_COLORS['red_pink'], - linewidth=0.5, - zorder=50 if i == 2 else 10, - ) - - ax.set_xlabel('Time (s)') - ax.set_ylabel('Acceleration (mm/s²)') - - ax.xaxis.set_minor_locator(matplotlib.ticker.AutoMinorLocator()) - 
ax.yaxis.set_minor_locator(matplotlib.ticker.AutoMinorLocator()) - ax.ticklabel_format(axis='y', style='scientific', scilimits=(0, 0)) - ax.grid(which='major', color='grey') - ax.grid(which='minor', color='lightgrey') - fontP = matplotlib.font_manager.FontProperties() - fontP.set_size('small') - ax.set_title( - 'Acceleration (gravity offset removed)', - fontsize=14, - color=KLIPPAIN_COLORS['dark_orange'], - weight='bold', - ) - - ax.legend(loc='upper left', prop=fontP) - - # Add the gravity and noise level to the graph legend - ax2 = ax.twinx() - ax2.yaxis.set_visible(False) - ax2.plot([], [], ' ', label=noise_level) - ax2.plot([], [], ' ', label=f'Measured gravity: {offset / 1000:0.3f} m/s²') - ax2.legend(loc='upper right', prop=fontP) - - -def plot_3d_path( - ax: plt.Axes, - position_data: List[Tuple[np.ndarray, np.ndarray, np.ndarray]], - direction_vectors: List[np.ndarray], - angle_errors: List[float], -) -> None: - # Plot the 3D path of the movement - for i, ((position_x, position_y, position_z), average_direction_vector, angle_error) in enumerate( - zip(position_data, direction_vectors, angle_errors) +class AxesMapComputation: + def __init__( + self, + measurements: List[Measurement], + accel: float, + fixed_length: float, + st_version: str, ): - ax.plot(position_x, position_y, position_z, color=KLIPPAIN_COLORS['orange'], linestyle=':', linewidth=2) - ax.scatter(position_x[0], position_y[0], position_z[0], color=KLIPPAIN_COLORS['red_pink'], zorder=10) - ax.text( - position_x[0] + 1, - position_y[0], - position_z[0], - str(i + 1), - color='black', - fontsize=16, - fontweight='bold', - zorder=20, - ) - - # Plot the average direction vector - start_position = np.array([position_x[0], position_y[0], position_z[0]]) - end_position = start_position + average_direction_vector * np.linalg.norm( - [position_x[-1] - position_x[0], position_y[-1] - position_y[0], position_z[-1] - position_z[0]] - ) - ax.plot( - [start_position[0], end_position[0]], - [start_position[1], end_position[1]], - [start_position[2], end_position[2]], - label=f'{["X", "Y", "Z"][i]} angle: {angle_error:0.2f}°', - color=KLIPPAIN_COLORS['purple'], - linestyle='-', - linewidth=2, - ) - - ax.set_xlabel('X Position (mm)') - ax.set_ylabel('Y Position (mm)') - ax.set_zlabel('Z Position (mm)') - - ax.xaxis.set_minor_locator(matplotlib.ticker.AutoMinorLocator()) - ax.yaxis.set_minor_locator(matplotlib.ticker.AutoMinorLocator()) - ax.grid(which='major', color='grey') - ax.grid(which='minor', color='lightgrey') - fontP = matplotlib.font_manager.FontProperties() - fontP.set_size('small') - ax.set_title( - 'Estimated movement in 3D space', - fontsize=14, - color=KLIPPAIN_COLORS['dark_orange'], - weight='bold', - ) - - ax.legend(loc='upper left', prop=fontP) - - -def format_direction_vector(vectors: List[np.ndarray]) -> str: - formatted_vector = [] - axes_count = {'x': 0, 'y': 0, 'z': 0} - - for vector in vectors: - for i in range(len(vector)): - if vector[i] > 0: - formatted_vector.append(MACHINE_AXES[i]) - axes_count[MACHINE_AXES[i]] += 1 - break - elif vector[i] < 0: - formatted_vector.append(f'-{MACHINE_AXES[i]}') - axes_count[MACHINE_AXES[i]] += 1 - break - - # Check if all axes are present in the axes_map and return an error message if not - for _, count in axes_count.items(): - if count != 1: - return 'unable to determine it correctly!' 
- - return ', '.join(formatted_vector) - - -###################################################################### -# Startup and main routines -###################################################################### - - -def axesmap_calibration( - measurements: List[Measurement], fixed_length: float, accel: Optional[float] = None, st_version: str = 'unknown' -) -> plt.Figure: - if len(measurements) != 3: - raise ValueError('This tool needs 3 measurements to work with (like axesmap_X, axesmap_Y and axesmap_Z)') - - raw_datas = {} - for measurement in measurements: - data = np.array(measurement['samples']) - if data is not None: - _axis = measurement['name'].split('_')[1].lower() - raw_datas[_axis] = data - - fig, ((ax1, ax2)) = plt.subplots( - 1, - 2, - gridspec_kw={ - 'width_ratios': [5, 3], - 'bottom': 0.080, - 'top': 0.840, - 'left': 0.055, - 'right': 0.960, - 'hspace': 0.166, - 'wspace': 0.060, - }, - ) - fig.set_size_inches(15, 7) - ax2.remove() - ax2 = fig.add_subplot(122, projection='3d') - - cumulative_start_position = np.array([0, 0, 0]) - direction_vectors = [] - angle_errors = [] - total_noise_intensity = 0.0 - acceleration_data = [] - position_data = [] - gravities = [] - for _, machine_axis in enumerate(MACHINE_AXES): - if machine_axis not in raw_datas: - raise ValueError(f'Missing measurement for axis {machine_axis}') - - # Get the accel data according to the current axes_map - time = raw_datas[machine_axis][:, 0] - accel_x = raw_datas[machine_axis][:, 1] - accel_y = raw_datas[machine_axis][:, 2] - accel_z = raw_datas[machine_axis][:, 3] - - offset_x, offset_y, offset_z, position_x, position_y, position_z, noise_intensity = process_acceleration_data( - time, accel_x, accel_y, accel_z - ) - position_x, position_y, position_z = scale_positions_to_fixed_length( - position_x, position_y, position_z, fixed_length - ) - position_x += cumulative_start_position[0] - position_y += cumulative_start_position[1] - position_z += cumulative_start_position[2] - - gravity = np.linalg.norm(np.array([offset_x, offset_y, offset_z])) - average_direction_vector = linear_regression_direction(position_x, position_y, position_z) - direction_vector, angle_error = find_nearest_perfect_vector(average_direction_vector) - ConsoleOutput.print( - f'Machine axis {machine_axis.upper()} -> nearest accelerometer direction vector: {direction_vector} (angle error: {angle_error:.2f}°)' + self.measurements = measurements + self.accel = accel + self.fixed_length = fixed_length + self.st_version = st_version + + def compute(self): + if len(self.measurements) != 3: + raise ValueError('This tool needs 3 measurements to work with (like axesmap_X, axesmap_Y and axesmap_Z)') + + raw_datas = {} + for measurement in self.measurements: + data = np.array(measurement['samples']) + if data is not None: + _axis = measurement['name'].split('_')[1].lower() + print(_axis) + raw_datas[_axis] = data + + cumulative_start_position = np.array([0, 0, 0]) + direction_vectors = [] + angle_errors = [] + total_noise_intensity = 0.0 + acceleration_data = [] + position_data = [] + gravities = [] + for _, machine_axis in enumerate(MACHINE_AXES): + if machine_axis not in raw_datas: + raise ValueError(f'Missing measurement for axis {machine_axis}') + + # Get the accel data according to the current axes_map + time = raw_datas[machine_axis][:, 0] + accel_x = raw_datas[machine_axis][:, 1] + accel_y = raw_datas[machine_axis][:, 2] + accel_z = raw_datas[machine_axis][:, 3] + + offset_x, offset_y, offset_z, position_x, position_y, position_z, 
noise_intensity = ( + self._process_acceleration_data(time, accel_x, accel_y, accel_z) + ) + position_x, position_y, position_z = self._scale_positions_to_fixed_length( + position_x, position_y, position_z, self.fixed_length + ) + position_x += cumulative_start_position[0] + position_y += cumulative_start_position[1] + position_z += cumulative_start_position[2] + + gravity = np.linalg.norm(np.array([offset_x, offset_y, offset_z])) + average_direction_vector = self._linear_regression_direction(position_x, position_y, position_z) + direction_vector, angle_error = self._find_nearest_perfect_vector(average_direction_vector) + ConsoleOutput.print( + f'Machine axis {machine_axis.upper()} -> nearest accelerometer direction vector: {direction_vector} (angle error: {angle_error:.2f}°)' + ) + direction_vectors.append(direction_vector) + angle_errors.append(angle_error) + + total_noise_intensity += noise_intensity + + acceleration_data.append((time, (accel_x, accel_y, accel_z))) + position_data.append((position_x, position_y, position_z)) + gravities.append(gravity) + + # Update the cumulative start position for the next segment + cumulative_start_position = np.array([position_x[-1], position_y[-1], position_z[-1]]) + + gravity = np.mean(gravities) + + average_noise_intensity = total_noise_intensity / len(raw_datas) + if average_noise_intensity <= 350: + average_noise_intensity_text = '-> OK' + elif 350 < average_noise_intensity <= 700: + average_noise_intensity_text = '-> WARNING: accelerometer noise is a bit high' + else: + average_noise_intensity_text = '-> ERROR: accelerometer noise is too high!' + + average_noise_intensity_label = ( + f'Dynamic noise level: {average_noise_intensity:.2f} mm/s² {average_noise_intensity_text}' ) - direction_vectors.append(direction_vector) - angle_errors.append(angle_error) - - total_noise_intensity += noise_intensity - - acceleration_data.append((time, (accel_x, accel_y, accel_z))) - position_data.append((position_x, position_y, position_z)) - gravities.append(gravity) - - # Update the cumulative start position for the next segment - cumulative_start_position = np.array([position_x[-1], position_y[-1], position_z[-1]]) - - gravity = np.mean(gravities) - - average_noise_intensity = total_noise_intensity / len(raw_datas) - if average_noise_intensity <= 350: - average_noise_intensity_text = '-> OK' - elif 350 < average_noise_intensity <= 700: - average_noise_intensity_text = '-> WARNING: accelerometer noise is a bit high' - else: - average_noise_intensity_text = '-> ERROR: accelerometer noise is too high!' 
- - average_noise_intensity_label = ( - f'Dynamic noise level: {average_noise_intensity:.2f} mm/s² {average_noise_intensity_text}' - ) - ConsoleOutput.print(average_noise_intensity_label) - - ConsoleOutput.print(f'--> Detected gravity: {gravity / 1000 :.2f} m/s²') - - formatted_direction_vector = format_direction_vector(direction_vectors) - ConsoleOutput.print(f'--> Detected axes_map: {formatted_direction_vector}') - - # Plot the differents graphs - plot_compare_frequency( - ax1, - [d[0] for d in acceleration_data], - [d[1] for d in acceleration_data], - gravity, - average_noise_intensity_label, - ) - plot_3d_path(ax2, position_data, direction_vectors, angle_errors) - - # Add title - title_line1 = 'AXES MAP CALIBRATION TOOL' - fig.text( - 0.060, 0.947, title_line1, ha='left', va='bottom', fontsize=20, color=KLIPPAIN_COLORS['purple'], weight='bold' - ) - try: - filename = measurements[0]['name'] - dt = datetime.strptime(f"{filename.split('_')[2]} {filename.split('_')[3]}", '%Y%m%d %H%M%S') - title_line2 = dt.strftime('%x %X') - if accel is not None: - title_line2 += f' -- at {accel:0.0f} mm/s²' - except Exception: - ConsoleOutput.print( - f"Warning: Shake&Tune measurements names look to be different than expected ({measurements[0]['name']}, " - f"{measurements[1]['name']}, {measurements[2]['name']})" + ConsoleOutput.print(average_noise_intensity_label) + + ConsoleOutput.print(f'--> Detected gravity: {gravity / 1000 :.2f} m/s²') + + formatted_direction_vector = self._format_direction_vector(direction_vectors) + ConsoleOutput.print(f'--> Detected axes_map: {formatted_direction_vector}') + + # Prepare data for plotting + computation_result = { + 'acceleration_data_0': [d[0] for d in acceleration_data], + 'acceleration_data_1': [d[1] for d in acceleration_data], + 'gravity': gravity, + 'average_noise_intensity_label': average_noise_intensity_label, + 'position_data': position_data, + 'direction_vectors': direction_vectors, + 'angle_errors': angle_errors, + 'formatted_direction_vector': formatted_direction_vector, + 'measurements': self.measurements, + 'st_version': self.st_version, + } + + return computation_result + + def _wavelet_denoise(self, data: np.ndarray, wavelet: str = 'db1', level: int = 1) -> Tuple[np.ndarray, np.ndarray]: + coeffs = pywt.wavedec(data, wavelet, mode='smooth') + threshold = np.median(np.abs(coeffs[-level])) / 0.6745 * np.sqrt(2 * np.log(len(data))) + new_coeffs = [pywt.threshold(c, threshold, mode='soft') for c in coeffs] + denoised_data = pywt.waverec(new_coeffs, wavelet) + + # Compute noise by subtracting denoised data from original data + noise = data - denoised_data[: len(data)] + return denoised_data, noise + + def _integrate_trapz(self, accel: np.ndarray, time: np.ndarray) -> np.ndarray: + return np.array([np.trapz(accel[:i], time[:i]) for i in range(2, len(time) + 1)]) + + def _process_acceleration_data( + self, time: np.ndarray, accel_x: np.ndarray, accel_y: np.ndarray, accel_z: np.ndarray + ) -> Tuple[float, float, float, np.ndarray, np.ndarray, np.ndarray, float]: + # Calculate the constant offset (gravity component) + offset_x = np.mean(accel_x) + offset_y = np.mean(accel_y) + offset_z = np.mean(accel_z) + + # Remove the constant offset from acceleration data + accel_x -= offset_x + accel_y -= offset_y + accel_z -= offset_z + + # Apply wavelet denoising + accel_x, noise_x = self._wavelet_denoise(accel_x) + accel_y, noise_y = self._wavelet_denoise(accel_y) + accel_z, noise_z = self._wavelet_denoise(accel_z) + + # Integrate acceleration to get velocity using 
trapezoidal rule + velocity_x = self._integrate_trapz(accel_x, time) + velocity_y = self._integrate_trapz(accel_y, time) + velocity_z = self._integrate_trapz(accel_z, time) + + # Correct drift in velocity by resetting to zero at the beginning and end + velocity_x -= np.linspace(velocity_x[0], velocity_x[-1], len(velocity_x)) + velocity_y -= np.linspace(velocity_y[0], velocity_y[-1], len(velocity_y)) + velocity_z -= np.linspace(velocity_z[0], velocity_z[-1], len(velocity_z)) + + # Integrate velocity to get position using trapezoidal rule + position_x = self._integrate_trapz(velocity_x, time[1:]) + position_y = self._integrate_trapz(velocity_y, time[1:]) + position_z = self._integrate_trapz(velocity_z, time[1:]) + + noise_intensity = np.mean([np.std(noise_x), np.std(noise_y), np.std(noise_z)]) + + return offset_x, offset_y, offset_z, position_x, position_y, position_z, noise_intensity + + def _scale_positions_to_fixed_length( + self, position_x: np.ndarray, position_y: np.ndarray, position_z: np.ndarray, fixed_length: float + ) -> Tuple[np.ndarray, np.ndarray, np.ndarray]: + # Calculate the total distance traveled in 3D space + total_distance = np.sqrt(np.diff(position_x) ** 2 + np.diff(position_y) ** 2 + np.diff(position_z) ** 2).sum() + scale_factor = fixed_length / total_distance + + # Apply the scale factor to the positions + position_x *= scale_factor + position_y *= scale_factor + position_z *= scale_factor + + return position_x, position_y, position_z + + def _find_nearest_perfect_vector(self, average_direction_vector: np.ndarray) -> Tuple[np.ndarray, float]: + # Define the perfect vectors + perfect_vectors = np.array([[1, 0, 0], [0, 1, 0], [0, 0, 1], [-1, 0, 0], [0, -1, 0], [0, 0, -1]]) + + # Find the nearest perfect vector + dot_products = perfect_vectors @ average_direction_vector + nearest_vector_idx = np.argmax(dot_products) + nearest_vector = perfect_vectors[nearest_vector_idx] + + # Calculate the angle error + angle_error = np.arccos(dot_products[nearest_vector_idx]) * 180 / np.pi + + return nearest_vector, angle_error + + def _linear_regression_direction( + self, position_x: np.ndarray, position_y: np.ndarray, position_z: np.ndarray, trim_length: float = 0.25 + ) -> np.ndarray: + # Trim the start and end of the position data to keep only the center of the segment + # as the start and stop positions are not always perfectly aligned and can be a bit noisy + t = len(position_x) + trim_start = int(t * trim_length) + trim_end = int(t * (1 - trim_length)) + position_x = position_x[trim_start:trim_end] + position_y = position_y[trim_start:trim_end] + position_z = position_z[trim_start:trim_end] + + # Compute the direction vector using linear regression over the position data + time = np.arange(len(position_x)) + slope_x, intercept_x, _, _, _ = stats.linregress(time, position_x) + slope_y, intercept_y, _, _, _ = stats.linregress(time, position_y) + slope_z, intercept_z, _, _, _ = stats.linregress(time, position_z) + end_position = np.array( + [slope_x * time[-1] + intercept_x, slope_y * time[-1] + intercept_y, slope_z * time[-1] + intercept_z] ) - title_line2 = measurements[0]['name'] + ' ...' 
- fig.text(0.060, 0.939, title_line2, ha='left', va='top', fontsize=16, color=KLIPPAIN_COLORS['dark_purple']) - - title_line3 = f'| Detected axes_map: {formatted_direction_vector}' - fig.text(0.50, 0.985, title_line3, ha='left', va='top', fontsize=16, color=KLIPPAIN_COLORS['dark_purple']) - - # Adding a small Klippain logo to the top left corner of the figure - ax_logo = fig.add_axes([0.001, 0.894, 0.105, 0.105], anchor='NW') - ax_logo.imshow(plt.imread(os.path.join(os.path.dirname(os.path.abspath(__file__)), 'klippain.png'))) - ax_logo.axis('off') - - # Adding Shake&Tune version in the top right corner - if st_version != 'unknown': - fig.text(0.995, 0.980, st_version, ha='right', va='bottom', fontsize=8, color=KLIPPAIN_COLORS['purple']) - - return fig - - -def main(): - # Parse command-line arguments - usage = '%prog [options] ' - opts = optparse.OptionParser(usage) - opts.add_option('-o', '--output', type='string', dest='output', default=None, help='filename of output graph') - opts.add_option( - '-a', '--accel', type='string', dest='accel', default=None, help='acceleration value used to do the movements' - ) - opts.add_option( - '-l', '--length', type='float', dest='length', default=None, help='recorded length for each segment' - ) - options, args = opts.parse_args() - if len(args) < 1: - opts.error('No measurements to analyse') - if options.accel is None: - opts.error('You must specify the acceleration value used when recording the measurements (option -a)') - try: - accel_value = float(options.accel) - except ValueError: - opts.error('Invalid acceleration value. It should be a numeric value.') - if options.length is None: - opts.error('You must specify the length of the measured segments (option -l)') - try: - length_value = float(options.length) - except ValueError: - opts.error('Invalid length value. It should be a numeric value.') - if options.output is None: - opts.error('You must specify an output file.png to use the script (option -o)') - - measurements_manager = MeasurementsManager(10) - if args[0].endswith('.csv'): - measurements_manager.load_from_csvs(args) - elif args[0].endswith('.stdata'): - measurements_manager.load_from_stdata(args[0]) - else: - raise ValueError('Only .stdata or legacy Klipper raw accelerometer CSV files are supported!') - - fig = axesmap_calibration(measurements_manager.get_measurements(), length_value, accel_value, 'unknown') - fig.savefig(options.output, dpi=150) - - -if __name__ == '__main__': - main() + direction_vector = end_position - np.array([intercept_x, intercept_y, intercept_z]) + direction_vector = direction_vector / np.linalg.norm(direction_vector) + return direction_vector + + def _format_direction_vector(self, vectors: List[np.ndarray]) -> str: + formatted_vector = [] + axes_count = {'x': 0, 'y': 0, 'z': 0} + + for vector in vectors: + for i in range(len(vector)): + if vector[i] > 0: + formatted_vector.append(MACHINE_AXES[i]) + axes_count[MACHINE_AXES[i]] += 1 + break + elif vector[i] < 0: + formatted_vector.append(f'-{MACHINE_AXES[i]}') + axes_count[MACHINE_AXES[i]] += 1 + break + + # Check if all axes are present in the axes_map and return an error message if not + for _, count in axes_count.items(): + if count != 1: + return 'unable to determine it correctly!' 
+ + return ', '.join(formatted_vector) diff --git a/shaketune/graph_creators/belts_graph_creator.py b/shaketune/graph_creators/belts_graph_creator.py index b348c08..7dbe267 100644 --- a/shaketune/graph_creators/belts_graph_creator.py +++ b/shaketune/graph_creators/belts_graph_creator.py @@ -8,59 +8,18 @@ # including computation and graphing functions for 3D printer belt paths analysis. -import optparse -import os -from datetime import datetime from typing import List, NamedTuple, Optional, Tuple -import matplotlib -import matplotlib.colors -import matplotlib.font_manager -import matplotlib.pyplot as plt -import matplotlib.ticker import numpy as np from scipy.stats import pearsonr -matplotlib.use('Agg') - from ..helpers.accelerometer import Measurement, MeasurementsManager -from ..helpers.common_func import detect_peaks, setup_klipper_import +from ..helpers.common_func import detect_peaks from ..helpers.console_output import ConsoleOutput from ..shaketune_config import ShakeTuneConfig +from . import get_shaper_calibrate_module from .graph_creator import GraphCreator -ALPHABET = ( - 'αβγδεζηθικλμνξοπρστυφχψω' # For paired peak names (using the Greek alphabet to avoid confusion with belt names) -) - -PEAKS_DETECTION_THRESHOLD = 0.1 # Threshold to detect peaks in the PSD signal (10% of max) -DC_MAX_PEAKS = 2 # Maximum ideal number of peaks -DC_MAX_UNPAIRED_PEAKS_ALLOWED = 0 # No unpaired peaks are tolerated - -KLIPPAIN_COLORS = { - 'purple': '#70088C', - 'orange': '#FF8D32', - 'dark_purple': '#150140', - 'dark_orange': '#F24130', - 'red_pink': '#F2055C', -} - - -# Define the SignalData type to store the data of a signal (PSD, peaks, etc.) -class SignalData(NamedTuple): - freqs: np.ndarray - psd: np.ndarray - peaks: np.ndarray - paired_peaks: Optional[List[Tuple[Tuple[int, float, float], Tuple[int, float, float]]]] = None - unpaired_peaks: Optional[List[int]] = None - - -# Define the PeakPairingResult type to store the result of the peak pairing function -class PeakPairingResult(NamedTuple): - paired_peaks: List[Tuple[Tuple[int, float, float], Tuple[int, float, float]]] - unpaired_peaks1: List[int] - unpaired_peaks2: List[int] - @GraphCreator.register('belts comparison') class BeltsGraphCreator(GraphCreator): @@ -74,550 +33,223 @@ def configure(self, kinematics: Optional[str] = None, accel_per_hz: Optional[flo self._accel_per_hz = accel_per_hz def create_graph(self, measurements_manager: MeasurementsManager) -> None: - fig = belts_calibration( + computer = BeltsGraphComputation( measurements=measurements_manager.get_measurements(), kinematics=self._kinematics, - klipperdir=str(self._config.klipper_folder), + max_freq=self._config.max_freq, accel_per_hz=self._accel_per_hz, st_version=self._version, ) + computation = computer.compute() + fig = self._plotter.plot_belts_graph(computation) self._save_figure(fig, measurements_manager) -###################################################################### -# Computation of the PSD graph -###################################################################### - - -# This function create pairs of peaks that are close in frequency on two curves (that are known -# to be resonances points and must be similar on both belts on a CoreXY kinematic) -def pair_peaks( - peaks1: np.ndarray, freqs1: np.ndarray, psd1: np.ndarray, peaks2: np.ndarray, freqs2: np.ndarray, psd2: np.ndarray -) -> PeakPairingResult: - # Compute a dynamic detection threshold to filter and pair peaks efficiently - # even if the signal is very noisy (this get clipped to a maximum of 10Hz 
diff) - distances = [] - for p1 in peaks1: - for p2 in peaks2: - distances.append(abs(freqs1[p1] - freqs2[p2])) - distances = np.array(distances) - - median_distance = np.median(distances) - iqr = np.percentile(distances, 75) - np.percentile(distances, 25) - - threshold = median_distance + 1.5 * iqr - threshold = min(threshold, 10) - - # Pair the peaks using the dynamic thresold - paired_peaks = [] - unpaired_peaks1 = list(peaks1) - unpaired_peaks2 = list(peaks2) - - while unpaired_peaks1 and unpaired_peaks2: - min_distance = threshold + 1 - pair = None - - for p1 in unpaired_peaks1: - for p2 in unpaired_peaks2: - distance = abs(freqs1[p1] - freqs2[p2]) - if distance < min_distance: - min_distance = distance - pair = (p1, p2) - - if pair is None: # No more pairs below the threshold - break - - p1, p2 = pair - paired_peaks.append(((p1, freqs1[p1], psd1[p1]), (p2, freqs2[p2], psd2[p2]))) - unpaired_peaks1.remove(p1) - unpaired_peaks2.remove(p2) - - return PeakPairingResult( - paired_peaks=paired_peaks, unpaired_peaks1=unpaired_peaks1, unpaired_peaks2=unpaired_peaks2 - ) - - -###################################################################### -# Computation of the differential spectrogram -###################################################################### - - -def compute_mhi(similarity_factor: float, signal1: SignalData, signal2: SignalData) -> str: - num_unpaired_peaks = len(signal1.unpaired_peaks) + len(signal2.unpaired_peaks) - num_paired_peaks = len(signal1.paired_peaks) - # Combine unpaired peaks from both signals, tagging each peak with its respective signal - combined_unpaired_peaks = [(peak, signal1) for peak in signal1.unpaired_peaks] + [ - (peak, signal2) for peak in signal2.unpaired_peaks - ] - psd_highest_max = max(signal1.psd.max(), signal2.psd.max()) - - # Start with the similarity factor directly scaled to a percentage - mhi = similarity_factor - - # Bonus for ideal number of total peaks (1 or 2) - if num_paired_peaks >= DC_MAX_PEAKS: - mhi *= DC_MAX_PEAKS / num_paired_peaks # Reduce MHI if more than ideal number of peaks - - # Penalty from unpaired peaks weighted by their amplitude relative to the maximum PSD amplitude - unpaired_peak_penalty = 0 - if num_unpaired_peaks > DC_MAX_UNPAIRED_PEAKS_ALLOWED: - for peak, signal in combined_unpaired_peaks: - unpaired_peak_penalty += (signal.psd[peak] / psd_highest_max) * 30 - mhi -= unpaired_peak_penalty - - # Ensure the result lies between 0 and 100 by clipping the computed value - mhi = np.clip(mhi, 0, 100) - - return mhi_lut(mhi) - - -# LUT to transform the MHI into a textual value easy to understand for the users of the script -def mhi_lut(mhi: float) -> str: - ranges = [ - (70, 100, 'Excellent mechanical health'), - (55, 70, 'Good mechanical health'), - (45, 55, 'Acceptable mechanical health'), - (30, 45, 'Potential signs of a mechanical issue'), - (15, 30, 'Likely a mechanical issue'), - (0, 15, 'Mechanical issue detected'), - ] - mhi = np.clip(mhi, 1, 100) - return next( - (message for lower, upper, message in ranges if lower < mhi <= upper), - 'Unknown mechanical health', - ) - - -###################################################################### -# Graphing -###################################################################### - - -def plot_compare_frequency( - ax: plt.Axes, signal1: SignalData, signal2: SignalData, signal1_belt: str, signal2_belt: str, max_freq: float -) -> None: - # Plot the two belts PSD signals - ax.plot(signal1.freqs, signal1.psd, label='Belt ' + signal1_belt, 
color=KLIPPAIN_COLORS['orange']) - ax.plot(signal2.freqs, signal2.psd, label='Belt ' + signal2_belt, color=KLIPPAIN_COLORS['purple']) - - psd_highest_max = max(signal1.psd.max(), signal2.psd.max()) - - # Trace and annotate the peaks on the graph - paired_peak_count = 0 - unpaired_peak_count = 0 - offsets_table_data = [] - - for _, (peak1, peak2) in enumerate(signal1.paired_peaks): - label = ALPHABET[paired_peak_count] - amplitude_offset = abs(((signal2.psd[peak2[0]] - signal1.psd[peak1[0]]) / psd_highest_max) * 100) - frequency_offset = abs(signal2.freqs[peak2[0]] - signal1.freqs[peak1[0]]) - offsets_table_data.append([f'Peaks {label}', f'{frequency_offset:.1f} Hz', f'{amplitude_offset:.1f} %']) - - ax.plot(signal1.freqs[peak1[0]], signal1.psd[peak1[0]], 'x', color='black') - ax.plot(signal2.freqs[peak2[0]], signal2.psd[peak2[0]], 'x', color='black') - ax.plot( - [signal1.freqs[peak1[0]], signal2.freqs[peak2[0]]], - [signal1.psd[peak1[0]], signal2.psd[peak2[0]]], - ':', - color='gray', - ) +class PeakPairingResult(NamedTuple): + paired_peaks: List[Tuple[Tuple[int, float, float], Tuple[int, float, float]]] + unpaired_peaks1: List[int] + unpaired_peaks2: List[int] - ax.annotate( - label + '1', - (signal1.freqs[peak1[0]], signal1.psd[peak1[0]]), - textcoords='offset points', - xytext=(8, 5), - ha='left', - fontsize=13, - color='black', - ) - ax.annotate( - label + '2', - (signal2.freqs[peak2[0]], signal2.psd[peak2[0]]), - textcoords='offset points', - xytext=(8, 5), - ha='left', - fontsize=13, - color='black', - ) - paired_peak_count += 1 - - for peak in signal1.unpaired_peaks: - ax.plot(signal1.freqs[peak], signal1.psd[peak], 'x', color='black') - ax.annotate( - str(unpaired_peak_count + 1), - (signal1.freqs[peak], signal1.psd[peak]), - textcoords='offset points', - xytext=(8, 5), - ha='left', - fontsize=13, - color='red', - weight='bold', - ) - unpaired_peak_count += 1 - - for peak in signal2.unpaired_peaks: - ax.plot(signal2.freqs[peak], signal2.psd[peak], 'x', color='black') - ax.annotate( - str(unpaired_peak_count + 1), - (signal2.freqs[peak], signal2.psd[peak]), - textcoords='offset points', - xytext=(8, 5), - ha='left', - fontsize=13, - color='red', - weight='bold', - ) - unpaired_peak_count += 1 - - # Add estimated similarity to the graph - ax2 = ax.twinx() # To split the legends in two box - ax2.yaxis.set_visible(False) - ax2.plot([], [], ' ', label=f'Number of unpaired peaks: {unpaired_peak_count}') - - # Setting axis parameters, grid and graph title - ax.set_xlabel('Frequency (Hz)') - ax.set_xlim([0, max_freq]) - ax.set_ylabel('Power spectral density') - ax.set_ylim([0, psd_highest_max * 1.1]) - - ax.xaxis.set_minor_locator(matplotlib.ticker.AutoMinorLocator()) - ax.yaxis.set_minor_locator(matplotlib.ticker.AutoMinorLocator()) - ax.ticklabel_format(axis='y', style='scientific', scilimits=(0, 0)) - ax.grid(which='major', color='grey') - ax.grid(which='minor', color='lightgrey') - fontP = matplotlib.font_manager.FontProperties() - fontP.set_size('small') - ax.set_title( - 'Belts frequency profiles', - fontsize=14, - color=KLIPPAIN_COLORS['dark_orange'], - weight='bold', - ) - - # Print the table of offsets ontop of the graph below the original legend (upper right) - if len(offsets_table_data) > 0: - columns = [ - '', - 'Frequency delta', - 'Amplitude delta', - ] - offset_table = ax.table( - cellText=offsets_table_data, - colLabels=columns, - bbox=[0.66, 0.79, 0.33, 0.15], - loc='upper right', - cellLoc='center', + +class SignalData(NamedTuple): + freqs: np.ndarray + psd: np.ndarray + 
peaks: np.ndarray + paired_peaks: Optional[List[Tuple[Tuple[int, float, float], Tuple[int, float, float]]]] = None + unpaired_peaks: Optional[List[int]] = None + + +class BeltsGraphComputation: + PEAKS_DETECTION_THRESHOLD = 0.1 # Threshold to detect peaks in the PSD signal (10% of max) + DC_MAX_PEAKS = 2 # Maximum ideal number of peaks + DC_MAX_UNPAIRED_PEAKS_ALLOWED = 0 # No unpaired peaks are tolerated + + def __init__( + self, + measurements: List[Measurement], + kinematics: Optional[str], + max_freq: float, + accel_per_hz: Optional[float], + st_version: str, + ): + self.measurements = measurements + self.kinematics = kinematics + self.max_freq = max_freq + self.accel_per_hz = accel_per_hz + self.st_version = st_version + + def compute(self): + if len(self.measurements) != 2: + raise ValueError('This tool needs 2 measurements to work with (one for each belt)!') + + datas = [np.array(m['samples']) for m in self.measurements if m['samples'] is not None] + + # Get belt names for labels + belt_info = {'A': ' (axis 1,-1)', 'B': ' (axis 1, 1)'} + signal1_belt = self.measurements[0]['name'].split('_')[1] + signal2_belt = self.measurements[1]['name'].split('_')[1] + signal1_belt += belt_info.get(signal1_belt, '') + signal2_belt += belt_info.get(signal2_belt, '') + + # Compute calibration data + common_freqs = np.linspace(0, self.max_freq, 500) + signal1 = self._compute_signal_data(datas[0], common_freqs, self.max_freq) + signal2 = self._compute_signal_data(datas[1], common_freqs, self.max_freq) + del datas + + # Pair peaks + pairing_result = self._pair_peaks( + signal1.peaks, signal1.freqs, signal1.psd, signal2.peaks, signal2.freqs, signal2.psd ) - offset_table.auto_set_font_size(False) - offset_table.set_fontsize(8) - offset_table.auto_set_column_width([0, 1, 2]) - offset_table.set_zorder(100) - cells = [key for key in offset_table.get_celld().keys()] - for cell in cells: - offset_table[cell].set_facecolor('white') - offset_table[cell].set_alpha(0.6) - - ax.legend(loc='upper left', prop=fontP) - ax2.legend(loc='upper right', prop=fontP) - - return - - -# Compute quantile-quantile plot to compare the two belts -def plot_versus_belts( - ax: plt.Axes, - common_freqs: np.ndarray, - signal1: SignalData, - signal2: SignalData, - signal1_belt: str, - signal2_belt: str, -) -> None: - ax.set_title('Cross-belts comparison plot', fontsize=14, color=KLIPPAIN_COLORS['dark_orange'], weight='bold') - - max_psd = max(np.max(signal1.psd), np.max(signal2.psd)) - ideal_line = np.linspace(0, max_psd * 1.1, 500) - green_boundary = ideal_line + (0.35 * max_psd * np.exp(-ideal_line / (0.6 * max_psd))) - ax.fill_betweenx(ideal_line, ideal_line, green_boundary, color='green', alpha=0.15) - ax.fill_between(ideal_line, ideal_line, green_boundary, color='green', alpha=0.15, label='Good zone') - ax.plot( - ideal_line, - ideal_line, - '--', - label='Ideal line', - color='red', - linewidth=2, - ) - - ax.plot(signal1.psd, signal2.psd, color='dimgrey', marker='o', markersize=1.5) - ax.fill_betweenx(signal2.psd, signal1.psd, color=KLIPPAIN_COLORS['red_pink'], alpha=0.1) - - paired_peak_count = 0 - unpaired_peak_count = 0 - - for _, (peak1, peak2) in enumerate(signal1.paired_peaks): - label = ALPHABET[paired_peak_count] - freq1 = signal1.freqs[peak1[0]] - freq2 = signal2.freqs[peak2[0]] - - if abs(freq1 - freq2) < 1: - ax.plot(signal1.psd[peak1[0]], signal2.psd[peak2[0]], marker='o', color='black', markersize=7) - ax.annotate( - f'{label}1/{label}2', - (signal1.psd[peak1[0]], signal2.psd[peak2[0]]), - textcoords='offset 
points', - xytext=(-7, 7), - fontsize=13, - color='black', - ) - else: - ax.plot( - signal1.psd[peak2[0]], signal2.psd[peak2[0]], marker='o', color=KLIPPAIN_COLORS['purple'], markersize=7 - ) - ax.plot( - signal1.psd[peak1[0]], signal2.psd[peak1[0]], marker='o', color=KLIPPAIN_COLORS['orange'], markersize=7 - ) - ax.annotate( - f'{label}1', - (signal1.psd[peak1[0]], signal2.psd[peak1[0]]), - textcoords='offset points', - xytext=(0, 7), - fontsize=13, - color='black', - ) - ax.annotate( - f'{label}2', - (signal1.psd[peak2[0]], signal2.psd[peak2[0]]), - textcoords='offset points', - xytext=(0, 7), - fontsize=13, - color='black', - ) - paired_peak_count += 1 - - for _, peak_index in enumerate(signal1.unpaired_peaks): - ax.plot( - signal1.psd[peak_index], signal2.psd[peak_index], marker='o', color=KLIPPAIN_COLORS['orange'], markersize=7 + signal1 = signal1._replace( + paired_peaks=pairing_result.paired_peaks, unpaired_peaks=pairing_result.unpaired_peaks1 ) - ax.annotate( - str(unpaired_peak_count + 1), - (signal1.psd[peak_index], signal2.psd[peak_index]), - textcoords='offset points', - fontsize=13, - weight='bold', - color=KLIPPAIN_COLORS['red_pink'], - xytext=(0, 7), + signal2 = signal2._replace( + paired_peaks=pairing_result.paired_peaks, unpaired_peaks=pairing_result.unpaired_peaks2 ) - unpaired_peak_count += 1 - for _, peak_index in enumerate(signal2.unpaired_peaks): - ax.plot( - signal1.psd[peak_index], signal2.psd[peak_index], marker='o', color=KLIPPAIN_COLORS['purple'], markersize=7 + # Compute similarity factor and MHI if needed (for symmetric kinematics) + similarity_factor = None + mhi = None + if self.kinematics in {'limited_corexy', 'corexy', 'limited_corexz', 'corexz'}: + correlation, _ = pearsonr(signal1.psd, signal2.psd) + similarity_factor = correlation * 100 + similarity_factor = np.clip(similarity_factor, 0, 100) + ConsoleOutput.print(f'Belts estimated similarity: {similarity_factor:.1f}%') + + mhi = self._compute_mhi(similarity_factor, signal1, signal2) + ConsoleOutput.print(f'Mechanical health: {mhi}') + + # Prepare data for plotting + computation_result = { + 'signal1': signal1, + 'signal2': signal2, + 'similarity_factor': similarity_factor, + 'mhi': mhi, + 'signal1_belt': signal1_belt, + 'signal2_belt': signal2_belt, + 'kinematics': self.kinematics, + 'accel_per_hz': self.accel_per_hz, + 'st_version': self.st_version, + 'measurements': self.measurements, + 'max_freq': self.max_freq, + } + + return computation_result + + def _compute_signal_data(self, data: np.ndarray, common_freqs: np.ndarray, max_freq: float): + helper = get_shaper_calibrate_module().ShaperCalibrate(printer=None) + calibration_data = helper.process_accelerometer_data(data) + + freqs = calibration_data.freq_bins[calibration_data.freq_bins <= max_freq] + psd = calibration_data.get_psd('all')[calibration_data.freq_bins <= max_freq] + + # Re-interpolate the PSD signal to a common frequency range to be able to plot them one against the other + interp_psd = np.interp(common_freqs, freqs, psd) + + _, peaks, _ = detect_peaks( + interp_psd, + common_freqs, + self.PEAKS_DETECTION_THRESHOLD * interp_psd.max(), + window_size=20, + vicinity=15, ) - ax.annotate( - str(unpaired_peak_count + 1), - (signal1.psd[peak_index], signal2.psd[peak_index]), - textcoords='offset points', - fontsize=13, - weight='bold', - color=KLIPPAIN_COLORS['red_pink'], - xytext=(0, 7), + + return SignalData(freqs=common_freqs, psd=interp_psd, peaks=peaks) + + # This function create pairs of peaks that are close in frequency on two curves 
(that are known + # to be resonances points and must be similar on both belts on a CoreXY kinematic) + def _pair_peaks( + self, + peaks1: np.ndarray, + freqs1: np.ndarray, + psd1: np.ndarray, + peaks2: np.ndarray, + freqs2: np.ndarray, + psd2: np.ndarray, + ) -> PeakPairingResult: + # Compute a dynamic detection threshold to filter and pair peaks efficiently + # even if the signal is very noisy (this get clipped to a maximum of 10Hz diff) + distances = [] + for p1 in peaks1: + for p2 in peaks2: + distances.append(abs(freqs1[p1] - freqs2[p2])) + distances = np.array(distances) + + median_distance = np.median(distances) + iqr = np.percentile(distances, 75) - np.percentile(distances, 25) + + threshold = median_distance + 1.5 * iqr + threshold = min(threshold, 10) + + # Pair the peaks using the dynamic thresold + paired_peaks = [] + unpaired_peaks1 = list(peaks1) + unpaired_peaks2 = list(peaks2) + + while unpaired_peaks1 and unpaired_peaks2: + min_distance = threshold + 1 + pair = None + + for p1 in unpaired_peaks1: + for p2 in unpaired_peaks2: + distance = abs(freqs1[p1] - freqs2[p2]) + if distance < min_distance: + min_distance = distance + pair = (p1, p2) + + if pair is None: # No more pairs below the threshold + break + + p1, p2 = pair + paired_peaks.append(((p1, freqs1[p1], psd1[p1]), (p2, freqs2[p2], psd2[p2]))) + unpaired_peaks1.remove(p1) + unpaired_peaks2.remove(p2) + + return PeakPairingResult( + paired_peaks=paired_peaks, unpaired_peaks1=unpaired_peaks1, unpaired_peaks2=unpaired_peaks2 ) - unpaired_peak_count += 1 - - ax.set_xlabel(f'Belt {signal1_belt}') - ax.set_ylabel(f'Belt {signal2_belt}') - ax.set_xlim([0, max_psd * 1.1]) - ax.set_ylim([0, max_psd * 1.1]) - - ax.xaxis.set_minor_locator(matplotlib.ticker.AutoMinorLocator()) - ax.yaxis.set_minor_locator(matplotlib.ticker.AutoMinorLocator()) - ax.ticklabel_format(style='scientific', scilimits=(0, 0)) - ax.grid(which='major', color='grey') - ax.grid(which='minor', color='lightgrey') - - fontP = matplotlib.font_manager.FontProperties() - fontP.set_size('medium') - ax.legend(loc='upper left', prop=fontP) - - return - - -###################################################################### -# Custom tools -###################################################################### - - -# Original Klipper function to get the PSD data of a raw accelerometer signal -def compute_signal_data(data: np.ndarray, common_freqs: np.ndarray, max_freq: float) -> SignalData: - helper = shaper_calibrate.ShaperCalibrate(printer=None) - calibration_data = helper.process_accelerometer_data(data) - - freqs = calibration_data.freq_bins[calibration_data.freq_bins <= max_freq] - psd = calibration_data.get_psd('all')[calibration_data.freq_bins <= max_freq] - - # Re-interpolate the PSD signal to a common frequency range to be able to plot them one against the other - interp_psd = np.interp(common_freqs, freqs, psd) - - _, peaks, _ = detect_peaks( - interp_psd, common_freqs, PEAKS_DETECTION_THRESHOLD * interp_psd.max(), window_size=20, vicinity=15 - ) - - return SignalData(freqs=common_freqs, psd=interp_psd, peaks=peaks) - - -###################################################################### -# Startup and main routines -###################################################################### - - -def belts_calibration( - measurements: List[Measurement], - kinematics: Optional[str], - klipperdir: str = '~/klipper', - max_freq: float = 200.0, - accel_per_hz: Optional[float] = None, - st_version: str = 'unknown', -) -> plt.Figure: - if len(measurements) != 2: - 
raise ValueError('This tool needs 2 measurements to work with (one for each belt)!') - - global shaper_calibrate - shaper_calibrate = setup_klipper_import(klipperdir) - - datas = [np.array(m['samples']) for m in measurements if m['samples'] is not None] - - # Get the belts name for the legend to avoid putting the full file name - belt_info = {'A': ' (axis 1,-1)', 'B': ' (axis 1, 1)'} - signal1_belt = measurements[0]['name'].split('_')[1] - signal2_belt = measurements[1]['name'].split('_')[1] - signal1_belt += belt_info.get(signal1_belt, '') - signal2_belt += belt_info.get(signal2_belt, '') - - # Compute calibration data for the two datasets with automatic peaks detection - common_freqs = np.linspace(0, max_freq, 500) - signal1 = compute_signal_data(datas[0], common_freqs, max_freq) - signal2 = compute_signal_data(datas[1], common_freqs, max_freq) - del datas - - # Pair the peaks across the two datasets - pairing_result = pair_peaks(signal1.peaks, signal1.freqs, signal1.psd, signal2.peaks, signal2.freqs, signal2.psd) - signal1 = signal1._replace(paired_peaks=pairing_result.paired_peaks, unpaired_peaks=pairing_result.unpaired_peaks1) - signal2 = signal2._replace(paired_peaks=pairing_result.paired_peaks, unpaired_peaks=pairing_result.unpaired_peaks2) - - # R² proved to be pretty instable to compute the similarity between the two belts - # So now, we use the Pearson correlation coefficient to compute the similarity - correlation, _ = pearsonr(signal1.psd, signal2.psd) - similarity_factor = correlation * 100 - similarity_factor = np.clip(similarity_factor, 0, 100) - ConsoleOutput.print(f'Belts estimated similarity: {similarity_factor:.1f}%') - - mhi = compute_mhi(similarity_factor, signal1, signal2) - ConsoleOutput.print(f'[experimental] Mechanical health: {mhi}') - - fig, ((ax1, ax3)) = plt.subplots( - 1, - 2, - gridspec_kw={ - 'width_ratios': [5, 3], - 'bottom': 0.080, - 'top': 0.840, - 'left': 0.050, - 'right': 0.985, - 'hspace': 0.166, - 'wspace': 0.138, - }, - ) - fig.set_size_inches(15, 7) - - # Add title - title_line1 = 'RELATIVE BELTS CALIBRATION TOOL' - fig.text( - 0.060, 0.947, title_line1, ha='left', va='bottom', fontsize=20, color=KLIPPAIN_COLORS['purple'], weight='bold' - ) - try: - filename = measurements[0]['name'] - dt = datetime.strptime(f"{filename.split('_')[2]} {filename.split('_')[3]}", '%Y%m%d %H%M%S') - title_line2 = dt.strftime('%x %X') - if kinematics is not None: - title_line2 += ' -- ' + kinematics.upper() + ' kinematics' - except Exception: - ConsoleOutput.print( - f'Warning: Unable to parse the date from the measurements names ' - f"({measurements[0]['name']}, {measurements[1]['name']})" + + def _compute_mhi(self, similarity_factor: float, signal1, signal2) -> str: + num_unpaired_peaks = len(signal1.unpaired_peaks) + len(signal2.unpaired_peaks) + num_paired_peaks = len(signal1.paired_peaks) + # Combine unpaired peaks from both signals, tagging each peak with its respective signal + combined_unpaired_peaks = [(peak, signal1) for peak in signal1.unpaired_peaks] + [ + (peak, signal2) for peak in signal2.unpaired_peaks + ] + psd_highest_max = max(signal1.psd.max(), signal2.psd.max()) + + # Start with the similarity factor directly scaled to a percentage + mhi = similarity_factor + + # Bonus for ideal number of total peaks (1 or 2) + if num_paired_peaks >= self.DC_MAX_PEAKS: + mhi *= self.DC_MAX_PEAKS / num_paired_peaks # Reduce MHI if more than ideal number of peaks + + # Penalty from unpaired peaks weighted by their amplitude relative to the maximum PSD amplitude + 
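# Small sketch of the similarity metric used above: since both PSDs were interpolated onto
# the same frequency grid, their Pearson correlation coefficient can be scaled directly
# into a 0-100% "belt similarity" figure (inputs below are illustrative).
import numpy as np
from scipy.stats import pearsonr

def belt_similarity_percent(psd1, psd2):
    correlation, _ = pearsonr(psd1, psd2)
    return float(np.clip(correlation * 100.0, 0.0, 100.0))

freqs = np.linspace(0, 200, 500)
psd_a = np.exp(-((freqs - 50.0) ** 2) / 50.0)
psd_b = np.exp(-((freqs - 52.0) ** 2) / 50.0)   # slightly shifted resonance on the other belt
print(f'Belts estimated similarity: {belt_similarity_percent(psd_a, psd_b):.1f}%')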
unpaired_peak_penalty = 0 + if num_unpaired_peaks > self.DC_MAX_UNPAIRED_PEAKS_ALLOWED: + for peak, signal in combined_unpaired_peaks: + unpaired_peak_penalty += (signal.psd[peak] / psd_highest_max) * 30 + mhi -= unpaired_peak_penalty + + # Ensure the result lies between 0 and 100 by clipping the computed value + mhi = np.clip(mhi, 0, 100) + + return self._mhi_lut(mhi) + + # LUT to transform the MHI into a textual value easy to understand for the users of the script + def _mhi_lut(self, mhi: float) -> str: + ranges = [ + (70, 100, 'Excellent mechanical health'), + (55, 70, 'Good mechanical health'), + (45, 55, 'Acceptable mechanical health'), + (30, 45, 'Potential signs of a mechanical issue'), + (15, 30, 'Likely a mechanical issue'), + (0, 15, 'Mechanical issue detected'), + ] + mhi = np.clip(mhi, 1, 100) + return next( + (message for lower, upper, message in ranges if lower < mhi <= upper), + 'Unknown mechanical health', ) - title_line2 = measurements[0]['name'] + ' / ' + measurements[1]['name'] - fig.text(0.060, 0.939, title_line2, ha='left', va='top', fontsize=16, color=KLIPPAIN_COLORS['dark_purple']) - - # We add the estimated similarity and the MHI value to the title only if the kinematics is CoreXY - # as it make no sense to compute these values for other kinematics that doesn't have paired belts - if kinematics in {'limited_corexy', 'corexy', 'limited_corexz', 'corexz'}: - title_line3 = f'| Estimated similarity: {similarity_factor:.1f}%' - title_line4 = f'| {mhi} (experimental)' - fig.text(0.55, 0.985, title_line3, ha='left', va='top', fontsize=14, color=KLIPPAIN_COLORS['dark_purple']) - fig.text(0.55, 0.950, title_line4, ha='left', va='top', fontsize=14, color=KLIPPAIN_COLORS['dark_purple']) - - # Add the accel_per_hz value to the title - title_line5 = f'| Accel per Hz used: {accel_per_hz} mm/s²/Hz' - fig.text(0.551, 0.915, title_line5, ha='left', va='top', fontsize=10, color=KLIPPAIN_COLORS['dark_purple']) - - # Plot the graphs - plot_compare_frequency(ax1, signal1, signal2, signal1_belt, signal2_belt, max_freq) - plot_versus_belts(ax3, common_freqs, signal1, signal2, signal1_belt, signal2_belt) - - # Adding a small Klippain logo to the top left corner of the figure - ax_logo = fig.add_axes([0.001, 0.894, 0.105, 0.105], anchor='NW') - ax_logo.imshow(plt.imread(os.path.join(os.path.dirname(os.path.abspath(__file__)), 'klippain.png'))) - ax_logo.axis('off') - - # Adding Shake&Tune version in the top right corner - if st_version != 'unknown': - fig.text(0.995, 0.980, st_version, ha='right', va='bottom', fontsize=8, color=KLIPPAIN_COLORS['purple']) - - return fig - - -def main(): - # Parse command-line arguments - usage = '%prog [options] ' - opts = optparse.OptionParser(usage) - opts.add_option('-o', '--output', type='string', dest='output', default=None, help='filename of output graph') - opts.add_option('-f', '--max_freq', type='float', default=200.0, help='maximum frequency to graph') - opts.add_option('--accel_per_hz', type='float', default=None, help='accel_per_hz used during the measurement') - opts.add_option( - '-k', '--klipper_dir', type='string', dest='klipperdir', default='~/klipper', help='main klipper directory' - ) - opts.add_option( - '-m', - '--kinematics', - type='string', - dest='kinematics', - help='machine kinematics configuration', - ) - options, args = opts.parse_args() - if len(args) < 1: - opts.error('Incorrect number of arguments') - if options.output is None: - opts.error('You must specify an output file.png to use the script (option -o)') - - 
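# Condensed sketch of the MHI heuristic implemented above (the constants max_peaks and
# max_unpaired_allowed below are illustrative stand-ins for the class attributes
# DC_MAX_PEAKS and DC_MAX_UNPAIRED_PEAKS_ALLOWED, whose values are defined elsewhere in
# the class): start from the similarity percentage, scale it down when there are more
# paired peaks than the ideal count, subtract a penalty for each unpaired peak weighted by
# its amplitude relative to the highest PSD value, then map the clipped result to a label.
import numpy as np

MHI_RANGES = [
    (70, 100, 'Excellent mechanical health'),
    (55, 70, 'Good mechanical health'),
    (45, 55, 'Acceptable mechanical health'),
    (30, 45, 'Potential signs of a mechanical issue'),
    (15, 30, 'Likely a mechanical issue'),
    (0, 15, 'Mechanical issue detected'),
]

def mhi_sketch(similarity, num_paired, unpaired_amplitudes, psd_max,
               max_peaks=2, max_unpaired_allowed=0):
    mhi = similarity
    if num_paired >= max_peaks:
        mhi *= max_peaks / num_paired               # reduce MHI when more peaks than ideal are paired
    if len(unpaired_amplitudes) > max_unpaired_allowed:
        mhi -= sum((amp / psd_max) * 30 for amp in unpaired_amplitudes)
    mhi = float(np.clip(mhi, 1, 100))
    return next((msg for low, high, msg in MHI_RANGES if low < mhi <= high),
                'Unknown mechanical health')

print(mhi_sketch(similarity=92.0, num_paired=2, unpaired_amplitudes=[], psd_max=1.0))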
measurements_manager = MeasurementsManager(10) - if args[0].endswith('.csv'): - measurements_manager.load_from_csvs(args) - elif args[0].endswith('.stdata'): - measurements_manager.load_from_stdata(args[0]) - else: - raise ValueError('Only .stdata or legacy Klipper raw accelerometer CSV files are supported!') - - fig = belts_calibration( - measurements_manager.get_measurements(), - options.kinematics, - options.klipperdir, - options.max_freq, - options.accel_per_hz, - 'unknown', - ) - fig.savefig(options.output, dpi=150) - - -if __name__ == '__main__': - main() diff --git a/shaketune/graph_creators/graph_creator.py b/shaketune/graph_creators/graph_creator.py index 0106828..7ce1464 100644 --- a/shaketune/graph_creators/graph_creator.py +++ b/shaketune/graph_creators/graph_creator.py @@ -11,6 +11,7 @@ import abc +import os from datetime import datetime from typing import Optional @@ -18,6 +19,7 @@ from ..helpers.accelerometer import MeasurementsManager from ..shaketune_config import ShakeTuneConfig +from .plotter import Plotter class GraphCreator(abc.ABC): @@ -38,25 +40,33 @@ def __init__(self, config: ShakeTuneConfig): self._version = ShakeTuneConfig.get_git_version() self._type = self.__class__.graph_type self._folder = self._config.get_results_folder(self._type) + self._plotter = Plotter() + self._custom_filepath = None def _save_figure( self, fig: Figure, measurements_manager: MeasurementsManager, axis_label: Optional[str] = None ) -> None: - axis_suffix = f'_{axis_label}' if axis_label else '' - filename = self._folder / f"{self._type.replace(' ', '')}_{self._graph_date}{axis_suffix}" - fig.savefig(f'{filename}.png', dpi=self._config.dpi) + if os.environ.get('SHAKETUNE_IN_CLI') == '1' and self._custom_filepath is not None: + fig.savefig(f'{self._custom_filepath}', dpi=self._config.dpi) + else: + axis_suffix = f'_{axis_label}' if axis_label else '' + filename = self._folder / f"{self._type.replace(' ', '')}_{self._graph_date}{axis_suffix}" + fig.savefig(f'{filename}.png', dpi=self._config.dpi) - if self._config.keep_raw_data: + if self._config.keep_raw_data and not os.environ.get('SHAKETUNE_IN_CLI') == '1': measurements_manager.save_stdata(f'{filename}.stdata') def get_type(self) -> str: return self._type + def override_output_target(self, filepath: str) -> None: + self._custom_filepath = filepath + @abc.abstractmethod def create_graph(self, measurements_manager: MeasurementsManager) -> None: pass - def clean_old_files(self, keep_results: int = 3) -> None: + def clean_old_files(self, keep_results: int = 10) -> None: files = sorted(self._folder.glob('*.png'), key=lambda f: f.stat().st_mtime, reverse=True) if len(files) <= keep_results: return # No need to delete any files diff --git a/shaketune/graph_creators/plotter.py b/shaketune/graph_creators/plotter.py new file mode 100644 index 0000000..0db382b --- /dev/null +++ b/shaketune/graph_creators/plotter.py @@ -0,0 +1,1495 @@ +# Shake&Tune: 3D printer analysis tools +# +# Copyright (C) 2022 - 2024 Félix Boisselier (Frix_x on Discord) +# Licensed under the GNU General Public License v3.0 (GPL-3.0) +# +# File: plotter.py +# Description: Contains the Plotter class to handle the plotting logic in order to +# create the .png graphs in Shake&Tune with all the infos using matplotlib. 
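# Sketch of the output-target override introduced in graph_creator.py above: when running
# through the CLI wrapper (SHAKETUNE_IN_CLI=1) and a custom path was set via
# override_output_target(), the figure is written straight to that path instead of the
# timestamped file in the results folder. Names below are illustrative, not the exact
# method signature used in the class.
import os
from pathlib import Path
from typing import Optional

def save_figure_sketch(fig, results_folder: Path, graph_type: str, graph_date: str,
                       dpi: int, custom_filepath: Optional[str] = None) -> Path:
    if os.environ.get('SHAKETUNE_IN_CLI') == '1' and custom_filepath is not None:
        target = Path(custom_filepath)
    else:
        target = results_folder / f"{graph_type.replace(' ', '')}_{graph_date}.png"
    fig.savefig(target, dpi=dpi)
    return target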
+ + +import os +from datetime import datetime + +import matplotlib +import matplotlib.font_manager +import matplotlib.pyplot as plt +import matplotlib.ticker +import numpy as np +from scipy.interpolate import interp1d + +from ..helpers.common_func import compute_mechanical_parameters +from ..helpers.console_output import ConsoleOutput + +matplotlib.use('Agg') # Use 'Agg' backend for non-GUI environments + + +class Plotter: + KLIPPAIN_COLORS = { + 'purple': '#70088C', + 'orange': '#FF8D32', + 'dark_purple': '#150140', + 'dark_orange': '#F24130', + 'red_pink': '#F2055C', + } + + # static_frequency tool + SPECTROGRAM_LOW_PERCENTILE_FILTER = 5 + + # belts tool + ALPHABET = 'αβγδεζηθικλμνξοπρστυφχψω' # For paired peak names (using the Greek alphabet to avoid confusion with belt names) + + # input shaper tool + SPECTROGRAM_LOW_PERCENTILE_FILTER = 5 + MAX_VIBRATIONS_PLOTTED = 80.0 + MAX_VIBRATIONS_PLOTTED_ZOOM = 1.25 # 1.25x max vibs values from the standard filters selection + + def plot_axes_map_detection_graph(self, data): + time_data = data['acceleration_data_0'] + accel_data = data['acceleration_data_1'] + gravity = data['gravity'] + average_noise_intensity_label = data['average_noise_intensity_label'] + position_data = data['position_data'] + direction_vectors = data['direction_vectors'] + angle_errors = data['angle_errors'] + formatted_direction_vector = data['formatted_direction_vector'] + measurements = data['measurements'] + st_version = data['st_version'] + + fig, ((ax_1, ax_3)) = plt.subplots( + 1, + 2, + gridspec_kw={ + 'width_ratios': [5, 3], + 'bottom': 0.080, + 'top': 0.840, + 'left': 0.055, + 'right': 0.960, + 'hspace': 0.166, + 'wspace': 0.060, + }, + ) + fig.set_size_inches(15, 7) + ax_3.remove() + ax_3 = fig.add_subplot(122, projection='3d') + + # ------------------------------------------------------------------------------------------------------------------------------------------------ + # Plot the differents graphs + for i, (time, (accel_x, accel_y, accel_z)) in enumerate(zip(time_data, accel_data)): + ax_1.plot( + time, + accel_x, + label='X' if i == 0 else '', + color=self.KLIPPAIN_COLORS['purple'], + linewidth=0.5, + zorder=50 if i == 0 else 10, + ) + ax_1.plot( + time, + accel_y, + label='Y' if i == 0 else '', + color=self.KLIPPAIN_COLORS['orange'], + linewidth=0.5, + zorder=50 if i == 1 else 10, + ) + ax_1.plot( + time, + accel_z, + label='Z' if i == 0 else '', + color=self.KLIPPAIN_COLORS['red_pink'], + linewidth=0.5, + zorder=50 if i == 2 else 10, + ) + + ax_1.set_xlabel('Time (s)') + ax_1.set_ylabel('Acceleration (mm/s²)') + + ax_1.xaxis.set_minor_locator(matplotlib.ticker.AutoMinorLocator()) + ax_1.yaxis.set_minor_locator(matplotlib.ticker.AutoMinorLocator()) + ax_1.ticklabel_format(axis='y', style='scientific', scilimits=(0, 0)) + ax_1.grid(which='major', color='grey') + ax_1.grid(which='minor', color='lightgrey') + fontP = matplotlib.font_manager.FontProperties() + fontP.set_size('small') + ax_1.set_title( + 'Acceleration (gravity offset removed)', + fontsize=14, + color=self.KLIPPAIN_COLORS['dark_orange'], + weight='bold', + ) + + ax_1.legend(loc='upper left', prop=fontP) + + # Add the gravity and noise level to the graph legend + ax_1_2 = ax_1.twinx() + ax_1_2.yaxis.set_visible(False) + ax_1_2.plot([], [], ' ', label=average_noise_intensity_label) + ax_1_2.plot([], [], ' ', label=f'Measured gravity: {gravity / 1000:0.3f} m/s²') + ax_1_2.legend(loc='upper right', prop=fontP) + + # 
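# The Plotter pins matplotlib to the non-interactive 'Agg' backend because graphs are
# rendered to .png on a headless printer host. Minimal sketch of that rendering pattern
# with throwaway data (the output filename is illustrative):
import matplotlib

matplotlib.use('Agg')  # conventionally selected before pyplot gets used
import matplotlib.pyplot as plt
import numpy as np

t = np.linspace(0.0, 1.0, 1000)
accel = np.sin(2 * np.pi * 60.0 * t)  # fake 60 Hz vibration trace

fig, ax = plt.subplots(figsize=(15, 7))
ax.plot(t, accel, linewidth=0.5)
ax.set_xlabel('Time (s)')
ax.set_ylabel('Acceleration (mm/s²)')
fig.savefig('headless_example.png', dpi=150)
plt.close(fig)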
------------------------------------------------------------------------------------------------------------------------------------------------ + # Plot the 3D path of the movement + for i, ((position_x, position_y, position_z), average_direction_vector, angle_error) in enumerate( + zip(position_data, direction_vectors, angle_errors) + ): + ax_3.plot( + position_x, position_y, position_z, color=self.KLIPPAIN_COLORS['orange'], linestyle=':', linewidth=2 + ) + ax_3.scatter(position_x[0], position_y[0], position_z[0], color=self.KLIPPAIN_COLORS['red_pink'], zorder=10) + ax_3.text( + position_x[0] + 1, + position_y[0], + position_z[0], + str(i + 1), + color='black', + fontsize=16, + fontweight='bold', + zorder=20, + ) + + # Plot the average direction vector + start_position = np.array([position_x[0], position_y[0], position_z[0]]) + end_position = start_position + average_direction_vector * np.linalg.norm( + [position_x[-1] - position_x[0], position_y[-1] - position_y[0], position_z[-1] - position_z[0]] + ) + ax_3.plot( + [start_position[0], end_position[0]], + [start_position[1], end_position[1]], + [start_position[2], end_position[2]], + label=f'{["X", "Y", "Z"][i]} angle: {angle_error:0.2f}°', + color=self.KLIPPAIN_COLORS['purple'], + linestyle='-', + linewidth=2, + ) + + ax_3.set_xlabel('X Position (mm)') + ax_3.set_ylabel('Y Position (mm)') + ax_3.set_zlabel('Z Position (mm)') + + ax_3.xaxis.set_minor_locator(matplotlib.ticker.AutoMinorLocator()) + ax_3.yaxis.set_minor_locator(matplotlib.ticker.AutoMinorLocator()) + ax_3.grid(which='major', color='grey') + ax_3.grid(which='minor', color='lightgrey') + fontP = matplotlib.font_manager.FontProperties() + fontP.set_size('small') + ax_3.set_title( + 'Estimated movement in 3D space', + fontsize=14, + color=self.KLIPPAIN_COLORS['dark_orange'], + weight='bold', + ) + + ax_3.legend(loc='upper left', prop=fontP) + # ------------------------------------------------------------------------------------------------------------------------------------------------ + + # Add title + title_line1 = 'AXES MAP CALIBRATION TOOL' + fig.text( + 0.060, + 0.947, + title_line1, + ha='left', + va='bottom', + fontsize=20, + color=self.KLIPPAIN_COLORS['purple'], + weight='bold', + ) + try: + filename = measurements[0]['name'] + dt = datetime.strptime(f"{filename.split('_')[2]} {filename.split('_')[3]}", '%Y%m%d %H%M%S') + title_line2 = dt.strftime('%x %X') + if self.accel is not None: + title_line2 += f' -- at {self.accel:0.0f} mm/s²' + except Exception: + title_line2 = measurements[0]['name'] + ' ...' 
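# The 3D view above labels each measured movement with an angle error between its averaged
# direction vector and the axis it was supposed to follow. One straightforward way to
# compute such an angle is shown below; this is a generic sketch, not necessarily the exact
# formula used by the axes-map helper that produced data['angle_errors'].
import numpy as np

def angle_error_deg(direction, expected_axis):
    d = np.asarray(direction, dtype=float)
    e = np.asarray(expected_axis, dtype=float)
    d /= np.linalg.norm(d)
    e /= np.linalg.norm(e)
    return float(np.degrees(np.arccos(np.clip(np.dot(d, e), -1.0, 1.0))))

# A move that should follow +X but drifts slightly toward +Y:
print(f"{angle_error_deg([0.998, 0.06, 0.005], [1.0, 0.0, 0.0]):0.2f}°")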
+ fig.text(0.060, 0.939, title_line2, ha='left', va='top', fontsize=16, color=self.KLIPPAIN_COLORS['dark_purple']) + + title_line3 = f'| Detected axes_map: {formatted_direction_vector}' + fig.text(0.50, 0.985, title_line3, ha='left', va='top', fontsize=16, color=self.KLIPPAIN_COLORS['dark_purple']) + + # Adding logo + current_dir = os.path.dirname(__file__) + image_path = os.path.join(current_dir, 'klippain.png') + ax_logo = fig.add_axes([0.001, 0.894, 0.105, 0.105], anchor='NW') + ax_logo.imshow(plt.imread(image_path)) + ax_logo.axis('off') + + if st_version != 'unknown': + fig.text( + 0.995, 0.980, st_version, ha='right', va='bottom', fontsize=8, color=self.KLIPPAIN_COLORS['purple'] + ) + + return fig + + def plot_static_frequency_graph(self, data): + # Extract data from computation_result + freq = data['freq'] + duration = data['duration'] + accel_per_hz = data['accel_per_hz'] + st_version = data['st_version'] + measurements = data['measurements'] + t = data['t'] + bins = data['bins'] + pdata = data['pdata'] + max_freq = data['max_freq'] + + fig, ((ax_1, ax_3)) = plt.subplots( + 1, + 2, + gridspec_kw={ + 'width_ratios': [5, 3], + 'bottom': 0.080, + 'top': 0.840, + 'left': 0.050, + 'right': 0.985, + 'hspace': 0.166, + 'wspace': 0.138, + }, + ) + fig.set_size_inches(15, 7) + + title_line1 = 'STATIC FREQUENCY HELPER TOOL' + fig.text( + 0.060, + 0.947, + title_line1, + ha='left', + va='bottom', + fontsize=20, + color=self.KLIPPAIN_COLORS['purple'], + weight='bold', + ) + try: + filename_parts = measurements[0]['name'].split('_') + dt = datetime.strptime(f'{filename_parts[2]} {filename_parts[3]}', '%Y%m%d %H%M%S') + title_line2 = dt.strftime('%x %X') + ' -- ' + filename_parts[1].upper() + ' axis' + title_line3 = f'| Maintained frequency: {freq}Hz for {duration}s' + title_line4 = f'| Accel per Hz used: {accel_per_hz} mm/s²/Hz' if accel_per_hz is not None else '' + except Exception: + title_line2 = measurements[0]['name'] + title_line3 = '' + title_line4 = '' + fig.text(0.060, 0.939, title_line2, ha='left', va='top', fontsize=16, color=self.KLIPPAIN_COLORS['dark_purple']) + fig.text(0.55, 0.985, title_line3, ha='left', va='top', fontsize=14, color=self.KLIPPAIN_COLORS['dark_purple']) + fig.text(0.55, 0.950, title_line4, ha='left', va='top', fontsize=11, color=self.KLIPPAIN_COLORS['dark_purple']) + + ax_1.set_title( + 'Time-Frequency Spectrogram', fontsize=14, color=self.KLIPPAIN_COLORS['dark_orange'], weight='bold' + ) + + vmin_value = np.percentile(pdata, self.SPECTROGRAM_LOW_PERCENTILE_FILTER) + + cm = 'inferno' + norm = matplotlib.colors.LogNorm(vmin=vmin_value) + ax_1.imshow( + pdata.T, + norm=norm, + cmap=cm, + aspect='auto', + extent=[t[0], t[-1], bins[0], bins[-1]], + origin='lower', + interpolation='antialiased', + ) + + ax_1.set_xlim([0.0, max_freq]) + ax_1.set_ylabel('Time (s)') + ax_1.set_xlabel('Frequency (Hz)') + + # Integrate the energy over the frequency bins for each time step and plot this vertically + ax_3.plot(np.trapz(pdata, t, axis=0), bins, color=self.KLIPPAIN_COLORS['orange']) + ax_3.set_title('Vibrations', fontsize=14, color=self.KLIPPAIN_COLORS['dark_orange'], weight='bold') + ax_3.set_xlabel('Cumulative Energy') + ax_3.set_ylabel('Time (s)') + ax_3.set_ylim([bins[0], bins[-1]]) + + ax_3.xaxis.set_minor_locator(matplotlib.ticker.AutoMinorLocator()) + ax_3.yaxis.set_minor_locator(matplotlib.ticker.AutoMinorLocator()) + ax_3.ticklabel_format(axis='x', style='scientific', scilimits=(0, 0)) + ax_3.grid(which='major', color='grey') + ax_3.grid(which='minor', 
color='lightgrey') + + # Adding logo + current_dir = os.path.dirname(__file__) + image_path = os.path.join(current_dir, 'klippain.png') + ax_logo = fig.add_axes([0.001, 0.894, 0.105, 0.105], anchor='NW') + ax_logo.imshow(plt.imread(image_path)) + ax_logo.axis('off') + + if st_version != 'unknown': + fig.text( + 0.995, 0.980, st_version, ha='right', va='bottom', fontsize=8, color=self.KLIPPAIN_COLORS['purple'] + ) + + return fig + + def plot_belts_graph(self, data): + # Extract data from computation_result + signal1 = data['signal1'] + signal2 = data['signal2'] + similarity_factor = data['similarity_factor'] + mhi = data['mhi'] + signal1_belt = data['signal1_belt'] + signal2_belt = data['signal2_belt'] + kinematics = data['kinematics'] + accel_per_hz = data['accel_per_hz'] + st_version = data['st_version'] + measurements = data['measurements'] + max_freq = data['max_freq'] + + # Begin plotting + fig, ((ax_1, ax_3)) = plt.subplots( + 1, + 2, + gridspec_kw={ + 'width_ratios': [5, 3], + 'bottom': 0.080, + 'top': 0.840, + 'left': 0.050, + 'right': 0.985, + 'hspace': 0.166, + 'wspace': 0.138, + }, + ) + fig.set_size_inches(15, 7) + + # Add title + title_line1 = 'RELATIVE BELTS CALIBRATION TOOL' + fig.text( + 0.060, + 0.947, + title_line1, + ha='left', + va='bottom', + fontsize=20, + color=self.KLIPPAIN_COLORS['purple'], + weight='bold', + ) + try: + filename = measurements[0]['name'] + dt = datetime.strptime(f"{filename.split('_')[2]} {filename.split('_')[3]}", '%Y%m%d %H%M%S') + title_line2 = dt.strftime('%x %X') + if kinematics is not None: + title_line2 += ' -- ' + kinematics.upper() + ' kinematics' + except Exception: + title_line2 = measurements[0]['name'] + ' / ' + measurements[1]['name'] + fig.text(0.060, 0.939, title_line2, ha='left', va='top', fontsize=16, color=self.KLIPPAIN_COLORS['dark_purple']) + + # Add similarity and MHI if kinematics is CoreXY + if kinematics in {'limited_corexy', 'corexy', 'limited_corexz', 'corexz'}: + title_line3 = f'| Estimated similarity: {similarity_factor:.1f}%' + title_line4 = f'| {mhi} (experimental)' + fig.text( + 0.55, 0.985, title_line3, ha='left', va='top', fontsize=14, color=self.KLIPPAIN_COLORS['dark_purple'] + ) + fig.text( + 0.55, 0.950, title_line4, ha='left', va='top', fontsize=14, color=self.KLIPPAIN_COLORS['dark_purple'] + ) + + # Add accel_per_hz to the title + title_line5 = f'| Accel per Hz used: {accel_per_hz} mm/s²/Hz' + fig.text(0.551, 0.915, title_line5, ha='left', va='top', fontsize=10, color=self.KLIPPAIN_COLORS['dark_purple']) + + # ------------------------------------------------------------------------------------------------------------------------------------------------ + # Plot the two belts PSD signals + ax_1.plot(signal1.freqs, signal1.psd, label='Belt ' + signal1_belt, color=self.KLIPPAIN_COLORS['orange']) + ax_1.plot(signal2.freqs, signal2.psd, label='Belt ' + signal2_belt, color=self.KLIPPAIN_COLORS['purple']) + + psd_highest_max = max(signal1.psd.max(), signal2.psd.max()) + + # Trace and annotate the peaks on the graph + paired_peak_count = 0 + unpaired_peak_count = 0 + offsets_table_data = [] + + for _, (peak1, peak2) in enumerate(signal1.paired_peaks): + label = self.ALPHABET[paired_peak_count] + amplitude_offset = abs(((signal2.psd[peak2[0]] - signal1.psd[peak1[0]]) / psd_highest_max) * 100) + frequency_offset = abs(signal2.freqs[peak2[0]] - signal1.freqs[peak1[0]]) + offsets_table_data.append([f'Peaks {label}', f'{frequency_offset:.1f} Hz', f'{amplitude_offset:.1f} %']) + + ax_1.plot(signal1.freqs[peak1[0]], 
signal1.psd[peak1[0]], 'x', color='black') + ax_1.plot(signal2.freqs[peak2[0]], signal2.psd[peak2[0]], 'x', color='black') + ax_1.plot( + [signal1.freqs[peak1[0]], signal2.freqs[peak2[0]]], + [signal1.psd[peak1[0]], signal2.psd[peak2[0]]], + ':', + color='gray', + ) + + ax_1.annotate( + label + '1', + (signal1.freqs[peak1[0]], signal1.psd[peak1[0]]), + textcoords='offset points', + xytext=(8, 5), + ha='left', + fontsize=13, + color='black', + ) + ax_1.annotate( + label + '2', + (signal2.freqs[peak2[0]], signal2.psd[peak2[0]]), + textcoords='offset points', + xytext=(8, 5), + ha='left', + fontsize=13, + color='black', + ) + paired_peak_count += 1 + + for peak in signal1.unpaired_peaks: + ax_1.plot(signal1.freqs[peak], signal1.psd[peak], 'x', color='black') + ax_1.annotate( + str(unpaired_peak_count + 1), + (signal1.freqs[peak], signal1.psd[peak]), + textcoords='offset points', + xytext=(8, 5), + ha='left', + fontsize=13, + color='red', + weight='bold', + ) + unpaired_peak_count += 1 + + for peak in signal2.unpaired_peaks: + ax_1.plot(signal2.freqs[peak], signal2.psd[peak], 'x', color='black') + ax_1.annotate( + str(unpaired_peak_count + 1), + (signal2.freqs[peak], signal2.psd[peak]), + textcoords='offset points', + xytext=(8, 5), + ha='left', + fontsize=13, + color='red', + weight='bold', + ) + unpaired_peak_count += 1 + + # Add estimated similarity to the graph + ax_1_2 = ax_1.twinx() # To split the legends in two box + ax_1_2.yaxis.set_visible(False) + ax_1_2.plot([], [], ' ', label=f'Number of unpaired peaks: {unpaired_peak_count}') + + # Setting axis parameters, grid and graph title + ax_1.set_xlabel('Frequency (Hz)') + ax_1.set_xlim([0, max_freq]) + ax_1.set_ylabel('Power spectral density') + ax_1.set_ylim([0, psd_highest_max * 1.1]) + + ax_1.xaxis.set_minor_locator(matplotlib.ticker.AutoMinorLocator()) + ax_1.yaxis.set_minor_locator(matplotlib.ticker.AutoMinorLocator()) + ax_1.ticklabel_format(axis='y', style='scientific', scilimits=(0, 0)) + ax_1.grid(which='major', color='grey') + ax_1.grid(which='minor', color='lightgrey') + fontP = matplotlib.font_manager.FontProperties() + fontP.set_size('small') + ax_1.set_title( + 'Belts frequency profiles', + fontsize=14, + color=self.KLIPPAIN_COLORS['dark_orange'], + weight='bold', + ) + + # Print the table of offsets ontop of the graph below the original legend (upper right) + if len(offsets_table_data) > 0: + columns = [ + '', + 'Frequency delta', + 'Amplitude delta', + ] + offset_table = ax_1.table( + cellText=offsets_table_data, + colLabels=columns, + bbox=[0.66, 0.79, 0.33, 0.15], + loc='upper right', + cellLoc='center', + ) + offset_table.auto_set_font_size(False) + offset_table.set_fontsize(8) + offset_table.auto_set_column_width([0, 1, 2]) + offset_table.set_zorder(100) + cells = [key for key in offset_table.get_celld().keys()] + for cell in cells: + offset_table[cell].set_facecolor('white') + offset_table[cell].set_alpha(0.6) + + ax_1.legend(loc='upper left', prop=fontP) + ax_1_2.legend(loc='upper right', prop=fontP) + + # ------------------------------------------------------------------------------------------------------------------------------------------------ + ax_3.set_title( + 'Cross-belts comparison plot', fontsize=14, color=self.KLIPPAIN_COLORS['dark_orange'], weight='bold' + ) + + max_psd = max(np.max(signal1.psd), np.max(signal2.psd)) + ideal_line = np.linspace(0, max_psd * 1.1, 500) + green_boundary = ideal_line + (0.35 * max_psd * np.exp(-ideal_line / (0.6 * max_psd))) + ax_3.fill_betweenx(ideal_line, ideal_line, 
green_boundary, color='green', alpha=0.15) + ax_3.fill_between(ideal_line, ideal_line, green_boundary, color='green', alpha=0.15, label='Good zone') + ax_3.plot( + ideal_line, + ideal_line, + '--', + label='Ideal line', + color='red', + linewidth=2, + ) + + ax_3.plot(signal1.psd, signal2.psd, color='dimgrey', marker='o', markersize=1.5) + ax_3.fill_betweenx(signal2.psd, signal1.psd, color=self.KLIPPAIN_COLORS['red_pink'], alpha=0.1) + + paired_peak_count = 0 + unpaired_peak_count = 0 + + for _, (peak1, peak2) in enumerate(signal1.paired_peaks): + label = self.ALPHABET[paired_peak_count] + freq1 = signal1.freqs[peak1[0]] + freq2 = signal2.freqs[peak2[0]] + + if abs(freq1 - freq2) < 1: + ax_3.plot(signal1.psd[peak1[0]], signal2.psd[peak2[0]], marker='o', color='black', markersize=7) + ax_3.annotate( + f'{label}1/{label}2', + (signal1.psd[peak1[0]], signal2.psd[peak2[0]]), + textcoords='offset points', + xytext=(-7, 7), + fontsize=13, + color='black', + ) + else: + ax_3.plot( + signal1.psd[peak2[0]], + signal2.psd[peak2[0]], + marker='o', + color=self.KLIPPAIN_COLORS['purple'], + markersize=7, + ) + ax_3.plot( + signal1.psd[peak1[0]], + signal2.psd[peak1[0]], + marker='o', + color=self.KLIPPAIN_COLORS['orange'], + markersize=7, + ) + ax_3.annotate( + f'{label}1', + (signal1.psd[peak1[0]], signal2.psd[peak1[0]]), + textcoords='offset points', + xytext=(0, 7), + fontsize=13, + color='black', + ) + ax_3.annotate( + f'{label}2', + (signal1.psd[peak2[0]], signal2.psd[peak2[0]]), + textcoords='offset points', + xytext=(0, 7), + fontsize=13, + color='black', + ) + paired_peak_count += 1 + + for _, peak_index in enumerate(signal1.unpaired_peaks): + ax_3.plot( + signal1.psd[peak_index], + signal2.psd[peak_index], + marker='o', + color=self.KLIPPAIN_COLORS['orange'], + markersize=7, + ) + ax_3.annotate( + str(unpaired_peak_count + 1), + (signal1.psd[peak_index], signal2.psd[peak_index]), + textcoords='offset points', + fontsize=13, + weight='bold', + color=self.KLIPPAIN_COLORS['red_pink'], + xytext=(0, 7), + ) + unpaired_peak_count += 1 + + for _, peak_index in enumerate(signal2.unpaired_peaks): + ax_3.plot( + signal1.psd[peak_index], + signal2.psd[peak_index], + marker='o', + color=self.KLIPPAIN_COLORS['purple'], + markersize=7, + ) + ax_3.annotate( + str(unpaired_peak_count + 1), + (signal1.psd[peak_index], signal2.psd[peak_index]), + textcoords='offset points', + fontsize=13, + weight='bold', + color=self.KLIPPAIN_COLORS['red_pink'], + xytext=(0, 7), + ) + unpaired_peak_count += 1 + + ax_3.set_xlabel(f'Belt {signal1_belt}') + ax_3.set_ylabel(f'Belt {signal2_belt}') + ax_3.set_xlim([0, max_psd * 1.1]) + ax_3.set_ylim([0, max_psd * 1.1]) + + ax_3.xaxis.set_minor_locator(matplotlib.ticker.AutoMinorLocator()) + ax_3.yaxis.set_minor_locator(matplotlib.ticker.AutoMinorLocator()) + ax_3.ticklabel_format(style='scientific', scilimits=(0, 0)) + ax_3.grid(which='major', color='grey') + ax_3.grid(which='minor', color='lightgrey') + + fontP = matplotlib.font_manager.FontProperties() + fontP.set_size('medium') + ax_3.legend(loc='upper left', prop=fontP) + + # ------------------------------------------------------------------------------------------------------------------------------------------------ + # Adding logo + current_dir = os.path.dirname(__file__) + image_path = os.path.join(current_dir, 'klippain.png') + ax_logo = fig.add_axes([0.001, 0.894, 0.105, 0.105], anchor='NW') + ax_logo.imshow(plt.imread(image_path)) + ax_logo.axis('off') + + # Adding version + if st_version != 'unknown': + fig.text( + 
0.995, 0.980, st_version, ha='right', va='bottom', fontsize=8, color=self.KLIPPAIN_COLORS['purple'] + ) + + return fig + + def plot_input_shaper_graph(self, data): + measurements = data['measurements'] + compat = data['compat'] + max_smoothing_computed = data['max_smoothing_computed'] + max_freq = data['max_freq'] + calibration_data = data['calibration_data'] + shapers = data['shapers'] + shaper_table_data = data['shaper_table_data'] + shaper_choices = data['shaper_choices'] + peaks = data['peaks'] + peaks_freqs = data['peaks_freqs'] + peaks_threshold = data['peaks_threshold'] + fr = data['fr'] + zeta = data['zeta'] + t = data['t'] + bins = data['bins'] + pdata = data['pdata'] + additional_shapers = data['additional_shapers'] + st_version = data['st_version'] + + fig, ((ax_1, ax_3), (ax_2, ax_4)) = plt.subplots( + 2, + 2, + gridspec_kw={ + 'height_ratios': [4, 3], + 'width_ratios': [5, 4], + 'bottom': 0.050, + 'top': 0.890, + 'left': 0.048, + 'right': 0.966, + 'hspace': 0.169, + 'wspace': 0.150, + }, + ) + ax_4.remove() + fig.set_size_inches(15, 11.6) + + # Add a title with some test info + title_line1 = 'INPUT SHAPER CALIBRATION TOOL' + fig.text( + 0.065, + 0.965, + title_line1, + ha='left', + va='bottom', + fontsize=20, + color=self.KLIPPAIN_COLORS['purple'], + weight='bold', + ) + try: + filename_parts = measurements[0]['name'].split('_') + dt = datetime.strptime(f'{filename_parts[2]} {filename_parts[3]}', '%Y%m%d %H%M%S') + title_line2 = dt.strftime('%x %X') + ' -- ' + filename_parts[1].upper() + ' axis' + if compat: + title_line3 = '| Older Klipper version detected, damping ratio' + title_line4 = '| and SCV are not used for filter recommendations!' + title_line5 = ( + f'| Accel per Hz used: {self.accel_per_hz} mm/s²/Hz' if self.accel_per_hz is not None else '' + ) + else: + max_smoothing_string = ( + f'maximum ({max_smoothing_computed:0.3f})' + if self.max_smoothing is None + else f'{self.max_smoothing:0.3f}' + ) + title_line3 = f'| Square corner velocity: {self.scv} mm/s' + title_line4 = f'| Allowed smoothing: {max_smoothing_string}' + title_line5 = ( + f'| Accel per Hz used: {self.accel_per_hz} mm/s²/Hz' if self.accel_per_hz is not None else '' + ) + except Exception: + title_line2 = measurements[0]['name'] + title_line3 = '' + title_line4 = '' + title_line5 = '' + fig.text(0.065, 0.957, title_line2, ha='left', va='top', fontsize=16, color=self.KLIPPAIN_COLORS['dark_purple']) + fig.text(0.50, 0.990, title_line3, ha='left', va='top', fontsize=14, color=self.KLIPPAIN_COLORS['dark_purple']) + fig.text(0.50, 0.968, title_line4, ha='left', va='top', fontsize=14, color=self.KLIPPAIN_COLORS['dark_purple']) + fig.text(0.501, 0.945, title_line5, ha='left', va='top', fontsize=10, color=self.KLIPPAIN_COLORS['dark_purple']) + + # ------------------------------------------------------------------------------------------------------------------------------------------------ + freqs = calibration_data.freqs + psd = calibration_data.psd_sum + px = calibration_data.psd_x + py = calibration_data.psd_y + pz = calibration_data.psd_z + + fontP = matplotlib.font_manager.FontProperties() + fontP.set_size('x-small') + + ax_1.set_xlabel('Frequency (Hz)') + ax_1.set_xlim([0, max_freq]) + ax_1.set_ylabel('Power spectral density') + ax_1.set_ylim([0, psd.max() + psd.max() * 0.05]) + + ax_1.plot(freqs, psd, label='X+Y+Z', color='purple', zorder=5) + ax_1.plot(freqs, px, label='X', color='red') + ax_1.plot(freqs, py, label='Y', color='green') + ax_1.plot(freqs, pz, label='Z', color='blue') + + 
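# The code that follows overlays each candidate shaper's frequency response (shaper.vals,
# dotted lines) and the measured PSD multiplied by that response ("With ... applied"),
# which is roughly an estimate of the vibration spectrum remaining after the filter.
# Sketch with a fabricated resonance and a fabricated filter response:
import numpy as np

freqs = np.linspace(1.0, 200.0, 500)
psd = np.exp(-((freqs - 45.0) ** 2) / (2.0 * 4.0 ** 2))      # fake resonance around 45 Hz
shaper_vals = 1.0 / (1.0 + (freqs / 45.0) ** 4)              # fake attenuation curve in [0, 1]
remaining_psd = psd * shaper_vals                            # what gets plotted as "applied"
print(f'Peak attenuated from {psd.max():.2f} to {remaining_psd.max():.2f}')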
ax_1.xaxis.set_minor_locator(matplotlib.ticker.MultipleLocator(5)) + ax_1.yaxis.set_minor_locator(matplotlib.ticker.AutoMinorLocator()) + ax_1.ticklabel_format(axis='y', style='scientific', scilimits=(0, 0)) + ax_1.grid(which='major', color='grey') + ax_1.grid(which='minor', color='lightgrey') + + ax_1_2 = ax_1.twinx() + ax_1_2.yaxis.set_visible(False) + + for shaper in shapers: + ax_1_2.plot(freqs, shaper.vals, label=shaper.name.upper(), linestyle='dotted') + + # Draw the shappers curves and add their specific parameters in the legend + for shaper in shaper_table_data['shapers']: + if shaper['type'] == shaper_choices[0]: + ax_1.plot(freqs, psd * shaper['vals'], label=f'With {shaper_choices[0]} applied', color='cyan') + if len(shaper_choices) == 2 and shaper['type'] == shaper_choices[1]: + ax_1.plot(freqs, psd * shaper['vals'], label=f'With {shaper_choices[1]} applied', color='lime') + + # Draw detected peaks and their respective labels + ax_1.plot(peaks_freqs, psd[peaks], 'x', color='black', markersize=8) + for idx, peak in enumerate(peaks): + if psd[peak] > peaks_threshold[1]: + fontcolor = 'red' + fontweight = 'bold' + else: + fontcolor = 'black' + fontweight = 'normal' + ax_1.annotate( + f'{idx+1}', + (freqs[peak], psd[peak]), + textcoords='offset points', + xytext=(8, 5), + ha='left', + fontsize=13, + color=fontcolor, + weight=fontweight, + ) + ax_1.axhline(y=peaks_threshold[0], color='black', linestyle='--', linewidth=0.5) + ax_1.axhline(y=peaks_threshold[1], color='black', linestyle='--', linewidth=0.5) + ax_1.fill_between(freqs, 0, peaks_threshold[0], color='green', alpha=0.15, label='Relax Region') + ax_1.fill_between( + freqs, peaks_threshold[0], peaks_threshold[1], color='orange', alpha=0.2, label='Warning Region' + ) + + # Add the main resonant frequency and damping ratio of the axis to the graph title + ax_1.set_title( + f'Axis Frequency Profile (ω0={fr:.1f}Hz, ζ={zeta:.3f})', + fontsize=14, + color=self.KLIPPAIN_COLORS['dark_orange'], + weight='bold', + ) + ax_1.legend(loc='upper left', prop=fontP) + ax_1_2.legend(loc='upper right', prop=fontP) + + # ------------------------------------------------------------------------------------------------------------------ + # Plot a time-frequency spectrogram to see how the system respond over time during the + # resonnance test. This can highlight hidden spots from the standard PSD graph from other harmonics + ax_2.set_title( + 'Time-Frequency Spectrogram', fontsize=14, color=self.KLIPPAIN_COLORS['dark_orange'], weight='bold' + ) + + # We need to normalize the data to get a proper signal on the spectrogram + # However, while using "LogNorm" provide too much background noise, using + # "Normalize" make only the resonnance appearing and hide interesting elements + # So we need to filter out the lower part of the data (ie. find the proper vmin for LogNorm) + vmin_value = np.percentile(pdata, self.SPECTROGRAM_LOW_PERCENTILE_FILTER) + cm = 'inferno' + norm = matplotlib.colors.LogNorm(vmin=vmin_value) + + # Draw the spectrogram using imgshow that is better suited here than pcolormesh since its result is already rasterized and + # we doesn't need to keep vector graphics when saving to a final .png file. Using it also allow to + # save ~150-200MB of RAM during the "fig.savefig" operation. 
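# As the comment above explains, the spectrogram is normalized with a LogNorm whose vmin is
# taken from a low percentile of the data: a plain linear Normalize hides everything but
# the main resonance, while an unclamped LogNorm drowns the plot in background noise.
# Standalone sketch of that normalization choice (random data stands in for the real
# spectrogram):
import matplotlib
matplotlib.use('Agg')
import matplotlib.pyplot as plt
import numpy as np
from matplotlib.colors import LogNorm

rng = np.random.default_rng(0)
pdata = rng.random((300, 200)) ** 4 + 1e-9   # skewed, strictly positive fake spectrogram

vmin_value = np.percentile(pdata, 5)         # SPECTROGRAM_LOW_PERCENTILE_FILTER = 5
fig, ax = plt.subplots()
ax.imshow(pdata.T, norm=LogNorm(vmin=vmin_value), cmap='inferno',
          aspect='auto', origin='lower', interpolation='antialiased')
fig.savefig('spectrogram_example.png', dpi=150)
plt.close(fig)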
+ ax_2.imshow( + pdata.T, + norm=norm, + cmap=cm, + aspect='auto', + extent=[t[0], t[-1], bins[0], bins[-1]], + origin='lower', + interpolation='antialiased', + ) + + ax_2.set_xlim([0.0, max_freq]) + ax_2.set_ylabel('Time (s)') + ax_2.set_xlabel('Frequency (Hz)') + + # Add peaks lines in the spectrogram to get hint from peaks found in the first graph + for idx, peak in enumerate(peaks): + ax_2.axvline(peak, color='cyan', linestyle='dotted', linewidth=1) + ax_2.annotate( + f'Peak {idx+1}', + (peak, bins[-1] * 0.9), + textcoords='data', + color='cyan', + rotation=90, + fontsize=10, + verticalalignment='top', + horizontalalignment='right', + ) + + # ------------------------------------------------------------------------------------------------------------------ + ax_3.set_title( + 'Filters performances over acceleration', + fontsize=14, + color=self.KLIPPAIN_COLORS['dark_orange'], + weight='bold', + ) + ax_3.set_xlabel('Max Acceleration') + ax_3.set_ylabel('Remaining Vibrations (%)') + + fontP = matplotlib.font_manager.FontProperties() + fontP.set_size('x-small') + + ax_3.xaxis.set_minor_locator(matplotlib.ticker.MultipleLocator(1000)) + ax_3.yaxis.set_minor_locator(matplotlib.ticker.AutoMinorLocator()) + ax_3.grid(which='major', color='grey') + ax_3.grid(which='minor', color='lightgrey') + + shaper_data = {} + + # Extract data from additional_shapers first + for _, shapers in additional_shapers.items(): + for shaper in shapers: + shaper_type = shaper.name.upper() + if shaper_type not in shaper_data: + shaper_data[shaper_type] = [] + shaper_data[shaper_type].append( + { + 'max_accel': shaper.max_accel, + 'vibrs': shaper.vibrs * 100.0, + } + ) + + # Extract data from shaper_table_data and insert into shaper_data + max_shaper_vibrations = 0 + for shaper in shaper_table_data['shapers']: + shaper_type = shaper['type'] + if shaper_type not in shaper_data: + shaper_data[shaper_type] = [] + max_shaper_vibrations = max(max_shaper_vibrations, float(shaper['vibrations']) * 100.0) + shaper_data[shaper_type].append( + { + 'max_accel': float(shaper['max_accel']), + 'vibrs': float(shaper['vibrations']) * 100.0, + } + ) + + # Calculate the maximum `max_accel` for points below the thresholds to get a good plot with + # continuous lines and a zoom on the graph to show details at low vibrations + min_accel_limit = 99999 + max_accel_limit = 0 + max_accel_limit_zoom = 0 + for data in shaper_data.values(): + min_accel_limit = min(min_accel_limit, min(d['max_accel'] for d in data)) + max_accel_limit = max( + max_accel_limit, max(d['max_accel'] for d in data if d['vibrs'] <= self.MAX_VIBRATIONS_PLOTTED) + ) + max_accel_limit_zoom = max( + max_accel_limit_zoom, + max( + d['max_accel'] + for d in data + if d['vibrs'] <= max_shaper_vibrations * self.MAX_VIBRATIONS_PLOTTED_ZOOM + ), + ) + + # Add a zoom axes on the graph to show details at low vibrations + zoomed_window = np.clip(max_shaper_vibrations * self.MAX_VIBRATIONS_PLOTTED_ZOOM, 0, 20) + axins = ax_3.inset_axes( + [0.575, 0.125, 0.40, 0.45], + xlim=(min_accel_limit * 0.95, max_accel_limit_zoom * 1.1), + ylim=(-0.5, zoomed_window), + ) + ax_3.indicate_inset_zoom(axins, edgecolor=self.KLIPPAIN_COLORS['purple'], linewidth=3) + axins.xaxis.set_minor_locator(matplotlib.ticker.MultipleLocator(500)) + axins.yaxis.set_minor_locator(matplotlib.ticker.AutoMinorLocator()) + axins.grid(which='major', color='grey') + axins.grid(which='minor', color='lightgrey') + + # Draw the green zone on both axes to highlight the low vibrations zone + number_of_interpolated_points = 100 
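# The shaper-performance code further below removes duplicated max_accel samples
# (np.unique keeps the first occurrence of each value) so that interp1d receives strictly
# increasing abscissas, then densifies each shaper's remaining-vibrations curve with a
# cubic interpolation for a smoother plot. Condensed sketch with illustrative data:
import numpy as np
from scipy.interpolate import interp1d

max_accel_values = np.array([1500.0, 1500.0, 3000.0, 4500.0, 6000.0, 7500.0])
vibrs_values = np.array([0.4, 0.4, 1.1, 2.9, 6.5, 14.2])      # remaining vibrations in %

unique_accels, unique_indices = np.unique(max_accel_values, return_index=True)
interp_func = interp1d(unique_accels, vibrs_values[unique_indices], kind='cubic')
accel_fine = np.linspace(unique_accels.min(), unique_accels.max(), 100)
vibrs_fine = interp_func(accel_fine)   # smooth curve ready to be plotted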
+ x_fill = np.linspace(min_accel_limit * 0.95, max_accel_limit * 1.1, number_of_interpolated_points) + y_fill = np.full_like(x_fill, 5.0) + ax_3.axhline(y=5.0, color='black', linestyle='--', linewidth=0.5) + ax_3.fill_between(x_fill, -0.5, y_fill, color='green', alpha=0.15) + if zoomed_window > 5.0: + axins.axhline(y=5.0, color='black', linestyle='--', linewidth=0.5) + axins.fill_between(x_fill, -0.5, y_fill, color='green', alpha=0.15) + + # Plot each shaper remaining vibrations response over acceleration + max_vibrations = 0 + for _, (shaper_type, data) in enumerate(shaper_data.items()): + max_accel_values = np.array([d['max_accel'] for d in data]) + vibrs_values = np.array([d['vibrs'] for d in data]) + + # remove duplicate values in max_accel_values and delete the corresponding vibrs_values + # and interpolate the curves to get them smoother with more datapoints + unique_max_accel_values, unique_indices = np.unique(max_accel_values, return_index=True) + max_accel_values = unique_max_accel_values + vibrs_values = vibrs_values[unique_indices] + interp_func = interp1d(max_accel_values, vibrs_values, kind='cubic') + max_accel_fine = np.linspace(max_accel_values.min(), max_accel_values.max(), number_of_interpolated_points) + vibrs_fine = interp_func(max_accel_fine) + + ax_3.plot(max_accel_fine, vibrs_fine, label=f'{shaper_type}', zorder=10) + axins.plot(max_accel_fine, vibrs_fine, label=f'{shaper_type}', zorder=15) + max_vibrations = max(max_vibrations, max(vibrs_fine)) + + ax_3.set_xlim([min_accel_limit * 0.95, max_accel_limit * 1.1]) + ax_3.set_ylim([-0.5, np.clip(max_vibrations * 1.05, 50, self.MAX_VIBRATIONS_PLOTTED)]) + ax_3.legend(loc='best', prop=fontP) + + # ------------------------------------------------------------------------------------------------------------------------------------------------ + # Print the table of offsets ontop of the graph below the original legend (upper right) + columns = ['Type', 'Frequency', 'Vibrations', 'Smoothing', 'Max Accel'] + table_data = [] + + for shaper_info in shaper_table_data['shapers']: + row = [ + f'{shaper_info["type"].upper()}', + f'{shaper_info["frequency"]:.1f} Hz', + f'{shaper_info["vibrations"] * 100:.1f} %', + f'{shaper_info["smoothing"]:.3f}', + f'{round(shaper_info["max_accel"] / 10) * 10:.0f}', + ] + table_data.append(row) + table = plt.table(cellText=table_data, colLabels=columns, bbox=[1.130, -0.4, 0.803, 0.25], cellLoc='center') + table.auto_set_font_size(False) + table.set_fontsize(10) + table.auto_set_column_width([0, 1, 2, 3, 4]) + table.set_zorder(100) + + # Add the recommendations and damping ratio using fig.text + fig.text( + 0.585, + 0.235, + f'Estimated damping ratio (ζ): {shaper_table_data["damping_ratio"]:.3f}', + fontsize=14, + color=self.KLIPPAIN_COLORS['purple'], + ) + if len(shaper_table_data['recommendations']) == 1: + fig.text( + 0.585, + 0.200, + shaper_table_data['recommendations'][0], + fontsize=14, + color=self.KLIPPAIN_COLORS['red_pink'], + ) + elif len(shaper_table_data['recommendations']) == 2: + fig.text( + 0.585, + 0.200, + shaper_table_data['recommendations'][0], + fontsize=14, + color=self.KLIPPAIN_COLORS['red_pink'], + ) + fig.text( + 0.585, + 0.175, + shaper_table_data['recommendations'][1], + fontsize=14, + color=self.KLIPPAIN_COLORS['red_pink'], + ) + # ------------------------------------------------------------------------------------------------------------------------------------------------ + + # Adding a small Klippain logo to the top left corner of the figure + current_dir = 
os.path.dirname(__file__) + image_path = os.path.join(current_dir, 'klippain.png') + ax_logo = fig.add_axes([0.001, 0.924, 0.075, 0.075], anchor='NW') + ax_logo.imshow(plt.imread(image_path)) + ax_logo.axis('off') + + # Adding Shake&Tune version in the top right corner + if st_version != 'unknown': + fig.text( + 0.995, 0.985, st_version, ha='right', va='bottom', fontsize=8, color=self.KLIPPAIN_COLORS['purple'] + ) + + return fig + + def plot_vibrations_graph(self, data): + measurements = data['measurements'] + all_speeds = data['all_speeds'] + all_angles = data['all_angles'] + all_angles_energy = data['all_angles_energy'] + good_speeds = data['good_speeds'] + good_angles = data['good_angles'] + kinematics = data['kinematics'] + accel = data['accel'] + motors = data['motors'] + motors_config_differences = data['motors_config_differences'] + symmetry_factor = data['symmetry_factor'] + spectrogram_data = data['spectrogram_data'] + sp_min_energy = data['sp_min_energy'] + sp_max_energy = data['sp_max_energy'] + sp_variance_energy = data['sp_variance_energy'] + vibration_metric = data['vibration_metric'] + num_peaks = data['num_peaks'] + vibration_peaks = data['vibration_peaks'] + target_freqs = data['target_freqs'] + main_angles = data['main_angles'] + global_motor_profile = data['global_motor_profile'] + motor_profiles = data['motor_profiles'] + max_freq = data['max_freq'] + st_version = data['st_version'] + + fig, ((ax_1, ax_2, ax_3), (ax_4, ax_5, ax_6)) = plt.subplots( + 2, + 3, + gridspec_kw={ + 'height_ratios': [1, 1], + 'width_ratios': [4, 8, 6], + 'bottom': 0.050, + 'top': 0.890, + 'left': 0.040, + 'right': 0.985, + 'hspace': 0.166, + 'wspace': 0.138, + }, + ) + + # Transform ax_3 and ax_4 to polar plots + ax_1.remove() + ax_1 = fig.add_subplot(2, 3, 1, projection='polar') + ax_4.remove() + ax_4 = fig.add_subplot(2, 3, 4, projection='polar') + + # Set the global .png figure size + fig.set_size_inches(20, 11.5) + + # Add title + title_line1 = 'MACHINE VIBRATIONS ANALYSIS TOOL' + fig.text( + 0.060, + 0.965, + title_line1, + ha='left', + va='bottom', + fontsize=20, + color=self.KLIPPAIN_COLORS['purple'], + weight='bold', + ) + try: + filename_parts = measurements[0]['name'].split('_') + dt = datetime.strptime(f"{filename_parts[4]} {filename_parts[5].split('-')[0]}", '%Y%m%d %H%M%S') + title_line2 = dt.strftime('%x %X') + if accel is not None: + title_line2 += ' at ' + str(accel) + ' mm/s² -- ' + kinematics.upper() + ' kinematics' + except Exception: + title_line2 = measurements[0]['name'] + fig.text(0.060, 0.957, title_line2, ha='left', va='top', fontsize=16, color=self.KLIPPAIN_COLORS['dark_purple']) + + # ------------------------------------------------------------------------------------------------------------------------------------------------ + # Plot the motors infos to the top of the graph if they are detected / specified (not mandatory for CLI mode) + if motors is not None and len(motors) == 2: + motor_details = [(motors[0], 'X motor'), (motors[1], 'Y motor')] + + distance = 0.12 + if motors[0].get_config('autotune_enabled'): + distance = 0.27 + config_blocks = [ + f"| {lbl}: {mot.get_config('motor').upper()} on {mot.get_config('tmc').upper()} @ {mot.get_config('voltage'):0.1f}V {mot.get_config('run_current'):0.2f}A - {mot.get_config('microsteps')}usteps" + for mot, lbl in motor_details + ] + config_blocks.append( + f'| TMC Autotune enabled (PWM freq target: X={int(motors[0].get_config("pwm_freq_target")/1000)}kHz / Y={int(motors[1].get_config("pwm_freq_target")/1000)}kHz)' + ) + 
else: + config_blocks = [ + f"| {lbl}: {mot.get_config('tmc').upper()} @ {mot.get_config('run_current'):0.2f}A - {mot.get_config('microsteps')}usteps" + for mot, lbl in motor_details + ] + config_blocks.append('| TMC Autotune not detected') + + for idx, block in enumerate(config_blocks): + fig.text( + 0.41, + 0.990 - 0.015 * idx, + block, + ha='left', + va='top', + fontsize=10, + color=self.KLIPPAIN_COLORS['dark_purple'], + ) + + tmc_registers = motors[0].get_registers() + idx = -1 + for idx, (register, settings) in enumerate(tmc_registers.items()): + settings_str = ' '.join(f'{k}={v}' for k, v in settings.items()) + tmc_block = f'| {register.upper()}: {settings_str}' + fig.text( + 0.41 + distance, + 0.990 - 0.015 * idx, + tmc_block, + ha='left', + va='top', + fontsize=10, + color=self.KLIPPAIN_COLORS['dark_purple'], + ) + + if motors_config_differences is not None: + differences_text = f'| Y motor diff: {motors_config_differences}' + fig.text( + 0.41 + distance, + 0.990 - 0.015 * (idx + 1), + differences_text, + ha='left', + va='top', + fontsize=10, + color=self.KLIPPAIN_COLORS['dark_purple'], + ) + + # ------------------------------------------------------------------------------------------------------------------------------------------------ + # Plot the angle profile in polar coordinates + angles_radians = np.deg2rad(all_angles) + + ax_1.set_title( + 'Polar angle energy profile', fontsize=14, color=self.KLIPPAIN_COLORS['dark_orange'], weight='bold' + ) + ax_1.set_theta_zero_location('E') + ax_1.set_theta_direction(1) + + ax_1.plot(angles_radians, all_angles_energy, color=self.KLIPPAIN_COLORS['purple'], zorder=5) + ax_1.fill(angles_radians, all_angles_energy, color=self.KLIPPAIN_COLORS['purple'], alpha=0.3) + ax_1.set_xlim([0, np.deg2rad(360)]) + ymax = all_angles_energy.max() * 1.05 + ax_1.set_ylim([0, ymax]) + ax_1.set_thetagrids([theta * 15 for theta in range(360 // 15)]) + + ax_1.text( + 0, + 0, + f'Symmetry: {symmetry_factor:.1f}%', + ha='center', + va='center', + color=self.KLIPPAIN_COLORS['red_pink'], + fontsize=12, + fontweight='bold', + zorder=6, + ) + + for _, (start, end, _) in enumerate(good_angles): + ax_1.axvline( + angles_radians[start], + all_angles_energy[start] / ymax, + color=self.KLIPPAIN_COLORS['red_pink'], + linestyle='dotted', + linewidth=1.5, + ) + ax_1.axvline( + angles_radians[end], + all_angles_energy[end] / ymax, + color=self.KLIPPAIN_COLORS['red_pink'], + linestyle='dotted', + linewidth=1.5, + ) + ax_1.fill_between( + angles_radians[start:end], + all_angles_energy[start:end], + all_angles_energy.max() * 1.05, + color='green', + alpha=0.2, + ) + + ax_1.xaxis.set_minor_locator(matplotlib.ticker.AutoMinorLocator()) + ax_1.yaxis.set_minor_locator(matplotlib.ticker.AutoMinorLocator()) + ax_1.grid(which='major', color='grey') + ax_1.grid(which='minor', color='lightgrey') + + # Polar plot doesn't follow the gridspec margin, so we adjust it manually here + pos = ax_1.get_position() + new_pos = [pos.x0 - 0.01, pos.y0 - 0.01, pos.width, pos.height] + ax_1.set_position(new_pos) + + # ---------------------------------------------------------------------------------------------------------------------------- + # Plot the vibration spectrogram in polar coordinates + + # Assuming speeds defines the radial distance from the center, we need to create a meshgrid + # for both angles and speeds to map the spectrogram data onto a polar plot correctly + radius, theta = np.meshgrid(all_speeds, angles_radians) + + ax_4.set_title( + 'Polar vibrations heatmap', + fontsize=14, + 
color=self.KLIPPAIN_COLORS['dark_orange'], + weight='bold', + va='bottom', + ) + ax_4.set_theta_zero_location('E') + ax_4.set_theta_direction(1) + + ax_4.pcolormesh( + theta, radius, spectrogram_data, norm=matplotlib.colors.LogNorm(), cmap='inferno', shading='auto' + ) + ax_4.set_thetagrids([theta * 15 for theta in range(360 // 15)]) + ax_4.tick_params(axis='y', which='both', colors='white', labelsize='medium') + ax_4.set_ylim([0, max(all_speeds)]) + + # Polar plot doesn't follow the gridspec margin, so we adjust it manually here + pos = ax_4.get_position() + new_pos = [pos.x0 - 0.01, pos.y0 - 0.01, pos.width, pos.height] + ax_4.set_position(new_pos) + + # ---------------------------------------------------------------------------------------------------------------------------- + # Plot global speed profile + ax_2.set_title( + 'Global speed energy profile', fontsize=14, color=self.KLIPPAIN_COLORS['dark_orange'], weight='bold' + ) + ax_2.set_xlabel('Speed (mm/s)') + ax_2.set_ylabel('Energy') + ax_2_2 = ax_2.twinx() + ax_2_2.yaxis.set_visible(False) + + ax_2.plot(all_speeds, sp_min_energy, label='Minimum', color=self.KLIPPAIN_COLORS['dark_purple'], zorder=5) + ax_2.plot(all_speeds, sp_max_energy, label='Maximum', color=self.KLIPPAIN_COLORS['purple'], zorder=5) + ax_2.plot( + all_speeds, + sp_variance_energy, + label='Variance', + color=self.KLIPPAIN_COLORS['orange'], + zorder=5, + linestyle='--', + ) + ax_2_2.plot( + all_speeds, + vibration_metric, + label=f'Vibration metric ({num_peaks} bad peaks)', + color=self.KLIPPAIN_COLORS['red_pink'], + zorder=5, + ) + + ax_2.set_xlim([all_speeds.min(), all_speeds.max()]) + ax_2.set_ylim([0, sp_max_energy.max() * 1.15]) + + y2min = -(vibration_metric.max() * 0.025) + y2max = vibration_metric.max() * 1.07 + ax_2_2.set_ylim([y2min, y2max]) + + if vibration_peaks is not None and len(vibration_peaks) > 0: + ax_2_2.plot( + all_speeds[vibration_peaks], + vibration_metric[vibration_peaks], + 'x', + color='black', + markersize=8, + zorder=10, + ) + for idx, peak in enumerate(vibration_peaks): + ax_2_2.annotate( + f'{idx+1}', + (all_speeds[peak], vibration_metric[peak]), + textcoords='offset points', + xytext=(5, 5), + fontweight='bold', + ha='left', + fontsize=13, + color=self.KLIPPAIN_COLORS['red_pink'], + zorder=10, + ) + + for idx, (start, end, _) in enumerate(good_speeds): + # ax_2_2.axvline(all_speeds[start], color=self.KLIPPAIN_COLORS['red_pink'], linestyle='dotted', linewidth=1.5, zorder=8) + # ax_2_2.axvline(all_speeds[end], color=self.KLIPPAIN_COLORS['red_pink'], linestyle='dotted', linewidth=1.5, zorder=8) + ax_2_2.fill_between( + all_speeds[start:end], + y2min, + vibration_metric[start:end], + color='green', + alpha=0.2, + label=f'Zone {idx+1}: {all_speeds[start]:.1f} to {all_speeds[end]:.1f} mm/s', + ) + + ax_2.xaxis.set_minor_locator(matplotlib.ticker.AutoMinorLocator()) + ax_2.yaxis.set_minor_locator(matplotlib.ticker.AutoMinorLocator()) + ax_2.grid(which='major', color='grey') + ax_2.grid(which='minor', color='lightgrey') + + fontP = matplotlib.font_manager.FontProperties() + fontP.set_size('small') + ax_2.legend(loc='upper left', prop=fontP) + ax_2_2.legend(loc='upper right', prop=fontP) + + # ------------------------------------------------------------------------------------------------------------------------------------------------ + # Plot the angular speed profile + ax_3.set_title( + 'Angular speed energy profiles', fontsize=14, color=self.KLIPPAIN_COLORS['dark_orange'], weight='bold' + ) + ax_3.set_xlabel('Speed (mm/s)') + 
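# Sketch of the polar heatmap technique used for ax_4 above: speeds act as the radial
# coordinate and angles as the angular one, so a meshgrid over both maps the
# (angle x speed) spectrogram onto a polar pcolormesh (fabricated data below):
import matplotlib
matplotlib.use('Agg')
import matplotlib.pyplot as plt
import numpy as np
from matplotlib.colors import LogNorm

all_speeds = np.linspace(20.0, 200.0, 90)
all_angles = np.arange(0.0, 360.0, 2.0)
angles_radians = np.deg2rad(all_angles)
rng = np.random.default_rng(1)
spectrogram_data = rng.random((len(all_angles), len(all_speeds))) + 1e-6

radius, theta = np.meshgrid(all_speeds, angles_radians)
fig = plt.figure()
ax = fig.add_subplot(projection='polar')
ax.pcolormesh(theta, radius, spectrogram_data, norm=LogNorm(), cmap='inferno', shading='auto')
fig.savefig('polar_heatmap_example.png', dpi=150)
plt.close(fig)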
ax_3.set_ylabel('Energy') + + # Define mappings for labels and colors to simplify plotting commands + angle_settings = { + 0: ('X (0 deg)', 'purple', 10), + 90: ('Y (90 deg)', 'dark_purple', 5), + 45: ('A (45 deg)' if kinematics in {'corexy', 'limited_corexy'} else '45 deg', 'orange', 10), + 135: ('B (135 deg)' if kinematics in {'corexy', 'limited_corexy'} else '135 deg', 'dark_orange', 5), + } + + # Plot each angle using settings from the dictionary + for angle, (label, color, zorder) in angle_settings.items(): + idx = np.searchsorted(all_angles, angle, side='left') + ax_3.plot(all_speeds, spectrogram_data[idx], label=label, color=self.KLIPPAIN_COLORS[color], zorder=zorder) + + ax_3.set_xlim([all_speeds.min(), all_speeds.max()]) + max_value = max(spectrogram_data[angle].max() for angle in {0, 45, 90, 135}) + ax_3.set_ylim([0, max_value * 1.1]) + + ax_3.xaxis.set_minor_locator(matplotlib.ticker.AutoMinorLocator()) + ax_3.yaxis.set_minor_locator(matplotlib.ticker.AutoMinorLocator()) + ax_3.grid(which='major', color='grey') + ax_3.grid(which='minor', color='lightgrey') + + fontP = matplotlib.font_manager.FontProperties() + fontP.set_size('small') + ax_3.legend(loc='upper right', prop=fontP) + + # ------------------------------------------------------------------------------------------------------------------------------------------------ + # Plot the vibration spectrogram + ax_5.set_title('Vibrations heatmap', fontsize=14, color=self.KLIPPAIN_COLORS['dark_orange'], weight='bold') + ax_5.set_xlabel('Speed (mm/s)') + ax_5.set_ylabel('Angle (deg)') + + ax_5.imshow( + spectrogram_data, + norm=matplotlib.colors.LogNorm(), + cmap='inferno', + aspect='auto', + extent=[all_speeds[0], all_speeds[-1], all_angles[0], all_angles[-1]], + origin='lower', + interpolation='antialiased', + ) + + # Add peaks lines in the spectrogram to get hint from peaks found in the first graph + if vibration_peaks is not None and len(vibration_peaks) > 0: + for idx, peak in enumerate(vibration_peaks): + ax_5.axvline(all_speeds[peak], color='cyan', linewidth=0.75) + ax_5.annotate( + f'Peak {idx+1}', + (all_speeds[peak], all_angles[-1] * 0.9), + textcoords='data', + color='cyan', + rotation=90, + fontsize=10, + verticalalignment='top', + horizontalalignment='right', + ) + + # ------------------------------------------------------------------------------------------------------------------------------------------------ + # Plot the motor profiles + ax_6.set_title('Motor frequency profile', fontsize=14, color=self.KLIPPAIN_COLORS['dark_orange'], weight='bold') + ax_6.set_ylabel('Energy') + ax_6.set_xlabel('Frequency (Hz)') + + ax_6_2 = ax_6.twinx() + ax_6_2.yaxis.set_visible(False) + + # Global weighted average motor profile + ax_6.plot(target_freqs, global_motor_profile, label='Combined', color=self.KLIPPAIN_COLORS['purple'], zorder=5) + max_value = global_motor_profile.max() + + # Mapping of angles to axis names + angle_settings = {0: 'X', 90: 'Y', 45: 'A', 135: 'B'} + + # And then plot the motor profiles at each measured angles + for angle in main_angles: + profile_max = motor_profiles[angle].max() + if profile_max > max_value: + max_value = profile_max + label = f'{angle_settings[angle]} ({angle} deg)' if angle in angle_settings else f'{angle} deg' + ax_6.plot(target_freqs, motor_profiles[angle], linestyle='--', label=label, zorder=2) + + ax_6.set_xlim([0, max_freq]) + ax_6.set_ylim([0, max_value * 1.1]) + ax_6.ticklabel_format(axis='y', style='scientific', scilimits=(0, 0)) + + # Then add the motor resonance peak to the 
graph and print some infos about it + motor_fr, motor_zeta, motor_res_idx, lowfreq_max = compute_mechanical_parameters( + global_motor_profile, target_freqs, 30 + ) + if lowfreq_max: + ConsoleOutput.print( + '[WARNING] There are a lot of low frequency vibrations that can alter the readings. This is probably due to the test being performed at too high an acceleration!' + ) + ConsoleOutput.print( + 'Try lowering the ACCEL value and/or increasing the SIZE value before restarting the macro to ensure that only constant speeds are being recorded and that the dynamic behavior of the machine is not affecting the measurements' + ) + if motor_zeta is not None: + ConsoleOutput.print( + f'Motors have a main resonant frequency at {motor_fr:.1f}Hz with an estimated damping ratio of {motor_zeta:.3f}' + ) + else: + ConsoleOutput.print( + f'Motors have a main resonant frequency at {motor_fr:.1f}Hz but it was impossible to estimate a damping ratio.' + ) + + ax_6.plot(target_freqs[motor_res_idx], global_motor_profile[motor_res_idx], 'x', color='black', markersize=10) + ax_6.annotate( + 'R', + (target_freqs[motor_res_idx], global_motor_profile[motor_res_idx]), + textcoords='offset points', + xytext=(15, 5), + ha='right', + fontsize=14, + color=self.KLIPPAIN_COLORS['red_pink'], + weight='bold', + ) + + ax_6_2.plot([], [], ' ', label=f'Motor resonant frequency (ω0): {motor_fr:.1f}Hz') + if motor_zeta is not None: + ax_6_2.plot([], [], ' ', label=f'Motor damping ratio (ζ): {motor_zeta:.3f}') + else: + ax_6_2.plot([], [], ' ', label='No damping ratio computed') + + ax_6.xaxis.set_minor_locator(matplotlib.ticker.AutoMinorLocator()) + ax_6.yaxis.set_minor_locator(matplotlib.ticker.AutoMinorLocator()) + ax_6.grid(which='major', color='grey') + ax_6.grid(which='minor', color='lightgrey') + + fontP = matplotlib.font_manager.FontProperties() + fontP.set_size('small') + ax_6.legend(loc='upper left', prop=fontP) + ax_6_2.legend(loc='upper right', prop=fontP) + + # Adding a small Klippain logo to the top left corner of the figure + ax_logo = fig.add_axes([0.001, 0.924, 0.075, 0.075], anchor='NW') + ax_logo.imshow(plt.imread(os.path.join(os.path.dirname(os.path.abspath(__file__)), 'klippain.png'))) + ax_logo.axis('off') + + # Adding Shake&Tune version in the top right corner + if st_version != 'unknown': + fig.text( + 0.995, 0.985, st_version, ha='right', va='bottom', fontsize=8, color=self.KLIPPAIN_COLORS['purple'] + ) + + return fig diff --git a/shaketune/graph_creators/shaper_graph_creator.py b/shaketune/graph_creators/shaper_graph_creator.py index 37ee27d..a8a80c9 100644 --- a/shaketune/graph_creators/shaper_graph_creator.py +++ b/shaketune/graph_creators/shaper_graph_creator.py @@ -11,53 +11,25 @@ # including computation and graphing functions for 3D printer vibration analysis. 
-################################################# -######## INPUT SHAPER CALIBRATION SCRIPT ######## -################################################# -# Derived from the calibrate_shaper.py official Klipper script -# Copyright (C) 2020 Dmitry Butyugin -# Copyright (C) 2020 Kevin O'Connor -# Highly modified and improved by Frix_x#0161 # +from typing import List, Optional -import optparse -import os -from datetime import datetime -from typing import Dict, List, Optional - -import matplotlib -import matplotlib.font_manager -import matplotlib.pyplot as plt -import matplotlib.ticker import numpy as np -from scipy.interpolate import interp1d - -matplotlib.use('Agg') from ..helpers.accelerometer import Measurement, MeasurementsManager from ..helpers.common_func import ( compute_mechanical_parameters, compute_spectrogram, detect_peaks, - setup_klipper_import, ) from ..helpers.console_output import ConsoleOutput from ..shaketune_config import ShakeTuneConfig +from . import get_shaper_calibrate_module from .graph_creator import GraphCreator PEAKS_DETECTION_THRESHOLD = 0.05 PEAKS_EFFECT_THRESHOLD = 0.12 -SPECTROGRAM_LOW_PERCENTILE_FILTER = 5 -MAX_VIBRATIONS = 5.0 -MAX_VIBRATIONS_PLOTTED = 80.0 -MAX_VIBRATIONS_PLOTTED_ZOOM = 1.25 # 1.25x max vibs values from the standard filters selection SMOOTHING_TESTS = 10 # Number of smoothing values to test (it will significantly increase the computation time) -KLIPPAIN_COLORS = { - 'purple': '#70088C', - 'orange': '#FF8D32', - 'dark_purple': '#150140', - 'dark_orange': '#F24130', - 'red_pink': '#F2055C', -} +MAX_VIBRATIONS = 5.0 @GraphCreator.register('input shaper') @@ -76,18 +48,20 @@ def configure( self._accel_per_hz = accel_per_hz def create_graph(self, measurements_manager: MeasurementsManager) -> None: - if not self._scv: - raise ValueError('scv must be set to create the input shaper graph!') - - fig = shaper_calibration( + computer = ShaperGraphComputation( measurements=measurements_manager.get_measurements(), - klipperdir=str(self._config.klipper_folder), max_smoothing=self._max_smoothing, scv=self._scv, accel_per_hz=self._accel_per_hz, + max_freq=self._config.max_freq, st_version=self._version, ) - axis_label = (measurements_manager.get_measurements())[0]['name'].split('_')[1] + computation = computer.compute() + fig = self._plotter.plot_input_shaper_graph(computation) + try: + axis_label = (measurements_manager.get_measurements())[0]['name'].split('_')[1] + except IndexError: + axis_label = None self._save_figure(fig, measurements_manager, axis_label) def clean_old_files(self, keep_results: int = 3) -> None: @@ -100,648 +74,255 @@ def clean_old_files(self, keep_results: int = 3) -> None: old_png_file.unlink() -###################################################################### -# Computation -###################################################################### - - -# Find the best shaper parameters using Klipper's official algorithm selection with -# a proper precomputed damping ratio (zeta) and using the configured printer SQV value -def calibrate_shaper(datas: List[np.ndarray], max_smoothing: Optional[float], scv: float, max_freq: float): - helper = shaper_calibrate.ShaperCalibrate(printer=None) - calibration_data = helper.process_accelerometer_data(datas) - calibration_data.normalize_to_frequencies() - - # We compute the damping ratio using the Klipper's default value if it fails - fr, zeta, _, _ = compute_mechanical_parameters(calibration_data.psd_sum, calibration_data.freq_bins) - zeta = zeta if zeta is not None else 0.1 - - compat = False 
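For reference, a damping ratio can be estimated from a PSD resonance peak with the classic half-power bandwidth method. The sketch below is illustrative only: it assumes a single dominant peak and is not necessarily how compute_mechanical_parameters derives zeta (the helper name is hypothetical).

import numpy as np

def estimate_damping_ratio(freqs: np.ndarray, psd: np.ndarray) -> float:
    # Half-power bandwidth: zeta ~= (f2 - f1) / (2 * f0), where f1 and f2 are
    # the frequencies at which the PSD falls back to half of its peak value.
    peak_idx = int(np.argmax(psd))
    f0 = freqs[peak_idx]
    above = np.flatnonzero(psd >= psd[peak_idx] / 2.0)
    f1, f2 = freqs[above[0]], freqs[above[-1]]
    return (f2 - f1) / (2.0 * f0)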
- try: - k_shaper_choice, all_shapers = helper.find_best_shaper( +class ShaperGraphComputation: + def __init__( + self, + measurements: List[Measurement], + accel_per_hz: Optional[float], + scv: float, + max_smoothing: Optional[float], + max_freq: float, + st_version: str, + ): + self.measurements = measurements + self.accel_per_hz = accel_per_hz + self.scv = scv + self.max_smoothing = max_smoothing + self.max_freq = max_freq + self.st_version = st_version + + def compute(self): + if len(self.measurements) == 0: + raise ValueError('No valid data found in the provided measurements!') + if len(self.measurements) > 1: + ConsoleOutput.print('Warning: incorrect number of measurements detected. Only the first one will be used!') + + datas = [np.array(m['samples']) for m in self.measurements if m['samples'] is not None] + + # Compute shapers, PSD outputs and spectrogram + ( + klipper_shaper_choice, + shapers, + additional_shapers, calibration_data, - shapers=None, - damping_ratio=zeta, - scv=scv, - shaper_freqs=None, - max_smoothing=max_smoothing, - test_damping_ratios=None, - max_freq=max_freq, - logger=None, + fr, + zeta, + max_smoothing_computed, + compat, + ) = self._calibrate_shaper(datas[0], self.max_smoothing, self.scv, self.max_freq) + pdata, bins, t = compute_spectrogram(datas[0]) + del datas + + # Select only the relevant part of the PSD data + freqs = calibration_data.freq_bins + calibration_data.psd_sum = calibration_data.psd_sum[freqs <= self.max_freq] + calibration_data.psd_x = calibration_data.psd_x[freqs <= self.max_freq] + calibration_data.psd_y = calibration_data.psd_y[freqs <= self.max_freq] + calibration_data.psd_z = calibration_data.psd_z[freqs <= self.max_freq] + calibration_data.freqs = freqs[freqs <= self.max_freq] + + # Peak detection algorithm + peaks_threshold = [ + PEAKS_DETECTION_THRESHOLD * calibration_data.psd_sum.max(), + PEAKS_EFFECT_THRESHOLD * calibration_data.psd_sum.max(), + ] + num_peaks, peaks, peaks_freqs = detect_peaks( + calibration_data.psd_sum, calibration_data.freqs, peaks_threshold[0] ) + + # Print the peaks info in the console + peak_freqs_formated = ['{:.1f}'.format(f) for f in peaks_freqs] + num_peaks_above_effect_threshold = np.sum(calibration_data.psd_sum[peaks] > peaks_threshold[1]) ConsoleOutput.print( - ( - f'Detected a square corner velocity of {scv:.1f} and a damping ratio of {zeta:.3f}. 
' - 'These values will be used to compute the input shaper filter recommendations' - ) + f"Peaks detected on the graph: {num_peaks} @ {', '.join(map(str, peak_freqs_formated))} Hz ({num_peaks_above_effect_threshold} above effect threshold)" ) - except TypeError: - ConsoleOutput.print( - ( - '[WARNING] You seem to be using an older version of Klipper that is not compatible with all the latest ' - 'Shake&Tune features!\nShake&Tune now runs in compatibility mode: be aware that the results may be ' - 'slightly off, since the real damping ratio cannot be used to craft accurate filter recommendations' + + # Consolidate shaper data for plotting the table summary + # and data for the shaper recommendation (performance vs low vibration) + shaper_table_data = { + 'shapers': [], + 'recommendations': [], + 'damping_ratio': zeta, + } + + perf_shaper_choice = None + perf_shaper_freq = None + perf_shaper_accel = 0 + for shaper in shapers: + shaper_info = { + 'type': shaper.name.upper(), + 'frequency': shaper.freq, + 'vibrations': shaper.vibrs, + 'smoothing': shaper.smoothing, + 'max_accel': shaper.max_accel, + 'vals': shaper.vals, + } + shaper_table_data['shapers'].append(shaper_info) + + # Get the Klipper recommended shaper (usually it's a good low vibration compromise) + if shaper.name == klipper_shaper_choice: + klipper_shaper_freq = shaper.freq + klipper_shaper_accel = shaper.max_accel + + # Find the shaper with the highest accel but with vibrs under MAX_VIBRATIONS as it's + # a good performance compromise when injecting the SCV and damping ratio in the computation + if perf_shaper_accel < shaper.max_accel and shaper.vibrs * 100 < MAX_VIBRATIONS: + perf_shaper_choice = shaper.name + perf_shaper_accel = shaper.max_accel + perf_shaper_freq = shaper.freq + + # Recommendations are put in the console: one is Klipper's original suggestion that is usually good for low vibrations + # and the other one is the custom "performance" recommendation that looks for a suitable shaper that doesn't have excessive + # vibrations level but have higher accelerations. 
If both recommendations are the same shaper, or if no suitable "performance" + # shaper is found, then only a single line as the "best shaper" recommendation is printed + if ( + perf_shaper_choice is not None + and perf_shaper_choice != klipper_shaper_choice + and perf_shaper_accel >= klipper_shaper_accel + ): + perf_shaper_string = ( + f'Recommended for performance: {perf_shaper_choice.upper()} @ {perf_shaper_freq:.1f} Hz' ) - ) - compat = True - k_shaper_choice, all_shapers = helper.find_best_shaper(calibration_data, max_smoothing, None) - - # If max_smoothing is not None, we run the same computation but without a smoothing value - # to get the max smoothing values from the filters and create the testing list - all_shapers_nosmoothing = None - if max_smoothing is not None: - if compat: - _, all_shapers_nosmoothing = helper.find_best_shaper(calibration_data, None, None) - else: - _, all_shapers_nosmoothing = helper.find_best_shaper( - calibration_data, - shapers=None, - damping_ratio=zeta, - scv=scv, - shaper_freqs=None, - max_smoothing=None, - test_damping_ratios=None, - max_freq=max_freq, - logger=None, + lowvibr_shaper_string = ( + f'Recommended for low vibrations: {klipper_shaper_choice.upper()} @ {klipper_shaper_freq:.1f} Hz' ) - - # Then we iterate over the all_shaperts_nosmoothing list to get the max of the smoothing values - max_smoothing = 0.0 - if all_shapers_nosmoothing is not None: - for shaper in all_shapers_nosmoothing: - if shaper.smoothing > max_smoothing: - max_smoothing = shaper.smoothing - else: - for shaper in all_shapers: - if shaper.smoothing > max_smoothing: - max_smoothing = shaper.smoothing - - # Then we create a list of smoothing values to test (no need to test the max smoothing value as it was already tested) - smoothing_test_list = np.linspace(0.001, max_smoothing, SMOOTHING_TESTS)[:-1] - additional_all_shapers = {} - for smoothing in smoothing_test_list: - if compat: - _, all_shapers_bis = helper.find_best_shaper(calibration_data, smoothing, None) + shaper_table_data['recommendations'].append(perf_shaper_string) + shaper_table_data['recommendations'].append(lowvibr_shaper_string) + shaper_choices = [klipper_shaper_choice.upper(), perf_shaper_choice.upper()] + ConsoleOutput.print(f'{perf_shaper_string} (with a damping ratio of {zeta:.3f})') + ConsoleOutput.print(f'{lowvibr_shaper_string} (with a damping ratio of {zeta:.3f})') else: - _, all_shapers_bis = helper.find_best_shaper( + shaper_string = f'Recommended best shaper: {klipper_shaper_choice.upper()} @ {klipper_shaper_freq:.1f} Hz' + shaper_table_data['recommendations'].append(shaper_string) + shaper_choices = [klipper_shaper_choice.upper()] + ConsoleOutput.print(f'{shaper_string} (with a damping ratio of {zeta:.3f})') + + # And finally setup the results to return them + computation_result = { + 'measurements': self.measurements, + 'compat': compat, + 'max_smoothing_computed': max_smoothing_computed, + 'max_freq': self.max_freq, + 'calibration_data': calibration_data, + 'shapers': shapers, + 'shaper_table_data': shaper_table_data, + 'shaper_choices': shaper_choices, + 'peaks': peaks, + 'peaks_freqs': peaks_freqs, + 'peaks_threshold': peaks_threshold, + 'fr': fr, + 'zeta': zeta, + 't': t, + 'bins': bins, + 'pdata': pdata, + 'additional_shapers': additional_shapers, + 'st_version': self.st_version, + } + + return computation_result + + # Find the best shaper parameters using Klipper's official algorithm selection with + # a proper precomputed damping ratio (zeta) and using the configured printer SQV value + # 
This function also sweep around the smoothing values to help you find the best compromise + def _calibrate_shaper(self, datas: List[np.ndarray], max_smoothing: Optional[float], scv: float, max_freq: float): + helper = get_shaper_calibrate_module().ShaperCalibrate(printer=None) + calibration_data = helper.process_accelerometer_data(datas) + calibration_data.normalize_to_frequencies() + + # We compute the damping ratio using the Klipper's default value if it fails + fr, zeta, _, _ = compute_mechanical_parameters(calibration_data.psd_sum, calibration_data.freq_bins) + zeta = zeta if zeta is not None else 0.1 + + compat = False + try: + k_shaper_choice, all_shapers = helper.find_best_shaper( calibration_data, shapers=None, damping_ratio=zeta, scv=scv, shaper_freqs=None, - max_smoothing=smoothing, + max_smoothing=max_smoothing, test_damping_ratios=None, max_freq=max_freq, logger=None, ) - additional_all_shapers[f'sm_{smoothing}'] = all_shapers_bis - additional_all_shapers['max_smoothing'] = ( - all_shapers_nosmoothing if all_shapers_nosmoothing is not None else all_shapers - ) - - return k_shaper_choice.name, all_shapers, additional_all_shapers, calibration_data, fr, zeta, max_smoothing, compat - - -###################################################################### -# Graphing -###################################################################### - - -def plot_freq_response( - ax: plt.Axes, - calibration_data, - shapers, - klipper_shaper_choice: str, - peaks: np.ndarray, - peaks_freqs: np.ndarray, - peaks_threshold: List[float], - fr: float, - zeta: float, - max_freq: float, -) -> Dict[str, List[Dict[str, str]]]: - freqs = calibration_data.freqs - psd = calibration_data.psd_sum - px = calibration_data.psd_x - py = calibration_data.psd_y - pz = calibration_data.psd_z - - fontP = matplotlib.font_manager.FontProperties() - fontP.set_size('x-small') - - ax.set_xlabel('Frequency (Hz)') - ax.set_xlim([0, max_freq]) - ax.set_ylabel('Power spectral density') - ax.set_ylim([0, psd.max() + psd.max() * 0.05]) - - ax.plot(freqs, psd, label='X+Y+Z', color='purple', zorder=5) - ax.plot(freqs, px, label='X', color='red') - ax.plot(freqs, py, label='Y', color='green') - ax.plot(freqs, pz, label='Z', color='blue') - - ax.xaxis.set_minor_locator(matplotlib.ticker.MultipleLocator(5)) - ax.yaxis.set_minor_locator(matplotlib.ticker.AutoMinorLocator()) - ax.ticklabel_format(axis='y', style='scientific', scilimits=(0, 0)) - ax.grid(which='major', color='grey') - ax.grid(which='minor', color='lightgrey') - - ax2 = ax.twinx() - ax2.yaxis.set_visible(False) - - shaper_table_data = { - 'shapers': [], - 'recommendations': [], - 'damping_ratio': zeta, - } - - # Draw the shappers curves and add their specific parameters in the legend - perf_shaper_choice = None - perf_shaper_vals = None - perf_shaper_freq = None - perf_shaper_accel = 0 - for shaper in shapers: - ax2.plot(freqs, shaper.vals, label=shaper.name.upper(), linestyle='dotted') - - shaper_info = { - 'type': shaper.name.upper(), - 'frequency': shaper.freq, - 'vibrations': shaper.vibrs, - 'smoothing': shaper.smoothing, - 'max_accel': shaper.max_accel, - } - shaper_table_data['shapers'].append(shaper_info) - - # Get the Klipper recommended shaper (usually it's a good low vibration compromise) - if shaper.name == klipper_shaper_choice: - klipper_shaper_freq = shaper.freq - klipper_shaper_vals = shaper.vals - klipper_shaper_accel = shaper.max_accel - - # Find the shaper with the highest accel but with vibrs under MAX_VIBRATIONS as it's - # a good performance 
compromise when injecting the SCV and damping ratio in the computation - if perf_shaper_accel < shaper.max_accel and shaper.vibrs * 100 < MAX_VIBRATIONS: - perf_shaper_choice = shaper.name - perf_shaper_accel = shaper.max_accel - perf_shaper_freq = shaper.freq - perf_shaper_vals = shaper.vals - - # Recommendations are added to the legend: one is Klipper's original suggestion that is usually good for low vibrations - # and the other one is the custom "performance" recommendation that looks for a suitable shaper that doesn't have excessive - # vibrations level but have higher accelerations. If both recommendations are the same shaper, or if no suitable "performance" - # shaper is found, then only a single line as the "best shaper" recommendation is added to the legend - if ( - perf_shaper_choice is not None - and perf_shaper_choice != klipper_shaper_choice - and perf_shaper_accel >= klipper_shaper_accel - ): - perf_shaper_string = f'Recommended for performance: {perf_shaper_choice.upper()} @ {perf_shaper_freq:.1f} Hz' - lowvibr_shaper_string = ( - f'Recommended for low vibrations: {klipper_shaper_choice.upper()} @ {klipper_shaper_freq:.1f} Hz' - ) - shaper_table_data['recommendations'].append(perf_shaper_string) - shaper_table_data['recommendations'].append(lowvibr_shaper_string) - ConsoleOutput.print(f'{perf_shaper_string} (with a damping ratio of {zeta:.3f})') - ConsoleOutput.print(f'{lowvibr_shaper_string} (with a damping ratio of {zeta:.3f})') - ax.plot( - freqs, - psd * perf_shaper_vals, - label=f'With {perf_shaper_choice.upper()} applied', - color='cyan', - ) - ax.plot( - freqs, - psd * klipper_shaper_vals, - label=f'With {klipper_shaper_choice.upper()} applied', - color='lime', - ) - else: - shaper_string = f'Recommended best shaper: {klipper_shaper_choice.upper()} @ {klipper_shaper_freq:.1f} Hz' - shaper_table_data['recommendations'].append(shaper_string) - ConsoleOutput.print(f'{shaper_string} (with a damping ratio of {zeta:.3f})') - ax.plot( - freqs, - psd * klipper_shaper_vals, - label=f'With {klipper_shaper_choice.upper()} applied', - color='cyan', - ) - - # Draw the detected peaks and name them - # This also draw the detection threshold and warning threshold (aka "effect zone") - ax.plot(peaks_freqs, psd[peaks], 'x', color='black', markersize=8) - for idx, peak in enumerate(peaks): - if psd[peak] > peaks_threshold[1]: - fontcolor = 'red' - fontweight = 'bold' - else: - fontcolor = 'black' - fontweight = 'normal' - ax.annotate( - f'{idx+1}', - (freqs[peak], psd[peak]), - textcoords='offset points', - xytext=(8, 5), - ha='left', - fontsize=13, - color=fontcolor, - weight=fontweight, - ) - ax.axhline(y=peaks_threshold[0], color='black', linestyle='--', linewidth=0.5) - ax.axhline(y=peaks_threshold[1], color='black', linestyle='--', linewidth=0.5) - ax.fill_between(freqs, 0, peaks_threshold[0], color='green', alpha=0.15, label='Relax Region') - ax.fill_between(freqs, peaks_threshold[0], peaks_threshold[1], color='orange', alpha=0.2, label='Warning Region') - - # Add the main resonant frequency and damping ratio of the axis to the graph title - ax.set_title( - f'Axis Frequency Profile (ω0={fr:.1f}Hz, ζ={zeta:.3f})', - fontsize=14, - color=KLIPPAIN_COLORS['dark_orange'], - weight='bold', - ) - ax.legend(loc='upper left', prop=fontP) - ax2.legend(loc='upper right', prop=fontP) - - return shaper_table_data - - -# Plot a time-frequency spectrogram to see how the system respond over time during the -# resonnance test. 
This can highlight hidden spots from the standard PSD graph from other harmonics -def plot_spectrogram( - ax: plt.Axes, t: np.ndarray, bins: np.ndarray, pdata: np.ndarray, peaks: np.ndarray, max_freq: float -) -> None: - ax.set_title('Time-Frequency Spectrogram', fontsize=14, color=KLIPPAIN_COLORS['dark_orange'], weight='bold') - - # We need to normalize the data to get a proper signal on the spectrogram - # However, while using "LogNorm" provide too much background noise, using - # "Normalize" make only the resonnance appearing and hide interesting elements - # So we need to filter out the lower part of the data (ie. find the proper vmin for LogNorm) - vmin_value = np.percentile(pdata, SPECTROGRAM_LOW_PERCENTILE_FILTER) - - # Draw the spectrogram using imgshow that is better suited here than pcolormesh since its result is already rasterized and - # we doesn't need to keep vector graphics when saving to a final .png file. Using it also allow to - # save ~150-200MB of RAM during the "fig.savefig" operation. - cm = 'inferno' - norm = matplotlib.colors.LogNorm(vmin=vmin_value) - ax.imshow( - pdata.T, - norm=norm, - cmap=cm, - aspect='auto', - extent=[t[0], t[-1], bins[0], bins[-1]], - origin='lower', - interpolation='antialiased', - ) - - ax.set_xlim([0.0, max_freq]) - ax.set_ylabel('Time (s)') - ax.set_xlabel('Frequency (Hz)') - - # Add peaks lines in the spectrogram to get hint from peaks found in the first graph - if peaks is not None: - for idx, peak in enumerate(peaks): - ax.axvline(peak, color='cyan', linestyle='dotted', linewidth=1) - ax.annotate( - f'Peak {idx+1}', - (peak, bins[-1] * 0.9), - textcoords='data', - color='cyan', - rotation=90, - fontsize=10, - verticalalignment='top', - horizontalalignment='right', + ConsoleOutput.print( + ( + f'Detected a square corner velocity of {scv:.1f} and a damping ratio of {zeta:.3f}. 
' + 'These values will be used to compute the input shaper filter recommendations' + ) ) - - return - - -def plot_smoothing_vs_accel( - ax: plt.Axes, - shaper_table_data: Dict[str, List[Dict[str, str]]], - additional_shapers: Dict[str, List[Dict[str, str]]], -) -> None: - fontP = matplotlib.font_manager.FontProperties() - fontP.set_size('x-small') - - ax.xaxis.set_minor_locator(matplotlib.ticker.MultipleLocator(1000)) - ax.yaxis.set_minor_locator(matplotlib.ticker.AutoMinorLocator()) - ax.grid(which='major', color='grey') - ax.grid(which='minor', color='lightgrey') - - shaper_data = {} - - # Extract data from additional_shapers first - for _, shapers in additional_shapers.items(): - for shaper in shapers: - shaper_type = shaper.name.upper() - if shaper_type not in shaper_data: - shaper_data[shaper_type] = [] - shaper_data[shaper_type].append( - { - 'max_accel': shaper.max_accel, - 'vibrs': shaper.vibrs * 100.0, - } + except TypeError: + ConsoleOutput.print( + ( + '[WARNING] You seem to be using an older version of Klipper that is not compatible with all the latest ' + 'Shake&Tune features!\nShake&Tune now runs in compatibility mode: be aware that the results may be ' + 'slightly off, since the real damping ratio cannot be used to craft accurate filter recommendations' + ) ) - - # Extract data from shaper_table_data and insert into shaper_data - max_shaper_vibrations = 0 - for shaper in shaper_table_data['shapers']: - shaper_type = shaper['type'] - if shaper_type not in shaper_data: - shaper_data[shaper_type] = [] - max_shaper_vibrations = max(max_shaper_vibrations, float(shaper['vibrations']) * 100.0) - shaper_data[shaper_type].append( - { - 'max_accel': float(shaper['max_accel']), - 'vibrs': float(shaper['vibrations']) * 100.0, - } - ) - - # Calculate the maximum `max_accel` for points below the thresholds to get a good plot with - # continuous lines and a zoom on the graph to show details at low vibrations - min_accel_limit = 99999 - max_accel_limit = 0 - max_accel_limit_zoom = 0 - for data in shaper_data.values(): - min_accel_limit = min(min_accel_limit, min(d['max_accel'] for d in data)) - max_accel_limit = max( - max_accel_limit, max(d['max_accel'] for d in data if d['vibrs'] <= MAX_VIBRATIONS_PLOTTED) - ) - max_accel_limit_zoom = max( - max_accel_limit_zoom, - max(d['max_accel'] for d in data if d['vibrs'] <= max_shaper_vibrations * MAX_VIBRATIONS_PLOTTED_ZOOM), - ) - - # Add a zoom axes on the graph to show details at low vibrations - zoomed_window = np.clip(max_shaper_vibrations * MAX_VIBRATIONS_PLOTTED_ZOOM, 0, 20) - axins = ax.inset_axes( - [0.575, 0.125, 0.40, 0.45], - xlim=(min_accel_limit * 0.95, max_accel_limit_zoom * 1.1), - ylim=(-0.5, zoomed_window), - ) - ax.indicate_inset_zoom(axins, edgecolor=KLIPPAIN_COLORS['purple'], linewidth=3) - axins.xaxis.set_minor_locator(matplotlib.ticker.MultipleLocator(500)) - axins.yaxis.set_minor_locator(matplotlib.ticker.AutoMinorLocator()) - axins.grid(which='major', color='grey') - axins.grid(which='minor', color='lightgrey') - - # Draw the green zone on both axes to highlight the low vibrations zone - number_of_interpolated_points = 100 - x_fill = np.linspace(min_accel_limit * 0.95, max_accel_limit * 1.1, number_of_interpolated_points) - y_fill = np.full_like(x_fill, 5.0) - ax.axhline(y=5.0, color='black', linestyle='--', linewidth=0.5) - ax.fill_between(x_fill, -0.5, y_fill, color='green', alpha=0.15) - if zoomed_window > 5.0: - axins.axhline(y=5.0, color='black', linestyle='--', linewidth=0.5) - axins.fill_between(x_fill, -0.5, 
y_fill, color='green', alpha=0.15) - - # Plot each shaper remaining vibrations response over acceleration - max_vibrations = 0 - for _, (shaper_type, data) in enumerate(shaper_data.items()): - max_accel_values = np.array([d['max_accel'] for d in data]) - vibrs_values = np.array([d['vibrs'] for d in data]) - - # remove duplicate values in max_accel_values and delete the corresponding vibrs_values - # and interpolate the curves to get them smoother with more datapoints - unique_max_accel_values, unique_indices = np.unique(max_accel_values, return_index=True) - max_accel_values = unique_max_accel_values - vibrs_values = vibrs_values[unique_indices] - interp_func = interp1d(max_accel_values, vibrs_values, kind='cubic') - max_accel_fine = np.linspace(max_accel_values.min(), max_accel_values.max(), number_of_interpolated_points) - vibrs_fine = interp_func(max_accel_fine) - - ax.plot(max_accel_fine, vibrs_fine, label=f'{shaper_type}', zorder=10) - axins.plot(max_accel_fine, vibrs_fine, label=f'{shaper_type}', zorder=15) - max_vibrations = max(max_vibrations, max(vibrs_fine)) - - ax.set_xlabel('Max Acceleration') - ax.set_ylabel('Remaining Vibrations (%)') - ax.set_xlim([min_accel_limit * 0.95, max_accel_limit * 1.1]) - ax.set_ylim([-0.5, np.clip(max_vibrations * 1.05, 50, MAX_VIBRATIONS_PLOTTED)]) - ax.set_title( - 'Filters performances over acceleration', - fontsize=14, - color=KLIPPAIN_COLORS['dark_orange'], - weight='bold', - ) - ax.legend(loc='best', prop=fontP) - - -def print_shaper_table(fig: plt.Figure, shaper_table_data: Dict[str, List[Dict[str, str]]]) -> None: - columns = ['Type', 'Frequency', 'Vibrations', 'Smoothing', 'Max Accel'] - table_data = [] - - for shaper_info in shaper_table_data['shapers']: - row = [ - f'{shaper_info["type"].upper()}', - f'{shaper_info["frequency"]:.1f} Hz', - f'{shaper_info["vibrations"] * 100:.1f} %', - f'{shaper_info["smoothing"]:.3f}', - f'{round(shaper_info["max_accel"] / 10) * 10:.0f}', - ] - table_data.append(row) - table = plt.table(cellText=table_data, colLabels=columns, bbox=[1.130, -0.4, 0.803, 0.25], cellLoc='center') - table.auto_set_font_size(False) - table.set_fontsize(10) - table.auto_set_column_width([0, 1, 2, 3, 4]) - table.set_zorder(100) - - # Add the recommendations and damping ratio using fig.text - fig.text( - 0.585, - 0.235, - f'Estimated damping ratio (ζ): {shaper_table_data["damping_ratio"]:.3f}', - fontsize=14, - color=KLIPPAIN_COLORS['purple'], - ) - if len(shaper_table_data['recommendations']) == 1: - fig.text( - 0.585, - 0.200, - shaper_table_data['recommendations'][0], - fontsize=14, - color=KLIPPAIN_COLORS['red_pink'], - ) - elif len(shaper_table_data['recommendations']) == 2: - fig.text( - 0.585, - 0.200, - shaper_table_data['recommendations'][0], - fontsize=14, - color=KLIPPAIN_COLORS['red_pink'], - ) - fig.text( - 0.585, - 0.175, - shaper_table_data['recommendations'][1], - fontsize=14, - color=KLIPPAIN_COLORS['red_pink'], + compat = True + k_shaper_choice, all_shapers = helper.find_best_shaper(calibration_data, max_smoothing, None) + + # If max_smoothing is not None, we run the same computation but without a smoothing value + # to get the max smoothing values from the filters and create the testing list + all_shapers_nosmoothing = None + if max_smoothing is not None: + if compat: + _, all_shapers_nosmoothing = helper.find_best_shaper(calibration_data, None, None) + else: + _, all_shapers_nosmoothing = helper.find_best_shaper( + calibration_data, + shapers=None, + damping_ratio=zeta, + scv=scv, + shaper_freqs=None, + 
max_smoothing=None, + test_damping_ratios=None, + max_freq=max_freq, + logger=None, + ) + + # Then we iterate over the all_shaperts_nosmoothing list to get the max of the smoothing values + max_smoothing = 0.0 + if all_shapers_nosmoothing is not None: + for shaper in all_shapers_nosmoothing: + if shaper.smoothing > max_smoothing: + max_smoothing = shaper.smoothing + else: + for shaper in all_shapers: + if shaper.smoothing > max_smoothing: + max_smoothing = shaper.smoothing + + # Then we create a list of smoothing values to test (no need to test the max smoothing value as it was already tested) + smoothing_test_list = np.linspace(0.001, max_smoothing, SMOOTHING_TESTS)[:-1] + additional_all_shapers = {} + for smoothing in smoothing_test_list: + if compat: + _, all_shapers_bis = helper.find_best_shaper(calibration_data, smoothing, None) + else: + _, all_shapers_bis = helper.find_best_shaper( + calibration_data, + shapers=None, + damping_ratio=zeta, + scv=scv, + shaper_freqs=None, + max_smoothing=smoothing, + test_damping_ratios=None, + max_freq=max_freq, + logger=None, + ) + additional_all_shapers[f'sm_{smoothing}'] = all_shapers_bis + additional_all_shapers['max_smoothing'] = ( + all_shapers_nosmoothing if all_shapers_nosmoothing is not None else all_shapers ) - -###################################################################### -# Startup and main routines -###################################################################### - - -def shaper_calibration( - measurements: List[Measurement], - klipperdir: str = '~/klipper', - max_smoothing: Optional[float] = None, - scv: float = 5.0, - max_freq: float = 200.0, - accel_per_hz: Optional[float] = None, - st_version: str = 'unknown', -) -> plt.Figure: - if len(measurements) == 0: - raise ValueError('No valid data found in the provided measurements!') - if len(measurements) > 1: - ConsoleOutput.print('Warning: incorrect number of measurements detected. 
Only the first one will be used!') - - global shaper_calibrate - shaper_calibrate = setup_klipper_import(klipperdir) - - datas = [np.array(m['samples']) for m in measurements if m['samples'] is not None] - - # Compute shapers, PSD outputs and spectrogram - klipper_shaper_choice, shapers, additional_shapers, calibration_data, fr, zeta, max_smoothing_computed, compat = ( - calibrate_shaper(datas[0], max_smoothing, scv, max_freq) - ) - pdata, bins, t = compute_spectrogram(datas[0]) - del datas - - # Select only the relevant part of the PSD data - freqs = calibration_data.freq_bins - calibration_data.psd_sum = calibration_data.psd_sum[freqs <= max_freq] - calibration_data.psd_x = calibration_data.psd_x[freqs <= max_freq] - calibration_data.psd_y = calibration_data.psd_y[freqs <= max_freq] - calibration_data.psd_z = calibration_data.psd_z[freqs <= max_freq] - calibration_data.freqs = freqs[freqs <= max_freq] - - # Peak detection algorithm - peaks_threshold = [ - PEAKS_DETECTION_THRESHOLD * calibration_data.psd_sum.max(), - PEAKS_EFFECT_THRESHOLD * calibration_data.psd_sum.max(), - ] - num_peaks, peaks, peaks_freqs = detect_peaks(calibration_data.psd_sum, calibration_data.freqs, peaks_threshold[0]) - - # Print the peaks info in the console - peak_freqs_formated = ['{:.1f}'.format(f) for f in peaks_freqs] - num_peaks_above_effect_threshold = np.sum(calibration_data.psd_sum[peaks] > peaks_threshold[1]) - ConsoleOutput.print( - f"Peaks detected on the graph: {num_peaks} @ {', '.join(map(str, peak_freqs_formated))} Hz ({num_peaks_above_effect_threshold} above effect threshold)" - ) - - # Create graph layout - fig, ((ax1, ax3), (ax2, ax4)) = plt.subplots( - 2, - 2, - gridspec_kw={ - 'height_ratios': [4, 3], - 'width_ratios': [5, 4], - 'bottom': 0.050, - 'top': 0.890, - 'left': 0.048, - 'right': 0.966, - 'hspace': 0.169, - 'wspace': 0.150, - }, - ) - ax4.remove() - fig.set_size_inches(15, 11.6) - - # Add a title with some test info - title_line1 = 'INPUT SHAPER CALIBRATION TOOL' - fig.text( - 0.065, 0.965, title_line1, ha='left', va='bottom', fontsize=20, color=KLIPPAIN_COLORS['purple'], weight='bold' - ) - try: - filename_parts = measurements[0]['name'].split('_') - dt = datetime.strptime(f'{filename_parts[2]} {filename_parts[3]}', '%Y%m%d %H%M%S') - title_line2 = dt.strftime('%x %X') + ' -- ' + filename_parts[1].upper() + ' axis' - if compat: - title_line3 = '| Older Klipper version detected, damping ratio' - title_line4 = '| and SCV are not used for filter recommendations!' 
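As an aside, the two levels used for the peak analysis are simply fractions of the PSD maximum (5% for detection, 12% for the "effect" zone). detect_peaks is Shake&Tune's own helper; the scipy-based sketch below only illustrates the thresholding idea and is not the actual implementation.

import numpy as np
from scipy.signal import find_peaks

def classify_peaks(psd: np.ndarray, freqs: np.ndarray):
    detection_level = 0.05 * psd.max()  # PEAKS_DETECTION_THRESHOLD
    effect_level = 0.12 * psd.max()     # PEAKS_EFFECT_THRESHOLD
    peaks, _ = find_peaks(psd, height=detection_level)
    return freqs[peaks], freqs[peaks[psd[peaks] > effect_level]]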
- title_line5 = f'| Accel per Hz used: {accel_per_hz} mm/s²/Hz' if accel_per_hz is not None else '' - else: - max_smoothing_string = ( - f'maximum ({max_smoothing_computed:0.3f})' if max_smoothing is None else f'{max_smoothing:0.3f}' - ) - title_line3 = f'| Square corner velocity: {scv} mm/s' - title_line4 = f'| Allowed smoothing: {max_smoothing_string}' - title_line5 = f'| Accel per Hz used: {accel_per_hz} mm/s²/Hz' if accel_per_hz is not None else '' - except Exception: - ConsoleOutput.print( - f'Warning: measurement names look to be different than expected ({measurements[0]["name"]})' + return ( + k_shaper_choice.name, + all_shapers, + additional_all_shapers, + calibration_data, + fr, + zeta, + max_smoothing, + compat, ) - title_line2 = measurements[0]['name'] - title_line3 = '' - title_line4 = '' - title_line5 = '' - fig.text(0.065, 0.957, title_line2, ha='left', va='top', fontsize=16, color=KLIPPAIN_COLORS['dark_purple']) - fig.text(0.50, 0.990, title_line3, ha='left', va='top', fontsize=14, color=KLIPPAIN_COLORS['dark_purple']) - fig.text(0.50, 0.968, title_line4, ha='left', va='top', fontsize=14, color=KLIPPAIN_COLORS['dark_purple']) - fig.text(0.501, 0.945, title_line5, ha='left', va='top', fontsize=10, color=KLIPPAIN_COLORS['dark_purple']) - - # Plot the graphs - shaper_table_data = plot_freq_response( - ax1, calibration_data, shapers, klipper_shaper_choice, peaks, peaks_freqs, peaks_threshold, fr, zeta, max_freq - ) - plot_spectrogram(ax2, t, bins, pdata, peaks_freqs, max_freq) - plot_smoothing_vs_accel(ax3, shaper_table_data, additional_shapers) - - print_shaper_table(fig, shaper_table_data) - - # Adding a small Klippain logo to the top left corner of the figure - ax_logo = fig.add_axes([0.001, 0.924, 0.075, 0.075], anchor='NW') - ax_logo.imshow(plt.imread(os.path.join(os.path.dirname(os.path.abspath(__file__)), 'klippain.png'))) - ax_logo.axis('off') - - # Adding Shake&Tune version in the top right corner - if st_version != 'unknown': - fig.text(0.995, 0.985, st_version, ha='right', va='bottom', fontsize=8, color=KLIPPAIN_COLORS['purple']) - - return fig - - -def main(): - # Parse command-line arguments - usage = '%prog [options] ' - opts = optparse.OptionParser(usage) - opts.add_option('-o', '--output', type='string', dest='output', default=None, help='filename of output graph') - opts.add_option('-f', '--max_freq', type='float', default=200.0, help='maximum frequency to graph') - opts.add_option('-s', '--max_smoothing', type='float', default=None, help='maximum shaper smoothing to allow') - opts.add_option( - '--scv', '--square_corner_velocity', type='float', dest='scv', default=5.0, help='square corner velocity' - ) - opts.add_option('--accel_per_hz', type='float', default=None, help='accel_per_hz used during the measurement') - opts.add_option( - '-k', '--klipper_dir', type='string', dest='klipperdir', default='~/klipper', help='main klipper directory' - ) - options, args = opts.parse_args() - if len(args) < 1: - opts.error('Incorrect number of arguments') - if options.output is None: - opts.error('You must specify an output file.png to use the script (option -o)') - if options.max_smoothing is not None and options.max_smoothing < 0.05: - opts.error('Too small max_smoothing specified (must be at least 0.05)') - - measurements_manager = MeasurementsManager(10) - if args[0].endswith('.csv'): - measurements_manager.load_from_csvs(args) - elif args[0].endswith('.stdata'): - measurements_manager.load_from_stdata(args[0]) - else: - raise ValueError('Only .stdata or legacy Klipper 
raw accelerometer CSV files are supported!') - - fig = shaper_calibration( - measurements_manager.get_measurements(), - options.klipperdir, - options.max_smoothing, - options.scv, - options.max_freq, - options.accel_per_hz, - 'unknown', - ) - fig.savefig(options.output, dpi=150) - - -if __name__ == '__main__': - main() diff --git a/shaketune/graph_creators/static_graph_creator.py b/shaketune/graph_creators/static_graph_creator.py index 2e3b8ea..7a9cc22 100644 --- a/shaketune/graph_creators/static_graph_creator.py +++ b/shaketune/graph_creators/static_graph_creator.py @@ -8,38 +8,16 @@ # issues, including computation and graphing functions for 3D printer vibration analysis. -import optparse -import os -from datetime import datetime from typing import List, Optional -import matplotlib -import matplotlib.font_manager -import matplotlib.pyplot as plt -import matplotlib.ticker import numpy as np -matplotlib.use('Agg') - from ..helpers.accelerometer import Measurement, MeasurementsManager from ..helpers.common_func import compute_spectrogram from ..helpers.console_output import ConsoleOutput from ..shaketune_config import ShakeTuneConfig from .graph_creator import GraphCreator -PEAKS_DETECTION_THRESHOLD = 0.05 -PEAKS_EFFECT_THRESHOLD = 0.12 -SPECTROGRAM_LOW_PERCENTILE_FILTER = 5 -MAX_VIBRATIONS = 5.0 - -KLIPPAIN_COLORS = { - 'purple': '#70088C', - 'orange': '#FF8D32', - 'dark_purple': '#150140', - 'dark_orange': '#F24130', - 'red_pink': '#F2055C', -} - @GraphCreator.register('static frequency') class StaticGraphCreator(GraphCreator): @@ -49,185 +27,67 @@ def __init__(self, config: ShakeTuneConfig): self._duration: Optional[float] = None self._accel_per_hz: Optional[float] = None - def configure(self, freq: float, duration: float, accel_per_hz: Optional[float] = None) -> None: + def configure(self, freq: float = None, duration: float = None, accel_per_hz: Optional[float] = None) -> None: self._freq = freq self._duration = duration self._accel_per_hz = accel_per_hz def create_graph(self, measurements_manager: MeasurementsManager) -> None: - if not self._freq or not self._duration or not self._accel_per_hz: - raise ValueError('freq, duration and accel_per_hz must be set to create the static frequency graph!') - - fig = static_frequency_tool( + computer = StaticGraphComputation( measurements=measurements_manager.get_measurements(), - klipperdir=str(self._config.klipper_folder), freq=self._freq, duration=self._duration, - max_freq=200.0, + max_freq=self._config.max_freq, accel_per_hz=self._accel_per_hz, st_version=self._version, ) - axis_label = (measurements_manager.get_measurements())[0]['name'].split('_')[1] + computation = computer.compute() + fig = self._plotter.plot_static_frequency_graph(computation) + try: + axis_label = (measurements_manager.get_measurements())[0]['name'].split('_')[1] + except IndexError: + axis_label = None self._save_figure(fig, measurements_manager, axis_label) -###################################################################### -# Graphing -###################################################################### - - -def plot_spectrogram(ax: plt.Axes, t: np.ndarray, bins: np.ndarray, pdata: np.ndarray, max_freq: float) -> None: - ax.set_title('Time-Frequency Spectrogram', fontsize=14, color=KLIPPAIN_COLORS['dark_orange'], weight='bold') - - vmin_value = np.percentile(pdata, SPECTROGRAM_LOW_PERCENTILE_FILTER) - - cm = 'inferno' - norm = matplotlib.colors.LogNorm(vmin=vmin_value) - ax.imshow( - pdata.T, - norm=norm, - cmap=cm, - aspect='auto', - extent=[t[0], t[-1], 
bins[0], bins[-1]], - origin='lower', - interpolation='antialiased', - ) - - ax.set_xlim([0.0, max_freq]) - ax.set_ylabel('Time (s)') - ax.set_xlabel('Frequency (Hz)') - - return - - -def plot_energy_accumulation(ax: plt.Axes, t: np.ndarray, bins: np.ndarray, pdata: np.ndarray) -> None: - # Integrate the energy over the frequency bins for each time step and plot this vertically - ax.plot(np.trapz(pdata, t, axis=0), bins, color=KLIPPAIN_COLORS['orange']) - ax.set_title('Vibrations', fontsize=14, color=KLIPPAIN_COLORS['dark_orange'], weight='bold') - ax.set_xlabel('Cumulative Energy') - ax.set_ylabel('Time (s)') - ax.set_ylim([bins[0], bins[-1]]) - - ax.xaxis.set_minor_locator(matplotlib.ticker.AutoMinorLocator()) - ax.yaxis.set_minor_locator(matplotlib.ticker.AutoMinorLocator()) - ax.ticklabel_format(axis='x', style='scientific', scilimits=(0, 0)) - ax.grid(which='major', color='grey') - ax.grid(which='minor', color='lightgrey') - # ax.legend() - - -###################################################################### -# Startup and main routines -###################################################################### - - -def static_frequency_tool( - measurements: List[Measurement], - klipperdir: str = '~/klipper', - freq: Optional[float] = None, - duration: Optional[float] = None, - max_freq: float = 500.0, - accel_per_hz: Optional[float] = None, - st_version: str = 'unknown', -) -> plt.Figure: - if freq is None or duration is None: - raise ValueError('Error: missing frequency or duration parameters!') - - if len(measurements) == 0: - raise ValueError('No valid data found in the provided measurements!') - if len(measurements) > 1: - ConsoleOutput.print('Warning: incorrect number of measurements detected. Only the first one will be used!') - - datas = [np.array(m['samples']) for m in measurements if m['samples'] is not None] - - pdata, bins, t = compute_spectrogram(datas[0]) - del datas - - fig, ((ax1, ax3)) = plt.subplots( - 1, - 2, - gridspec_kw={ - 'width_ratios': [5, 3], - 'bottom': 0.080, - 'top': 0.840, - 'left': 0.050, - 'right': 0.985, - 'hspace': 0.166, - 'wspace': 0.138, - }, - ) - fig.set_size_inches(15, 7) - - title_line1 = 'STATIC FREQUENCY HELPER TOOL' - fig.text( - 0.060, 0.947, title_line1, ha='left', va='bottom', fontsize=20, color=KLIPPAIN_COLORS['purple'], weight='bold' - ) - try: - filename_parts = measurements[0]['name'].split('_') - dt = datetime.strptime(f'{filename_parts[2]} {filename_parts[3]}', '%Y%m%d %H%M%S') - title_line2 = dt.strftime('%x %X') + ' -- ' + filename_parts[1].upper() + ' axis' - title_line3 = f'| Maintained frequency: {freq}Hz for {duration}s' - title_line4 = f'| Accel per Hz used: {accel_per_hz} mm/s²/Hz' if accel_per_hz is not None else '' - except Exception: - ConsoleOutput.print( - f'Warning: measurement names look to be different than expected ({measurements[0]["name"]})' - ) - title_line2 = measurements[0]['name'] - title_line3 = '' - title_line4 = '' - fig.text(0.060, 0.939, title_line2, ha='left', va='top', fontsize=16, color=KLIPPAIN_COLORS['dark_purple']) - fig.text(0.55, 0.985, title_line3, ha='left', va='top', fontsize=14, color=KLIPPAIN_COLORS['dark_purple']) - fig.text(0.55, 0.950, title_line4, ha='left', va='top', fontsize=11, color=KLIPPAIN_COLORS['dark_purple']) - - plot_spectrogram(ax1, t, bins, pdata, max_freq) - plot_energy_accumulation(ax3, t, bins, pdata) - - ax_logo = fig.add_axes([0.001, 0.894, 0.105, 0.105], anchor='NW') - ax_logo.imshow(plt.imread(os.path.join(os.path.dirname(os.path.abspath(__file__)), 'klippain.png'))) 
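A note on naming: judging from how plot_spectrogram uses these arrays (the axis spanning t is limited to max_freq and labelled "Frequency"), t carries the frequency bins and bins carries the time axis here, so the np.trapz call in plot_energy_accumulation integrates the PSD over frequency for each time slice. A clearer-named sketch of the same idea (hypothetical helper, illustrative only):

import numpy as np

def energy_over_time(power: np.ndarray, freqs: np.ndarray) -> np.ndarray:
    # power has shape (n_freqs, n_times); integrating over the frequency axis
    # yields the cumulative vibration energy of each time slice.
    return np.trapz(power, freqs, axis=0)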
- ax_logo.axis('off') - - if st_version != 'unknown': - fig.text(0.995, 0.980, st_version, ha='right', va='bottom', fontsize=8, color=KLIPPAIN_COLORS['purple']) - - return fig - - -def main(): - usage = '%prog [options] ' - opts = optparse.OptionParser(usage) - opts.add_option('-o', '--output', type='string', dest='output', default=None, help='filename of output graph') - opts.add_option('-f', '--freq', type='float', default=None, help='frequency maintained during the measurement') - opts.add_option('-d', '--duration', type='float', default=None, help='duration of the measurement') - opts.add_option('--max_freq', type='float', default=500.0, help='maximum frequency to graph') - opts.add_option('--accel_per_hz', type='float', default=None, help='accel_per_hz used during the measurement') - opts.add_option( - '-k', '--klipper_dir', type='string', dest='klipperdir', default='~/klipper', help='main klipper directory' - ) - options, args = opts.parse_args() - if len(args) < 1: - opts.error('Incorrect number of arguments') - if options.output is None: - opts.error('You must specify an output file.png to use the script (option -o)') - - measurements_manager = MeasurementsManager(10) - if args[0].endswith('.csv'): - measurements_manager.load_from_csvs(args) - elif args[0].endswith('.stdata'): - measurements_manager.load_from_stdata(args[0]) - else: - raise ValueError('Only .stdata or legacy Klipper raw accelerometer CSV files are supported!') - - fig = static_frequency_tool( - measurements_manager.get_measurements(), - options.klipperdir, - options.freq, - options.duration, - options.max_freq, - options.accel_per_hz, - 'unknown', - ) - fig.savefig(options.output, dpi=150) - - -if __name__ == '__main__': - main() +class StaticGraphComputation: + def __init__( + self, + measurements: List[Measurement], + freq: float, + duration: float, + max_freq: float, + accel_per_hz: Optional[float], + st_version: str, + ): + self.measurements = measurements + self.freq = freq + self.duration = duration + self.max_freq = max_freq + self.accel_per_hz = accel_per_hz + self.st_version = st_version + + def compute(self): + if len(self.measurements) == 0: + raise ValueError('No valid data found in the provided measurements!') + if len(self.measurements) > 1: + ConsoleOutput.print('Warning: incorrect number of measurements detected. 
Only the first one will be used!') + + datas = [np.array(m['samples']) for m in self.measurements if m['samples'] is not None] + + pdata, bins, t = compute_spectrogram(datas[0]) + del datas + + computation_result = { + 'freq': self.freq, + 'duration': self.duration, + 'accel_per_hz': self.accel_per_hz, + 'st_version': self.st_version, + 'measurements': self.measurements, + 't': t, + 'bins': bins, + 'pdata': pdata, + 'max_freq': self.max_freq, + } + + return computation_result diff --git a/shaketune/graph_creators/vibrations_graph_creator.py b/shaketune/graph_creators/vibrations_graph_creator.py index 4dcbf9e..e832b89 100644 --- a/shaketune/graph_creators/vibrations_graph_creator.py +++ b/shaketune/graph_creators/vibrations_graph_creator.py @@ -9,31 +9,21 @@ import math -import optparse import os import re -from datetime import datetime from typing import List, Optional, Tuple -import matplotlib -import matplotlib.font_manager -import matplotlib.gridspec -import matplotlib.pyplot as plt -import matplotlib.ticker import numpy as np -matplotlib.use('Agg') - from ..helpers.accelerometer import Measurement, MeasurementsManager from ..helpers.common_func import ( - compute_mechanical_parameters, detect_peaks, identify_low_energy_zones, - setup_klipper_import, ) from ..helpers.console_output import ConsoleOutput from ..helpers.motors_config_parser import Motor, MotorsConfigParser from ..shaketune_config import ShakeTuneConfig +from . import get_shaper_calibrate_module from .graph_creator import GraphCreator PEAKS_DETECTION_THRESHOLD = 0.05 @@ -43,14 +33,6 @@ SPEEDS_AROUND_PEAK_DELETION = 3 # to delete +-3mm/s around a peak ANGLES_VALLEY_DETECTION_THRESHOLD = 1.1 # Lower is more sensitive -KLIPPAIN_COLORS = { - 'purple': '#70088C', - 'orange': '#FF8D32', - 'dark_purple': '#150140', - 'dark_orange': '#F24130', - 'red_pink': '#F2055C', -} - @GraphCreator.register('vibrations profile') class VibrationsGraphCreator(GraphCreator): @@ -60,884 +42,405 @@ def __init__(self, config: ShakeTuneConfig): self._accel: Optional[float] = None self._motors: Optional[List[MotorsConfigParser]] = None - def configure(self, kinematics: str, accel: float, motor_config_parser: MotorsConfigParser) -> None: + def configure(self, kinematics: str, accel: float, motor_config_parser: MotorsConfigParser = None) -> None: self._kinematics = kinematics self._accel = accel - self._motors: List[Motor] = motor_config_parser.get_motors() + if motor_config_parser is not None: + self._motors: List[Motor] = motor_config_parser.get_motors() + else: + self._motors = None def create_graph(self, measurements_manager: MeasurementsManager) -> None: - if not self._accel or not self._kinematics: - raise ValueError('accel and kinematics must be set to create the vibrations profile graph!') - - fig = vibrations_profile( + computer = VibrationGraphComputation( measurements=measurements_manager.get_measurements(), - klipperdir=str(self._config.klipper_folder), kinematics=self._kinematics, accel=self._accel, + max_freq=self._config.max_freq_vibrations, st_version=self._version, motors=self._motors, ) + computation = computer.compute() + fig = self._plotter.plot_vibrations_graph(computation) self._save_figure(fig, measurements_manager) -###################################################################### -# Computation -###################################################################### - - -# Call to the official Klipper input shaper object to do the PSD computation -def calc_freq_response(data) -> Tuple[np.ndarray, np.ndarray]: - helper = 
shaper_calibrate.ShaperCalibrate(printer=None) - return helper.process_accelerometer_data(data) - - -# Calculate motor frequency profiles based on the measured Power Spectral Density (PSD) measurements for the machine kinematics -# main angles and then create a global motor profile as a weighted average (from their own vibrations) of all calculated profiles -def compute_motor_profiles( - freqs: np.ndarray, - psds: dict, - all_angles_energy: dict, - measured_angles: Optional[List[int]] = None, - energy_amplification_factor: int = 2, -) -> Tuple[dict, np.ndarray]: - if measured_angles is None: - measured_angles = [0, 90] - - motor_profiles = {} - weighted_sum_profiles = np.zeros_like(freqs) - total_weight = 0 - conv_filter = np.ones(20) / 20 - - # Creating the PSD motor profiles for each angles - for angle in measured_angles: - # Calculate the sum of PSDs for the current angle and then convolve - sum_curve = np.sum(np.array([psds[angle][speed] for speed in psds[angle]]), axis=0) - motor_profiles[angle] = np.convolve(sum_curve / len(psds[angle]), conv_filter, mode='same') - - # Calculate weights - angle_energy = ( - all_angles_energy[angle] ** energy_amplification_factor - ) # First weighting factor is based on the total vibrations of the machine at the specified angle - curve_area = ( - np.trapz(motor_profiles[angle], freqs) ** energy_amplification_factor - ) # Additional weighting factor is based on the area under the current motor profile at this specified angle - total_angle_weight = angle_energy * curve_area - - # Update weighted sum profiles to get the global motor profile - weighted_sum_profiles += motor_profiles[angle] * total_angle_weight - total_weight += total_angle_weight - - # Creating a global average motor profile that is the weighted average of all the PSD motor profiles - global_motor_profile = weighted_sum_profiles / total_weight if total_weight != 0 else weighted_sum_profiles - - return motor_profiles, global_motor_profile - - -# Since it was discovered that there is no non-linear mixing in the stepper "steps" vibrations, instead of measuring -# the effects of each speeds at each angles, this function simplify it by using only the main motors axes (X/Y for Cartesian -# printers and A/B for CoreXY) measurements and project each points on the [0,360] degrees range using trigonometry -# to "sum" the vibration impact of each axis at every points of the generated spectrogram. The result is very similar at the end. 
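The projection described in the comment above boils down to a bit of trigonometry per kinematics type. Here is a minimal standalone sketch of it (the helper name is hypothetical; the formulas are the ones used in compute_dir_speed_spectrogram):

import math

def axis_speed_components(target_speed: float, angle_deg: float, kinematics: str = 'cartesian'):
    # Project a move at `angle_deg` and `target_speed` onto the two motor axes.
    # Cartesian/CoreXZ motors follow X/Y directly; CoreXY A/B motors act along
    # the 45/135 degree diagonals, hence the 1/sqrt(2) factor.
    a = math.radians(angle_deg)
    if kinematics in {'corexy', 'limited_corexy'}:
        speed_1 = abs(target_speed * (math.cos(a) + math.sin(a)) / math.sqrt(2))
        speed_2 = abs(target_speed * (math.cos(a) - math.sin(a)) / math.sqrt(2))
    else:  # cartesian, corexz and their 'limited_' variants
        speed_1 = abs(target_speed * math.cos(a))
        speed_2 = abs(target_speed * math.sin(a))
    return speed_1, speed_2

For example, a 100 mm/s move at 30 degrees on a CoreXY machine loads the motors unevenly: axis_speed_components(100, 30, 'corexy') gives roughly (96.6, 25.9) mm/s for A and B.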
-def compute_dir_speed_spectrogram( - measured_speeds: List[float], data: dict, kinematics: str = 'cartesian', measured_angles: Optional[List[int]] = None -) -> Tuple[np.ndarray, np.ndarray, np.ndarray]: - if measured_angles is None: - measured_angles = [0, 90] - - # We want to project the motor vibrations measured on their own axes on the [0, 360] range - spectrum_angles = np.linspace(0, 360, 720) # One point every 0.5 degrees - spectrum_speeds = np.linspace(min(measured_speeds), max(measured_speeds), len(measured_speeds) * 6) - spectrum_vibrations = np.zeros((len(spectrum_angles), len(spectrum_speeds))) - - def get_interpolated_vibrations(data: dict, speed: float, speeds: List[float]) -> float: - idx = np.clip(np.searchsorted(speeds, speed, side='left'), 1, len(speeds) - 1) - lower_speed = speeds[idx - 1] - upper_speed = speeds[idx] - lower_vibrations = data.get(lower_speed, 0) - upper_vibrations = data.get(upper_speed, 0) - return lower_vibrations + (speed - lower_speed) * (upper_vibrations - lower_vibrations) / ( - upper_speed - lower_speed - ) - - # Precompute trigonometric values and constant before the loop - angle_radians = np.deg2rad(spectrum_angles) - cos_vals = np.cos(angle_radians) - sin_vals = np.sin(angle_radians) - sqrt_2_inv = 1 / math.sqrt(2) - - # Compute the spectrum vibrations for each angle and speed combination - for target_angle_idx, (cos_val, sin_val) in enumerate(zip(cos_vals, sin_vals)): - for target_speed_idx, target_speed in enumerate(spectrum_speeds): - if kinematics in {'cartesian', 'limited_cartesian', 'corexz', 'limited_corexz'}: - speed_1 = np.abs(target_speed * cos_val) - speed_2 = np.abs(target_speed * sin_val) - elif kinematics in {'corexy', 'limited_corexy'}: - speed_1 = np.abs(target_speed * (cos_val + sin_val) * sqrt_2_inv) - speed_2 = np.abs(target_speed * (cos_val - sin_val) * sqrt_2_inv) - - vibrations_1 = get_interpolated_vibrations(data[measured_angles[0]], speed_1, measured_speeds) - vibrations_2 = get_interpolated_vibrations(data[measured_angles[1]], speed_2, measured_speeds) - spectrum_vibrations[target_angle_idx, target_speed_idx] = vibrations_1 + vibrations_2 - - return spectrum_angles, spectrum_speeds, spectrum_vibrations - - -def compute_angle_powers(spectrogram_data: np.ndarray) -> np.ndarray: - angles_powers = np.trapz(spectrogram_data, axis=1) - - # Since we want to plot it on a continuous polar plot later on, we need to append parts of - # the array to start and end of it to smooth transitions when doing the convolution - # and get the same value at modulo 360. 
Then we return the array without the extras - extended_angles_powers = np.concatenate([angles_powers[-9:], angles_powers, angles_powers[:9]]) - convolved_extended = np.convolve(extended_angles_powers, np.ones(15) / 15, mode='same') - - return convolved_extended[9:-9] - - -def compute_speed_powers(spectrogram_data: np.ndarray, smoothing_window: int = 15) -> np.ndarray: - min_values = np.amin(spectrogram_data, axis=0) - max_values = np.amax(spectrogram_data, axis=0) - var_values = np.var(spectrogram_data, axis=0) - - # rescale the variance to the same range as max_values to plot it on the same graph - var_values = var_values / var_values.max() * max_values.max() - - # Create a vibration metric that is the product of the max values and the variance to quantify the best - # speeds that have at the same time a low global energy level that is also consistent at every angles - vibration_metric = max_values * var_values - - # utility function to pad and smooth the data avoiding edge effects - conv_filter = np.ones(smoothing_window) / smoothing_window - window = int(smoothing_window / 2) - - def pad_and_smooth(data: np.ndarray) -> np.ndarray: - data_padded = np.pad(data, (window,), mode='edge') - smoothed_data = np.convolve(data_padded, conv_filter, mode='valid') - return smoothed_data - - # Stack the arrays and apply padding and smoothing in batch - data_arrays = np.stack([min_values, max_values, var_values, vibration_metric]) - smoothed_arrays = np.array([pad_and_smooth(data) for data in data_arrays]) - - return smoothed_arrays - - -# Function that filter and split the good_speed ranges. The goal is to remove some zones around -# additional detected small peaks in order to suppress them if there is a peak, even if it's low, -# that's probably due to a crossing in the motor resonance pattern that still need to be removed -def filter_and_split_ranges( - all_speeds: np.ndarray, good_speeds: List[Tuple[int, int, float]], peak_speed_indices: dict, deletion_range: int -) -> List[Tuple[int, int, float]]: - # Process each range to filter out and split based on peak indices - filtered_good_speeds = [] - for start, end, energy in good_speeds: - start_speed, end_speed = all_speeds[start], all_speeds[end] - # Identify peaks that intersect with the current speed range - intersecting_peaks_indices = [ - idx for speed, idx in peak_speed_indices.items() if start_speed <= speed <= end_speed - ] - - if not intersecting_peaks_indices: - filtered_good_speeds.append((start, end, energy)) - else: - intersecting_peaks_indices.sort() - current_start = start - - for peak_index in intersecting_peaks_indices: - before_peak_end = max(current_start, peak_index - deletion_range) - if current_start < before_peak_end: - filtered_good_speeds.append((current_start, before_peak_end, energy)) - current_start = peak_index + deletion_range + 1 - - if current_start < end: - filtered_good_speeds.append((current_start, end, energy)) - - # Sorting by start point once and then merge overlapping ranges - sorted_ranges = sorted(filtered_good_speeds, key=lambda x: x[0]) - merged_ranges = [sorted_ranges[0]] - - for current in sorted_ranges[1:]: - last_merged_start, last_merged_end, last_merged_energy = merged_ranges[-1] - if current[0] <= last_merged_end: - new_end = max(last_merged_end, current[1]) - new_energy = min(last_merged_energy, current[2]) - merged_ranges[-1] = (last_merged_start, new_end, new_energy) +class VibrationGraphComputation: + def __init__( + self, + measurements: List[Measurement], + kinematics: str, + accel: float, + 
max_freq: float, + st_version: str, + motors: List[str], + ): + self.measurements = measurements + self.kinematics = kinematics + self.accel = accel + self.max_freq = max_freq + self.st_version = st_version + self.motors = motors + + def compute(self): + if self.kinematics in {'cartesian', 'limited_cartesian', 'corexz', 'limited_corexz'}: + main_angles = [0, 90] + elif self.kinematics in {'corexy', 'limited_corexy'}: + main_angles = [45, 135] else: - merged_ranges.append(current) - - return merged_ranges - - -# This function allow the computation of a symmetry score that reflect the spectrogram apparent symmetry between -# measured axes on both the shape of the signal and the energy level consistency across both side of the signal -def compute_symmetry_analysis( - all_angles: np.ndarray, spectrogram_data: np.ndarray, measured_angles: Optional[List[int]] = None -) -> float: - if measured_angles is None: - measured_angles = [0, 90] - - total_spectrogram_angles = len(all_angles) - half_spectrogram_angles = total_spectrogram_angles // 2 - - # Extend the spectrogram by adding half to the beginning (in order to not get an out of bounds error later) - extended_spectrogram = np.concatenate((spectrogram_data[-half_spectrogram_angles:], spectrogram_data), axis=0) - - # Calculate the split index directly within the slicing - midpoint_angle = np.mean(measured_angles) - split_index = int(midpoint_angle * (total_spectrogram_angles / 360) + half_spectrogram_angles) - half_segment_length = half_spectrogram_angles // 2 - - # Slice out the two segments of the spectrogram and flatten them for comparison - segment_1_flattened = extended_spectrogram[split_index - half_segment_length : split_index].flatten() - segment_2_flattened = extended_spectrogram[split_index : split_index + half_segment_length].flatten() - - # Compute the correlation coefficient between the two segments of spectrogram - correlation = np.corrcoef(segment_1_flattened, segment_2_flattened)[0, 1] - percentage_correlation_biased = (100 * np.power(correlation, 0.75)) + 10 - - return np.clip(0, 100, percentage_correlation_biased) - - -###################################################################### -# Graphing -###################################################################### - - -def plot_angle_profile_polar( - ax: plt.Axes, - angles: np.ndarray, - angles_powers: np.ndarray, - low_energy_zones: List[Tuple[int, int, float]], - symmetry_factor: float, -) -> None: - angles_radians = np.deg2rad(angles) - - ax.set_title('Polar angle energy profile', fontsize=14, color=KLIPPAIN_COLORS['dark_orange'], weight='bold') - ax.set_theta_zero_location('E') - ax.set_theta_direction(1) - - ax.plot(angles_radians, angles_powers, color=KLIPPAIN_COLORS['purple'], zorder=5) - ax.fill(angles_radians, angles_powers, color=KLIPPAIN_COLORS['purple'], alpha=0.3) - ax.set_xlim([0, np.deg2rad(360)]) - ymax = angles_powers.max() * 1.05 - ax.set_ylim([0, ymax]) - ax.set_thetagrids([theta * 15 for theta in range(360 // 15)]) - - ax.text( - 0, - 0, - f'Symmetry: {symmetry_factor:.1f}%', - ha='center', - va='center', - color=KLIPPAIN_COLORS['red_pink'], - fontsize=12, - fontweight='bold', - zorder=6, - ) - - for _, (start, end, _) in enumerate(low_energy_zones): - ax.axvline( - angles_radians[start], - angles_powers[start] / ymax, - color=KLIPPAIN_COLORS['red_pink'], - linestyle='dotted', - linewidth=1.5, - ) - ax.axvline( - angles_radians[end], - angles_powers[end] / ymax, - color=KLIPPAIN_COLORS['red_pink'], - linestyle='dotted', - linewidth=1.5, - ) - 
ax.fill_between( - angles_radians[start:end], angles_powers[start:end], angles_powers.max() * 1.05, color='green', alpha=0.2 - ) + raise ValueError('Only Cartesian, CoreXY and CoreXZ kinematics are supported by this tool at the moment!') - ax.xaxis.set_minor_locator(matplotlib.ticker.AutoMinorLocator()) - ax.yaxis.set_minor_locator(matplotlib.ticker.AutoMinorLocator()) - ax.grid(which='major', color='grey') - ax.grid(which='minor', color='lightgrey') - - # Polar plot doesn't follow the gridspec margin, so we adjust it manually here - pos = ax.get_position() - new_pos = [pos.x0 - 0.01, pos.y0 - 0.01, pos.width, pos.height] - ax.set_position(new_pos) - - return - - -def plot_global_speed_profile( - ax: plt.Axes, - all_speeds: np.ndarray, - sp_min_energy: np.ndarray, - sp_max_energy: np.ndarray, - sp_variance_energy: np.ndarray, - vibration_metric: np.ndarray, - num_peaks: int, - peaks: np.ndarray, - low_energy_zones: List[Tuple[int, int, float]], -) -> None: - ax.set_title('Global speed energy profile', fontsize=14, color=KLIPPAIN_COLORS['dark_orange'], weight='bold') - ax.set_xlabel('Speed (mm/s)') - ax.set_ylabel('Energy') - ax2 = ax.twinx() - ax2.yaxis.set_visible(False) - - ax.plot(all_speeds, sp_min_energy, label='Minimum', color=KLIPPAIN_COLORS['dark_purple'], zorder=5) - ax.plot(all_speeds, sp_max_energy, label='Maximum', color=KLIPPAIN_COLORS['purple'], zorder=5) - ax.plot(all_speeds, sp_variance_energy, label='Variance', color=KLIPPAIN_COLORS['orange'], zorder=5, linestyle='--') - ax2.plot( - all_speeds, - vibration_metric, - label=f'Vibration metric ({num_peaks} bad peaks)', - color=KLIPPAIN_COLORS['red_pink'], - zorder=5, - ) - - ax.set_xlim([all_speeds.min(), all_speeds.max()]) - ax.set_ylim([0, sp_max_energy.max() * 1.15]) - - y2min = -(vibration_metric.max() * 0.025) - y2max = vibration_metric.max() * 1.07 - ax2.set_ylim([y2min, y2max]) - - if peaks is not None and len(peaks) > 0: - ax2.plot(all_speeds[peaks], vibration_metric[peaks], 'x', color='black', markersize=8, zorder=10) - for idx, peak in enumerate(peaks): - ax2.annotate( - f'{idx+1}', - (all_speeds[peak], vibration_metric[peak]), - textcoords='offset points', - xytext=(5, 5), - fontweight='bold', - ha='left', - fontsize=13, - color=KLIPPAIN_COLORS['red_pink'], - zorder=10, - ) + psds = {} + psds_sum = {} + target_freqs_initialized = False + target_freqs = None - for idx, (start, end, _) in enumerate(low_energy_zones): - # ax2.axvline(all_speeds[start], color=KLIPPAIN_COLORS['red_pink'], linestyle='dotted', linewidth=1.5, zorder=8) - # ax2.axvline(all_speeds[end], color=KLIPPAIN_COLORS['red_pink'], linestyle='dotted', linewidth=1.5, zorder=8) - ax2.fill_between( - all_speeds[start:end], - y2min, - vibration_metric[start:end], - color='green', - alpha=0.2, - label=f'Zone {idx+1}: {all_speeds[start]:.1f} to {all_speeds[end]:.1f} mm/s', - ) + helper = get_shaper_calibrate_module().ShaperCalibrate(printer=None) - ax.xaxis.set_minor_locator(matplotlib.ticker.AutoMinorLocator()) - ax.yaxis.set_minor_locator(matplotlib.ticker.AutoMinorLocator()) - ax.grid(which='major', color='grey') - ax.grid(which='minor', color='lightgrey') - - fontP = matplotlib.font_manager.FontProperties() - fontP.set_size('small') - ax.legend(loc='upper left', prop=fontP) - ax2.legend(loc='upper right', prop=fontP) - - return - - -def plot_angular_speed_profiles( - ax: plt.Axes, speeds: np.ndarray, angles: np.ndarray, spectrogram_data: np.ndarray, kinematics: str = 'cartesian' -) -> None: - ax.set_title('Angular speed energy profiles', fontsize=14, 
color=KLIPPAIN_COLORS['dark_orange'], weight='bold') - ax.set_xlabel('Speed (mm/s)') - ax.set_ylabel('Energy') - - # Define mappings for labels and colors to simplify plotting commands - angle_settings = { - 0: ('X (0 deg)', 'purple', 10), - 90: ('Y (90 deg)', 'dark_purple', 5), - 45: ('A (45 deg)' if kinematics in {'corexy', 'limited_corexy'} else '45 deg', 'orange', 10), - 135: ('B (135 deg)' if kinematics in {'corexy', 'limited_corexy'} else '135 deg', 'dark_orange', 5), - } - - # Plot each angle using settings from the dictionary - for angle, (label, color, zorder) in angle_settings.items(): - idx = np.searchsorted(angles, angle, side='left') - ax.plot(speeds, spectrogram_data[idx], label=label, color=KLIPPAIN_COLORS[color], zorder=zorder) - - ax.set_xlim([speeds.min(), speeds.max()]) - max_value = max(spectrogram_data[angle].max() for angle in {0, 45, 90, 135}) - ax.set_ylim([0, max_value * 1.1]) - - ax.xaxis.set_minor_locator(matplotlib.ticker.AutoMinorLocator()) - ax.yaxis.set_minor_locator(matplotlib.ticker.AutoMinorLocator()) - ax.grid(which='major', color='grey') - ax.grid(which='minor', color='lightgrey') - - fontP = matplotlib.font_manager.FontProperties() - fontP.set_size('small') - ax.legend(loc='upper right', prop=fontP) - - return - - -def plot_motor_profiles( - ax: plt.Axes, - freqs: np.ndarray, - main_angles: List[int], - motor_profiles: dict, - global_motor_profile: np.ndarray, - max_freq: float, -) -> None: - ax.set_title('Motor frequency profile', fontsize=14, color=KLIPPAIN_COLORS['dark_orange'], weight='bold') - ax.set_ylabel('Energy') - ax.set_xlabel('Frequency (Hz)') - - ax2 = ax.twinx() - ax2.yaxis.set_visible(False) - - # Global weighted average motor profile - ax.plot(freqs, global_motor_profile, label='Combined', color=KLIPPAIN_COLORS['purple'], zorder=5) - max_value = global_motor_profile.max() - - # Mapping of angles to axis names - angle_settings = {0: 'X', 90: 'Y', 45: 'A', 135: 'B'} - - # And then plot the motor profiles at each measured angles - for angle in main_angles: - profile_max = motor_profiles[angle].max() - if profile_max > max_value: - max_value = profile_max - label = f'{angle_settings[angle]} ({angle} deg)' if angle in angle_settings else f'{angle} deg' - ax.plot(freqs, motor_profiles[angle], linestyle='--', label=label, zorder=2) - - ax.set_xlim([0, max_freq]) - ax.set_ylim([0, max_value * 1.1]) - ax.ticklabel_format(axis='y', style='scientific', scilimits=(0, 0)) - - # Then add the motor resonance peak to the graph and print some infos about it - motor_fr, motor_zeta, motor_res_idx, lowfreq_max = compute_mechanical_parameters(global_motor_profile, freqs, 30) - if lowfreq_max: - ConsoleOutput.print( - '[WARNING] There are a lot of low frequency vibrations that can alter the readings. This is probably due to the test being performed at too high an acceleration!' - ) - ConsoleOutput.print( - 'Try lowering the ACCEL value and/or increasing the SIZE value before restarting the macro to ensure that only constant speeds are being recorded and that the dynamic behavior of the machine is not affecting the measurements' - ) - if motor_zeta is not None: - ConsoleOutput.print( - f'Motors have a main resonant frequency at {motor_fr:.1f}Hz with an estimated damping ratio of {motor_zeta:.3f}' - ) - else: - ConsoleOutput.print( - f'Motors have a main resonant frequency at {motor_fr:.1f}Hz but it was impossible to estimate a damping ratio.' 
- ) + for measurement in self.measurements: + data = np.array(measurement['samples']) + if data is None: + continue # Measurement data is not in the expected format or is empty, skip it - ax.plot(freqs[motor_res_idx], global_motor_profile[motor_res_idx], 'x', color='black', markersize=10) - ax.annotate( - 'R', - (freqs[motor_res_idx], global_motor_profile[motor_res_idx]), - textcoords='offset points', - xytext=(15, 5), - ha='right', - fontsize=14, - color=KLIPPAIN_COLORS['red_pink'], - weight='bold', - ) - - ax2.plot([], [], ' ', label=f'Motor resonant frequency (ω0): {motor_fr:.1f}Hz') - if motor_zeta is not None: - ax2.plot([], [], ' ', label=f'Motor damping ratio (ζ): {motor_zeta:.3f}') - else: - ax2.plot([], [], ' ', label='No damping ratio computed') - - ax.xaxis.set_minor_locator(matplotlib.ticker.AutoMinorLocator()) - ax.yaxis.set_minor_locator(matplotlib.ticker.AutoMinorLocator()) - ax.grid(which='major', color='grey') - ax.grid(which='minor', color='lightgrey') - - fontP = matplotlib.font_manager.FontProperties() - fontP.set_size('small') - ax.legend(loc='upper left', prop=fontP) - ax2.legend(loc='upper right', prop=fontP) - - return - - -def plot_vibration_spectrogram_polar( - ax: plt.Axes, angles: np.ndarray, speeds: np.ndarray, spectrogram_data: np.ndarray -) -> None: - angles_radians = np.radians(angles) - - # Assuming speeds defines the radial distance from the center, we need to create a meshgrid - # for both angles and speeds to map the spectrogram data onto a polar plot correctly - radius, theta = np.meshgrid(speeds, angles_radians) - - ax.set_title( - 'Polar vibrations heatmap', fontsize=14, color=KLIPPAIN_COLORS['dark_orange'], weight='bold', va='bottom' - ) - ax.set_theta_zero_location('E') - ax.set_theta_direction(1) - - ax.pcolormesh(theta, radius, spectrogram_data, norm=matplotlib.colors.LogNorm(), cmap='inferno', shading='auto') - ax.set_thetagrids([theta * 15 for theta in range(360 // 15)]) - ax.tick_params(axis='y', which='both', colors='white', labelsize='medium') - ax.set_ylim([0, max(speeds)]) - - # Polar plot doesn't follow the gridspec margin, so we adjust it manually here - pos = ax.get_position() - new_pos = [pos.x0 - 0.01, pos.y0 - 0.01, pos.width, pos.height] - ax.set_position(new_pos) - - return - - -def plot_vibration_spectrogram( - ax: plt.Axes, angles: np.ndarray, speeds: np.ndarray, spectrogram_data: np.ndarray, peaks: np.ndarray -) -> None: - ax.set_title('Vibrations heatmap', fontsize=14, color=KLIPPAIN_COLORS['dark_orange'], weight='bold') - ax.set_xlabel('Speed (mm/s)') - ax.set_ylabel('Angle (deg)') - - ax.imshow( - spectrogram_data, - norm=matplotlib.colors.LogNorm(), - cmap='inferno', - aspect='auto', - extent=[speeds[0], speeds[-1], angles[0], angles[-1]], - origin='lower', - interpolation='antialiased', - ) - - # Add peaks lines in the spectrogram to get hint from peaks found in the first graph - if peaks is not None and len(peaks) > 0: - for idx, peak in enumerate(peaks): - ax.axvline(speeds[peak], color='cyan', linewidth=0.75) - ax.annotate( - f'Peak {idx+1}', - (speeds[peak], angles[-1] * 0.9), - textcoords='data', - color='cyan', - rotation=90, - fontsize=10, - verticalalignment='top', - horizontalalignment='right', - ) + angle, speed = self._extract_angle_and_speed(measurement['name']) + freq_response = helper.process_accelerometer_data(data) + first_freqs = freq_response.freq_bins + psd_sum = freq_response.psd_sum + + if not target_freqs_initialized: + target_freqs = first_freqs[first_freqs <= self.max_freq] + target_freqs_initialized 
= True - return + psd_sum = psd_sum[first_freqs <= self.max_freq] + first_freqs = first_freqs[first_freqs <= self.max_freq] + # Initialize the angle dictionary if it doesn't exist + if angle not in psds: + psds[angle] = {} + psds_sum[angle] = {} -def plot_motor_config_txt(fig: plt.Figure, motors: List[MotorsConfigParser], differences: Optional[str]) -> None: - motor_details = [(motors[0], 'X motor'), (motors[1], 'Y motor')] + # Store the interpolated PSD and integral values + psds[angle][speed] = np.interp(target_freqs, first_freqs, psd_sum) + psds_sum[angle][speed] = np.trapz(psd_sum, first_freqs) - distance = 0.12 - if motors[0].get_config('autotune_enabled'): - distance = 0.27 - config_blocks = [ - f"| {lbl}: {mot.get_config('motor').upper()} on {mot.get_config('tmc').upper()} @ {mot.get_config('voltage'):0.1f}V {mot.get_config('run_current'):0.2f}A - {mot.get_config('microsteps')}usteps" - for mot, lbl in motor_details - ] - config_blocks.append( - f'| TMC Autotune enabled (PWM freq target: X={int(motors[0].get_config("pwm_freq_target")/1000)}kHz / Y={int(motors[1].get_config("pwm_freq_target")/1000)}kHz)' + measured_angles = sorted(psds_sum.keys()) + measured_speeds = sorted({speed for angle_speeds in psds_sum.values() for speed in angle_speeds.keys()}) + + for main_angle in main_angles: + if main_angle not in measured_angles: + raise ValueError('Measurements not taken at the correct angles for the specified kinematics!') + + # Precompute the variables used in plot functions + all_angles, all_speeds, spectrogram_data = self._compute_dir_speed_spectrogram( + measured_speeds, psds_sum, self.kinematics, main_angles ) - else: - config_blocks = [ - f"| {lbl}: {mot.get_config('tmc').upper()} @ {mot.get_config('run_current'):0.2f}A - {mot.get_config('microsteps')}usteps" - for mot, lbl in motor_details - ] - config_blocks.append('| TMC Autotune not detected') - - for idx, block in enumerate(config_blocks): - fig.text( - 0.41, 0.990 - 0.015 * idx, block, ha='left', va='top', fontsize=10, color=KLIPPAIN_COLORS['dark_purple'] + all_angles_energy = self._compute_angle_powers(spectrogram_data) + sp_min_energy, sp_max_energy, sp_variance_energy, vibration_metric = self._compute_speed_powers( + spectrogram_data ) - - tmc_registers = motors[0].get_registers() - idx = -1 - for idx, (register, settings) in enumerate(tmc_registers.items()): - settings_str = ' '.join(f'{k}={v}' for k, v in settings.items()) - tmc_block = f'| {register.upper()}: {settings_str}' - fig.text( - 0.41 + distance, - 0.990 - 0.015 * idx, - tmc_block, - ha='left', - va='top', - fontsize=10, - color=KLIPPAIN_COLORS['dark_purple'], + motor_profiles, global_motor_profile = self._compute_motor_profiles( + target_freqs, psds, all_angles_energy, main_angles ) - if differences is not None: - differences_text = f'| Y motor diff: {differences}' - fig.text( - 0.41 + distance, - 0.990 - 0.015 * (idx + 1), - differences_text, - ha='left', - va='top', - fontsize=10, - color=KLIPPAIN_COLORS['dark_purple'], + # symmetry_factor = compute_symmetry_analysis(all_angles, all_angles_energy) + symmetry_factor = self._compute_symmetry_analysis(all_angles, spectrogram_data, main_angles) + ConsoleOutput.print(f'Machine estimated vibration symmetry: {symmetry_factor:.1f}%') + + # Analyze low variance ranges of vibration energy across all angles for each speed to identify clean speeds + # and highlight them. 
Also find the peaks to identify speeds to avoid due to high resonances + num_peaks, vibration_peaks, peaks_speeds = detect_peaks( + vibration_metric, + all_speeds, + PEAKS_DETECTION_THRESHOLD * vibration_metric.max(), + PEAKS_RELATIVE_HEIGHT_THRESHOLD, + 10, + 10, + ) + formated_peaks_speeds = ['{:.1f}'.format(pspeed) for pspeed in peaks_speeds] + ConsoleOutput.print( + f"Vibrations peaks detected: {num_peaks} @ {', '.join(map(str, formated_peaks_speeds))} mm/s (avoid setting a speed near these values in your slicer print profile)" ) - -###################################################################### -# Startup and main routines -###################################################################### - - -def extract_angle_and_speed(logname: str) -> Tuple[float, float]: - try: - match = re.search(r'an(\d+)_\d+sp(\d+)_\d+', os.path.basename(logname)) - if match: - angle = match.group(1) - speed = match.group(2) + good_speeds = identify_low_energy_zones(vibration_metric, SPEEDS_VALLEY_DETECTION_THRESHOLD) + if good_speeds is not None: + deletion_range = int(SPEEDS_AROUND_PEAK_DELETION / (all_speeds[1] - all_speeds[0])) + peak_speed_indices = {pspeed: np.where(all_speeds == pspeed)[0][0] for pspeed in set(peaks_speeds)} + + # Filter and split ranges based on peak indices, avoiding overlaps + good_speeds = self._filter_and_split_ranges(all_speeds, good_speeds, peak_speed_indices, deletion_range) + + # Add some logging about the good speeds found + ConsoleOutput.print(f'Lowest vibrations speeds ({len(good_speeds)} ranges sorted from best to worse):') + for idx, (start, end, _) in enumerate(good_speeds): + ConsoleOutput.print(f'{idx+1}: {all_speeds[start]:.1f} to {all_speeds[end]:.1f} mm/s') + + # Angle low energy valleys identification (good angles ranges) and print them to the console + good_angles = identify_low_energy_zones(all_angles_energy, ANGLES_VALLEY_DETECTION_THRESHOLD) + if good_angles is not None: + ConsoleOutput.print(f'Lowest vibrations angles ({len(good_angles)} ranges sorted from best to worse):') + for idx, (start, end, energy) in enumerate(good_angles): + ConsoleOutput.print( + f'{idx+1}: {all_angles[start]:.1f}° to {all_angles[end]:.1f}° (mean vibrations energy: {energy:.2f}% of max)' + ) + + # Motors infos and config differences check + if self.motors is not None and len(self.motors) == 2: + motors_config_differences = self.motors[0].compare_to(self.motors[1]) + if motors_config_differences is not None and self.kinematics in {'corexy', 'limited_corexy'}: + ConsoleOutput.print(f'Warning: motors have different TMC configurations!\n{motors_config_differences}') else: - raise ValueError(f'File {logname} does not match expected format. Clean your /tmp folder and start again!') - except AttributeError as err: - raise ValueError( - f'File {logname} does not match expected format. Clean your /tmp folder and start again!' 
- ) from err - return float(angle), float(speed) - - -def vibrations_profile( - measurements: List[Measurement], - klipperdir: str = '~/klipper', - kinematics: str = 'cartesian', - accel: Optional[float] = None, - max_freq: float = 1000.0, - st_version: Optional[str] = None, - motors: Optional[List[MotorsConfigParser]] = None, -) -> plt.Figure: - global shaper_calibrate - shaper_calibrate = setup_klipper_import(klipperdir) - - if kinematics in {'cartesian', 'limited_cartesian', 'corexz', 'limited_corexz'}: - main_angles = [0, 90] - elif kinematics in {'corexy', 'limited_corexy'}: - main_angles = [45, 135] - else: - raise ValueError('Only Cartesian, CoreXY and CoreXZ kinematics are supported by this tool at the moment!') - - psds = {} - psds_sum = {} - target_freqs_initialized = False - target_freqs = None - - for measurement in measurements: - data = np.array(measurement['samples']) - if data is None: - continue # Measurement data is not in the expected format or is empty, skip it - - angle, speed = extract_angle_and_speed(measurement['name']) - freq_response = calc_freq_response(data) - first_freqs = freq_response.freq_bins - psd_sum = freq_response.psd_sum - - if not target_freqs_initialized: - target_freqs = first_freqs[first_freqs <= max_freq] - target_freqs_initialized = True - - psd_sum = psd_sum[first_freqs <= max_freq] - first_freqs = first_freqs[first_freqs <= max_freq] - - # Initialize the angle dictionary if it doesn't exist - if angle not in psds: - psds[angle] = {} - psds_sum[angle] = {} - - # Store the interpolated PSD and integral values - psds[angle][speed] = np.interp(target_freqs, first_freqs, psd_sum) - psds_sum[angle][speed] = np.trapz(psd_sum, first_freqs) - - measured_angles = sorted(psds_sum.keys()) - measured_speeds = sorted({speed for angle_speeds in psds_sum.values() for speed in angle_speeds.keys()}) - - for main_angle in main_angles: - if main_angle not in measured_angles: - raise ValueError('Measurements not taken at the correct angles for the specified kinematics!') - - # Precompute the variables used in plot functions - all_angles, all_speeds, spectrogram_data = compute_dir_speed_spectrogram( - measured_speeds, psds_sum, kinematics, main_angles - ) - all_angles_energy = compute_angle_powers(spectrogram_data) - sp_min_energy, sp_max_energy, sp_variance_energy, vibration_metric = compute_speed_powers(spectrogram_data) - motor_profiles, global_motor_profile = compute_motor_profiles(target_freqs, psds, all_angles_energy, main_angles) - - # symmetry_factor = compute_symmetry_analysis(all_angles, all_angles_energy) - symmetry_factor = compute_symmetry_analysis(all_angles, spectrogram_data, main_angles) - ConsoleOutput.print(f'Machine estimated vibration symmetry: {symmetry_factor:.1f}%') - - # Analyze low variance ranges of vibration energy across all angles for each speed to identify clean speeds - # and highlight them. 
Also find the peaks to identify speeds to avoid due to high resonances - num_peaks, vibration_peaks, peaks_speeds = detect_peaks( - vibration_metric, - all_speeds, - PEAKS_DETECTION_THRESHOLD * vibration_metric.max(), - PEAKS_RELATIVE_HEIGHT_THRESHOLD, - 10, - 10, - ) - formated_peaks_speeds = ['{:.1f}'.format(pspeed) for pspeed in peaks_speeds] - ConsoleOutput.print( - f"Vibrations peaks detected: {num_peaks} @ {', '.join(map(str, formated_peaks_speeds))} mm/s (avoid setting a speed near these values in your slicer print profile)" - ) - - good_speeds = identify_low_energy_zones(vibration_metric, SPEEDS_VALLEY_DETECTION_THRESHOLD) - if good_speeds is not None: - deletion_range = int(SPEEDS_AROUND_PEAK_DELETION / (all_speeds[1] - all_speeds[0])) - peak_speed_indices = {pspeed: np.where(all_speeds == pspeed)[0][0] for pspeed in set(peaks_speeds)} - - # Filter and split ranges based on peak indices, avoiding overlaps - good_speeds = filter_and_split_ranges(all_speeds, good_speeds, peak_speed_indices, deletion_range) - - # Add some logging about the good speeds found - ConsoleOutput.print(f'Lowest vibrations speeds ({len(good_speeds)} ranges sorted from best to worse):') - for idx, (start, end, _) in enumerate(good_speeds): - ConsoleOutput.print(f'{idx+1}: {all_speeds[start]:.1f} to {all_speeds[end]:.1f} mm/s') - - # Angle low energy valleys identification (good angles ranges) and print them to the console - good_angles = identify_low_energy_zones(all_angles_energy, ANGLES_VALLEY_DETECTION_THRESHOLD) - if good_angles is not None: - ConsoleOutput.print(f'Lowest vibrations angles ({len(good_angles)} ranges sorted from best to worse):') - for idx, (start, end, energy) in enumerate(good_angles): - ConsoleOutput.print( - f'{idx+1}: {all_angles[start]:.1f}° to {all_angles[end]:.1f}° (mean vibrations energy: {energy:.2f}% of max)' + motors_config_differences = None + + computation_results = { + 'measurements': self.measurements, + 'all_speeds': all_speeds, + 'all_angles': all_angles, + 'all_angles_energy': all_angles_energy, + 'good_speeds': good_speeds, + 'good_angles': good_angles, + 'kinematics': self.kinematics, + 'accel': self.accel, + 'motors': self.motors, + 'motors_config_differences': motors_config_differences, + 'symmetry_factor': symmetry_factor, + 'spectrogram_data': spectrogram_data, + 'sp_min_energy': sp_min_energy, + 'sp_max_energy': sp_max_energy, + 'sp_variance_energy': sp_variance_energy, + 'vibration_metric': vibration_metric, + 'motor_profiles': motor_profiles, + 'global_motor_profile': global_motor_profile, + 'num_peaks': num_peaks, + 'vibration_peaks': vibration_peaks, + 'target_freqs': target_freqs, + 'main_angles': main_angles, + 'max_freq': self.max_freq, + 'st_version': self.st_version, + } + + return computation_results + + # Calculate motor frequency profiles based on the measured Power Spectral Density (PSD) measurements for the machine kinematics + # main angles and then create a global motor profile as a weighted average (from their own vibrations) of all calculated profiles + def _compute_motor_profiles( + self, + freqs: np.ndarray, + psds: dict, + all_angles_energy: dict, + measured_angles: Optional[List[int]] = None, + energy_amplification_factor: int = 2, + ) -> Tuple[dict, np.ndarray]: + if measured_angles is None: + measured_angles = [0, 90] + + motor_profiles = {} + weighted_sum_profiles = np.zeros_like(freqs) + total_weight = 0 + conv_filter = np.ones(20) / 20 + + # Creating the PSD motor profiles for each angles + for angle in measured_angles: + # Calculate the 
sum of PSDs for the current angle and then convolve + sum_curve = np.sum(np.array([psds[angle][speed] for speed in psds[angle]]), axis=0) + motor_profiles[angle] = np.convolve(sum_curve / len(psds[angle]), conv_filter, mode='same') + + # Calculate weights + angle_energy = ( + all_angles_energy[angle] ** energy_amplification_factor + ) # First weighting factor is based on the total vibrations of the machine at the specified angle + curve_area = ( + np.trapz(motor_profiles[angle], freqs) ** energy_amplification_factor + ) # Additional weighting factor is based on the area under the current motor profile at this specified angle + total_angle_weight = angle_energy * curve_area + + # Update weighted sum profiles to get the global motor profile + weighted_sum_profiles += motor_profiles[angle] * total_angle_weight + total_weight += total_angle_weight + + # Creating a global average motor profile that is the weighted average of all the PSD motor profiles + global_motor_profile = weighted_sum_profiles / total_weight if total_weight != 0 else weighted_sum_profiles + + return motor_profiles, global_motor_profile + + # Since it was discovered that there is no non-linear mixing in the stepper "steps" vibrations, instead of measuring + # the effects of each speeds at each angles, this function simplify it by using only the main motors axes (X/Y for Cartesian + # printers and A/B for CoreXY) measurements and project each points on the [0,360] degrees range using trigonometry + # to "sum" the vibration impact of each axis at every points of the generated spectrogram. The result is very similar at the end. + def _compute_dir_speed_spectrogram( + self, + measured_speeds: List[float], + data: dict, + kinematics: str = 'cartesian', + measured_angles: Optional[List[int]] = None, + ) -> Tuple[np.ndarray, np.ndarray, np.ndarray]: + if measured_angles is None: + measured_angles = [0, 90] + + # We want to project the motor vibrations measured on their own axes on the [0, 360] range + spectrum_angles = np.linspace(0, 360, 720) # One point every 0.5 degrees + spectrum_speeds = np.linspace(min(measured_speeds), max(measured_speeds), len(measured_speeds) * 6) + spectrum_vibrations = np.zeros((len(spectrum_angles), len(spectrum_speeds))) + + def get_interpolated_vibrations(data: dict, speed: float, speeds: List[float]) -> float: + idx = np.clip(np.searchsorted(speeds, speed, side='left'), 1, len(speeds) - 1) + lower_speed = speeds[idx - 1] + upper_speed = speeds[idx] + lower_vibrations = data.get(lower_speed, 0) + upper_vibrations = data.get(upper_speed, 0) + return lower_vibrations + (speed - lower_speed) * (upper_vibrations - lower_vibrations) / ( + upper_speed - lower_speed ) - # Create graph layout - fig, ((ax1, ax2, ax3), (ax4, ax5, ax6)) = plt.subplots( - 2, - 3, - gridspec_kw={ - 'height_ratios': [1, 1], - 'width_ratios': [4, 8, 6], - 'bottom': 0.050, - 'top': 0.890, - 'left': 0.040, - 'right': 0.985, - 'hspace': 0.166, - 'wspace': 0.138, - }, - ) - - # Transform ax3 and ax4 to polar plots - ax1.remove() - ax1 = fig.add_subplot(2, 3, 1, projection='polar') - ax4.remove() - ax4 = fig.add_subplot(2, 3, 4, projection='polar') - - # Set the global .png figure size - fig.set_size_inches(20, 11.5) - - # Add title - title_line1 = 'MACHINE VIBRATIONS ANALYSIS TOOL' - fig.text( - 0.060, 0.965, title_line1, ha='left', va='bottom', fontsize=20, color=KLIPPAIN_COLORS['purple'], weight='bold' - ) - try: - filename_parts = measurements[0]['name'].split('_') - dt = datetime.strptime(f"{filename_parts[4]} 
{filename_parts[5].split('-')[0]}", '%Y%m%d %H%M%S') - title_line2 = dt.strftime('%x %X') - if accel is not None: - title_line2 += ' at ' + str(accel) + ' mm/s² -- ' + kinematics.upper() + ' kinematics' - except Exception: - ConsoleOutput.print( - f'Warning: measurement names look to be different than expected ({measurements[0]["name"]})' - ) - title_line2 = measurements[0]['name'] - fig.text(0.060, 0.957, title_line2, ha='left', va='top', fontsize=16, color=KLIPPAIN_COLORS['dark_purple']) - - # Add the motors infos to the top of the graph - if motors is not None and len(motors) == 2: - differences = motors[0].compare_to(motors[1]) - plot_motor_config_txt(fig, motors, differences) - if differences is not None and kinematics in {'corexy', 'limited_corexy'}: - ConsoleOutput.print(f'Warning: motors have different TMC configurations!\n{differences}') - - # Plot the graphs - plot_angle_profile_polar(ax1, all_angles, all_angles_energy, good_angles, symmetry_factor) - plot_vibration_spectrogram_polar(ax4, all_angles, all_speeds, spectrogram_data) - - plot_global_speed_profile( - ax2, - all_speeds, - sp_min_energy, - sp_max_energy, - sp_variance_energy, - vibration_metric, - num_peaks, - vibration_peaks, - good_speeds, - ) - plot_angular_speed_profiles(ax3, all_speeds, all_angles, spectrogram_data, kinematics) - plot_vibration_spectrogram(ax5, all_angles, all_speeds, spectrogram_data, vibration_peaks) - - plot_motor_profiles(ax6, target_freqs, main_angles, motor_profiles, global_motor_profile, max_freq) - - # Adding a small Klippain logo to the top left corner of the figure - ax_logo = fig.add_axes([0.001, 0.924, 0.075, 0.075], anchor='NW') - ax_logo.imshow(plt.imread(os.path.join(os.path.dirname(os.path.abspath(__file__)), 'klippain.png'))) - ax_logo.axis('off') - - # Adding Shake&Tune version in the top right corner - if st_version != 'unknown': - fig.text(0.995, 0.985, st_version, ha='right', va='bottom', fontsize=8, color=KLIPPAIN_COLORS['purple']) - - return fig - - -def main(): - # Parse command-line arguments - usage = '%prog [options] ' - opts = optparse.OptionParser(usage) - opts.add_option('-o', '--output', type='string', dest='output', default=None, help='filename of output graph') - opts.add_option( - '-c', '--accel', type='int', dest='accel', default=None, help='accel value to be printed on the graph' - ) - opts.add_option('-f', '--max_freq', type='float', default=1000.0, help='maximum frequency to graph') - opts.add_option( - '-k', '--klipper_dir', type='string', dest='klipperdir', default='~/klipper', help='main klipper directory' - ) - opts.add_option( - '-m', - '--kinematics', - type='string', - dest='kinematics', - default='cartesian', - help='machine kinematics configuration', - ) - options, args = opts.parse_args() - if len(args) < 1: - opts.error('No measurements to analyse') - if options.output is None: - opts.error('You must specify an output file.png to use the script (option -o)') - if options.kinematics not in { - 'cartesian', - 'limited_cartesian', - 'corexy', - 'limited_corexy', - 'corexz', - 'limited_corexz', - }: - opts.error( - 'Only cartesian, limited_cartesian, corexy, limited_corexy, corexz, limited_corexz kinematics are supported by this tool at the moment!' 
- ) - - measurements_manager = MeasurementsManager(10) - if args[0].endswith('.csv'): - measurements_manager.load_from_csvs(args) - elif args[0].endswith('.stdata'): - measurements_manager.load_from_stdata(args[0]) - else: - raise ValueError('Only .stdata or legacy Klipper raw accelerometer CSV files are supported!') - - fig = vibrations_profile( - measurements_manager.get_measurements(), - options.klipperdir, - options.kinematics, - options.accel, - options.max_freq, - ) - fig.savefig(options.output, dpi=150) - - -if __name__ == '__main__': - main() + # Precompute trigonometric values and constant before the loop + angle_radians = np.deg2rad(spectrum_angles) + cos_vals = np.cos(angle_radians) + sin_vals = np.sin(angle_radians) + sqrt_2_inv = 1 / math.sqrt(2) + + # Compute the spectrum vibrations for each angle and speed combination + for target_angle_idx, (cos_val, sin_val) in enumerate(zip(cos_vals, sin_vals)): + for target_speed_idx, target_speed in enumerate(spectrum_speeds): + if kinematics in {'cartesian', 'limited_cartesian', 'corexz', 'limited_corexz'}: + speed_1 = np.abs(target_speed * cos_val) + speed_2 = np.abs(target_speed * sin_val) + elif kinematics in {'corexy', 'limited_corexy'}: + speed_1 = np.abs(target_speed * (cos_val + sin_val) * sqrt_2_inv) + speed_2 = np.abs(target_speed * (cos_val - sin_val) * sqrt_2_inv) + + vibrations_1 = get_interpolated_vibrations(data[measured_angles[0]], speed_1, measured_speeds) + vibrations_2 = get_interpolated_vibrations(data[measured_angles[1]], speed_2, measured_speeds) + spectrum_vibrations[target_angle_idx, target_speed_idx] = vibrations_1 + vibrations_2 + + return spectrum_angles, spectrum_speeds, spectrum_vibrations + + def _compute_angle_powers(self, spectrogram_data: np.ndarray) -> np.ndarray: + angles_powers = np.trapz(spectrogram_data, axis=1) + + # Since we want to plot it on a continuous polar plot later on, we need to append parts of + # the array to start and end of it to smooth transitions when doing the convolution + # and get the same value at modulo 360. 
+        # Then we return the array without the extra points
+        extended_angles_powers = np.concatenate([angles_powers[-9:], angles_powers, angles_powers[:9]])
+        convolved_extended = np.convolve(extended_angles_powers, np.ones(15) / 15, mode='same')
+
+        return convolved_extended[9:-9]
+
+    def _compute_speed_powers(self, spectrogram_data: np.ndarray, smoothing_window: int = 15) -> np.ndarray:
+        min_values = np.amin(spectrogram_data, axis=0)
+        max_values = np.amax(spectrogram_data, axis=0)
+        var_values = np.var(spectrogram_data, axis=0)
+
+        # Rescale the variance to the same range as max_values to plot them on the same graph
+        var_values = var_values / var_values.max() * max_values.max()
+
+        # Create a vibration metric as the product of the max values and the variance to quantify the best
+        # speeds: those that combine a low global energy level with a level that stays consistent across all angles
+        vibration_metric = max_values * var_values
+
+        # Utility function to pad and smooth the data while avoiding edge effects
+        conv_filter = np.ones(smoothing_window) / smoothing_window
+        window = int(smoothing_window / 2)
+
+        def pad_and_smooth(data: np.ndarray) -> np.ndarray:
+            data_padded = np.pad(data, (window,), mode='edge')
+            smoothed_data = np.convolve(data_padded, conv_filter, mode='valid')
+            return smoothed_data
+
+        # Stack the arrays and apply padding and smoothing in batch
+        data_arrays = np.stack([min_values, max_values, var_values, vibration_metric])
+        smoothed_arrays = np.array([pad_and_smooth(data) for data in data_arrays])
+
+        return smoothed_arrays
+
+    # Filter and split the good_speeds ranges. The goal is to remove the zones around additionally detected
+    # small peaks in order to suppress them: even a low peak is probably due to a crossing in the motor
+    # resonance pattern and still needs to be removed
+    def _filter_and_split_ranges(
+        self,
+        all_speeds: np.ndarray,
+        good_speeds: List[Tuple[int, int, float]],
+        peak_speed_indices: dict,
+        deletion_range: int,
+    ) -> List[Tuple[int, int, float]]:
+        # Process each range to filter out and split based on peak indices
+        filtered_good_speeds = []
+        for start, end, energy in good_speeds:
+            start_speed, end_speed = all_speeds[start], all_speeds[end]
+            # Identify peaks that intersect with the current speed range
+            intersecting_peaks_indices = [
+                idx for speed, idx in peak_speed_indices.items() if start_speed <= speed <= end_speed
+            ]
+
+            if not intersecting_peaks_indices:
+                filtered_good_speeds.append((start, end, energy))
+            else:
+                intersecting_peaks_indices.sort()
+                current_start = start
+
+                for peak_index in intersecting_peaks_indices:
+                    before_peak_end = max(current_start, peak_index - deletion_range)
+                    if current_start < before_peak_end:
+                        filtered_good_speeds.append((current_start, before_peak_end, energy))
+                    current_start = peak_index + deletion_range + 1
+
+                if current_start < end:
+                    filtered_good_speeds.append((current_start, end, energy))
+
+        # Sort by start point once, then merge overlapping ranges
+        sorted_ranges = sorted(filtered_good_speeds, key=lambda x: x[0])
+        merged_ranges = [sorted_ranges[0]]
+
+        for current in sorted_ranges[1:]:
+            last_merged_start, last_merged_end, last_merged_energy = merged_ranges[-1]
+            if current[0] <= last_merged_end:
+                new_end = max(last_merged_end, current[1])
+                new_energy = min(last_merged_energy, current[2])
+                merged_ranges[-1] = (last_merged_start, new_end, new_energy)
+            else:
+                merged_ranges.append(current)
+
+        return merged_ranges
+
+    # Compute a symmetry score that reflects the apparent symmetry of the spectrogram between the measured
+    # axes, based on both the shape of the signal and the consistency of the energy level on both sides of it
+    def _compute_symmetry_analysis(
+        self, all_angles: np.ndarray, spectrogram_data: np.ndarray, measured_angles: Optional[List[int]] = None
+    ) -> float:
+        if measured_angles is None:
+            measured_angles = [0, 90]
+
+        total_spectrogram_angles = len(all_angles)
+        half_spectrogram_angles = total_spectrogram_angles // 2
+
+        # Extend the spectrogram by prepending its last half (to avoid going out of bounds later)
+        extended_spectrogram = np.concatenate((spectrogram_data[-half_spectrogram_angles:], spectrogram_data), axis=0)
+
+        # Calculate the split index at the midpoint angle between the two measured axes
+        midpoint_angle = np.mean(measured_angles)
+        split_index = int(midpoint_angle * (total_spectrogram_angles / 360) + half_spectrogram_angles)
+        half_segment_length = half_spectrogram_angles // 2
+
+        # Slice out the two segments of the spectrogram and flatten them for comparison
+        segment_1_flattened = extended_spectrogram[split_index - half_segment_length : split_index].flatten()
+        segment_2_flattened = extended_spectrogram[split_index : split_index + half_segment_length].flatten()
+
+        # Compute the correlation coefficient between the two segments of the spectrogram
+        correlation = np.corrcoef(segment_1_flattened, segment_2_flattened)[0, 1]
+        percentage_correlation_biased = (100 * np.power(correlation, 0.75)) + 10
+
+        return np.clip(percentage_correlation_biased, 0, 100)
+
+    # Extract the angle and speed of the tested movement from the measurement name
+    def _extract_angle_and_speed(self, logname: str) -> Tuple[float, float]:
+        try:
+            match = re.search(r'an(\d+)_\d+sp(\d+)_\d+', os.path.basename(logname))
+            if match:
+                angle = match.group(1)
+                speed = match.group(2)
+            else:
+                raise ValueError(
+                    f'File {logname} does not match expected format. Clean your /tmp folder and start again!'
+                )
+        except AttributeError as err:
+            raise ValueError(
+                f'File {logname} does not match expected format. Clean your /tmp folder and start again!'
+            ) from err
+        return float(angle), float(speed)
diff --git a/shaketune/helpers/common_func.py b/shaketune/helpers/common_func.py
index 3e69a45..79feadc 100644
--- a/shaketune/helpers/common_func.py
+++ b/shaketune/helpers/common_func.py
@@ -28,6 +28,7 @@
 ]
 
 
+# TODO: remove this function when the refactoring is finished
 def setup_klipper_import(kdir):
     kdir = os.path.expanduser(kdir)
     sys.path.append(os.path.join(kdir, 'klippy'))
diff --git a/shaketune/shaketune.py b/shaketune/shaketune.py
index 23eaef5..70bcaad 100644
--- a/shaketune/shaketune.py
+++ b/shaketune/shaketune.py
@@ -28,6 +28,7 @@
 DEFAULT_FOLDER = '~/printer_data/config/ShakeTune_results'
 DEFAULT_NUMBER_OF_RESULTS = 10
 DEFAULT_KEEP_RAW_DATA = False
+DEFAULT_MAX_FREQ = 200.0
 DEFAULT_DPI = 150
 DEFAULT_TIMEOUT = 600
 DEFAULT_SHOW_MACROS = True
@@ -75,9 +76,12 @@ def _initialize_config(self, config) -> None:
         result_folder_path = Path(result_folder).expanduser() if result_folder else None
         keep_n_results = config.getint('number_of_results_to_keep', default=DEFAULT_NUMBER_OF_RESULTS, minval=0)
         keep_raw_data = config.getboolean('keep_raw_data', default=DEFAULT_KEEP_RAW_DATA)
+        max_freq = config.getfloat('max_freq', default=DEFAULT_MAX_FREQ, minval=100.0)
         dpi = config.getint('dpi', default=DEFAULT_DPI, minval=100, maxval=500)
         m_chunk_size = config.getint('measurements_chunk_size', default=DEFAULT_MEASUREMENTS_CHUNK_SIZE, minval=2)
-        self._st_config = ShakeTuneConfig(result_folder_path, keep_n_results, keep_raw_data, m_chunk_size, dpi)
+        self._st_config = ShakeTuneConfig(
+            result_folder_path, keep_n_results, keep_raw_data, m_chunk_size, max_freq, dpi
+        )
         self.timeout = config.getfloat('timeout', DEFAULT_TIMEOUT, above=0.0)
         self._show_macros = config.getboolean('show_macros_in_webui', default=DEFAULT_SHOW_MACROS)
diff --git a/shaketune/shaketune_config.py b/shaketune/shaketune_config.py
index c3d1885..fad5d2d 100644
--- a/shaketune/shaketune_config.py
+++ b/shaketune/shaketune_config.py
@@ -31,6 +31,7 @@ def __init__(
         keep_n_results: int = 10,
         keep_raw_data: bool = False,
         chunk_size: int = 2,
+        max_freq: float = 200.0,
         dpi: int = 150,
     ) -> None:
         self._result_folder = result_folder
@@ -38,6 +39,8 @@
         self.keep_n_results = keep_n_results
         self.keep_raw_data = keep_raw_data
         self.chunk_size = chunk_size
+        self.max_freq = max_freq
+        self.max_freq_vibrations = max_freq * 5  # 1000Hz is the default (5 * 200.0)
         self.dpi = dpi
 
         self.klipper_folder = KLIPPER_FOLDER
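
To illustrate what the new max_freq plumbing does in VibrationGraphComputation.compute(), here is a small standalone sketch (not part of the patch; the array values are made up) of the cutoff, interpolation and integration applied to each measurement's PSD, mirroring the `first_freqs <= self.max_freq` masking and the np.interp / np.trapz calls shown above:

import numpy as np

# Hypothetical stand-in data, only to make the sketch self-contained
max_freq = 200.0
freq_bins = np.linspace(0.0, 1000.0, 2001)            # frequency bins of a fake PSD
psd_sum = np.exp(-((freq_bins - 60.0) ** 2) / 200.0)  # fake PSD with a bump around 60 Hz

# Build the shared frequency axis once from the first measurement, then reuse it for all others
target_freqs = freq_bins[freq_bins <= max_freq]

# Cut the current measurement at the same frequency, interpolate it onto the shared axis
# and integrate it to get a single energy value per angle/speed pair
psd_below_cutoff = psd_sum[freq_bins <= max_freq]
psd_interpolated = np.interp(target_freqs, freq_bins[freq_bins <= max_freq], psd_below_cutoff)
total_energy = np.trapz(psd_below_cutoff, freq_bins[freq_bins <= max_freq])

print(f'{len(target_freqs)} bins kept below {max_freq:.0f} Hz, energy: {total_energy:.3f}')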
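
The projection used by _compute_dir_speed_spectrogram() can also be checked in isolation. The helper below is not part of the patch, only a sketch of the same trigonometry, useful to sanity-check that a pure X move on CoreXY drives both belts at speed/sqrt(2) while the same move on a Cartesian machine only drives the X motor:

import math
from typing import Tuple

def axis_speeds(target_speed: float, angle_deg: float, kinematics: str) -> Tuple[float, float]:
    # Decompose a toolhead speed at a given angle into the two motor-axis speeds,
    # following the same trigonometry as the loop in _compute_dir_speed_spectrogram()
    a = math.radians(angle_deg)
    if kinematics in {'cartesian', 'limited_cartesian', 'corexz', 'limited_corexz'}:
        return abs(target_speed * math.cos(a)), abs(target_speed * math.sin(a))
    if kinematics in {'corexy', 'limited_corexy'}:
        inv_sqrt2 = 1 / math.sqrt(2)
        return (
            abs(target_speed * (math.cos(a) + math.sin(a)) * inv_sqrt2),
            abs(target_speed * (math.cos(a) - math.sin(a)) * inv_sqrt2),
        )
    raise ValueError(f'Unsupported kinematics: {kinematics}')

print(axis_speeds(100.0, 0, 'corexy'))     # both belts at ~70.7 mm/s
print(axis_speeds(100.0, 0, 'cartesian'))  # only the X motor moves: (100.0, 0.0)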