diff --git a/demos/fkm_nonlinear/fkm_nonlinear.ipynb b/demos/fkm_nonlinear/fkm_nonlinear.ipynb index 849a582b..a04e0152 100644 --- a/demos/fkm_nonlinear/fkm_nonlinear.ipynb +++ b/demos/fkm_nonlinear/fkm_nonlinear.ipynb @@ -130,7 +130,7 @@ "id": "312c5770", "metadata": {}, "source": [ - "### Run FKM nonlinear algorithm" + "## Run FKM nonlinear algorithm" ] }, { @@ -150,7 +150,7 @@ "id": "941a473e", "metadata": {}, "source": [ - "#### Output resulting lifetimes" + "## Output resulting lifetimes" ] }, { @@ -209,7 +209,7 @@ "id": "165ca7d3", "metadata": {}, "source": [ - "#### Lifetime $N$ for given failure probability $P_A$\n", + "## Lifetime $N$ for given failure probability $P_A$\n", "The dashed lines show the lifetime if the scaling factor $\\gamma_M$ is not clipped at 1.1 (P_RAM), respective 1.2 (P_RAJ)." ] }, @@ -239,7 +239,7 @@ "id": "7c518856", "metadata": {}, "source": [ - "#### Plot failure probability\n", + "## Plot failure probability\n", "The marked points have $P_A$ = 50%." ] }, @@ -274,7 +274,7 @@ "id": "94ac0271", "metadata": {}, "source": [ - "#### Woehler curves" + "## Woehler curves" ] }, { @@ -366,7 +366,7 @@ "id": "2e8a779f", "metadata": {}, "source": [ - "#### Hystereses" + "## Hystereses" ] }, { @@ -398,33 +398,27 @@ "# plot resulting stress-strain curve\n", "sampling_parameter = 50 # choose larger for smoother plot\n", "plotting_data = detector.interpolated_stress_strain_data(n_points_per_branch=sampling_parameter)\n", + " \n", + "primary = plotting_data.loc[~plotting_data.secondary_branch]\n", + "secondary = plotting_data.loc[plotting_data.secondary_branch]\n", "\n", - "strain_values_primary = plotting_data[\"strain_values_primary\"]\n", - "stress_values_primary = plotting_data[\"stress_values_primary\"]\n", - "hysteresis_index_primary = plotting_data[\"hysteresis_index_primary\"]\n", - "strain_values_secondary = plotting_data[\"strain_values_secondary\"]\n", - "stress_values_secondary = plotting_data[\"stress_values_secondary\"]\n", - "hysteresis_index_secondary = plotting_data[\"hysteresis_index_secondary\"]\n", - "\n", - "sampling_parameter = 50 # choose larger for smoother plot\n", - "plotting_data_1st = detector_1st.interpolated_stress_strain_data(n_points_per_branch=sampling_parameter)\n", - "\n", - "strain_values_primary_1st = plotting_data_1st[\"strain_values_primary\"]\n", - "stress_values_primary_1st = plotting_data_1st[\"stress_values_primary\"]\n", - "hysteresis_index_primary_1st = plotting_data_1st[\"hysteresis_index_primary\"]\n", - "strain_values_secondary_1st = plotting_data_1st[\"strain_values_secondary\"]\n", - "stress_values_secondary_1st = plotting_data_1st[\"stress_values_secondary\"]\n", - "hysteresis_index_secondary_1st = plotting_data_1st[\"hysteresis_index_secondary\"]\n", - "\n", + "plotting_data_1st = detector.interpolated_stress_strain_data(n_points_per_branch=sampling_parameter)\n", + " \n", + "primary_1st = plotting_data_1st.loc[~plotting_data_1st.secondary_branch]\n", + "secondary_1st = plotting_data_1st.loc[plotting_data_1st.secondary_branch]\n", "\n", "# stress-strain diagram\n", - "axes[1,0].plot(strain_values_primary, stress_values_primary, \"y-\", lw=2, label=\"HCM second run\")\n", - "axes[1,0].plot(strain_values_secondary, stress_values_secondary, \"y-.\", lw=2)\n", - "axes[1,0].plot(strain_values_primary_1st, stress_values_primary_1st, \"g-\", lw=2, label=\"HCM first run\")\n", - "axes[1,0].plot(strain_values_secondary_1st, stress_values_secondary_1st, \"g-.\", lw=2)\n", + "for i, prim in primary.groupby(\"load_segment\"):\n", + " 
axes[1,0].plot(prim.strain, prim.stress, \"y-\", lw=2, label=\"HCM second run\" if i == 0 else None)\n", + "for _, sec in secondary.groupby(\"load_segment\"):\n", + " axes[1,0].plot(sec.strain, sec.stress, \"y-.\", lw=2)\n", + "for i, prim in primary_1st.groupby(\"load_segment\"):\n", + " axes[1,0].plot(prim.strain, prim.stress, \"g-\", lw=2, label=\"HCM first run\" if i == 0 else None)\n", + "for _, sec in secondary_1st.groupby(\"load_segment\"):\n", + " axes[1,0].plot(sec.strain, sec.stress, \"g-.\", lw=2)\n", "axes[1,0].grid()\n", - "axes[1,0].set_xlabel(\"$\\epsilon$\")\n", - "axes[1,0].set_ylabel(\"$\\sigma$ [MPa]\")\n", + "axes[1,0].set_xlabel(r\"$\\epsilon$\")\n", + "axes[1,0].set_ylabel(r\"$\\sigma$ [MPa]\")\n", "axes[1,0].set_title(f\"P_{parameter_name} material response\")\n", "\n", "\n", @@ -442,32 +436,27 @@ "# plot resulting stress-strain curve\n", "sampling_parameter = 50 # choose larger for smoother plot\n", "plotting_data = detector.interpolated_stress_strain_data(n_points_per_branch=sampling_parameter)\n", + " \n", + "primary = plotting_data.loc[~plotting_data.secondary_branch]\n", + "secondary = plotting_data.loc[plotting_data.secondary_branch]\n", "\n", - "strain_values_primary = plotting_data[\"strain_values_primary\"]\n", - "stress_values_primary = plotting_data[\"stress_values_primary\"]\n", - "hysteresis_index_primary = plotting_data[\"hysteresis_index_primary\"]\n", - "strain_values_secondary = plotting_data[\"strain_values_secondary\"]\n", - "stress_values_secondary = plotting_data[\"stress_values_secondary\"]\n", - "hysteresis_index_secondary = plotting_data[\"hysteresis_index_secondary\"]\n", - "\n", - "sampling_parameter = 50 # choose larger for smoother plot\n", - "plotting_data_1st = detector_1st.interpolated_stress_strain_data(n_points_per_branch=sampling_parameter)\n", - "\n", - "strain_values_primary_1st = plotting_data_1st[\"strain_values_primary\"]\n", - "stress_values_primary_1st = plotting_data_1st[\"stress_values_primary\"]\n", - "hysteresis_index_primary_1st = plotting_data_1st[\"hysteresis_index_primary\"]\n", - "strain_values_secondary_1st = plotting_data_1st[\"strain_values_secondary\"]\n", - "stress_values_secondary_1st = plotting_data_1st[\"stress_values_secondary\"]\n", - "hysteresis_index_secondary_1st = plotting_data_1st[\"hysteresis_index_secondary\"]\n", + "plotting_data_1st = detector.interpolated_stress_strain_data(n_points_per_branch=sampling_parameter)\n", + " \n", + "primary_1st = plotting_data_1st.loc[~plotting_data_1st.secondary_branch]\n", + "secondary_1st = plotting_data_1st.loc[plotting_data_1st.secondary_branch]\n", "\n", "# stress-strain diagram\n", - "axes[1,1].plot(strain_values_primary, stress_values_primary, \"y-\", lw=2, label=\"HCM second run\")\n", - "axes[1,1].plot(strain_values_secondary, stress_values_secondary, \"y-.\", lw=2)\n", - "axes[1,1].plot(strain_values_primary_1st, stress_values_primary_1st, \"g-\", lw=2, label=\"HCM first run\")\n", - "axes[1,1].plot(strain_values_secondary_1st, stress_values_secondary_1st, \"g-.\", lw=2)\n", + "for i, prim in primary.groupby(\"load_segment\"):\n", + " axes[1,1].plot(prim.strain, prim.stress, \"y-\", lw=2, label=\"HCM second run\" if i == 0 else None)\n", + "for _, sec in secondary.groupby(\"load_segment\"):\n", + " axes[1,1].plot(sec.strain, sec.stress, \"y-.\", lw=2)\n", + "for i, prim in primary_1st.groupby(\"load_segment\"):\n", + " axes[1,1].plot(prim.strain, prim.stress, \"g-\", lw=2, label=\"HCM first run\" if i == 0 else None)\n", + "for _, sec in 
secondary_1st.groupby(\"load_segment\"):\n", + " axes[1,1].plot(sec.strain, sec.stress, \"g-.\", lw=2)\n", "axes[1,1].grid()\n", - "axes[1,1].set_xlabel(\"$\\epsilon$\")\n", - "axes[1,1].set_ylabel(\"$\\sigma$ [MPa]\")\n", + "axes[1,1].set_xlabel(r\"$\\epsilon$\")\n", + "axes[1,1].set_ylabel(r\"$\\sigma$ [MPa]\")\n", "axes[1,1].set_title(f\"P_{parameter_name} material response\")\n", "\n", "plt.tight_layout()" @@ -478,7 +467,7 @@ "id": "31668d98", "metadata": {}, "source": [ - "#### Damaging effects of hystereses" + "## Damaging effects of hystereses" ] }, { @@ -546,7 +535,7 @@ "name": "python", "nbconvert_exporter": "python", "pygments_lexer": "ipython3", - "version": "3.9.0" + "version": "3.12.4" } }, "nbformat": 4, diff --git a/demos/fkm_nonlinear/fkm_nonlinear_full.ipynb b/demos/fkm_nonlinear/fkm_nonlinear_full.ipynb index c7460084..5f0d80c2 100644 --- a/demos/fkm_nonlinear/fkm_nonlinear_full.ipynb +++ b/demos/fkm_nonlinear/fkm_nonlinear_full.ipynb @@ -151,7 +151,7 @@ "id": "84138164", "metadata": {}, "source": [ - "### Estimate tensile strength $R_m$ from nominal value $R_{m,N}$" + "## Estimate tensile strength $R_m$ from nominal value $R_{m,N}$" ] }, { @@ -260,7 +260,7 @@ "id": "0abbb656", "metadata": {}, "source": [ - "#### 2.5.5.1 Determine from experiments\n", + "### 2.5.5.1 Determine from experiments\n", "* Conduct strain-driven experiments with $R_\\varepsilon = -1$. Record values for $\\sigma_a, \\varepsilon_{a,\\text{ges}}$ and $N_\\text{Werkstoff}$.\n", "* Compute $P_{RAM}=\\sqrt{\\sigma_a\\cdot \\varepsilon_{a,\\text{ges}} \\cdot E}$ for every single experiment.\n", "* Use the maximum-likelihood method to infer the parameters $d_1, d_2, P_{RAM,Z,WS}$. For details, refer to the FKM nonlinear document, Sec. 2.5.5.1, p. 40." @@ -271,7 +271,7 @@ "id": "b20fc450", "metadata": {}, "source": [ - "#### 2.5.5.2 Estimate from formulas\n", + "### 2.5.5.2 Estimate from formulas\n", "Alternatively, estimate the material SN-curve from the ultimate tensile strength $R_m$." 
] }, @@ -583,21 +583,35 @@ { "cell_type": "code", "execution_count": null, - "id": "cbe73466", + "id": "18ad5152-24e2-4e7e-b5f6-68cf494e6f7f", + "metadata": {}, + "outputs": [], + "source": [ + "detector_1st.history()" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "735105f1-0845-41ed-878c-b5408be6877c", "metadata": {}, "outputs": [], "source": [ "# plot resulting stress-strain curve\n", "sampling_parameter = 50 # choose larger for smoother plot\n", "plotting_data = detector_1st.interpolated_stress_strain_data(n_points_per_branch=sampling_parameter)\n", - "\n", - "strain_values_primary = plotting_data[\"strain_values_primary\"]\n", - "stress_values_primary = plotting_data[\"stress_values_primary\"]\n", - "hysteresis_index_primary = plotting_data[\"hysteresis_index_primary\"]\n", - "strain_values_secondary = plotting_data[\"strain_values_secondary\"]\n", - "stress_values_secondary = plotting_data[\"stress_values_secondary\"]\n", - "hysteresis_index_secondary = plotting_data[\"hysteresis_index_secondary\"]\n", - "\n", + "plotting_data" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "041d01b6-e5cf-4a28-9dc5-d39560deb89a", + "metadata": {}, + "outputs": [], + "source": [ + "primary = plotting_data.loc[~plotting_data.secondary_branch]\n", + "secondary = plotting_data.loc[plotting_data.secondary_branch]\n", "\n", "fig, axes = plt.subplots(1, 2, figsize=(12,6))\n", "plt.subplots_adjust(wspace=0.4, \n", @@ -614,23 +628,24 @@ "\n", "# stress-strain diagram\n", "axes[1].plot(0,0,\"ok\")\n", - "axes[1].plot(strain_values_primary, stress_values_primary, \"k-\", lw=2)\n", - "axes[1].plot(strain_values_secondary, stress_values_secondary, \"k--\", lw=1)\n", + "for _, prim in primary.groupby(\"load_segment\"):\n", + " axes[1].plot(prim.strain, prim.stress, \"k-\", lw=2)\n", + "axes[1].plot(secondary.strain, secondary.stress, \"k--\", lw=1)\n", "axes[1].grid()\n", - "axes[1].set_xlabel(\"$\\epsilon$\")\n", - "axes[1].set_ylabel(\"$\\sigma$ [MPa]\")\n", + "axes[1].set_xlabel(r\"$\\epsilon$\")\n", + "axes[1].set_ylabel(r\"$\\sigma$ [MPa]\")\n", "axes[1].set_title(\"Material response\")\n", "\n", - "\n", "# stress-strain diagram\n", "plt.rcParams.update({'font.size': 22})\n", "fig, ax = plt.subplots(1, 1, figsize=(4,4))\n", "ax.plot(0,0,\"ok\")\n", - "ax.plot([e*1e2 for e in strain_values_primary], stress_values_primary, \"k-\", lw=3)\n", - "ax.plot([e*1e2 for e in strain_values_secondary], stress_values_secondary, \"k--\", lw=1)\n", + "for _, prim in primary.groupby(\"load_segment\"):\n", + " ax.plot([e*1e2 for e in prim.strain], prim.stress, \"k-\", lw=3)\n", + "ax.plot([e*1e2 for e in secondary.strain], secondary.stress, \"k--\", lw=1)\n", "ax.grid()\n", - "ax.set_xlabel(\"$\\epsilon$ [%]\")\n", - "ax.set_ylabel(\"$\\sigma$ [MPa]\")" + "ax.set_xlabel(r\"$\\epsilon$ [%]\")\n", + "ax.set_ylabel(r\"$\\sigma$ [MPa]\")" ] }, { @@ -761,7 +776,9 @@ "detector.process(load_sequence_list, flush=False)\n", "\n", "# perform HCM algorithm, second run\n", - "detector.process(load_sequence_list, flush=True)\n" + "detector.process(load_sequence_list, flush=True)\n", + "\n", + "detector.recorder.collective" ] }, { @@ -790,7 +807,7 @@ "# define damage parameter\n", "damage_parameter = pylife.strength.damage_parameter.P_RAJ(recorder.collective, assessment_parameters,\\\n", " component_woehler_curve_P_RAJ)\n", - "#display(damage_parameter.collective)\n", + "display(damage_parameter.collective)\n", "\n", "# compute the effect of the damage parameter with the woehler curve\n", 
"damage_calculator = pylife.strength.fkm_nonlinear.damage_calculator\\\n", @@ -836,23 +853,32 @@ { "cell_type": "code", "execution_count": null, - "id": "9ec6da78", + "id": "0784f613-6a76-474a-a5c3-5c5878538809", + "metadata": {}, + "outputs": [], + "source": [ + "history = detector.history()\n", + "history" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "7ecb2bbe-c1b4-4a69-af6e-b7d88537fdc9", "metadata": {}, "outputs": [], "source": [ "# plot resulting stress-strain curve\n", "sampling_parameter = 50 # choose larger for smoother plot\n", "plotting_data = detector.interpolated_stress_strain_data(n_points_per_branch=sampling_parameter)\n", + " \n", + "primary = plotting_data.loc[~plotting_data.secondary_branch]\n", + "secondary = plotting_data.loc[plotting_data.secondary_branch]\n", "\n", - "strain_values_primary = plotting_data[\"strain_values_primary\"]\n", - "stress_values_primary = plotting_data[\"stress_values_primary\"]\n", - "hysteresis_index_primary = plotting_data[\"hysteresis_index_primary\"]\n", - "strain_values_secondary = plotting_data[\"strain_values_secondary\"]\n", - "stress_values_secondary = plotting_data[\"stress_values_secondary\"]\n", - "hysteresis_index_secondary = plotting_data[\"hysteresis_index_secondary\"]\n", + "fig, axes = plt.subplots(1, 2, figsize=(12,6))\n", + "plt.subplots_adjust(wspace=0.4, \n", + " hspace=0.4)\n", "\n", - "# all hystereses in stress-strain diagram\n", - "fig, axes = plt.subplots(1, 2, figsize=(18,6))\n", "# load-time diagram\n", "import matplotlib\n", "matplotlib.rcParams.update({'font.size': 14})\n", @@ -863,70 +889,69 @@ "axes[0].set_title(\"Scaled load sequence\")\n", "\n", "# stress-strain diagram\n", - "axes[1].plot(strain_values_primary, stress_values_primary, \"k-\", lw=2)\n", - "axes[1].plot(strain_values_secondary, stress_values_secondary, \"k--\", lw=1)\n", + "axes[1].plot(0,0,\"ok\")\n", + "#for _, run in primary.groupby(\"run_index\"):\n", + "for _, prim in primary.groupby(\"load_segment\"):\n", + " axes[1].plot(prim.strain, prim.stress, \"k-\", lw=2)\n", + "for _, run in secondary.groupby(\"load_segment\"): \n", + " axes[1].plot(run.strain, run.stress, \"k--\", lw=1)\n", "axes[1].grid()\n", - "axes[1].set_xlabel(\"$\\epsilon$\")\n", - "axes[1].set_ylabel(\"$\\sigma$ [MPa]\")\n", + "axes[1].set_xlabel(r\"$\\epsilon$\")\n", + "axes[1].set_ylabel(r\"$\\sigma$ [MPa]\")\n", "axes[1].set_title(\"Material response\")\n", "\n", - "# all hystereses in stress-strain diagram\n", - "strain_previous = []\n", - "stress_previous = []\n", - "\n", - "n_hystereses = max(max(hysteresis_index_primary), max(hysteresis_index_secondary))\n", - "for hysteresis_index in range(n_hystereses):\n", - "\n", + "for hysteresis_index, hysteresis in plotting_data.groupby(\"hyst_index\"):\n", + " if hysteresis_index < 0:\n", + " continue\n", " fig, ax = plt.subplots(figsize=(12,6))\n", - " strain_primary_subset = np.where(hysteresis_index_primary == hysteresis_index, strain_values_primary, np.nan)\n", - " stress_primary_subset = np.where(hysteresis_index_primary == hysteresis_index, stress_values_primary, np.nan)\n", "\n", - " strain_secondary_subset = np.where(hysteresis_index_secondary == hysteresis_index, strain_values_secondary, np.nan)\n", - " stress_secondary_subset = np.where(hysteresis_index_secondary == hysteresis_index, stress_values_secondary, np.nan)\n", - "\n", - " run_index = damage_parameter.collective.iloc[hysteresis_index, damage_parameter.collective.columns.get_loc(\"run_index\")]\n", - " debug_output = 
damage_parameter.collective.iloc[hysteresis_index, damage_parameter.collective.columns.get_loc(\"debug_output\")]\n", - " ax.set_title(f\"Hysteresis {hysteresis_index}, Run {run_index}:\\n{debug_output}\")\n", - " ax.plot(strain_previous, stress_previous, \"gray\", lw=1)\n", - " ax.plot(strain_primary_subset, stress_primary_subset, \"b-\", lw=2)\n", - " ax.plot(strain_secondary_subset, stress_secondary_subset, \"b--\", lw=1)\n", + " primary = hysteresis.loc[~hysteresis.secondary_branch]\n", + " secondary = hysteresis.loc[hysteresis.secondary_branch]\n", + " run_index = hysteresis.run_index.iloc[0]\n", + "\n", + " hyst_start_segment = history.query(f\"hyst_to == {hysteresis_index}\").index.get_level_values(\"load_segment\")[0]\n", + " history_plot = plotting_data[plotting_data.load_segment < hyst_start_segment]\n", + " \n", + " ax.set_title(f\"Hysteresis {hysteresis_index}, Run {run_index}\")\n", + " for _, segment in history_plot.groupby(\"load_segment\"):\n", + " ax.plot(segment.strain, segment.stress, \"gray\", lw=1)\n", + " for _, prim in primary.groupby(\"load_segment\"):\n", + " ax.plot(prim.strain, prim.stress, \"b-\", lw=2)\n", + " ax.plot(secondary.strain, secondary.stress, \"b--\", lw=1)\n", " ax.grid()\n", - " ax.set_xlabel(\"$\\epsilon$\")\n", - " ax.set_ylabel(\"$\\sigma$ [MPa]\")\n", + " ax.set_xlabel(r\"$\\epsilon$\")\n", + " ax.set_ylabel(r\"$\\sigma$ [MPa]\")\n", " \n", - " # add the new hysteresis to every next plot\n", - " strain_previous += list(strain_primary_subset)\n", - " stress_previous += list(stress_primary_subset)\n", - " strain_previous += list(strain_secondary_subset)\n", - " stress_previous += list(stress_secondary_subset)\n", " " ] }, { "cell_type": "code", "execution_count": null, - "id": "a79d3212", + "id": "8c6d359f-5397-4c50-9a3f-dcd0760f6fd0", "metadata": {}, "outputs": [], "source": [ "# plot all hystereses\n", "# get all graph data\n", "sampling_parameter = 50 # choose larger for smoother plot\n", - "plotting_data = detector.interpolated_stress_strain_data(n_points_per_branch=sampling_parameter, only_hystereses=False)\n", + "plotting_data = detector.interpolated_stress_strain_data(n_points_per_branch=sampling_parameter)\n", "\n", - "strain_values_primary = plotting_data[\"strain_values_primary\"]\n", - "stress_values_primary = plotting_data[\"stress_values_primary\"]\n", - "hysteresis_index_primary = plotting_data[\"hysteresis_index_primary\"]\n", - "strain_values_secondary = plotting_data[\"strain_values_secondary\"]\n", - "stress_values_secondary = plotting_data[\"stress_values_secondary\"]\n", - "hysteresis_index_secondary = plotting_data[\"hysteresis_index_secondary\"]\n", + "primary = plotting_data.loc[~plotting_data.secondary_branch]\n", + "secondary = plotting_data.loc[plotting_data.secondary_branch]\n", "\n", "fig, ax = plt.subplots(figsize=(12,6))\n", - "ax.plot(strain_values_primary, stress_values_primary, \"b-\", lw=2)\n", - "ax.plot(strain_values_secondary, stress_values_secondary, \"b--\", lw=1)\n", + "\n", + "for _, prim in primary.groupby(\"load_segment\"):\n", + " ax.plot(prim.strain, prim.stress, \"b-\", lw=2)\n", + "for _, sec in secondary.groupby(\"load_segment\"):\n", + " ax.plot(sec.strain, sec.stress, \"b--\", lw=1)\n", + "\n", + "#ax.plot(primary.strain, primary.stress, \"b-\", lw=2)\n", + "#ax.plot(secondary.strain, secondary.stress, \"b--\", lw=1)\n", "ax.grid()\n", - "ax.set_xlabel(\"$\\epsilon$\")\n", - "ax.set_ylabel(\"$\\sigma$ [MPa]\")\n", + "ax.set_xlabel(r\"$\\epsilon$\")\n", + "ax.set_ylabel(r\"$\\sigma$ [MPa]\")\n", 
"ax.set_title(\"All hystereses\")\n", "\n", "\n", @@ -934,44 +959,24 @@ "\n", "# get graph data of only hystereses\n", "sampling_parameter = 50 # choose larger for smoother plot\n", - "plotting_data = detector.interpolated_stress_strain_data(n_points_per_branch=sampling_parameter, only_hystereses=True)\n", - "\n", - "strain_values_primary = plotting_data[\"strain_values_primary\"]\n", - "stress_values_primary = plotting_data[\"stress_values_primary\"]\n", - "hysteresis_index_primary = plotting_data[\"hysteresis_index_primary\"]\n", - "strain_values_secondary = plotting_data[\"strain_values_secondary\"]\n", - "stress_values_secondary = plotting_data[\"stress_values_secondary\"]\n", - "hysteresis_index_secondary = plotting_data[\"hysteresis_index_secondary\"]\n", - "\n", - "# plot only hystereses\n", - "strain_previous = []\n", - "stress_previous = []\n", - "n_hystereses = max(max(hysteresis_index_primary), max(hysteresis_index_secondary))\n", - "for hysteresis_index in range(n_hystereses):\n", "\n", + "for hysteresis_index, hysteresis in plotting_data.query(\"hyst_index >= 0\").groupby(\"hyst_index\"):\n", " fig, ax = plt.subplots(figsize=(12,6))\n", " \n", - " # plot hysteresis\n", - " strain_primary_subset = np.where(hysteresis_index_primary == hysteresis_index, strain_values_primary, np.nan)\n", - " stress_primary_subset = np.where(hysteresis_index_primary == hysteresis_index, stress_values_primary, np.nan)\n", - "\n", - " strain_secondary_subset = np.where(hysteresis_index_secondary == hysteresis_index, strain_values_secondary, np.nan)\n", - " stress_secondary_subset = np.where(hysteresis_index_secondary == hysteresis_index, stress_values_secondary, np.nan)\n", + " primary = hysteresis.loc[~hysteresis.secondary_branch]\n", + " secondary = hysteresis.loc[hysteresis.secondary_branch]\n", + " run_index = hysteresis.run_index.iloc[0] \n", "\n", - " if len([_ for _ in strain_primary_subset if not np.isnan(_)]) == 0 and \\\n", - " len([_ for _ in strain_secondary_subset if not np.isnan(_)]) == 0:\n", - " strain_primary_subset = np.array(strain_values_primary_all)\n", - " stress_primary_subset = np.array(stress_values_primary_all)\n", - " strain_secondary_subset = np.array(strain_values_secondary_all)\n", - " stress_secondary_subset = np.array(stress_values_secondary_all)\n", + " hyst_start_segment = history.query(f\"hyst_to == {hysteresis_index}\").index.get_level_values(\"load_segment\")[0]\n", + " history_plot = plotting_data[plotting_data.load_segment < hyst_start_segment]\n", + " \n", " \n", - " run_index = collective.iloc[hysteresis_index, collective.columns.get_loc(\"run_index\")]\n", - " #case_debugging = collective.iloc[hysteresis_index, collective.columns.get_loc(\"case_debugging\")]\n", - " case_debugging = \"\"\n", - " ax.set_title(f\"Hysteresis {hysteresis_index}, Run {run_index}:\\n{case_debugging}\")\n", - " ax.plot(strain_previous, stress_previous, \"gray\", lw=1)\n", - " ax.plot(strain_primary_subset, stress_primary_subset, \"b-\", lw=4)\n", - " ax.plot(strain_secondary_subset, stress_secondary_subset, \"b--\", lw=2)\n", + " ax.set_title(f\"Hysteresis {hysteresis_index}, Run {run_index}\")\n", + " for _, segment in history_plot.groupby(\"load_segment\"):\n", + " ax.plot(segment.strain, segment.stress, \"gray\", lw=1) \n", + " for _, prim in primary.groupby(\"load_segment\"):\n", + " ax.plot(prim.strain, prim.stress, \"b-\", lw=2) \n", + " ax.plot(secondary.strain, secondary.stress, \"b--\", lw=1)\n", " \n", " # plot crack opening points\n", " S_open = 
collective.iloc[hysteresis_index, collective.columns.get_loc(\"S_open\")]\n", @@ -996,13 +1001,6 @@ " epsilon_min_LF = collective.iloc[hysteresis_index, collective.columns.get_loc(\"epsilon_min_LF\")]\n", " epsilon_max_LF = collective.iloc[hysteresis_index, collective.columns.get_loc(\"epsilon_max_LF\")]\n", " \n", - " \n", - " # add the new hysteresis to every next plot\n", - " strain_previous += list(strain_primary_subset)\n", - " stress_previous += list(stress_primary_subset)\n", - " strain_previous += list(strain_secondary_subset)\n", - " stress_previous += list(stress_secondary_subset)\n", - " \n", " ax.plot([epsilon_min_alt_SP,epsilon_min_alt_SP], [S_min*0.9,S_max*1.1], \":\", label=\"ε_min,alt,SP\")\n", " ax.plot([epsilon_max_alt_SP,epsilon_max_alt_SP], [S_min*0.9,S_max*1.1], \"-\", label=\"ε_max,alt,SP\")\n", " ax.plot([epsilon_min_LF,epsilon_min_LF], [S_min,S_max], \":\", label=\"ε_min,LF\")\n", @@ -1010,19 +1008,9 @@ " \n", " ax.grid()\n", " ax.legend(bbox_to_anchor=(1.1,1), loc=\"upper left\")\n", - " ax.set_xlabel(\"$\\epsilon$\")\n", - " ax.set_ylabel(\"$\\sigma$ [MPa]\")\n", - " " - ] - }, - { - "cell_type": "code", - "execution_count": null, - "id": "0ca38e1e", - "metadata": {}, - "outputs": [], - "source": [ - "hysteresis_index_primary" + " ax.set_xlabel(r\"$\\epsilon$\")\n", + " ax.set_ylabel(r\"$\\sigma$ [MPa]\")\n", + " \n" ] }, { @@ -1085,7 +1073,7 @@ "name": "python", "nbconvert_exporter": "python", "pygments_lexer": "ipython3", - "version": "3.9.0" + "version": "3.12.4" } }, "nbformat": 4, diff --git a/docs/stress/rainflow.rst b/docs/stress/rainflow.rst index 9bc02d49..2b55f69b 100644 --- a/docs/stress/rainflow.rst +++ b/docs/stress/rainflow.rst @@ -38,8 +38,6 @@ Utility functions .. toctree:: :maxdepth: 1 - rainflow/fkm_nonlinear_hysteresis_plotter - .. autofunction:: pylife.stress.rainflow.find_turns diff --git a/docs/stress/rainflow/fkm_nonlinear_hysteresis_plotter.rst b/docs/stress/rainflow/fkm_nonlinear_hysteresis_plotter.rst deleted file mode 100644 index 10ed93a3..00000000 --- a/docs/stress/rainflow/fkm_nonlinear_hysteresis_plotter.rst +++ /dev/null @@ -1,7 +0,0 @@ -The ``FKMNonlinearHysteresisPlotter`` class -########################################### - -.. autoclass:: pylife.stress.rainflow.fkm_nonlinear.FKMNonlinearHysteresisPlotter - :members: - :undoc-members: - :special-members: __init__ diff --git a/docs/tutorials.rst b/docs/tutorials.rst index fda56de8..b3a61fd4 100644 --- a/docs/tutorials.rst +++ b/docs/tutorials.rst @@ -17,5 +17,5 @@ computer, you can use `MyBinder tutorials/woehler_curve.nblink tutorials/load_collective.nblink tutorials/stress-strength.rst - tutorials/fkm_nonlinear.nblink - tutorials/fkm_nonlinear_full.nblink + demos/fkm_nonlinear.nblink + demos/fkm_nonlinear_full.nblink diff --git a/docs/user_guide.rst b/docs/user_guide.rst index acef9481..eef31767 100644 --- a/docs/user_guide.rst +++ b/docs/user_guide.rst @@ -59,7 +59,7 @@ components based on load sequences and material data. * :mod:`pylife.strength.fkm_nonlinear.assessment_nonlinear_standard` – Local strain concept / FKM guideline nonlinear -See also the tutorial about :doc:`FKM nonlinear `. +See also the tutorial about :doc:`FKM nonlinear `. 
Mesh operations --------------- diff --git a/src/pylife/materiallaws/notch_approximation_law.py b/src/pylife/materiallaws/notch_approximation_law.py index 517d205c..2978e293 100644 --- a/src/pylife/materiallaws/notch_approximation_law.py +++ b/src/pylife/materiallaws/notch_approximation_law.py @@ -519,18 +519,21 @@ def stress(self, load, *, rtol=1e-5, tol=1e-6): """ + # FIXME consolidate these methods (duplicated code) + + load = np.asarray(load) sign = np.sign(load) # if the assessment is performed for multiple points at once, i.e. load is a DataFrame with values for every node - if isinstance(load, pd.Series) and isinstance(self._lut_primary_branch.index, pd.MultiIndex): + if isinstance(self._lut_primary_branch.index, pd.MultiIndex): # the lut is a DataFrame with MultiIndex with levels class_index and node_id # find the corresponding class only for the first node, use the result for all nodes first_node_id = self._lut_primary_branch.index.get_level_values("node_id")[0] lut_for_first_node = self._lut_primary_branch.load[self._lut_primary_branch.index.get_level_values("node_id")==first_node_id] - first_abs_load = abs(load.iloc[0]) + first_abs_load = abs(load[0]) # get the class index of the corresponding bin/class class_index = lut_for_first_node.searchsorted(first_abs_load) @@ -592,17 +595,18 @@ def strain(self, stress, load): strain : array-like float The resulting strain """ + load = np.asarray(load) sign = np.sign(load) # if the assessment is performed for multiple points at once, i.e. load is a DataFrame with values for every node - if isinstance(load, pd.Series) and isinstance(self._lut_primary_branch.index, pd.MultiIndex): + if isinstance(self._lut_primary_branch.index, pd.MultiIndex): # the lut is a DataFrame with MultiIndex with levels class_index and node_id # find the corresponding class only for the first node, use the result for all nodes first_node_id = self._lut_primary_branch.index.get_level_values("node_id")[0] lut_for_first_node = self._lut_primary_branch.load[self._lut_primary_branch.index.get_level_values("node_id")==first_node_id] - first_abs_load = abs(load.iloc[0]) + first_abs_load = abs(load[0]) # get the class index of the corresponding bin/class class_index = lut_for_first_node.searchsorted(first_abs_load) @@ -671,17 +675,18 @@ def stress_secondary_branch(self, delta_load, *, rtol=1e-5, tol=1e-6): The resulting stress increment within the hysteresis """ + delta_load = np.asarray(delta_load) sign = np.sign(delta_load) # if the assessment is performed for multiple points at once, i.e.
load is a DataFrame with values for every node - if isinstance(delta_load, pd.Series) and isinstance(self._lut_primary_branch.index, pd.MultiIndex): + if isinstance(self._lut_primary_branch.index, pd.MultiIndex): # the lut is a DataFrame with MultiIndex with levels class_index and node_id # find the corresponding class only for the first node, use the result for all nodes first_node_id = self._lut_primary_branch.index.get_level_values("node_id")[0] lut_for_first_node = self._lut_secondary_branch.delta_load[self._lut_secondary_branch.index.get_level_values("node_id")==first_node_id] - first_abs_load = abs(delta_load.iloc[0]) + first_abs_load = abs(delta_load[0]) # get the class index of the corresponding bin/class class_index = lut_for_first_node.searchsorted(first_abs_load) @@ -724,7 +729,7 @@ def stress_secondary_branch(self, delta_load, *, rtol=1e-5, tol=1e-6): raise ValueError(f"Binned class is initialized for a maximum absolute delta_load of {2*self._maximum_absolute_load}, "\ f" but a higher absolute delta_load value of |{delta_load}| is requested (in stress_secondary_branch()).") - return sign * self._lut_secondary_branch.iloc[index+1].delta_stress # "+1", because the next higher class is used + return sign * np.asarray(self._lut_secondary_branch.iloc[index+1].delta_stress) def strain_secondary_branch(self, delta_stress, delta_load): """Get the strain on secondary branches in the stress-strain diagram at a given stress and load @@ -746,17 +751,18 @@ def strain_secondary_branch(self, delta_stress, delta_load): """ #return self._notch_approximation_law.strain_secondary_branch(delta_stress, delta_load) + delta_load = np.asarray(delta_load) sign = np.sign(delta_load) # if the assessment is performed for multiple points at once, i.e. load is a DataFrame with values for every node - if isinstance(delta_load, pd.Series) and isinstance(self._lut_primary_branch.index, pd.MultiIndex): + if isinstance(self._lut_primary_branch.index, pd.MultiIndex): # the lut is a DataFrame with MultiIndex with levels class_index and node_id # find the corresponding class only for the first node, use the result for all nodes first_node_id = self._lut_secondary_branch.index.get_level_values("node_id")[0] lut_for_first_node = self._lut_secondary_branch.delta_load[self._lut_secondary_branch.index.get_level_values("node_id")==first_node_id] - first_abs_load = abs(delta_load.iloc[0]) + first_abs_load = abs(delta_load[0]) # get the class index of the corresponding bin/class class_index = lut_for_first_node.searchsorted(first_abs_load) @@ -799,7 +805,7 @@ def strain_secondary_branch(self, delta_stress, delta_load): raise ValueError(f"Binned class is initialized for a maximum absolute delta_load of {2*self._maximum_absolute_load}, "\ f" but a higher absolute delta_load value of |{delta_load}| is requested (in strain_secondary_branch()).") - return sign * self._lut_secondary_branch.iloc[index+1].delta_strain # "-1", transform to zero-based indices + return sign * np.asarray(self._lut_secondary_branch.iloc[index+1].delta_strain) # "-1", transform to zero-based indices def _create_bins(self): """Initialize the lookup tables by precomputing the notch approximation law values. 
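The hunks above for notch_approximation_law.py drop the `isinstance(load, pd.Series)` requirement, convert the input with `np.asarray`, and return NumPy arrays from the precomputed lookup tables. The underlying binned-lookup idea — `searchsorted` picks the next-higher load class and the sign of the load is re-applied — can be sketched standalone as below; `lut_load`, `lut_stress` and `binned_stress` are illustrative names with made-up values, not part of the pylife API.

import numpy as np

# Illustrative lookup table (not the pylife API): ascending class limits of the
# binned load range and the stress precomputed for each class limit.
lut_load = np.array([100.0, 200.0, 300.0, 400.0])
lut_stress = np.array([95.0, 185.0, 260.0, 320.0])

def binned_stress(load):
    """Sketch of the binned lookup: pick the next-higher class, restore the sign."""
    load = np.asarray(load, dtype=float)      # accepts scalars, lists or arrays alike
    sign = np.sign(load)
    # index of the first class limit that is >= |load|, i.e. the "next higher class"
    index = np.searchsorted(lut_load, np.abs(load))
    if np.any(index >= len(lut_load)):
        raise ValueError("load outside the binned range")
    return sign * lut_stress[index]

print(binned_stress([-150.0, 250.0]))   # [-185.  260.]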
diff --git a/src/pylife/stress/rainflow/fkm_nonlinear.py b/src/pylife/stress/rainflow/fkm_nonlinear.py index 4bee84d6..7c45fde6 100644 --- a/src/pylife/stress/rainflow/fkm_nonlinear.py +++ b/src/pylife/stress/rainflow/fkm_nonlinear.py @@ -19,83 +19,72 @@ import numpy as np import pandas as pd -import itertools -import pylife.stress.rainflow.general +import pylife.stress.rainflow.general as RFG +INDEX = 0 +LOAD_TYPE = 1 -class FKMNonlinearDetector(pylife.stress.rainflow.general.AbstractDetector): - """HCM-Algorithm detector as described in FKM nonlinear. +IS_CLOSED = 0 +FROM = 1 +TO = 2 +CLOSE = 3 - """ +LOAD = 0 +STRESS = 1 +STRAIN = 2 +EPS_MIN_LF = 3 +EPS_MAX_LF = 4 - class _HCM_Point: - """A point in the stress-strain diagram on which the HCM algorithm operates on. - - .. note:: - For an assessment for multiple points (FEM mesh nodes) at once, - we assume that the load time series for the different points are - multiples of each other. In consequence, the hysteresis graph in - the stress-strain diagram follows the same sequence of primary and - secondary paths for every assessment point. - It suffices to consider a single point to find out when a hysteresis - gets closed and when to reach the primary path etc. However, the actual - stress/strain values will be computed individually for every point. - """ +PRIMARY = 0 +SECONDARY = 1 - def __init__(self, load=None, strain=None, stress=None): - self._load = load - self._stress = stress - self._strain = strain +MEMORY_1_2 = 1 +MEMORY_3 = 0 - @property - def load(self): - return self._load +HYSTORY_COLUMNS = ["load", "stress", "strain", "secondary_branch"] +HISTORY_INDEX_LEVELS = [ + "load_segment", "load_step", "run_index", "turning_point", "hyst_from", "hyst_to", "hyst_close" +] - @property - def load_representative(self): - if isinstance(self._load, pd.Series): - return self._load.iloc[0] - else: - return self._load - @property - def strain_representative(self): - if isinstance(self._strain, pd.Series): - return self._strain.iloc[0] - else: - return self._strain - - @property - def stress(self): - return self._stress - - @property - def strain(self): - return self._strain - - def __str__(self): - assert not isinstance(self._load, pd.DataFrame) - if isinstance(self._load, pd.Series): - if self._stress is None: - if self._load is None: - return "()" - return f"(load:{self._load.values[0]:.1f})" - - if self._load is None: - return f"(sigma:{self._stress.values[0]:.1f}, eps:{self._strain.values[0]:.1e})" - return f"(load:{self._load.values[0]:.1f}, sigma:{self._stress.values[0]:.1f}, eps:{self._strain.values[0]:.1e})" - - if self._stress is None: - if self._load is None: - return "()" - return f"(load:{self._load:.1f})" - if self._load is None: - return f"sigma:{self._stress:.1f}, eps:{self._strain:.1e})" - return f"(load:{self._load:.1f}, sigma:{self._stress:.1f}, eps:{self._strain:.1e})" - - def __repr__(self): - return self.__str__() +class _ResidualsRecord: + + def __init__(self): + self._index = [] + self._values = [] + + def append(self, idx, val): + self._index.append(idx) + self._values.append(val) + + def pop(self): + return self._index.pop(), self._values.pop() + + @property + def index(self): + return np.array(self._index, dtype=np.int64) + + @property + def current_index(self): + return self._index[-1] + + def reindex(self): + self._index = list(range(-len(self._values), 0)) + + def will_remain_open_by(self, load): + current_load_extent = np.abs(load - self._values[-1]) + previous_load_extent = np.abs(self._values[-1] - 
self._values[-2]) + return current_load_extent < previous_load_extent + + def __len__(self): + return len(self._index) + + +class FKMNonlinearDetector(RFG.AbstractDetector): + """HCM-Algorithm detector as described in FKM nonlinear. + + """ def __init__(self, recorder, notch_approximation_law): super().__init__(recorder) @@ -110,36 +99,14 @@ def __init__(self, recorder, notch_approximation_law): self._load_max_seen = 0.0 # maximum seen load value self._run_index = 0 # which run through the load sequence is currently performed - self._epsilon_min_LF = np.inf # the current value for _epsilon_min_LF, initialization see FKM nonlinear p.122 - self._epsilon_max_LF = -np.inf # the current value for _epsilon_max_LF, initialization see FKM nonlinear p.122 - - # deviation from FKM nonlinear algorithm to match given example in FKM nonlinear - self._epsilon_min_LF = 0 # the current value for _epsilon_min_LF, initialization see FKM nonlinear p.122 - self._epsilon_max_LF = 0 # the current value for _epsilon_max_LF, initialization see FKM nonlinear p.122 + self._last_record = None + self._residuals_record = _ResidualsRecord() + self._residuals = np.array([]) + self._record_vals_residuals = pd.DataFrame() - self._epsilon_min_LF = None - self._epsilon_max_LF = None + self._history_record = [] - # whether the load sequence starts and the first value should be considered (normally, only "turns" in the sequence get extracted, which would omit the first value) - self._is_load_sequence_start = True - - self._residuals = [] # unclosed hysteresis points - - self._hcm_point_history = [] # all traversed points, for plotting and debugging - # list of tuples (type, hcm_point, index), e.g., [ ("primary", hcm_point, 0), ("secondary", hcm_point, 1), ...] - # where the type is one of {"primary", "secondary"} and indicates the hysteresis branch up to the current point - # and the index is the hysteresis number to which the points belong - self._hcm_message = "" - - # the current index of the row in the recorded `collective` DataFrame, used only for debugging, - # i.e., the `interpolated_stress_strain_data` method - self._hysteresis_index = 0 - - self._current_debug_output = "" - self._strain_values = [] - self._n_strain_values_first_run = 0 - - self._last_sample = pd.DataFrame() + self._num_turning_points = 0 def process_hcm_first(self, samples): """Perform the HCM algorithm for the first time. @@ -153,10 +120,8 @@ def process_hcm_first(self, samples): The samples to be processed by the HCM algorithm. """ assert len(samples) >= 2 - samples, flush = self._adjust_samples_and_flush_for_hcm_first_run(samples) - self._hcm_message += f"HCM first run starts\n" return self.process(samples, flush=flush) def process_hcm_second(self, samples): @@ -174,8 +139,6 @@ def process_hcm_second(self, samples): The samples to be processed by the HCM algorithm. 
""" assert len(samples) >= 2 - - self._hcm_message += f"\nHCM second run starts\n" return self.process(samples, flush=True) def process(self, samples, flush=False): @@ -229,257 +192,281 @@ def process(self, samples, flush=False): # collected values, which will be passed to the recorder at the end of `process()` assert not isinstance(samples, pd.DataFrame) - multi_index = isinstance(samples, pd.Series) and len(samples.index.names) > 1 - - empty_index = [] if not isinstance(samples, pd.Series) else pd.DataFrame(columns=samples.index.names, dtype=np.int64).set_index(samples.index.names, drop=True).index - _loads_min = pd.Series(index=empty_index, dtype=np.float64) - _loads_max = pd.Series(index=empty_index, dtype=np.float64) - _S_min = pd.Series(index=empty_index, dtype=np.float64) - _S_max = pd.Series(index=empty_index, dtype=np.float64) - _epsilon_min = pd.Series(index=empty_index, dtype=np.float64) - _epsilon_max = pd.Series(index=empty_index, dtype=np.float64) - _epsilon_min_LF = pd.Series(index=empty_index, dtype=np.float64) # minimum strain of the load history up to (including) the current hysteresis (LF=Lastfolge), mentioned on p.127 of FKM nonlinear - _epsilon_max_LF = pd.Series(index=empty_index, dtype=np.float64) # maximum strain of the load history up to (including) the current hysteresis (LF=Lastfolge), mentioned on p.127 of FKM nonlinear - _is_closed_hysteresis = [] # whether the hysteresis is fully closed and counts as a normal damage hysteresis - _is_zero_mean_stress_and_strain = [] # whether the mean stress and strain are forced to be zero (occurs in eq. 2.9-52) - _debug_output = [] - - # initialization of _epsilon_min_LF see FKM nonlinear p.122 - if self._epsilon_min_LF is None: - self._epsilon_min_LF = pd.Series(0.0) - - if self._epsilon_max_LF is None: - self._epsilon_max_LF = pd.Series(0.0) - - # store all lists together - recording_lists = [_loads_min, _loads_max, _S_min, _S_max, _epsilon_min, - _epsilon_max, _epsilon_min_LF, _epsilon_max_LF, _is_closed_hysteresis, - _is_zero_mean_stress_and_strain, _debug_output] - - largest_point = self._HCM_Point(load=0) - previous_load = 0 - self._run_index += 1 - # convert from Series to np.array + load_turning_points = self._determine_load_turning_points(samples, flush) + + self._current_load_index = load_turning_points.index + + li = load_turning_points.index.to_frame()['load_step'] + turning_point_idx = pd.Index((li != li.shift()).cumsum() - 1, name="turning_point") + + load_turning_points_rep = np.asarray( + load_turning_points.groupby(turning_point_idx, sort=False).first() + ) + + record, hysts = self._perform_hcm_algorithm(load_turning_points_rep) + + if self._last_record is None: + self._last_record = np.zeros((5, self._group_size)) + + num_turning_points = len(load_turning_points_rep) + record_vals = self._collect_record(load_turning_points, num_turning_points, record) + + self._store_recordings_for_history(record, record_vals, turning_point_idx, hysts) + + results = self._process_recording(load_turning_points_rep, record_vals, hysts) + results_min, results_max, epsilon_min_LF, epsilon_max_LF = results + + self._update_residuals(record_vals, turning_point_idx, load_turning_points_rep) + + self._num_turning_points += num_turning_points + + # TODO: check if these are really that redundant + is_closed_hysteresis = (hysts[:, 0] != MEMORY_3).tolist() + is_zero_mean_stress_and_strain = (hysts[:, 0] == MEMORY_3).tolist() + self._recorder.record_values_fkm_nonlinear( + loads_min=results_min["loads_min"], + 
loads_max=results_max["loads_max"], + S_min=results_min["S_min"], + S_max=results_max["S_max"], + epsilon_min=results_min["epsilon_min"], + epsilon_max=results_max["epsilon_max"], + epsilon_min_LF=epsilon_min_LF, + epsilon_max_LF=epsilon_max_LF, + is_closed_hysteresis=is_closed_hysteresis, + is_zero_mean_stress_and_strain=is_zero_mean_stress_and_strain, + run_index=self._run_index + ) + + return self + + def _determine_load_turning_points(self, samples, flush): old_head_index = self._head_index + have_multi_index = isinstance(samples, pd.Series) and len(samples.index.names) > 1 - if multi_index: - _samples = samples.groupby('load_step', sort=False).first().to_numpy().flatten() + if have_multi_index: + rep_samples = samples.groupby('load_step', sort=False).first().to_numpy() else: - _samples = np.asarray(samples) - - # get the turning points - loads_indices, load_turning_points = self._new_turns(_samples, flush) + rep_samples = np.asarray(samples) - if multi_index: - load_steps = samples.index.get_level_values('load_step').unique().to_series() - if len(loads_indices) > 0: - tindex = loads_indices - old_head_index - idx = load_steps.iloc[tindex] - vals = samples.loc[idx].reset_index() - vals = vals.set_index(['load_step', 'node_id']) - if tindex[0] < 0: - vals.iloc[:len(self._last_sample), 0] = self._last_sample + loads_indices, load_turning_points = self._new_turns(rep_samples, flush) - load_turning_points = vals.iloc[:, 0] + self._group_size = len(samples) // len(rep_samples) + if have_multi_index: + load_steps = samples.index.get_level_values('load_step').unique() + if len(loads_indices) > 0: + turns_idx = loads_indices - old_head_index + idx = load_steps[turns_idx] + load_turning_points = samples.loc[idx] + if turns_idx[0] < 0: + load_turning_points.iloc[:self._group_size] = self._last_sample else: - load_turning_points = [] - idx = load_steps.iloc[-1] + load_turning_points = pd.Series( + [], index=pd.MultiIndex.from_tuples([], names=samples.index.names) + ) + idx = load_steps[-1] self._last_sample = samples.loc[idx] - self._initialize_epsilon_min_for_hcm_run(samples, load_turning_points) + if isinstance(load_turning_points, pd.Series): + return load_turning_points - if not isinstance(load_turning_points, pd.Series): - load_turning_points = pd.Series(load_turning_points) - load_turning_points.index.name = 'load_step' - - self._load_max_seen, self._iz, self._ir, recording_lists = self._perform_hcm_algorithm( - samples=samples, recording_lists=recording_lists, largest_point=largest_point, - previous_load=previous_load, iz=self._iz, ir=self._ir, - load_max_seen=self._load_max_seen, load_turning_points=load_turning_points) + return pd.Series( + load_turning_points, index=pd.Index(loads_indices, name="load_step") + ) - # transfer the detected hystereses to the recorder - [_loads_min, _loads_max, _S_min, _S_max, _epsilon_min, _epsilon_max, _epsilon_min_LF, - _epsilon_max_LF, _is_closed_hysteresis, _is_zero_mean_stress_and_strain, _debug_output] = recording_lists + def _store_recordings_for_history(self, record, record_vals, turning_point, hysts): + record_repr = ( + record_vals.reset_index(["load_step", "turning_point"]) + .groupby(turning_point) + .first() + .drop(["epsilon_min_LF", "epsilon_max_LF"], axis=1) + ) - self._recorder.record_values_fkm_nonlinear( - loads_min=_loads_min, loads_max=_loads_max, - S_min=_S_min, S_max=_S_max, - epsilon_min=_epsilon_min, epsilon_max=_epsilon_max, - epsilon_min_LF=_epsilon_min_LF, epsilon_max_LF=_epsilon_max_LF, - 
is_closed_hysteresis=_is_closed_hysteresis, is_zero_mean_stress_and_strain=_is_zero_mean_stress_and_strain, - run_index=self._run_index, debug_output=_debug_output) + record_repr["run_index"] = self._run_index + record_repr["secondary_branch"] = record[:, SECONDARY] != 0 + rec_hysts = hysts.copy() + rec_hysts[:, 1:] += self._num_turning_points - return self + self._history_record.append((record_repr, rec_hysts)) - @property - def strain_values(self): - """ - Get the strain values of the turning points in the stress-strain diagram. - They are needed in the FKM nonlinear roughness & surface layer algorithm, which adds residual stresses in another pass of the HCM algorithm. + def _update_residuals(self, record_vals, turning_point, load_turning_points_rep): + residuals_index = self._residuals_record.index + old_residuals_index = residuals_index[residuals_index < 0] + new_residuals_index = residuals_index[residuals_index >= 0] + self._num_turning_points - Returns - ------- - list of float - The strain values of the turning points that are visited during the HCM algorithm. - """ - return np.array(self._strain_values) + remaining_vals_residuals = self._record_vals_residuals.loc[ + self._record_vals_residuals.index[old_residuals_index] + ] - @property - def strain_values_first_run(self): - """ - Get the strain values of the turning points in the stress-strain diagram, for the first run of the HCM algorithm. - They are needed in the FKM nonlinear roughness & surface layer algorithm, which adds residual stresses in another pass of the HCM algorithm. + new_vals_residuals = record_vals.loc[ + record_vals.index.isin(new_residuals_index, level="turning_point") + ] - Returns - ------- - list of float - The strain values of the turning points that are visited during the first run of the HCM algorithm. - """ + self._record_vals_residuals = pd.concat([remaining_vals_residuals, new_vals_residuals]) + self._record_vals_residuals.index.names = record_vals.index.names - return np.array(self._strain_values[:self._n_strain_values_first_run]) + self._residuals = ( + load_turning_points_rep[residuals_index] if len(residuals_index) else np.array([]) + ) - @property - def strain_values_second_run(self): - """ - Get the strain values of the turning points in the stress-strain diagram, for the second and any further run of the HCM algorithm. - They are needed in the FKM nonlinear roughness & surface layer algorithm, which adds residual stresses in another pass of the HCM algorithm. + self._residuals_record.reindex() - Returns - ------- - list of float - The strain values of the turning points that are visited during the second run of the HCM algorithm. - """ + def _collect_record(self, load_turning_points, num_turning_points, record): + def primary(_prev, load): + sigma = self._notch_approximation_law.stress(load) + epsilon = self._notch_approximation_law.strain(sigma, load) + return np.array([load, sigma, epsilon]) - return np.array(self._strain_values[self._n_strain_values_first_run:]) + def secondary(prev, load): + prev_load = prev[LOAD] - def interpolated_stress_strain_data(self, *, n_points_per_branch=100, only_hystereses=False): - """Return points on the traversed hysteresis curve, mainly intended for plotting. - The curve including all hystereses, primary and secondary branches is sampled - at a fixed number of points within each hysteresis branch. - These points can be used for plotting. 
+ delta_L = load - prev_load + delta_sigma = self._notch_approximation_law.stress_secondary_branch(delta_L) + delta_epsilon = self._notch_approximation_law.strain_secondary_branch(delta_sigma, delta_L) - The intended use is to generate plots as follows: + sigma = prev[STRESS] + delta_sigma + epsilon = prev[STRAIN] + delta_epsilon - .. code:: python + return np.array([load, sigma, epsilon]) - fkm_nonlinear_detector.process_hcm_first(...) - sampling_parameter = 100 # choose larger for smoother plot or smaller for lower runtime - plotting_data = detector.interpolated_stress_strain_data(n_points_per_branch=sampling_parameter) + def determine_prev_record(prev_idx): + if prev_idx < 0: + idx = len(self._record_vals_residuals) + prev_idx*self._group_size + return self._record_vals_residuals.iloc[idx:idx+self._group_size].to_numpy().T + if prev_idx < i: + idx = prev_idx * self._group_size + return record_vals[:, idx:idx+self._group_size] + return self._last_record - strain_values_primary = plotting_data["strain_values_primary"] - stress_values_primary = plotting_data["stress_values_primary"] - hysteresis_index_primary = plotting_data["hysteresis_index_primary"] - strain_values_secondary = plotting_data["strain_values_secondary"] - stress_values_secondary = plotting_data["stress_values_secondary"] - hysteresis_index_secondary = plotting_data["hysteresis_index_secondary"] + record_vals = np.empty((5, num_turning_points*self._group_size)) - plt.plot(strain_values_primary, stress_values_primary, "g-", lw=3) - plt.plot(strain_values_secondary, stress_values_secondary, "b-.", lw=1) + turning_points = load_turning_points.to_numpy() - Parameters - ---------- - n_points_per_branch : int, optional - How many sampling points to use per hysteresis branch, default 100. - A larger value values means smoother curves but longer runtime. + for i in range(num_turning_points): + prev_record = determine_prev_record(record[i, INDEX]) - only_hystereses : bool, optional - Default ``False``. If only graphs of the closed hystereses should be output. - Note that this does not work for hysteresis that have multiple smaller hysterseses included. + idx = i * self._group_size + load_turning_point = turning_points[idx:idx+self._group_size] - Returns - ------- - plotting_data : dict - A dict with the following keys: - - * "strain_values_primary" - * "stress_values_primary" - * "hysteresis_index_primary" - * "strain_values_secondary" - * "stress_values_secondary" - * "hysteresis_index_secondary" - - The values are lists of strains and stresses of the points on the stress-strain curve, - separately for primary and secondary branches. The lists contain nan values whenever the - curve of the *same* branch is discontinuous. This allows to plot the entire curve - with different colors for primary and secondary branches. - - The entries for hysteresis_index_primary and hysteresis_index_secondary are the row - indices into the collective DataFrame returned by the recorder. This allows, e.g., - to separate the output of multiple runs of the HCM algorithm or to plot the traversed - paths on the stress-strain diagram for individual steps of the algorithm. 
+ deformation_function = secondary if record[i, SECONDARY] else primary + result_buf = record_vals[:, idx:idx+self._group_size] + self._process_deformation( + deformation_function, result_buf, load_turning_point, prev_record + ) - """ + record_vals = pd.DataFrame( + record_vals.T, + columns=["load", "stress", "strain", "epsilon_min_LF", "epsilon_max_LF"], + index=load_turning_points.index, + ) - assert n_points_per_branch >= 2 + new_sum_tp = self._num_turning_points + num_turning_points + tp_index = [np.arange(self._num_turning_points, new_sum_tp)] * self._group_size - plotter = FKMNonlinearHysteresisPlotter(self._hcm_point_history, self._ramberg_osgood_relation) - return plotter.interpolated_stress_strain_data(n_points_per_branch=n_points_per_branch, only_hystereses=only_hystereses) + record_vals["turning_point"] = np.stack(tp_index).T.flatten() + return record_vals.set_index("turning_point", drop=True, append=True) - def _proceed_on_primary_branch(self, current_point): - """Follow the primary branch (de: Erstbelastungskurve) of a notch approximation material curve. + def _process_deformation(self, deformation_func, result_buf, load, prev_record): + result_buf[:3] = deformation_func(prev_record, load) - Parameters - ---------- - previous_point : _HCM_Point - The starting point in the stress-strain diagram where to begin to follow the primary branch. - current_point : _HCM_Point - The end point until where to follow the primary branch. This variable only needs to have the load value. + old_load = self._last_record[LOAD, 0] - Returns - ------- - current_point : _HCM_Point - The initially given current point, but with updated values of stress and strain. + if old_load < load[0]: + result_buf[EPS_MAX_LF] = ( + self._last_record[EPS_MAX_LF] + if self._last_record[EPS_MAX_LF, 0] > result_buf[STRAIN, 0] + else result_buf[STRAIN, :] + ) + result_buf[EPS_MIN_LF] = self._last_record[EPS_MIN_LF] + else: + result_buf[EPS_MIN_LF] = ( + self._last_record[EPS_MIN_LF] + if self._last_record[EPS_MIN_LF, 0] < result_buf[STRAIN, 0] + else result_buf[STRAIN, :] + ) + result_buf[EPS_MAX_LF] = self._last_record[EPS_MAX_LF] - """ - sigma = self._notch_approximation_law.stress(current_point.load) - epsilon = self._notch_approximation_law.strain(sigma, current_point.load) + self._last_record = result_buf - current_point._stress = pd.Series(sigma.values, index=current_point.load.index) - current_point._strain = pd.Series(epsilon.values, index=current_point.load.index) + def _process_recording(self, turning_points, record_vals, hysts): + def turn_memory_1_2(values, index): + if values[0][0, 0] < values[1][0, 0]: + return (values[0], values[1], index[0], index[1]) + return (values[1], values[0], index[1], index[0]) - # log point for later plotting - self._hcm_point_history.append(("primary", current_point, self._hysteresis_index)) + def turn_memory_3(values, index): + abs_point = np.abs(values[0]) + return (-abs_point, abs_point, index[0], index[0]) - return current_point + memory_functions = [turn_memory_3, turn_memory_1_2] - def _proceed_on_secondary_branch(self, previous_point, current_point): - """Follow the secondary branch of a notch approximation material curve. + start = len(self._residuals) + if start: + turning_points = np.concatenate((self._residuals, turning_points)) - Parameters - ---------- - previous_point : _HCM_Point - The starting point in the stress-strain diagram where to begin to follow the primary branch. - current_point : _HCM_Point - The end point until where to follow the primary branch. 
This variable only needs to have the load value. + record_vals_with_residuals = pd.concat([self._record_vals_residuals, record_vals]) - Returns - ------- - current_point : _HCM_Point - The initially given current point, but with updated values of stress and strain. + value_array = record_vals_with_residuals.to_numpy() - """ - delta_L = current_point.load.values - previous_point.load.values # as described in FKM nonlinear - index = current_point.load.index - obsolete_index_levels = [n for n in index.names if n != 'load_step'] - delta_L = pd.Series( - delta_L, index=current_point.load.index.droplevel(obsolete_index_levels) + index_array = np.asarray( + record_vals_with_residuals.index.droplevel("turning_point").to_frame() ) - delta_sigma = self._notch_approximation_law.stress_secondary_branch(delta_L) - delta_epsilon = self._notch_approximation_law.strain_secondary_branch(delta_sigma, delta_L) + signal_index_names = self._current_load_index.names + signal_index_num = len(signal_index_names) + + result_len = len(hysts) * self._group_size + + results_min = np.zeros((result_len, 3)) + results_min_idx = np.zeros((result_len, signal_index_num), dtype=np.int64) + + results_max = np.zeros((result_len, 3)) + results_max_idx = np.zeros((result_len, signal_index_num), dtype=np.int64) + + epsilon_min_LF = np.zeros(result_len) + epsilon_max_LF = np.zeros(result_len) - current_point._stress = pd.Series(previous_point._stress.values + delta_sigma.values, index=current_point.load.index) - current_point._strain = pd.Series(previous_point._strain.values + delta_epsilon.values, index=current_point.load.index) + for i, hyst in enumerate(hysts): + idx = (hyst[FROM:CLOSE] + start) * self._group_size - # log point for later plotting - self._hcm_point_history.append(("secondary", current_point, self._hysteresis_index)) - return current_point + beg0, beg1 = idx[0], idx[1] + end0, end1 = beg0 + self._group_size, beg1 + self._group_size + + values = value_array[beg0:end0], value_array[beg1:end1] + index = index_array[beg0:end0], index_array[beg1:end1] + + hyst_type = hyst[IS_CLOSED] + min_val, max_val, min_idx, max_idx = memory_functions[hyst_type](values, index) + + beg = i * self._group_size + end = beg + self._group_size + + results_min[beg:end] = min_val[:, :3] + results_max[beg:end] = max_val[:, :3] + + results_min_idx[beg:end] = min_idx + results_max_idx[beg:end] = max_idx + + epsilon_min_LF[beg:end] = min_val[:, EPS_MIN_LF] + epsilon_max_LF[beg:end] = max_val[:, EPS_MAX_LF] + + results_min = pd.DataFrame( + results_min, + columns=["loads_min", "S_min", "epsilon_min"], + index=pd.MultiIndex.from_arrays(results_min_idx.T, names=signal_index_names) + ) + results_max = pd.DataFrame( + results_max, + columns=["loads_max", "S_max", "epsilon_max"], + index=pd.MultiIndex.from_arrays(results_max_idx.T, names=signal_index_names) + ) + + return results_min, results_max, pd.Series(epsilon_min_LF), pd.Series(epsilon_max_LF) def _adjust_samples_and_flush_for_hcm_first_run(self, samples): @@ -511,7 +498,7 @@ def _adjust_samples_and_flush_for_hcm_first_run(self, samples): scalar_samples = samples.groupby("load_step", sort=False).first() scalar_samples_twice = np.concatenate([scalar_samples, scalar_samples]) - turn_indices, _ = pylife.stress.rainflow.general.find_turns(scalar_samples_twice) + turn_indices, _ = RFG.find_turns(scalar_samples_twice) flush = True if len(scalar_samples)-1 not in turn_indices: @@ -519,141 +506,88 @@ def _adjust_samples_and_flush_for_hcm_first_run(self, samples): return samples, flush - def 
_perform_hcm_algorithm(self, *, samples, recording_lists, largest_point, previous_load, iz, ir, load_max_seen, load_turning_points): - """Perform the entire HCM algorithm for all load samples, - record the found hysteresis parameters in the recording_lists.""" + def _perform_hcm_algorithm(self, load_turning_points): + """Perform the entire HCM algorithm for all load samples""" # iz: number of not yet closed branches # ir: number of residual loads corresponding to hystereses that cannot be closed, # because they contain parts of the primary branch - self._hcm_message += f"turning points: {samples}\n" - # iterate over loads from the given list of samples - li = load_turning_points.index.to_frame()['load_step'] - load_step = (li != li.shift()).cumsum() - - for _, current_load in load_turning_points.groupby(load_step, sort=False): - current_load_representative = self._get_scalar_current_load(current_load) - - self._hcm_message += f"* load {current_load}:" - # initialize the point in the stress-strain diagram corresponding to the current load. - # The stress and strain values will be computed during the current iteration of the present loop. - current_point = self._HCM_Point(load=current_load) + hysts = np.zeros((len(load_turning_points), 4), dtype=np.int64) + hyst_index = 0 - current_point, iz, ir, recording_lists = self._hcm_process_sample( - current_point=current_point, - recording_lists=recording_lists, - largest_point=largest_point, iz=iz, ir=ir, - load_max_seen=load_max_seen, current_load_representative=current_load_representative - ) - - # update the maximum seen absolute load - if np.abs(current_load_representative) > load_max_seen+1e-12: - load_max_seen = np.abs(current_load_representative) - largest_point = current_point + record = -np.ones((len(load_turning_points), 2), dtype=np.int64) + rec_index = 0 - # increment the indicator how many open hystereses there are - iz += 1 + for index, current_load in enumerate(load_turning_points): + hyst_index = self._hcm_process_sample(current_load, index, hysts, hyst_index, record, rec_index) - # store the previously processed point to the list of residuals to be processed in the next iterations - self._residuals.append(current_point) + if np.abs(current_load) > self._load_max_seen: + self._load_max_seen = np.abs(current_load) - self._hcm_update_min_max_strain_values( - previous_load=previous_load, - current_load_representative=current_load_representative, - current_point=current_point - ) - self._hcm_message += f"\n" + self._iz += 1 - previous_load = current_load_representative + self._residuals_record.append(rec_index, current_load) - return load_max_seen, iz, ir, recording_lists + rec_index += 1 - def _hcm_update_min_max_strain_values(self, *, previous_load, current_load_representative, current_point): - """Update the minimum and maximum yet seen strain values - This corresponds to the "steigend=1 or 2" assignment at chapter 2.9.7 point 5 and - the rules under point 7. 
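+        # ``hysts`` was preallocated with one row per turning point; trim it to the
+        # rows actually filled in by ``_hcm_process_sample`` and return it together
+        # with the per-turning-point branch record (index and primary/secondary flag).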
+ hysts = hysts[:hyst_index, :] + return record, hysts - 5->6, VZ = 5-6=-1 < 0, steigend = 1 - 7->4, VZ = 7-4=3 >= 0, steigend = 2, L(0)=0 - """ + def _hcm_process_sample(self, current_load, current_index, hysts, hyst_index, record, rec_index): + """ Process one sample in the HCM algorithm, i.e., one load value """ - if previous_load < current_load_representative-1e-12: - # case "steigend=1", i.e., load increases - new_val = self._epsilon_max_LF if self._epsilon_max_LF.values[0] > current_point.strain.values[0] else current_point.strain - self._epsilon_max_LF = new_val #pd.Series(new_val.values, index=current_point.strain.index) + record_index = current_index + while True: + if self._iz == self._ir: - else: - # case "steigend=2", i.e., load decreases - new_val = self._epsilon_min_LF if self._epsilon_min_LF.values[0] < current_point.strain.values[0] else current_point.strain - self._epsilon_min_LF = pd.Series(new_val.values, index=current_point.strain.index) + if np.abs(current_load) > self._load_max_seen: # case a) i, "Memory 3" + record[rec_index, :] = [record_index, PRIMARY] - def _hcm_process_sample(self, *, current_point, recording_lists, largest_point, iz, ir, load_max_seen, current_load_representative): - """ Process one sample in the HCM algorithm, i.e., one load value """ + residuals_idx = self._residuals_record.current_index + hysts[hyst_index, :] = [MEMORY_3, residuals_idx, current_index, -1] + hyst_index += 1 - while True: - # iz = len(self._residuals) - if iz == ir: - previous_point = self._residuals[-1] - - # if the current load is a new maximum - if np.abs(current_load_representative) > load_max_seen+1e-12: - # case a) i., "Memory 3" - current_point, recording_lists = self._handle_case_a_i( - current_point=current_point, previous_point=previous_point, - recording_lists=recording_lists - ) - ir += 1 + self._ir += 1 else: - current_point = self._handle_case_a_ii(current_point=current_point, previous_point=previous_point) - - - # end the inner loop and fetch the next load from the load sequence + record[rec_index, :] = [record_index, SECONDARY] break - if iz < ir: - # branch is fully part of the initial curve, case "Memory 1" - current_point = self._handle_case_b(current_point) - # do not further process this load + if self._iz < self._ir: + record[rec_index, :] = [record_index, PRIMARY] break # here we have iz > ir: - previous_point_0 = self._residuals[-2] - previous_point_1 = self._residuals[-1] - - # is the current load extent smaller than the last one? 
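+            # case c) i.: the current load extent is smaller than the previous one,
+            # so the previously opened branch stays open and no hysteresis is closed;
+            # the point only starts a new secondary branch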
- current_load_extent = np.abs(current_load_representative-previous_point_1.load_representative) - previous_load_extent = np.abs(previous_point_1.load_representative-previous_point_0.load_representative) - # yes - if current_load_extent < previous_load_extent-1e-12: - current_point = self._handle_case_c_i(current_point=current_point, previous_point_1=previous_point_1) - # continue with the next load value + if self._residuals_record.will_remain_open_by(current_load): + record[rec_index, :] = [record_index, SECONDARY] break # no -> we have a new hysteresis - recording_lists = self._handle_case_c_ii( - recording_lists=recording_lists, previous_point_0=previous_point_0, previous_point_1=previous_point_1 - ) - iz -= 2 + prev_idx_1, prev_load_1 = self._residuals_record.pop() + prev_idx_0, prev_load_0 = self._residuals_record.pop() + + if len(self._residuals_record): + record_index = self._residuals_record.current_index + + self._iz -= 2 # if the points of the hysteresis lie fully inside the seen range of loads, i.e., # the turn points are smaller than the maximum turn point so far # (this happens usually in the second run of the HCM algorithm) - if np.abs(previous_point_0.load_representative) < load_max_seen-1e-12 and np.abs(previous_point_1.load_representative) < load_max_seen-1e-12: + if np.abs(prev_load_0) < self._load_max_seen and np.abs(prev_load_1) < self._load_max_seen: # case "Memory 2", "c) ii B" # the primary branch is not yet reached, continue processing residual loads, potentially # closing even more hysteresis - self._hcm_message += "," + hysts[hyst_index, :] = [MEMORY_1_2, prev_idx_0, prev_idx_1, current_index] + hyst_index += 1 - # add a discontinuity marker - self._hcm_point_history.append(("discontinuity", None, self._hysteresis_index)) continue # case "Memory 1", "c) ii A" @@ -665,478 +599,273 @@ def _hcm_process_sample(self, *, current_point, recording_lists, largest_point, # effectively `iz = iz - 1` as described on p.70 # Proceed on primary path for the rest, which was not part of the closed hysteresis - current_point = self._proceed_on_primary_branch(current_point) - - # store strain values, this is for the FKM nonlinear roughness & surface layer algorithm, which adds residual stresses in another pass of the HCM algorithm - self._strain_values.append(current_point.strain.values[0]) - - # count number of strain values in the first run of the HCM algorithm - if self._run_index == 1: - self._n_strain_values_first_run += 1 - break - - return current_point, iz, ir, recording_lists - - def _handle_case_c_ii(self, *, recording_lists, previous_point_0, previous_point_1): - """ Handle case c) ii. 
in the HCM algorithm, which detects a new hysteresis.""" - - self._hcm_message += f" case c) ii., detected full hysteresis" - - epsilon_min = previous_point_0.strain if previous_point_0.strain.values[0] < previous_point_1.strain.values[0] else previous_point_1.strain - epsilon_max = previous_point_0.strain if previous_point_0.strain.values[0] > previous_point_1.strain.values[0] else previous_point_1.strain - - [_loads_min, _loads_max, _S_min, _S_max, _epsilon_min, _epsilon_max, _epsilon_min_LF, - _epsilon_max_LF, _is_closed_hysteresis, _is_zero_mean_stress_and_strain, _debug_output] = recording_lists - - # consume the last two loads, process this hysteresis - current_load_min = previous_point_0.load if previous_point_0.load.values[0] < previous_point_1.load.values[0] else previous_point_1.load - _loads_min = pd.concat([_loads_min, current_load_min]) - current_load_max = previous_point_0.load if previous_point_0.load.values[0] > previous_point_1.load.values[0] else previous_point_1.load - _loads_max = pd.concat([_loads_max, current_load_max]) - - current_S_min = previous_point_0.stress if previous_point_0.stress.values[0] < previous_point_1.stress.values[0] else previous_point_1.stress - _S_min = pd.concat([_S_min, current_S_min]) - current_S_max = previous_point_0.stress if previous_point_0.stress.values[0] > previous_point_1.stress.values[0] else previous_point_1.stress - _S_max = pd.concat([_S_max, current_S_max]) - - _epsilon_min = pd.concat([_epsilon_min, epsilon_min]) - _epsilon_max = pd.concat([_epsilon_max, epsilon_max]) - _epsilon_min_LF = pd.concat([_epsilon_min_LF, self._epsilon_min_LF]) - _epsilon_max_LF = pd.concat([_epsilon_max_LF, self._epsilon_max_LF]) - _is_closed_hysteresis.append(True) - _is_zero_mean_stress_and_strain.append(False) # do not force the mean stress and strain to be zero - # save point for the plotting utility / `interpolated_stress_strain_data` method - # The hysteresis goes: previous_point_0 -> previous_point_1 -> previous_point_0. - # previous_point_0,previous_point_1 are already logged, now store only previous_point_0 again to visualize the closed hysteresis - self._hcm_point_history.append(("secondary", previous_point_0, self._hysteresis_index)) - - self._hysteresis_index += 1 # increment the hysteresis counter, only needed for the `interpolated_stress_strain` method which helps in plotting the hystereses - - # remove the last two loads from the list of residual loads - self._residuals.pop() - self._residuals.pop() - return [_loads_min, _loads_max, _S_min, _S_max, _epsilon_min, _epsilon_max, _epsilon_min_LF, - _epsilon_max_LF, _is_closed_hysteresis, _is_zero_mean_stress_and_strain, _debug_output] + record[rec_index, :] = [record_index, PRIMARY] + hysts[hyst_index, :] = [MEMORY_1_2, prev_idx_0, prev_idx_1, current_index] + hyst_index += 1 - def _handle_case_c_i(self, *, current_point, previous_point_1): - """Handle case c) i. of the HCM algorithm.""" + return hyst_index - self._hcm_message += f" case c) i." 
- - # yes -> we are on a new secondary branch, there is no new hysteresis to be closed with this - current_point = self._proceed_on_secondary_branch(previous_point_1, current_point) - - # store strain values, this is for the FKM nonlinear roughness & surface layer algorithm, which adds residual stresses in another pass of the HCM algorithm - self._strain_values.append(current_point.strain.values[0]) - - # count number of strain values in the first run of the HCM algorithm - if self._run_index == 1: - self._n_strain_values_first_run += 1 - return current_point - - def _handle_case_b(self, current_point): - """ Handle case b) of the HCM algorithm. - The branch is fully part of the initial curve, case "Memory 1" + @property + def strain_values(self): """ + Get the strain values of the turning points in the stress-strain diagram. + They are needed in the FKM nonlinear roughness & surface layer algorithm, which adds residual stresses in another pass of the HCM algorithm. - self._hcm_message += f" case b)" - - # compute stress and strain of the current point - current_point = self._proceed_on_primary_branch(current_point) - - # store strain values, this is for the FKM nonlinear roughness & surface layer algorithm, which adds residual stresses in another pass of the HCM algorithm - self._strain_values.append(current_point.strain.values[0]) - - # count number of strain values in the first run of the HCM algorithm - if self._run_index == 1: - self._n_strain_values_first_run += 1 - - return current_point - - def _handle_case_a_ii(self, *, current_point, previous_point): - """Handle the case a) ii. in the HCM algorithm.""" - - self._hcm_message += f" case a) ii." - - # secondary branch - current_point = self._proceed_on_secondary_branch(previous_point, current_point) + Returns + ------- + list of float + The strain values of the turning points that are visited during the HCM algorithm. + """ + return self.history().query("load_step >= 0").strain.to_numpy() - # store strain values, this is for the FKM nonlinear roughness & surface layer algorithm, which adds residual stresses in another pass of the HCM algorithm - self._strain_values.append(current_point.strain.values[0]) + @property + def strain_values_first_run(self): + """ + Get the strain values of the turning points in the stress-strain diagram, for the first run of the HCM algorithm. + They are needed in the FKM nonlinear roughness & surface layer algorithm, which adds residual stresses in another pass of the HCM algorithm. - # count number of strain values in the first run of the HCM algorithm - if self._run_index == 1: - self._n_strain_values_first_run += 1 + Returns + ------- + list of float + The strain values of the turning points that are visited during the first run of the HCM algorithm. + """ - return current_point + return self.history().query("load_step >= 0 and run_index == 1").strain.to_numpy() - def _handle_case_a_i(self, *, current_point, previous_point, recording_lists): - """Handle the case a) i. in the HCM algorithm where - the memory 3 effect is considered.""" + @property + def strain_values_second_run(self): + """ + Get the strain values of the turning points in the stress-strain diagram, for the second and any further run of the HCM algorithm. + They are needed in the FKM nonlinear roughness & surface layer algorithm, which adds residual stresses in another pass of the HCM algorithm. 
- self._hcm_message += f" case a) i., detected half counted hysteresis" + Returns + ------- + list of float + The strain values of the turning points that are visited during the second run of the HCM algorithm. + """ - # case "Memory 3" - # the first part is still on the secondary branch, the second part is on the primary branch - # split these two parts + return self.history().query("load_step >= 0 and run_index == 2").strain.to_numpy() - # the secondary branch corresponds to the load range [L, -L], where L is the previous load, - # which is named L_{q-1} in the FKM document - flipped_previous_point = self._HCM_Point(load=-previous_point.load) - flipped_previous_point = self._proceed_on_secondary_branch(previous_point, flipped_previous_point) + def history(self): + """Compile the history of noteworthy points. - # the primary branch is the rest - current_point = self._proceed_on_primary_branch(current_point) + Returns + ------- - [_loads_min, _loads_max, _S_min, _S_max, _epsilon_min, _epsilon_max, _epsilon_min_LF, - _epsilon_max_LF, _is_closed_hysteresis, _is_zero_mean_stress_and_strain, _debug_output] = recording_lists - _loads_min = pd.concat([_loads_min, -abs(previous_point.load)]) - _loads_max = pd.concat([_loads_max, abs(previous_point.load)]) - _S_min = pd.concat([_S_min, -abs(previous_point.stress)]) - _S_max = pd.concat([_S_max, abs(previous_point.stress)]) - _epsilon_min = pd.concat([_epsilon_min, -abs(previous_point.strain)]) - _epsilon_max = pd.concat([_epsilon_max, abs(previous_point.strain)]) - _epsilon_min_LF = pd.concat([_epsilon_min_LF, self._epsilon_min_LF]) - _epsilon_max_LF = pd.concat([_epsilon_max_LF, self._epsilon_max_LF]) - _is_closed_hysteresis.append(False) # the hysteresis is not fully closed and will be considered half damage - _is_zero_mean_stress_and_strain.append(True) # force the mean stress and strain to be zero + history : pd.DataFrame + The history containing of + ``load``, ``stress``, ``strain`` and ``secondary_branch``. + The ``secondary_branch`` column is ``bool`` and indicates if the point + is on secondary load branch. - # store strain values, this is for the FKM nonlinear roughness & surface layer algorithm, which adds residual stresses in another pass of the HCM algorithm - self._strain_values.append(current_point.strain.values[0]) + The index consists of the following levels: + * ``load_segment``: the number of the point + * ``load_step``: the index of the point in the actual samples + * ``run_index``: the index of the run (usually 1 or 2) + * ``turning_point``: the number of the turning point (-1 if it is not a turning point) + * ``hyst_from``: the number of the hysteresis starting at the point (-1 if there isn't one) + * ``hyst_to``: the number of the hysteresis opened at the point (-1 if there isn't one) + * ``hyst_close``: the number hof the hysteresis closed at the point (-1 if there isn't one) - # count number of strain values in the first run of the HCM algorithm - if self._run_index == 1: - self._n_strain_values_first_run += 1 + Notes + ----- - # A note on _is_zero_mean_stress_and_strain: the FKM document specifies zero mean stress and strain in the current case, - # sigma_m=0, and epsilon_m=0 (eq. (2.9-52, 2.9-53)). - # Due to rounding errors as a result of the binning (de: Klassierung), the sigma_m and epsilon_m values are - # normally not zero. 
+        The history contains all the turning points with two other kinds of points injected:
+        * The primary hysteresis opening (Memory 3 of the guideline)
+        * The closing points of a hysteresis
-        self._hysteresis_index += 1    # increment the hysteresis counter, only needed for the `interpolated_stress_strain` method which helps in plotting the hystereses
+        Note that the ``load_step`` index of the injected points is always `-1`, so you
+        can't use it to determine the index of a hysteresis closing in the original
+        signal.
-        return (
-            current_point,
-            [
-                _loads_min, _loads_max, _S_min, _S_max, _epsilon_min, _epsilon_max, _epsilon_min_LF,
-                _epsilon_max_LF, _is_closed_hysteresis, _is_zero_mean_stress_and_strain, _debug_output
-            ]
+        """
+        history = pd.concat([rr for rr, _ in self._history_record]).reset_index(
+            drop=True
        )
+        history["load_segment"] = np.arange(1, len(history) + 1)
+
+        hysts = np.concatenate([hs for _, hs in self._history_record])
+        hyst_index = np.concatenate(
+            [[np.arange(len(hysts))], hysts[:, FROM:CLOSE].T, [hysts[:, IS_CLOSED].T]]
+        ).T
+
+        hyst_from_marker = pd.Series(-1, index=history.index)
+        hyst_to_marker = pd.Series(-1, index=history.index)
+
+        if len(hysts):
+            hyst_from_marker.iloc[hyst_index[:, FROM]] = hyst_index[:, IS_CLOSED]
+            hyst_to_marker.iloc[hyst_index[:, TO]] = hyst_index[:, IS_CLOSED]
+
+        history["hyst_from"] = hyst_from_marker
+        history["hyst_to"] = hyst_to_marker
+        history["hyst_close"] = pd.Series(-1, index=history.index)
+
+        to_insert = []
+        negate = []
+        turning_point_drop_idx = []
+        hyst_close_index = []
+
+        for hyst_index, hyst in enumerate(hysts):
+            if hyst[IS_CLOSED] == MEMORY_1_2:
+                hyst_close = int(hyst[CLOSE]) + len(to_insert)
+                hyst_from = int(hyst[FROM])
+                turning_point_drop_idx.append(hyst_close)
+                hyst_close_index.append([hyst_close, hyst_index])
+                to_insert.append((hyst_close, hyst_from))
+            else:
+                hyst_from = int(hyst[FROM])
+                hyst_to = int(hyst[TO]) + len(to_insert)
+                negate.append(hyst_to)
+                turning_point_drop_idx.append(hyst_to)
+                to_insert.append((hyst_to, hyst_from))
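+        # The positions collected above are used below to inject the extra history
+        # points: for each closed hysteresis a copy of its opening point is inserted
+        # at the closing position, and for each Memory 3 (half counted) hysteresis a
+        # negated copy of its opening point is inserted at its turning position.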
- This is either the load itself if it is already scaler, - or the node from the first assessment point if multiple points are - considered at once.""" - - if isinstance(current_load, pd.Series): - current_load_representative = current_load.iloc[0] - else: - current_load_representative = current_load - return current_load_representative - - def _initialize_epsilon_min_for_hcm_run(self, samples, load_turning_points): - """initializes the values of epsilon_min_LF and epsilon_max_LF to - have the proper dimensions.""" - - if self._is_load_sequence_start: - self._is_load_sequence_start = False - - if not isinstance(load_turning_points, np.ndarray): - # properly initialize self._epsilon_min_LF and self._epsilon_max_LF - first_sample = samples[samples.index.get_level_values("load_step") == 0].reset_index(drop=True) + hyst_close_index = np.array(hyst_close_index, dtype=np.int64) - n_nodes = len(first_sample) - self._epsilon_min_LF = pd.Series([0.0]*n_nodes, index=pd.Index(np.arange(n_nodes), name='node_id')) - self._epsilon_max_LF = pd.Series([0.0]*n_nodes, index=pd.Index(np.arange(n_nodes), name='node_id')) + negate = np.array(negate, dtype=np.int64) + index = list(np.arange(len(history))) -class FKMNonlinearHysteresisPlotter: + for target, idx in to_insert: + index.insert(target, int(idx)) - def __init__(self, hcm_point_history, ramberg_osgood_relation): - self._hcm_point_history = hcm_point_history - self._ramberg_osgood_relation = ramberg_osgood_relation + history = history.iloc[index].reset_index(drop=True) - def interpolated_stress_strain_data(self, *, n_points_per_branch=100, only_hystereses=False): - """Return points on the traversed hysteresis curve, mainly intended for plotting. - The curve including all hystereses, primary and secondary branches is sampled - at a fixed number of points within each hysteresis branch. - These points can be used for plotting. + history.loc[ + turning_point_drop_idx, + ["turning_point", "load_step", "hyst_from", "hyst_to"], + ] = -1 + history.loc[turning_point_drop_idx, "secondary_branch"] = True + history.loc[negate, HYSTORY_COLUMNS] = -history.loc[ + negate, HYSTORY_COLUMNS + ] + history.loc[negate, "hyst_to"] = history.loc[negate + 1, "hyst_to"].to_numpy() + history.loc[negate + 1, "hyst_to"] = -1 + history.loc[negate, "secondary_branch"] = True - The intended use is to generate plots as follows: + if len(hyst_close_index): + history.loc[hyst_close_index[:, 0], "hyst_close"] = hyst_close_index[:, 1] - .. code:: python + history["load_segment"] = np.arange(len(history), dtype=np.int64) - fkm_nonlinear_detector.process_hcm_first(...) 
-            sampling_parameter = 100  # choose larger for smoother plot or smaller for lower runtime
-            plotting_data = detector.interpolated_stress_strain_data(n_points_per_branch=sampling_parameter)
+        history.set_index(HISTORY_INDEX_LEVELS, inplace=True)
-            strain_values_primary = plotting_data["strain_values_primary"]
-            stress_values_primary = plotting_data["stress_values_primary"]
-            hysteresis_index_primary = plotting_data["hysteresis_index_primary"]
-            strain_values_secondary = plotting_data["strain_values_secondary"]
-            stress_values_secondary = plotting_data["stress_values_secondary"]
-            hysteresis_index_secondary = plotting_data["hysteresis_index_secondary"]
+        return history
-            plt.plot(strain_values_primary, stress_values_primary, "g-", lw=3)
-            plt.plot(strain_values_secondary, stress_values_secondary, "b-.", lw=1)
+    def interpolated_stress_strain_data(
+        self,
+        *,
+        load_segment=None,
+        hysteresis_index=None,
+        n_points_per_branch=100
+    ):
+        """Calculate interpolated stress and strain data.

        Parameters
        ----------
-        n_points_per_branch : int, optional
-            How many sampling points to use per hysteresis branch, default 100.
-            A larger value values means smoother curves but longer runtime.
-
-        only_hystereses : bool, optional
-            Default ``False``. If only graphs of the closed hystereses should be output.
-            Note that this does not work for hysteresis that have multiple smaller hysterseses included.
+        load_segment : int, optional
+            The number of the load segment for which the stress-strain data is to be
+            interpolated.
+        hysteresis_index : int, optional
+            The number of the hysteresis for which the stress-strain data is to be
+            interpolated.
+        n_points_per_branch : int, optional
+            The number of points to interpolate for each load segment.

        Returns
        -------
-        plotting_data : dict
-            A dict with the following keys:
-
-            * "strain_values_primary"
-            * "stress_values_primary"
-            * "hysteresis_index_primary"
-            * "strain_values_secondary"
-            * "stress_values_secondary"
-            * "hysteresis_index_secondary"
-
-            The values are lists of strains and stresses of the points on the stress-strain curve,
-            separately for primary and secondary branches. The lists contain nan values whenever the
-            curve of the *same* branch is discontinuous. This allows to plot the entire curve
-            with different colors for primary and secondary branches.
-
-            The entries for hysteresis_index_primary and hysteresis_index_secondary are the row
-            indices into the collective DataFrame returned by the recorder. This allows, e.g.,
-            to separate the output of multiple runs of the HCM algorithm or to plot the traversed
-            paths on the stress-strain diagram for individual steps of the algorithm.
+        stress_strain_data : pd.DataFrame
+            The resulting ``DataFrame`` will contain the following columns:
+
+            * ``stress``, ``strain`` – the stress-strain data
+            * ``secondary_branch`` – a ``bool`` column indicating if the point is
+              on a secondary load branch
+            * ``hyst_index`` – the number of the hysteresis the load segment is part of (-1 if there isn't one)
+            * ``load_segment`` – the number of the load segment
+            * ``run_index`` – the number of the run
        """
+        history = self.history()
-        """self._hcm_point_history contains all traversed points:
-        It is a list of tuples (type, hcm_point, hysteresis_index), e.g., [ ("primary", hcm_point, 0), ("secondary", hcm_point, 1), ...]
-        where the type is one of {"primary", "secondary"} and indicates the hysteresis branch up to the current point
-        and the index is the hysteresis number to which the points belong.
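+        # Three modes: with ``hysteresis_index`` the opening segment of that
+        # hysteresis (and its closing segment, if present) is interpolated, with
+        # ``load_segment`` only that single segment, and with neither argument all
+        # load segments of the history are interpolated and concatenated,
+        # e.g. ``detector.interpolated_stress_strain_data(hysteresis_index=0)``.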
""" - - strain_values_primary = [] - stress_values_primary = [] - hysteresis_index_primary = [] - strain_values_secondary = [] - stress_values_secondary = [] - hysteresis_index_secondary = [] - - previous_point = FKMNonlinearDetector._HCM_Point(stress=0, strain=0, load=0) - previous_point._stress = pd.Series(0) - previous_point._strain = pd.Series(0) - previous_type = "primary" - previous_is_direction_up = None - last_secondary_start_point = None - is_direction_up = None - - # split primary parts if necessary - self._split_primary_parts(previous_point) - - # determine which points are part of closed hysteresis and which are only part - # of other parts in the stress-strain diagram - point_is_part_of_closed_hysteresis = self._determine_point_is_part_of_closed_hysteresis() - - previous_point = FKMNonlinearDetector._HCM_Point(strain=0) - previous_point._stress = pd.Series(0) - previous_point._strain = pd.Series(0) - - # iterate over all previously stored points of the curve - for (type, hcm_point, hysteresis_index), is_part_of_closed_hysteresis in zip(self._hcm_point_history, point_is_part_of_closed_hysteresis): - - # determine current direction ("upwards"/"downwards" in stress direction) of the hysteresis branch to be plotted - if hcm_point is not None and previous_point is not None: - is_direction_up = hcm_point.stress.iloc[0] - previous_point.stress.iloc[0] > 0 - - # depending on branch type, compute interpolated points on the branch - if type == "primary": - self._handle_primary_branch(n_points_per_branch=n_points_per_branch, only_hystereses=only_hystereses, - strain_values_primary=strain_values_primary, stress_values_primary=stress_values_primary, - hysteresis_index_primary=hysteresis_index_primary, strain_values_secondary=strain_values_secondary, - stress_values_secondary=stress_values_secondary, previous_point=previous_point, previous_type=previous_type, - type=type, hcm_point=hcm_point, hysteresis_index=hysteresis_index, - is_part_of_closed_hysteresis=is_part_of_closed_hysteresis) - - last_secondary_start_point = None - - elif type == "secondary": - hcm_point, secondary_start_point = self._handle_secondary_branch( - n_points_per_branch=n_points_per_branch, only_hystereses=only_hystereses, - strain_values_primary=strain_values_primary, stress_values_primary=stress_values_primary, - strain_values_secondary=strain_values_secondary, stress_values_secondary=stress_values_secondary, - hysteresis_index_secondary=hysteresis_index_secondary, previous_point=previous_point, - previous_type=previous_type, previous_is_direction_up=previous_is_direction_up, - last_secondary_start_point=last_secondary_start_point, hcm_point=hcm_point, hysteresis_index=hysteresis_index, - is_part_of_closed_hysteresis=is_part_of_closed_hysteresis, is_direction_up=is_direction_up) - - if previous_type == 'primary': - last_secondary_start_point = secondary_start_point - elif previous_type == 'discontinuity': - last_secondary_start_point = hcm_point - - elif type == "discontinuity": - - # if the option "only_hystereses" is set, only output point if it is part of a closed hysteresis - if is_part_of_closed_hysteresis or not only_hystereses: - - stress_values_secondary.append(np.nan) - strain_values_secondary.append(np.nan) - hysteresis_index_secondary.append(hysteresis_index) - - previous_type = type - continue + if hysteresis_index is not None: - previous_point = hcm_point - previous_type = type - previous_is_direction_up = is_direction_up - - result = { - "strain_values_primary": np.array(strain_values_primary), - 
"stress_values_primary": np.array(stress_values_primary), - "hysteresis_index_primary": np.array(hysteresis_index_primary), - "strain_values_secondary": np.array(strain_values_secondary), - "stress_values_secondary": np.array(stress_values_secondary), - "hysteresis_index_secondary": np.array(hysteresis_index_secondary) - } - return result - - def _handle_secondary_branch(self, *, n_points_per_branch, only_hystereses, strain_values_primary, stress_values_primary, - strain_values_secondary, stress_values_secondary, hysteresis_index_secondary, previous_point, - previous_type, previous_is_direction_up, last_secondary_start_point, hcm_point, hysteresis_index, - is_part_of_closed_hysteresis, is_direction_up): - - secondary_start_point = None - - # if the option "only_hystereses" is set, only output point if it is part of a closed hysteresis - if is_part_of_closed_hysteresis or not only_hystereses: - # whenever a new segment of the secondary branch starts, - # add the previous point as starting point - if previous_type == "primary": - stress_values_secondary.append(np.nan) - strain_values_secondary.append(np.nan) - hysteresis_index_secondary.append(hysteresis_index) - - if stress_values_primary: - stress_values_secondary.append(stress_values_primary[-1]) - strain_values_secondary.append(strain_values_primary[-1]) - hysteresis_index_secondary.append(hysteresis_index) - - # determine starting point of the current secondary branch - # After hanging hystereses that consist entirely of secondary branches, the line continues on a previous secondary branch - # Such case is detected if the previous direction up or downwards (from the hanging hystereses) is the same as the current direction (continuing after hanging hysteresis) - if previous_is_direction_up == is_direction_up and last_secondary_start_point is not None: - secondary_start_point = last_secondary_start_point + hyst_to = history.query(f"hyst_to == {hysteresis_index}") + if hysteresis_index in history.index.get_level_values("hyst_close"): + hyst_close = history.query(f"hyst_close == {hysteresis_index}") + load_segment_close = hyst_close.index.get_level_values("load_segment")[0] else: - secondary_start_point = previous_point + load_segment_close = None + + load_segment_to = hyst_to.index.get_level_values("load_segment")[0] - new_points_stress = [] - new_points_strain = [] + segments = [ + self._interpolate_deformation(load_segment_to, n_points_per_branch) + ] + if load_segment_close is not None: + segments.append( + self._interpolate_deformation( + load_segment_close, n_points_per_branch + ) + ) - # iterate over sampling point within current curve segment - for stress in np.linspace(previous_point.stress, hcm_point.stress, n_points_per_branch): - # compute point on secondary branch - delta_stress = stress - secondary_start_point.stress - delta_strain = self._ramberg_osgood_relation.delta_strain(delta_stress) + result = pd.concat(segments).reset_index(drop=True) + result["hyst_index"] = hysteresis_index - stress = secondary_start_point._stress + delta_stress - strain = secondary_start_point._strain + delta_strain + return result - new_points_stress.append(stress.iloc[0]) - new_points_strain.append(strain.iloc[0]) + if load_segment is not None: + return self._interpolate_deformation(load_segment, n_points_per_branch) - # if the hysteresis ends on the primary path, then the current assumption that we only need to plot the secondary branch is incorrect. - # In that case, the end points are not equal, do not output any curve then. 
+ return ( + pd.concat( + [ + self._interpolate_deformation( + row.load_segment, n_points_per_branch + ) + for _, row in history.reset_index().iterrows() + ] + ) + .reset_index(drop=True) + ) - # If the end points are equal - if np.isclose(stress, hcm_point.stress): - stress_values_secondary += new_points_stress - strain_values_secondary += new_points_strain - hysteresis_index_secondary += [hysteresis_index] * len(new_points_strain) - # if the end points are not equal (see explanation above) - else: - # reuse the previous point for the next part of the graph - hcm_point = previous_point - - return hcm_point, secondary_start_point - - def _handle_primary_branch(self, *, n_points_per_branch, only_hystereses, strain_values_primary, stress_values_primary, - hysteresis_index_primary, strain_values_secondary, stress_values_secondary, previous_point, previous_type, type, - hcm_point, hysteresis_index, is_part_of_closed_hysteresis): - - # if the option "only_hystereses" is set, only output point if it is part of a closed hysteresis - if is_part_of_closed_hysteresis or not only_hystereses: - # whenever a new segment of the primary branch starts, - # add the previous point as starting point - if previous_type != type: - stress_values_primary.append(np.nan) - strain_values_primary.append(np.nan) - hysteresis_index_primary.append(hysteresis_index) - stress_values_primary.append(stress_values_secondary[-1]) - strain_values_primary.append(strain_values_secondary[-1]) - hysteresis_index_primary.append(hysteresis_index) - - # iterate over sampling point within current curve segment - for stress in np.linspace(previous_point.stress, hcm_point.stress, n_points_per_branch): - # compute point on primary branch - strain = self._ramberg_osgood_relation.strain(stress) - stress_values_primary.append(stress[0]) - strain_values_primary.append(strain[0]) - hysteresis_index_primary.append(hysteresis_index) - - def _determine_point_is_part_of_closed_hysteresis(self): - """Determine which points are part of a closed hysteresis""" - point_is_part_of_closed_hysteresis = [] - previous_index = -1 - first_point_set = False - - for (_, _, index) in reversed(self._hcm_point_history): - if index != previous_index: - point_is_part_of_closed_hysteresis.insert(0, True) - first_point_set = True - - elif first_point_set: - first_point_set = False - point_is_part_of_closed_hysteresis.insert(0, True) - else: - point_is_part_of_closed_hysteresis.insert(0, False) + def _interpolate_deformation(self, load_segment, n_points): + history = self.history() + idx = history.index.get_level_values("load_segment").get_loc(load_segment) - previous_index = index + to_value = history.iloc[idx] - return point_is_part_of_closed_hysteresis + run_index = history.index.get_level_values("run_index")[idx] - def _split_primary_parts(self, previous_point): - """Adjust the _hcm_point_history, split parts on the primary branch - that appear for positive residual stresses.""" - old_hcm_point_history = self._hcm_point_history.copy() - largest_abs_stress_seen = pd.Series(0) - largest_abs_strain = 0 - self._hcm_point_history = [] + hyst_open_idx = history.index.get_level_values("hyst_to")[idx] + hyst_close_idx = history.index.get_level_values("hyst_close")[idx] - for (type, hcm_point, hysteresis_index) in old_hcm_point_history: - if type != "primary": - self._hcm_point_history.append((type, hcm_point, hysteresis_index)) - previous_point = hcm_point - continue + hyst_index = hyst_open_idx if hyst_open_idx >= 0 else hyst_close_idx - if all(previous_point.stress * 
hcm_point.stress < 0): - sign = np.sign(hcm_point.stress) - intermediate_point = FKMNonlinearDetector._HCM_Point( - strain=sign*largest_abs_strain, stress=sign*largest_abs_stress_seen - ) - self._hcm_point_history.append(("secondary", intermediate_point, hysteresis_index)) - self._hcm_point_history.append(("primary", hcm_point, hysteresis_index)) - else: - self._hcm_point_history.append((type, hcm_point, hysteresis_index)) + if idx == 0: + from_value = pd.Series({"stress": 0.0}) + elif hyst_close_idx >= 0: + from_value = history.query(f"hyst_to == {hyst_close_idx}").iloc[0] + elif hyst_open_idx >= 0: + from_value = history.query(f"hyst_from == {hyst_open_idx}").iloc[0] + else: + from_value = history.iloc[idx-1] - if all(abs(hcm_point.stress.values) > largest_abs_stress_seen.values): - largest_abs_stress_seen = abs(hcm_point.stress) - largest_abs_strain = abs(hcm_point.strain) + stress = np.linspace(from_value.stress, to_value.stress, n_points) - previous_point = hcm_point + if to_value.secondary_branch: + delta_stress = from_value.stress - stress + strain = from_value.strain - self._ramberg_osgood_relation.delta_strain(delta_stress) + else: + strain = self._ramberg_osgood_relation.strain(stress) + + return pd.DataFrame( + { + "stress": stress, + "strain": strain, + "secondary_branch": to_value.secondary_branch, + "hyst_index": hyst_index, + "load_segment": load_segment, + "run_index": run_index, + } + ) diff --git a/src/pylife/stress/rainflow/recorders.py b/src/pylife/stress/rainflow/recorders.py index b67a9f1d..dac66416 100644 --- a/src/pylife/stress/rainflow/recorders.py +++ b/src/pylife/stress/rainflow/recorders.py @@ -162,7 +162,6 @@ def __init__(self): self._is_closed_hysteresis = [] self._is_zero_mean_stress_and_strain = [] self._run_index = [] - self._debug_output = [] @property def loads_min(self): @@ -351,14 +350,14 @@ def collective(self): "epsilon_min_LF": self._epsilon_min_LF.values, "epsilon_max_LF": self._epsilon_max_LF.values, "is_closed_hysteresis": self._is_closed_hysteresis, # FIXME .values - "is_zero_mean_stress_and_strain": self._is_zero_mean_stress_and_strain, # FIXME .values, + "is_zero_mean_stress_and_strain": self._is_zero_mean_stress_and_strain, # FIXME .values, "run_index": np.array(self._run_index, dtype=np.int64), - "debug_output": self._debug_output, # FIXME .values, - }) + }, + ) def record_values_fkm_nonlinear(self, loads_min, loads_max, S_min, S_max, epsilon_min, epsilon_max, epsilon_min_LF, epsilon_max_LF, - is_closed_hysteresis, is_zero_mean_stress_and_strain, run_index, debug_output): + is_closed_hysteresis, is_zero_mean_stress_and_strain, run_index): """Record the loop values.""" if len(loads_min) > 0: @@ -378,10 +377,6 @@ def record_values_fkm_nonlinear(self, loads_min, loads_max, S_min, S_max, epsilo self._is_closed_hysteresis += is_closed_hysteresis self._is_zero_mean_stress_and_strain += is_zero_mean_stress_and_strain - if len(debug_output) == 0: - debug_output = [""] * len(S_min) - self._debug_output += debug_output - self._run_index += [run_index] * len(S_min) def _get_for_every_node(self, boolean_array): diff --git a/tests/stress/rainflow/test_fkm_nonlinear.py b/tests/stress/rainflow/test_fkm_nonlinear.py index 5300befe..325f023a 100644 --- a/tests/stress/rainflow/test_fkm_nonlinear.py +++ b/tests/stress/rainflow/test_fkm_nonlinear.py @@ -23,10 +23,83 @@ import pandas as pd import copy -import pylife.stress.rainflow as RF from pylife.stress.rainflow.fkm_nonlinear import FKMNonlinearDetector import pylife.stress.rainflow.recorders as RFR import 
pylife.materiallaws.notch_approximation_law +import pylife.materiallaws.notch_approximation_law_seegerbeste + + +@pytest.fixture(autouse=True) +def np_precision_2_print(): + old_prec = pd.get_option("display.precision") + old_expand = pd.get_option("expand_frame_repr") + with np.printoptions(precision=2): + pd.set_option("display.precision", 2) + pd.set_option("expand_frame_repr", False) + yield + pd.set_option("display.precision", old_prec) + pd.set_option("expand_frame_repr", old_expand) + + +class TestIncomplete(unittest.TestCase): + + def setUp(self): + + signal = np.array([0, 500.]) + + self._recorder = RFR.FKMNonlinearRecorder() + E = 206e3 # [MPa] Young's modulus + K = 2650 # 1184 [MPa] + n = 0.187 # [-] + K_p = 3.5 # [-] (de: Traglastformzahl) K_p = F_plastic / F_yield (3.1.1) + + # initialize notch approximation law + extended_neuber = pylife.materiallaws.notch_approximation_law.ExtendedNeuber(E, K, n, K_p) + + # wrap the notch approximation law by a binning class, which precomputes the values + maximum_absolute_load = max(abs(signal)) + extended_neuber_binned = pylife.materiallaws.notch_approximation_law.Binned( + extended_neuber, maximum_absolute_load, 100) + + # first run + detector = FKMNonlinearDetector(recorder=self._recorder, notch_approximation_law=extended_neuber_binned) + detector.process(signal) + + # second run + self._detector = detector.process(signal) + + def test_values(self): + np.testing.assert_array_equal(self._recorder.loads_min, np.array([])) + np.testing.assert_array_equal(self._recorder.loads_max, np.array([])) + np.testing.assert_array_equal(self._recorder.S_min, np.array([])) + np.testing.assert_array_equal(self._recorder.S_max, np.array([])) + np.testing.assert_array_equal(self._recorder.epsilon_min, np.array([])) + np.testing.assert_array_equal(self._recorder.epsilon_max, np.array([])) + + def test_strain_values(self): + np.testing.assert_array_equal(self._detector.strain_values_first_run, np.array([])) + np.testing.assert_allclose(self._detector.strain_values_second_run, np.array([2.48e-3, 5.65e-5]), rtol=1e-1) + np.testing.assert_allclose(self._detector.strain_values, np.array([2.48e-3, 5.65e-5]), rtol=1e-1) + + def test_epsilon_LF(self): + collective = self._recorder.collective + np.testing.assert_allclose(collective.epsilon_min_LF.to_numpy(), np.array([])) + np.testing.assert_allclose(collective.epsilon_max_LF.to_numpy(), np.array([])) + + def test_interpolation(self): + df = self._detector.interpolated_stress_strain_data(load_segment=0, n_points_per_branch=5) + expected = pd.DataFrame( + { + "stress": [0.0, 122.02, 244.04, 366.06, 488.08], + "strain": [0.0, 5.92e-4, 1.19e-3, 1.80e-3, 2.48e-3], + "secondary_branch": [False, False, False, False, False], + "hyst_index": -1, + "load_segment": 0, + "run_index": 2 + } + ) + + pd.testing.assert_frame_equal(df, expected, rtol=1e-1) class TestFKMMemory1Inner(unittest.TestCase): @@ -35,7 +108,7 @@ class TestFKMMemory1Inner(unittest.TestCase): def setUp(self): - signal = np.array([100,0,80,20,60,40]) + signal = np.array([100, 0, 80, 20, 60, 40]) self._recorder = RFR.FKMNonlinearRecorder() E = 206e3 # [MPa] Young's modulus @@ -69,9 +142,43 @@ def test_values(self): def test_strain_values(self): # regression test - np.testing.assert_allclose(self._detector.strain_values, np.array([4.854492e-04, 1.169416e-08, 3.883614e-04, 9.709922e-05, 2.912740e-04, 1.941866e-04, 4.854492e-04, 1.169416e-08, 3.883614e-04, 9.709922e-05, 2.912740e-04, 1.941866e-04]), rtol=1e-3, atol=1e-5) - 
np.testing.assert_allclose(self._detector.strain_values_first_run, np.array([4.854492e-04, 1.169416e-08, 3.883614e-04, 9.709922e-05, 2.912740e-04]), rtol=1e-3, atol=1e-5) - np.testing.assert_allclose(self._detector.strain_values_second_run, np.array([1.941866e-04, 4.854492e-04, 1.169416e-08, 3.883614e-04, 9.709922e-05, 2.912740e-04, 1.941866e-04]), rtol=1e-3, atol=1e-5) + + expected_first = np.array([4.85e-04, 1.17e-08, 3.88e-04, 9.70e-05, 2.91e-04]) + expected_second = np.array( + [1.94e-04, 4.85e-04, 1.17e-08, 3.88e-04, 9.70e-05, 2.91e-04, 1.94e-04] + ) + expected_total = np.concatenate((expected_first, expected_second)) + + np.testing.assert_allclose(self._detector.strain_values, expected_total, rtol=1e-3, atol=1e-5) + np.testing.assert_allclose(self._detector.strain_values_first_run, expected_first, rtol=1e-3, atol=1e-5) + np.testing.assert_allclose(self._detector.strain_values_second_run, expected_second, rtol=1e-3, atol=1e-5) + + def test_epsilon_LF(self): + collective = self._recorder.collective + + np.testing.assert_allclose( + collective.epsilon_min_LF.to_numpy(), np.array([0.0, 0.0, 0.0]), rtol=1e-2 + ) + + np.testing.assert_allclose( + collective.epsilon_max_LF.to_numpy(), + np.array([0.485, 0.485, 0.485]) * 1e-3, + rtol=1e-2, + ) + + def test_interpolation(self): + df = self._detector.interpolated_stress_strain_data(load_segment=5, n_points_per_branch=5) + expected = pd.DataFrame( + { + "stress": [6.0e01, 5.5e01, 5.0e01, 4.5e01, 4.0e01], + "strain": [2.9e-04, 2.7e-04, 2.4e-04, 2.2e-04, 1.9e-04], + "secondary_branch": True, + "hyst_index": 0, + "load_segment": 5, + "run_index": 2, + } + ) + pd.testing.assert_frame_equal(df, expected, rtol=1e-1) class TestFKMMemory1_2_3(unittest.TestCase): @@ -130,6 +237,22 @@ def test_strain_values(self): 9.71373378e-04, 4.74995178e-07, 9.71373378e-04, -9.713734e-04]), rtol=1e-3, atol=1e-5) + def test_epsilon_LF(self): + collective = self._recorder.collective + + np.testing.assert_allclose( + collective.epsilon_min_LF.to_numpy(), + np.array([-0.485, -0.485, -0.971, -0.971, -0.971, -0.971, -0.971, -0.971, -0.971, -0.971]) * 1e-3, + rtol=1e-2 + ) + + np.testing.assert_allclose( + collective.epsilon_max_LF.to_numpy(), + np.array([0.485, 0.485, 0.485, 0.971, 0.971, 0.971, 0.971, 0.971, 0.971, 0.971]) * 1e-3, + rtol=1e-2 + ) + + class TestHCMExample1(unittest.TestCase): """Example 2.7.1 "Akademisches Beispiel", p.74 """ @@ -183,106 +306,19 @@ def test_strain_values(self): np.testing.assert_allclose(self._detector.strain_values_first_run, np.array([0.000704, -0.001551, 0.000632, -0.002099, 0.001529, 0.000121, 0.001529, -0.001574]), rtol=1e-3, atol=1e-5) np.testing.assert_allclose(self._detector.strain_values_second_run, np.array([0.00061, -0.001574, 0.00061, -0.002099, 0.001529, 0.000121, 0.001529, -0.001574]), rtol=1e-3, atol=1e-5) - def test_plotting(self): - - plotting_data = self._detector.interpolated_stress_strain_data(n_points_per_branch=3, only_hystereses=False) - - strain_values_primary = plotting_data["strain_values_primary"] - stress_values_primary = plotting_data["stress_values_primary"] - hysteresis_index_primary = plotting_data["hysteresis_index_primary"] - strain_values_secondary = plotting_data["strain_values_secondary"] - stress_values_secondary = plotting_data["stress_values_secondary"] - hysteresis_index_secondary = plotting_data["hysteresis_index_secondary"] - - # plot resulting stress-strain curve - sampling_parameter = 50 # choose larger for smoother plot - plotting_data_fine = 
self._detector_1st.interpolated_stress_strain_data(n_points_per_branch=sampling_parameter) - - strain_values_primary_fine = plotting_data_fine["strain_values_primary"] - stress_values_primary_fine = plotting_data_fine["stress_values_primary"] - hysteresis_index_primary_fine = plotting_data_fine["hysteresis_index_primary"] - strain_values_secondary_fine = plotting_data_fine["strain_values_secondary"] - stress_values_secondary_fine = plotting_data_fine["stress_values_secondary"] - hysteresis_index_secondary_fine = plotting_data_fine["hysteresis_index_secondary"] - - # the following plots the test case for visual debugging - if False: - import matplotlib.pyplot as plt - fig, axes = plt.subplots(1, 2, figsize=(12,6)) - # load-time diagram - import matplotlib - matplotlib.rcParams.update({'font.size': 14}) - axes[0].plot(self.signal, "o-", lw=2) - axes[0].grid() - axes[0].set_xlabel("t [s]") - axes[0].set_ylabel("L [N]") - axes[0].set_title("Scaled load sequence") - - # stress-strain diagram - axes[1].plot(strain_values_primary_fine, stress_values_primary_fine, "y-", lw=1) - axes[1].plot(strain_values_secondary_fine, stress_values_secondary_fine, "y-.", lw=1) - axes[1].grid() - axes[1].set_xlabel(r"$\epsilon$") - axes[1].set_ylabel(r"$\sigma$ [MPa]") - axes[1].set_title("Material response") - - plt.savefig("test_fkm_nonlinear.png") - - - strain_values_primary_reference = np.array([ - 0., 0.00034608, 0.00070365, np.nan, -0.00070365, -0.00070365, - -0.00104958, -0.00155125, np.nan, -0.00155125, -0.00155125, -0.00179771, - -0.00209921, np.nan, -0.00178372, -0.00209921, -0.00209921, -0.00209921,]) - stress_values_primary_reference = np.array([ - 0., 71.23135917, 142.46271834, np.nan, -142.46271834, - -142.46271834, -200.66910165, -258.87548495, np.nan, -258.87548495, - -258.87548495, -279.28078419, -299.68608343, np.nan, -299.68608343, - -299.68608343, -299.68608343, -299.68608343, - ]) - hysteresis_index_primary_reference = np.array([0, 0, 0, 0, 0, 0, 0, 0, 2, 2, 2, 2, 2, 6, 6, 6, 6, 6]) - strain_values_secondary_reference = np.array([ - np.nan, 7.03647041e-04, 7.03647041e-04, 1.14870755e-05, - -7.03647041e-04, np.nan, -1.55125063e-03, -1.55125063e-03, - -5.43592015e-04, 6.32115565e-04, 6.32115565e-04, -3.75543053e-04, - -1.55125063e-03, np.nan, -2.09920894e-03, -2.09920894e-03, - -7.15382913e-04, 1.52864389e-03, 1.52864389e-03, 8.36483922e-04, - 1.21349805e-04, 1.21349805e-04, 8.13509770e-04, 1.52864389e-03, - np.nan, 1.52864389e-03, 1.52864389e-03, 1.52864389e-03, - 1.52864389e-03, 2.57497805e-04, -1.57385738e-03, -1.57385738e-03, - -5.66198760e-04, 6.09508820e-04, 6.09508820e-04, -3.98149798e-04, - -1.57385738e-03, np.nan, -1.57385738e-03, -1.57385738e-03, - -1.57385738e-03, -1.57385738e-03, -5.66198760e-04, 6.09508820e-04, - 6.09508820e-04, -3.98149798e-04, -1.57385738e-03, np.nan, - -1.57385738e-03, -1.67878923e-03, -1.78372203e-03, np.nan, - -2.09920894e-03, -2.09920894e-03, -7.15382913e-04, 1.52864389e-03, - 1.52864389e-03, 8.36483922e-04, 1.21349805e-04, 1.21349805e-04, - 8.13509770e-04, 1.52864389e-03, np.nan, 1.52864389e-03, - 1.52864389e-03, 1.52864389e-03, 1.52864389e-03, 2.57497805e-04, - -1.57385738e-03]) - stress_values_secondary_reference = np.array([ - np.nan, 142.46271834, 142.46271834, 0., -142.46271834, - np.nan, -258.87548495, -258.87548495, -52.19191877, 154.4916474, - 154.4916474, -52.19191877, -258.87548495, np.nan, -299.68608343, - -299.68608343, -19.19464281, 261.29679782, 261.29679782, 118.83407947, - -23.62863887, -23.62863887, 118.83407947, 261.29679782, np.nan, - 
261.29679782, 261.29679782, 261.29679782, 261.29679782, 2.42131287, - -256.45417208, -256.45417208, -49.77060591, 156.91296027, 156.91296027, - -49.77060591, -256.45417208, np.nan, -256.45417208, -256.45417208, - -256.45417208, -256.45417208, -49.77060591, 156.91296027, 156.91296027, - -49.77060591, -256.45417208, np.nan, -256.45417208, -278.07012776, - -299.68608343, np.nan, -299.68608343, -299.68608343, -19.19464281, - 261.29679782, 261.29679782, 118.83407947, -23.62863887, -23.62863887, - 118.83407947, 261.29679782, np.nan, 261.29679782, 261.29679782, - 261.29679782, 261.29679782, 2.42131287, -256.45417208]) - hysteresis_index_secondary_reference = np.array([0, 0, 0, 0, 0, 1, 1, 1, 1, 1, 1, 1, 1, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 5, 5, 5, 5, 6, 6, 6, 6, 6, 6, 6, 6, 6, 6, 6, 7, 7, 7, 7, 7, 7, 7]) - - np.testing.assert_allclose(strain_values_primary_reference, strain_values_primary, rtol=1e-3) - np.testing.assert_allclose(stress_values_primary_reference, stress_values_primary, rtol=1e-3) - np.testing.assert_allclose(hysteresis_index_primary_reference, hysteresis_index_primary) - - np.testing.assert_allclose(strain_values_secondary_reference, strain_values_secondary, rtol=1e-3) - np.testing.assert_allclose(stress_values_secondary_reference, stress_values_secondary, rtol=1e-3) - np.testing.assert_allclose(hysteresis_index_secondary_reference, hysteresis_index_secondary) + def test_epsilon_LF(self): + collective = self._recorder.collective + np.testing.assert_allclose( + collective.epsilon_min_LF.to_numpy(), + np.array([0.0, -1.55, -2.1, -2.1, -2.1, -2.1, -2.1]) * 1e-3, + rtol=1e-2 + ) + + np.testing.assert_allclose( + collective.epsilon_max_LF.to_numpy(), + np.array([0.70, 0.70, 1.53, 1.53, 1.53, 1.53, 1.53]) * 1e-3, + rtol=1e-2 + ) class TestHCMExample2(unittest.TestCase): @@ -356,6 +392,53 @@ def test_strain_values(self): 0.00410439, -0.00405193, 0.00603472, -0.00603472, 0.00603472, -0.00603472]), rtol=1e-3, atol=1e-5) +@pytest.mark.parametrize('vals, expected_loads_min, expected_loads_max', [ + ( + [200, 600, 1000, 60, 1500, 200, 80, 400, 1500, 700, 200], + [60, 80, 200, 60, 80], + [1000, 1500, 1000, 1500, 1500] + ), + ( + [0, 500], [], [] + ), + ( + [100, -200, 100, -250, 200, 0, 200, -200], + [-200, 0, -200, -200, -250, 0], + [100, 200, 100, 100, 200, 200] + ) +]) +def test_edge_case_value_in_sample_tail_simple_signal(vals, expected_loads_min, expected_loads_max): + signal = np.array(vals) + + E = 206e3 # [MPa] Young's modulus + K = 3.1148*(1251)**0.897 / (( np.min([0.338, 1033.*1251.**(-1.235)]) )**0.187) + #K = 2650.5 # [MPa] + n = 0.187 # [-] + K_p = 3.5 # [-] (de: Traglastformzahl) K_p = F_plastic / F_yield (3.1.1) + + extended_neuber = pylife.materiallaws.notch_approximation_law.ExtendedNeuber(E, K, n, K_p) + + maximum_absolute_load = max(abs(signal)) + + extended_neuber_binned = pylife.materiallaws.notch_approximation_law.Binned( + extended_neuber, maximum_absolute_load, 100 + ) + + detector = FKMNonlinearDetector( + recorder=RFR.FKMNonlinearRecorder(), + notch_approximation_law=extended_neuber_binned + ) + detector.process(signal).process(signal) + + loads_min = detector.recorder.loads_min + loads_max = detector.recorder.loads_max + + np.testing.assert_allclose(loads_min, np.array(expected_loads_min)) + np.testing.assert_allclose(loads_max, np.array(expected_loads_max)) + + detector.recorder.collective + + @pytest.mark.parametrize('vals, expected_loads_min, expected_loads_max', [ ( [200, 600, 1000, 60, 1500, 
200, 80, 400, 1500, 700, 200], @@ -405,6 +488,7 @@ def test_edge_case_value_in_sample_tail(vals, expected_loads_min, expected_loads detector.recorder.collective + def test_flush_edge_case_load(): mi_1 = pd.MultiIndex.from_product([range(9), range(3)], names=["load_step", "node_id"]) @@ -423,8 +507,8 @@ def test_flush_edge_case_load(): ], index=mi_2) E = 206e3 # [MPa] Young's modulus - K = 3.048*(1251)**0.07 / (( np.min([0.08, 1033.*1251.**(-1.05)]) )**0.07) - #K = 2650.5 # [MPa] + #K = 3.048*(1251)**0.07 / ((np.min([0.08, 1033.*1251.**(-1.05)]) )**0.07) + K = 2650.5 # [MPa] n = 0.07 # [-] K_p = 3.5 # [-] (de: Traglastformzahl) K_p = F_plastic / F_yield (3.1.1) @@ -450,7 +534,8 @@ def test_flush_edge_case_load(): ], index=pd.MultiIndex.from_product( [[1, 2, 6, 8, 10, 4, 14], range(3)], names=["load_step", "node_id"] - ) + ), + name="loads_min" ) expected_load_max = pd.Series( [ @@ -459,7 +544,8 @@ def test_flush_edge_case_load(): ], index=pd.MultiIndex.from_product( [[1, 3, 5, 9, 11, 7, 13], range(3)], names=["load_step", "node_id"] - ) + ), + name="loads_max" ) loads_min = detector.recorder.loads_min @@ -468,6 +554,100 @@ def test_flush_edge_case_load(): pd.testing.assert_series_equal(loads_min, expected_load_min) pd.testing.assert_series_equal(loads_max, expected_load_max) + collective = detector.recorder.collective + + np.testing.assert_allclose( + collective.epsilon_min_LF.to_numpy(), + np.array([0., 0., 0., -1.4, -1.67, -0.31, -1.75, -2.08, -0.4, -1.75, -2.08, -0.4, -1.75, -2.08, -0.4, -1.75, -2.08, -0.4, -1.75, -2.08, -0.4 ]) * 1e-3, + rtol=1e-1 + ) + + np.testing.assert_allclose( + collective.epsilon_max_LF.to_numpy(), + np.array([0.71, 0.83, 0.17, 0.71, 0.83, 0.17, 1.4, 1.67, 0.31, 1.4, 1.67, 0.31, 1.4, 1.67, 0.31, 1.4, 1.67, 0.31, 1.4, 1.67, 0.31]) * 1e-3, + rtol=1e-1 + ) + + +def test_flush_edge_case_load_simple_signal(): + + signal_1 = np.array([0.0, 143.0, -287.0, 143.0, -359.0, 287.0, 0.0, 287.0, -287.0]) + + mi_2 = pd.MultiIndex.from_product([range(9, 17), range(3)], names=["load_step", "node_id"]) + + signal_2 = np.array([143.0, -287.0, 143.0, -359.0, 287.0, 0.0, 287.0, -287.0]) + + E = 206e3 # [MPa] Young's modulus + K = 3.048*(1251)**0.07 / (( np.min([0.08, 1033.*1251.**(-1.05)]) )**0.07) + #K = 2650.5 # [MPa] + n = 0.07 # [-] + K_p = 3.5 # [-] (de: Traglastformzahl) K_p = F_plastic / F_yield (3.1.1) + + extended_neuber = pylife.materiallaws.notch_approximation_law.ExtendedNeuber(E, K, n, K_p) + + maximum_absolute_load = max(abs(np.concatenate([signal_1, signal_2]))) + + extended_neuber_binned = pylife.materiallaws.notch_approximation_law.Binned( + extended_neuber, maximum_absolute_load, 100 + ) + + detector = FKMNonlinearDetector( + recorder=RFR.FKMNonlinearRecorder(), + notch_approximation_law=extended_neuber_binned + ) + + detector.process(signal_1, flush=True).process(signal_2, flush=True) + + expected_load_min = np.array( + [-143.0, -287.0, 0.0, -287.0, -287.0, -359.0, 0.0], + ) + expected_load_max = np.array( + [143.0, 143.0, 287.0, 143.0, 143.0, 287.0, 287.0], + ) + + loads_min = detector.recorder.loads_min + loads_max = detector.recorder.loads_max + + np.testing.assert_allclose(loads_min, expected_load_min) + np.testing.assert_allclose(loads_max, expected_load_max) + + +def test_flush_edge_case_S_simple_signal(): + + signal_1 = np.array([0.0, 143.0, -287.0, 143.0, -359.0, 287.0, 0.0, 287.0, -287.0]) + + signal_2 = np.array([143.0, -287.0, 143.0, -359.0, 287.0, 0.0, 287.0, -287.0]) + + E = 206e3 # [MPa] Young's modulus + K = 3.048*(1251)**0.07 / (( np.min([0.08, 
1033.*1251.**(-1.05)]) )**0.07) + #K = 2650.5 # [MPa] + n = 0.07 # [-] + K_p = 3.5 # [-] (de: Traglastformzahl) K_p = F_plastic / F_yield (3.1.1) + + extended_neuber = pylife.materiallaws.notch_approximation_law.ExtendedNeuber(E, K, n, K_p) + + maximum_absolute_load = max(abs(np.concatenate([signal_1, signal_2]))) + + extended_neuber_binned = pylife.materiallaws.notch_approximation_law.Binned( + extended_neuber, maximum_absolute_load, 100 + ) + + detector = FKMNonlinearDetector( + recorder=RFR.FKMNonlinearRecorder(), + notch_approximation_law=extended_neuber_binned + ) + + detector.process(signal_1, flush=True).process(signal_2, flush=True) + + expected_S_min = np.array([-48.0, -96.7, 1.42e-14, -96.7, -96.7, -121.0, 1.42e-14]) + expected_S_max = pd.Series([49.1, 49.1, 96.74, 49.1, 49.1, 96.75, 96.75]) + + S_min = detector.recorder.S_min + S_max = detector.recorder.S_max + + np.testing.assert_allclose(S_min, expected_S_min, rtol=1e-1, atol=0.0) + np.testing.assert_allclose(S_max, expected_S_max, rtol=1e-1, atol=0.0) + def test_flush_edge_case_S(): mi_1 = pd.MultiIndex.from_product([range(9), range(3)], names=["load_step", "node_id"]) @@ -516,7 +696,8 @@ def test_flush_edge_case_S(): ], index=pd.MultiIndex.from_product( [[1, 2, 6, 8, 10, 4, 14], range(3)], names=["load_step", "node_id"] - ) + ), + name="S_min" ) expected_S_max = pd.Series( [ @@ -526,7 +707,8 @@ def test_flush_edge_case_S(): ], index=pd.MultiIndex.from_product( [[1, 3, 5, 9, 11, 7, 13], range(3)], names=["load_step", "node_id"] - ) + ), + name="S_max" ) S_min = detector.recorder.S_min @@ -614,11 +796,8 @@ def test_edge_case_value_in_sample_tail_compare_simple(vals, num): simple_collective = detector_simple.recorder.collective simple_collective.index = simple_collective.index.droplevel('assessment_point_index') - simple_collective.pop('debug_output') multi_collective = detector_multiindex.recorder.collective - if 'debug_output' in multi_collective: - multi_collective.pop('debug_output') pd.testing.assert_frame_equal( simple_collective, multi_collective.groupby('hysteresis_index').first(), @@ -698,12 +877,8 @@ def test_hcm_first_second(vals, num): simple_collective = detector_simple.recorder.collective simple_collective.index = simple_collective.index.droplevel('assessment_point_index') - simple_collective.pop('debug_output') multi_collective = detector_multiindex.recorder.collective - if 'debug_output' in multi_collective: - multi_collective.pop('debug_output') - pd.testing.assert_frame_equal( simple_collective, multi_collective.groupby('hysteresis_index').first(), @@ -713,3 +888,290 @@ def test_hcm_first_second(vals, num): reference = f.read() assert multi_collective.to_json(indent=4) == reference + + +@pytest.fixture +def detector_seeger_beste(): + E = 206e3 # [MPa] Young's modulus + K = 1184.0 # [MPa] + n = 0.187 # [-] + K_p = 3.5 # [-] (de: Traglastformzahl) K_p = F_plastic / F_yield (3.1.1) + + seeger_beste = pylife.materiallaws.notch_approximation_law_seegerbeste.SeegerBeste(E, K, n, K_p) + seeger_beste_binned = pylife.materiallaws.notch_approximation_law.Binned( + seeger_beste, 800, 100 + ) + + return FKMNonlinearDetector( + recorder=RFR.FKMNonlinearRecorder(), notch_approximation_law=seeger_beste_binned + ) + + +@pytest.fixture +def detector_interpolate(detector_seeger_beste): + vals = pd.Series([160, -200, 250, -250, 230, 0, 260]) * 800.0/260.0 + return detector_seeger_beste.process(vals, flush=False).process(vals, flush=True) + + +@pytest.mark.parametrize("load_segment, n_points_per_branch, expected", [ + ( + 0, 10, 
+ { + "stress": [0.0, -4.2e+1, -8.4e+1, -1.3e+2, -1.7e+2, -2.1e+2, -2.5e+2, -2.9e+2, -3.4e+2, -3.8e+2], + "strain": [0.0, -2.0e-4, -4.1e-4, -6.2e-4, -8.4e-4, -1.1e-3, -1.5e-3, -2.0e-3, -2.8e-3, -4.0e-3], + "secondary_branch": False, + "hyst_index": -1, + "load_segment": 0, + "run_index": 1, + } + ), + ( + 0, 5, + { + "stress": [0.0e+00, -9.4e+01, -1.9e+02, -2.8e+02, -3.8e+02], + "strain": [0.0e+00, -4.6e-04, -9.7e-04, -1.8e-03, -4.0e-03], + "secondary_branch": False, + "hyst_index": -1, + "load_segment": 0, + "run_index": 1 + } + ), + ( + 1, 5, + { + "stress": [-3.77e+02, -1.89e+02, 0.00e+00, 1.89e+02, 3.77e+02], + "strain": [-4.03e-03, -3.11e-03, -2.09e-03, -3.40e-04, 4.03e-03], + "secondary_branch": True, + "hyst_index": 0, + "load_segment": 1, + "run_index": 1, + } + ), + ( + 2, 5, + { + "stress": [3.77e+02, 3.89e+02, 4.01e+02, 4.12e+02, 4.24e+02], + "strain": [4.03e-03, 4.48e-03, 4.98e-03, 5.55e-03, 6.18e-03], + "secondary_branch": False, + "hyst_index": -1, + "load_segment": 2, + "run_index": 1, + } + ), +]) +def test_interpolation_like_in_demo_load_segment(detector_interpolate, load_segment, n_points_per_branch, expected): + df = detector_interpolate.interpolated_stress_strain_data(load_segment=load_segment, n_points_per_branch=n_points_per_branch) + expected = pd.DataFrame(expected) + pd.testing.assert_frame_equal(df, expected, rtol=1e-1) + + +@pytest.mark.parametrize("hyst_index, expected", [ + ( + 0, + { + "stress": [-3.77e+02, -1.89e+02, 0.00e+00, 1.89e+02, 3.77e+02], + "strain": [-4.03e-03, -3.11e-03, -2.09e-03, -3.40e-04, 4.03e-03], + "secondary_branch": [True, True, True, True, True], + "hyst_index": 0, + "load_segment": [1, 1, 1, 1, 1], + "run_index": 1, + } + ), + ( + 1, + { + "stress": [4.06e+02, 2.64e+02, 1.23e+02, -1.78e+01, -1.59e+02, -1.59e+02, -1.78e+01, 1.23e+02, 2.64e+02, 4.06e+02], + "strain": [5.19e-03, 4.50e-03, 3.80e-03, 2.93e-03, 1.51e-03, 1.51e-03, 2.20e-03, 2.91e-03, 3.77e-03, 5.19e-03], + "secondary_branch": [True, True, True, True, True, True, True, True, True, True], + "hyst_index": 1, + "load_segment": [5, 5, 5, 5, 5, 6, 6, 6, 6, 6], + "run_index": 1, + } + ), + ( + 5, + { + "stress": [4.31e+02, 2.17e+02, 3.44e+00, -2.10e+02, -4.24e+02, -4.24e+02, -2.10e+02, 3.44e+00, 2.17e+02, 4.31e+02], + "strain": [6.59e-03, 5.54e-03, 4.30e-03, 1.62e-03, -6.18e-03, -6.18e-03, -5.13e-03, -3.89e-03, -1.21e-03, 6.59e-03], + "secondary_branch": [True, True, True, True, True, True, True, True, True, True], + "hyst_index": 5, + "load_segment": [12, 12, 12, 12, 12, 16, 16, 16, 16, 16], + "run_index": 2, + } + ) +]) +def test_interpolation_like_in_demo_hyst_index(detector_interpolate, hyst_index, expected): + df = detector_interpolate.interpolated_stress_strain_data(hysteresis_index=hyst_index, n_points_per_branch=5) + expected = pd.DataFrame(expected) + pd.testing.assert_frame_equal(df, expected, rtol=1e-1) + + +def test_interpolation_everything(detector_interpolate): + df = detector_interpolate.interpolated_stress_strain_data(n_points_per_branch=3) + expected = pd.DataFrame( + { + "stress": [ + 0.0e+00, -1.9e+02, -3.8e+02, -3.8e+02, 0.0e+00, 3.8e+02, 3.8e+02, 4.0e+02, 4.2e+02, + 4.2e+02, 1.1e+00, -4.2e+02, -4.2e+02, -8.1e+00, 4.1e+02, 4.1e+02, 1.2e+02, -1.6e+02, + -1.6e+02, 1.2e+02, 4.1e+02, -4.2e+02, 1.1e+00, 4.2e+02, 4.2e+02, 4.3e+02, 4.3e+02, + 4.3e+02, 2.6e+01, -3.8e+02, -3.8e+02, 2.3e+01, 4.2e+02, 4.2e+02, 2.3e+01, -3.8e+02, + 4.3e+02, 3.4e+00, -4.2e+02, -4.2e+02, -1.0e+01, 4.0e+02, 4.0e+02, 1.2e+02, -1.6e+02, + -1.6e+02, 1.2e+02, 4.0e+02, -4.2e+02, 3.4e+00, 4.3e+02, 
4.3e+02, 4.3e+02, 4.3e+02 + ], + "strain": [ + 0.0e+00, -9.7e-04, -4.0e-03, -4.0e-03, -2.1e-03, 4.0e-03, 4.0e-03, 5.0e-03, 6.2e-03, + 6.2e-03, 3.9e-03, -6.0e-03, -6.0e-03, -3.9e-03, 5.2e-03, 5.2e-03, 3.8e-03, 1.5e-03, + 1.5e-03, 2.9e-03, 5.2e-03, -6.0e-03, -3.8e-03, 6.2e-03, 6.2e-03, 6.4e-03, 6.6e-03, + 6.6e-03, 4.5e-03, -3.7e-03, -3.7e-03, -1.6e-03, 6.3e-03, 6.3e-03, 4.2e-03, -3.7e-03, + 6.6e-03, 4.3e-03, -6.2e-03, -6.2e-03, -4.0e-03, 5.1e-03, 5.1e-03, 3.7e-03, 1.4e-03, + 1.4e-03, 2.8e-03, 5.1e-03, -6.2e-03, -3.9e-03, 6.6e-03, 6.6e-03, 6.6e-03, 6.6e-03 + ], + "secondary_branch": [ + False, False, False, True, True, True, False, False, False, True, True, True, + True, True, True, True, True, True, True, True, True, True, True, True, + False, False, False, True, True, True, True, True, True, True, True, True, + True, True, True, True, True, True, True, True, True, True, True, True, + True, True, True, False, False, False + ], + "hyst_index": [ + -1, -1, -1, 0, 0, 0, -1, -1, -1, 2, 2, 2, -1, -1, -1, 1, 1, 1, 1, 1, 1, 2, 2, 2, + -1, -1, -1, -1, -1, -1, 3, 3, 3, 3, 3, 3, 5, 5, 5, -1, -1, -1, 4, 4, 4, 4, 4, 4, + 5, 5, 5, -1, -1, -1 + ], + "load_segment": [ + 0, 0, 0, 1, 1, 1, 2, 2, 2, 3, 3, 3, 4, 4, 4, 5, 5, 5, 6, 6, 6, 7, 7, 7, + 8, 8, 8, 9, 9, 9, 10, 10, 10, 11, 11, 11, 12, 12, 12, 13, 13, 13, 14, 14, 14, 15, 15, 15, + 16, 16, 16, 17, 17, 17], + "run_index": [ + 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, + 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2 + ], + } + ) + pd.testing.assert_frame_equal(df, expected, rtol=1e-1) + + +def test_interpolation_everything_first_run(detector_seeger_beste): + vals = pd.Series([160, -200, 250, -250, 230, 0, 260]) * 800.0/260.0 + detector_seeger_beste.process(vals, flush=False) + df = detector_seeger_beste.interpolated_stress_strain_data(n_points_per_branch=3) + + expected = pd.DataFrame( + { + "stress": [ + 0.00e+00, -1.89e+02, -3.77e+02, -3.77e+02, 0.00e+00, 3.77e+02, 3.77e+02, 4.01e+02, 4.24e+02, + 4.24e+02, 1.15e+00, -4.22e+02, -4.22e+02, -8.06e+00, 4.06e+02, 4.06e+02, 1.23e+02, -1.59e+02 + ], + "strain": [ + 0.00e+00, -9.69e-04, -4.03e-03, -4.03e-03, -2.09e-03, 4.03e-03, 4.03e-03, 4.98e-03, 6.18e-03, + 6.18e-03, 3.92e-03, -6.05e-03, -6.05e-03, -3.86e-03, 5.19e-03, 5.19e-03, 3.79e-03, 1.51e-03 + ], + "secondary_branch": [ + False, False, False, True, True, True, False, False, False, + True, True, True, True, True, True, True, True, True + ], + "hyst_index": [ + -1, -1, -1, 0, 0, 0, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1 + ], + "load_segment": [ + 0, 0, 0, 1, 1, 1, 2, 2, 2, 3, 3, 3, 4, 4, 4, 5, 5, 5 + ], + "run_index": [ + 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1 + ], + } + ) + pd.testing.assert_frame_equal(df, expected, rtol=1e-1) + + +def test_history_guideline_at_once(): + # Fig 2.3 FKM NL Guideline + + signal = pd.Series([0., 200., -50., 250., -300., 150., -120., 350., 349.]) + signal.index.name = "load_step" + + recorder = RFR.FKMNonlinearRecorder() + E = 206e3 # [MPa] Young's modulus + K = 2650 # 1184 [MPa] + n = 0.187 # [-] + K_p = 3.5 # [-] (de: Traglastformzahl) K_p = F_plastic / F_yield (3.1.1) + + # initialize notch approximation law + extended_neuber = pylife.materiallaws.notch_approximation_law.ExtendedNeuber(E, K, n, K_p) + + # wrap the notch approximation law by a binning class, which precomputes the values + maximum_absolute_load = max(abs(signal)) + extended_neuber_binned = pylife.materiallaws.notch_approximation_law.Binned( + extended_neuber, 
maximum_absolute_load, 100) + + # first run + detector = FKMNonlinearDetector(recorder=recorder, notch_approximation_law=extended_neuber_binned) + + detector.process(signal) + + df = detector.history() + + expected = pd.DataFrame( + { + "load": [200., -50., 200., 250., -250., -300., 150., -120., 150., 300., 350.], + "stress": [2.0e+02, -4.9e+01, 2.0e+02, 2.5e+02, -2.5e+02, -3.0e+02, 1.5e+02, -1.2e+02, 1.5e+02, 3.0e+02, 3.5e+02], + "strain": [9.9e-04, -2.4e-04, 9.9e-04, 1.2e-03, -1.2e-03, -1.5e-03, 7.3e-04, -6.0e-04, 7.3e-04, 1.5e-03, 1.7e-03], + "secondary_branch": [False, True, True, False, True, False, True, True, True, True, False], + "load_step": [1, 2, -1, 3, -1, 4, 5, 6, -1, -1, 7], + "turning_point": [0, 1, -1, 2, -1, 3, 4, 5, -1, -1, 6], + "load_segment": np.arange(11), + "run_index": 1, + "hyst_from": [0, -1, -1, 1, -1, 3, 2, -1, -1, -1, -1], + "hyst_to": [-1, 0, -1, -1, 1, -1, -1, 2, -1, 3, -1], + "hyst_close": [-1, -1, 0, -1, -1, -1, -1, -1, 2, -1, -1], + } + ).set_index(["load_segment", "load_step", "run_index", "turning_point", "hyst_from", "hyst_to", "hyst_close"]) + + pd.testing.assert_frame_equal(df, expected, rtol=1e-1) + + +@pytest.mark.parametrize("split_point", [5]) +def test_history_guideline_at_split(split_point): + # Fig 2.3 FKM NL Guideline + + signal = pd.Series([0., 200., -50., 250., -300., 150., -120., 350., 349.]) + signal.index.name = "load_step" + + recorder = RFR.FKMNonlinearRecorder() + E = 206e3 # [MPa] Young's modulus + K = 2650 # 1184 [MPa] + n = 0.187 # [-] + K_p = 3.5 # [-] (de: Traglastformzahl) K_p = F_plastic / F_yield (3.1.1) + + # initialize notch approximation law + extended_neuber = pylife.materiallaws.notch_approximation_law.ExtendedNeuber(E, K, n, K_p) + + # wrap the notch approximation law by a binning class, which precomputes the values + maximum_absolute_load = max(abs(signal)) + extended_neuber_binned = pylife.materiallaws.notch_approximation_law.Binned( + extended_neuber, maximum_absolute_load, 100) + + # first run + detector = FKMNonlinearDetector(recorder=recorder, notch_approximation_law=extended_neuber_binned) + + detector.process(signal[:split_point]).process(signal[split_point:]) + + df = detector.history() + + expected = pd.DataFrame( + { + "load": [200., -50., 200., 250., -250., -300., 150., -120., 150., 300., 350.], + "stress": [2.0e+02, -4.9e+01, 2.0e+02, 2.5e+02, -2.5e+02, -3.0e+02, 1.5e+02, -1.2e+02, 1.5e+02, 3.0e+02, 3.5e+02], + "strain": [9.9e-04, -2.4e-04, 9.9e-04, 1.2e-03, -1.2e-03, -1.5e-03, 7.3e-04, -6.0e-04, 7.3e-04, 1.5e-03, 1.7e-03], + "secondary_branch": [False, True, True, False, True, False, True, True, True, True, False], + "load_step": [1, 2, -1, 3, -1, 4, 5, 6, -1, -1, 7], + "turning_point": [0, 1, -1, 2, -1, 3, 4, 5, -1, -1, 6], + "load_segment": np.arange(11), + "run_index": [1] * split_point + [2] * (11-split_point), + "hyst_from": [0, -1, -1, 1, -1, 3, 2, -1, -1, -1, -1], + "hyst_to": [-1, 0, -1, -1, 1, -1, -1, 2, -1, 3, -1], + "hyst_close": [-1, -1, 0, -1, -1, -1, -1, -1, 2, -1, -1], + } + ).set_index(["load_segment", "load_step", "run_index", "turning_point", "hyst_from", "hyst_to", "hyst_close"]) + + pd.testing.assert_frame_equal(df, expected, rtol=1e-1) diff --git a/tests/stress/rainflow/test_recorders.py b/tests/stress/rainflow/test_recorders.py index c78d75ad..1a07044d 100644 --- a/tests/stress/rainflow/test_recorders.py +++ b/tests/stress/rainflow/test_recorders.py @@ -144,8 +144,6 @@ def test_full_rainflow_recorder_two_non_zero_collective(): 'index_to': pd.Series([it1, it2], dtype=np.uintp) }) - 
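A standalone plausibility sketch (not part of the test suite) for the expected history used in test_history_guideline_at_once above: at a load of 200 the response is almost purely elastic, so the plain Ramberg-Osgood strain computed from the same material parameters (E = 206e3 MPa, K = 2650 MPa, n = 0.187) should land near the 9.9e-4 strain in the expected frame; the small remaining difference is well within the rtol=1e-1 used by the assertion. The snippet below is only an illustrative hand check and does not call pyLife at all.

# Hand check of the first history entry (load 200 -> strain ~9.9e-4), standalone sketch.
E = 206e3    # [MPa] Young's modulus, same value as in the test
K = 2650.0   # [MPa] cyclic hardening coefficient, same value as in the test
n = 0.187    # [-]  cyclic hardening exponent

stress = 200.0                                    # [MPa] first turning point of the signal
strain = stress / E + (stress / K) ** (1.0 / n)   # Ramberg-Osgood: elastic part + plastic part

print(f"{strain:.2e}")  # ~9.7e-04, close to the 9.9e-04 in the expected history (rtol=1e-1 there)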
print(expected) - print(expected.dtypes) pd.testing.assert_frame_equal(fr.collective, expected) @@ -260,16 +258,16 @@ def test_loopvalue_rainflow_recorder_histogram_one_non_zero(value_from, value_to # fkm nonlinear recorder def test_fkm_nonlinear_recorder_record_two_values(): a1, a2, b1, b2, c1, c2, d1, d2, e1, e2, f1, f2 = 23., 42., 46., 84., 2.5, -2.2, 4.8, 2.3, 4.5, -0.2, 1.8, 0.3 - g1, g2 = 1, 2 - h1, h2 = 4, 5 + g1, g2 = 1., 2. + h1, h2 = 4., 5. fr = RFR.FKMNonlinearRecorder() # arguments: loads_min, loads_max, S_min, S_max, epsilon_min, epsilon_max, epsilon_min_LF, epsilon_max_LF, - # is_closed_hysteresis, is_zero_mean_stress_and_strain, run_index, debug_output + # is_closed_hysteresis, is_zero_mean_stress_and_strain, run_index - args_1 = [pd.Series([v]) for v in [a1, b1, c1, d1, e1, f1, g1, h1]] + [[False], [False], 1, [""]] - args_2 = [pd.Series([v]) for v in [a2, b2, c2, d2, e2, f2, g2, h2]] + [[True], [False], 2, [""]] - args_3 = [pd.Series([v]) for v in [a2, b2, c2, d2, e2, f2, g2, h2]] + [[True], [True], 2, [""]] + args_1 = [pd.Series([v]) for v in [a1, b1, c1, d1, e1, f1, g1, h1]] + [[False], [False], 1] + args_2 = [pd.Series([v]) for v in [a2, b2, c2, d2, e2, f2, g2, h2]] + [[True], [False], 2] + args_3 = [pd.Series([v]) for v in [a2, b2, c2, d2, e2, f2, g2, h2]] + [[True], [True], 2] fr.record_values_fkm_nonlinear(*args_1) fr.record_values_fkm_nonlinear(*args_2) @@ -319,7 +317,6 @@ def test_fkm_nonlinear_recorder_empty_collective_default(): "is_closed_hysteresis": [], "is_zero_mean_stress_and_strain": [], "run_index": np.array([], dtype=np.int64), - "debug_output": [] } ) @@ -328,13 +325,13 @@ def test_fkm_nonlinear_recorder_empty_collective_default(): def test_fkm_nonlinear_recorder_two_non_zero_collective(): a1, a2, b1, b2, c1, c2, d1, d2, e1, e2, f1, f2 = 23., 42., 46., 84., 2.5, -2.2, 4.8, 2.3, 0.5, -0.2, 1.8, 0.3 - g1, g2 = 1, 2 - h1, h2 = 4, 5 + g1, g2 = 1., 2. + h1, h2 = 4., 5. fr = RFR.FKMNonlinearRecorder() - args_1 = [pd.Series([v]) for v in [a1, b1, c1, d1, e1, f1, g1, h1]] + [[False], [False], 1, [""]] - args_2 = [pd.Series([v]) for v in [a2, b2, c2, d2, e2, f2, g2, h2]] + [[True], [False], 2, [""]] - args_3 = [pd.Series([v]) for v in [a2, b2, c2, d2, e2, f2, g2, h2]] + [[True], [True], 2, [""]] + args_1 = [pd.Series([v]) for v in [a1, b1, c1, d1, e1, f1, g1, h1]] + [[False], [False], 1] + args_2 = [pd.Series([v]) for v in [a2, b2, c2, d2, e2, f2, g2, h2]] + [[True], [False], 2] + args_3 = [pd.Series([v]) for v in [a2, b2, c2, d2, e2, f2, g2, h2]] + [[True], [True], 2] fr.record_values_fkm_nonlinear(*args_1) fr.record_values_fkm_nonlinear(*args_2) @@ -359,7 +356,6 @@ def test_fkm_nonlinear_recorder_two_non_zero_collective(): "is_closed_hysteresis": [False, True, True], "is_zero_mean_stress_and_strain": [False, False, True], "run_index": [1, 2, 2], - "debug_output": ["", "", ""], } )
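As a standalone sketch of the recorder usage exercised above (assuming the test module's alias, import pylife.stress.rainflow.recorders as RFR): record a single closed hysteresis with made-up values in the positional order documented in the comment above, then derive load amplitude and mean from the collective. The amplitude/mean columns are ad hoc post-processing here, not something the recorder provides itself.

import pandas as pd
import pylife.stress.rainflow.recorders as RFR   # assumed import path, as aliased in the tests

recorder = RFR.FKMNonlinearRecorder()

# positional order as in the tests: loads_min, loads_max, S_min, S_max, epsilon_min, epsilon_max,
# epsilon_min_LF, epsilon_max_LF, is_closed_hysteresis, is_zero_mean_stress_and_strain, run_index
recorder.record_values_fkm_nonlinear(
    pd.Series([-287.0]), pd.Series([287.0]),     # loads_min, loads_max (arbitrary demo values)
    pd.Series([-96.7]), pd.Series([96.7]),       # S_min, S_max
    pd.Series([-1.4e-3]), pd.Series([1.4e-3]),   # epsilon_min, epsilon_max
    pd.Series([-1.4e-3]), pd.Series([1.4e-3]),   # epsilon_min_LF, epsilon_max_LF
    [True], [False], 1,
)

collective = recorder.collective
amplitude = (collective["loads_max"] - collective["loads_min"]) / 2.0  # ad hoc, not a collective column
mean_load = (collective["loads_max"] + collective["loads_min"]) / 2.0
print(float(amplitude.iloc[0]), float(mean_load.iloc[0]))              # 287.0 0.0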