diff --git a/experiment_log_3.txt b/experiment_log_3.txt new file mode 100644 index 00000000..0eebadbe --- /dev/null +++ b/experiment_log_3.txt @@ -0,0 +1,92 @@ + +======================================== +Experiment conducted on: 2024-12-15 14:35:04.892244 + +Hyperparameters: +name: GlobalLocalKernel +trainable: True +verbose: False +is_fitted: False +ignored_attrs: ['train_set', 'val_set', 'test_set'] +num_users: None +num_items: 1656 +uid_map: None +iid_map: None +max_rating: 4.0 +min_rating: 1.0 +global_mean: None +_Recommender__user_ids: None +_Recommender__item_ids: None +n_hid: 10 +n_dim: 2 +n_layers: 2 +lambda_s: 0.006 +lambda_2: 0.001 +gk_size: 3 +dot_scale: 1 +max_epoch_p: 500 +max_epoch_f: 1000 +tol_p: 0.0001 +tol_f: 1e-05 +patience_p: 10 +patience_f: 10 +lr_p: 0.1 +lr_f: 0.01 +device: cuda +model: CompleteNet( + (local_kernel_net): KernelNet( + (layers): ModuleList( + (0-1): 2 x KernelLayer( + (activation): Sigmoid() + ) + (2): KernelLayer( + (activation): Identity() + ) + ) + (dropout): Dropout(p=0.33, inplace=False) + ) +) +train_r_local: [[4.13143 3.8251498 4.2022805 ... 3.46096 4.9777036 3.8819532] + [4.1326137 3.825405 4.202767 ... 3.4611 4.9777055 3.8823788] + [3.8330228 3.6725302 4.1033254 ... 3.3236573 4.9774 3.8047073] + ... + [3.1168575 3.5055826 3.8102179 ... 3.2242472 4.9762387 3.5493011] + [3.1168575 3.5055826 3.8102179 ... 3.2242472 4.9762387 3.5493011] + [3.1168575 3.5055826 3.8102179 ... 3.2242472 4.9762387 3.5493011]] +_train_r: [[4. 0. 0. ... 0. 0. 0.] + [0. 5. 0. ... 0. 0. 0.] + [0. 0. 5. ... 0. 0. 0.] + ... + [0. 0. 0. ... 0. 0. 0.] + [0. 0. 0. ... 0. 0. 0.] + [0. 0. 0. ... 0. 0. 0.]] +_train_mask: [[1. 0. 0. ... 0. 0. 0.] + [0. 1. 0. ... 0. 0. 0.] + [0. 0. 1. ... 0. 0. 0.] + ... + [0. 0. 0. ... 0. 0. 0.] + [0. 0. 0. ... 0. 0. 0.] + [0. 0. 0. ... 0. 0. 0.]] + +Test Results: +Early stopping fine-tuning at epoch: 237 +Early stopping fine-tuning at epoch: 241 +Early stopping fine-tuning at epoch: 251 +Early stopping fine-tuning at epoch: 261 +Early stopping fine-tuning at epoch: 271 +Early stopping fine-tuning at epoch: 281 +Early stopping fine-tuning at epoch: 420 +Early stopping fine-tuning at epoch: 421 +Early stopping fine-tuning at epoch: 900 + +TEST: +... 
+ | MAE | RMSE | AUC | MAP | NDCG@10 | Precision@10 | Recall@10 | Train (s) | Test (s) +----------------- + ------ + ------ + ------ + ------ + ------- + ------------ + --------- + --------- + -------- +GlobalLocalKernel | 0.8029 | 0.9731 | 0.4371 | 0.0158 | 0.0168 | 0.0137 | 0.0110 | 55.5029 | 232.3715 +MF | 0.7430 | 0.8998 | 0.7445 | 0.0548 | 0.0761 | 0.0675 | 0.0463 | 0.0302 | 0.8123 +PMF | 0.7534 | 0.9138 | 0.7744 | 0.0671 | 0.0969 | 0.0813 | 0.0639 | 1.4669 | 1.2117 +BPR | 2.0143 | 2.2267 | 0.8695 | 0.1042 | 0.1500 | 0.1110 | 0.1195 | 1.3102 | 0.7666 + + +======================================== diff --git a/project_notebook.ipynb b/project_notebook.ipynb new file mode 100644 index 00000000..d8ec5555 --- /dev/null +++ b/project_notebook.ipynb @@ -0,0 +1,445 @@ +{ + "cells": [ + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "# Recommendation Systems Project\n", + "\n", + "By: ADJAL Mehdi Zakaria - BENBETKA Rachid - YAMANI Mohammed Kamel - Rami Boukaroura\n", + "\n", + "You will find the report at the following link: " + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "## How to run this notebook\n", + "\n", + "Use a Python environment with PyTorch and a Cornac installation that includes this project's `GlobalLocalKernel` model, then execute the cells in order from top to bottom." + ] + }, + { + "cell_type": "code", + "execution_count": 1, + "metadata": {}, + "outputs": [ + { + "name": "stderr", + "output_type": "stream", + "text": [ + "/home/mehdiz/miniconda3/lib/python3.12/site-packages/tqdm/auto.py:21: TqdmWarning: IProgress not found. Please update jupyter and ipywidgets. See https://ipywidgets.readthedocs.io/en/stable/user_install.html\n", + " from .autonotebook import tqdm as notebook_tqdm\n" + ] + } + ], + "source": [ + "import os\n", + "from datetime import datetime\n", + "import numpy as np\n", + "import cornac\n", + "from cornac.models import GlobalLocalKernel, MF, PMF, BPR\n", + "from cornac.eval_methods import RatioSplit\n", + "from cornac.metrics import MAE, RMSE, Precision, Recall, NDCG, AUC, MAP" + ] + }, + { + "cell_type": "code", + "execution_count": 2, + "metadata": {}, + "outputs": [], + "source": [ + "# Function to find the next available log file name\n", + "def get_next_log_file(base_name=\"experiment_log\", ext=\".txt\"):\n", + " counter = 1\n", + " while os.path.exists(f\"{base_name}_{counter}{ext}\"):\n", + " counter += 1\n", + " return f\"{base_name}_{counter}{ext}\"" + ] + }, + { + "cell_type": "code", + "execution_count": 3, + "metadata": {}, + "outputs": [], + "source": [ + "# Function to log results\n", + "def log_results(log_file, test_results, model_instance):\n", + " with open(log_file, \"a\") as f:\n", + " f.write(\"\\n\" + \"=\" * 40 + \"\\n\")\n", + " f.write(f\"Experiment conducted on: {datetime.now()}\\n\")\n", + " f.write(\"\\nHyperparameters:\\n\")\n", + " for attr, value in vars(model_instance).items():\n", + " f.write(f\"{attr}: {value}\\n\")\n", + " f.write(\"\\nTest Results:\\n\")\n", + " f.write(test_results)\n", + " f.write(\"\\n\" + \"=\" * 40 + \"\\n\")" + ] + }, + { + "cell_type": "code", + "execution_count": 4, + "metadata": {}, + "outputs": [], + "source": [ + "# Load the MovieLens 100K dataset\n", + "ml_100k = cornac.datasets.movielens.load_feedback()" + ] + }, + { + "cell_type": "code", + "execution_count": 5, + "metadata": {}, + "outputs": [], + "source": [ + "# Split the data 80/20 with a fixed seed for reproducibility\n", + "rs = RatioSplit(data=ml_100k, test_size=0.2, rating_threshold=4.0, seed=123)" + ] + }, + { + "cell_type": "code", + "execution_count": 6, + "metadata": {}, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "Data matrix loaded\n", + 
"Number of users: 943\n", + "Number of movies: 1656\n", + "Number of training ratings: 80000\n", + "Number of test ratings: 19971\n" + ] + } + ], + "source": [ + "# Get the total number of users and items in the subset\n", + "n_u = rs.total_users\n", + "n_m = rs.total_items\n", + "\n", + "print('Data matrix loaded')\n", + "print('Number of users: {}'.format(n_u))\n", + "print('Number of movies: {}'.format(n_m))\n", + "print('Number of training ratings: {}'.format(len(rs.train_set.uir_tuple[2])))\n", + "print('Number of test ratings: {}'.format(len(rs.test_set.uir_tuple[2])))" + ] + }, + { + "cell_type": "code", + "execution_count": 7, + "metadata": {}, + "outputs": [], + "source": [ + "# Initialize your model\n", + "my_model = GlobalLocalKernel(\n", + " # Example hyperparameters\n", + " n_hid=10, \n", + " n_dim=2, \n", + " max_epoch_p=500, \n", + " max_epoch_f=1000,\n", + " lr_p=0.1,\n", + " lr_f=0.01, \n", + " verbose=False\n", + ")" + ] + }, + { + "cell_type": "code", + "execution_count": 13, + "metadata": {}, + "outputs": [], + "source": [ + "# Models to compare with\n", + "mf = MF(k=10, max_iter=25, learning_rate=0.01, lambda_reg=0.02, use_bias=True, seed=123)\n", + "pmf = PMF(k=10, max_iter=100, learning_rate=0.001, lambda_reg=0.001, seed=123)\n", + "bpr = BPR(k=10, max_iter=200, learning_rate=0.001, lambda_reg=0.01, seed=123)" + ] + }, + { + "cell_type": "code", + "execution_count": 9, + "metadata": {}, + "outputs": [], + "source": [ + "# Define some basic metrics\n", + "metrics = [MAE(), RMSE(), Precision(k=10), Recall(k=10), NDCG(k=10), AUC(), MAP()]" + ] + }, + { + "cell_type": "code", + "execution_count": 10, + "metadata": {}, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "Early stopping fine-tuning at epoch: 237\n", + "Early stopping fine-tuning at epoch: 241\n", + "Early stopping fine-tuning at epoch: 251\n", + "Early stopping fine-tuning at epoch: 261\n", + "Early stopping fine-tuning at epoch: 271\n", + "Early stopping fine-tuning at epoch: 281\n", + "Early stopping fine-tuning at epoch: 420\n", + "Early stopping fine-tuning at epoch: 421\n", + "Early stopping fine-tuning at epoch: 900\n", + "\n", + "TEST:\n", + "...\n", + " | MAE | RMSE | AUC | MAP | NDCG@10 | Precision@10 | Recall@10 | Train (s) | Test (s)\n", + "----------------- + ------ + ------ + ------ + ------ + ------- + ------------ + --------- + --------- + --------\n", + "GlobalLocalKernel | 0.8029 | 0.9731 | 0.4371 | 0.0158 | 0.0168 | 0.0137 | 0.0110 | 55.5029 | 232.3715\n", + "MF | 0.7430 | 0.8998 | 0.7445 | 0.0548 | 0.0761 | 0.0675 | 0.0463 | 0.0302 | 0.8123\n", + "PMF | 0.7534 | 0.9138 | 0.7744 | 0.0671 | 0.0969 | 0.0813 | 0.0639 | 1.4669 | 1.2117\n", + "BPR | 2.0143 | 2.2267 | 0.8695 | 0.1042 | 0.1500 | 0.1110 | 0.1195 | 1.3102 | 0.7666\n", + "\n", + "\n", + "Experiment results and hyperparameters saved to experiment_log_3.txt\n" + ] + } + ], + "source": [ + "# Redirect Cornac output to capture experiment results\n", + "from io import StringIO\n", + "import sys\n", + "\n", + "# Get the next available log file name\n", + "log_file = get_next_log_file()\n", + "temp = sys.stdout # Store original stdout object for later\n", + "sys.stdout = StringIO() # Redirect stdout to capture results" + ] + }, + { + "cell_type": "code", + "execution_count": 11, + "metadata": {}, + "outputs": [ + { + "name": "stderr", + "output_type": "stream", + "text": [ + "Epochs 1-10 (Pre-Training): 100%|██████████| 10/10 [00:01<00:00, 7.78it/s, Train RMSE=1.0664]\n", + "Epochs 11-20 (Pre-Training): 
100%|██████████| 10/10 [00:00<00:00, 32.31it/s, Train RMSE=1.1418]\n", + "Epochs 21-30 (Pre-Training): 100%|██████████| 10/10 [00:00<00:00, 31.31it/s, Train RMSE=1.0929]\n", + "Epochs 31-40 (Pre-Training): 100%|██████████| 10/10 [00:00<00:00, 34.31it/s, Train RMSE=1.0234]\n", + "Epochs 41-50 (Pre-Training): 100%|██████████| 10/10 [00:00<00:00, 33.04it/s, Train RMSE=1.0055]\n", + "Epochs 51-60 (Pre-Training): 100%|██████████| 10/10 [00:00<00:00, 33.87it/s, Train RMSE=0.9866]\n", + "Epochs 61-70 (Pre-Training): 100%|██████████| 10/10 [00:00<00:00, 30.84it/s, Train RMSE=0.9950]\n", + "Epochs 71-80 (Pre-Training): 100%|██████████| 10/10 [00:00<00:00, 27.34it/s, Train RMSE=0.9840]\n", + "Epochs 81-90 (Pre-Training): 100%|██████████| 10/10 [00:00<00:00, 30.68it/s, Train RMSE=0.9846]\n", + "Epochs 91-100 (Pre-Training): 100%|██████████| 10/10 [00:00<00:00, 32.19it/s, Train RMSE=0.9751]\n", + "Epochs 101-110 (Pre-Training): 100%|██████████| 10/10 [00:00<00:00, 34.04it/s, Train RMSE=0.9751]\n", + "Epochs 111-120 (Pre-Training): 100%|██████████| 10/10 [00:00<00:00, 30.87it/s, Train RMSE=0.9689]\n", + "Epochs 121-130 (Pre-Training): 100%|██████████| 10/10 [00:00<00:00, 34.90it/s, Train RMSE=0.9731]\n", + "Epochs 131-140 (Pre-Training): 100%|██████████| 10/10 [00:00<00:00, 35.47it/s, Train RMSE=0.9749]\n", + "Epochs 141-150 (Pre-Training): 100%|██████████| 10/10 [00:00<00:00, 33.76it/s, Train RMSE=0.9686]\n", + "Epochs 151-160 (Pre-Training): 100%|██████████| 10/10 [00:00<00:00, 27.36it/s, Train RMSE=0.9663]\n", + "Epochs 161-170 (Pre-Training): 100%|██████████| 10/10 [00:00<00:00, 27.76it/s, Train RMSE=0.9661]\n", + "Epochs 171-180 (Pre-Training): 100%|██████████| 10/10 [00:00<00:00, 30.29it/s, Train RMSE=0.9676]\n", + "Epochs 181-190 (Pre-Training): 100%|██████████| 10/10 [00:00<00:00, 27.21it/s, Train RMSE=0.9617]\n", + "Epochs 191-200 (Pre-Training): 100%|██████████| 10/10 [00:00<00:00, 27.10it/s, Train RMSE=0.9677]\n", + "Epochs 201-210 (Pre-Training): 100%|██████████| 10/10 [00:00<00:00, 27.40it/s, Train RMSE=0.9643]\n", + "Epochs 211-220 (Pre-Training): 100%|██████████| 10/10 [00:00<00:00, 28.24it/s, Train RMSE=0.9621]\n", + "Epochs 221-230 (Pre-Training): 100%|██████████| 10/10 [00:00<00:00, 30.32it/s, Train RMSE=0.9631]\n", + "Epochs 231-240 (Pre-Training): 100%|██████████| 10/10 [00:00<00:00, 31.95it/s, Train RMSE=0.9622]\n", + "Epochs 241-250 (Pre-Training): 100%|██████████| 10/10 [00:00<00:00, 33.13it/s, Train RMSE=0.9638]\n", + "Epochs 251-260 (Pre-Training): 100%|██████████| 10/10 [00:00<00:00, 30.76it/s, Train RMSE=0.9614]\n", + "Epochs 261-270 (Pre-Training): 100%|██████████| 10/10 [00:00<00:00, 34.07it/s, Train RMSE=0.9603]\n", + "Epochs 271-280 (Pre-Training): 100%|██████████| 10/10 [00:00<00:00, 32.32it/s, Train RMSE=0.9632]\n", + "Epochs 281-290 (Pre-Training): 100%|██████████| 10/10 [00:00<00:00, 32.24it/s, Train RMSE=0.9629]\n", + "Epochs 291-300 (Pre-Training): 100%|██████████| 10/10 [00:00<00:00, 36.41it/s, Train RMSE=0.9623]\n", + "Epochs 301-310 (Pre-Training): 100%|██████████| 10/10 [00:00<00:00, 34.49it/s, Train RMSE=0.9590]\n", + "Epochs 311-320 (Pre-Training): 100%|██████████| 10/10 [00:00<00:00, 35.98it/s, Train RMSE=0.9647]\n", + "Epochs 321-330 (Pre-Training): 100%|██████████| 10/10 [00:00<00:00, 36.35it/s, Train RMSE=0.9590]\n", + "Epochs 331-340 (Pre-Training): 100%|██████████| 10/10 [00:00<00:00, 33.30it/s, Train RMSE=0.9618]\n", + "Epochs 341-350 (Pre-Training): 100%|██████████| 10/10 [00:00<00:00, 33.56it/s, Train RMSE=0.9651]\n", + "Epochs 351-360 (Pre-Training): 
100%|██████████| 10/10 [00:00<00:00, 35.91it/s, Train RMSE=0.9625]\n", + "Epochs 361-370 (Pre-Training): 100%|██████████| 10/10 [00:00<00:00, 37.15it/s, Train RMSE=0.9614]\n", + "Epochs 371-380 (Pre-Training): 100%|██████████| 10/10 [00:00<00:00, 34.79it/s, Train RMSE=0.9625]\n", + "Epochs 381-390 (Pre-Training): 100%|██████████| 10/10 [00:00<00:00, 36.42it/s, Train RMSE=0.9602]\n", + "Epochs 391-400 (Pre-Training): 100%|██████████| 10/10 [00:00<00:00, 36.12it/s, Train RMSE=0.9674]\n", + "Epochs 401-410 (Pre-Training): 100%|██████████| 10/10 [00:00<00:00, 33.22it/s, Train RMSE=0.9601]\n", + "Epochs 411-420 (Pre-Training): 100%|██████████| 10/10 [00:00<00:00, 34.75it/s, Train RMSE=0.9617]\n", + "Epochs 421-430 (Pre-Training): 100%|██████████| 10/10 [00:00<00:00, 33.84it/s, Train RMSE=0.9644]\n", + "Epochs 431-440 (Pre-Training): 100%|██████████| 10/10 [00:00<00:00, 34.14it/s, Train RMSE=0.9650]\n", + "Epochs 441-450 (Pre-Training): 100%|██████████| 10/10 [00:00<00:00, 35.13it/s, Train RMSE=0.9620]\n", + "Epochs 451-460 (Pre-Training): 100%|██████████| 10/10 [00:00<00:00, 36.43it/s, Train RMSE=0.9619]\n", + "Epochs 461-470 (Pre-Training): 100%|██████████| 10/10 [00:00<00:00, 36.61it/s, Train RMSE=0.9631]\n", + "Epochs 471-480 (Pre-Training): 100%|██████████| 10/10 [00:00<00:00, 36.99it/s, Train RMSE=0.9624]\n", + "Epochs 481-490 (Pre-Training): 100%|██████████| 10/10 [00:00<00:00, 34.93it/s, Train RMSE=0.9621]\n", + "Epochs 491-500 (Pre-Training): 100%|██████████| 10/10 [00:00<00:00, 36.42it/s, Train RMSE=0.9585]\n", + "Epochs 1-10 (Fine-Tuning): 100%|██████████| 10/10 [00:00<00:00, 10.38it/s, Train RMSE=1.0211]\n", + "Epochs 11-20 (Fine-Tuning): 100%|██████████| 10/10 [00:00<00:00, 25.77it/s, Train RMSE=1.0215]\n", + "Epochs 21-30 (Fine-Tuning): 100%|██████████| 10/10 [00:00<00:00, 25.28it/s, Train RMSE=1.0187]\n", + "Epochs 31-40 (Fine-Tuning): 100%|██████████| 10/10 [00:00<00:00, 24.56it/s, Train RMSE=1.0114]\n", + "Epochs 41-50 (Fine-Tuning): 100%|██████████| 10/10 [00:00<00:00, 23.77it/s, Train RMSE=1.0070]\n", + "Epochs 51-60 (Fine-Tuning): 100%|██████████| 10/10 [00:00<00:00, 24.97it/s, Train RMSE=1.0043]\n", + "Epochs 61-70 (Fine-Tuning): 100%|██████████| 10/10 [00:00<00:00, 24.94it/s, Train RMSE=1.0020]\n", + "Epochs 71-80 (Fine-Tuning): 100%|██████████| 10/10 [00:00<00:00, 25.79it/s, Train RMSE=0.9985]\n", + "Epochs 81-90 (Fine-Tuning): 100%|██████████| 10/10 [00:00<00:00, 26.09it/s, Train RMSE=0.9941]\n", + "Epochs 91-100 (Fine-Tuning): 100%|██████████| 10/10 [00:00<00:00, 26.00it/s, Train RMSE=0.9916]\n", + "Epochs 101-110 (Fine-Tuning): 100%|██████████| 10/10 [00:00<00:00, 25.97it/s, Train RMSE=0.9878]\n", + "Epochs 111-120 (Fine-Tuning): 100%|██████████| 10/10 [00:00<00:00, 26.22it/s, Train RMSE=0.9862]\n", + "Epochs 121-130 (Fine-Tuning): 100%|██████████| 10/10 [00:00<00:00, 24.73it/s, Train RMSE=0.9843]\n", + "Epochs 131-140 (Fine-Tuning): 100%|██████████| 10/10 [00:00<00:00, 25.51it/s, Train RMSE=0.9835]\n", + "Epochs 141-150 (Fine-Tuning): 100%|██████████| 10/10 [00:00<00:00, 26.59it/s, Train RMSE=0.9822]\n", + "Epochs 151-160 (Fine-Tuning): 100%|██████████| 10/10 [00:00<00:00, 26.25it/s, Train RMSE=0.9800]\n", + "Epochs 161-170 (Fine-Tuning): 100%|██████████| 10/10 [00:00<00:00, 26.35it/s, Train RMSE=0.9798]\n", + "Epochs 171-180 (Fine-Tuning): 100%|██████████| 10/10 [00:00<00:00, 24.83it/s, Train RMSE=0.9802]\n", + "Epochs 181-190 (Fine-Tuning): 100%|██████████| 10/10 [00:00<00:00, 25.53it/s, Train RMSE=0.9809]\n", + "Epochs 191-200 (Fine-Tuning): 100%|██████████| 10/10 
[00:00<00:00, 25.09it/s, Train RMSE=0.9786]\n", + "Epochs 201-210 (Fine-Tuning): 100%|██████████| 10/10 [00:00<00:00, 26.20it/s, Train RMSE=0.9794]\n", + "Epochs 211-220 (Fine-Tuning): 100%|██████████| 10/10 [00:00<00:00, 26.44it/s, Train RMSE=0.9795]\n", + "Epochs 221-230 (Fine-Tuning): 100%|██████████| 10/10 [00:00<00:00, 26.84it/s, Train RMSE=0.9793]\n", + "Epochs 231-240 (Fine-Tuning): 70%|███████ | 7/10 [00:00<00:00, 25.55it/s, Train RMSE=0.9889]\n", + "Epochs 241-250 (Fine-Tuning): 10%|█ | 1/10 [00:00<00:00, 24.93it/s, Train RMSE=0.9904]\n", + "Epochs 251-260 (Fine-Tuning): 10%|█ | 1/10 [00:00<00:00, 24.58it/s, Train RMSE=0.9919]\n", + "Epochs 261-270 (Fine-Tuning): 10%|█ | 1/10 [00:00<00:00, 25.29it/s, Train RMSE=0.9932]\n", + "Epochs 271-280 (Fine-Tuning): 10%|█ | 1/10 [00:00<00:00, 25.56it/s, Train RMSE=0.9939]\n", + "Epochs 281-290 (Fine-Tuning): 10%|█ | 1/10 [00:00<00:00, 24.34it/s, Train RMSE=0.9945]\n", + "Epochs 291-300 (Fine-Tuning): 100%|██████████| 10/10 [00:00<00:00, 26.30it/s, Train RMSE=0.9963]\n", + "Epochs 301-310 (Fine-Tuning): 100%|██████████| 10/10 [00:00<00:00, 24.97it/s, Train RMSE=0.9944]\n", + "Epochs 311-320 (Fine-Tuning): 100%|██████████| 10/10 [00:00<00:00, 26.09it/s, Train RMSE=0.9927]\n", + "Epochs 321-330 (Fine-Tuning): 100%|██████████| 10/10 [00:00<00:00, 24.74it/s, Train RMSE=0.9924]\n", + "Epochs 331-340 (Fine-Tuning): 100%|██████████| 10/10 [00:00<00:00, 25.59it/s, Train RMSE=0.9921]\n", + "Epochs 341-350 (Fine-Tuning): 100%|██████████| 10/10 [00:00<00:00, 25.34it/s, Train RMSE=0.9898]\n", + "Epochs 351-360 (Fine-Tuning): 100%|██████████| 10/10 [00:00<00:00, 26.34it/s, Train RMSE=0.9880]\n", + "Epochs 361-370 (Fine-Tuning): 100%|██████████| 10/10 [00:00<00:00, 25.82it/s, Train RMSE=0.9884]\n", + "Epochs 371-380 (Fine-Tuning): 100%|██████████| 10/10 [00:00<00:00, 25.31it/s, Train RMSE=0.9881]\n", + "Epochs 381-390 (Fine-Tuning): 100%|██████████| 10/10 [00:00<00:00, 25.86it/s, Train RMSE=0.9871]\n", + "Epochs 391-400 (Fine-Tuning): 100%|██████████| 10/10 [00:00<00:00, 25.93it/s, Train RMSE=0.9845]\n", + "Epochs 401-410 (Fine-Tuning): 100%|██████████| 10/10 [00:00<00:00, 26.54it/s, Train RMSE=0.9825]\n", + "Epochs 411-420 (Fine-Tuning): 100%|██████████| 10/10 [00:00<00:00, 26.00it/s, Train RMSE=0.9951]\n", + "Epochs 421-430 (Fine-Tuning): 10%|█ | 1/10 [00:00<00:00, 26.12it/s, Train RMSE=0.9951]\n", + "Epochs 431-440 (Fine-Tuning): 100%|██████████| 10/10 [00:00<00:00, 24.69it/s, Train RMSE=0.9976]\n", + "Epochs 441-450 (Fine-Tuning): 100%|██████████| 10/10 [00:00<00:00, 25.06it/s, Train RMSE=0.9955]\n", + "Epochs 451-460 (Fine-Tuning): 100%|██████████| 10/10 [00:00<00:00, 26.23it/s, Train RMSE=0.9955]\n", + "Epochs 461-470 (Fine-Tuning): 100%|██████████| 10/10 [00:00<00:00, 26.02it/s, Train RMSE=0.9952]\n", + "Epochs 471-480 (Fine-Tuning): 100%|██████████| 10/10 [00:00<00:00, 26.03it/s, Train RMSE=0.9927]\n", + "Epochs 481-490 (Fine-Tuning): 100%|██████████| 10/10 [00:00<00:00, 26.06it/s, Train RMSE=0.9907]\n", + "Epochs 491-500 (Fine-Tuning): 100%|██████████| 10/10 [00:00<00:00, 26.02it/s, Train RMSE=0.9886]\n", + "Epochs 501-510 (Fine-Tuning): 100%|██████████| 10/10 [00:00<00:00, 25.28it/s, Train RMSE=0.9852]\n", + "Epochs 511-520 (Fine-Tuning): 100%|██████████| 10/10 [00:00<00:00, 25.31it/s, Train RMSE=0.9826]\n", + "Epochs 521-530 (Fine-Tuning): 100%|██████████| 10/10 [00:00<00:00, 26.07it/s, Train RMSE=0.9818]\n", + "Epochs 531-540 (Fine-Tuning): 100%|██████████| 10/10 [00:00<00:00, 24.90it/s, Train RMSE=0.9806]\n", + "Epochs 541-550 (Fine-Tuning): 
100%|██████████| 10/10 [00:00<00:00, 26.27it/s, Train RMSE=0.9787]\n", + "Epochs 551-560 (Fine-Tuning): 100%|██████████| 10/10 [00:00<00:00, 25.95it/s, Train RMSE=0.9783]\n", + "Epochs 561-570 (Fine-Tuning): 100%|██████████| 10/10 [00:00<00:00, 26.24it/s, Train RMSE=0.9788]\n", + "Epochs 571-580 (Fine-Tuning): 100%|██████████| 10/10 [00:00<00:00, 25.35it/s, Train RMSE=0.9777]\n", + "Epochs 581-590 (Fine-Tuning): 100%|██████████| 10/10 [00:00<00:00, 26.06it/s, Train RMSE=0.9759]\n", + "Epochs 591-600 (Fine-Tuning): 100%|██████████| 10/10 [00:00<00:00, 26.36it/s, Train RMSE=0.9762]\n", + "Epochs 601-610 (Fine-Tuning): 100%|██████████| 10/10 [00:00<00:00, 25.81it/s, Train RMSE=0.9772]\n", + "Epochs 611-620 (Fine-Tuning): 100%|██████████| 10/10 [00:00<00:00, 26.54it/s, Train RMSE=0.9769]\n", + "Epochs 621-630 (Fine-Tuning): 100%|██████████| 10/10 [00:00<00:00, 26.09it/s, Train RMSE=0.9775]\n", + "Epochs 631-640 (Fine-Tuning): 100%|██████████| 10/10 [00:00<00:00, 24.63it/s, Train RMSE=0.9789]\n", + "Epochs 641-650 (Fine-Tuning): 100%|██████████| 10/10 [00:00<00:00, 25.54it/s, Train RMSE=0.9786]\n", + "Epochs 651-660 (Fine-Tuning): 100%|██████████| 10/10 [00:00<00:00, 26.02it/s, Train RMSE=0.9775]\n", + "Epochs 661-670 (Fine-Tuning): 100%|██████████| 10/10 [00:00<00:00, 26.20it/s, Train RMSE=0.9778]\n", + "Epochs 671-680 (Fine-Tuning): 100%|██████████| 10/10 [00:00<00:00, 26.15it/s, Train RMSE=0.9762]\n", + "Epochs 681-690 (Fine-Tuning): 100%|██████████| 10/10 [00:00<00:00, 26.07it/s, Train RMSE=0.9761]\n", + "Epochs 691-700 (Fine-Tuning): 100%|██████████| 10/10 [00:00<00:00, 26.43it/s, Train RMSE=0.9769]\n", + "Epochs 701-710 (Fine-Tuning): 100%|██████████| 10/10 [00:00<00:00, 25.48it/s, Train RMSE=0.9753]\n", + "Epochs 711-720 (Fine-Tuning): 100%|██████████| 10/10 [00:00<00:00, 25.60it/s, Train RMSE=0.9756]\n", + "Epochs 721-730 (Fine-Tuning): 100%|██████████| 10/10 [00:00<00:00, 23.90it/s, Train RMSE=0.9748]\n", + "Epochs 731-740 (Fine-Tuning): 100%|██████████| 10/10 [00:00<00:00, 25.89it/s, Train RMSE=0.9752]\n", + "Epochs 741-750 (Fine-Tuning): 100%|██████████| 10/10 [00:00<00:00, 26.83it/s, Train RMSE=0.9761]\n", + "Epochs 751-760 (Fine-Tuning): 100%|██████████| 10/10 [00:00<00:00, 26.44it/s, Train RMSE=0.9745]\n", + "Epochs 761-770 (Fine-Tuning): 100%|██████████| 10/10 [00:00<00:00, 25.75it/s, Train RMSE=0.9765]\n", + "Epochs 771-780 (Fine-Tuning): 100%|██████████| 10/10 [00:00<00:00, 25.28it/s, Train RMSE=0.9768]\n", + "Epochs 781-790 (Fine-Tuning): 100%|██████████| 10/10 [00:00<00:00, 26.24it/s, Train RMSE=0.9758]\n", + "Epochs 791-800 (Fine-Tuning): 100%|██████████| 10/10 [00:00<00:00, 26.44it/s, Train RMSE=0.9742]\n", + "Epochs 801-810 (Fine-Tuning): 100%|██████████| 10/10 [00:00<00:00, 24.71it/s, Train RMSE=0.9744]\n", + "Epochs 811-820 (Fine-Tuning): 100%|██████████| 10/10 [00:00<00:00, 25.86it/s, Train RMSE=0.9743]\n", + "Epochs 821-830 (Fine-Tuning): 100%|██████████| 10/10 [00:00<00:00, 26.00it/s, Train RMSE=0.9756]\n", + "Epochs 831-840 (Fine-Tuning): 100%|██████████| 10/10 [00:00<00:00, 24.98it/s, Train RMSE=0.9761]\n", + "Epochs 841-850 (Fine-Tuning): 100%|██████████| 10/10 [00:00<00:00, 25.81it/s, Train RMSE=0.9756]\n", + "Epochs 851-860 (Fine-Tuning): 100%|██████████| 10/10 [00:00<00:00, 26.08it/s, Train RMSE=0.9757]\n", + "Epochs 861-870 (Fine-Tuning): 100%|██████████| 10/10 [00:00<00:00, 25.10it/s, Train RMSE=0.9746]\n", + "Epochs 871-880 (Fine-Tuning): 100%|██████████| 10/10 [00:00<00:00, 25.47it/s, Train RMSE=0.9723]\n", + "Epochs 881-890 (Fine-Tuning): 100%|██████████| 
10/10 [00:00<00:00, 26.26it/s, Train RMSE=0.9714]\n", + "Epochs 891-900 (Fine-Tuning): 100%|██████████| 10/10 [00:00<00:00, 24.00it/s, Train RMSE=0.9745]\n", + "Epochs 901-910 (Fine-Tuning): 100%|██████████| 10/10 [00:00<00:00, 25.13it/s, Train RMSE=0.9725]\n", + "Epochs 911-920 (Fine-Tuning): 100%|██████████| 10/10 [00:00<00:00, 25.75it/s, Train RMSE=0.9722]\n", + "Epochs 921-930 (Fine-Tuning): 100%|██████████| 10/10 [00:00<00:00, 25.83it/s, Train RMSE=0.9725]\n", + "Epochs 931-940 (Fine-Tuning): 100%|██████████| 10/10 [00:00<00:00, 26.21it/s, Train RMSE=0.9710]\n", + "Epochs 941-950 (Fine-Tuning): 100%|██████████| 10/10 [00:00<00:00, 26.10it/s, Train RMSE=0.9738]\n", + "Epochs 951-960 (Fine-Tuning): 100%|██████████| 10/10 [00:00<00:00, 26.54it/s, Train RMSE=0.9738]\n", + "Epochs 961-970 (Fine-Tuning): 100%|██████████| 10/10 [00:00<00:00, 23.58it/s, Train RMSE=0.9715]\n", + "Epochs 971-980 (Fine-Tuning): 100%|██████████| 10/10 [00:00<00:00, 24.55it/s, Train RMSE=0.9731]\n", + "Epochs 981-990 (Fine-Tuning): 100%|██████████| 10/10 [00:00<00:00, 25.77it/s, Train RMSE=0.9722]\n", + "Epochs 991-1000 (Fine-Tuning): 100%|██████████| 10/10 [00:00<00:00, 26.22it/s, Train RMSE=0.9720]\n" + ] + } + ], + "source": [ + "# Run the experiment on the MovieLens 100K split defined above\n", + "cornac.Experiment(eval_method=rs, models=[my_model, mf, pmf, bpr], metrics=metrics, user_based=True).run()\n", + "\n", + "# Retrieve experiment results\n", + "experiment_results = sys.stdout.getvalue()\n", + "sys.stdout = temp # Restore stdout to original state" + ] + }, + { + "cell_type": "code", + "execution_count": 12, + "metadata": {}, + "outputs": [], + "source": [ + "# Print the results to the console\n", + "print(experiment_results)\n", + "\n", + "# Log results to file\n", + "log_results(log_file, experiment_results, my_model)\n", + "\n", + "print(f\"Experiment results and hyperparameters saved to {log_file}\")" + ] + },
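+ { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "## Optional: inspecting the fitted model\n", + "\n", + "A minimal sanity-check sketch, not part of the logged run above. It assumes `my_model` follows Cornac's standard `Recommender.score()` contract: calling `score(user_idx)` with no item index returns predicted scores for every item, which we can sort to get top-10 recommendations for a single user once the experiment has fitted the model." + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": {}, + "outputs": [], + "source": [ + "# Minimal sketch (assumes the experiment above has already fitted my_model)\n", + "# score(user_idx) with no item index returns predicted scores for all items\n", + "user_idx = 0 # internal index of an arbitrary training user\n", + "scores = np.asarray(my_model.score(user_idx))\n", + "top_items = np.argsort(scores)[::-1][:10]\n", + "print('Top-10 item indices for user {}: {}'.format(user_idx, top_items))" + ] + } + ], + "metadata": { + "kernelspec": { + "display_name": "base", + "language": "python", + "name": "python3" + }, + "language_info": { + "codemirror_mode": { + "name": "ipython", + "version": 3 + }, + "file_extension": ".py", + "mimetype": "text/x-python", + "name": "python", + "nbconvert_exporter": "python", + "pygments_lexer": "ipython3", + "version": "3.12.2" + } + }, + "nbformat": 4, + "nbformat_minor": 2 +}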