From 1d98bc84a25912fb459f4370b989ce752415b270 Mon Sep 17 00:00:00 2001
From: Jordon Brooks
Date: Sun, 13 Aug 2023 02:07:13 +0100
Subject: [PATCH] updated

---
 train_model.py | 4 +---
 1 file changed, 1 insertion(+), 3 deletions(-)

diff --git a/train_model.py b/train_model.py
index 2b202f0..6f927f8 100644
--- a/train_model.py
+++ b/train_model.py
@@ -1,3 +1,4 @@
+import argparse
 import json
 import os
 import cv2
@@ -132,7 +133,6 @@ def main():
     parser = argparse.ArgumentParser(description="Train the video compression model.")
     parser.add_argument('-b', '--batch_size', type=int, default=BATCH_SIZE, help='Batch size for training.')
     parser.add_argument('-e', '--epochs', type=int, default=EPOCHS, help='Number of epochs for training.')
-    parser.add_argument('-s', '--training_samples', type=int, default=TRAIN_SAMPLES, help='Number of training samples.')
     parser.add_argument('-l', '--learning_rate', type=float, default=LEARNING_RATE, help='Learning rate for training.')
     parser.add_argument('-c', '--continue_training', type=str, nargs='?', const=MODEL_SAVE_FILE, default=None, help='Path to the saved model to continue training. If used without a value, defaults to the MODEL_SAVE_FILE.')
 
@@ -140,7 +140,6 @@ def main():
 
     BATCH_SIZE = args.batch_size
     EPOCHS = args.epochs
-    TRAIN_SAMPLES = args.training_samples
     LEARNING_RATE = args.learning_rate
 
     # Display training configuration
@@ -148,7 +147,6 @@ def main():
     LOGGER.info("Training configuration:")
     LOGGER.info(f"Batch size: {BATCH_SIZE}")
    LOGGER.info(f"Epochs: {EPOCHS}")
-    LOGGER.info(f"Training samples: {TRAIN_SAMPLES}")
     LOGGER.info(f"Learning rate: {LEARNING_RATE}")
     LOGGER.info(f"Continue training from: {MODEL_SAVE_FILE}")
 
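
The retained -c/--continue_training option depends on argparse's nargs='?' / const /
default interaction, which is easy to misread. A minimal, self-contained sketch of that
behaviour follows; the MODEL_SAVE_FILE value used here is a placeholder for illustration,
not the actual default defined in train_model.py (which is outside this diff).

    import argparse

    MODEL_SAVE_FILE = "model.h5"  # placeholder path, assumed for this sketch only

    parser = argparse.ArgumentParser()
    parser.add_argument('-c', '--continue_training', type=str, nargs='?',
                        const=MODEL_SAVE_FILE, default=None,
                        help='Path to the saved model to continue training.')

    # Flag omitted      -> default (None): start training from scratch.
    # Bare "-c"         -> const (MODEL_SAVE_FILE): resume from the standard save file.
    # "-c other.h5"     -> the explicit path given on the command line.
    print(parser.parse_args([]).continue_training)                   # None
    print(parser.parse_args(['-c']).continue_training)               # model.h5
    print(parser.parse_args(['-c', 'other.h5']).continue_training)   # other.h5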