train.py
import os
from argparse import ArgumentParser

import pytorch_lightning as pl
import torch
from pytorch_lightning.callbacks import EarlyStopping, LearningRateMonitor
from pytorch_lightning.loggers import NeptuneLogger
from pytorch_lightning.tuner.tuning import Tuner

from data import LitHapticDataset
from models import (LitMLSTMfcnClassifier, LitTCNClassifier,
                    LitTSTransformerClassifier)


def objective(args):
    # Log the run to Neptune; the API token is read from the environment.
    logger = NeptuneLogger(
        project="PPI/terrain-classification",
        api_token=os.getenv('NEPTUNE_API_TOKEN'),
        tags=["classification", "playground"],
        log_model_checkpoints=False)

    # Stop training when the validation loss stops improving and log the learning rate per epoch.
    early_stop_callback = EarlyStopping(monitor="val/loss", min_delta=0.00,
                                        patience=10, verbose=True, mode="min")
    lr_monitor = LearningRateMonitor(logging_interval='epoch')

    trainer = pl.Trainer(
        max_epochs=1, callbacks=[early_stop_callback, lr_monitor], logger=logger,
        log_every_n_steps=1,
        accelerator='gpu' if torch.cuda.is_available() else 'cpu', devices=1)

    # Build the model and the data module (initial batch size 128).
    model = LitTCNClassifier(config=LitTCNClassifier.get_default_config())
    data = LitHapticDataset(args.dataset_path, 128)

    # Scale the batch size to the largest value that fits in memory before training.
    tuner = Tuner(trainer)
    tuner.scale_batch_size(model, datamodule=data)
    print(data.batch_size)

    # Record the model name and hyperparameters in the Neptune run.
    logger.experiment['model'] = model.model_name
    logger.experiment['hyperparams'] = model.config

    trainer.fit(model, data)
    trainer.test(datamodule=data)
    logger.experiment.stop()


if __name__ == '__main__':
    parser = ArgumentParser()
    parser.add_argument('--dataset-path', type=str, required=True)
    args, _ = parser.parse_known_args()
    objective(args)
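
A typical invocation of the script, assuming a Neptune API token is available in the environment; the token value and dataset path below are illustrative placeholders:

    export NEPTUNE_API_TOKEN=<your-token>
    python train.py --dataset-path /path/to/haptic/dataset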