bert_classes.py
# DL
import torch
import torch.nn as nn
import torch.nn.functional as F
from torch.utils.data import Dataset, DataLoader
from transformers import *
# utilities
import numpy as np
import matplotlib.pyplot as plt
import pandas as pd
from pathlib import Path
from tqdm import tqdm_notebook
import seaborn as sns
# helper functions
import utils
class IMDBDataset(Dataset):
    """
    PyTorch Dataset wrapping IMDB movie reviews and their sentiment labels,
    tokenizing each review on the fly with a Hugging Face tokenizer.
    """
    def __init__(self, reviews, sentiments, tokenizer, max_len):
        """
        :param reviews: array-like of raw review texts
        :param sentiments: array-like of integer sentiment labels
        :param tokenizer: Hugging Face tokenizer used to encode the reviews
        :param max_len: maximum sequence length for padding/truncation
        """
        self.reviews = reviews
        self.sentiments = sentiments
        self.tokenizer = tokenizer
        self.max_len = max_len

    def __len__(self):
        """
        :return: number of reviews in the dataset
        """
        return len(self.reviews)

    def __getitem__(self, item):
        """
        :param item: index of the review to fetch
        :return: dict with the raw review, input ids, attention mask and label
        """
        review = str(self.reviews[item])
        sentiment = self.sentiments[item]
        encoding = self.tokenizer.encode_plus(
            review,
            max_length=self.max_len,  # was hard-coded to 200; use the configured length
            add_special_tokens=True,
            return_token_type_ids=False,
            return_attention_mask=True,
            padding='max_length',     # replaces the deprecated pad_to_max_length=True
            truncation=True,
            return_tensors='pt'
        )
        return {
            'review': review,
            'input_ids': encoding['input_ids'].flatten(),
            'attention_mask': encoding['attention_mask'].flatten(),
            'sentiments': torch.tensor(sentiment, dtype=torch.long)
        }
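

# --- Usage sketch (illustrative, not part of the original module) ---
# A minimal example of feeding IMDBDataset to a DataLoader. The CSV path,
# the 'review'/'sentiment' column names and the 'bert-base-uncased' checkpoint
# are assumptions made for illustration; adjust them to the actual data and model.
if __name__ == "__main__":
    from transformers import BertTokenizer

    # Hypothetical CSV with one text column and one integer label column.
    df = pd.read_csv("imdb_reviews.csv")
    tokenizer = BertTokenizer.from_pretrained("bert-base-uncased")

    dataset = IMDBDataset(
        reviews=df["review"].to_numpy(),
        sentiments=df["sentiment"].to_numpy(),
        tokenizer=tokenizer,
        max_len=200,
    )
    loader = DataLoader(dataset, batch_size=16, shuffle=True)

    # Each batch is a dict of padded tensors ready for a BERT classifier.
    batch = next(iter(loader))
    print(batch["input_ids"].shape)       # torch.Size([16, 200])
    print(batch["attention_mask"].shape)  # torch.Size([16, 200])
    print(batch["sentiments"].shape)      # torch.Size([16])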