-
Notifications
You must be signed in to change notification settings - Fork 0
/
SpyFishAotearoaDataset.py
56 lines (42 loc) · 1.7 KB
/
SpyFishAotearoaDataset.py
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
import os
import torch
import ast
import pandas as pd
from PIL import Image
class SpyFishAotearoaDataset(torch.utils.data.Dataset):
    """Detection dataset for the Spyfish Aotearoa project.

    Reads ``<root_dir>/output/<file_name>`` (CSV) where each row describes one
    image in ``<root_dir>/images``. The columns ``x1``/``y1``/``x2``/``y2`` and
    ``label`` hold Python-literal lists (one entry per bounding box), which are
    parsed with :func:`ast.literal_eval` at load time.
    """

    def __init__(self, root_dir, file_name, transforms):
        """
        :param root_dir: dataset root; must contain ``output/`` and ``images/``.
        :param file_name: CSV file name inside ``<root_dir>/output``.
        :param transforms: callable ``(img, target) -> (img, target)`` or None.
        """
        self.root_dir = root_dir
        self.transforms = transforms
        file_path = os.path.join(root_dir, "output", file_name)
        # Box/label columns are stored as stringified Python lists in the CSV;
        # converters turn them back into real lists on read.
        self.table = pd.read_csv(file_path, converters={
            "x1": ast.literal_eval,
            "y1": ast.literal_eval,
            "x2": ast.literal_eval,
            "y2": ast.literal_eval,
            "label": ast.literal_eval,
        })
        self.imgs_path = os.path.join(root_dir, "images")

    def get_image_name(self, idx):
        """Return the image file name stored for row *idx*."""
        return self.table.loc[idx].image_name

    def _build_target(self, idx):
        """Build the detection target dict for row *idx* (no image I/O).

        Returns a dict with ``boxes`` (float32, shape ``(N, 4)`` as
        ``[x1, y1, x2, y2]``), ``labels`` (int64), ``image_id`` and ``area``.
        """
        row = self.table.loc[idx]
        coords = [[x1, y1, x2, y2]
                  for x1, y1, x2, y2 in zip(row.x1, row.y1, row.x2, row.y2)]
        # reshape(-1, 4) keeps a (0, 4) shape when the row has no objects;
        # without it an empty list yields a 1-D tensor and the area slicing
        # below raises IndexError.
        boxes = torch.as_tensor(coords, dtype=torch.float32).reshape(-1, 4)
        labels = torch.as_tensor(row.label, dtype=torch.int64)
        area = (boxes[:, 3] - boxes[:, 1]) * (boxes[:, 2] - boxes[:, 0])
        return {
            "boxes": boxes,
            "labels": labels,
            "image_id": torch.tensor([idx]),
            "area": area,
        }

    def __getitem__(self, idx):
        """Return ``(img, target, idx)`` for row *idx*, applying transforms if set."""
        img_path = os.path.join(self.imgs_path, self.get_image_name(idx))
        img = Image.open(img_path)
        target = self._build_target(idx)
        if self.transforms is not None:
            img, target = self.transforms(img, target)
        return img, target, idx

    def __len__(self):
        """Number of images (rows) in the dataset."""
        return len(self.table)