Meehai committed
Commit 7368892 · Parent(s): 3a808fd

fixes in scripts

README.md CHANGED
@@ -290,7 +290,7 @@ outputs one csv file with predictions for each npz file, the scenes are used for
 
 ```
 python scripts/evaluate_semantic_segmentation.py \
-    data/test_set_annotated_only/semantic_mask2former_swin_mapillary_converted/ \ # change this with your predictions dir
+    data/test_set_annotated_only/semantic_mask2former_swin_mapillary_converted/ \
     data/test_set_annotated_only/semantic_segprop8/ \
     -o results.csv \
     --classes land forest residential road little-objects water sky hill \
dronescapes_reader/dronescapes_representations.py CHANGED
@@ -182,10 +182,9 @@ mapillary_color_map = [[165, 42, 42], [0, 192, 0], [196, 196, 196], [190, 153, 1
                        [0, 60, 100], [0, 0, 142], [0, 0, 90], [0, 0, 230], [0, 80, 100], [128, 64, 64], [0, 0, 110],
                        [0, 0, 70], [0, 0, 192], [32, 32, 32], [120, 10, 10]]
 
-class SemanticMask2FormerMapillaryConvertedPaper(TaskMapper):
-    def __init__(self, dep: NpzRepresentation):
-        super().__init__("semantic_mask2former_swin_mapillary_converted",
-                         dependencies=[dep], merge_fn=self._merge_fn, n_channels=8)
+class SemanticMask2FormerMapillaryMapped(TaskMapper):
+    def __init__(self, name: str, dep: NpzRepresentation):
+        super().__init__(name, dependencies=[dep], merge_fn=self._merge_fn, n_channels=8)
         self.mapping = {
             "land": ["Terrain", "Sand", "Snow"],
             "forest": ["Vegetation"],
@@ -236,7 +235,8 @@ _tasks: list[NpzRepresentation] = [ # some pre-baked representations
     SemanticRepresentation("semantic_mask2former_coco_47429163_0", classes=coco_classes, color_map=coco_color_map),
     m2f_mapillary := SemanticRepresentation("semantic_mask2former_mapillary_49189528_0", classes=mapillary_classes,
                                             color_map=mapillary_color_map),
-    SemanticMask2FormerMapillaryConvertedPaper(m2f_mapillary),
+    SemanticRepresentation("semantic_mask2former_swin_mapillary_converted", classes=8, color_map=color_map_8classes),
+    SemanticMask2FormerMapillaryMapped("semantic_mask2former_swin_mapillary_mapped", m2f_mapillary),
     NpzRepresentation("softseg_gb", 3),
 ]
 dronescapes_task_types: dict[str, NpzRepresentation] = {task.name: task for task in _tasks}
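Note: this hunk only shows the first entries of the name-level class mapping ("land", "forest", ...) and not the `_merge_fn` itself. As a rough sketch of what such a mapping implies, assuming the merge collapses a Mapillary argmax map into the 8 dronescapes classes via an index lookup table (the helper names below are illustrative, not the repo's API):

```python
import numpy as np

def build_lut(mapillary_classes: list[str], mapping: dict[str, list[str]]) -> np.ndarray:
    """Index lookup: Mapillary class id -> dronescapes class id (0..7)."""
    lut = np.zeros(len(mapillary_classes), dtype=np.int64)  # names not covered by `mapping` default to 0 in this sketch
    for new_id, old_names in enumerate(mapping.values()):
        for old_name in old_names:
            lut[mapillary_classes.index(old_name)] = new_id
    return lut

def map_to_8_classes(mapillary_argmax: np.ndarray, lut: np.ndarray) -> np.ndarray:
    # mapillary_argmax: (H, W) integer map of Mapillary class ids
    return lut[mapillary_argmax]
```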
dronescapes_reader/multitask_dataset.py CHANGED
@@ -240,7 +240,7 @@ class MultiTaskDataset(Dataset):
 
         if self.handle_missing_data == "drop":
             b4 = len(names_to_tasks)
-            names_to_tasks = {k: v for k, v in names_to_tasks if len(v) == len(relevant_tasks_for_files)}
+            names_to_tasks = {k: v for k, v in names_to_tasks.items() if len(v) == len(relevant_tasks_for_files)}
             logger.debug(f"Dropped {b4 - len(names_to_tasks)} files not in all tasks")
         all_names: list[str] = natsorted(names_to_tasks.keys())
         logger.info(f"Total files: {len(names_to_tasks)} per task across {len(task_names)} tasks")
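Note: the fix here is a single missing `.items()`. Iterating a dict directly yields only its keys, so the original comprehension tried to unpack each file-name string into `k, v` and failed at runtime. A standalone illustration with made-up data:

```python
names_to_tasks = {"frame_0001": {"pred", "gt"}, "frame_0002": {"pred"}}
relevant_tasks_for_files = ["pred", "gt"]

# {k: v for k, v in names_to_tasks if ...}  raises ValueError: too many values to unpack (keys are strings)
kept = {k: v for k, v in names_to_tasks.items() if len(v) == len(relevant_tasks_for_files)}
print(kept)  # {'frame_0001': {'pred', 'gt'}} -- frames missing any task are dropped
```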
scripts/evaluate_semantic_segmentation.py CHANGED
@@ -18,7 +18,8 @@ import numpy as np
 import pandas as pd
 
 sys.path.append(Path(__file__).parents[1].__str__())
-from dronescapes_reader import MultiTaskDataset, SemanticRepresentation
+from dronescapes_reader import MultiTaskDataset
+from dronescapes_reader.dronescapes_representations import SemanticRepresentation
 
 def compute_metrics(tp: np.ndarray, fp: np.ndarray, tn: np.ndarray, fn: np.ndarray) -> pd.DataFrame:
     precision = tp / (tp + fp)
@@ -66,7 +67,7 @@ def compute_final_per_scene(res: pd.DataFrame, scene: str, classes: list[str],
 
 def _check_and_symlink_dirs(y_dir: Path, gt_dir: Path) -> Path:
     """checks whether the two provided paths are actual full of npz directories and links them together in a tmp dir"""
-    assert (l := [x.name for x in y_dir.iterdir()]) == (r := [x.name for x in gt_dir.iterdir()]), f"{l} \n vs \n {r}"
+    assert (l := {x.name for x in y_dir.iterdir()}) == (r := {x.name for x in gt_dir.iterdir()}), f"{l} \n vs \n {r}"
     assert all(x.endswith(".npz") for x in [*l, *r]), f"Not dirs of only .npz files: {l} \n {r}"
     (temp_dir := Path(TemporaryDirectory().name)).mkdir(exist_ok=False)
     os.symlink(y_dir, temp_dir / "pred")
@@ -100,9 +101,11 @@ def get_args() -> Namespace:
 def main(args: Namespace):
     # setup to put both directories in the same parent directory for the reader to work.
     temp_dir = _check_and_symlink_dirs(args.y_dir, args.gt_dir)
-    sema_repr = partial(SemanticRepresentation, classes=args.classes, color_map=[[0, 0, 0]] * len(args.classes))
-    reader = MultiTaskDataset(temp_dir, handle_missing_data="drop", task_types={"pred": sema_repr, "gt": sema_repr})
-    assert (a := len(reader.all_files_per_repr["gt"])) == (b := len(reader.all_files_per_repr["pred"])), f"{a} vs {b}"
+    pred_repr = SemanticRepresentation("pred", classes=args.classes, color_map=[[0, 0, 0]] * len(args.classes))
+    gt_repr = SemanticRepresentation("gt", classes=args.classes, color_map=[[0, 0, 0]] * len(args.classes))
+    reader = MultiTaskDataset(temp_dir, task_names=["pred", "gt"], task_types={"pred": pred_repr, "gt": gt_repr},
+                              handle_missing_data="drop", normalization=None)
+    assert (a := len(reader.files_per_repr["gt"])) == (b := len(reader.files_per_repr["pred"])), f"{a} vs {b}"
 
     # Compute TP, FP, TN, FN for each frame
     raw_stats = compute_raw_stats_per_frame(reader, args.classes, args.n_workers)
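Note: with the reader built from two explicit SemanticRepresentation tasks ("pred" and "gt"), the rest of the script accumulates per-frame, per-class TP/FP/TN/FN counts that `compute_metrics` turns into per-class metrics (the first hunk above shows precision = tp / (tp + fp)). The sketch below is not the script's `compute_raw_stats_per_frame`, just the idea for a single frame, assuming `pred` and `gt` are (H, W) integer class maps over the `--classes` list:

```python
import numpy as np

def per_class_confusion(pred: np.ndarray, gt: np.ndarray, n_classes: int):
    """TP/FP/TN/FN per class for one frame of argmax predictions vs. ground truth."""
    tp = np.zeros(n_classes, dtype=np.int64)
    fp = np.zeros(n_classes, dtype=np.int64)
    tn = np.zeros(n_classes, dtype=np.int64)
    fn = np.zeros(n_classes, dtype=np.int64)
    for c in range(n_classes):
        tp[c] = ((pred == c) & (gt == c)).sum()
        fp[c] = ((pred == c) & (gt != c)).sum()
        fn[c] = ((pred != c) & (gt == c)).sum()
        tn[c] = ((pred != c) & (gt != c)).sum()
    return tp, fp, tn, fn
```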