Commit acf685f0 authored by Pavlo Beylin's avatar Pavlo Beylin
Browse files

Update transformations.

parent 00a44831
......@@ -42,7 +42,9 @@ classes = ["person", "bicycle", "car", "motorbike", "aeroplane", "bus",
"keyboard", "cell phone", "microwave", "oven", "toaster", "sink", "refrigerator",
"book", "clock", "vase", "scissors", "teddy bear", "hair drier", "toothbrush"]
PATH = "saved_patches/realcat.jpg"
PATH = "saved_patches/fatcat.jpg"
PATH = "saved_patches/smallcat.jpg"
PATCH_SIZE = 300
total_variation = TotalVariation()
......@@ -194,7 +196,7 @@ if __name__ == "__main__":
move = False
rotate = False
taper = False
resize = False
resize = True
squeeze = False
gauss = False
obfuscate = False
......@@ -256,6 +258,7 @@ if __name__ == "__main__":
pass
iou, pred = get_best_prediction(bounding_box, raw_results, 15) # get cat
# iou, pred = get_best_prediction(bounding_box, raw_results, 0) # get person
# iou, pred = get_best_prediction(bounding_box, raw_results, 12) # get parking meter
# iou, pred = get_best_prediction(bounding_box, raw_results, 11) # get stop sign
# iou, pred = get_best_prediction(bounding_box, raw_results, 8) # get boat
......@@ -293,7 +296,7 @@ if __name__ == "__main__":
# sgn_grads = torch.sign(optimizer.param_groups[0]['params'][0].grad)
# optimizer.param_groups[0]['params'][0].grad = sgn_grads
# optimizer.step()
patch.data -= torch.sign(gradient_sum) * 0.001
patch.data -= torch.sign(gradient_sum) * 0.001 # * 0 # TODO reactivate
patch.data = patch.detach().clone().clamp(MIN_THRESHOLD, 0.99999).data
gradient_sum = 0
......@@ -333,12 +336,18 @@ if __name__ == "__main__":
stretch = not stretch
print(f"Obfuscate: {obfuscate}")
if key == ord("+"):
transform_interval += 1
print("Transform Interval: {}".format(transform_interval))
# transform_interval += 1
patch_transformer.maxsize += 0.01
patch_transformer.minsize += 0.01
# print("Transform Interval: {}".format(transform_interval))
print(f"Size {patch_transformer.minsize}")
if key == ord("-"):
transform_interval -= 1
transform_interval = max(transform_interval, 1)
print("Transform Interval: {}".format(transform_interval))
# transform_interval -= 1
patch_transformer.maxsize -= 0.01
patch_transformer.minsize -= 0.01
print(f"Size {patch_transformer.minsize}")
# transform_interval = max(transform_interval, 1)
# print("Transform Interval: {}".format(transform_interval))
if key == ord("9"):
patch_transformer.maxangle = min(patch_transformer.maxangle + (math.pi * angle_step / 180), math.pi)
patch_transformer.minangle = max(patch_transformer.minangle - (math.pi * angle_step / 180), -math.pi)
......
......@@ -95,7 +95,7 @@ class PatchTransformer(nn.Module):
self.minangle = -20 / 180 * math.pi
self.maxangle = 20 / 180 * math.pi
self.minsize = 0.5
self.maxsize = 1.5
self.maxsize = 0.51
self.medianpooler = MedianPool2d(7, same=True)
'''
kernel = torch.cuda.FloatTensor([[0.003765, 0.015019, 0.023792, 0.015019, 0.003765],
......
Supports Markdown
0% or .
You are about to add 0 people to the discussion. Proceed with caution.
Finish editing this message first!
Please register or sign in to comment