test.py

from options.options import Options
import os
import torch
import numpy as np
import sys

# print options to help debugging
if __name__ == '__main__':
    args = Options().parse()
    if (args.test_dir is not None) and (not os.path.isdir(args.test_dir)):
        os.mkdir(args.test_dir)

    if args.batch_gen:
        from testing.test_VAE import get_layouts_from_network
        get_layouts_from_network(args)
        exit()

    if args.measure_acc_l1_std:
        from testing.test_acc_mean_std import get_std, get_acc_l1
        get_acc_l1(args)
        get_std(args)
        exit()

    if args.heat_map:
        from testing.test_heatmap import produce_heatmap, plot_heatmap
        print("Calling network to produce object positions...")
        produce_heatmap(args)
        print("Rendering images...")
        test_data_dir = os.path.join(args.test_dir, "data")
        heat_dir = os.path.join(test_data_dir, "heat")
        room_idx = 0
        heat_pkl_path = os.path.join(heat_dir, str(room_idx).zfill(4) + "_heat.pkl")
        save_path = heat_dir
        plot_heatmap(heat_pkl_path, save_path)
        exit()
    if args.draw_2d:
        from testing.test_plot2d import plot2d
        # Please follow this data format when calling the plot2d function.
        # For the rotation, you can use the argmax or a weighted average of the
        # network's rotation prediction (a sketch of both read-outs appears after
        # the end of this script).
        test_data_dir = os.path.join(args.test_dir, "data")
        save_2d = os.path.join(test_data_dir, "2D_rendered")
        exp_boxes = [
            [0.31150928139686584, 0.3127100169658661, 0.003096628002822399,
             0.7295752763748169, 0.8262581825256348, 0.054250866174697876],
            [-0.06599953025579453, 0.017223943024873734, 0.2885378897190094,
             0.2573782205581665, 0.7553179860115051, 0.42857787013053894],
            [0.5567594766616821, 0.017786923795938492, 0.142490953207016,
             0.9046159982681274, 0.31667089462280273, 0.6691973209381104],
            [0.6205720901489258, 0.018211644142866135, 0.8416993021965027,
             0.8348240852355957, 0.3893248736858368, 0.963701605796814],
            [0.171146959066391, 0.017671708017587662, 0.8085968494415283,
             0.4601595997810364, 0.5026606321334839, 0.9657217264175415],
            [0.0, 0.0, 0.0, 1.0, 0.7327236533164978, 0.9278678297996521],
        ]
        exp_boxes = [torch.from_numpy(np.array(x)).float() for x in exp_boxes]
        exp_rots = [0.0008550407364964485, 18.074506759643555, 6.062503337860107,
                    12.16077995300293, 12.012971878051758, 0.0]
        exp_rots = [torch.from_numpy(np.array(x)).float() for x in exp_rots]
        obj_types = [20, 18, 30, 3, 11, 0]
        # The last object is the "room" bounding box,
        # and the last rotation doesn't matter (it isn't used).
        plot2d(exp_boxes, exp_rots, obj_types, save_2d)
        exit()
    if args.draw_3d:
        from testing.test_plot3d import run_blender, run_blender_mask_depth
        # Note: this requires running batch_gen first.
        # Run the following to select which GPU Blender will use
        # and the path to the Blender 2.79 binary:
        # export CUDA_VISIBLE_DEVICES=1
        # export PATH="/data/vision/billf/mooncam/code/yonglong/blender:$PATH"
        blender_path = args.blender_path
        os.environ["CUDA_VISIBLE_DEVICES"] = "0"
        os.environ["PATH"] += os.pathsep + blender_path
        run_blender(args)
        exit()
    if args.fine_tune:
        from testing.test_render_refine import finetune_VAE
        # Replace with a list of room IDs
        room_to_finetune = ["7096"]
        base_save_dir = os.path.join(args.test_dir, "data", "finetune")
        if not os.path.isdir(base_save_dir):
            os.mkdir(base_save_dir)
        save_directories = [os.path.join(base_save_dir, x) for x in room_to_finetune]
        finetune_VAE(args, room_to_finetune, save_directories)
        exit()
    if args.gan_shade:
        # This loads weights, which is slow, so we put it behind the flag
        from testing.test_SPADE_shade import colorize_with_spade
        # Imported here as well so this branch works when args.draw_3d is not set
        from testing.test_plot3d import run_blender_mask_depth
        # Modify render_semantic_depth to choose the room id
        # Quite slow, run on select rooms...
        blender_path = args.blender_path
        os.environ["CUDA_VISIBLE_DEVICES"] = "0"
        os.environ["PATH"] += os.pathsep + blender_path
        # Disable this call if the masks & depth already exist
        run_blender_mask_depth(args)
        input_dir = os.path.join(args.test_dir, "data", "semantic_masks")
        output_dir = os.path.join(args.test_dir, "data", "SPADE_out")
        if not os.path.isdir(output_dir):
            os.mkdir(output_dir)
        # Rooms can either be "all" or a list of room-ID strings
        colorize_with_spade(num_z=50, semantic_dir=input_dir, save_dir=output_dir, rooms="all")
        exit()
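
Two short notes follow; neither is part of test.py itself.

First, the script assumes an Options class in options/options.py whose parse() method returns a namespace with the attributes read above (test_dir, blender_path, and the boolean mode switches). That class is not shown on this page, so the following is only a minimal argparse sketch of the interface the script appears to expect; the real flag names, defaults, and help text may differ.

import argparse

def build_parser():
    # Hypothetical stand-in for options.options.Options().parse();
    # flag names simply mirror the attributes used in test.py.
    parser = argparse.ArgumentParser(description="test.py entry point (illustrative sketch)")
    parser.add_argument("--test_dir", type=str, default=None,
                        help="Directory where test outputs are written")
    parser.add_argument("--blender_path", type=str, default="",
                        help="Directory containing the Blender 2.79 binary")
    for flag in ("batch_gen", "measure_acc_l1_std", "heat_map",
                 "draw_2d", "draw_3d", "fine_tune", "gan_shade"):
        parser.add_argument("--" + flag, action="store_true",
                            help="Run the corresponding test branch")
    return parser

if __name__ == "__main__":
    print(build_parser().parse_args([]))

Second, the draw_2d branch mentions taking either the argmax or a weighted average of the network's rotation prediction. The sketch below shows both read-outs for a hypothetical per-bin rotation prediction; the bin count and angle range are illustrative assumptions, not values taken from this codebase.

import torch

num_bins = 24                                  # assumed rotation discretization
bin_angles = torch.arange(num_bins) * (360.0 / num_bins)
rot_logits = torch.randn(num_bins)             # stand-in for a network prediction

# Option 1: hard argmax over the rotation bins
argmax_rot = bin_angles[rot_logits.argmax()]

# Option 2: probability-weighted average of the bin angles
probs = torch.softmax(rot_logits, dim=0)
weighted_rot = (probs * bin_angles).sum()

print(float(argmax_rot), float(weighted_rot))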