Register
Login
Resources
Docs Blog Datasets Glossary Case Studies Tutorials & Webinars
Product
Data Engine LLMs Platform Enterprise
Pricing Explore
Connect to our Discord channel

average_checkpoints.py 3.5 KB

You have to be logged in to leave a comment. Sign In
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
86
87
88
89
90
91
92
93
94
95
96
97
98
99
100
101
102
103
104
105
106
107
108
109
110
111
112
113
114
115
116
117
118
119
  1. #!/usr/bin/env python3
  2. import argparse
  3. import collections
  4. import torch
  5. import os
  6. import re
  7. def average_checkpoints(inputs):
  8. """Loads checkpoints from inputs and returns a model with averaged weights.
  9. Args:
  10. inputs: An iterable of string paths of checkpoints to load from.
  11. Returns:
  12. A dict of string keys mapping to various values. The 'model' key
  13. from the returned dict should correspond to an OrderedDict mapping
  14. string parameter names to torch Tensors.
  15. """
  16. params_dict = collections.OrderedDict()
  17. params_keys = None
  18. new_state = None
  19. for f in inputs:
  20. state = torch.load(
  21. f,
  22. map_location=(
  23. lambda s, _: torch.serialization.default_restore_location(s, 'cpu')
  24. ),
  25. )
  26. # Copies over the settings from the first checkpoint
  27. if new_state is None:
  28. new_state = state
  29. model_params = state['model']
  30. model_params_keys = list(model_params.keys())
  31. if params_keys is None:
  32. params_keys = model_params_keys
  33. elif params_keys != model_params_keys:
  34. raise KeyError(
  35. 'For checkpoint {}, expected list of params: {}, '
  36. 'but found: {}'.format(f, params_keys, model_params_keys)
  37. )
  38. for k in params_keys:
  39. if k not in params_dict:
  40. params_dict[k] = []
  41. p = model_params[k]
  42. if isinstance(p, torch.HalfTensor):
  43. p = p.float()
  44. params_dict[k].append(p)
  45. averaged_params = collections.OrderedDict()
  46. # v should be a list of torch Tensor.
  47. for k, v in params_dict.items():
  48. summed_v = None
  49. for x in v:
  50. summed_v = summed_v + x if summed_v is not None else x
  51. averaged_params[k] = summed_v / len(v)
  52. new_state['model'] = averaged_params
  53. return new_state
  54. def last_n_checkpoints(paths, n):
  55. assert len(paths) == 1
  56. path = paths[0]
  57. pt_regexp = re.compile(r'checkpoint(\d+)\.pt')
  58. files = os.listdir(path)
  59. entries = []
  60. for f in files:
  61. m = pt_regexp.fullmatch(f)
  62. if m is not None:
  63. entries.append((int(m.group(1)), m.group(0)))
  64. if len(entries) < n:
  65. raise Exception('Found {} checkpoint files but need at least {}', len(entries), n)
  66. return [os.path.join(path, x[1]) for x in sorted(entries, reverse=True)[:n]]
  67. def main():
  68. parser = argparse.ArgumentParser(
  69. description='Tool to average the params of input checkpoints to '
  70. 'produce a new checkpoint',
  71. )
  72. parser.add_argument(
  73. '--inputs',
  74. required=True,
  75. nargs='+',
  76. help='Input checkpoint file paths.',
  77. )
  78. parser.add_argument(
  79. '--output',
  80. required=True,
  81. metavar='FILE',
  82. help='Write the new checkpoint containing the averaged weights to this '
  83. 'path.',
  84. )
  85. parser.add_argument(
  86. '--num',
  87. type=int,
  88. help='if set, will try to find checkpoints with names checkpoint_xx.pt in the path specified by input, '
  89. 'and average last num of those',
  90. )
  91. args = parser.parse_args()
  92. print(args)
  93. if args.num is not None:
  94. args.inputs = last_n_checkpoints(args.inputs, args.num)
  95. print('averaging checkpoints: ', args.inputs)
  96. new_state = average_checkpoints(args.inputs)
  97. torch.save(new_state, args.output)
  98. print('Finished writing averaged checkpoint to {}.'.format(args.output))
  99. if __name__ == '__main__':
  100. main()
Tip!

Press p to see the previous file, or n to see the next file

Comments

Loading...