symcad.core.ML.NeuralNetTrainer
Module source code:

```python
#!/usr/bin/env python3
# Copyright (C) 2022, Will Hedgecock
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.

from pathlib import Path
from typing import Dict, List, Tuple, TypeVar
import copy, io, tarfile, torch

SymPart = TypeVar('SymPart')

MIN_INPUT_VALUE = -1.0
MAX_INPUT_VALUE = 1.0
NUM_BATCHES_PER_EPOCH = 25
NUM_NON_BEST_EPOCHS_TO_TERMINATE = 10


class NeuralNetTrainer(object):
    """Private helper class to train a set of neural networks to learn the geometric properties
    of a given `SymPart`."""

    # Public attributes ---------------------------------------------------------------------------

    sympart: SymPart
    """`SymPart` whose geometric properties are being learned."""

    geometry: Dict[str, float]
    """Dictionary of geometric properties to learn."""

    geometry_stats: Dict[str, Tuple[float, float, float, float]]
    """Dictionary of bounds, scalers, and biases on the geometric properties to learn."""

    geometry_generators: Dict[str, torch.distributions.distribution.Distribution]
    """Dictionary of uniform data generators for the geometric properties to learn."""

    networks: Dict[str, torch.nn.Module]
    """Dictionary of neural networks corresponding 1-to-1 to each geometric property to learn."""

    best_networks: Dict[str, torch.nn.Module]
    """Dictionary of the best neural networks with the lowest losses learned so far."""

    criteria: Dict[str, torch.nn.Module]
    """Dictionary of loss criteria corresponding to each neural network being trained."""

    optimizers: Dict[str, torch.optim.Optimizer]
    """Dictionary of training optimizers corresponding to each neural network being trained."""


    # Constructor ---------------------------------------------------------------------------------

    def __init__(self, part: SymPart, cad_params_to_learn: List[str]) -> None:
        """Initializes a neural network trainer for the specified `part` and corresponding
        `cad_params_to_learn`.

        The available options for `cad_params_to_learn` are:

        - Lengths: `xlen`, `ylen`, `zlen`
        - Centers of Gravity: `cg_x`, `cg_y`, `cg_z`
        - Centers of Buoyancy: `cb_x`, `cb_y`, `cb_z`
        - Volume and Area: `material_volume`, `displaced_volume`, `surface_area`
        """
        super().__init__()
        self.sympart = part
        self.networks = {}
        self.criteria = {}
        self.optimizers = {}
        self.best_networks = {}
        self.geometry = part.geometry.as_dict()
        self.geometry_stats = {}
        self.geometry_generators = {}
        for param in self.geometry.keys():
            bounds = part.get_geometric_parameter_bounds(param)
            desired_range = MAX_INPUT_VALUE - MIN_INPUT_VALUE
            actual_range = bounds[1] - bounds[0]
            scaler = desired_range / actual_range
            bias = MIN_INPUT_VALUE - bounds[0]
            self.geometry_stats[param] = bounds[0], bounds[1], scaler, bias
            self.geometry_generators[param] = torch.distributions.uniform.Uniform(bounds[0],
                                                                                   bounds[1])
        for cad_param in cad_params_to_learn:
            network = torch.nn.Sequential(
                torch.nn.Linear(len(self.geometry), 10),
                torch.nn.Tanh(),
                torch.nn.Linear(10, 1))
            self.networks[cad_param] = network
            self.criteria[cad_param] = torch.nn.MSELoss()
            self.optimizers[cad_param] = torch.optim.SGD(network.parameters(), lr=0.1)
            self.best_networks[cad_param] = copy.deepcopy(self.networks[cad_param])


    # Private helper methods ----------------------------------------------------------------------

    def _generate_data(self, num_points: int) -> Tuple[List[float], Dict[str, List[float]]]:

        # Generate all necessary PyTorch data structures
        inputs = torch.empty(num_points, len(self.geometry))
        outputs = { cad_param: torch.empty(num_points, 1) for cad_param in self.networks.keys() }

        # Determine the expected geometric outputs given randomized input parameters
        datum = 0
        while datum < num_points:
            try:
                for idx, param_and_stat in enumerate(self.geometry_stats.items()):
                    param, stat = param_and_stat
                    self.geometry[param] = self.geometry_generators[param].sample().item()
                    inputs[datum, idx] = (self.geometry[param] * stat[2]) + stat[3]
                self.sympart.geometry.set(**self.geometry)
                props = self.sympart.get_cad_physical_properties(True)
            except Exception:
                continue
            for cad_param in self.networks.keys():
                outputs[cad_param][datum] = props[cad_param]
            datum += 1

        # Return all randomized inputs and their corresponding expected outputs
        return inputs, outputs


    # Public methods ------------------------------------------------------------------------------

    def learn_parameters(self, num_data_points_per_batch: int) -> None:
        """Trains the underlying neural network to learn all geometric properties as specified when
        the `NeuralNetTrainer` was created.

        Parameters
        ----------
        num_data_points_per_batch : `int`
            Number of geometric data points to include per training iteration.
        """

        # Initialize loss structures and ensure that all networks are in training mode
        print('Training neural nets for the "{}" part'.format(self.sympart.name))
        print('Input geometric parameters: {}'.format(list(self.geometry.keys())))
        print('Geometric parameter bounds: {}'.format(
            {key: (stats[0], stats[1]) for key, stats in self.geometry_stats.items()}))
        print('Properties being trained: {}'.format(list(self.networks.keys())))
        best_losses = {}
        running_losses = {}
        epochs_since_best_loss = {}
        remaining_networks = self.networks
        for network_name, network in remaining_networks.items():
            epochs_since_best_loss[network_name] = 0
            best_losses[network_name] = 1000000.0
            running_losses[network_name] = 0.0
            network.train()

        # Train the neural networks until their loss stops decreasing
        while len(remaining_networks):

            # Train each neural network for the specified number of batches
            for _ in range(NUM_BATCHES_PER_EPOCH):
                inputs, outputs = self._generate_data(num_data_points_per_batch)
                for network_name, network in remaining_networks.items():
                    optimizer = self.optimizers[network_name]
                    criterion = self.criteria[network_name]
                    predicted_outputs = network(inputs)
                    loss = criterion(predicted_outputs, outputs[network_name])
                    optimizer.zero_grad()
                    loss.backward()
                    optimizer.step()
                    current_loss = loss.item()
                    running_losses[network_name] += current_loss
                    print('   Network: {}, Sub-Epoch Loss: {}'.format(network_name, current_loss))

            # Determine whether the current network has improved the overall training loss
            networks_complete = []
            for network_name, network in remaining_networks.items():
                print('Network: {}, Loss: {}'
                      .format(network_name, running_losses[network_name] / NUM_BATCHES_PER_EPOCH))
                if running_losses[network_name] < best_losses[network_name]:
                    epochs_since_best_loss[network_name] = 0
                    best_losses[network_name] = running_losses[network_name]
                    self.best_networks[network_name] = copy.deepcopy(network)
                else:
                    epochs_since_best_loss[network_name] += 1
                running_losses[network_name] = 0.0
                if epochs_since_best_loss[network_name] >= NUM_NON_BEST_EPOCHS_TO_TERMINATE:
                    networks_complete.append(network_name)
            for completed_network in networks_complete:
                del remaining_networks[completed_network]
        print('Training complete!')


    def save(self, full_storage_path: str) -> None:
        """Stores the underlying trained neural networks as an XZ-compressed tarball at the
        location specified in `full_storage_path`."""

        # Create any necessary path directories
        file_path = Path(full_storage_path).absolute().resolve()
        if not file_path.parent.exists():
            file_path.parent.mkdir()

        # Convert all networks to TorchScript, save them, and zip them into a XZ tarball
        with tarfile.open(file_path, 'w:xz') as zip_file:
            param_order = ';'.join(self.geometry.keys()).encode('utf-8')
            file_info = tarfile.TarInfo('param_order.txt')
            file_info.size = len(param_order)
            zip_file.addfile(file_info, io.BytesIO(param_order))
            param_stats = ';'.join([key+':'+str(val) for key, val
                                    in self.geometry_stats.items()]).encode('utf-8')
            file_info = tarfile.TarInfo('param_stats.txt')
            file_info.size = len(param_stats)
            zip_file.addfile(file_info, io.BytesIO(param_stats))
            for network_name, network in self.best_networks.items():
                scripted_model = torch.jit.script(network.eval())
                model_bytes = torch.jit.freeze(scripted_model).save_to_buffer()
                file_info = tarfile.TarInfo(network_name + '.pt')
                file_info.size = len(model_bytes)
                zip_file.addfile(file_info, io.BytesIO(model_bytes))
```
class NeuralNetTrainer(object)
Private helper class to train a set of neural networks to learn the geometric properties
of a given SymPart.
def __init__(self, part: SymPart, cad_params_to_learn: List[str]) -> None
Initializes a neural network trainer for the specified part and corresponding
cad_params_to_learn.
The available options for cad_params_to_learn are:
- Lengths: xlen, ylen, zlen
- Centers of Gravity: cg_x, cg_y, cg_z
- Centers of Buoyancy: cb_x, cb_y, cb_z
- Volume and Area: material_volume, displaced_volume, surface_area
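For illustration, a minimal construction sketch follows; the Pipe part and its symcad.parts import path are assumptions used purely as an example of a concrete SymPart, not something defined in this module.

```python
# Hedged usage sketch: 'Pipe' and the 'symcad.parts' import path are assumptions
# used only for illustration; any concrete SymPart implementation would do.
from symcad.parts import Pipe
from symcad.core.ML.NeuralNetTrainer import NeuralNetTrainer

part = Pipe('example_pipe')   # the part whose geometry will be randomized during training
trainer = NeuralNetTrainer(part, ['material_volume', 'surface_area', 'cg_x'])
```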
Public attributes:

- geometry_stats: Dictionary of bounds, scalers, and biases on the geometric properties to learn.
- geometry_generators: Dictionary of uniform data generators for the geometric properties to learn.
- networks: Dictionary of neural networks corresponding 1-to-1 to each geometric property to learn.
- best_networks: Dictionary of the best neural networks with the lowest losses learned so far.
- criteria: Dictionary of loss criteria corresponding to each neural network being trained.
- optimizers: Dictionary of training optimizers corresponding to each neural network being trained.
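As a sketch of how the geometry_stats entries are used (mirroring the constructor and _generate_data in the source above), each entry is a (lower_bound, upper_bound, scaler, bias) tuple, and each randomly sampled parameter value is transformed to value * scaler + bias before being fed to a network. The bound and sample values below are made-up examples.

```python
# Illustrative values only; the formulas follow the module source above.
MIN_INPUT_VALUE, MAX_INPUT_VALUE = -1.0, 1.0

lower, upper = 0.0, 4.0                                         # example parameter bounds
scaler = (MAX_INPUT_VALUE - MIN_INPUT_VALUE) / (upper - lower)  # 2.0 / 4.0 = 0.5
bias = MIN_INPUT_VALUE - lower                                  # -1.0
stats = (lower, upper, scaler, bias)                            # stored in geometry_stats

raw_value = 3.0                             # sampled from Uniform(lower, upper)
network_input = raw_value * scaler + bias   # 0.5, i.e. mapped into the [-1, 1] input range
```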
def learn_parameters(self, num_data_points_per_batch: int) -> None
Trains the underlying neural networks to learn all geometric properties as specified when
the NeuralNetTrainer was created.

Parameters:

- num_data_points_per_batch (int): Number of geometric data points to include per training iteration.
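Continuing the hypothetical trainer from the earlier sketch, a call might look like the following; the batch size of 64 is an arbitrary example value.

```python
# Each epoch draws NUM_BATCHES_PER_EPOCH (25) freshly randomized batches; a network is
# retired once NUM_NON_BEST_EPOCHS_TO_TERMINATE (10) epochs pass without improving its
# best epoch loss, and its best-performing copy is kept in trainer.best_networks.
trainer.learn_parameters(num_data_points_per_batch=64)
```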
def save(self, full_storage_path: str) -> None
Stores the underlying trained neural networks as an XZ-compressed tarball at the
location specified in full_storage_path.
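The archive contains param_order.txt (the ';'-separated input-parameter ordering), param_stats.txt (the per-parameter scaling statistics), and one TorchScript .pt file per trained property. A loader is not part of this class; the sketch below only illustrates how such an archive could be read back, and the load_trained_networks helper is a hypothetical name.

```python
# Hedged sketch of reading back an archive written by save(); not part of NeuralNetTrainer.
import io, tarfile, torch

def load_trained_networks(archive_path: str):
    """Hypothetical helper returning the input-parameter order and the stored networks."""
    networks = {}
    with tarfile.open(archive_path, 'r:xz') as archive:
        # 'param_order.txt' holds the ';'-separated ordering of the network input parameters
        param_order = archive.extractfile('param_order.txt').read().decode('utf-8').split(';')
        for member in archive.getmembers():
            if member.name.endswith('.pt'):
                model_bytes = archive.extractfile(member).read()
                networks[member.name[:-3]] = torch.jit.load(io.BytesIO(model_bytes))
    return param_order, networks
```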