Determined supports batch (offline) inference through the `torch_batch_process()` API. You hand it a processor class and a dataset:

```python
torch_batch_process(
    batch_processor_cls=MyProcessor,
    dataset=dataset,
)
```

In the experiment configuration, launch the script with the distributed launcher and set the number of slots; the dataset is sharded across that many workers:

```yaml
entrypoint: >-
  python3 -m determined.launch.torch_distributed
  python3 batch_processing.py
resources:
  slots_per_trial: 4
```

The processor's context can also wrap a custom metric reducer, so that per-worker results (here, an accuracy metric) are aggregated across all slots:

```python
class MyProcessor(TorchBatchProcessor):
    def __init__(self, context):
        self.reducer = context.wrap_reducer(
            reducer=AccuracyMetricReducer(), name="accuracy"
        )
```

To perform batch (offline) inference on a model, load the model in the `__init__()` function, implement the per-batch inference logic in `process_batch()`, and optionally override the `on_checkpoint_start()` and `on_finish()` hooks:

```python
import pathlib

import torch

from determined.pytorch.experimental import TorchBatchProcessor

"""
Define custom processor class
"""


class InferenceProcessor(TorchBatchProcessor):
    def __init__(self, context):
        self.context = context
        self.model = context.prepare_model_for_inference(get_model())
        self.output = []
        self.last_index = 0
        # process_batch() below uses self.profiler; a plain torch profiler
        # is defined here (an addition) so the example is self-contained.
        self.profiler = torch.profiler.profile(
            activities=[torch.profiler.ProfilerActivity.CPU]
        )

    def process_batch(self, batch, batch_idx) -> None:
        model_input = batch[0]
        model_input = self.context.to_device(model_input)
        with torch.no_grad():
            with self.profiler as p:
                pred = self.model(model_input)
                p.step()
        output = {"predictions": pred, "input": batch}
        self.output.append(output)
        self.last_index = batch_idx

    def on_checkpoint_start(self):
        """
        During checkpoint, we persist the prediction results accumulated so far.
        """
        if len(self.output) == 0:
            return
        file_name = f"prediction_output_{self.last_index}"
        with self.context.upload_path() as path:
            file_path = pathlib.Path(path, file_name)
            torch.save(self.output, file_path)
        self.output = []
```
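The `on_finish()` hook runs once processing ends, which makes it a natural place to flush anything still buffered. Below is a minimal sketch of such a method for the `InferenceProcessor` above, reusing the same `context.upload_path()` pattern; the `_final` file naming is this sketch's own choice, not part of the original:

```python
    def on_finish(self):
        # Persist any predictions not yet written out by a checkpoint.
        if len(self.output) == 0:
            return
        # "_final" suffix is an assumption, not from the original example.
        file_name = f"prediction_output_final_{self.last_index}"
        with self.context.upload_path() as path:
            torch.save(self.output, pathlib.Path(path, file_name))
        self.output = []
```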
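The `MyProcessor` example above passes an `AccuracyMetricReducer` that is never defined. Here is a minimal sketch, assuming Determined's `det.pytorch.MetricReducer` interface (`reset()`, `per_slot_reduce()`, `cross_slot_reduce()`); the `update()` signature is this sketch's own convention, meant to be called from `process_batch()`:

```python
from determined import pytorch


class AccuracyMetricReducer(pytorch.MetricReducer):
    """Accumulates correct/total counts per slot, then merges across slots."""

    def __init__(self):
        self.reset()

    def reset(self):
        self.correct = 0
        self.total = 0

    def update(self, predictions, labels):
        # predictions: (batch, classes) logits; labels: (batch,) class ids.
        self.correct += (predictions.argmax(dim=1) == labels).sum().item()
        self.total += labels.shape[0]

    def per_slot_reduce(self):
        # Each worker reports its local counts.
        return self.correct, self.total

    def cross_slot_reduce(self, per_slot_metrics):
        # Merge the per-worker counts into a single accuracy value.
        correct, total = map(sum, zip(*per_slot_metrics))
        return correct / total if total else 0.0
```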

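Putting the pieces together, the `batch_processing.py` named in the config's entrypoint might look like the sketch below. Only the `torch_batch_process()` call comes from the original; `get_model()` (the helper `InferenceProcessor` references), the torchvision model, and the CIFAR-10 dataset are illustrative assumptions:

```python
from torchvision import datasets, models, transforms

from determined.pytorch.experimental import torch_batch_process

# Assumes InferenceProcessor (defined above) lives in this same module.


def get_model():
    # Hypothetical helper referenced by InferenceProcessor; any eval-mode
    # torch.nn.Module would do here.
    model = models.resnet18(weights=models.ResNet18_Weights.DEFAULT)
    model.eval()
    return model


if __name__ == "__main__":
    dataset = datasets.CIFAR10(
        root="./data",
        train=False,
        download=True,
        transform=transforms.ToTensor(),
    )
    # Shards the dataset across all slots and runs
    # InferenceProcessor.process_batch() on each shard.
    torch_batch_process(
        batch_processor_cls=InferenceProcessor,
        dataset=dataset,
    )
```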