    @property
    def name(self) -> str:
        """Name of layer"""
        return self['name']

    @property
    def macs(self) -> int:
        """Number of Multiply-Accumulate operations required by this layer"""
        return self['macs']

    @property
    def ops(self) -> int:
        """Number of operations required by this layer"""
        return self['ops']

    @property
    def accelerator_cycles(self) -> int:
        """Number of accelerator clock cycles required by this layer"""
        return self['accelerator_cycles']

    @property
    def time(self) -> float:
        """Time in seconds required by this layer"""
        return self['time']

    @property
    def cpu_cycles(self) -> float:
        """Number of CPU clock cycles required by this layer"""
        return self['cpu_cycles']

    @property
    def energy(self) -> float:
        """Energy in Joules required by this layer

        The energy is relative to the 'baseline' energy
        (i.e. energy used while the device was idling)
        """
        return self['energy']
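As an illustrative sketch only (not part of the original source), reading the per-layer fields above might look like the following, where `layer_result` is assumed to be a single entry returned by `TfliteMicroModel.get_profiling_results()` (defined further below):

# Illustrative sketch: `layer_result` is assumed to be one
# TfliteMicroProfiledLayerResult from TfliteMicroModel.get_profiling_results()
print(
    f'{layer_result.name}: '
    f'{layer_result.macs} MACs, {layer_result.ops} ops, '
    f'{layer_result.time * 1e3:.3f} ms, '    # time is reported in seconds
    f'{layer_result.energy * 1e6:.1f} uJ'    # energy is reported in Joules
)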
class TfliteMicroModel:
    """This class wraps the TF-Lite Micro interpreter loaded with a .tflite model"""
    def __init__(
        self,
        tflm_wrapper,
        tflm_accelerator: TfliteMicroAccelerator,
        flatbuffer_data: bytes,
        enable_profiler: bool = False,
        enable_recorder: bool = False,
        enable_tensor_recorder: bool = False,
        force_buffer_overlap: bool = False,
        runtime_buffer_sizes: List[int] = None,
    ):
        # pylint: disable=protected-access
        from .tflite_micro import TfliteMicro

        self._layer_callback: Callable[[int, List[bytes]], bool] = None
        self._layer_errors: List[TfliteMicroLayerError] = []
        self._tflm_accelerator = tflm_accelerator

        if not runtime_buffer_sizes:
            runtime_buffer_sizes = [0]

        TfliteMicro._clear_logged_errors()

        accelerator_wrapper = None if tflm_accelerator is None else tflm_accelerator.accelerator_wrapper
        self._model_wrapper = tflm_wrapper.TfliteMicroModelWrapper()
        if not self._model_wrapper.load(
            flatbuffer_data,
            accelerator_wrapper,
            enable_profiler,
            enable_recorder,
            enable_tensor_recorder,
            force_buffer_overlap,
            runtime_buffer_sizes
        ):
            raise RuntimeError(f'Failed to load model, additional info:\n{TfliteMicro._get_logged_errors_str()}')

        layer_msgs = self._model_wrapper.get_layer_msgs()
        for msg in layer_msgs:
            err = TfliteMicroLayerError.create(msg)
            if err:
                self._layer_errors.append(err)
    @property
    def accelerator(self) -> TfliteMicroAccelerator:
        """Reference to hardware accelerator used by model"""
        return self._tflm_accelerator

    @property
    def layer_errors(self) -> List[TfliteMicroLayerError]:
        """List of error messages triggered by kernels while loading the model.

        Typically, these errors indicate that a given model layer is not supported by
        a hardware accelerator and had to fall back to a default kernel implementation.
        """
        return self._layer_errors

    @property
    def details(self) -> TfliteMicroModelDetails:
        """Return details about the loaded model"""
        return TfliteMicroModelDetails(self._model_wrapper.get_details())

    @property
    def input_size(self) -> int:
        """Number of input tensors"""
        return self._model_wrapper.get_input_size()
    def input(self, index=0, value: np.ndarray = None) -> np.ndarray:
        """Return a reference to a model input tensor's data

        If the value argument is provided then copy the value to the input tensor's buffer
        """
        if index >= self.input_size:
            raise IndexError(f'Input index: {index} >= max size: {self.input_size}')
        input_tensor = self._model_wrapper.get_input(index)
        if value is not None:
            np.copyto(input_tensor, value)

        return input_tensor
    @property
    def output_size(self) -> int:
        """Number of output tensors"""
        return self._model_wrapper.get_output_size()
    def output(self, index=0) -> np.ndarray:
        """Return a reference to a model output tensor's data"""
        if index >= self.output_size:
            raise IndexError(f'Output index: {index} >= max size: {self.output_size}')
        return self._model_wrapper.get_output(index)
    def invoke(self):
        """Invoke the model to execute one inference

        (See the usage sketch following this class definition)
        """
        # pylint: disable=protected-access
        from .tflite_micro import TfliteMicro

        TfliteMicro._clear_logged_errors()
        if not self._model_wrapper.invoke():
            raise RuntimeError(f'Failed to invoke model, additional info:\n{TfliteMicro._get_logged_errors_str()}')
    @property
    def is_profiler_enabled(self) -> bool:
        """Return if the profiler is enabled"""
        return self._model_wrapper.is_profiler_enabled()
    def get_profiling_results(self) -> List[TfliteMicroProfiledLayerResult]:
        """Return the profiling results of each model layer

        (See the profiling sketch following this class definition)

        Returns:
            A list where each entry contains the profiling results of the associated model layer
        """
        retval = []
        results = self._model_wrapper.get_profiling_results()
        for e in results:
            retval.append(TfliteMicroProfiledLayerResult(e))

        return retval
    @property
    def is_recorder_enabled(self) -> bool:
        """Return if the model recorder is enabled"""
        return self._model_wrapper.is_recorder_enabled()

    @property
    def is_tensor_recorder_enabled(self) -> bool:
        """Return if the tensor recorder is enabled"""
        return self._model_wrapper.is_tensor_recorder_enabled()
    def get_recorded_data(self) -> Dict:
        """Return the recorded contents of the model

        (See the recording sketch following this class definition)

        Returns:
            A dictionary whose 'layers' entry is a list where each entry contains
            the input/output tensors of the associated model layer
        """
        results_bin = self._model_wrapper.get_recorded_data()
        if results_bin is None:
            raise RuntimeError('Failed to retrieve recorded model data from Tensorflow-Lite Micro')

        try:
            recorded_data = msgpack.loads(results_bin)
        except Exception as e:
            raise RuntimeError(f'Failed to parse recorded model binary msgpack data, err: {e}')

        retval = dict(
            memory_plan=recorded_data.pop('memory_plan', None),
            layers=[]
        )

        init_layers = recorded_data.pop('init', [])
        prepare_layers = recorded_data.pop('prepare', [])
        execute_layers = recorded_data.pop('execute', [])

        def _merge_layers(layers):
            for l in layers:
                index = l.pop('index')
                while len(retval['layers']) <= index:
                    retval['layers'].append({})
                retval['layers'][index].update(l)

        _merge_layers(init_layers)
        _merge_layers(prepare_layers)
        _merge_layers(execute_layers)

        retval.update(recorded_data)

        return retval
    def get_layer_error(self, index: int) -> TfliteMicroLayerError:
        """Return the TfliteMicroLayerError at the given layer index if found, else return None"""
        for err in self._layer_errors:
            if err.index == index:
                return err
        return None
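A minimal usage sketch of the inference API above (input(), invoke(), output()). Construction of the model is elided because it requires the internal tflm_wrapper module; `model` and `sample` below are assumptions, not part of the original source:

import numpy as np

model: TfliteMicroModel = ...   # assumed: an already-constructed TfliteMicroModel
sample: np.ndarray = ...        # assumed: matches input tensor 0's shape and dtype

model.input(0, value=sample)    # copy the sample into input tensor 0
model.invoke()                  # execute one inference
prediction = model.output(0)    # reference to output tensor 0's data
print(prediction)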
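If the model was loaded with enable_profiler=True, the per-layer results and any kernel fallback errors can be inspected as in this sketch (continuing the assumed `model` from above):

if model.is_profiler_enabled:
    for i, layer in enumerate(model.get_profiling_results()):
        err = model.get_layer_error(i)   # None if the layer loaded without errors
        suffix = f' (layer error: {err})' if err else ''
        print(f'{i}: {layer.name} -> {layer.macs} MACs, {layer.time * 1e3:.3f} ms{suffix}')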
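Likewise, a sketch of walking the dictionary returned by get_recorded_data(); only the 'memory_plan' and 'layers' keys are used here because they are set explicitly in the implementation above:

if model.is_recorder_enabled:
    recorded = model.get_recorded_data()
    print('Memory plan:', recorded['memory_plan'])
    for i, layer_data in enumerate(recorded['layers']):
        # each entry is a dict merged from the 'init', 'prepare' and 'execute' recordings
        print(f"Layer {i}: recorded keys = {sorted(layer_data.keys())}")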