trt
1596 - jetsonorinnano trt

1596 - jetsonorinnano trt

Summary: Pipeline succeeded and a valid report was generated.

Model Details

Logs Details

user.log
Run benchmark with command: trtexec --profilingVerbosity=layer_names_only --iterations=1 --dumpProfile --avgRuns=20 --exportTimes=time.json --exportOutput=tensor.json --exportProfile=profile.json --exportLayerInfo=layerInfo.json --onnx=1741590822675_mnist7.onnx
[01/07/1970-00:31:31] [W] [TRT] onnx2trt_utils.cpp:375: Your ONNX model has been generated with INT64 weights, while TensorRT does not natively support INT64. Attempting to cast down to INT32. 
[01/07/1970-00:31:50] [W] * GPU compute time is unstable, with coefficient of variance = 2.16747%. 
[01/07/1970-00:31:50] [W]   If not already in use, locking GPU clock frequency or adding --useSpinWait may improve the stability. 
End of benchmarking.
error.log

Report Details

report.json
{
  "GFLOPs": null,
  "accuracy": null,
  "ambiant_temperature": 25,
  "benchmark_type": "TYPE1",
  "date": "2025-03-10T07:15:14.933918+00:00",
  "energy_efficiency": null,
  "flash_size": 61000499200,
  "flash_usage": 86,
  "inference_engine": "trt",
  "inference_latency": {
    "latency_per_layers": [
      {
        "layer_name": "Convolution28 + Parameter6 + (Unnamed Layer* 4) [Shuffle] + Plus30 + ReLU32",
        "max": null,
        "mean": 24.223399999999998,
        "min": null,
        "std": null
      },
      {
        "layer_name": "Pooling66",
        "max": null,
        "mean": 10.0878,
        "min": null,
        "std": null
      },
      {
        "layer_name": "Convolution110 + Parameter88 + (Unnamed Layer* 10) [Shuffle] + Plus112 + ReLU114",
        "max": null,
        "mean": 16.622899999999998,
        "min": null,
        "std": null
      },
      {
        "layer_name": "Pooling160",
        "max": null,
        "mean": 10.188600000000001,
        "min": null,
        "std": null
      },
      {
        "layer_name": "Times212_reshape0 + reshape_before_Times212",
        "max": null,
        "mean": 0,
        "min": null,
        "std": null
      },
      {
        "layer_name": "Times212",
        "max": null,
        "mean": 10.6875,
        "min": null,
        "std": null
      },
      {
        "layer_name": "reshape_after_Times212",
        "max": null,
        "mean": 0,
        "min": null,
        "std": null
      }
    ],
    "max": null,
    "mean": 71.8102,
    "min": null,
    "std": null,
    "troughput": null
  },
  "load_accelerator": 39.187096774193556,
  "load_cpu": 33.86559139784946,
  "model_file_name": "mnist7.onnx",
  "model_size": 26454,
  "nb_inference": 23568,
  "nb_parameters_model": null,
  "postprocess_time": {
    "max": null,
    "mean": null,
    "min": null,
    "std": null
  },
  "power_consumption": null,
  "preprocess_time": {
    "max": null,
    "mean": null,
    "min": null,
    "std": null
  },
  "ram_peak_usage": 0.2921632411264047,
  "ram_size": 7813058560,
  "target": "jetsonorinnano",
  "target_id": null,
  "temperature": 46.937,
  "version": 0,
  "version_tag": "v1.0.0"
}