13 changes: 11 additions & 2 deletions .github/workflows/pull.yml
@@ -933,15 +933,17 @@ jobs:
# The generic Linux job chooses to use base env, not the one setup by the image
CONDA_ENV=$(conda env list --json | jq -r ".envs | .[-1]")
conda activate "${CONDA_ENV}"

# Install test requirements
pip install -r backends/nxp/requirements-eiq.txt

# Build and install Executorch
PYTHON_EXECUTABLE=python \
CMAKE_ARGS="-DEXECUTORCH_BUILD_NXP_NEUTRON=ON" \
CMAKE_ARGS="-DEXECUTORCH_BUILD_NXP_NEUTRON=ON -DEXECUTORCH_BUILD_NXP_NEUTRON_RUNNER=ON " \
.ci/scripts/setup-linux.sh --build-tool "cmake"

# Install test requirements
pip install -r backends/nxp/requirements-tests-pypi.txt
pip install -r backends/nxp/requirements-tests-eiq.txt
PYTHON_EXECUTABLE=python bash examples/nxp/setup.sh

# Run pytest
@@ -950,6 +952,13 @@ jobs:
# Run aot examples:
PYTHON_EXECUTABLE=python bash examples/nxp/run_aot_example.sh cifar10
PYTHON_EXECUTABLE=python bash examples/nxp/run_aot_example.sh mobilenetv2

# Run e2e example with Simulator:
PYTHON_EXECUTABLE=python bash examples/nxp/run.sh cifar10

# Run lightweight model tests:
PYTHON_EXECUTABLE=python pytest -c /dev/null backends/nxp/tests_models/ \
--nxp_runner_path "./examples/nxp/executor_runner/build/nxp_executor_runner"

test-samsung-quantmodels-linux:
name: test-samsung-quantmodels-linux
4 changes: 4 additions & 0 deletions CMakeLists.txt
@@ -628,6 +628,10 @@ if(EXECUTORCH_BUILD_NXP_NEUTRON)
list(APPEND _executorch_backends executorch_delegate_neutron)
endif()

if(EXECUTORCH_BUILD_NXP_NEUTRON_RUNNER)
add_subdirectory(${CMAKE_CURRENT_SOURCE_DIR}/examples/nxp/executor_runner)
endif()

if(EXECUTORCH_BUILD_COREML)
add_subdirectory(${CMAKE_CURRENT_SOURCE_DIR}/backends/apple/coreml)
list(APPEND _executorch_backends coremldelegate)
@@ -1,2 +1,4 @@
--index-url https://eiq.nxp.com/repository
neutron_converter_SDK_25_12
eiq_neutron_sdk==2.2.2
eiq_nsys
4 changes: 4 additions & 0 deletions backends/nxp/tests_models/__init__.py
@@ -0,0 +1,4 @@
# Copyright 2026 NXP
#
# This source code is licensed under the BSD-style license found in the
# LICENSE file in the root directory of this source tree.
37 changes: 37 additions & 0 deletions backends/nxp/tests_models/config.py
@@ -0,0 +1,37 @@
# Copyright 2026 NXP
#
# This source code is licensed under the BSD-style license found in the
# LICENSE file in the root directory of this source tree.

import os
import pathlib
import shutil

import eiq_neutron_sdk

# The PROJECT_DIR env variable is set by conftest.py in backends.nxp.tests_models.
# It is expected to point at the ExecuTorch project directory (not the install folder), so that paths to artefacts
# (config files, datasets, model weights) that live in the project tree but are not installed can be derived.
# TODO(Robert Kalmar) In accordance with the "TODO(dbort): Prune /test[s]/ dirs, /third-party/ dirs" in pyproject.toml,
# once the test folders are not installed we can derive the path from the current file location: `pathlib.Path(__file__)`.
PROJECT_DIR = os.environ.get("PROJECT_DIR")
assert PROJECT_DIR and os.path.exists(PROJECT_DIR)

OUTPUTS_DIR = pathlib.Path(os.getcwd()) / ".outputs"

NSYS_PATH = pathlib.Path(shutil.which("nsys"))
NSYS_CONFIG_PATH = os.path.join(
    PROJECT_DIR, "backends", "nxp", "tests_models", "neutron-imxrt700.ini"
)
NSYS_FIRMWARE_PATH = os.path.join(
    os.path.dirname(eiq_neutron_sdk.__file__),
    "target",
    "imxrt700",
    "cmodel",
    "NeutronFirmware.elf",
)

# The NXP_RUNNER_PATH env variable is either set by pytest when the --nxp_runner_path CLI argument is used,
# or provided as a regular environment variable.
NEUTRON_TEST_PATH = os.environ.get("NXP_RUNNER_PATH")
assert NEUTRON_TEST_PATH and os.path.exists(NEUTRON_TEST_PATH)
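
A minimal sketch (not part of this PR) of how a test module might consume these constants. The helper names and the per-test directory layout are illustrative assumptions, and the concrete CLI arguments accepted by nxp_executor_runner are defined by that binary, not spelled out here.

import pathlib
import subprocess

from executorch.backends.nxp.tests_models import config


def make_test_output_dir(test_name: str) -> pathlib.Path:
    # Hypothetical helper: keep each test's artifacts (PTE files, input/output
    # .bin files) in a separate subdirectory of the shared .outputs directory
    # that conftest.py recreates at the start of every session.
    out_dir = config.OUTPUTS_DIR / test_name
    out_dir.mkdir(parents=True, exist_ok=True)
    return out_dir


def run_neutron_runner(args: list[str]) -> subprocess.CompletedProcess:
    # NEUTRON_TEST_PATH points at the nxp_executor_runner binary built by CI;
    # the arguments it accepts are defined by that runner, not by this sketch.
    return subprocess.run([config.NEUTRON_TEST_PATH, *args], check=True)
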
17 changes: 17 additions & 0 deletions backends/nxp/tests_models/config_importer.py
@@ -0,0 +1,17 @@
# Copyright 2026 NXP
#
# This source code is licensed under the BSD-style license found in the
# LICENSE file in the root directory of this source tree.

import logging

logger = logging.getLogger(__name__)

try:
    import test.python.config as test_config  # noqa F401

    logger.debug("Importing from executorch-integration")
except ImportError:
    import executorch.backends.nxp.tests_models.config as test_config  # noqa F401

    logger.debug("Importing from executorch")
38 changes: 38 additions & 0 deletions backends/nxp/tests_models/conftest.py
@@ -0,0 +1,38 @@
# Copyright 2026 NXP
#
# This source code is licensed under the BSD-style license found in the
# LICENSE file in the root directory of this source tree.


import os
import pathlib
import shutil

from executorch.backends.nxp.tests_models.outputs_dir_importer import outputs_dir


def pytest_addoption(parser):
    parser.addoption(
        "--nxp_runner_path",
        action="store",
        default=None,
        help="Path to the nxp_executor_runner executable",
    )


def pytest_configure(config):
    nxp_runner_path = config.getoption("--nxp_runner_path")
    if nxp_runner_path:
        os.environ["NXP_RUNNER_PATH"] = nxp_runner_path

    os.environ["PROJECT_DIR"] = str(pathlib.Path(__file__).parent.parent.parent.parent)


# noinspection SpellCheckingInspection
def pytest_sessionstart(session):
    import executorch.extension.pybindings.portable_lib
    import executorch.kernels.quantized  # noqa F401

    # Remove all cached test files
    shutil.rmtree(outputs_dir.OUTPUTS_DIR, ignore_errors=True)
    os.mkdir(outputs_dir.OUTPUTS_DIR)
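
As a usage note (not part of this PR): the --nxp_runner_path option registered above reaches the tests through the NXP_RUNNER_PATH environment variable. A hypothetical fixture-based alternative is sketched below; the fixture name and assertions are assumptions, not existing code.

import os

import pytest


@pytest.fixture(scope="session")
def nxp_runner_path(pytestconfig) -> str:
    # Prefer the CLI option, fall back to a pre-set environment variable.
    path = pytestconfig.getoption("--nxp_runner_path") or os.environ.get("NXP_RUNNER_PATH")
    assert path and os.path.exists(path), "Pass --nxp_runner_path or set NXP_RUNNER_PATH"
    return path
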
136 changes: 136 additions & 0 deletions backends/nxp/tests_models/dataset_creator.py
@@ -0,0 +1,136 @@
# Copyright 2025-2026 NXP
#
# This source code is licensed under the BSD-style license found in the
# LICENSE file in the root directory of this source tree.

import abc
import os.path
import shutil
from collections import OrderedDict
from os import mkdir
from random import sample, seed

import numpy as np
import torch
from executorch.backends.nxp.backend.ir.converter.conversion import translator

from executorch.backends.nxp.tests_models.model_input_spec import ModelInputSpec
from executorch.examples.nxp.models.calibration_dataset import CalibrationDataset
from torch import Tensor


class DatasetCreator(abc.ABC):

    @abc.abstractmethod
    def generate_samples(self, dataset_dir, input_spec: list[ModelInputSpec]):
        pass


class RandomDatasetCreator(DatasetCreator):
    """Dataset creator that generates random input samples."""

    def __init__(self, num_samples=2):
        self._num_samples = num_samples

    def generate_samples(self, dataset_dir, input_spec):
        assert isinstance(input_spec, list) and all(
            isinstance(spec, ModelInputSpec) for spec in input_spec
        ), "Input_spec must be a list of ModelInputSpec."
        rng = np.random.default_rng(42)

        for idx in range(self._num_samples):
            sample_dir = dataset_dir

            # Multi-input, use a subdirectory containing the inputs for each sample
            if len(input_spec) > 1:
                sample_dir = os.path.join(dataset_dir, f"{str(idx).zfill(4)}")
                mkdir(sample_dir)

            for spec_idx, spec in enumerate(input_spec):
                match spec.dim_order:
                    case torch.contiguous_format:
                        shape = spec.shape
                    case torch.channels_last:
                        shape = tuple(
                            translator.dims_to_channels_last(list(spec.shape))
                        )
                    case _:
                        raise ValueError(f"Unsupported dim_order: {spec.dim_order}")

                sample_vector = rng.random(np.prod(shape), spec.type).reshape(shape)
                sample_vector.tofile(
                    os.path.join(sample_dir, f"{str(spec_idx).zfill(2)}.bin")
                )


class CopyDatasetCreator(DatasetCreator):
    """Creator that just copies data from another directory."""

    def __init__(self, source_dir: str):
        self._source_dir = source_dir

    def generate_samples(self, dataset_dir, input_spec):
        for sample_name in os.listdir(self._source_dir):
            sample_path = os.path.join(self._source_dir, sample_name)
            shutil.copy(sample_path, dataset_dir)


class FromCalibrationDataDatasetCreator(DatasetCreator):
    """Creator that uses a CalibrationDataset archive file."""

    def __init__(
        self,
        dataset: CalibrationDataset,
        num_examples: int,
        idx_to_label: dict[int, str],
    ):
        self._dataset = dataset
        self._num_examples = num_examples
        self._idx_to_label = idx_to_label
        seed(42)

    @staticmethod
    def _get_example_np_data(example):
        if isinstance(example, tuple):
            if len(example) == 2:
                data, _ = example
            elif len(example) == 1:
                data = example[0]
            else:
                raise ValueError(f"Unexpected number of elements in {example}.")
        else:
            raise NotImplementedError("Examples other than tuple are not supported.")

        if isinstance(data, Tensor):
            return [data.unsqueeze(0).numpy()]
        elif isinstance(data, list) and all(isinstance(dt, Tensor) for dt in data):
            return [dt.unsqueeze(0).numpy() for dt in data]
        else:
            raise TypeError("Data must be a single Tensor or a list of Tensors.")

    def generate_samples(self, dataset_dir, input_spec):
        os.makedirs(dataset_dir, exist_ok=True)
        assert (
            type(self._dataset[0]) is tuple and len(self._dataset[0]) == 2
        ), "Provide calibration data with examples and labels"

        # Use an ordered collection for a deterministic selection of samples
        classes = OrderedDict([(cl, None) for _, cl in self._dataset])
        examples_per_class = self._num_examples // len(classes)
        idx_list = []
        for cl in classes.keys():
            cl_idx_list = [
                idx for idx in range(len(self._dataset)) if self._dataset[idx][1] == cl
            ]
            class_indices = list(
                zip(sample(cl_idx_list, examples_per_class), [cl] * examples_per_class)
            )
            idx_list.extend(class_indices)

        for i, (idx, cl) in enumerate(idx_list):
            label = self._idx_to_label[cl]
            example = self._dataset[idx]
            data = self._get_example_np_data(example)
            for inp_idx, dt in enumerate(data):
                bin_file_name = f"{dataset_dir}/example_{label}_{cl}_{i}_i{str(inp_idx).zfill(2)}.bin"
                dt.tofile(bin_file_name)
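
A usage sketch, assuming ModelInputSpec exposes shape, type (a NumPy dtype), and dim_order fields; its actual constructor lives in model_input_spec.py, which is not part of this diff, and the directory name below is arbitrary.

import os

import numpy as np
import torch

from executorch.backends.nxp.tests_models.dataset_creator import RandomDatasetCreator
from executorch.backends.nxp.tests_models.model_input_spec import ModelInputSpec

dataset_dir = "calibration_data"
os.makedirs(dataset_dir, exist_ok=True)  # RandomDatasetCreator expects the directory to exist

# Assumed ModelInputSpec fields; the real constructor may differ.
spec = ModelInputSpec(shape=(1, 3, 32, 32), type=np.float32, dim_order=torch.channels_last)

RandomDatasetCreator(num_samples=2).generate_samples(dataset_dir, [spec])
# Writes raw .bin input files that the nxp_executor_runner / simulator can consume.
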