#!/bin/bash
# Copyright (c) Meta Platforms, Inc. and affiliates.
# All rights reserved.
#
# This source code is licensed under the BSD-style license found in the
# LICENSE file in the root directory of this source tree.

set -exu

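# Assumed usage, inferred from the positional defaults below:
#   bash .ci/scripts/test_phi_3_mini.sh [BUILD_TYPE] [BUILD_DIR]
# e.g.
#   bash .ci/scripts/test_phi_3_mini.sh Release cmake-out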
BUILD_TYPE=${1:-Debug}
BUILD_DIR=${2:-cmake-out}
MODEL_DIR=examples/models/phi-3-mini

echo "Building with BUILD_TYPE: $BUILD_TYPE, BUILD_DIR: $BUILD_DIR"

if [[ -z "${PYTHON_EXECUTABLE:-}" ]]; then
  PYTHON_EXECUTABLE=python3
fi

# Number of processes for a parallel build
NPROC=8
if hash nproc &> /dev/null; then NPROC=$(nproc); fi
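# Hedged fallback (an assumption, not in the original script): macOS has no
# `nproc` unless coreutils is installed, but `sysctl -n hw.ncpu` reports the
# logical core count there.
if [[ "$(uname)" == "Darwin" ]] && ! hash nproc &> /dev/null; then
  NPROC=$(sysctl -n hw.ncpu)
fi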

# Configure, build, and install the core ExecuTorch libraries (XNNPACK backend
# plus quantized/optimized/custom kernels) into ${BUILD_DIR}.
cmake_install_executorch_libraries() {
  cmake -DPYTHON_EXECUTABLE=$PYTHON_EXECUTABLE \
      -DCMAKE_INSTALL_PREFIX=${BUILD_DIR} \
      -DEXECUTORCH_ENABLE_LOGGING=1 \
      -DCMAKE_BUILD_TYPE=${BUILD_TYPE} \
      -DEXECUTORCH_BUILD_EXTENSION_DATA_LOADER=ON \
      -DEXECUTORCH_BUILD_EXTENSION_MODULE=ON \
      -DEXECUTORCH_BUILD_EXTENSION_TENSOR=ON \
      -DEXECUTORCH_BUILD_XNNPACK=ON \
      -DEXECUTORCH_BUILD_KERNELS_QUANTIZED=ON \
      -DEXECUTORCH_BUILD_KERNELS_OPTIMIZED=ON \
      -DEXECUTORCH_BUILD_KERNELS_CUSTOM=ON \
      -B${BUILD_DIR} .

  cmake --build ${BUILD_DIR} -j${NPROC} --target install --config ${BUILD_TYPE}
}

# Configure and build the phi-3-mini example runner against the ExecuTorch
# install in ${BUILD_DIR}.
cmake_build_phi_3_mini() {
  cmake -DPYTHON_EXECUTABLE=$PYTHON_EXECUTABLE \
      -DCMAKE_INSTALL_PREFIX=${BUILD_DIR} \
      -DCMAKE_BUILD_TYPE=${BUILD_TYPE} \
      -DEXECUTORCH_BUILD_KERNELS_CUSTOM=ON \
      -DEXECUTORCH_BUILD_KERNELS_OPTIMIZED=ON \
      -DEXECUTORCH_BUILD_XNNPACK=ON \
      -DEXECUTORCH_BUILD_KERNELS_QUANTIZED=ON \
      -B${BUILD_DIR}/${MODEL_DIR} \
      ${MODEL_DIR}

  cmake --build ${BUILD_DIR}/${MODEL_DIR} -j${NPROC} --config ${BUILD_TYPE}
}

# Download and convert tokenizer.model
prepare_tokenizer() {
  echo "Downloading and converting tokenizer.model"
  wget -O tokenizer.model "https://huggingface.co/microsoft/Phi-3-mini-128k-instruct/resolve/main/tokenizer.model?download=true"
  $PYTHON_EXECUTABLE -m executorch.extension.llm.tokenizer.tokenizer -t tokenizer.model -o tokenizer.bin
}

# Export the phi-3-mini model to .pte
export_phi_3_mini() {
  echo "Exporting phi-3-mini. This will take a few minutes"
  $PYTHON_EXECUTABLE -m executorch.examples.models.phi-3-mini.export_phi-3-mini -c "4k" -s 128 -o phi-3-mini.pte
}

# Run the compiled runner on the exported model and capture its output
run_and_verify() {
  NOW=$(date +"%H:%M:%S")
  echo "Starting to run phi-3-mini runner at ${NOW}"
  if [[ ! -f "phi-3-mini.pte" ]]; then
    echo "Export failed. Abort"
    exit 1
  fi
  if [[ ! -f "tokenizer.bin" ]]; then
    echo "tokenizer.bin is missing. Abort"
    exit 1
  fi

  ${BUILD_DIR}/${MODEL_DIR}/phi_3_mini_runner \
      --model_path=phi-3-mini.pte \
      --tokenizer_path=tokenizer.bin \
      --seq_len=128 \
      --temperature=0 \
      --prompt="<|system|>
You are a helpful assistant.<|end|>
<|user|>
What is the capital of France?<|end|>
<|assistant|>" > result.txt

  # Verify that the expected answer appears in result.txt
  RESULT=$(cat result.txt)
  EXPECTED_RESULT="The capital of France is Paris."
  echo "Expected substring: ${EXPECTED_RESULT}"
  echo "Actual result: ${RESULT}"
  if [[ "${RESULT}" == *"${EXPECTED_RESULT}"* ]]; then
    echo "Success"
    exit 0
  else
    echo "Failure; expected substring not found in the output"
    exit 1
  fi
}
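
# Optional cleanup sketch (an addition, not part of the original flow): a trap
# like the one below would remove the generated artifacts on exit. Left
# commented out because the CI job may want to inspect or upload them.
# trap 'rm -f tokenizer.model tokenizer.bin phi-3-mini.pte result.txt' EXIT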

# Step 1. Build ExecuTorch and the phi-3-mini runner
cmake_install_executorch_libraries
cmake_build_phi_3_mini

# Step 2. Export the tokenizer and model
prepare_tokenizer
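
# Hedged sanity check (an addition, not in the original flow): `set -e` aborts
# if wget itself fails, but a truncated download can still leave an empty
# file, so verify both tokenizer artifacts before the expensive export step.
for f in tokenizer.model tokenizer.bin; do
  if [[ ! -s "${f}" ]]; then
    echo "${f} is missing or empty. Abort"
    exit 1
  fi
done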

export_phi_3_mini

# Step 3. Run and verify the result
run_and_verify