// xref: /aosp_15_r20/external/pytorch/binaries/optimize_for_mobile.cc (revision da0073e96a02ea20f0ac840b70461e3646d07c45)
/**
 * Copyright (c) 2016-present, Facebook, Inc.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

#include <iostream>
#include <set>
#include <sstream>
#include <string>
#include <vector>

#include <torch/script.h>

#include <torch/csrc/jit/api/module.h>
#include <torch/csrc/jit/passes/metal_rewrite.h>
#include <torch/csrc/jit/passes/vulkan_rewrite.h>
#include <torch/csrc/jit/passes/xnnpack_rewrite.h>
#include <torch/csrc/jit/serialization/export.h>
#include <torch/csrc/jit/serialization/import.h>

27 C10_DEFINE_string(model, "", "The torch script model to optimize.");
28 C10_DEFINE_string(
29     output,
30     "",
31     "Name of the output model to be saved.");
32 C10_DEFINE_string(backend, "", "The backend to be optimized");
33 C10_DEFINE_string(preserved_methods, "", "Methods to be preserved")
34 
main(int argc,char ** argv)35 int main(int argc, char** argv) {
36   c10::SetUsageMessage(
37     "\nRun optimization pass for pytorch model. Example usage:\n"
38     "./optimize_for_mobile"
39     " --model=<model_file>"
40     " [--output=<output_file_name>]"
41     " [--backend=<cpu|vulkan|metal>]"
42     " [--preserved_methods=<method_names>]"
43   );
44 
45   if (!c10::ParseCommandLineFlags(&argc, &argv)) {
46     std::cerr << "Failed to parse command line flags!" << std::endl;
47     std::cout << c10::UsageMessage() << std::endl;
48     return 1;
49   }
50 
51   CAFFE_ENFORCE(FLAGS_model != "", c10::UsageMessage());
52 
53   std::string output_model_name =
54     FLAGS_model.substr(0, FLAGS_model.find(".")) + "_optimized.ptl";
55 
56   if (FLAGS_output != "") {
57     output_model_name = FLAGS_output;
58   }
59 
60   std::vector<std::string> preserved_methods;
61   if(FLAGS_preserved_methods != ""){
62     std::stringstream ss(FLAGS_preserved_methods);
63     std::string m;
64     while(std::getline(ss, m, ';')){
65       if(m != ""){
66         preserved_methods.emplace_back(std::move(m));
67       }
68     }
69     std::cout<<"The following methods will be preserved:"<<std::endl;
70     for(auto& str : preserved_methods){
71       std::cout<<str<<std::endl;
72     }
73   }
74 
75   auto module = torch::jit::load(FLAGS_model);
76   auto ops = torch::jit::export_opnames(module);
77   std::cout << "\npt_operator_library(" << std::endl;
78   std::cout << "\tname = \"old_op_library\"," << std::endl;
79   std::cout << "\tops = [" << std::endl;
80   for (auto const& op: ops) {
81     std::cout << "\t\t\"" << op << "\"," << std::endl;
82   }
83   std::cout << "\t],\n)\n" << std::endl;
84 
85   torch::jit::Module optimized_module;
86   if (FLAGS_backend == "" || FLAGS_backend == "cpu") {
87     optimized_module = torch::jit::optimizeForMobile(module);
88   } else if (FLAGS_backend == "vulkan") {
89     optimized_module = torch::jit::vulkanOptimizeForMobile(
90         module, std::set<MobileOptimizerType>(), preserved_methods);
91   } else if (FLAGS_backend == "metal"){
92     optimized_module = torch::jit::metalOptimizeForMobile(module, preserved_methods);
93   }else{
94     CAFFE_ENFORCE(false, "Unknown backend: " + FLAGS_backend);
95   }
96   auto new_ops = torch::jit::export_opnames(optimized_module);
97   std::cout << "\npt_operator_library(" << std::endl;
98   std::cout << "\tname = \"new_op_library\"," << std::endl;
99   std::cout << "\tops = [" << std::endl;
100   for (auto const& op: new_ops) {
101     std::cout << "\t\t\"" << op << "\"," << std::endl;
102   }
103   std::cout << "\t],\n)\n" << std::endl;
104   optimized_module._save_for_mobile(output_model_name);
105   std::cout << "The optimized model for lite interpreter was saved to " << output_model_name << std::endl;
106   return 0;
107 }
108