// RUN: json_to_flatbuffer %p/test_schema.fbs %s | flatbuffer_translate --tflite-flatbuffer-to-mlir -o - | FileCheck %s
// RUN: json_to_flatbuffer %p/test_schema.fbs %s | flatbuffer_translate --tflite-flatbuffer-to-mlir -o - | flatbuffer_translate -mlir-to-tflite-flatbuffer - -o - | flatbuffer_to_string - | FileCheck --check-prefix=RoundTrip %s

// CHECK-DAG: %[[input_18:.*]] = "quantfork.stats"({{.*}}) {layerStats = dense<[-8.000000e-01, 1.600000e+00]> : tensor<2xf32>} : (tensor<1x4xf32>) -> tensor<1x4xf32>
// CHECK-DAG: %[[input_19:.*]] = "quantfork.stats"({{.*}}) {layerStats = dense<[-2.000000e+00, 4.000000e+00]> : tensor<2xf32>} : (tensor<1x2xf32>) -> tensor<1x2xf32>

// CHECK: "tfl.unidirectional_sequence_lstm"({{.*}}, %[[input_18]], %[[input_19]], %{{[0-9]+}}, %{{[0-9]+}}, %{{[0-9]+}}, %{{[0-9]+}})
// CHECK-SAME: effective_hidden_scale_intermediate = tensor<*x!quant.calibrated<f32<-5.000000e-01:5.000000e-01>>>
// CHECK-SAME: input_to_cell_intermediate = tensor<*x!quant.calibrated<f32<-4.000000e+00:4.000000e+00>>>
// CHECK-SAME: input_to_forget_intermediate = tensor<*x!quant.calibrated<f32<-1.600000e+01:1.600000e+01>>>
// CHECK-SAME: input_to_input_intermediate = tensor<*x!quant.calibrated<f32<-3.200000e+01:3.200000e+01>>>
// CHECK-SAME: input_to_output_intermediate = tensor<*x!quant.calibrated<f32<-1.000000e+00:1.000000e+00>>>

// Checks if calibrated type is exported back to quantized type.
// RoundTrip: name: "effective_hidden_scale_intermediate",
// RoundTrip-NEXT: quantization: {
// RoundTrip-NEXT: min: [ -0.5 ],
// RoundTrip-NEXT: max: [ 0.5 ]

{
  "version": 3,
  "operator_codes": [
    {
      "builtin_code": "UNIDIRECTIONAL_SEQUENCE_LSTM"
    }
  ],
  "subgraphs": [
    {
      "tensors": [
        {
          "shape": [1, 5, 2],
          "name": "input0"
        },
        {
          "shape": [2, 5],
          "buffer": 1,
          "name": "input2input_weights1"
        },
        {
          "shape": [2, 5],
          "buffer": 2,
          "name": "input2forget_weights2"
        },
        {
          "shape": [2, 5],
          "buffer": 3,
          "name": "input2cell_weights3"
        },
        {
          "shape": [2, 5],
          "buffer": 4,
          "name": "input2output_weights4"
        },
        {
          "shape": [2, 4],
          "buffer": 5,
          "name": "rec2input_weights5"
        },
        {
          "shape": [2, 4],
          "buffer": 6,
          "name": "rec2forget_weights6"
        },
        {
          "shape": [2, 4],
          "buffer": 7,
          "name": "rec2cell_weights7"
        },
        {
          "shape": [2, 4],
          "buffer": 8,
          "name": "rec2output_weights8"
        },
        {
          "shape": [2],
          "buffer": 9,
          "name": "cell2input_weights9"
        },
        {
          "shape": [2],
          "buffer": 10,
          "name": "cell2forget_weights10"
        },
        {
          "shape": [2],
          "buffer": 11,
          "name": "cell2output_weights11"
        },
        {
          "shape": [2],
          "buffer": 12,
          "name": "input_gate_bias12"
        },
        {
          "shape": [2],
          "buffer": 13,
          "name": "forget_gate_bias13"
        },
        {
          "shape": [2],
          "buffer": 14,
          "name": "cell_gate_bias14"
        },
        {
          "shape": [2],
          "buffer": 15,
          "name": "output_gate_bias15"
        },
        {
          "shape": [4, 2],
          "buffer": 16,
          "name": "proj_weights16"
        },
        {
          "shape": [4],
          "buffer": 17,
          "name": "proj_bias17"
        },
        {
          "shape": [1, 4],
          "name": "input_activation_state18",
          "is_variable": true,
          "quantization": {
            "min": [-0.8],
            "max": [1.6]
          }
        },
        {
          "shape": [1, 2],
          "name": "input_cell_state19",
          "is_variable": true,
          "quantization": {
            "min": [-2.0],
            "max": [4.0]
          }
        },
        {
          "shape": [2],
          "buffer": 18,
          "name": "input_norm20"
        },
        {
          "shape": [2],
          "buffer": 19,
          "name": "forget_norm21"
        },
        {
          "shape": [2],
          "buffer": 20,
          "name": "cell_norm22"
        },
        {
          "shape": [2],
          "buffer": 21,
          "name": "output_norm23"
        },
        {
          "shape": [],
          "name": "output24"
        },
        {
          "shape": [],
          "name": "intermediate_0",
          "is_variable": true,
          "quantization": {
            "min": [-32],
            "max": [32]
          }
        },
        {
          "shape": [],
          "name": "intermediate_1",
          "is_variable": true,
          "quantization": {
            "min": [-16],
            "max": [16]
          }
        },
        {
          "shape": [],
          "name": "intermediate_2",
          "is_variable": true,
          "quantization": {
            "min": [-4],
            "max": [4]
          }
        },
        {
          "shape": [],
          "name": "intermediate_3",
          "is_variable": true,
          "quantization": {
            "min": [-1.0],
            "max": [1.0]
          }
        },
        {
          "shape": [],
          "name": "intermediate_4",
          "is_variable": true,
          "quantization": {
            "min": [-0.5],
            "max": [0.5]
          }
        }
      ],
      "inputs": [0],
      "outputs": [24],
      "operators": [
        {
          "inputs": [
            0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16, 17, 18, 19, 20, 21, 22, 23
          ],
          "outputs": [24],
          "intermediates": [
            25, 26, 27, 28, 29
          ],
          "builtin_options_type": "UnidirectionalSequenceLSTMOptions",
          "builtin_options": {
            "fused_activation_function": "TANH",
            "cell_clip": 50.0
          },
          "mutating_variable_inputs": [
            false,
            false, false, false, false,
            false, false, false, false,
            false, false, false,
            false, false, false, false,
            true, true,
            false, false, false, false
          ]
        }
      ]
    }
  ],
  "buffers": [
    {
      "data": []
    },
    {
      "data": [
        36, 167, 168, 63, 0, 140, 72, 191, 120, 20, 147, 62, 20, 152, 196, 190, 121, 98, 82, 187, 95, 128, 213, 61, 189, 3, 138, 63, 54, 103, 13, 62, 46, 224, 66, 63, 157, 204, 180, 191
      ]
    },
    {
      "data": [
        223, 20, 21, 64, 246, 166, 31, 191, 6, 51, 157, 188, 114, 90, 167, 62, 118, 240, 59, 63, 49, 162, 255, 62, 17, 91, 160, 63, 32, 47, 26, 63, 40, 136, 178, 191, 243, 154, 236, 61
      ]
    },
    {
      "data": [
        137, 231, 86, 63, 41, 154, 16, 63, 239, 37, 77, 191, 55, 189, 24, 189, 86, 63, 18, 63, 42, 55, 13, 191, 110, 139, 138, 191, 219, 148, 181, 63, 71, 232, 108, 191, 66, 226, 145, 191
      ]
    },
    {
      "data": [
        245, 179, 225, 190, 51, 202, 176, 189, 132, 47, 53, 191, 155, 25, 50, 191, 197, 130, 240, 191, 98, 125, 45, 62, 243, 70, 83, 62, 85, 155, 139, 63, 113, 239, 11, 192, 35, 251, 139, 62
      ]
    },
    {
      "data": [
        248, 188, 211, 191, 142, 11, 73, 62, 36, 8, 84, 63, 186, 208, 11, 191, 76, 208, 190, 191, 223, 200, 210, 63, 183, 170, 103, 63, 116, 129, 145, 63
      ]
    },
    {
      "data": [
        235, 202, 222, 190, 159, 201, 112, 191, 217, 248, 166, 63, 165, 199, 131, 191, 130, 59, 47, 63, 179, 11, 186, 62, 55, 168, 18, 192, 152, 213, 26, 64
      ]
    },
    {
      "data": [
        245, 123, 138, 62, 213, 106, 231, 59, 211, 218, 250, 62, 25, 157, 134, 63, 147, 22, 164, 63, 25, 221, 139, 62, 1, 230, 247, 62, 210, 185, 142, 63
      ]
    },
    {
      "data": [
        197, 123, 23, 192, 45, 96, 178, 190, 174, 87, 165, 62, 213, 225, 200, 191, 119, 248, 15, 191, 128, 125, 171, 189, 90, 125, 222, 63, 4, 76, 95, 62
      ]
    },
    {
      "data": [
        210, 73, 183, 63, 248, 177, 13, 191
      ]
    },
    {
      "data": [
        78, 251, 212, 191, 169, 29, 147, 63
      ]
    },
    {
      "data": [
        178, 227, 203, 191, 247, 155, 103, 63
      ]
    },
    {
      "data": [
        206, 111, 165, 190, 153, 77, 227, 63
      ]
    },
    {
      "data": [
        255, 114, 132, 191, 253, 202, 140, 191
      ]
    },
    {
      "data": [
        90, 247, 1, 192, 125, 120, 209, 191
      ]
    },
    {
      "data": [
        65, 75, 243, 191, 58, 122, 146, 190
      ]
    },
    {
      "data": [
        40, 135, 20, 63, 109, 50, 220, 191, 56, 241, 189, 63, 65, 12, 92, 63, 61, 14, 162, 62, 157, 138, 81, 63, 125, 61, 191, 61, 102, 231, 20, 63
      ]
    },
    {
      "data": [
        145, 79, 49, 189, 175, 235, 220, 190, 182, 111, 157, 190, 144, 236, 97, 191
      ]
    },
    {
      "data": [
        76, 188, 109, 63, 228, 150, 201, 190
      ]
    },
    {
      "data": [
        6, 146, 66, 191, 122, 127, 100, 191
      ]
    },
    {
      "data": [
        216, 59, 169, 190, 161, 178, 215, 191
      ]
    },
    {
      "data": [
        208, 144, 101, 191, 127, 233, 195, 190
      ]
    }
  ]
}