/aosp_15_r20/external/tensorflow/tensorflow/python/distribute/
parameter_server_strategy.py
    362  num_input_pipelines = multi_worker_util.worker_count(
    366  num_input_pipelines = 1
    368  num_input_pipelines=num_input_pipelines,
    383  num_input_pipelines = multi_worker_util.worker_count(
    387  num_input_pipelines = 1
    390  num_input_pipelines=num_input_pipelines,
input_lib_type_spec_test.py
    286  dataset = dataset.shard(input_context.num_input_pipelines,
    426  dataset = dataset.shard(ctx.num_input_pipelines, ctx.input_pipeline_id)
    537  dataset = dataset.shard(input_context.num_input_pipelines,
    600  dataset = dataset.shard(ctx.num_input_pipelines, ctx.input_pipeline_id)
    661  dataset = dataset.shard(ctx.num_input_pipelines, ctx.input_pipeline_id)
    722  dataset = dataset.shard(ctx.num_input_pipelines, ctx.input_pipeline_id)
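The `dataset.shard(ctx.num_input_pipelines, ctx.input_pipeline_id)` calls above are the usual per-worker sharding idiom: each input pipeline keeps only its own slice of the data. A minimal sketch of how that idiom is typically wired into `distribute_datasets_from_function` (the strategy, dataset, and batch size below are illustrative assumptions, not taken from these files):

```python
import tensorflow as tf

strategy = tf.distribute.MirroredStrategy()  # assumed strategy, for illustration only
GLOBAL_BATCH_SIZE = 64                       # assumed global batch size


def dataset_fn(input_context):
  # Each input pipeline (worker) keeps a disjoint shard, selected by its
  # pipeline id out of the total number of input pipelines.
  per_replica_batch = input_context.get_per_replica_batch_size(GLOBAL_BATCH_SIZE)
  dataset = tf.data.Dataset.range(1024)
  dataset = dataset.shard(input_context.num_input_pipelines,
                          input_context.input_pipeline_id)
  return dataset.batch(per_replica_batch)


dist_dataset = strategy.distribute_datasets_from_function(dataset_fn)
```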
distribute_lib_test.py
    580  num_input_pipelines=2, input_pipeline_id=1, num_replicas_in_sync=6)
    583  self.assertEqual(2, input_context.num_input_pipelines)
    587  num_input_pipelines=2, input_pipeline_id=1, num_replicas_in_sync=6)
    594  num_input_pipelines=1, input_pipeline_id=0, num_replicas_in_sync=42)
    599  num_input_pipelines=3, input_pipeline_id=1, num_replicas_in_sync=42)
input_lib_test.py
    87    num_input_pipelines=input_workers.num_workers,
    1121  return dataset.shard(ctx.num_input_pipelines, ctx.input_pipeline_id)
    1239  dataset = dataset.shard(ctx.num_input_pipelines, ctx.input_pipeline_id)
    1284  return dataset.shard(ctx.num_input_pipelines, ctx.input_pipeline_id)
    1840  num_input_pipelines=num_workers,
    1901  num_input_pipelines=num_workers,
distribute_lib.py
    494  num_input_pipelines=1,                    (argument)
    505  self._num_input_pipelines = num_input_pipelines
    520  def num_input_pipelines(self):            (member in InputContext)
    546  self.input_pipeline_id, self.num_input_pipelines)
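The `distribute_lib.py` hits above are the definition site: `num_input_pipelines` is a constructor argument of `tf.distribute.InputContext` (defaulting to 1, per line 494) and a read-only property. A small sketch of constructing and reading it, mirroring the values exercised in `distribute_lib_test.py`:

```python
import tensorflow as tf

# Values mirror the distribute_lib_test.py hits: 2 pipelines, pipeline id 1, 6 replicas.
input_context = tf.distribute.InputContext(
    num_input_pipelines=2, input_pipeline_id=1, num_replicas_in_sync=6)

assert input_context.num_input_pipelines == 2
assert input_context.input_pipeline_id == 1
assert input_context.num_replicas_in_sync == 6

# With no arguments, the context describes a single input pipeline.
assert tf.distribute.InputContext().num_input_pipelines == 1
```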
mirrored_strategy.py
    557  num_input_pipelines=num_workers,
    589  num_input_pipelines=num_workers,
strategy_common_test.py
    629  return d.shard(input_context.num_input_pipelines,
    674  return dataset.shard(input_context.num_input_pipelines,
tpu_strategy.py
    955   num_input_pipelines=num_workers,
    1021  num_input_pipelines=num_workers,
parameter_server_strategy_v2.py
    891  num_input_pipelines=num_input_pipelines_in_sync,
distributed_table_test.py
    100  dataset = dataset.shard(input_context.num_input_pipelines,
input_lib.py
    1132  num_workers = input_context.num_input_pipelines if input_context else len(
    1145  input_context.num_input_pipelines,
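The `input_lib.py` hit at 1132 shows the fallback used when no `InputContext` is supplied: the worker count comes from the context if one exists, otherwise from the device list. A hedged sketch of that pattern (`worker_devices` here is an illustrative stand-in, not the actual variable in `input_lib.py`):

```python
def _num_workers(input_context, worker_devices):
  # Prefer the number of input pipelines reported by the context; otherwise
  # fall back to counting worker devices, as the input_lib.py hit does.
  return (input_context.num_input_pipelines
          if input_context else len(worker_devices))
```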
collective_all_reduce_strategy.py
    644  num_input_pipelines=self._num_workers,
strategy_test_lib.py
    345  input_context.num_input_pipelines)
tpu_strategy_test.py
    292  dataset = dataset.shard(input_context.num_input_pipelines,
/aosp_15_r20/external/tensorflow/tensorflow/tools/api/golden/v2/
tensorflow.distribute.-input-context.pbtxt
    10  name: "num_input_pipelines"
    19  …argspec: "args=[\'self\', \'num_input_pipelines\', \'input_pipeline_id\', \'num_replicas_in_sync\'…
/aosp_15_r20/external/tensorflow/tensorflow/tools/api/golden/v1/
tensorflow.distribute.-input-context.pbtxt
    10  name: "num_input_pipelines"
    19  …argspec: "args=[\'self\', \'num_input_pipelines\', \'input_pipeline_id\', \'num_replicas_in_sync\'…