Nekshay committed on
Commit 8df9a5d · 1 Parent(s): 62538ea

Upload 4 files

Custom_Object_Detection.ipynb ADDED
The diff for this file is too large to render. See raw diff
 
export_tflite_graph_tf2.py ADDED
@@ -0,0 +1,152 @@
+ # Lint as: python2, python3
+ # Copyright 2020 The TensorFlow Authors. All Rights Reserved.
+ #
+ # Licensed under the Apache License, Version 2.0 (the "License");
+ # you may not use this file except in compliance with the License.
+ # You may obtain a copy of the License at
+ #
+ # http://www.apache.org/licenses/LICENSE-2.0
+ #
+ # Unless required by applicable law or agreed to in writing, software
+ # distributed under the License is distributed on an "AS IS" BASIS,
+ # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ # See the License for the specific language governing permissions and
+ # limitations under the License.
+ # ==============================================================================
+ r"""Exports TF2 detection SavedModel for conversion to TensorFlow Lite.
+ Link to the TF2 Detection Zoo:
+ https://github.com/tensorflow/models/blob/master/research/object_detection/g3doc/tf2_detection_zoo.md
+ The output folder will contain an intermediate SavedModel that can be used with
+ the TfLite converter.
+ NOTE: This only supports SSD meta-architectures for now.
+ One input:
+   image: a float32 tensor of shape [1, height, width, 3] containing the
+   *normalized* input image.
+ NOTE: See the `preprocess` function defined in the feature extractor class
+ in the object_detection/models directory.
+ Four Outputs:
+   detection_boxes: a float32 tensor of shape [1, num_boxes, 4] with box
+     locations
+   detection_classes: a float32 tensor of shape [1, num_boxes]
+     with class indices
+   detection_scores: a float32 tensor of shape [1, num_boxes]
+     with class scores
+   num_boxes: a float32 tensor of size 1 containing the number of detected boxes
+ Example Usage:
+ --------------
+ python object_detection/export_tflite_graph_tf2.py \
+     --pipeline_config_path path/to/ssd_model/pipeline.config \
+     --trained_checkpoint_dir path/to/ssd_model/checkpoint \
+     --output_directory path/to/exported_model_directory
+ The expected output SavedModel would be in the directory
+ path/to/exported_model_directory (which is created if it does not exist).
+ Config overrides (see the `config_override` flag) are text protobufs
+ (also of type pipeline_pb2.TrainEvalPipelineConfig) which are used to override
+ certain fields in the provided pipeline_config_path. These are useful for
+ making small changes to the inference graph that differ from the training or
+ eval config.
+ Example Usage 1 (in which we change the NMS iou_threshold to be 0.5 and
+ NMS score_threshold to be 0.0):
+ python object_detection/export_tflite_graph_tf2.py \
+     --pipeline_config_path path/to/ssd_model/pipeline.config \
+     --trained_checkpoint_dir path/to/ssd_model/checkpoint \
+     --output_directory path/to/exported_model_directory \
+     --config_override " \
+       model{ \
+         ssd{ \
+           post_processing { \
+             batch_non_max_suppression { \
+               score_threshold: 0.0 \
+               iou_threshold: 0.5 \
+             } \
+           } \
+         } \
+       } \
+       "
+ Example Usage 2 (export CenterNet model for keypoint estimation task with fixed
+ shape resizer and customized input resolution):
+ python object_detection/export_tflite_graph_tf2.py \
+     --pipeline_config_path path/to/ssd_model/pipeline.config \
+     --trained_checkpoint_dir path/to/ssd_model/checkpoint \
+     --output_directory path/to/exported_model_directory \
+     --keypoint_label_map_path path/to/label_map.txt \
+     --max_detections 10 \
+     --centernet_include_keypoints true \
+     --config_override " \
+       model{ \
+         center_net { \
+           image_resizer { \
+             fixed_shape_resizer { \
+               height: 320 \
+               width: 320 \
+             } \
+           } \
+         } \
+       }" \
+ """
+ from absl import app
+ from absl import flags
+
+ import tensorflow.compat.v2 as tf
+ from google.protobuf import text_format
+ from object_detection import export_tflite_graph_lib_tf2
+ from object_detection.protos import pipeline_pb2
+
+ tf.enable_v2_behavior()
+
+ FLAGS = flags.FLAGS
+
+ flags.DEFINE_string(
+     'pipeline_config_path', None,
+     'Path to a pipeline_pb2.TrainEvalPipelineConfig config '
+     'file.')
+ flags.DEFINE_string('trained_checkpoint_dir', None,
+                     'Path to trained checkpoint directory')
+ flags.DEFINE_string('output_directory', None, 'Path to write outputs.')
+ flags.DEFINE_string(
+     'config_override', '', 'pipeline_pb2.TrainEvalPipelineConfig '
+     'text proto to override pipeline_config_path.')
+ flags.DEFINE_integer('max_detections', 10,
+                      'Maximum number of detections (boxes) to return.')
+ # SSD-specific flags
+ flags.DEFINE_bool(
+     'ssd_use_regular_nms', False,
+     'Flag to set postprocessing op to use Regular NMS instead of Fast NMS '
+     '(Default false).')
+ # CenterNet-specific flags
+ flags.DEFINE_bool(
+     'centernet_include_keypoints', False,
+     'Whether to export the predicted keypoint tensors. Only CenterNet model'
+     ' supports this flag.'
+ )
+ flags.DEFINE_string(
+     'keypoint_label_map_path', None,
+     'Path of the label map used by CenterNet keypoint estimation task. If'
+     ' provided, the label map path in the pipeline config will be replaced by'
+     ' this one. Note that it is only used when exporting CenterNet model for'
+     ' keypoint estimation task.'
+ )
+
+
+ def main(argv):
+   del argv  # Unused.
+   flags.mark_flag_as_required('pipeline_config_path')
+   flags.mark_flag_as_required('trained_checkpoint_dir')
+   flags.mark_flag_as_required('output_directory')
+
+   pipeline_config = pipeline_pb2.TrainEvalPipelineConfig()
+
+   with tf.io.gfile.GFile(FLAGS.pipeline_config_path, 'r') as f:
+     text_format.Parse(f.read(), pipeline_config)
+   override_config = pipeline_pb2.TrainEvalPipelineConfig()
+   text_format.Parse(FLAGS.config_override, override_config)
+   pipeline_config.MergeFrom(override_config)
+
+   export_tflite_graph_lib_tf2.export_tflite_model(
+       pipeline_config, FLAGS.trained_checkpoint_dir, FLAGS.output_directory,
+       FLAGS.max_detections, FLAGS.ssd_use_regular_nms,
+       FLAGS.centernet_include_keypoints, FLAGS.keypoint_label_map_path)
+
+
+ if __name__ == '__main__':
+   app.run(main)
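
The SavedModel written by this script is only an intermediate artifact; as the docstring notes, it still has to be run through the TFLite converter. A minimal sketch of that follow-up step, assuming the default saved_model/ subdirectory layout under the output directory used above and no post-training quantization (all paths are placeholders):

# Sketch: convert the intermediate SavedModel produced by export_tflite_graph_tf2.py
# into a .tflite file. Paths and options are illustrative, not prescribed by this repo.
import tensorflow as tf

converter = tf.lite.TFLiteConverter.from_saved_model(
    'path/to/exported_model_directory/saved_model')
# Optional: enable default post-training optimizations.
# converter.optimizations = [tf.lite.Optimize.DEFAULT]
tflite_model = converter.convert()

with open('path/to/exported_model_directory/model.tflite', 'wb') as f:
  f.write(tflite_model)

The resulting .tflite model then expects the normalized float32 [1, height, width, 3] input described in the docstring above.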
exporter_main_v2.py ADDED
@@ -0,0 +1,159 @@
+ # Lint as: python2, python3
+ # Copyright 2020 The TensorFlow Authors. All Rights Reserved.
+ #
+ # Licensed under the Apache License, Version 2.0 (the "License");
+ # you may not use this file except in compliance with the License.
+ # You may obtain a copy of the License at
+ #
+ # http://www.apache.org/licenses/LICENSE-2.0
+ #
+ # Unless required by applicable law or agreed to in writing, software
+ # distributed under the License is distributed on an "AS IS" BASIS,
+ # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ # See the License for the specific language governing permissions and
+ # limitations under the License.
+ # ==============================================================================
+
+ r"""Tool to export an object detection model for inference.
+
+ Prepares an object detection tensorflow graph for inference using model
+ configuration and a trained checkpoint. Outputs associated checkpoint files,
+ a SavedModel, and a copy of the model config.
+
+ The inference graph contains one of four input nodes depending on the user
+ specified option.
+ * `image_tensor`: Accepts a uint8 4-D tensor of shape [1, None, None, 3]
+ * `float_image_tensor`: Accepts a float32 4-D tensor of shape
+   [1, None, None, 3]
+ * `encoded_image_string_tensor`: Accepts a 1-D string tensor of shape [None]
+   containing encoded PNG or JPEG images. Image resolutions are expected to be
+   the same if more than 1 image is provided.
+ * `tf_example`: Accepts a 1-D string tensor of shape [None] containing
+   serialized TFExample protos. Image resolutions are expected to be the same
+   if more than 1 image is provided.
+
+ and the following output nodes returned by the model.postprocess(..):
+ * `num_detections`: Outputs float32 tensors of the form [batch]
+   that specifies the number of valid boxes per image in the batch.
+ * `detection_boxes`: Outputs float32 tensors of the form
+   [batch, num_boxes, 4] containing detected boxes.
+ * `detection_scores`: Outputs float32 tensors of the form
+   [batch, num_boxes] containing class scores for the detections.
+ * `detection_classes`: Outputs float32 tensors of the form
+   [batch, num_boxes] containing classes for the detections.
+
+
+ Example Usage:
+ --------------
+ python exporter_main_v2.py \
+     --input_type image_tensor \
+     --pipeline_config_path path/to/ssd_inception_v2.config \
+     --trained_checkpoint_dir path/to/checkpoint \
+     --output_directory path/to/exported_model_directory \
+     --use_side_inputs True/False \
+     --side_input_shapes dim_0,dim_1,...dim_a/.../dim_0,dim_1,...,dim_z \
+     --side_input_names name_a,name_b,...,name_c \
+     --side_input_types type_1,type_2
+
+ The expected output would be in the directory
+ path/to/exported_model_directory (which is created if it does not exist)
+ holding two subdirectories (corresponding to checkpoint and SavedModel,
+ respectively) and a copy of the pipeline config.
+
+ Config overrides (see the `config_override` flag) are text protobufs
+ (also of type pipeline_pb2.TrainEvalPipelineConfig) which are used to override
+ certain fields in the provided pipeline_config_path. These are useful for
+ making small changes to the inference graph that differ from the training or
+ eval config.
+
+ Example Usage (in which we change the second stage post-processing score
+ threshold to be 0.5):
+
+ python exporter_main_v2.py \
+     --input_type image_tensor \
+     --pipeline_config_path path/to/ssd_inception_v2.config \
+     --trained_checkpoint_dir path/to/checkpoint \
+     --output_directory path/to/exported_model_directory \
+     --config_override " \
+       model{ \
+         faster_rcnn { \
+           second_stage_post_processing { \
+             batch_non_max_suppression { \
+               score_threshold: 0.5 \
+             } \
+           } \
+         } \
+       }"
+
+ If side inputs are desired, the following arguments could be appended
+ (the example below is for Context R-CNN).
+     --use_side_inputs True \
+     --side_input_shapes 1,2000,2057/1 \
+     --side_input_names context_features,valid_context_size \
+     --side_input_types tf.float32,tf.int32
+ """
+ from absl import app
+ from absl import flags
+
+ import tensorflow.compat.v2 as tf
+ from google.protobuf import text_format
+ from object_detection import exporter_lib_v2
+ from object_detection.protos import pipeline_pb2
+
+ tf.enable_v2_behavior()
+
+
+ FLAGS = flags.FLAGS
+
+ flags.DEFINE_string('input_type', 'image_tensor', 'Type of input node. Can be '
+                     'one of [`image_tensor`, `encoded_image_string_tensor`, '
+                     '`tf_example`, `float_image_tensor`]')
+ flags.DEFINE_string('pipeline_config_path', None,
+                     'Path to a pipeline_pb2.TrainEvalPipelineConfig config '
+                     'file.')
+ flags.DEFINE_string('trained_checkpoint_dir', None,
+                     'Path to trained checkpoint directory')
+ flags.DEFINE_string('output_directory', None, 'Path to write outputs.')
+ flags.DEFINE_string('config_override', '',
+                     'pipeline_pb2.TrainEvalPipelineConfig '
+                     'text proto to override pipeline_config_path.')
+ flags.DEFINE_boolean('use_side_inputs', False,
+                      'If True, uses side inputs as well as image inputs.')
+ flags.DEFINE_string('side_input_shapes', '',
+                     'If use_side_inputs is True, this explicitly sets '
+                     'the shape of the side input tensors to a fixed size. The '
+                     'dimensions are to be provided as a comma-separated list '
+                     'of integers. A value of -1 can be used for unknown '
+                     'dimensions. A `/` denotes a break, starting the shape of '
+                     'the next side input tensor. This flag is required if '
+                     'using side inputs.')
+ flags.DEFINE_string('side_input_types', '',
+                     'If use_side_inputs is True, this explicitly sets '
+                     'the type of the side input tensors. The '
+                     'dimensions are to be provided as a comma-separated list '
+                     'of types, each of `string`, `integer`, or `float`. '
+                     'This flag is required if using side inputs.')
+ flags.DEFINE_string('side_input_names', '',
+                     'If use_side_inputs is True, this explicitly sets '
+                     'the names of the side input tensors required by the model '
+                     'assuming the names will be a comma-separated list of '
+                     'strings. This flag is required if using side inputs.')
+
+ flags.mark_flag_as_required('pipeline_config_path')
+ flags.mark_flag_as_required('trained_checkpoint_dir')
+ flags.mark_flag_as_required('output_directory')
+
+
+ def main(_):
+   pipeline_config = pipeline_pb2.TrainEvalPipelineConfig()
+   with tf.io.gfile.GFile(FLAGS.pipeline_config_path, 'r') as f:
+     text_format.Merge(f.read(), pipeline_config)
+   text_format.Merge(FLAGS.config_override, pipeline_config)
+   exporter_lib_v2.export_inference_graph(
+       FLAGS.input_type, pipeline_config, FLAGS.trained_checkpoint_dir,
+       FLAGS.output_directory, FLAGS.use_side_inputs, FLAGS.side_input_shapes,
+       FLAGS.side_input_types, FLAGS.side_input_names)
+
+
+ if __name__ == '__main__':
+   app.run(main)
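
Once exported, the SavedModel under output_directory/saved_model can be loaded directly for inference. A minimal sketch, assuming the model was exported with --input_type image_tensor and calling the loaded object's default serving signature; paths and the image file name are placeholders:

# Sketch: run the exported detection SavedModel on one image.
import tensorflow as tf

detect_fn = tf.saved_model.load('path/to/exported_model_directory/saved_model')

# Build a uint8 [1, H, W, 3] batch as expected by the image_tensor input node.
image = tf.io.decode_image(tf.io.read_file('test.jpg'), channels=3)
input_tensor = tf.expand_dims(tf.cast(image, tf.uint8), axis=0)

detections = detect_fn(input_tensor)
num = int(detections['num_detections'][0])
boxes = detections['detection_boxes'][0, :num].numpy()      # [num, 4]
scores = detections['detection_scores'][0, :num].numpy()    # [num]
classes = detections['detection_classes'][0, :num].numpy()  # [num]

The dictionary keys correspond to the four output nodes listed in the docstring above.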
model_main_tf2.py ADDED
@@ -0,0 +1,113 @@
+ # Lint as: python3
+ # Copyright 2020 The TensorFlow Authors. All Rights Reserved.
+ #
+ # Licensed under the Apache License, Version 2.0 (the "License");
+ # you may not use this file except in compliance with the License.
+ # You may obtain a copy of the License at
+ #
+ # http://www.apache.org/licenses/LICENSE-2.0
+ #
+ # Unless required by applicable law or agreed to in writing, software
+ # distributed under the License is distributed on an "AS IS" BASIS,
+ # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ # See the License for the specific language governing permissions and
+ # limitations under the License.
+ # ==============================================================================
+
+ r"""Creates and runs TF2 object detection models.
+
+ For local training/evaluation run:
+ PIPELINE_CONFIG_PATH=path/to/pipeline.config
+ MODEL_DIR=/tmp/model_outputs
+ NUM_TRAIN_STEPS=10000
+ SAMPLE_1_OF_N_EVAL_EXAMPLES=1
+ python model_main_tf2.py -- \
+   --model_dir=$MODEL_DIR --num_train_steps=$NUM_TRAIN_STEPS \
+   --sample_1_of_n_eval_examples=$SAMPLE_1_OF_N_EVAL_EXAMPLES \
+   --pipeline_config_path=$PIPELINE_CONFIG_PATH \
+   --alsologtostderr
+ """
+ from absl import flags
+ import tensorflow.compat.v2 as tf
+ from object_detection import model_lib_v2
+
+ flags.DEFINE_string('pipeline_config_path', None, 'Path to pipeline config '
+                     'file.')
+ flags.DEFINE_integer('num_train_steps', None, 'Number of train steps.')
+ flags.DEFINE_bool('eval_on_train_data', False, 'Enable evaluating on train '
+                   'data (only supported in distributed training).')
+ flags.DEFINE_integer('sample_1_of_n_eval_examples', None, 'Will sample one of '
+                      'every n eval input examples, where n is provided.')
+ flags.DEFINE_integer('sample_1_of_n_eval_on_train_examples', 5, 'Will sample '
+                      'one of every n train input examples for evaluation, '
+                      'where n is provided. This is only used if '
+                      '`eval_training_data` is True.')
+ flags.DEFINE_string(
+     'model_dir', None, 'Path to output model directory '
+     'where event and checkpoint files will be written.')
+ flags.DEFINE_string(
+     'checkpoint_dir', None, 'Path to directory holding a checkpoint. If '
+     '`checkpoint_dir` is provided, this binary operates in eval-only mode, '
+     'writing resulting metrics to `model_dir`.')
+
+ flags.DEFINE_integer('eval_timeout', 3600, 'Number of seconds to wait for an '
+                      'evaluation checkpoint before exiting.')
+
+ flags.DEFINE_bool('use_tpu', False, 'Whether the job is executing on a TPU.')
+ flags.DEFINE_string(
+     'tpu_name',
+     default=None,
+     help='Name of the Cloud TPU for Cluster Resolvers.')
+ flags.DEFINE_integer(
+     'num_workers', 1, 'When num_workers > 1, training uses '
+     'MultiWorkerMirroredStrategy. When num_workers = 1 it uses '
+     'MirroredStrategy.')
+ flags.DEFINE_integer(
+     'checkpoint_every_n', 1000, 'Integer defining how often we checkpoint.')
+ flags.DEFINE_boolean('record_summaries', True,
+                      ('Whether or not to record summaries during'
+                       ' training.'))
+
+ FLAGS = flags.FLAGS
+
+
+ def main(unused_argv):
+   flags.mark_flag_as_required('model_dir')
+   flags.mark_flag_as_required('pipeline_config_path')
+   tf.config.set_soft_device_placement(True)
+
+   if FLAGS.checkpoint_dir:
+     model_lib_v2.eval_continuously(
+         pipeline_config_path=FLAGS.pipeline_config_path,
+         model_dir=FLAGS.model_dir,
+         train_steps=FLAGS.num_train_steps,
+         sample_1_of_n_eval_examples=FLAGS.sample_1_of_n_eval_examples,
+         sample_1_of_n_eval_on_train_examples=(
+             FLAGS.sample_1_of_n_eval_on_train_examples),
+         checkpoint_dir=FLAGS.checkpoint_dir,
+         wait_interval=300, timeout=FLAGS.eval_timeout)
+   else:
+     if FLAGS.use_tpu:
+       # TPU is automatically inferred if tpu_name is None and
+       # we are running under cloud ai-platform.
+       resolver = tf.distribute.cluster_resolver.TPUClusterResolver(
+           FLAGS.tpu_name)
+       tf.config.experimental_connect_to_cluster(resolver)
+       tf.tpu.experimental.initialize_tpu_system(resolver)
+       strategy = tf.distribute.experimental.TPUStrategy(resolver)
+     elif FLAGS.num_workers > 1:
+       strategy = tf.distribute.experimental.MultiWorkerMirroredStrategy()
+     else:
+       strategy = tf.compat.v2.distribute.MirroredStrategy()
+
+     with strategy.scope():
+       model_lib_v2.train_loop(
+           pipeline_config_path=FLAGS.pipeline_config_path,
+           model_dir=FLAGS.model_dir,
+           train_steps=FLAGS.num_train_steps,
+           use_tpu=FLAGS.use_tpu,
+           checkpoint_every_n=FLAGS.checkpoint_every_n,
+           record_summaries=FLAGS.record_summaries)
+
+ if __name__ == '__main__':
+   tf.compat.v1.app.run()
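
The docstring only shows the training invocation; the `checkpoint_dir` flag defined above switches the binary into eval-only mode. A sketch of that invocation in the same style as the docstring example; the paths are placeholders, and pointing --checkpoint_dir at the training model_dir is just one common choice rather than something this file prescribes:

PIPELINE_CONFIG_PATH=path/to/pipeline.config
MODEL_DIR=/tmp/model_outputs
python model_main_tf2.py -- \
  --model_dir=$MODEL_DIR \
  --pipeline_config_path=$PIPELINE_CONFIG_PATH \
  --checkpoint_dir=$MODEL_DIR \
  --alsologtostderr

In this mode the script calls model_lib_v2.eval_continuously, waiting up to --eval_timeout seconds for new checkpoints and writing metrics to --model_dir.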