-
Notifications
You must be signed in to change notification settings - Fork 1
Expand file tree
/
Copy pathexport_inference_graph.py
More file actions
77 lines (66 loc) · 2.05 KB
/
export_inference_graph.py
File metadata and controls
77 lines (66 loc) · 2.05 KB
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
'''
Export an object-detection inference graph, overriding the SSD
post-processing NMS thresholds from the command line.

Example Usage:
--------------
python export_inference_graph.py \
    --conf path/to/pipeline.config \
    --ckpt path/to/model.ckpt \
    --output_dir path/to/exported_model_directory \
    --score_th 0.5 \
    --iou_th 0.6
'''
import tensorflow as tf
from google.protobuf import text_format
from object_detection import exporter
from object_detection.protos import pipeline_pb2
slim = tf.contrib.slim
flags = tf.app.flags
flags.DEFINE_string('conf',
None,
'Path to a config file.')
flags.DEFINE_string('ckpt',
None,
'Path to checkpoint prefix file')
flags.DEFINE_string('output_dir',
None,
'Path to write outputs.')
flags.DEFINE_string('score_th',
0.25,
'Score threshold to use in the post_processing')
flags.DEFINE_string('iou_th',
0.6,
'IOU to use in the post_processing phase')
FLAGS = flags.FLAGS
def main(_):
pipeline_config = pipeline_pb2.TrainEvalPipelineConfig()
config_override = "model{ \
ssd { \
post_processing { \
batch_non_max_suppression { \
score_threshold: %s \
iou_threshold: %s \
max_detections_per_class: 20 \
max_total_detections: 20 \
} \
} \
} \
}" % (FLAGS.score_th,FLAGS.iou_th)
with tf.gfile.GFile(FLAGS.conf, 'r') as f:
text_format.Merge(f.read(), pipeline_config)
text_format.Merge(config_override, pipeline_config)
exporter.export_inference_graph(
'image_tensor',
pipeline_config,
FLAGS.ckpt,
FLAGS.output_dir,
None)
if __name__ == '__main__':
tf.app.run()