#!/usr/bin/env python3
# Copyright    2023  Xiaomi Corp.        (authors: Fangjun Kuang)

# Please see ./run.sh for usage
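#
# A hypothetical invocation (the paths below are illustrative only;
# ./run.sh contains the actual command used in this recipe):
#
#   ./freeze_graph.py \
#     --model-dir=./pretrained_models/2stems \
#     --output-node-names=vocals_spectrogram/mul,accompaniment_spectrogram/mul \
#     --output-filename=./2stems.pb
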
import argparse
import os

import tensorflow as tf


# Code in the following function is modified from
# https://blog.metaflow.fr/tensorflow-how-to-freeze-a-model-and-serve-it-with-a-python-api-d4f3596b3adc
def freeze_graph(model_dir, output_node_names, output_filename):
    """Extract the sub graph defined by the output nodes and convert all its
    variables into constant

    Args:
      model_dir:
        the root folder containing the checkpoint state file
      output_node_names:
        a string, containing all the output node's names, comma separated
      output_filename:
        Filename to save the graph.
    """
    if not tf.compat.v1.gfile.Exists(model_dir):
        raise AssertionError(
            "Export directory doesn't exists. Please specify an export "
            "directory: %s" % model_dir
        )

    if not output_node_names:
        print("You need to supply the name of a node to --output_node_names.")
        return -1

    # We retrieve the full path of the checkpoint
    checkpoint = tf.train.get_checkpoint_state(model_dir)
    input_checkpoint = checkpoint.model_checkpoint_path

    # We specify the filename of the frozen graph
    output_graph = output_filename

    # We clear devices so that TensorFlow can decide on which device to place operations
    clear_devices = True

    # We start a session using a temporary fresh Graph
    with tf.compat.v1.Session(graph=tf.Graph()) as sess:
        # We import the meta graph in the current default Graph
        saver = tf.compat.v1.train.import_meta_graph(
            input_checkpoint + ".meta", clear_devices=clear_devices
        )

        # We restore the weights
        saver.restore(sess, input_checkpoint)

        # We use a built-in TF helper to export variables to constants
        output_graph_def = tf.compat.v1.graph_util.convert_variables_to_constants(
            sess,  # The session is used to retrieve the weights
            tf.compat.v1.get_default_graph().as_graph_def(),  # The graph_def is used to retrieve the nodes
            output_node_names.split(
                ","
            ),  # The output node names are used to select the useful nodes
        )

        # Finally we serialize and dump the output graph to the filesystem
        with tf.compat.v1.gfile.GFile(output_graph, "wb") as f:
            f.write(output_graph_def.SerializeToString())
        print("%d ops in the final graph." % len(output_graph_def.node))

    return output_graph_def

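# A minimal sketch (not part of the original script; filenames are illustrative)
# of how a frozen graph produced above could be loaded back for inference:
#
#   with tf.compat.v1.gfile.GFile("frozen.pb", "rb") as f:
#       graph_def = tf.compat.v1.GraphDef()
#       graph_def.ParseFromString(f.read())
#   with tf.Graph().as_default() as graph:
#       tf.import_graph_def(graph_def, name="")
#       # Individual tensors can then be looked up by name, e.g.
#       # graph.get_tensor_by_name("vocals_spectrogram/mul:0")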

if __name__ == "__main__":
    parser = argparse.ArgumentParser()
    parser.add_argument(
        "--model-dir", type=str, default="", help="Model folder to export"
    )
    parser.add_argument(
        "--output-node-names",
        type=str,
        default="vocals_spectrogram/mul,accompaniment_spectrogram/mul",
        help="The name of the output nodes, comma separated.",
    )

    parser.add_argument(
        "--output-filename",
        type=str,
        help="Filename to save the frozen graph to.",
    )
    args = parser.parse_args()

    freeze_graph(args.model_dir, args.output_node_names, args.output_filename)