agent_zoo/RoboschoolAnt_v1_2017jul.py [32:76]:
- - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
            x = tf.matmul(x, final_w) + final_b
            pi = x
            self.pi = pi

        if take_weights_here is None:
            take_weights_here = {}
            with open(os.path.splitext(__file__)[0] + ".weights") as f:
                exec(f.read(), take_weights_here)
        self.assigns = [
            (  dense1_w, take_weights_here["weights_dense1_w"]),
            (  dense1_b, take_weights_here["weights_dense1_b"]),
            (  dense2_w, take_weights_here["weights_dense2_w"]),
            (  dense2_b, take_weights_here["weights_dense2_b"]),
            (   final_w, take_weights_here["weights_final_w"]),
            (   final_b, take_weights_here["weights_final_b"]),
        ]

        self.weight_assignment_placeholders = []
        self.weight_assignment_nodes = []
        for var, w in self.assigns:
            ph = tf.placeholder(tf.float32, w.shape)
            self.weight_assignment_placeholders.append(ph)
            self.weight_assignment_nodes.append( tf.assign(var, ph) )

        self.load_weights()

    def load_weights(self):
        feed_dict = {}
        for (var, w), ph in zip(self.assigns, self.weight_assignment_placeholders):
            feed_dict[ph] = w
        tf.get_default_session().run(self.weight_assignment_nodes, feed_dict=feed_dict)

    def act(self, obs_data, cx):
        obs_data = [np.ones((1,)), obs_data]  # the leading ones array feeds the first placeholder in self.obs_tuple
        # Because we need a batch dimension, data[None] changes the shape from [A] to [1, A]
        a = tf.get_default_session().run(
            self.pi,
            feed_dict={ph: data[None] for ph, data in zip(self.obs_tuple, obs_data)})
        return a[0]  # return first in batch


def demo_run():
    config = tf.ConfigProto(
        inter_op_parallelism_threads=1,
        intra_op_parallelism_threads=1,
        device_count = { "GPU": 0 } )
    sess = tf.InteractiveSession(config=config)
- - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
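
Background on the placeholder/assign pattern used in the excerpt above: running tf.assign against a placeholder and feeding the numpy arrays at run time keeps the weight values out of the serialized graph, whereas assigning a literal numpy array would embed it as a graph constant. A minimal standalone sketch of the same mechanism (TF1-style API; the names w_var, w_ph and the toy 3x2 shape are illustrative, not taken from the zoo file):

import numpy as np
import tensorflow as tf

# One variable standing in for a weight matrix such as dense1_w above.
w_var = tf.Variable(tf.zeros((3, 2)), name="w")
w_ph = tf.placeholder(tf.float32, (3, 2))   # value is fed at run time
assign_op = tf.assign(w_var, w_ph)          # no constant is baked into the graph

with tf.Session() as sess:
    sess.run(tf.global_variables_initializer())
    sess.run(assign_op, feed_dict={w_ph: np.ones((3, 2), dtype=np.float32)})
    print(sess.run(w_var))                  # -> a 3x2 array of ones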



agent_zoo/RoboschoolHumanoidFlagrun_v1_2017jul.py [32:76]:
- - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
            x = tf.matmul(x, final_w) + final_b
            pi = x
            self.pi = pi

        if take_weights_here is None:
            take_weights_here = {}
            with open(os.path.splitext(__file__)[0] + ".weights") as f:
                exec(f.read(), take_weights_here)
        self.assigns = [
            (  dense1_w, take_weights_here["weights_dense1_w"]),
            (  dense1_b, take_weights_here["weights_dense1_b"]),
            (  dense2_w, take_weights_here["weights_dense2_w"]),
            (  dense2_b, take_weights_here["weights_dense2_b"]),
            (   final_w, take_weights_here["weights_final_w"]),
            (   final_b, take_weights_here["weights_final_b"]),
        ]

        self.weight_assignment_placeholders = []
        self.weight_assignment_nodes = []
        for var, w in self.assigns:
            ph = tf.placeholder(tf.float32, w.shape)
            self.weight_assignment_placeholders.append(ph)
            self.weight_assignment_nodes.append( tf.assign(var, ph) )

        self.load_weights()

    def load_weights(self):
        feed_dict = {}
        for (var, w), ph in zip(self.assigns, self.weight_assignment_placeholders):
            feed_dict[ph] = w
        tf.get_default_session().run(self.weight_assignment_nodes, feed_dict=feed_dict)

    def act(self, obs_data, cx):
        obs_data = [np.ones((1,)), obs_data]  # the leading ones array feeds the first placeholder in self.obs_tuple
        # Because we need a batch dimension, data[None] changes the shape from [A] to [1, A]
        a = tf.get_default_session().run(
            self.pi,
            feed_dict={ph: data[None] for ph, data in zip(self.obs_tuple, obs_data)})
        return a[0]  # return first in batch


def demo_run():
    config = tf.ConfigProto(
        inter_op_parallelism_threads=1,
        intra_op_parallelism_threads=1,
        device_count = { "GPU": 0 } )
    sess = tf.InteractiveSession(config=config)
- - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
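
The second excerpt is identical to the first, so the placeholder/assign boilerplate could in principle live in one shared helper. A hypothetical sketch only (the names build_weight_assigns and run_weight_assigns are assumptions, not part of the zoo files):

import tensorflow as tf

def build_weight_assigns(assigns):
    # assigns: list of (tf.Variable, numpy array) pairs, as in self.assigns above.
    placeholders, nodes = [], []
    for var, w in assigns:
        ph = tf.placeholder(tf.float32, w.shape)
        placeholders.append(ph)
        nodes.append(tf.assign(var, ph))
    return placeholders, nodes

def run_weight_assigns(session, assigns, placeholders, nodes):
    # Feed each stored numpy array into its matching placeholder and run the assign ops.
    feed_dict = {ph: w for (_, w), ph in zip(assigns, placeholders)}
    session.run(nodes, feed_dict=feed_dict)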



