How to Set Up

Start multiple tf.train.Server instances, one per background process:

import tensorflow as tf
from multiprocessing import Process

# One job named 'local' with two tasks, addressed by host:port.
cluster = tf.train.ClusterSpec({'local': ['localhost:2222', 'localhost:2223']})

def worker(i):
    ''' Start worker i and keep it running. '''
    server = tf.train.Server(cluster, job_name='local', task_index=i)
    server.start()
    server.join()  # block: the server keeps serving until the process exits

if __name__ == '__main__':
    n_tasks = cluster.num_tasks('local')
    procs = [Process(target=worker, args=(i,), daemon=True)
             for i in range(n_tasks)]
    for p in procs:
        p.start()
    for p in procs:
        p.join()  # keep the parent alive; daemon children die when it exits
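
In practice, each server usually runs as its own OS process, often on its own machine. Here is a minimal standalone sketch of the same setup; the file name server.py and the command-line task index are assumptions for illustration, not part of the original:

import sys
import tensorflow as tf

cluster = tf.train.ClusterSpec({'local': ['localhost:2222', 'localhost:2223']})

# Which entry of the 'local' job this process serves, passed on the
# command line: e.g. `python server.py 0` and `python server.py 1`
# in two separate terminals.
task_index = int(sys.argv[1])

server = tf.train.Server(cluster, job_name='local', task_index=task_index)
server.join()  # block forever, serving graph execution requests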

Then run a separate client program that connects to the servers:

import tensorflow as tf

# Defining the ClusterSpec again documents the cluster, although this
# client snippet never actually uses the variable: device names are
# resolved by the server the session connects to.
cluster = tf.train.ClusterSpec({'local': ['localhost:2222', 'localhost:2223']})

x = tf.constant(2)

with tf.device('/job:local/task:1'):
    y2 = x - 66    # pinned to the second server

with tf.device('/job:local/task:0'):
    y1 = x + 300   # pinned to the first server
    y = y1 + y2    # the sum is also computed on the first server

with tf.Session('grpc://localhost:2222') as sess:
    print(sess.run(y))  # prints 238: (2 + 300) + (2 - 66)
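
To check where each op actually ran, you can turn on device placement logging when creating the session. tf.ConfigProto(log_device_placement=True) is the standard TF 1.x option for this; the rest of the client is unchanged, and the run should still print 238:

# Same client session, but with device placement logging enabled.
with tf.Session('grpc://localhost:2222',
                config=tf.ConfigProto(log_device_placement=True)) as sess:
    print(sess.run(y))  # placements are logged (server-side for remote sessions)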