# Check TPU availability and set up the Strategy
import tensorflow as tf

try:
    # TPU detection. No parameters are necessary if the TPU_NAME environment
    # variable is set, which is always the case on Kaggle.
    tpu = tf.distribute.cluster_resolver.TPUClusterResolver()
    print("Running on TPU ", tpu.master())
except ValueError:
    tpu = None

if tpu:
    tf.config.experimental_connect_to_cluster(tpu)
    tf.tpu.experimental.initialize_tpu_system(tpu)
    strategy = tf.distribute.experimental.TPUStrategy(tpu)
else:
    # Default distribution strategy in TensorFlow; works on CPU and a single GPU.
    strategy = tf.distribute.get_strategy()

print("REPLICAS: ", strategy.num_replicas_in_sync)

# Sample output when a TPU is attached:
# Running on TPU grpc://10.0.0.2:8470
# REPLICAS: 8
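Once `strategy` is available, anything that creates model variables should run inside `strategy.scope()` so the weights are replicated across all TPU cores (or placed normally on CPU/GPU with the default strategy), and the global batch size is usually scaled by `strategy.num_replicas_in_sync`. The following is a minimal sketch of that pattern; the model architecture, batch size, and dummy data are illustrative assumptions, not part of the original notebook.

```python
import numpy as np

# Scale the global batch size with the number of replicas (8 on a Kaggle TPU).
BATCH_SIZE_PER_REPLICA = 16  # assumed value for illustration
GLOBAL_BATCH_SIZE = BATCH_SIZE_PER_REPLICA * strategy.num_replicas_in_sync

with strategy.scope():
    # Variables created inside this scope (layer weights, optimizer slots)
    # are mirrored across all replicas managed by the strategy.
    model = tf.keras.Sequential([
        tf.keras.layers.Dense(128, activation="relu", input_shape=(32,)),
        tf.keras.layers.Dense(1, activation="sigmoid"),
    ])
    model.compile(optimizer="adam",
                  loss="binary_crossentropy",
                  metrics=["accuracy"])

# Dummy data purely to make the example runnable; replace with a real tf.data pipeline.
x = np.random.rand(1024, 32).astype("float32")
y = np.random.randint(0, 2, size=(1024, 1)).astype("float32")
dataset = tf.data.Dataset.from_tensor_slices((x, y)).batch(GLOBAL_BATCH_SIZE)

model.fit(dataset, epochs=1)
```

The same code runs unchanged on CPU, a single GPU, or a TPU, because the default strategy returned by `tf.distribute.get_strategy()` exposes the same `scope()` and `num_replicas_in_sync` interface (with one replica).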