From a9e11623cdee20b1c68bb13a8f7749ec8d165e82 Mon Sep 17 00:00:00 2001
From: Samuel Liu <liupeng0518@gmail.com>
Date: Thu, 17 Sep 2020 17:45:05 +0800
Subject: [PATCH] fix remove node (#6666)

---
 docs/getting-started.md |  4 ++--
 docs/nodes.md           |  2 +-
 remove-node.yml         | 14 +++++++-------
 3 files changed, 10 insertions(+), 10 deletions(-)

diff --git a/docs/getting-started.md b/docs/getting-started.md
index ab1d6e79f..ec9d707e9 100644
--- a/docs/getting-started.md
+++ b/docs/getting-started.md
@@ -69,9 +69,9 @@ ansible-playbook -i inventory/mycluster/hosts.yml remove-node.yml -b -v \
 --extra-vars "node=nodename,nodename2"
 ```
 
-If a node is completely unreachable by ssh, add `--extra-vars reset_nodes=no`
+If a node is completely unreachable by ssh, add `--extra-vars reset_nodes=false`
 to skip the node reset step. If one node is unavailable, but others you wish
-to remove are able to connect via SSH, you could set reset_nodes=no as a host
+to remove are able to connect via SSH, you could set `reset_nodes=false` as a host
 var in inventory.
 
 ## Connecting to Kubernetes
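
The hunk above documents both ways of skipping the reset step. As a quick sketch (reusing the placeholder inventory path and node names already shown in this doc, not values introduced by the patch), the command-line form would be:

```
# Skip the reset step for all nodes being removed, e.g. when a node is
# unreachable over SSH:
ansible-playbook -i inventory/mycluster/hosts.yml remove-node.yml -b -v \
  --extra-vars "node=nodename,nodename2" \
  --extra-vars reset_nodes=false
```

The host-var alternative scopes `reset_nodes=false` to just the unreachable host, so nodes that are still reachable get reset as usual.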
diff --git a/docs/nodes.md b/docs/nodes.md
index 6741bfea2..60844794d 100644
--- a/docs/nodes.md
+++ b/docs/nodes.md
@@ -70,7 +70,7 @@ Before using `--limit` run playbook `facts.yml` without the limit to refresh fac
 
 With the old node still in the inventory, run `remove-node.yml`. You need to pass `-e node=NODE_NAME` to the playbook to limit the execution to the node being removed.
   
-If the node you want to remove is not online, you should add `reset_nodes=false` to your extra-vars: `-e node=NODE_NAME reset_nodes=false`.
+If the node you want to remove is not online, you should add `reset_nodes=false` to your extra-vars: `-e node=NODE_NAME -e reset_nodes=false`.
 Use this flag even when you remove other types of nodes, such as master or etcd nodes.
 
 ### 5) Remove the node from the inventory
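
For illustration, the removal command described in the step above could look like the following sketch (the inventory path is assumed from getting-started.md; `NODE_NAME` is the doc's own placeholder):

```
# Remove a node that is offline and skip the reset step on it; the same
# flag applies when the node being removed is a master or an etcd member.
ansible-playbook -i inventory/mycluster/hosts.yml remove-node.yml -b -v \
  -e node=NODE_NAME -e reset_nodes=false
```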
diff --git a/remove-node.yml b/remove-node.yml
index 341c92615..a5a1d2a60 100644
--- a/remove-node.yml
+++ b/remove-node.yml
@@ -31,7 +31,7 @@
         msg: "Delete nodes confirmation failed"
       when: delete_nodes_confirmation != "yes"
 
-- hosts: kube-master
+- hosts: kube-master[0]
   gather_facts: no
   roles:
     - { role: kubespray-defaults }
@@ -41,15 +41,15 @@
 - hosts: "{{ node | default('kube-node') }}"
   gather_facts: no
   roles:
-    - { role: kubespray-defaults }
-    - { role: bootstrap-os, tags: bootstrap-os }
-    - { role: remove-node/remove-etcd-node}
-    - { role: reset, tags: reset, when: reset_nodes|default(True) }
+    - { role: kubespray-defaults, when: reset_nodes|default(True)|bool }
+    - { role: bootstrap-os, tags: bootstrap-os, when: reset_nodes|default(True)|bool }
+    - { role: remove-node/remove-etcd-node }
+    - { role: reset, tags: reset, when: reset_nodes|default(True)|bool }
 
 # Currently cannot remove first master or etcd
 - hosts: "{{ node | default('kube-master[1:]:etcd[1:]') }}"
   gather_facts: no
   roles:
-    - { role: kubespray-defaults }
-    - { role: bootstrap-os, tags: bootstrap-os}
+    - { role: kubespray-defaults, when: reset_nodes|default(True)|bool }
+    - { role: bootstrap-os, tags: bootstrap-os, when: reset_nodes|default(True)|bool }
     - { role: remove-node/post-remove, tags: post-remove }
-- 
GitLab