You are viewing a plain text version of this content. The canonical link for it is here.
Posted to commits@couchdb.apache.org by va...@apache.org on 2023/12/07 05:28:16 UTC
(couchdb) branch more-flakes-in-cluster-setup created (now 960ab6f92)
This is an automated email from the ASF dual-hosted git repository.
vatamane pushed a change to branch more-flakes-in-cluster-setup
in repository https://gitbox.apache.org/repos/asf/couchdb.git
at 960ab6f92 More flaky fixes for cluster setup
This branch includes the following new commits:
new 960ab6f92 More flaky fixes for cluster setup
The 1 revision listed above as "new" is entirely new to this
repository and will be described in a separate email. The revisions
listed as "add" were already present in the repository and have only
been added to this reference.
(couchdb) 01/01: More flaky fixes for cluster setup
Posted by va...@apache.org.
This is an automated email from the ASF dual-hosted git repository.
vatamane pushed a commit to branch more-flakes-in-cluster-setup
in repository https://gitbox.apache.org/repos/asf/couchdb.git
commit 960ab6f928e1d8dbad5c647cf77a9069ed7583c3
Author: Nick Vatamaniuc <va...@gmail.com>
AuthorDate: Wed Dec 6 23:56:49 2023 -0500
More flaky fixes for cluster setup
Noticed nouveau elixir tests still throw `connection refused` errors in the
`finish_cluster` setup step. So try to make all the http requests in dev/run
use retries. That also helps us DRY out some repeating bits like header auth
and json application content type.
We also had an odd `import catch error then import again` clause. I don't think
we intend this to work on Python 2 any longer, so I cleaned that up as well.
---
dev/run | 118 +++++++++++++++++++++++++++-------------------------------------
1 file changed, 50 insertions(+), 68 deletions(-)
diff --git a/dev/run b/dev/run
index dbdfecf03..8d47d76f9 100755
--- a/dev/run
+++ b/dev/run
@@ -40,15 +40,8 @@ from pbkdf2 import pbkdf2_hex
COMMON_SALT = uuid.uuid4().hex
-try:
- from urllib.request import urlopen
-except ImportError:
- from urllib.request import urlopen
-
-try:
- import http.client as httpclient
-except ImportError:
- import http.client as httpclient
+from urllib.request import urlopen
+import http.client as httpclient
def toposixpath(path):
@@ -1018,10 +1011,6 @@ def enable_cluster(node_count, port, user, pswd):
"node_count": node_count,
}
)
- headers = {
- "Authorization": basic_auth_header(user, pswd),
- "Content-Type": "application/json",
- }
(status, response) = try_request(
"127.0.0.1",
port,
@@ -1029,7 +1018,7 @@ def enable_cluster(node_count, port, user, pswd):
"/_cluster_setup",
(201, 400),
body=body,
- headers=headers,
+ headers=setup_headers(user, pswd),
error="Failed to run _cluster_setup",
)
if status == 400:
@@ -1039,65 +1028,62 @@ def enable_cluster(node_count, port, user, pswd):
def add_node(lead_port, node_name, node_port, user, pswd):
- conn = httpclient.HTTPConnection("127.0.0.1", lead_port)
- conn.request(
+ body = json.dumps(
+ {
+ "action": "add_node",
+ "host": "127.0.0.1",
+ "port": node_port,
+ "name": node_name,
+ "username": user,
+ "password": pswd,
+ }
+ )
+ (status, response) = try_request(
+ "127.0.0.1",
+ lead_port,
"POST",
"/_cluster_setup",
- json.dumps(
- {
- "action": "add_node",
- "host": "127.0.0.1",
- "port": node_port,
- "name": node_name,
- "username": user,
- "password": pswd,
- }
- ),
- {
- "Authorization": basic_auth_header(user, pswd),
- "Content-Type": "application/json",
- },
+ (201, 409),
+ body=body,
+ headers=setup_headers(user, pswd),
)
- resp = conn.getresponse()
- assert resp.status in (201, 409), resp.read()
- resp.close()
+ assert status in (201, 409), response
def set_cookie(port, user, pswd):
- conn = httpclient.HTTPConnection("127.0.0.1", port)
- conn.request(
+ (status, response) = try_request(
+ "127.0.0.1",
+ port,
"POST",
"/_cluster_setup",
- json.dumps({"action": "receive_cookie", "cookie": generate_cookie()}),
- {
- "Authorization": basic_auth_header(user, pswd),
- "Content-Type": "application/json",
- },
+ (201,),
+ body=json.dumps({"action": "receive_cookie", "cookie": generate_cookie()}),
+ headers=setup_headers(user, pswd),
)
- resp = conn.getresponse()
- assert resp.status == 201, resp.read()
- resp.close()
+ assert status == 201, response
def finish_cluster(port, user, pswd):
- conn = httpclient.HTTPConnection("127.0.0.1", port)
- conn.request(
+ (status, response) = try_request(
+ "127.0.0.1",
+ port,
"POST",
"/_cluster_setup",
- json.dumps({"action": "finish_cluster"}),
- {
- "Authorization": basic_auth_header(user, pswd),
- "Content-Type": "application/json",
- },
+ (201, 400),
+ body=json.dumps({"action": "finish_cluster"}),
+ headers=setup_headers(user, pswd),
+ error="Failed to run _finish_cluster",
)
- resp = conn.getresponse()
- # 400 for already set up'ed cluster
- assert resp.status in (201, 400), resp.read()
- resp.close()
+ # 400 for already set up cluster
+ assert status in (201, 400), response
-def basic_auth_header(user, pswd):
- return "Basic " + base64.b64encode((user + ":" + pswd).encode()).decode()
+def setup_headers(user, pswd):
+ b64userpass = base64.b64encode((user + ":" + pswd).encode()).decode()
+ return {
+ "Authorization": "Basic " + b64userpass,
+ "Content-Type": "application/json",
+ }
def generate_cookie():
@@ -1135,18 +1121,14 @@ def try_request(
def create_system_databases(host, port):
for dbname in ["_users", "_replicator", "_global_changes"]:
- conn = httpclient.HTTPConnection(host, port)
- conn.request("HEAD", "/" + dbname)
- resp = conn.getresponse()
- if resp.status == 404:
- try_request(
- host,
- port,
- "PUT",
- "/" + dbname,
- (201, 202, 412),
- error="Failed to create '%s' database:\n" % dbname,
- )
+ try_request(
+ host,
+ port,
+ "PUT",
+ "/" + dbname,
+ (201, 202, 412),
+ error="Failed to create '%s' database:\n" % dbname,
+ )
@log(