Commit 25856d5

Add s3 tests

Author: vshepard
1 parent 2b9c9b1 commit 25856d5


59 files changed: +14614 additions, -20420 deletions

s3/__init__.py

Whitespace-only changes.

s3/test_utils/__init__.py

Whitespace-only changes.

s3/test_utils/config_provider.py
8 additions & 0 deletions

@@ -0,0 +1,8 @@
+import configparser
+
+
+def read_config(s3_config_file):
+    config = configparser.ConfigParser()
+    config.read_string('[fake-section]\n' + open(s3_config_file).read())
+
+    return config['fake-section']
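configparser refuses files that lack a section header, so read_config() prepends a synthetic [fake-section] line and returns that section as a flat mapping. A minimal usage sketch, assuming an s3.conf of plain key = value lines (the values shown are illustrative, not part of this commit):

# Illustrative only. Suppose s3/tests/s3.conf contains:
#   access-key = minioadmin
#   secret-key = minioadmin
#   s3-host = localhost
from s3.test_utils.config_provider import read_config

cfg = read_config('s3/tests/s3.conf')  # the DEFAULT_CONF_FILE path used by the tests
print(cfg['access-key'], cfg['s3-host'])  # the returned SectionProxy supports dict-style access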

s3/test_utils/s3_backup.py
208 additions & 0 deletions

@@ -0,0 +1,208 @@
+import os
+import io
+import sys
+
+import minio
+from minio import Minio
+from minio.deleteobjects import DeleteObject
+import urllib3
+from pg_probackup2.storage.fs_backup import TestBackupDir
+from pg_probackup2.init_helpers import init_params
+from s3.test_utils import config_provider
+
+root = os.path.realpath(os.path.join(os.path.dirname(__file__), '../..'))
+if root not in sys.path:
+    sys.path.append(root)
+
+status_forcelist = [413,  # RequestBodyTooLarge
+                    429,  # TooManyRequests
+                    500,  # InternalError
+                    503,  # ServerBusy
+                    ]
+
+DEFAULT_CONF_FILE = 's3/tests/s3.conf'
+
+
+class S3TestBackupDir(TestBackupDir):
+    is_file_based = False
+
+    def __init__(self, *, rel_path, backup):
+        self.access_key = None
+        self.secret_key = None
+        self.s3_type = None
+        self.tmp_path = None
+        self.host = None
+        self.port = None
+        self.bucket_name = None
+        self.region = None
+        self.bucket = None
+        self.path_suffix = None
+        self.https = None
+        self.s3_config_file = None
+        self.ca_certificate = None
+
+        self.set_s3_config_file()
+        self.setup_s3_env()
+
+        path = "pg_probackup"
+        if self.path_suffix:
+            path += "_" + self.path_suffix
+        if self.tmp_path == '' or os.path.isabs(self.tmp_path):
+            self.path = f"{path}{self.tmp_path}/{rel_path}/{backup}"
+        else:
+            self.path = f"{path}/{self.tmp_path}/{rel_path}/{backup}"
+
+        secure: bool = False
+        self.versioning: bool = False
+        if self.https in ['ON', 'HTTPS']:
+            secure = True
+        if self.https and self.ca_certificate:
+            http_client = urllib3.PoolManager(cert_reqs='CERT_REQUIRED',
+                                              ca_certs=self.ca_certificate,
+                                              retries=urllib3.Retry(total=5,
+                                                                    backoff_factor=1,
+                                                                    status_forcelist=status_forcelist))
+        else:
+            http_client = urllib3.PoolManager(retries=urllib3.Retry(total=5,
+                                                                    backoff_factor=1,
+                                                                    status_forcelist=status_forcelist))
+
+        self.conn = Minio(self.host + ":" + self.port, secure=secure, access_key=self.access_key,
+                          secret_key=self.secret_key, http_client=http_client)
+        if not self.conn.bucket_exists(self.bucket):
+            raise Exception(f"Test bucket {self.bucket} does not exist.")
+
+        try:
+            config = self.conn.get_bucket_versioning(self.bucket)
+            if config.status.lower() == "enabled" or config.status.lower() == "suspended":
+                self.versioning = True
+            else:
+                self.versioning = False
+        except Exception as e:
+            if "NotImplemented" in repr(e):
+                self.versioning = False
+            else:
+                raise e
+        self.pb_args = ('-B', '/' + self.path, f'--s3={init_params.s3_type}')
+        if self.s3_config_file:
+            self.pb_args += (f'--s3-config-file={self.s3_config_file}',)
+        return
+
+    def setup_s3_env(self, s3_config=None):
+        self.tmp_path = os.environ.get('PGPROBACKUP_TMP_DIR', default='')
+        self.host = os.environ.get('PG_PROBACKUP_S3_HOST', default='')
+
+        # If the environment variables are not set, read the settings from the config file
+        if self.s3_config_file or s3_config:
+            minio_config = config_provider.read_config(self.s3_config_file or s3_config)
+            self.access_key = minio_config['access-key']
+            self.secret_key = minio_config['secret-key']
+            self.host = minio_config['s3-host']
+            self.port = minio_config['s3-port']
+            self.bucket = minio_config['s3-bucket']
+            self.region = minio_config['s3-region']
+            self.https = minio_config['s3-secure']
+            init_params.s3_type = 'minio'
+        else:
+            self.access_key = os.environ.get('PG_PROBACKUP_S3_ACCESS_KEY')
+            self.secret_key = os.environ.get('PG_PROBACKUP_S3_SECRET_ACCESS_KEY')
+            self.host = os.environ.get('PG_PROBACKUP_S3_HOST')
+            self.port = os.environ.get('PG_PROBACKUP_S3_PORT')
+            self.bucket = os.environ.get('PG_PROBACKUP_S3_BUCKET_NAME')
+            self.region = os.environ.get('PG_PROBACKUP_S3_REGION')
+            self.https = os.environ.get('PG_PROBACKUP_S3_HTTPS')
+            self.ca_certificate = os.environ.get('PG_PROBACKUP_S3_CA_CERTIFICATE')
+            init_params.s3_type = os.environ.get('PG_PROBACKUP_S3_TEST')
+
+        # multi-url case:
+        # keep only the first url in the string
+        if ';' in self.host:
+            self.host = self.host[:self.host.find(';')]
+            if ':' in self.host:  # also change the port if it was overridden in the multihost string
+                self.port = self.host[self.host.find(':') + 1:]
+                self.host = self.host[:self.host.find(':')]
+
+    def set_s3_config_file(self):
+        s3_config = os.environ.get('PG_PROBACKUP_S3_CONFIG_FILE')
+        if s3_config is not None and s3_config.strip().lower() == "true":
+            self.s3_config_file = DEFAULT_CONF_FILE
+        else:
+            self.s3_config_file = s3_config
+
+    def list_instance_backups(self, instance):
+        full_path = os.path.join(self.path, 'backups', instance)
+        candidates = self.conn.list_objects(self.bucket, prefix=full_path, recursive=True)
+        return [os.path.basename(os.path.dirname(x.object_name))
+                for x in candidates if x.object_name.endswith('backup.control')]
+
+    def list_files(self, sub_dir, recursive=False):
+        full_path = os.path.join(self.path, sub_dir)
+        # A trailing '/' is needed to list objects inside the folder
+        full_path_dir = full_path if full_path[-1] == '/' else full_path + '/'
+        object_list = self.conn.list_objects(self.bucket, prefix=full_path_dir, recursive=recursive)
+        return [obj.object_name.replace(full_path_dir, '', 1)
+                for obj in object_list
+                if not obj.is_dir]
+
+    def list_dirs(self, sub_dir):
+        full_path = os.path.join(self.path, sub_dir)
+        # A trailing '/' is needed to list objects inside the folder
+        full_path_dir = full_path if full_path[-1] == '/' else full_path + '/'
+        object_list = self.conn.list_objects(self.bucket, prefix=full_path_dir, recursive=False)
+        return [obj.object_name.replace(full_path_dir, '', 1).rstrip('\\/')
+                for obj in object_list
+                if obj.is_dir]
+
+    def read_file(self, sub_path, *, text=True):
+        full_path = os.path.join(self.path, sub_path)
+        bytes = self.conn.get_object(self.bucket, full_path).read()
+        if not text:
+            return bytes
+        return bytes.decode('utf-8')
+
+    def write_file(self, sub_path, data, *, text=True):
+        full_path = os.path.join(self.path, sub_path)
+        if text:
+            data = data.encode('utf-8')
+        self.conn.put_object(self.bucket, full_path, io.BytesIO(data), length=len(data))
+
+    def cleanup(self, dir=''):
+        self.remove_dir(dir)
+
+    def remove_file(self, sub_path):
+        full_path = os.path.join(self.path, sub_path)
+        self.conn.remove_object(self.bucket, full_path)
+
+    def remove_dir(self, sub_path):
+        if sub_path:
+            full_path = os.path.join(self.path, sub_path)
+        else:
+            full_path = self.path
+        objs = self.conn.list_objects(self.bucket, prefix=full_path, recursive=True,
+                                      include_version=self.versioning)
+        delobjs = (DeleteObject(o.object_name, o.version_id) for o in objs)
+        errs = list(self.conn.remove_objects(self.bucket, delobjs))
+        if errs:
+            strerrs = "; ".join(str(err) for err in errs)
+            raise Exception("There were errors: {0}".format(strerrs))
+
+    def exists(self, sub_path):
+        full_path = os.path.join(self.path, sub_path)
+        try:
+            self.conn.stat_object(self.bucket, full_path)
+            return True
+        except minio.error.S3Error as s3err:
+            if s3err.code == 'NoSuchKey':
+                return False
+            raise s3err
+        except Exception as err:
+            raise err
+
+    def __str__(self):
+        return '/' + self.path
+
+    def __repr__(self):
+        return "S3TestBackupDir" + str(self.path)
+
+    def __fspath__(self):
+        return self.path
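A hedged sketch of exercising S3TestBackupDir by hand against a local MinIO. The environment variable names come from setup_s3_env() above; the endpoint, credentials, and bucket are placeholders, and the bucket must already exist (the constructor raises otherwise):

import os

os.environ.setdefault('PG_PROBACKUP_S3_HOST', 'localhost')         # placeholder endpoint
os.environ.setdefault('PG_PROBACKUP_S3_PORT', '9000')
os.environ.setdefault('PG_PROBACKUP_S3_ACCESS_KEY', 'minioadmin')  # placeholder credentials
os.environ.setdefault('PG_PROBACKUP_S3_SECRET_ACCESS_KEY', 'minioadmin')
os.environ.setdefault('PG_PROBACKUP_S3_BUCKET_NAME', 'pg-probackup-tests')
os.environ.setdefault('PG_PROBACKUP_S3_TEST', 'minio')

from s3.test_utils.s3_backup import S3TestBackupDir

d = S3TestBackupDir(rel_path='demo', backup='backup')
d.write_file('note.txt', 'hello')
assert d.exists('note.txt') and d.read_file('note.txt') == 'hello'
d.cleanup()  # remove_dir('') deletes every object under the directory prefix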

s3/tests/__init__.py
17 additions & 0 deletions

@@ -0,0 +1,17 @@
+import unittest
+import os
+
+from . import auth_test, param_test
+
+
+def load_tests(loader, tests, pattern):
+    suite = unittest.TestSuite()
+
+    if 'PG_PROBACKUP_TEST_BASIC' in os.environ:
+        if os.environ['PG_PROBACKUP_TEST_BASIC'] == 'ON':
+            loader.testMethodPrefix = 'test_basic'
+
+    suite.addTests(loader.loadTestsFromModule(auth_test))
+    suite.addTests(loader.loadTestsFromModule(param_test))
+
+    return suite
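Because unittest honors the load_tests protocol, package-level discovery routes through this hook automatically. A sketch of an equivalent manual run (param_test is imported here but its diff is not part of this commit):

import unittest

# With PG_PROBACKUP_TEST_BASIC=ON the hook narrows collection to methods
# prefixed 'test_basic' before loading auth_test and param_test.
suite = unittest.defaultTestLoader.loadTestsFromName('s3.tests')
unittest.TextTestRunner(verbosity=2).run(suite)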

s3/tests/auth_test.py
36 additions & 0 deletions

@@ -0,0 +1,36 @@
+import os
+import sys
+
+root = os.path.realpath(os.path.join(os.path.dirname(__file__), '../..'))
+if root not in sys.path:
+    sys.path.append(root)
+
+from tests.helpers.ptrack_helpers import ProbackupTest
+
+
+class AuthorizationTest(ProbackupTest):
+    """
+    Check the connection to S3 via the pre_start_checks() function
+    by calling pg_probackup init --s3.
+
+    Test that the S3 keys allow connecting to all storage types.
+    """
+
+    def s3_auth_test(self):
+        console_output = self.pb.init(options=["--log-level-console=VERBOSE"])
+
+        self.assertNotIn(': 403', console_output)  # match ': 403' because a bare '403' can appear inside a timestamp
+        self.assertMessage(console_output, contains='S3_pre_start_check successful')
+        self.assertMessage(console_output, contains='HTTP response: 200')
+        self.assertIn(
+            f"INFO: Backup catalog '{self.backup_dir}' successfully initialized",
+            console_output)
+
+    def test_log_level_file_requires_log_directory(self):
+        console_output = self.pb.init(options=["--log-level-file=VERBOSE"],
+                                      skip_log_directory=True,
+                                      expect_error=True)
+
+        self.assertMessage(console_output,
+                           contains='ERROR: Cannot save S3 logs to a file. You must specify --log-directory option when'
+                                    ' running backup with --log-level-file option enabled')
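Note that s3_auth_test lacks the loader's default 'test' prefix, so plain discovery will skip it. A sketch of running it by explicit method name, assuming ProbackupTest ultimately derives from unittest.TestCase (as the CVE_2018_1058 change below suggests):

import unittest

from s3.tests.auth_test import AuthorizationTest

suite = unittest.TestSuite()
suite.addTest(AuthorizationTest('s3_auth_test'))  # address the method by name
unittest.TextTestRunner(verbosity=2).run(suite)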

s3/tests/pytest.ini
6 additions & 0 deletions

@@ -0,0 +1,6 @@
+[pytest]
+log_cli = true
+log_cli_level = INFO
+log_format = %(asctime)s %(levelname)s %(message)s
+log_date_format = %Y-%m-%d %H:%M:%S
+testpaths = tests
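With log_cli enabled, records emitted through the standard logging module stream to the console live while pytest runs, in the format configured above. A minimal sketch (the file and test names are illustrative):

# test_logging_demo.py -- run with pytest so the ini settings above apply
import logging

logger = logging.getLogger(__name__)

def test_logging_streams_to_console():
    # Shown live at INFO level because log_cli = true and log_cli_level = INFO
    logger.info("visible in the console while the test runs")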

tests/CVE_2018_1058_test.py
18 additions & 35 deletions

@@ -1,19 +1,16 @@
 import os
 import unittest
-from .helpers.ptrack_helpers import ProbackupTest, ProbackupException
+from .helpers.ptrack_helpers import ProbackupTest
 
-class CVE_2018_1058(ProbackupTest, unittest.TestCase):
+class CVE_2018_1058(ProbackupTest):
 
     # @unittest.skip("skip")
     def test_basic_default_search_path(self):
         """"""
-        backup_dir = os.path.join(self.tmp_path, self.module_name, self.fname, 'backup')
-        node = self.make_simple_node(
-            base_dir=os.path.join(self.module_name, self.fname, 'node'),
-            set_replication=True)
+        node = self.pg_node.make_simple('node', checksum=False, set_replication=True)
 
-        self.init_pb(backup_dir)
-        self.add_instance(backup_dir, 'node', node)
+        self.pb.init()
+        self.pb.add_instance('node', node)
         node.slow_start()
 
         node.safe_psql(
@@ -26,19 +23,16 @@ def test_basic_default_search_path(self):
             "END "
             "$$ LANGUAGE plpgsql")
 
-        self.backup_node(backup_dir, 'node', node, backup_type='full', options=['--stream'])
+        self.pb.backup_node('node', node, backup_type='full', options=['--stream'])
 
     # @unittest.skip("skip")
     def test_basic_backup_modified_search_path(self):
         """"""
-        backup_dir = os.path.join(self.tmp_path, self.module_name, self.fname, 'backup')
-        node = self.make_simple_node(
-            base_dir=os.path.join(self.module_name, self.fname, 'node'),
-            set_replication=True)
-        self.set_auto_conf(node, options={'search_path': 'public,pg_catalog'})
+        node = self.pg_node.make_simple('node', checksum=False, set_replication=True)
+        node.set_auto_conf(options={'search_path': 'public,pg_catalog'})
 
-        self.init_pb(backup_dir)
-        self.add_instance(backup_dir, 'node', node)
+        self.pb.init()
+        self.pb.add_instance('node', node)
         node.slow_start()
 
         node.safe_psql(
@@ -62,7 +56,7 @@ def test_basic_backup_modified_search_path(self):
             "$$ LANGUAGE plpgsql; "
             "CREATE VIEW public.pg_proc AS SELECT proname FROM public.pg_proc()")
 
-        self.backup_node(backup_dir, 'node', node, backup_type='full', options=['--stream'])
+        self.pb.backup_node('node', node, backup_type='full', options=['--stream'])
 
         log_file = os.path.join(node.logs_dir, 'postgresql.log')
         with open(log_file, 'r') as f:
@@ -73,10 +67,8 @@ def test_basic_backup_modified_search_path(self):
     # @unittest.skip("skip")
     def test_basic_checkdb_modified_search_path(self):
         """"""
-        node = self.make_simple_node(
-            base_dir=os.path.join(self.module_name, self.fname, 'node'),
-            initdb_params=['--data-checksums'])
-        self.set_auto_conf(node, options={'search_path': 'public,pg_catalog'})
+        node = self.pg_node.make_simple('node')
+        node.set_auto_conf(options={'search_path': 'public,pg_catalog'})
         node.slow_start()
 
         node.safe_psql(
@@ -110,20 +102,11 @@ def test_basic_checkdb_modified_search_path(self):
             "CREATE VIEW public.pg_namespace AS SELECT * FROM public.pg_namespace();"
             )
 
-        try:
-            self.checkdb_node(
+        self.pb.checkdb_node(
             options=[
                 '--amcheck',
                 '--skip-block-validation',
-                '-d', 'postgres', '-p', str(node.port)])
-            self.assertEqual(
-                1, 0,
-                "Expecting Error because amcheck{,_next} not installed\n"
-                " Output: {0} \n CMD: {1}".format(
-                    repr(self.output), self.cmd))
-        except ProbackupException as e:
-            self.assertIn(
-                "WARNING: Extension 'amcheck' or 'amcheck_next' are not installed in database postgres",
-                e.message,
-                "\n Unexpected Error Message: {0}\n CMD: {1}".format(
-                    repr(e.message), self.cmd))
+            '-d', 'postgres', '-p', str(node.port)],
+            expect_error="because amcheck{,_next} not installed")
+        self.assertMessage(contains=
+            "WARNING: Extension 'amcheck' or 'amcheck_next' are not installed in database postgres")

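The rewrite above replaces the try/except-plus-assertEqual(1, 0) idiom with expect_error= and assertMessage(). A hypothetical, self-contained sketch of how such a helper pair can work; MiniRunner is illustrative only and is not the ProbackupTest implementation:

import subprocess

class MiniRunner:
    """Illustrative stand-in for the expect_error / assertMessage pattern."""

    def __init__(self):
        self.last_message = ''

    def run(self, cmd, expect_error=None):
        proc = subprocess.run(cmd, capture_output=True, text=True)
        self.last_message = proc.stdout + proc.stderr
        if expect_error:
            # The command is expected to fail; message checks come afterwards.
            assert proc.returncode != 0, f"expected failure {expect_error}"
        else:
            assert proc.returncode == 0, self.last_message
        return self.last_message

    def assertMessage(self, *, contains):
        assert contains in self.last_message, self.last_message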