10 changes: 9 additions & 1 deletion schema.json
@@ -282,6 +282,7 @@
"couchbaseHost",
"couchbasePort",
"couchbaseBakLoc",
"archiveName",
"couchbaseBakRepo",
"mountPath",
"stgClusterName",
@@ -298,7 +299,7 @@
"xdcrAdminPassword",
"fts_service",
"eventing_service",
"config_settings_prov"
"config_settings_prov"
],
"properties" : {
"dSourceType": {
@@ -327,6 +328,13 @@
"description": "Backups taken via cbbackupmgr",
"default": ""
},
"archiveName": {
"type": "string",
"format": "unixpath",
"prettyName": "Backup Archive Name",
"description": "Archive Name for Backups taken",
"default": ""
},
"couchbaseBakRepo": {
"type": "string",
"format": "unixpath",
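For context, a hypothetical excerpt of the linked-source parameters this schema now validates (all values illustrative, not from the PR). An empty archiveName, the schema default, makes the plugin auto-select the newest timestamped archive, as the new check_and_update_archive_path below shows:

    # Hypothetical dSource parameters; every value here is illustrative.
    params = {
        "couchbaseBakLoc": "/u01/couchbase_backups",
        "archiveName": "",  # "" (the default) -> auto-pick the newest archive
        "couchbaseBakRepo": "delphix_repo",
        "mountPath": "/mnt/provision/stg",
    }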
61 changes: 60 additions & 1 deletion src/controller/couchbase_lib/_cb_backup.py
@@ -10,6 +10,8 @@

##############################################################################
import logging
import os
from datetime import datetime

from controller import helper_lib
from controller.couchbase_lib._mixin_interface import MixinInterface
@@ -36,6 +38,61 @@ def generate_environment_map(self):
        # MixinInterface.read_map(env)
        return env

    def check_and_update_archive_path(self, check_file=False):
        folder_name = self.parameters.archive_name
        if self.parameters.archive_name == "":
            command_output, std_err, exit_code = self.run_os_command(
                os_command="os_ls", dir_path=self.parameters.couchbase_bak_loc
            )
            logger.debug(f"command_output={command_output}")
            datetime_object_list = []
            for archive_name in command_output.split("\n"):
                # archive name in format 20230810010001
                try:
                    datetime_archive_name = datetime.strptime(
                        archive_name, "%Y%m%d%H%M%S"
                    )
                    datetime_object_list.append(datetime_archive_name)
                except ValueError:
                    logger.debug(
                        f"Cannot convert {archive_name} into "
                        f"%Y%m%d%H%M%S format."
                    )
            if not datetime_object_list:
                raise UserError(
                    f"No valid backups found in %Y%m%d%H%M%S "
                    f"format in directory "
                    f"{self.parameters.couchbase_bak_loc}."
                )
            else:
                max_date = max(datetime_object_list)
                folder_name = max_date.strftime("%Y%m%d%H%M%S")
                logger.debug(
                    f"maximum date = {max_date}, folder_name={folder_name}"
                )
        file_data = ""
        if check_file:
            backup_restore_filename = os.path.join(
                self.parameters.mount_path,
                ".delphix/backup_restore.txt",
            )
            check_file_stdout, _, exit_code = self.run_os_command(
                os_command="check_file",
                file_path=backup_restore_filename,
            )

            if exit_code == 0 and "Found" in check_file_stdout:
                file_data, _, _ = self.run_os_command(
                    os_command="cat", path=backup_restore_filename
                )
                file_data = file_data.strip()

        if file_data == folder_name:
            raise UserError("No new backups found....exiting snapshot")
        else:
            self.parameters.archive_name = folder_name
        return folder_name

    def cb_backup_full(self, csv_bucket):
        logger.debug("Starting Restore via Backup file...")
        logger.debug("csv_bucket_list: {}".format(csv_bucket))
@@ -79,7 +136,9 @@ def cb_backup_full(self, csv_bucket):

        stdout, stderr, exit_code = self.run_couchbase_command(
            couchbase_command="cb_backup_full",
            backup_location=self.parameters.couchbase_bak_loc,
            backup_location=os.path.join(
                self.parameters.couchbase_bak_loc, self.parameters.archive_name
            ),
            csv_bucket_list=csv_bucket,
            backup_repo=self.parameters.couchbase_bak_repo,
            skip=skip,
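The heart of check_and_update_archive_path is a pick-the-newest-timestamp scan over the backup location. A minimal standalone sketch of that logic, using a hypothetical directory listing in place of the plugin's run_os_command(os_command="os_ls", ...) call:

    import os
    from datetime import datetime

    def pick_latest_archive(ls_output, backup_loc):
        """Return the newest %Y%m%d%H%M%S-named entry from an `ls` listing."""
        stamps = []
        for name in ls_output.split("\n"):
            try:
                stamps.append(datetime.strptime(name, "%Y%m%d%H%M%S"))
            except ValueError:
                pass  # not a timestamp-named archive folder; skip it
        if not stamps:
            raise ValueError(f"No valid backups found in {backup_loc}.")
        return max(stamps).strftime("%Y%m%d%H%M%S")

    listing = "20230810010001\n20230811010001\nlost+found"  # hypothetical
    archive = pick_latest_archive(listing, "/u01/couchbase_backups")
    print(archive)  # 20230811010001
    print(os.path.join("/u01/couchbase_backups", archive))
    # /u01/couchbase_backups/20230811010001 -- the backup_location that
    # cb_backup_full now receives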
9 changes: 9 additions & 0 deletions src/controller/couchbase_operation.py
@@ -535,6 +535,7 @@ def source_bucket_list_offline(self):
os_command="get_backup_bucket_list",
path=os.path.join(
self.parameters.couchbase_bak_loc,
self.parameters.archive_name,
self.parameters.couchbase_bak_repo,
),
)
@@ -909,6 +910,14 @@ def save_config(self, what, nodeno=1):
                ),
                trgname=chronicle_target_dir,
            )
        if self.parameters.d_source_type == constants.CBBKPMGR:
            self.run_os_command(
                os_command="write_file",
                filename=os.path.join(
                    self.parameters.mount_path, ".delphix/backup_restore.txt"
                ),
                data=self.parameters.archive_name,
            )

    def check_cluster_notconfigured(self):

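save_config now persists the ingested archive name under the staging mount so the next pre-snapshot can detect that nothing new has arrived. A sketch of that round trip, assuming only the marker-file layout shown in the diff (paths and values are illustrative):

    import os
    import tempfile

    MARKER = ".delphix/backup_restore.txt"

    def write_marker(mount_path, archive_name):
        # What save_config's write_file records after a CBBKPMGR sync.
        path = os.path.join(mount_path, MARKER)
        os.makedirs(os.path.dirname(path), exist_ok=True)
        with open(path, "w") as f:
            f.write(archive_name)

    def is_new_backup(mount_path, latest_archive):
        # Mirrors the check_file/cat comparison in check_and_update_archive_path.
        path = os.path.join(mount_path, MARKER)
        if not os.path.exists(path):
            return True
        with open(path) as f:
            return f.read().strip() != latest_archive

    mount = tempfile.mkdtemp()  # stand-in for the staging mount path
    write_marker(mount, "20230810010001")
    print(is_new_backup(mount, "20230810010001"))  # False -> snapshot aborts
    print(is_new_backup(mount, "20230811010001"))  # True  -> re-ingest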
7 changes: 7 additions & 0 deletions src/db_commands/commands.py
@@ -154,6 +154,13 @@ def os_cpr(srcname, trgname, sudo=False, uid=None, **kwargs):
            srcname=srcname, trgname=trgname
        )

    @staticmethod
    def os_ls(dir_path, sudo=False, uid=None, **kwargs):
        if sudo:
            return f"sudo -u \#{uid} ls {dir_path}"
        else:
            return f"ls {dir_path}"

    @staticmethod
    def get_dlpx_bin(**kwargs):
        return "echo $DLPX_BIN_JQ"
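For reference, the exact strings the new os_ls helper renders, shown with a self-contained copy of the function (path and uid are hypothetical; in the repo it is a @staticmethod on the command-factory class). The backslash before # keeps the shell from reading the numeric uid as a comment:

    def os_ls(dir_path, sudo=False, uid=None, **kwargs):
        if sudo:
            return f"sudo -u \#{uid} ls {dir_path}"
        else:
            return f"ls {dir_path}"

    print(os_ls("/u01/couchbase_backups"))
    # ls /u01/couchbase_backups
    print(os_ls("/u01/couchbase_backups", sudo=True, uid=1001))
    # sudo -u \#1001 ls /u01/couchbase_backups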
35 changes: 28 additions & 7 deletions src/operations/link_cbbkpmgr.py
@@ -31,6 +31,7 @@ def resync_cbbkpmgr(
        .set_source_config(source_config)
        .build()
    )
    resync_process.check_and_update_archive_path()

    linking.check_for_concurrent(
        resync_process, dsource_type, dsource_name, couchbase_host
@@ -74,21 +75,41 @@ def pre_snapshot_cbbkpmgr(
        .set_source_config(source_config)
        .build()
    )

    old_archive_name = input_parameters.archive_name
    new_archive_name = pre_snapshot_process.check_and_update_archive_path(
        check_file=True
    )
    dsource_type = input_parameters.d_source_type
    dsource_name = source_config.pretty_name
    couchbase_host = input_parameters.couchbase_host
    linking.check_for_concurrent(
        pre_snapshot_process, dsource_type, dsource_name, couchbase_host
    )
    if old_archive_name == new_archive_name:
        logger.debug("Finding source and staging bucket list")
        bucket_details_staged = pre_snapshot_process.bucket_list()
        filter_bucket_list = helper_lib.filter_bucket_name_from_output(
            bucket_details_staged
        )
        csv_bucket_list = ",".join(filter_bucket_list)
    else:
        logger.debug("Running resync process....for ingesting new backup!")
        linking.configure_cluster(pre_snapshot_process)

        logger.debug("Finding source and staging bucket list")
        bucket_details_source = (
            pre_snapshot_process.source_bucket_list_offline()
        )
        bucket_details_staged = helper_lib.filter_bucket_name_from_output(
            pre_snapshot_process.bucket_list()
        )

        logger.debug("Finding source and staging bucket list")
        buckets_toprocess = linking.buckets_precreation(
            pre_snapshot_process, bucket_details_source, bucket_details_staged
        )

        csv_bucket_list = ",".join(buckets_toprocess)

    bucket_details_staged = pre_snapshot_process.bucket_list()
    filter_bucket_list = helper_lib.filter_bucket_name_from_output(
        bucket_details_staged
    )
    csv_bucket_list = ",".join(filter_bucket_list)
    pre_snapshot_process.cb_backup_full(csv_bucket_list)
    logger.info("Re-ingesting from latest backup complete.")

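Taken together, the linking changes branch on whether a newer archive has appeared since the last sync. A toy model of that decision (an illustrative function, not the plugin API; archive names sort chronologically because the timestamp format is fixed-width):

    def pre_snapshot_decision(pinned_archive, disk_archives, marker):
        # pinned_archive: the archiveName parameter ("" means auto-pick latest)
        # disk_archives:  %Y%m%d%H%M%S folder names under couchbaseBakLoc
        # marker:         contents of .delphix/backup_restore.txt, "" if absent
        latest = pinned_archive or max(disk_archives)
        if marker == latest:
            raise RuntimeError("No new backups found....exiting snapshot")
        if pinned_archive == latest:
            return "snapshot staged buckets from " + latest
        return "reconfigure and re-ingest from " + latest

    print(pre_snapshot_decision(
        "", ["20230810010001", "20230811010001"], "20230810010001"
    ))
    # reconfigure and re-ingest from 20230811010001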
Empty file removed: test/__init__.py