Dataset columns: instruction (string, 14 distinct values), output (string, 105 to 12.9k characters), input (string, 0 to 4.12k characters).
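Each row pairs one of the instruction strings below with an input (code or a bare docstring) and the expected output. A minimal sketch of turning one row into a prompt/target pair (the row dict and the prompt template are assumptions for illustration, not part of the dataset):

def build_prompt(row):
    # row: dict with the three columns described above
    prompt = row["instruction"].strip()
    if row["input"]:
        # append the code or docstring the instruction refers to
        prompt = prompt + "\n" + row["input"].strip()
    return prompt, row["output"]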
generate comment for following function:
def __init__(self, server, server_info, os_type, version, edition): """ Creates an instance of the NodeInstallInfo class. :param server: server object of type TestInputServer :param server_info: server info with information of the server :param os_type: OS type of the server :param version: version of the Couchbase server :param edition: type of Couchbase Server """ self.server = server self.server_info = server_info self.os_type = os_type self.version = version self.edition = edition self.build_url = None self.debug_build_url = None self.non_root_package_mgr = None self.state = "not_started"
def __init__(self, server, server_info, os_type, version, edition): self.server = server self.server_info = server_info self.os_type = os_type self.version = version self.edition = edition self.build_url = None self.debug_build_url = None self.non_root_package_mgr = None self.state = "not_started"
generate python code for the following
def stop_memcached(self): """ Stop memcached process on remote server :return: None """ o, r = self.execute_command("taskkill /F /T /IM memcached*") self.log_command_output(o, r, debug=False)
Stop memcached process on remote server
generate code for the above:
def is_couchbase_installed(self): """ Check if Couchbase is installed on the remote server. This checks whether Couchbase is installed in the default or a non-default path. :return: True if Couchbase is installed on the remote server else False """ if self.file_exists(WIN_CB_PATH, VERSION_FILE): self.log.info("{0} - VERSION file {1} {2} exists" .format(self.ip, WIN_CB_PATH, VERSION_FILE)) return True return False
Check if Couchbase is installed on the remote server. This checks whether Couchbase is installed in the default or a non-default path.
Code the following:
def get_full_hostname(self): """ Get the full hostname of the remote server Override method for windows :return: full hostname if domain is set, else None """ if not self.info.domain: return None return '%s.%s' % (self.info.hostname[0], self.info.domain)
Get the full hostname of the remote server Override method for windows
def cbbackupmgr_param(self, name, *args): """ Returns the config value from the ini whose key matches 'name' and is stored under the 'cbbackupmgr' section heading. :param name: the key under which an expected value is stored. :param args: expects a single parameter which will be used as the default if the requested key is not found. :return: the value parsed from the ini file/default value if the given key is not found. :raises Exception: if the given key does not exist in the ini and no default value is provided. """ if name in self.cbbackupmgr: return TestInput._parse_param(self.cbbackupmgr[name]) if len(args) == 1: return args[0] if self.cbbackupmgr["name"] != "local_bkrs": raise Exception(f"Parameter '{name}' must be set in the test configuration")
Returns the config value from the ini whose key matches 'name' and is stored under the 'cbbackupmgr' section heading.
give python code to
import install_util.constants from install_util.constants.build import BuildUrl from shell_util.remote_connection import RemoteMachineShellConnection def __construct_build_url(self, is_debuginfo_build=False): """ Constructs the build url for the given node. This url is used to download the installation package. :param is_debuginfo_build: gets debug_info build url if True :return: build url """ file_name = None build_version = self.node_install_info.version.split("-") os_type = self.node_install_info.os_type node_info = RemoteMachineShellConnection.get_info_for_server( self.node_install_info.server) # Decide between release / regular build URL path if len(build_version) == 1: # Release build url url_path = "http://{}/{}/{}" \ .format(BuildUrl.CB_DOWNLOAD_SERVER, BuildUrl.CB_RELEASE_URL_PATH, build_version[0]) else: # Build_number specific url main_version = ".".join(build_version[0].split(".")[:2]) # Reference: builds/latestbuilds/couchbase-server/trinity/1000 url_path = "http://{}/{}/{}/{}" \ .format(BuildUrl.CB_DOWNLOAD_SERVER, BuildUrl.CB_LATESTBUILDS_URL_PATH, BuildUrl.CB_VERSION_NAME[main_version], build_version[1]) build_version = "-".join(build_version) file_prefix = "{}-{}" \ .format(BuildUrl.CB_BUILD_FILE_PREFIX, self.node_install_info.edition) if os_type in install_util.constants.build.X86: # couchbase-server-enterprise-7.1.5-linux.x86_64.rpm # couchbase-server-enterprise-debuginfo-7.1.5-linux.x86_64.rpm if is_debuginfo_build: file_prefix = "{}-{}".format(file_prefix, "debuginfo") os_type = "linux" if float(build_version[:3]) < 7.1: os_type = self.node_install_info.os_type file_name = "{}-{}-{}.{}.{}" \ .format(file_prefix, build_version, os_type, node_info.architecture_type, node_info.deliverable_type) elif os_type in install_util.constants.build.LINUX_AMD64: # TODO: Check install_utils.py L1127 redundant code presence # couchbase-server-enterprise_7.1.5-linux_amd64.deb # couchbase-server-enterprise-dbg_7.1.5-linux_amd64.deb if is_debuginfo_build: file_prefix = "{}-{}".format(file_prefix, "dbg") os_type = "linux" if float(build_version[:3]) < 7.1: os_type = self.node_install_info.os_type file_name = "{}_{}-{}_{}.{}" \ .format(file_prefix, build_version, os_type, "amd64", node_info.deliverable_type) elif os_type in install_util.constants.build.WINDOWS_SERVER: # couchbase-server-enterprise_6.5.0-4557-windows_amd64.msi if "windows" in self.node_install_info.os_type: self.node_install_info.deliverable_type = "msi" file_name = "{}_{}-{}_{}.{}" \ .format(file_prefix, build_version, self.node_install_info.os_type, "amd64", node_info.deliverable_type) elif os_type in install_util.constants.build.MACOS_VERSIONS: # couchbase-server-enterprise_6.5.0-4557-macos_x86_64.dmg file_name = "{}_{}-{}_{}-{}.{}" \ .format(file_prefix, build_version, "macos", node_info.architecture_type, "unnotarized", node_info.deliverable_type) else: self.result = False self.log.critical("Unsupported os_type '{}' for build_url" .format(self.node_install_info.os_type)) return "{}/{}".format(url_path, file_name)
Constructs the build url for the given node. This url is used to download the installation package.
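For illustration, with version "7.1.5" (a release build, no build number) on an x86_64 RPM platform, the pieces above combine into a URL of the following form, where the server and path constants come from BuildUrl and are shown as placeholders, and CB_BUILD_FILE_PREFIX is assumed to be "couchbase-server" as the inline comments suggest: http://<CB_DOWNLOAD_SERVER>/<CB_RELEASE_URL_PATH>/7.1.5/couchbase-server-enterprise-7.1.5-linux.x86_64.rpm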
generate comment for above
def pause_memcached(self, timesleep=30, delay=0): """ Pauses the memcached process on remote server :param timesleep: time to wait after pause (in seconds) :param delay: time to delay pause of memcached process (in seconds) :return: None """ log.info("*** pause memcached process ***") if delay: time.sleep(delay) if self.nonroot: o, r = self.execute_command("killall -SIGSTOP memcached.bin") else: o, r = self.execute_command("killall -SIGSTOP memcached") self.log_command_output(o, r) log.info("wait %s seconds to make node down." % timesleep) time.sleep(timesleep)
def pause_memcached(self, timesleep=30, delay=0): log.info("*** pause memcached process ***") if delay: time.sleep(delay) if self.nonroot: o, r = self.execute_command("killall -SIGSTOP memcached.bin") else: o, r = self.execute_command("killall -SIGSTOP memcached") self.log_command_output(o, r) log.info("wait %s seconds to make node down." % timesleep) time.sleep(timesleep)
generate comment for above
def start_indexer(self): """ Start indexer process on remote server :return: None """ o, r = self.execute_command("taskkill /F /T /IM indexer*") self.log_command_output(o, r)
def start_indexer(self): o, r = self.execute_command("taskkill /F /T /IM indexer*") self.log_command_output(o, r)
generate code for the following
def delete_network_rule(self): """ Delete all traffic control rules set for eth0 :return: None """ o, r = self.execute_command("tc qdisc del dev eth0 root") self.log_command_output(o, r)
Delete all traffic control rules set for eth0
give python code to
def set_environment_variable(self, name, value): """Request an interactive shell session, export custom variable and restart Couchbase server. Shell session is necessary because basic SSH client is stateless. :param name: environment variable :param value: environment variable value :return: None """ shell = self._ssh_client.invoke_shell() shell.send('export {0}={1}\n'.format(name, value)) if self.info.distribution_version.lower() in SYSTEMD_SERVER: """from watson, systemd is used in centos 7 """ log.info("this node is centos 7.x") shell.send("systemctl restart couchbase-server.service\n") else: shell.send('/etc/init.d/couchbase-server restart\n') shell.close()
Request an interactive shell session, export custom variable and restart Couchbase server. Shell session is necessary because basic SSH client is stateless.
give a code to
def kill_eventing_process(self, name): """ Kill eventing process on remote server :param name: name of eventing process :return: None """ o, r = self.execute_command(command="killall -9 {0}".format(name)) self.log_command_output(o, r)
Kill eventing process on remote server
generate comment for above
def get_collection_config(collection, config): """ Get collection configuration :param collection: collection name to get configuration for :param config: config :return: dict of collection information """ collection_config = {} for section in config.sections(): if section == collection: options = config.options(section) for option in options: if option == 'bucket': collection_config['bucket'] = config.get(section, option) if option == 'scope': collection_config['scope'] = config.get(section, option) if option.lower() == 'maxttl': collection_config['maxTTL'] = config.get(section, option) return collection_config
def get_collection_config(collection, config): collection_config = {} for section in config.sections(): if section == collection: options = config.options(section) for option in options: if option == 'bucket': collection_config['bucket'] = config.get(section, option) if option == 'scope': collection_config['scope'] = config.get(section, option) if option.lower() == 'maxttl': collection_config['maxTTL'] = config.get(section, option) return collection_config
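For context, a minimal self-contained sketch of the kind of .ini fragment this function reads (the option names match the code above; the section name and values are made up for illustration):

import configparser

ini_text = """
[collection_1]
bucket:default
scope:scope_1
maxTTL:120
"""

config = configparser.ConfigParser(interpolation=None)
config.read_string(ini_text)
# get_collection_config("collection_1", config) would return
# {'bucket': 'default', 'scope': 'scope_1', 'maxTTL': '120'}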
give python code to
def file_starts_with(self, remotepath, pattern): """ Check if file starting with this pattern is present in remote machine. :param remotepath: remote directory to search for the file :param pattern: pattern to check against :return: list of matching file paths (empty list if no file matches) """ sftp = self._ssh_client.open_sftp() files_matched = [] try: file_names = sftp.listdir(remotepath) for name in file_names: if name.startswith(pattern): files_matched.append("{0}/{1}".format(remotepath, name)) except IOError: # ignore this error pass sftp.close() if len(files_matched) > 0: log.info("found these files : {0}".format(files_matched)) return files_matched
Check if file starting with this pattern is present in remote machine.
Code the following:
def enable_disk_readonly(self, disk_location): """ Enables read-only mode for the specified disk location. Override method for Windows :param disk_location: disk location to enable read-only mode. :return: None """ raise NotImplementedError
Enables read-only mode for the specified disk location. Override method for Windows
generate python code for
def pause_beam(self): """ Pauses the beam.smp process on remote server :return: None """ o, r = self.execute_command("killall -SIGSTOP beam.smp") self.log_command_output(o, r)
Pauses the beam.smp process on remote server
generate comment for above
def change_env_variables(self, dict): """ Change environment variables mentioned in dictionary and restart Couchbase server :param dict: key value pair of environment variables and their values to change to :return: None """ prefix = "\\n " shell = self._ssh_client.invoke_shell() init_file = "couchbase-server" file_path = "/opt/couchbase/bin/" environmentVariables = "" backupfile = file_path + init_file + ".bak" sourceFile = file_path + init_file o, r = self.execute_command("cp " + sourceFile + " " + backupfile) self.log_command_output(o, r) command = "sed -i 's/{0}/{0}".format("ulimit -l unlimited") for key in list(dict.keys()): o, r = self.execute_command( "sed -i 's/{1}.*//' {0}".format(sourceFile, key)) self.log_command_output(o, r) o, r = self.execute_command( "sed -i 's/export ERL_FULLSWEEP_AFTER/export " "ERL_FULLSWEEP_AFTER\\n{1}={2}\\nexport {1}/' {0}" .format(sourceFile, key, dict[key])) self.log_command_output(o, r) for key in list(dict.keys()): environmentVariables += prefix \ + 'export {0}={1}'.format(key, dict[key]) command += environmentVariables + "/'" + " " + sourceFile o, r = self.execute_command(command) self.log_command_output(o, r) # Restart Couchbase o, r = self.execute_command("service couchbase-server restart") self.log_command_output(o, r) shell.close()
def change_env_variables(self, dict): prefix = "\\n " shell = self._ssh_client.invoke_shell() init_file = "couchbase-server" file_path = "/opt/couchbase/bin/" environmentVariables = "" backupfile = file_path + init_file + ".bak" sourceFile = file_path + init_file o, r = self.execute_command("cp " + sourceFile + " " + backupfile) self.log_command_output(o, r) command = "sed -i 's/{0}/{0}".format("ulimit -l unlimited") for key in list(dict.keys()): o, r = self.execute_command( "sed -i 's/{1}.*//' {0}".format(sourceFile, key)) self.log_command_output(o, r) o, r = self.execute_command( "sed -i 's/export ERL_FULLSWEEP_AFTER/export " "ERL_FULLSWEEP_AFTER\\n{1}={2}\\nexport {1}/' {0}" .format(sourceFile, key, dict[key])) self.log_command_output(o, r) for key in list(dict.keys()): environmentVariables += prefix \ + 'export {0}={1}'.format(key, dict[key]) command += environmentVariables + "/'" + " " + sourceFile o, r = self.execute_command(command) self.log_command_output(o, r) # Restart Couchbase o, r = self.execute_command("service couchbase-server restart") self.log_command_output(o, r) shell.close()
generate comment.
def copy_file_remote_to_local(self, rem_path, des_path): """ Copy file from remote server to local :param rem_path: remote path of the file to be copied :param des_path: destination path of the file to be copied :return: True if the file was successfully copied else False """ result = True sftp = self._ssh_client.open_sftp() try: sftp.get(rem_path, des_path) except IOError as e: self.log.error('Can not copy file', e) result = False finally: sftp.close() return result
def copy_file_remote_to_local(self, rem_path, des_path): result = True sftp = self._ssh_client.open_sftp() try: sftp.get(rem_path, des_path) except IOError as e: self.log.error('Can not copy file', e) result = False finally: sftp.close() return result
generate comment for following function:
def get_info_for_server(server): """ Get info about given server, if available :param server: server to get the information of :return: information of the server if available else None """ if server.ip in RemoteMachineShellConnection.__info_dict: return RemoteMachineShellConnection.__info_dict[server.ip]
def get_info_for_server(server): if server.ip in RemoteMachineShellConnection.__info_dict: return RemoteMachineShellConnection.__info_dict[server.ip]
give python code to
def cleanup_data_config(self, data_path): """ Cleans up the data config directory and its contents Override method for Windows :param data_path: path to data config directory :return: None """ if "c:/Program Files" in data_path: data_path = data_path.replace("c:/Program Files", "/cygdrive/c/Program\ Files") o, r = self.execute_command("rm -rf ""{0}""/*".format(data_path)) self.log_command_output(o, r) o, r = self.execute_command("rm -rf ""{0}""/*" \ .format( data_path.replace("data", "config"))) self.log_command_output(o, r)
Cleans up the data config directory and its contents Override method for Windows
Code the following:
def create_file(self, remote_path, file_data): """ Create a remote file from input string :param remote_path: remote path of the file to be created :param file_data: file data to be written to the file :return: None """ output, error = self.execute_command("echo '{0}' > {1}".format(file_data, remote_path))
Create a remote file from input string
generate doc string for following function:
def windows_process_utils(self, ps_name_or_id, cmd_file_name, option=""): """ Windows process utility. This adds firewall rules to Windows system. If a previously suspended process is detected, it continues with the process instead. :param ps_name_or_id: process name or process id :param cmd_file_name: file containing firewall rules :param option: arguments to pass to command file :return: True if firewall rules were set else False """ success = False files_path = "cygdrive/c/utils/suspend/" # check to see if suspend files exist in server file_existed = self.file_exists(files_path, cmd_file_name) if file_existed: command = "{0}{1} {2} {3}".format(files_path, cmd_file_name, option, ps_name_or_id) o, r = self.execute_command(command) if not r: success = True self.log_command_output(o, r) self.sleep(30, "Wait for windows to execute completely") else: log.error( "Command didn't run successfully. Error: {0}".format(r)) else: o, r = self.execute_command( "netsh advfirewall firewall add rule name=\"block erl.exe in\" dir=in action=block program=\"%ProgramFiles%\Couchbase\Server\\bin\erl.exe\"") if not r: success = True self.log_command_output(o, r) o, r = self.execute_command( "netsh advfirewall firewall add rule name=\"block erl.exe out\" dir=out action=block program=\"%ProgramFiles%\Couchbase\Server\\bin\erl.exe\"") if not r: success = True self.log_command_output(o, r) return success
def windows_process_utils(self, ps_name_or_id, cmd_file_name, option=""): success = False files_path = "cygdrive/c/utils/suspend/" # check to see if suspend files exist in server file_existed = self.file_exists(files_path, cmd_file_name) if file_existed: command = "{0}{1} {2} {3}".format(files_path, cmd_file_name, option, ps_name_or_id) o, r = self.execute_command(command) if not r: success = True self.log_command_output(o, r) self.sleep(30, "Wait for windows to execute completely") else: log.error( "Command didn't run successfully. Error: {0}".format(r)) else: o, r = self.execute_command( "netsh advfirewall firewall add rule name=\"block erl.exe in\" dir=in action=block program=\"%ProgramFiles%\Couchbase\Server\\bin\erl.exe\"") if not r: success = True self.log_command_output(o, r) o, r = self.execute_command( "netsh advfirewall firewall add rule name=\"block erl.exe out\" dir=out action=block program=\"%ProgramFiles%\Couchbase\Server\\bin\erl.exe\"") if not r: success = True self.log_command_output(o, r) return success
generate python code for the following
def get_ip_address(self): """ Get ip address of a remote server :return: ip address of remote server """ ip_type = "inet \K[\d.]" ipv6_server = False if "ip6" in self.ip or self.ip.startswith("["): ipv6_server = True ip_type = "inet6 \K[0-9a-zA-Z:]" cmd = "ifconfig | grep -Po '{0}+'".format(ip_type) o, r = self.execute_command_raw(cmd) if ipv6_server: for x in range(len(o)): o[x] = "[{0}]".format(o[x]) return o
Get ip address of a remote server
generate comment for above
def __init__(self, test_server, info=None): """ Creates a new shell connection for Unix based platforms :param test_server: test server to create the shell connection for :param info: None """ super(Unix, self).__init__(test_server) self.nonroot = False self.info = info
def __init__(self, test_server, info=None): super(Unix, self).__init__(test_server) self.nonroot = False self.info = info
generate python code for the above
def stop_membase(self): """ Override method """ raise NotImplementedError
Override method
generate comment for following function:
def stop_couchbase(self, num_retries=5, poll_interval=10): """ Stop couchbase service on remote server :param num_retries: Number of times to retry stopping couchbase :param poll_interval: interval between each retry attempt :return: None """ o, r = self.execute_command("net stop couchbaseserver") self.log_command_output(o, r) is_server_stopped = False retries = num_retries while not is_server_stopped and retries > 0: self.sleep(poll_interval, "Wait to stop service completely") is_server_stopped = self.__check_if_cb_service_stopped("couchbaseserver") retries -= 1
def stop_couchbase(self, num_retries=5, poll_interval=10): o, r = self.execute_command("net stop couchbaseserver") self.log_command_output(o, r) is_server_stopped = False retries = num_retries while not is_server_stopped and retries > 0: self.sleep(poll_interval, "Wait to stop service completely") is_server_stopped = self.__check_if_cb_service_stopped("couchbaseserver") retries -= 1
generate doc string for following function:
def get_collection_config(collection, config): """ Get collection configuration :param collection: collection name to get configuration for :param config: config :return: dict of collection information """ collection_config = {} for section in config.sections(): if section == collection: options = config.options(section) for option in options: if option == 'bucket': collection_config['bucket'] = config.get(section, option) if option == 'scope': collection_config['scope'] = config.get(section, option) if option.lower() == 'maxttl': collection_config['maxTTL'] = config.get(section, option) return collection_config
def get_collection_config(collection, config): collection_config = {} for section in config.sections(): if section == collection: options = config.options(section) for option in options: if option == 'bucket': collection_config['bucket'] = config.get(section, option) if option == 'scope': collection_config['scope'] = config.get(section, option) if option.lower() == 'maxttl': collection_config['maxTTL'] = config.get(section, option) return collection_config
def populate_cb_server_versions(self): """ Update the BuildUrl with all versions of Couchbase Server currently available for testing. \n This method gets the current versions of Couchbase Servers available from the CB server manifest and updates the missing versions in BuildUrl constants accordingly. :return: None """ cb_server_manifests_url = "https://github.com/couchbase" \ "/manifest/tree/master/couchbase-server/" raw_content_url = "https://raw.githubusercontent.com/couchbase" \ "/manifest/master/couchbase-server/" version_pattern = r'<annotation name="VERSION" value="([0-9\.]+)"' version_pattern = re.compile(version_pattern) payload_pattern = r'>({"payload".*})<' payload_pattern = re.compile(payload_pattern) data = urlopen(cb_server_manifests_url).read() data = json.loads(re.findall(payload_pattern, data.decode())[0]) for item in data["payload"]["tree"]["items"]: if item["contentType"] == "file" and item["name"].endswith(".xml"): rel_name = item["name"].replace(".xml", "") data = urlopen(raw_content_url + item["name"]).read() rel_ver = re.findall(version_pattern, data.decode())[0][:3] if rel_ver not in BuildUrl.CB_VERSION_NAME: self.log.info("Adding missing version {}={}" .format(rel_ver, rel_name)) BuildUrl.CB_VERSION_NAME[rel_ver] = rel_name
def populate_cb_server_versions(self): cb_server_manifests_url = "https://github.com/couchbase" \ "/manifest/tree/master/couchbase-server/" raw_content_url = "https://raw.githubusercontent.com/couchbase" \ "/manifest/master/couchbase-server/" version_pattern = r'<annotation name="VERSION" value="([0-9\.]+)"' version_pattern = re.compile(version_pattern) payload_pattern = r'>({"payload".*})<' payload_pattern = re.compile(payload_pattern) data = urlopen(cb_server_manifests_url).read() data = json.loads(re.findall(payload_pattern, data.decode())[0]) for item in data["payload"]["tree"]["items"]: if item["contentType"] == "file" and item["name"].endswith(".xml"): rel_name = item["name"].replace(".xml", "") data = urlopen(raw_content_url + item["name"]).read() rel_ver = re.findall(version_pattern, data.decode())[0][:3] if rel_ver not in BuildUrl.CB_VERSION_NAME: self.log.info("Adding missing version {}={}" .format(rel_ver, rel_name)) BuildUrl.CB_VERSION_NAME[rel_ver] = rel_name
generate python code for the following
def create_new_partition(self, location, size=None): """ Create a new partition at the location specified and of the size specified :param location: Location to create the new partition at. :param size: Size of the partition in MB :return: None """ command = "umount -l {0}".format(location) output, error = self.execute_command(command) command = "rm -rf {0}".format(location) output, error = self.execute_command(command) command = "rm -rf /usr/disk-img/disk-quota.ext3" output, error = self.execute_command(command) command = "mkdir -p {0}".format(location) output, error = self.execute_command(command) if size: count = (size * 1024 * 1024) // 512 else: count = (5 * 1024 * 1024 * 1024) // 512 command = "mkdir -p /usr/disk-img" output, error = self.execute_command(command) command = "dd if=/dev/zero of=/usr/disk-img/disk-quota.ext3 count={0}".format(count) output, error = self.execute_command(command) command = "/sbin/mkfs -t ext3 -q /usr/disk-img/disk-quota.ext3 -F" output, error = self.execute_command(command) command = "mount -o loop,rw,usrquota,grpquota /usr/disk-img/disk-quota.ext3 {0}".format(location) output, error = self.execute_command(command) command = "chown 'couchbase' {0}".format(location) output, error = self.execute_command(command) command = "chmod 777 {0}".format(location) output, error = self.execute_command(command)
Create a new partition at the location specified and of the size specified
give a code to
def ram_stress(self, stop_time): """ Applies memory stress for a specified duration with 3 workers each of size 2.5G. :param stop_time: duration to apply the memory stress for. :return: None """ o, r = self.execute_command("stress --vm 3 --vm-bytes 2.5G --timeout {}".format(stop_time)) self.log_command_output(o, r)
Applies memory stress for a specified duration with 3 workers each of size 2.5G.
generate python code for the following
def stop_memcached(self): """ Stop memcached process on remote server :return: None """ o, r = self.execute_command("taskkill /F /T /IM memcached*") self.log_command_output(o, r, debug=False)
Stop memcached process on remote server
generate python code for the following
from shell_util.shell_conn import ShellConnection def get_info_for_server(server): """ Get info about given server, if available :param server: server to get the information of :return: information of the server if available else None """ if server.ip in RemoteMachineShellConnection.__info_dict: return RemoteMachineShellConnection.__info_dict[server.ip]
Get info about given server, if available
generate python code for the above
def start_memcached(self): """ Start memcached process on remote server :return: None """ o, r = self.execute_command("kill -SIGCONT $(pgrep memcached)") self.log_command_output(o, r, debug=False)
Start memcached process on remote server
generate comment for above
def __init__(self, logger): """ Creates an instance of InstallHelper object :param logger: logger object """ self.log = logger
def __init__(self, logger): self.log = logger
generate code for the above:
def cleanup_all_configuration(self, data_path): """ Deletes the contents of the parent folder that holds the data and config directories. Override method for Windows :param data_path: The path key from the /nodes/self end-point which looks something like "/opt/couchbase/var/lib/couchbase/data" on Linux or "c:/Program Files/Couchbase/Server/var/lib/couchbase/data" on Windows. :return: None """ path = data_path.replace("/data", "") if "c:/Program Files" in path: path = path.replace("c:/Program Files", "/cygdrive/c/Program\ Files") o, r = self.execute_command(f"rm -rf {path}/*") self.log_command_output(o, r)
Deletes the contents of the parent folder that holds the data and config directories. Override method for Windows
Code the following:
def unmount_partition(self, location): """ Unmount the partition at the specified location. :param location: Location of the partition which has to be unmounted :return: Output and error message from the umount command """ command = "umount -l {0}; df -Th".format(location) output, error = self.execute_command(command) return output, error
Unmount the partition at the specified location.
generate python code for the following
def check_build_url_status(self): """ Checks the build url status. Checks if the url is reachable and valid. :return: None """ self.check_url_status(self.node_install_info.build_url)
Checks the build url status. Checks if the url is reachable and valid.
give a code to
import install_util.constants from install_util.constants.build import BuildUrl from shell_util.remote_connection import RemoteMachineShellConnection def __construct_build_url(self, is_debuginfo_build=False): """ Constructs the build url for the given node. This url is used to download the installation package. :param is_debuginfo_build: gets debug_info build url if True :return: build url """ file_name = None build_version = self.node_install_info.version.split("-") os_type = self.node_install_info.os_type node_info = RemoteMachineShellConnection.get_info_for_server( self.node_install_info.server) # Decide between release / regular build URL path if len(build_version) == 1: # Release build url url_path = "http://{}/{}/{}" \ .format(BuildUrl.CB_DOWNLOAD_SERVER, BuildUrl.CB_RELEASE_URL_PATH, build_version[0]) else: # Build_number specific url main_version = ".".join(build_version[0].split(".")[:2]) # Reference: builds/latestbuilds/couchbase-server/trinity/1000 url_path = "http://{}/{}/{}/{}" \ .format(BuildUrl.CB_DOWNLOAD_SERVER, BuildUrl.CB_LATESTBUILDS_URL_PATH, BuildUrl.CB_VERSION_NAME[main_version], build_version[1]) build_version = "-".join(build_version) file_prefix = "{}-{}" \ .format(BuildUrl.CB_BUILD_FILE_PREFIX, self.node_install_info.edition) if os_type in install_util.constants.build.X86: # couchbase-server-enterprise-7.1.5-linux.x86_64.rpm # couchbase-server-enterprise-debuginfo-7.1.5-linux.x86_64.rpm if is_debuginfo_build: file_prefix = "{}-{}".format(file_prefix, "debuginfo") os_type = "linux" if float(build_version[:3]) < 7.1: os_type = self.node_install_info.os_type file_name = "{}-{}-{}.{}.{}" \ .format(file_prefix, build_version, os_type, node_info.architecture_type, node_info.deliverable_type) elif os_type in install_util.constants.build.LINUX_AMD64: # TODO: Check install_utils.py L1127 redundant code presence # couchbase-server-enterprise_7.1.5-linux_amd64.deb # couchbase-server-enterprise-dbg_7.1.5-linux_amd64.deb if is_debuginfo_build: file_prefix = "{}-{}".format(file_prefix, "dbg") os_type = "linux" if float(build_version[:3]) < 7.1: os_type = self.node_install_info.os_type file_name = "{}_{}-{}_{}.{}" \ .format(file_prefix, build_version, os_type, "amd64", node_info.deliverable_type) elif os_type in install_util.constants.build.WINDOWS_SERVER: # couchbase-server-enterprise_6.5.0-4557-windows_amd64.msi if "windows" in self.node_install_info.os_type: self.node_install_info.deliverable_type = "msi" file_name = "{}_{}-{}_{}.{}" \ .format(file_prefix, build_version, self.node_install_info.os_type, "amd64", node_info.deliverable_type) elif os_type in install_util.constants.build.MACOS_VERSIONS: # couchbase-server-enterprise_6.5.0-4557-macos_x86_64.dmg file_name = "{}_{}-{}_{}-{}.{}" \ .format(file_prefix, build_version, "macos", node_info.architecture_type, "unnotarized", node_info.deliverable_type) else: self.result = False self.log.critical("Unsupported os_type '{}' for build_url" .format(self.node_install_info.os_type)) return "{}/{}".format(url_path, file_name)
Constructs the build url for the given node. This url is used to download the installation package.
generate code for the above:
import time def sleep(seconds, msg=""): """ Sleep for the specified number of seconds. Optionally log the given message :param seconds: number of seconds to sleep for :param msg: optional message to log :return: None """ if msg: log.info(msg) time.sleep(seconds)
Sleep for the specified number of seconds. Optionally log the given message
generate python code for
def terminate_processes(self, info, p_list): """ Terminate a list of processes on remote server :param info: None :param p_list: List of processes to terminate :return: None """ for process in p_list: # set debug=False if does not want to show log self.execute_command("taskkill /F /T /IM {0}" .format(process), debug=False)
Terminate a list of processes on remote server
generate python code for
def start_couchbase(self): """ Starts couchbase on remote server :return: None """ retry = 0 running = self.is_couchbase_running() while not running and retry < 3: self.log.info("Starting couchbase server") o, r = self.execute_command("open /Applications/Couchbase\ Server.app") self.log_command_output(o, r) running = self.is_couchbase_running() retry = retry + 1 if not running and retry >= 3: self.log.critical("%s - Server not started even after 3 retries" % self.info.ip) return False return True
Starts couchbase on remote server
generate doc string for following function:
def disable_disk_readonly(self, disk_location): """ Disables read-only mode for the specified disk location. Override method for Windows :param disk_location: disk location to disable read-only mode. :return: None """ raise NotImplementedError
def disable_disk_readonly(self, disk_location): raise NotImplementedError
generate comment for following function:
def connect_with_user(self, user="root"): """ Connect to the remote server with given user Override method since this is not required for Unix :param user: user to connect to remote server with :return: None """ return
def connect_with_user(self, user="root"): return
generate python code for the above
def stop_server(self): """ Stops the Couchbase server on the remote server. The method stops the server from the non-default location when running as a non-root user, otherwise from the default location. :return: None """ o, r = self.execute_command("net stop couchbaseserver") self.log_command_output(o, r)
Stops the Couchbase server on the remote server. The method stops the server from the non-default location when running as a non-root user, otherwise from the default location.
generate code for the above:
import re import configparser def parse_from_file(file): """ Parse the test inputs from file :param file: path to file to parse :return: TestInput object """ count = 0 start = 0 end = 0 servers = list() ips = list() input = TestInput() config = configparser.ConfigParser(interpolation=None) config.read(file) sections = config.sections() global_properties = dict() cluster_ips = list() clusters = dict() client_ips = list() input.cbbackupmgr = dict() for section in sections: result = re.search('^cluster', section) if section == 'servers': ips = TestInputParser.get_server_ips(config, section) elif section == 'clients': client_ips = TestInputParser.get_server_ips(config, section) elif section == 'membase': input.membase_settings = TestInputParser.get_membase_settings(config, section) elif section == 'global': #get global stuff and override for those unset for option in config.options(section): global_properties[option] = config.get(section, option) elif section == 'elastic': input.elastic = TestInputParser.get_elastic_config(config, section, global_properties) elif section == 'bkrs_client': input.bkrs_client = TestInputParser.get_bkrs_client_config(config, section, global_properties, input.membase_settings) elif section == 'cbbackupmgr': input.cbbackupmgr = TestInputParser.get_cbbackupmgr_config(config, section) elif result is not None: cluster_list = TestInputParser.get_server_ips(config, section) cluster_ips.extend(cluster_list) clusters[count] = len(cluster_list) count += 1 # Setup 'cluster#' tag as dict # input.clusters -> {0: [ip:10.1.6.210 ssh_username:root, ip:10.1.6.211 ssh_username:root]} for cluster_ip in cluster_ips: servers.append(TestInputParser.get_server(cluster_ip, config)) servers = TestInputParser.get_server_options(servers, input.membase_settings, global_properties) for key, value in list(clusters.items()): end += value input.clusters[key] = servers[start:end] start += value # Setting up 'servers' tag servers = [] for ip in ips: servers.append(TestInputParser.get_server(ip, config)) input.servers = TestInputParser.get_server_options(servers, input.membase_settings, global_properties) if 'cbbackupmgr' not in sections: input.cbbackupmgr["name"] = "local_bkrs" if 'bkrs_client' not in sections: input.bkrs_client = None # Setting up 'clients' tag input.clients = client_ips return input
Parse the test inputs from file
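A minimal usage sketch for the parser above (the config path is hypothetical; assumes parse_from_file and the TestInputParser helpers it calls are importable from this module):

test_input = parse_from_file("conf/testexec.ini")  # hypothetical .ini path
for server in test_input.servers:
    print(server.ip, server.services)
for cluster_id, nodes in test_input.clusters.items():
    print("cluster", cluster_id, "has", len(nodes), "nodes")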
give a code to
def reboot_node(self): """ Reboot the remote server :return: None """ o, r = self.execute_command("shutdown -r -f -t 0") self.log_command_output(o, r)
Reboot the remote server
Code the following:
def reboot_node(self): """ Reboot the remote server :return: None """ o, r = self.execute_command("shutdown -r -f -t 0") self.log_command_output(o, r)
Reboot the remote server
def __init__(self, test_server, info=None): """ Creates a new shell connection for Linux based platforms :param test_server: test server to create the shell connection for :param info: None """ super(Linux, self).__init__(test_server) self.nonroot = False self.use_sudo = False self.info = info
def __init__(self, test_server, info=None): super(Linux, self).__init__(test_server) self.nonroot = False self.use_sudo = False self.info = info
generate doc string for following function:
def stop_indexer(self): """ Stop indexer process on remote server :return: None """ o, r = self.execute_command("kill -SIGSTOP $(pgrep indexer)") self.log_command_output(o, r, debug=False)
def stop_indexer(self): o, r = self.execute_command("kill -SIGSTOP $(pgrep indexer)") self.log_command_output(o, r, debug=False)
generate python code for the above
def reconnect_if_inactive(self): """ If the SSH channel is inactive, retry the connection :return: None """ tp = self._ssh_client.get_transport() if tp and not tp.active: log.warning("SSH connection to {} inactive, reconnecting...".format(self.ip)) self.ssh_connect_with_retries(self.ip, self.username, self.password, self.ssh_key)
If the SSH channel is inactive, retry the connection
def wait_till_file_added(self, remotepath, filename, timeout_in_seconds=180): """ Wait until the remote file in remote path is created :param remotepath: remote path of the file to be created :param filename: name of the file to be created :param timeout_in_seconds: wait time in seconds until the file is created :return: True if the file is created within timeout else False """ end_time = time.time() + float(timeout_in_seconds) added = False log.info("file {0} checked at {1}".format(filename, remotepath)) while time.time() < end_time and not added: # get the process list exists = self.file_exists(remotepath, filename) if not exists: log.error('at {2} file {1} does not exist' \ .format(remotepath, filename, self.ip)) time.sleep(2) else: log.info('at {2} FILE {1} EXISTS!' \ .format(remotepath, filename, self.ip)) added = True return added
Wait until the remote file in remote path is created
generate python code for the following
def terminate_processes(self, info, p_list): """ Terminate a list of processes on remote server :param info: None :param p_list: List of processes to terminate :return: None """ for process in p_list: self.terminate_process(info, process, force=True)
Terminate a list of processes on remote server
give python code to
import os import paramiko import signal from time import sleep def ssh_connect_with_retries(self, ip, ssh_username, ssh_password, ssh_key, exit_on_failure=False, max_attempts_connect=5, backoff_time=10): """ Connect to the remote server with given user and password, with exponential backoff delay :param ip: IP address of the remote server to connect to :param ssh_username: user to connect to remote server with :param ssh_password: password to connect to remote server with :param ssh_key: ssh key to connect to remote server with :param exit_on_failure: exit the function on error if True :param max_attempts_connect: max number of attempts before giving up :param backoff_time: time to wait between attempts :return: None """ attempt = 0 is_ssh_ok = False while not is_ssh_ok and attempt < max_attempts_connect: attempt += 1 log.info("SSH Connecting to {} with username:{}, attempt#{} of {}" .format(ip, ssh_username, attempt, max_attempts_connect)) try: if self.remote and ssh_key == '': self._ssh_client.connect( hostname=ip.replace('[', '').replace(']', ''), username=ssh_username, password=ssh_password, look_for_keys=False) elif self.remote: self._ssh_client.connect( hostname=ip.replace('[', '').replace(']', ''), username=ssh_username, key_filename=ssh_key, look_for_keys=False) is_ssh_ok = True except paramiko.BadHostKeyException as bhke: log.error("Can't establish SSH (Invalid host key) to {}: {}" .format(ip, bhke)) raise Exception(bhke) except Exception as e: log.error("Can't establish SSH (unknown reason) to {}: {}" .format(ip, e, ssh_username, ssh_password)) if attempt < max_attempts_connect: log.info("Retrying with back off delay for {} secs." .format(backoff_time)) self.sleep(backoff_time) backoff_time *= 2 if not is_ssh_ok: error_msg = "-->No SSH connectivity to {} even after {} times!\n".format(self.ip, attempt) log.error(error_msg) if exit_on_failure: log.error("Exit on failure: killing process") os.kill(os.getpid(), signal.SIGKILL) else: log.error("No exit on failure, raise exception") raise Exception(error_msg) else: log.info("SSH Connected to {} as {}".format(ip, ssh_username))
Connect to the remote server with given user and password, with exponential backoff delay
generate code for the following
def get_disk_info(self, win_info=None, mac=False): """ Get disk info of a remote server :param win_info: windows info :param mac: get disk info from macOS if True :return: disk info of remote server """ if win_info: if 'Total Physical Memory' not in win_info: win_info = self.create_windows_info() o = "Total Physical Memory =" + win_info['Total Physical Memory'] + '\n' o += "Available Physical Memory =" + win_info['Available Physical Memory'] elif mac: o, r = self.execute_command_raw('df -hl', debug=False) else: o, r = self.execute_command_raw('df -Thl', debug=False) if o: return o
Get disk info of a remote server
give a code to
def __init__(self): """ Creates an instance of the TestInputBuild class """ self.version = '' self.url = ''
Creates an instance of the TestInputBuild class
generate code for the above:
def get_download_dir(node_installer): """ Gets the download directory for the given node. Returns non-root download directory in case of nonroot installation. Else returns the default download directory. :param node_installer: node installer object :return: download directory for given node """ if node_installer.shell.nonroot: return node_installer.nonroot_download_dir return node_installer.download_dir
Gets the download directory for the given node. Returns non-root download directory in case of nonroot installation. Else returns the default download directory.
generate comment.
def copy_file_local_to_remote(self, src_path, des_path): """ Copy file from local to remote server :param src_path: source path of the file to be copied :param des_path: destination path of the file to be copied :return: True if the file was successfully copied else False """ result = True sftp = self._ssh_client.open_sftp() try: sftp.put(src_path, des_path) except IOError: self.log.error('Can not copy file') result = False finally: sftp.close() return result
def copy_file_local_to_remote(self, src_path, des_path): result = True sftp = self._ssh_client.open_sftp() try: sftp.put(src_path, des_path) except IOError: self.log.error('Can not copy file') result = False finally: sftp.close() return result
generate python code for
def get_cbbackupmgr_config(config, section): """ Get CB backup manager configuration :param config: config :param section: section to get configuration from :return: dict of configuration options """ options = {} for option in config.options(section): options[option] = config.get(section, option) return options
Get CB backup manager configuration
Code the following:
def enable_packet_loss(self): """ Changes network to lose 25% of packets using traffic control. This is used to simulate a network environment where approximately 25% of packets are lost. :return: None """ o, r = self.execute_command("tc qdisc add dev eth0 root netem loss 25%") self.log_command_output(o, r)
Changes network to lose 25% of packets using traffic control. This is used to simulate a network environment where approximately 25% of packets are lost.
generate doc string for following function:
def get_server(ip, config): """ Get the server information from the config :param ip: ip to get information for :param config: config :return: TestInputServer object """ server = TestInputServer() server.ip = ip server.bkrs_client = False for section in config.sections(): if section == ip: options = config.options(section) for option in options: if option == 'username': server.ssh_username = config.get(section, option) if option == 'password': server.ssh_password = config.get(section, option) if option == 'cli': server.cli_path = config.get(section, option) if option == 'ssh_key': server.ssh_key = config.get(section, option) if option == 'port': server.port = config.get(section, option) if option == 'ip': server.ip = config.get(section, option) if option == 'internal_ip': server.internal_ip = config.get(section, option) if option == 'services': server.services = config.get(section, option) if option == 'n1ql_port': server.n1ql_port = config.get(section, option) if option == 'index_port': server.index_port = config.get(section, option) if option == 'fts_port': server.fts_port = config.get(section, option) if option == 'eventing_port': server.eventing_port = config.get(section, option) if option == 'collections': # collections_map = {collection: {bucket:'', scope:'', param:''}} collections = config.get(section, option).split(',') for collection in collections: server.collections_map[collection] = TestInputParser\ .get_collection_config(collection, config) break #get username #get password #get port #get cli_path #get key return server
def get_server(ip, config): server = TestInputServer() server.ip = ip server.bkrs_client = False for section in config.sections(): if section == ip: options = config.options(section) for option in options: if option == 'username': server.ssh_username = config.get(section, option) if option == 'password': server.ssh_password = config.get(section, option) if option == 'cli': server.cli_path = config.get(section, option) if option == 'ssh_key': server.ssh_key = config.get(section, option) if option == 'port': server.port = config.get(section, option) if option == 'ip': server.ip = config.get(section, option) if option == 'internal_ip': server.internal_ip = config.get(section, option) if option == 'services': server.services = config.get(section, option) if option == 'n1ql_port': server.n1ql_port = config.get(section, option) if option == 'index_port': server.index_port = config.get(section, option) if option == 'fts_port': server.fts_port = config.get(section, option) if option == 'eventing_port': server.eventing_port = config.get(section, option) if option == 'collections': # collections_map = {collection: {bucket:'', scope:'', param:''}} collections = config.get(section, option).split(',') for collection in collections: server.collections_map[collection] = TestInputParser\ .get_collection_config(collection, config) break #get username #get password #get port #get cli_path #get key return server
def unpause_memcached(self, os="linux"): """ Unpauses the memcached process on remote server :param os: os type of remote server :return: None """ log.info("*** unpause memcached process ***") if self.nonroot: o, r = self.execute_command("killall -SIGCONT memcached.bin") else: o, r = self.execute_command("killall -SIGCONT memcached") self.log_command_output(o, r)
Unpauses the memcached process on remote server
generate python code for the above
def get_full_hostname(self): """ Get the full hostname of the remote server Override method for windows :return: full hostname if domain is set, else None """ if not self.info.domain: return None return '%s.%s' % (self.info.hostname[0], self.info.domain)
Get the full hostname of the remote server Override method for windows
generate comment.
def start_memcached(self): """ Start memcached process on remote server :return: None """ o, r = self.execute_command("taskkill /F /T /IM memcached") self.log_command_output(o, r, debug=False)
def start_memcached(self): o, r = self.execute_command("taskkill /F /T /IM memcached") self.log_command_output(o, r, debug=False)
generate code for the above:
def read_remote_file(self, remote_path, filename): """ Reads the content of a remote file specified by the path. :param remote_path: Remote path to read the file from :param filename: Name of the file to read. :return: string content of the file """ if self.file_exists(remote_path, filename): if self.remote: sftp = self._ssh_client.open_sftp() remote_file = sftp.open('{0}/{1}'.format(remote_path, filename)) try: out = remote_file.readlines() finally: remote_file.close() return out else: txt = open('{0}/{1}'.format(remote_path, filename)) return txt.read() return None
Reads the content of a remote file specified by the path.
generate comment:
def kill_memcached(self, num_retries=10, poll_interval=2): """ Kill memcached process on remote server :param num_retries: number of times to retry killing the memcached process :param poll_interval: time to wait before each retry in seconds :return: output and error of command killing memcached process """ o, r = self.execute_command("taskkill /F /T /IM memcached*") self.log_command_output(o, r, debug=False)
def kill_memcached(self, num_retries=10, poll_interval=2): o, r = self.execute_command("taskkill /F /T /IM memcached*") self.log_command_output(o, r, debug=False)
generate code for the above:
def kill_memcached(self, num_retries=10, poll_interval=2): """ Kill memcached process on remote server :param num_retries: number of times to retry killing the memcached process :param poll_interval: time to wait before each retry in seconds :return: output and error of command killing memcached process """ # Changed from kill -9 $(ps aux | grep 'memcached' | awk '{print $2}' # as grep was also returning eventing # process which was using memcached-cert o, r = self.execute_command("kill -9 $(ps aux | pgrep 'memcached')" , debug=True) self.log_command_output(o, r, debug=False) while num_retries > 0: self.sleep(poll_interval, "waiting for memcached to start") out,err=self.execute_command('pgrep memcached') if out and out != "": log.info("memcached pid:{} and err: {}".format(out,err)) break else: num_retries -= 1 return o, r
Kill memcached process on remote server
generate comment for above
def set_environment_variable(self, name, value): """ Request an interactive shell session, export custom variable and restart Couchbase server. Shell session is necessary because basic SSH client is stateless. :param name: environment variable :param value: environment variable value :return: None """ shell = self._ssh_client.invoke_shell() shell.send('net stop CouchbaseServer\n') shell.send('set {0}={1}\n'.format(name, value)) shell.send('net start CouchbaseServer\n') shell.close()
def set_environment_variable(self, name, value): shell = self._ssh_client.invoke_shell() shell.send('net stop CouchbaseServer\n') shell.send('set {0}={1}\n'.format(name, value)) shell.send('net start CouchbaseServer\n') shell.close()
generate comment.
def get_domain(self, win_info=None): """ Get the domain of the remote server. :param win_info: Windows info in case of windows server :return: domain of the remote server if found else None """ if win_info: o, _ = self.execute_batch_command('ipconfig') """ remove empty element """ o = list(filter(None, o)) suffix_dns_row = [ row for row in o if row.find(" Connection-specific DNS Suffix") != -1 and len(row.split(':')[1]) > 1] ret = "" if suffix_dns_row: ret = suffix_dns_row[0].split(':')[1].strip() else: ret = self.execute_command_raw('hostname -d', debug=False) return ret
def get_domain(self, win_info=None): if win_info: o, _ = self.execute_batch_command('ipconfig') """ remove empty element """ o = list(filter(None, o)) suffix_dns_row = [ row for row in o if row.find(" Connection-specific DNS Suffix") != -1 and len(row.split(':')[1]) > 1] ret = "" if suffix_dns_row: ret = suffix_dns_row[0].split(':')[1].strip() else: ret = self.execute_command_raw('hostname -d', debug=False) return ret
give python code to
def kill_goxdcr(self): """ Kill XDCR process on remote server :return: None """ o, r = self.execute_command("killall -9 goxdcr") self.log_command_output(o, r)
Kill XDCR process on remote server
def kill_eventing_process(self, name): """ Kill eventing process on remote server :param name: name of eventing process :return: None """ o, r = self.execute_command(command="killall -9 {0}".format(name)) self.log_command_output(o, r)
Kill eventing process on remote server
give python code to
def check_directory_exists(self, remote_path): """ Check if the directory exists in the remote path :param remote_path: remote path of the directory to be checked :return: True if the directory exists else False """ sftp = self._ssh_client.open_sftp() try: log.info("Checking if the directory {0} exists or not.".format(remote_path)) sftp.stat(remote_path) except IOError as e: log.info(f'Directory at {remote_path} DOES NOT exist.') sftp.close() return False log.info("Directory at {0} exists.".format(remote_path)) sftp.close() return True
Check if the directory exists in the remote path
Code the following:
import time from time import sleep def monitor_process_memory(self, process_name, duration_in_seconds=180, end=False): """ Monitor the given process and return its memory usage sampled at 7-second intervals until the specified duration elapses :param process_name: the name of the process to monitor :param duration_in_seconds: the duration to monitor the process for, in seconds :param end: if True, the monitoring loop is skipped :return: lists of virtual memory sizes (vsz) and resident set sizes (rss) sampled for the process """ end_time = time.time() + float(duration_in_seconds) count = 0 vsz = [] rss = [] while time.time() < end_time and not end: # get the process list process = self.is_process_running(process_name) if process: vsz.append(process.vsz) rss.append(process.rss) else: log.info("{0}:process {1} is not running. Wait for 2 seconds" .format(self.remote_shell.ip, process_name)) count += 1 self.sleep(2) if count == 5: log.error("{0}:process {1} is not running at all." .format(self.remote_shell.ip, process_name)) exit(1) log.info("sleep for 7 seconds before poll new processes") self.sleep(7) return vsz, rss
Monitor the given process and return its memory usage sampled at 7-second intervals until the specified duration elapses
generate comment for above
def copy_files_local_to_remote(self, src_path, des_path): """ Copy multi files from local to remote server :param src_path: source path of the files to be copied :param des_path: destination path of the files to be copied :return: None """ files = os.listdir(src_path) self.log.info("copy files from {0} to {1}".format(src_path, des_path)) # self.execute_batch_command("cp -r {0}/* {1}".format(src_path, des_path)) for file in files: if file.find("wget") != 1: a = "" full_src_path = os.path.join(src_path, file) full_des_path = os.path.join(des_path, file) self.copy_file_local_to_remote(full_src_path, full_des_path)
def copy_files_local_to_remote(self, src_path, des_path): files = os.listdir(src_path) self.log.info("copy files from {0} to {1}".format(src_path, des_path)) # self.execute_batch_command("cp -r {0}/* {1}".format(src_path, des_path)) for file in files: if file.find("wget") != 1: a = "" full_src_path = os.path.join(src_path, file) full_des_path = os.path.join(des_path, file) self.copy_file_local_to_remote(full_src_path, full_des_path)
generate python code for the above
def is_enterprise(self): """ Check whether the installed Couchbase build is the enterprise edition :return: True if the installed Couchbase build is the enterprise edition else False """ enterprise = False runtime_file_path = "" if self.nonroot: if self.file_exists("%s/opt/couchbase/etc/" % self.nr_home_path, "runtime.ini"): runtime_file_path = "%s/opt/couchbase/etc/" % self.nr_home_path else: log.info("couchbase server at {0} may not be installed yet in nonroot server" .format(self.ip)) elif self.file_exists("/opt/couchbase/etc/", "runtime.ini"): runtime_file_path = "/opt/couchbase/etc/" else: log.info("{} - Couchbase server not found".format(self.ip)) output = self.read_remote_file(runtime_file_path, "runtime.ini") for x in output: x = x.strip() if x and "license = enterprise" in x: enterprise = True return enterprise
Check whether the installed Couchbase build is the enterprise edition
generate comment for following function:
def monitor_process(self, process_name, duration_in_seconds=120): """ Monitor the given process till the given duration to check if it crashed or restarted :param process_name: the name of the process to monitor :param duration_in_seconds: the duration to monitor the process till, in seconds :return: True if the process didn't restart or crash else False """ end_time = time.time() + float(duration_in_seconds) last_reported_pid = None while time.time() < end_time: process = self.is_process_running(process_name) if process: if not last_reported_pid: last_reported_pid = process.pid elif not last_reported_pid == process.pid: message = 'Process {0} restarted. PID Old: {1}, New: {2}' log.info(message.format(process_name, last_reported_pid, process.pid)) return False # check if its equal else: # we should have an option to wait for the process # to start during the timeout # process might have crashed log.info( "{0}:process {1} is not running or it might have crashed!" .format(self.ip, process_name)) return False time.sleep(1) # log.info('process {0} is running'.format(process_name)) return True
def monitor_process(self, process_name, duration_in_seconds=120):
    end_time = time.time() + float(duration_in_seconds)
    last_reported_pid = None
    while time.time() < end_time:
        process = self.is_process_running(process_name)
        if process:
            if not last_reported_pid:
                last_reported_pid = process.pid
            elif not last_reported_pid == process.pid:
                message = 'Process {0} restarted. PID Old: {1}, New: {2}'
                log.info(message.format(process_name, last_reported_pid,
                                        process.pid))
                return False
                # check if its equal
        else:
            # we should have an option to wait for the process
            # to start during the timeout
            # process might have crashed
            log.info("{0}:process {1} is not running or it might have crashed!"
                     .format(self.ip, process_name))
            return False
        time.sleep(1)
        # log.info('process {0} is running'.format(process_name))
    return True
give python code to
def unpause_memcached(self, os="linux"):
    """
    Unpauses the memcached process on remote server
    :param os: os type of remote server
    :return: None
    """
    log.info("*** unpause memcached process ***")
    if self.nonroot:
        o, r = self.execute_command("killall -SIGCONT memcached.bin")
    else:
        o, r = self.execute_command("killall -SIGCONT memcached")
    self.log_command_output(o, r)
Unpauses the memcached process on remote server
generate python code for
def restart_couchbase(self):
    """
    Restarts the Couchbase server on the remote server
    :return: None
    """
    o, r = self.execute_command("open /Applications/Couchbase\ Server.app")
    self.log_command_output(o, r)
Restarts the Couchbase server on the remote server
generate python code for
def execute_command(self, command, info=None, debug=True, use_channel=False,
                    timeout=600, get_exit_code=False):
    """
    Executes a given command on the remote machine.
    :param command: The command to execute.
    :param info: Additional information for execution (optional).
    :param debug: Enables debug output if True.
    :param use_channel: Use SSH channel if True.
    :param timeout: Timeout for command execution in seconds
    :param get_exit_code: Return the exit code of the command if True.
    :return: Command output and error as a tuple.
    """
    if getattr(self, "info", None) is None and info is not None:
        self.info = info

    if self.info.type.lower() == 'windows':
        self.use_sudo = False

    if self.use_sudo:
        command = "sudo " + command

    return self.execute_command_raw(
        command, debug=debug, use_channel=use_channel,
        timeout=timeout, get_exit_code=get_exit_code)
Executes a given command on the remote machine.
generate code for the following
def disable_disk_readonly(self, disk_location):
    """
    Disables read-only mode for the specified disk location.
    Override method for Windows
    :param disk_location: disk location to disable read-only mode.
    :return: None
    """
    raise NotImplementedError
Disables read-only mode for the specified disk location. Override method for Windows
generate python code for the above
import urllib.request


def download_build_locally(self, build_url):
    """
    Downloads the Couchbase build locally
    :param build_url: Download url to download the build from
    :return: tuple containing the path to the downloaded build file
             as well as the resulting HTTPMessage object.
    """
    f_path = "{}/{}".format(".", build_url.split('/')[-1])
    f, r = urllib.request.urlretrieve(build_url, f_path)
    return f, r
Downloads the Couchbase build locally
give python code to
def start_couchbase(self):
    """
    Starts couchbase on remote server
    :return: None
    """
    running = self.is_couchbase_running()
    retry = 0
    while not running and retry < 3:
        log.info("Starting couchbase server")
        if self.nonroot:
            log.info("Start Couchbase Server with non root method")
            o, r = self.execute_command(
                '%s%scouchbase-server \-- -noinput -detached'
                % (self.nr_home_path, LINUX_COUCHBASE_BIN_PATH))
            self.log_command_output(o, r)
        else:
            log.info("Running systemd command on this server")
            o, r = self.execute_command("systemctl start couchbase-server.service")
            self.log_command_output(o, r)
            self.sleep(5, "waiting for couchbase server to come up")
            o, r = self.execute_command("systemctl status couchbase-server.service | grep ExecStop=/opt/couchbase/bin/couchbase-server")
            log.info("Couchbase server status: {}".format(o))
        running = self.is_couchbase_running()
        retry = retry + 1
    if not running and retry >= 3:
        sys.exit("Failed to start Couchbase server on " + self.info.ip)
Starts couchbase on remote server
generate doc string for following function:
def disable_file_limit(self):
    """
    Change the file limit to 200000 for the indexer process
    :return: None
    """
    o, r = self.execute_command("prlimit --nofile=200000 --pid $(pgrep indexer)")
    self.log_command_output(o, r)
def disable_file_limit(self):
    o, r = self.execute_command("prlimit --nofile=200000 --pid $(pgrep indexer)")
    self.log_command_output(o, r)
generate comment.
def cbbackupmgr_param(self, name, *args):
    """
    Returns the config value from the ini whose key matches 'name' and is
    stored under the 'cbbackupmgr' section heading.
    :param name: the key under which an expected value is stored.
    :param args: expects a single parameter which will be used as the
                 default if the requested key is not found.
    :return: the value parsed from the ini file/default value if the given
             key is not found.
    :raises Exception: if the given key does not exist in the ini and no
                       default value is provided.
    """
    if name in self.cbbackupmgr:
        return TestInput._parse_param(self.cbbackupmgr[name])

    if len(args) == 1:
        return args[0]

    if self.cbbackupmgr["name"] != "local_bkrs":
        raise Exception(f"Parameter '{name}' must be set in the test configuration")
def cbbackupmgr_param(self, name, *args):
    if name in self.cbbackupmgr:
        return TestInput._parse_param(self.cbbackupmgr[name])

    if len(args) == 1:
        return args[0]

    if self.cbbackupmgr["name"] != "local_bkrs":
        raise Exception(f"Parameter '{name}' must be set in the test configuration")
generate doc string for following function:
def unmount_partition(self, location):
    """
    Unmount the partition at the specified location.
    :param location: Location of the partition which has to be unmounted
    :return: Output and error message from the umount command
    """
    command = "umount -l {0}; df -Th".format(location)
    output, error = self.execute_command(command)
    return output, error
def unmount_partition(self, location):
    command = "umount -l {0}; df -Th".format(location)
    output, error = self.execute_command(command)
    return output, error
generate doc string for following function:
def _check_output(self, word_check, output):
    """
    Check if certain word is present in the output
    :param word_check: string or list of strings to check
    :param output: the output to check against
    :return: True if word is present in the output else False
    """
    found = False
    if len(output) >= 1:
        if isinstance(word_check, list):
            for ele in word_check:
                for x in output:
                    if ele.lower() in str(x.lower()):
                        log.info("Found '{0} in output".format(ele))
                        found = True
                        break
        elif isinstance(word_check, str):
            for x in output:
                if word_check.lower() in str(x.lower()):
                    log.info("Found '{0}' in output".format(word_check))
                    found = True
                    break
        else:
            self.log.error("invalid {0}".format(word_check))
    return found
def _check_output(self, word_check, output):
    found = False
    if len(output) >= 1:
        if isinstance(word_check, list):
            for ele in word_check:
                for x in output:
                    if ele.lower() in str(x.lower()):
                        log.info("Found '{0} in output".format(ele))
                        found = True
                        break
        elif isinstance(word_check, str):
            for x in output:
                if word_check.lower() in str(x.lower()):
                    log.info("Found '{0}' in output".format(word_check))
                    found = True
                    break
        else:
            self.log.error("invalid {0}".format(word_check))
    return found
Code the following:
import os
import paramiko
import signal
from time import sleep


def ssh_connect_with_retries(self, ip, ssh_username, ssh_password, ssh_key,
                             exit_on_failure=False, max_attempts_connect=5,
                             backoff_time=10):
    """
    Connect to the remote server with given user and password,
    with exponential backoff delay
    :param ip: IP address of the remote server to connect to
    :param ssh_username: user to connect to remote server with
    :param ssh_password: password to connect to remote server with
    :param ssh_key: ssh key to connect to remote server with
    :param exit_on_failure: exit the function on error if True
    :param max_attempts_connect: max number of attempts before giving up
    :param backoff_time: time to wait between attempts
    :return: None
    """
    attempt = 0
    is_ssh_ok = False
    while not is_ssh_ok and attempt < max_attempts_connect:
        attempt += 1
        log.info("SSH Connecting to {} with username:{}, attempt#{} of {}"
                 .format(ip, ssh_username, attempt, max_attempts_connect))
        try:
            if self.remote and ssh_key == '':
                self._ssh_client.connect(
                    hostname=ip.replace('[', '').replace(']', ''),
                    username=ssh_username, password=ssh_password,
                    look_for_keys=False)
            elif self.remote:
                self._ssh_client.connect(
                    hostname=ip.replace('[', '').replace(']', ''),
                    username=ssh_username, key_filename=ssh_key,
                    look_for_keys=False)
            is_ssh_ok = True
        except paramiko.BadHostKeyException as bhke:
            log.error("Can't establish SSH (Invalid host key) to {}: {}"
                      .format(ip, bhke))
            raise Exception(bhke)
        except Exception as e:
            log.error("Can't establish SSH (unknown reason) to {}: {}"
                      .format(ip, e, ssh_username, ssh_password))
            if attempt < max_attempts_connect:
                log.info("Retrying with back off delay for {} secs."
                         .format(backoff_time))
                self.sleep(backoff_time)
                backoff_time *= 2
    if not is_ssh_ok:
        error_msg = "-->No SSH connectivity to {} even after {} times!\n" \
                    .format(self.ip, attempt)
        log.error(error_msg)
        if exit_on_failure:
            log.error("Exit on failure: killing process")
            os.kill(os.getpid(), signal.SIGKILL)
        else:
            log.error("No exit on failure, raise exception")
            raise Exception(error_msg)
    else:
        log.info("SSH Connected to {} as {}".format(ip, ssh_username))
Connect to the remote server with given user and password, with exponential backoff delay
Code the following:
def run(self):
    """
    Runs the NodeInstaller thread to run various installation steps
    in the remote server
    :return: None
    """
    installer = InstallSteps(self.log, self.node_install_info)
    node_installer = installer.get_node_installer(self.node_install_info)
    for step in self.steps:
        self.log.info("{} - Running '{}'"
                      .format(self.node_install_info.server.ip, step))
        if step == "populate_build_url":
            # To download the main build url
            self.node_install_info.state = "construct_build_url"
            installer.populate_build_url()
        elif step == "populate_debug_build_url":
            # To download the debug_info build url for backtraces
            self.node_install_info.state = "construct_debug_build_url"
            installer.populate_debug_build_url()
        elif step == "check_url_status":
            self.node_install_info.state = "checking_url_status"
            installer.check_url_status(self.node_install_info.build_url)
            if self.node_install_info.debug_build_url:
                installer.check_url_status(
                    self.node_install_info.debug_build_url)
        elif step == "local_download_build":
            self.node_install_info.state = "downloading_build_on_executor"
            build_urls = [self.node_install_info.build_url]
            if self.node_install_info.debug_build_url:
                build_urls.append(self.node_install_info.debug_build_url)
            for build_url in build_urls:
                f_name, res = installer.download_build_locally(build_url)
                self.log.debug("File saved as '{}'".format(f_name))
                self.log.debug("File size: {}".format(res["Content-Length"]))
                self.log.debug("File create date: {}".format(res["Date"]))
        elif step == "copy_local_build_to_server":
            self.node_install_info.state = "copying_build_to_remote_server"
            build_urls = [self.node_install_info.build_url]
            if self.node_install_info.debug_build_url:
                build_urls.append(self.node_install_info.build_url)
            for build_url in build_urls:
                installer.result = installer.result and \
                    installer.copy_build_to_server(node_installer, build_url)
        elif step == "download_build":
            self.node_install_info.state = "downloading_build"
            installer.download_build(node_installer,
                                     self.node_install_info.build_url)
            if self.node_install_info.debug_build_url:
                installer.download_build(node_installer,
                                         self.node_install_info.build_url)
        elif step == "uninstall":
            self.node_install_info.state = "uninstalling"
            node_installer.uninstall()
        elif step == "deep_cleanup":
            self.node_install_info.state = "deep_cleaning"
        elif step == "pre_install":
            self.node_install_info.state = "pre_install_procedure"
        elif step == "install":
            self.node_install_info.state = "installing"
            node_installer.install(self.node_install_info.build_url)
            node_installer.post_install()
        elif step == "init_cluster":
            self.node_install_info.state = "init_cluster"
            node_installer.init_cluster(self.node_install_info.server)
        elif step == "post_install":
            self.node_install_info.state = "post_install_procedure"
        elif step == "post_install_cleanup":
            self.node_install_info.state = "post_install_cleanup"
        else:
            self.log.critical("Invalid step '{}'".format(step))
            installer.result = False
        if installer.result is False:
            break
    node_installer.shell.disconnect()
    self.result = installer.result
Runs the NodeInstaller thread to run various installation steps in the remote server
generate python code for the following
def change_env_variables(self, dict):
    """
    Change environment variables mentioned in dictionary and
    restart Couchbase server
    :param dict: key value pair of environment variables and their
                 values to change to
    :return: None
    """
    prefix = "\\n "
    shell = self._ssh_client.invoke_shell()
    init_file = "couchbase-server"
    file_path = "/opt/couchbase/bin/"
    environmentVariables = ""
    backupfile = file_path + init_file + ".bak"
    sourceFile = file_path + init_file
    o, r = self.execute_command("cp " + sourceFile + " " + backupfile)
    self.log_command_output(o, r)
    command = "sed -i 's/{0}/{0}".format("ulimit -l unlimited")
    for key in list(dict.keys()):
        o, r = self.execute_command(
            "sed -i 's/{1}.*//' {0}".format(sourceFile, key))
        self.log_command_output(o, r)
        o, r = self.execute_command(
            "sed -i 's/export ERL_FULLSWEEP_AFTER/export "
            "ERL_FULLSWEEP_AFTER\\n{1}={2}\\nexport {1}/' {0}"
            .format(sourceFile, key, dict[key]))
        self.log_command_output(o, r)
    for key in list(dict.keys()):
        environmentVariables += prefix \
                                + 'export {0}={1}'.format(key, dict[key])
    command += environmentVariables + "/'" + " " + sourceFile
    o, r = self.execute_command(command)
    self.log_command_output(o, r)
    # Restart Couchbase
    o, r = self.execute_command("service couchbase-server restart")
    self.log_command_output(o, r)
    shell.close()
Change environment variables mentioned in dictionary and restart Couchbase server
generate code for the following
def reset_env_variables(self):
    """
    Reset environment previously set and restart couchbase server
    :return: None
    """
    shell = self._ssh_client.invoke_shell()
    if getattr(self, "info", None) is None:
        self.info = self.extract_remote_info()
    init_file = "couchbase-server"
    file_path = "/opt/couchbase/bin/"
    backupfile = file_path + init_file + ".bak"
    sourceFile = file_path + init_file
    o, r = self.execute_command("mv " + backupfile + " " + sourceFile)
    self.log_command_output(o, r)
    # Restart Couchbase
    o, r = self.execute_command("service couchbase-server restart")
    self.log_command_output(o, r)
    shell.close()
Reset environment previously set and restart couchbase server
generate code for the above:
def disable_disk_readonly(self, disk_location):
    """
    Disables read-only mode for the specified disk location.
    Override method for Windows
    :param disk_location: disk location to disable read-only mode.
    :return: None
    """
    raise NotImplementedError
Disables read-only mode for the specified disk location. Override method for Windows
def get_os(info):
    """
    Gets os name from info
    :param info: server info dictionary to get the data from
    :return: os name
    """
    os = info.distribution_version.lower()
    to_be_replaced = ['\n', ' ', 'gnu/linux']
    for _ in to_be_replaced:
        if _ in os:
            os = os.replace(_, '')
    if info.deliverable_type == "dmg":
        major_version = os.split('.')
        os = major_version[0] + '.' + major_version[1]
    if info.distribution_type == "Amazon Linux 2":
        os = "amzn2"
    return os
def get_os(info):
    os = info.distribution_version.lower()
    to_be_replaced = ['\n', ' ', 'gnu/linux']
    for _ in to_be_replaced:
        if _ in os:
            os = os.replace(_, '')
    if info.deliverable_type == "dmg":
        major_version = os.split('.')
        os = major_version[0] + '.' + major_version[1]
    if info.distribution_type == "Amazon Linux 2":
        os = "amzn2"
    return os
generate comment:
def get_disk_info(self, win_info=None, mac=False):
    """
    Get disk info of a remote server
    :param win_info: windows info
    :param mac: get disk info from macOS if True
    :return: disk info of remote server
    """
    if win_info:
        if 'Total Physical Memory' not in win_info:
            win_info = self.create_windows_info()
        o = "Total Physical Memory =" + win_info['Total Physical Memory'] + '\n'
        o += "Available Physical Memory =" + win_info['Available Physical Memory']
    elif mac:
        o, r = self.execute_command_raw('df -hl', debug=False)
    else:
        o, r = self.execute_command_raw('df -Thl', debug=False)
    if o:
        return o
def get_disk_info(self, win_info=None, mac=False):
    if win_info:
        if 'Total Physical Memory' not in win_info:
            win_info = self.create_windows_info()
        o = "Total Physical Memory =" + win_info['Total Physical Memory'] + '\n'
        o += "Available Physical Memory =" + win_info['Available Physical Memory']
    elif mac:
        o, r = self.execute_command_raw('df -hl', debug=False)
    else:
        o, r = self.execute_command_raw('df -Thl', debug=False)
    if o:
        return o
generate python code for the above
def stop_indexer(self):
    """
    Stop indexer process on remote server
    :return: None
    """
    o, r = self.execute_command("kill -SIGSTOP $(pgrep indexer)")
    self.log_command_output(o, r, debug=False)
Stop indexer process on remote server
generate python code for
def enable_disk_readonly(self, disk_location):
    """
    Enables read-only mode for the specified disk location.
    Override method for Windows
    :param disk_location: disk location to enable read-only mode.
    :return: None
    """
    raise NotImplementedError
Enables read-only mode for the specified disk location. Override method for Windows
generate comment for following function:
def cpu_stress(self, stop_time):
    """
    Applies CPU stress on 20 CPU cores for the specified duration.
    :param stop_time: duration to apply the CPU stress for.
    :return: None
    """
    o, r = self.execute_command("stress --cpu 20 --timeout {}".format(stop_time))
    self.log_command_output(o, r)
def cpu_stress(self, stop_time):
    o, r = self.execute_command("stress --cpu 20 --timeout {}".format(stop_time))
    self.log_command_output(o, r)
def execute_batch_command(self, command):
    """
    Execute a batch of commands. This method copies the commands onto
    a batch file, changes the file type to executable and then executes
    them on the remote server
    :param command: commands to execute in a batch
    :return: output of the batch commands
    """
    remote_command = "echo \"%s\" > /tmp/cmd.bat ; " \
                     "chmod u=rwx /tmp/cmd.bat; /tmp/cmd.bat" % command
    o, r = self.execute_command_raw(remote_command)
    if r and r != ['']:
        log.error("Command didn't run successfully. Error: {0}".format(r))
    return o, r
Execute a batch of commands. This method copies the commands onto a batch file, changes the file type to executable and then executes them on the remote server
generate python code for the above
def get_instances(cls):
    """
    Returns a list of instances of the class
    :return: generator that yields instances of the class
    """
    for ins in cls.__refs__:
        yield ins
Returns a list of instances of the class
generate python code for the following
from shell_util.remote_connection import RemoteMachineShellConnection


def check_server_state(self, servers):
    """
    Checks if the servers are reachable
    :param servers: list of servers to check
    :return: True if the servers are all reachable else False
    """
    result = True
    reachable = list()
    unreachable = list()
    for server in servers:
        try:
            shell = RemoteMachineShellConnection(server)
            shell.disconnect()
            reachable.append(server.ip)
        except Exception as e:
            self.log.error(e)
            unreachable.append(server.ip)
    if len(unreachable) > 0:
        self.log.info("-" * 100)
        for server in unreachable:
            self.log.error("INSTALL FAILED ON: \t{0}".format(server))
        self.log.info("-" * 100)
        for server in reachable:
            self.log.info("INSTALL COMPLETED ON: \t{0}".format(server))
        self.log.info("-" * 100)
        result = False
    return result
Checks if the servers are reachable
generate comment.
def __str__(self):
    """
    Returns a string representation of the TestInputServer object
    with ip, port and ssh_username
    :return: A string representation of the TestInputServer object
    """
    # ip_str = "ip:{0}".format(self.ip)
    ip_str = "ip:{0} port:{1}".format(self.ip, self.port)
    ssh_username_str = "ssh_username:{0}".format(self.ssh_username)
    return "{0} {1}".format(ip_str, ssh_username_str)
def __str__(self):
    # ip_str = "ip:{0}".format(self.ip)
    ip_str = "ip:{0} port:{1}".format(self.ip, self.port)
    ssh_username_str = "ssh_username:{0}".format(self.ssh_username)
    return "{0} {1}".format(ip_str, ssh_username_str)
generate code for the above:
def get_memcache_pid(self):
    """
    Get the pid of memcached process
    :return: pid of memcached process
    """
    raise NotImplementedError
Get the pid of memcached process
generate comment for above
def mount_partition(self, location):
    """
    Mount a partition at the location specified
    :param location: Mount location
    :return: Output and error message from the mount command
    """
    command = "mount -o loop,rw,usrquota,grpquota /usr/disk-img/disk-quota.ext3 {0}; df -Thl".format(location)
    output, error = self.execute_command(command)
    return output, error
def mount_partition(self, location):
    command = "mount -o loop,rw,usrquota,grpquota /usr/disk-img/disk-quota.ext3 {0}; df -Thl".format(location)
    output, error = self.execute_command(command)
    return output, error