From 31d43a83984f15531e54c6d9faca3188b72b3c99 Mon Sep 17 00:00:00 2001 From: micafer Date: Wed, 22 Jul 2020 12:48:14 +0200 Subject: [PATCH 01/18] Improve docs --- doc/source/web.rst | 2 ++ 1 file changed, 2 insertions(+) diff --git a/doc/source/web.rst b/doc/source/web.rst index 3ef9e964c..98977b5d5 100644 --- a/doc/source/web.rst +++ b/doc/source/web.rst @@ -133,6 +133,8 @@ Add your own in the docker command: ``docker run -p 80:80 -p 443:443 -v server.crt:/etc/ssl/certs/server.crt -v server.key:/etc/ssl/certs/server.key -d grycap/im-web:1.5.5-ssl`` +Then you can access the IM Web portal in the following URL: http://localhost/im-web/. + .. _use-web: Usage From 4bd407f1d037bd5eae800401e2e39a7962dd48f2 Mon Sep 17 00:00:00 2001 From: micafer Date: Wed, 22 Jul 2020 12:49:07 +0200 Subject: [PATCH 02/18] Remove dup line --- contextualization/conf-ansible.yml | 1 - 1 file changed, 1 deletion(-) diff --git a/contextualization/conf-ansible.yml b/contextualization/conf-ansible.yml index ac7b212c4..b954ee972 100644 --- a/contextualization/conf-ansible.yml +++ b/contextualization/conf-ansible.yml @@ -175,7 +175,6 @@ with_items: - { section: 'defaults', option: 'host_key_checking', value: 'False' } - { section: 'defaults', option: 'nocolor', value: '1' } - - { section: 'defaults', option: 'timeout', value: '30' } - { section: 'ssh_connection', option: 'pipelining', value: 'True' } - { section: 'defaults', option: 'jinja2_extensions', value: 'jinja2.ext.do' } - { section: 'defaults', option: 'allow_world_readable_tmpfiles', value: 'True' } From 0d2b6223779fa2b4cbfe06d5cf5d994adc7836eb Mon Sep 17 00:00:00 2001 From: micafer Date: Thu, 23 Jul 2020 10:52:55 +0200 Subject: [PATCH 03/18] Decrease SSH logins --- IM/ConfManager.py | 21 ++++-- IM/SSH.py | 131 +++++++++++++++++++++++-------------- IM/VirtualMachine.py | 131 +++++++++++++++++++------------------ IM/config.py | 2 +- etc/im.cfg | 2 +- test/functional/test_im.py | 2 +- test/unit/test_im_logic.py | 2 +- 7 files changed, 170 insertions(+), 121 deletions(-) diff --git a/IM/ConfManager.py b/IM/ConfManager.py index 6f2b6b02b..3cc39291b 100644 --- a/IM/ConfManager.py +++ b/IM/ConfManager.py @@ -355,6 +355,7 @@ def launch_ctxt_agent(self, vm, tasks): """ Launch the ctxt agent to configure the specified tasks in the specified VM """ + ssh = None pid = None tmp_dir = None try: @@ -376,7 +377,7 @@ def launch_ctxt_agent(self, vm, tasks): self.log_info("Copy the contextualization agent config file") # Copy the contextualization agent config file - ssh = vm.get_ssh_ansible_master() + ssh = vm.get_ssh_ansible_master(auto_close=False) ssh.sftp_mkdir(remote_dir) ssh.sftp_put(conf_file, remote_dir + "/" + os.path.basename(conf_file)) @@ -408,6 +409,8 @@ def launch_ctxt_agent(self, vm, tasks): pid = None self.log_exception("Error launching the ansible process to configure VM with ID %s" % str(vm.im_id)) finally: + if ssh: + ssh.close() if tmp_dir: shutil.rmtree(tmp_dir, ignore_errors=True) @@ -802,6 +805,7 @@ def configure_master(self): self.log_info("Sleeping %s secs." 
% (cont ** 2 * 5)) time.sleep(cont ** 2 * 5) cont += 1 + ssh = None try: self.log_info("Start the contextualization process.") @@ -810,7 +814,7 @@ def configure_master(self): else: if not self.inf.vm_master: raise Exception("No master VM found.") - ssh = self.inf.vm_master.get_ssh(retry=True) + ssh = self.inf.vm_master.get_ssh(retry=True, auto_close=False) if not ssh: raise Exception("Master VM does not have IP.") # Activate tty mode to avoid some problems with sudo in @@ -882,6 +886,8 @@ def configure_master(self): self.inf.ansible_configured = False success = False finally: + if ssh: + ssh.close() if tmp_dir: shutil.rmtree(tmp_dir, ignore_errors=True) @@ -915,6 +921,7 @@ def wait_master(self): success = True if not self.inf.ansible_configured: # Select the master VM + ssh = None try: self.inf.add_cont_msg("Select master VM") self.inf.select_vm_master() @@ -951,7 +958,7 @@ def wait_master(self): # Check and change if necessary the credentials of the master # vm - ssh = self.inf.vm_master.get_ssh(retry=True) + ssh = self.inf.vm_master.get_ssh(retry=True, auto_close=False) # Activate tty mode to avoid some problems with sudo in REL ssh.tty = True self.change_master_credentials(ssh) @@ -963,6 +970,9 @@ def wait_master(self): except Exception: self.log_exception("Error waiting the master VM to be running") self.inf.set_configured(False) + finally: + if ssh: + ssh.close() else: self.inf.set_configured(True) @@ -1035,8 +1045,7 @@ def generate_playbooks_and_hosts(self): if self.inf.radl.ansible_hosts: for ansible_host in self.inf.radl.ansible_hosts: (user, passwd, private_key) = ansible_host.getCredentialValues() - ssh = SSHRetry(ansible_host.getHost(), - user, passwd, private_key) + ssh = SSHRetry(ansible_host.getHost(), user, passwd, private_key) ssh.sftp_mkdir(remote_dir) ssh.sftp_put_files(recipe_files) else: @@ -1062,7 +1071,7 @@ def wait_vm_running(self, vm, timeout): - timeout(int): Max time to wait the VM to be running. Returns: True if all the VMs are running or false otherwise """ - delay = 10 + delay = Config.CHECK_CTXT_PROCESS_INTERVAL wait = 0 while not self._stop_thread and wait < timeout: if not vm.destroy: diff --git a/IM/SSH.py b/IM/SSH.py index d0f0307d9..a48b16cff 100644 --- a/IM/SSH.py +++ b/IM/SSH.py @@ -91,10 +91,14 @@ def run(self): class SSH: """ Class to encapsulate SSH operations using paramiko """ - def __init__(self, host, user, passwd=None, private_key=None, port=22, proxy_host=None): + def __init__(self, host, user, passwd=None, private_key=None, port=22, proxy_host=None, auto_close=True): # Atributo para la version "thread" self.thread = None + self.client = None + self.proxy = None + self.auto_close = auto_close + self.proxy_host = proxy_host self.tty = False self.port = port @@ -117,6 +121,17 @@ def __init__(self, host, user, passwd=None, private_key=None, port=22, proxy_hos self.private_key_obj = paramiko.RSAKey.from_private_key( private_key_obj) + def close(self): + """ + Close the SSH client connection + """ + if self.client: + self.client.close() + self.client = None + if self.proxy: + self.proxy.close() + self.proxy = None + def __str__(self): res = "SSH: host: " + self.host + ", port: " + \ str(self.port) + ", user: " + self.username @@ -136,6 +151,9 @@ def connect(self, time_out=None): Returns: a paramiko SSHClient connected with the server. 
""" + if self.client and self.client.get_transport() and self.client.get_transport().is_authenticated(): + return self.client, self.proxy + client = paramiko.SSHClient() client.set_missing_host_key_policy(paramiko.AutoAddPolicy()) @@ -172,6 +190,9 @@ def connect(self, time_out=None): password=self.password, timeout=time_out, sock=proxy_channel, pkey=self.private_key_obj) + self.client = client + self.proxy = proxy + return client, proxy def test_connectivity(self, time_out=None): @@ -228,10 +249,11 @@ def execute(self, command, timeout=None): for line in stderr: res_stderr += line - channel.close() - client.close() - if proxy: - proxy.close() + if self.auto_close: + channel.close() + client.close() + if proxy: + proxy.close() return (res_stdout, res_stderr, exit_status) def sftp_get(self, src, dest): @@ -253,10 +275,11 @@ def sftp_get(self, src, dest): sftp = scp.SCPClient(transport) sftp.get(src, dest) - sftp.close() - if proxy: - proxy.close() - transport.close() + if self.auto_close: + sftp.close() + if proxy: + proxy.close() + transport.close() def sftp_get_files(self, src, dest): """ Gets a list of files from the remote server @@ -277,10 +300,11 @@ def sftp_get_files(self, src, dest): for file0, file1 in zip(src, dest): sftp.get(file0, file1) - sftp.close() - if proxy: - proxy.close() - transport.close() + if self.auto_close: + sftp.close() + if proxy: + proxy.close() + transport.close() def sftp_put_files(self, files): """ Puts a list of files to the remote server @@ -301,10 +325,11 @@ def sftp_put_files(self, files): for src, dest in files: sftp.put(src, dest) - sftp.close() - if proxy: - proxy.close() - transport.close() + if self.auto_close: + sftp.close() + if proxy: + proxy.close() + transport.close() def sftp_put(self, src, dest): """ Puts a file to the remote server @@ -323,10 +348,11 @@ def sftp_put(self, src, dest): # in case of failure try to use scp sftp = scp.SCPClient(transport) sftp.put(src, dest) - sftp.close() - if proxy: - proxy.close() - transport.close() + if self.auto_close: + sftp.close() + if proxy: + proxy.close() + transport.close() def sftp_get_dir(self, src, dest): """ Gets recursively a directory from the remote server @@ -348,10 +374,11 @@ def sftp_get_dir(self, src, dest): full_dest = filename.replace(src, dest) sftp.get(filename, full_dest) - sftp.close() - if proxy: - proxy.close() - transport.close() + if self.auto_close: + sftp.close() + if proxy: + proxy.close() + transport.close() def sftp_walk(self, src, files=None, sftp=None): """ Gets recursively the list of items in a directory from the remote server @@ -474,10 +501,11 @@ def sftp_mkdir(self, directory, mode=0o777): sftp.mkdir(directory, mode) res = True - sftp.close() - if proxy: - proxy.close() - transport.close() + if self.auto_close: + sftp.close() + if proxy: + proxy.close() + transport.close() else: # use mkdir over ssh to create the directory _, _, status = self.execute("mkdir -p %s" % directory) @@ -498,10 +526,11 @@ def sftp_list(self, directory): transport = client.get_transport() sftp = paramiko.SFTPClient.from_transport(transport) res = sftp.listdir(directory) - sftp.close() - if proxy: - proxy.close() - transport.close() + if self.auto_close: + sftp.close() + if proxy: + proxy.close() + transport.close() return res def sftp_list_attr(self, directory): @@ -518,10 +547,11 @@ def sftp_list_attr(self, directory): transport = client.get_transport() sftp = paramiko.SFTPClient.from_transport(transport) res = sftp.listdir_attr(directory) - sftp.close() - transport.close() - if proxy: - 
proxy.close() + if self.auto_close: + sftp.close() + transport.close() + if proxy: + proxy.close() return res def getcwd(self): @@ -539,10 +569,11 @@ def getcwd(self): if sftp_avail: cwd = sftp.getcwd() - sftp.close() - if proxy: - proxy.close() - transport.close() + if self.auto_close: + sftp.close() + if proxy: + proxy.close() + transport.close() else: # use rm over ssh to delete the file cwd, _, _ = self.execute("pwd") @@ -603,10 +634,11 @@ def sftp_remove(self, path): if sftp_avail: res = sftp.remove(path) - sftp.close() - if proxy: - proxy.close() - transport.close() + if self.auto_close: + sftp.close() + if proxy: + proxy.close() + transport.close() else: # use rm over ssh to delete the file _, _, status = self.execute("rm -f %s" % path) @@ -635,10 +667,11 @@ def sftp_chmod(self, path, mode): if sftp_avail: sftp.chmod(path, mode) res = True - sftp.close() - if proxy: - proxy.close() - transport.close() + if self.auto_close: + sftp.close() + if proxy: + proxy.close() + transport.close() else: # use chmod over ssh to change permissions _, _, status = self.execute("chmod %s %s" % (oct(mode), path)) diff --git a/IM/VirtualMachine.py b/IM/VirtualMachine.py index 855e305b6..083bd0338 100644 --- a/IM/VirtualMachine.py +++ b/IM/VirtualMachine.py @@ -737,7 +737,7 @@ def setIps(self, public_ips, private_ips, remove_old=False, ignore_nets=None): vm_system.setValue('net_interface.%s.ip' % num_net, str(private_ip)) vm_system.setValue('net_interface.%s.connection' % num_net, private_net.id) - def get_ssh(self, retry=False): + def get_ssh(self, retry=False, auto_close=True): """ Get SSH object to connect with this VM """ @@ -754,9 +754,9 @@ def get_ssh(self, retry=False): self.log_warn("VM ID %s does not have IP. Do not return SSH Object." % self.im_id) return None if retry: - return SSHRetry(ip, user, passwd, private_key, self.getSSHPort(), proxy_host) + return SSHRetry(ip, user, passwd, private_key, self.getSSHPort(), proxy_host, auto_close=auto_close) else: - return SSH(ip, user, passwd, private_key, self.getSSHPort(), proxy_host) + return SSH(ip, user, passwd, private_key, self.getSSHPort(), proxy_host, auto_close=auto_close) def is_ctxt_process_running(self): """ Return the PID of the running process or None if it is not running """ @@ -827,70 +827,77 @@ def check_ctxt_process(self): self.ctxt_pid = None self.configured = False + ssh = None initial_count_out = self.cont_out wait = 0 - while self.ctxt_pid: - if self.destroy: - # If the VM has been destroyed set pid to None and return - self.log_debug("VM %s deleted. Exit check_ctxt_process thread." % self.im_id) - self.ctxt_pid = None - return None + try: + while self.ctxt_pid: + if self.destroy: + # If the VM has been destroyed set pid to None and return + self.log_debug("VM %s deleted. Exit check_ctxt_process thread." % self.im_id) + self.ctxt_pid = None + return None - ctxt_pid = self.ctxt_pid - if ctxt_pid != self.WAIT_TO_PID: - ssh = self.get_ssh_ansible_master() + ctxt_pid = self.ctxt_pid + if ctxt_pid != self.WAIT_TO_PID: + if not ssh: + ssh = self.get_ssh_ansible_master(auto_close=False) - try: - self.log_info("Getting status of ctxt process with pid: " + str(ctxt_pid)) - (_, _, exit_status) = ssh.execute("ps " + str(ctxt_pid)) - self.ssh_connect_errors = 0 - except Exception as ex: - self.log_warn("Error getting status of ctxt process with pid: %s. 
%s" % (ctxt_pid, ex)) - exit_status = 0 - self.ssh_connect_errors += 1 - if self.ssh_connect_errors > Config.MAX_SSH_ERRORS: - self.log_error("Too much errors getting status of ctxt process with pid: " + - str(ctxt_pid) + ". Forget it.") + try: + self.log_info("Getting status of ctxt process with pid: " + str(ctxt_pid)) + (_, _, exit_status) = ssh.execute("ps " + str(ctxt_pid)) self.ssh_connect_errors = 0 - self.configured = False + except Exception as ex: + self.log_warn("Error getting status of ctxt process with pid: %s. %s" % (ctxt_pid, ex)) + exit_status = 0 + self.ssh_connect_errors += 1 + if self.ssh_connect_errors > Config.MAX_SSH_ERRORS: + self.log_error("Too much errors getting status of ctxt process with pid: " + + str(ctxt_pid) + ". Forget it.") + self.ssh_connect_errors = 0 + self.configured = False + self.ctxt_pid = None + self.cont_out = initial_count_out + ("Too much errors getting the status of ctxt process." + " Check some network connection problems or if user " + "credentials has been changed.") + return None + + ip = self.getPublicIP() + if not ip: + ip = ip = self.getPrivateIP() + remote_dir = "%s/%s/%s_%s" % (Config.REMOTE_CONF_DIR, self.inf.id, ip, self.im_id) + + if exit_status != 0: + # The process has finished, get the outputs + self.log_info("The process %s has finished, get the outputs" % ctxt_pid) + ctxt_log = self.get_ctxt_log(remote_dir, ssh, True) + msg = self.get_ctxt_output(remote_dir, ssh, True) + if ctxt_log: + self.cont_out = initial_count_out + msg + ctxt_log + else: + self.cont_out = initial_count_out + msg + \ + "Error getting contextualization process log." self.ctxt_pid = None - self.cont_out = initial_count_out + ("Too much errors getting the status of ctxt process." - " Check some network connection problems or if user " - "credentials has been changed.") - return None - - ip = self.getPublicIP() - if not ip: - ip = ip = self.getPrivateIP() - remote_dir = "%s/%s/%s_%s" % (Config.REMOTE_CONF_DIR, self.inf.id, ip, self.im_id) - - if exit_status != 0: - # The process has finished, get the outputs - self.log_info("The process %s has finished, get the outputs" % ctxt_pid) - ctxt_log = self.get_ctxt_log(remote_dir, ssh, True) - msg = self.get_ctxt_output(remote_dir, ssh, True) - if ctxt_log: - self.cont_out = initial_count_out + msg + ctxt_log else: - self.cont_out = initial_count_out + msg + \ - "Error getting contextualization process log." - self.ctxt_pid = None + # Get the log of the process to update the cont_out + # dynamically + if Config.UPDATE_CTXT_LOG_INTERVAL > 0 and wait > Config.UPDATE_CTXT_LOG_INTERVAL: + wait = 0 + self.log_info("Get the log of the ctxt process with pid: " + str(ctxt_pid)) + ctxt_log = self.get_ctxt_log(remote_dir, ssh) + self.cont_out = initial_count_out + ctxt_log + # The process is still running, wait + self.log_info("The process %s is still running. wait." % ctxt_pid) + time.sleep(Config.CHECK_CTXT_PROCESS_INTERVAL) + wait += Config.CHECK_CTXT_PROCESS_INTERVAL else: - # Get the log of the process to update the cont_out - # dynamically - if Config.UPDATE_CTXT_LOG_INTERVAL > 0 and wait > Config.UPDATE_CTXT_LOG_INTERVAL: - wait = 0 - self.log_info("Get the log of the ctxt process with pid: " + str(ctxt_pid)) - ctxt_log = self.get_ctxt_log(remote_dir, ssh) - self.cont_out = initial_count_out + ctxt_log - # The process is still running, wait - self.log_info("The process %s is still running. wait." 
% ctxt_pid) + # We are waiting the PID, sleep time.sleep(Config.CHECK_CTXT_PROCESS_INTERVAL) - wait += Config.CHECK_CTXT_PROCESS_INTERVAL - else: - # We are waiting the PID, sleep - time.sleep(Config.CHECK_CTXT_PROCESS_INTERVAL) - + except Exception as gex: + self.log_warn("Error getting status of ctxt process with pid: %s. %s" % (self.ctxt_pid, gex)) + finally: + if ssh: + ssh.close() return self.ctxt_pid def is_configured(self): @@ -1011,17 +1018,17 @@ def get_ansible_host(self): return ansible_host - def get_ssh_ansible_master(self, retry=True): + def get_ssh_ansible_master(self, retry=True, auto_close=True): ansible_host = self.get_ansible_host() if ansible_host: (user, passwd, private_key) = ansible_host.getCredentialValues() if retry: - return SSHRetry(ansible_host.getHost(), user, passwd, private_key) + return SSHRetry(ansible_host.getHost(), user, passwd, private_key, auto_close=auto_close) else: - return SSH(ansible_host.getHost(), user, passwd, private_key) + return SSH(ansible_host.getHost(), user, passwd, private_key, auto_close=auto_close) else: if self.inf.vm_master: - return self.inf.vm_master.get_ssh(retry=retry) + return self.inf.vm_master.get_ssh(retry=retry, auto_close=auto_close) else: self.log_warn("There is not master VM. Do not return SSH object.") return None diff --git a/IM/config.py b/IM/config.py index 33bfd59bf..faebca590 100644 --- a/IM/config.py +++ b/IM/config.py @@ -85,7 +85,7 @@ class Config: MAX_SSH_ERRORS = 5 PRIVATE_NET_MASKS = ["10.0.0.0/8", "172.16.0.0/12", "192.168.0.0/16", "169.254.0.0/16", "100.64.0.0/10", "192.0.0.0/24", "198.18.0.0/15"] - CHECK_CTXT_PROCESS_INTERVAL = 5 + CHECK_CTXT_PROCESS_INTERVAL = 10 CONFMAMAGER_CHECK_STATE_INTERVAL = 5 UPDATE_CTXT_LOG_INTERVAL = 20 ANSIBLE_INSTALL_TIMEOUT = 500 diff --git a/etc/im.cfg b/etc/im.cfg index 9d296c47b..df21dbc69 100644 --- a/etc/im.cfg +++ b/etc/im.cfg @@ -82,7 +82,7 @@ RECIPES_DB_FILE = %(CONTEXTUALIZATION_DIR)s/recipes_ansible.db MAX_CONTEXTUALIZATION_TIME = 7200 REMOTE_CONF_DIR = /var/tmp/.im # Interval to update the state of the contextualization process in the VMs (in secs) -CHECK_CTXT_PROCESS_INTERVAL = 5 +CHECK_CTXT_PROCESS_INTERVAL = 10 # Interval to update the log output of the contextualization process in the VMs (in secs) UPDATE_CTXT_LOG_INTERVAL = 20 # Interval to update the state of the processes of the ConfManager (in secs) diff --git a/test/functional/test_im.py b/test/functional/test_im.py index 4dda40868..8349825fa 100755 --- a/test/functional/test_im.py +++ b/test/functional/test_im.py @@ -85,7 +85,7 @@ def register_cloudconnector(self, name, cloud_connector): sys.modules['IM.connectors.' + name] = type('MyConnector', (object,), {name + 'CloudConnector': cloud_connector}) - def get_dummy_ssh(self, retry=False): + def get_dummy_ssh(self, retry=False, auto_close=True): ssh = SSH("", "", "") ssh.test_connectivity = Mock(return_value=True) ssh.execute = Mock(return_value=("10", "", 0)) diff --git a/test/unit/test_im_logic.py b/test/unit/test_im_logic.py index 8a32f4bb1..79167345d 100755 --- a/test/unit/test_im_logic.py +++ b/test/unit/test_im_logic.py @@ -91,7 +91,7 @@ def register_cloudconnector(self, name, cloud_connector): sys.modules['IM.connectors.' 
+ name] = type('MyConnector', (object,), {name + 'CloudConnector': cloud_connector}) - def get_dummy_ssh(self, retry=False): + def get_dummy_ssh(self, retry=False, auto_close=True): ssh = SSH("", "", "") ssh.test_connectivity = Mock(return_value=True) ssh.execute = Mock(return_value=("10", "", 0)) From 24c699050a0747688b8467c198003e9521d56137 Mon Sep 17 00:00:00 2001 From: micafer Date: Thu, 23 Jul 2020 11:51:22 +0200 Subject: [PATCH 04/18] Fix docker-devel --- docker-devel/Dockerfile | 27 ++++++++++++++++----------- 1 file changed, 16 insertions(+), 11 deletions(-) diff --git a/docker-devel/Dockerfile b/docker-devel/Dockerfile index aa2d2d3ae..aabc49367 100644 --- a/docker-devel/Dockerfile +++ b/docker-devel/Dockerfile @@ -1,25 +1,30 @@ # Dockerfile to create a container with the IM service -FROM grycap/jenkins:ubuntu16.04-im -ARG BRANCH=devel +FROM ubuntu:18.04 LABEL maintainer="Miguel Caballer " LABEL version="1.9.4" LABEL description="Container image to run the IM service. (http://www.grycap.upv.es/im)" - EXPOSE 8899 8800 -# Install im - '$BRANCH' branch -RUN cd tmp \ - && git clone -b $BRANCH https://github.com/grycap/im.git \ - && cd im \ - && pip install /tmp/im +# Ensure system is up to date with mandatory python packages installed +RUN apt-get update && apt-get install --no-install-recommends -y openssh-client sshpass vim libmysqlclient20 && \ + apt-get clean && rm -rf /var/lib/apt/lists/* /tmp/* /var/tmp/* && rm -rf ~/.cache/ -# Install pip optional libraries -RUN pip install MySQL-python pyOpenSSL cheroot==8.1.0 pymongo msrest msrestazure azure-common azure-mgmt-storage azure-mgmt-compute azure-mgmt-network azure-mgmt-resource azure-mgmt-dns -RUN pip install zipp==3.0.0 +# Install IM +RUN apt-get update && apt-get install --no-install-recommends -y gcc git python python-dev python-pip libmysqld-dev libssl-dev libffi-dev libsqlite3-dev && \ + pip install setuptools --upgrade -I && \ + pip install pip --upgrade -I && \ + /usr/local/bin/pip install msrest msrestazure azure-common azure-mgmt-storage azure-mgmt-compute azure-mgmt-network azure-mgmt-resource azure-mgmt-dns && \ + /usr/local/bin/pip install MySQL-python pyOpenSSL pycrypto xmltodict pymongo && \ + cd tmp && git clone -b $BRANCH https://github.com/grycap/im.git && cd im && pip install /tmp/im && \ + /usr/local/bin/pip uninstall pip -y && \ + apt-get purge -y gcc git python-dev python-pip libmysqld-dev libssl-dev libffi-dev libsqlite3-dev && \ + apt-get autoremove -y && apt-get clean && rm -rf /var/lib/apt/lists/* /tmp/* /var/tmp/* && rm -rf ~/.cache/ # Set the VM_NUM_USE_CTXT_DIST to 3 for the tests RUN sed -i -e 's/VM_NUM_USE_CTXT_DIST = 30/VM_NUM_USE_CTXT_DIST = 3/g' /etc/im/im.cfg +# Copy a ansible.cfg with correct minimum values COPY ansible.cfg /etc/ansible/ansible.cfg +# Start IM service CMD im_service.py From 21adff139fcbd83456a564763f10a816e562a094 Mon Sep 17 00:00:00 2001 From: micafer Date: Thu, 23 Jul 2020 11:59:02 +0200 Subject: [PATCH 05/18] Fix docker-devel --- docker-devel/Dockerfile | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/docker-devel/Dockerfile b/docker-devel/Dockerfile index aabc49367..d8de90ecc 100644 --- a/docker-devel/Dockerfile +++ b/docker-devel/Dockerfile @@ -1,5 +1,6 @@ # Dockerfile to create a container with the IM service FROM ubuntu:18.04 +ARG BRANCH=devel LABEL maintainer="Miguel Caballer " LABEL version="1.9.4" LABEL description="Container image to run the IM service. 
(http://www.grycap.upv.es/im)" @@ -15,7 +16,7 @@ RUN apt-get update && apt-get install --no-install-recommends -y gcc git python pip install pip --upgrade -I && \ /usr/local/bin/pip install msrest msrestazure azure-common azure-mgmt-storage azure-mgmt-compute azure-mgmt-network azure-mgmt-resource azure-mgmt-dns && \ /usr/local/bin/pip install MySQL-python pyOpenSSL pycrypto xmltodict pymongo && \ - cd tmp && git clone -b $BRANCH https://github.com/grycap/im.git && cd im && pip install /tmp/im && \ + cd tmp && git clone https://github.com/grycap/im.git -b $BRANCH && cd im && pip install /tmp/im && \ /usr/local/bin/pip uninstall pip -y && \ apt-get purge -y gcc git python-dev python-pip libmysqld-dev libssl-dev libffi-dev libsqlite3-dev && \ apt-get autoremove -y && apt-get clean && rm -rf /var/lib/apt/lists/* /tmp/* /var/tmp/* && rm -rf ~/.cache/ From f5877049bd81de3d62cf51a7905c4681395210fa Mon Sep 17 00:00:00 2001 From: micafer Date: Thu, 23 Jul 2020 12:26:01 +0200 Subject: [PATCH 06/18] Fix docker-devel --- docker-devel/Dockerfile | 17 ++++++++--------- 1 file changed, 8 insertions(+), 9 deletions(-) diff --git a/docker-devel/Dockerfile b/docker-devel/Dockerfile index d8de90ecc..464e8fc36 100644 --- a/docker-devel/Dockerfile +++ b/docker-devel/Dockerfile @@ -7,18 +7,17 @@ LABEL description="Container image to run the IM service. (http://www.grycap.upv EXPOSE 8899 8800 # Ensure system is up to date with mandatory python packages installed -RUN apt-get update && apt-get install --no-install-recommends -y openssh-client sshpass vim libmysqlclient20 && \ +RUN apt-get update && apt-get install --no-install-recommends -y python3 python3-distutils openssh-client sshpass vim libmysqlclient20 python3-mysqldb && \ apt-get clean && rm -rf /var/lib/apt/lists/* /tmp/* /var/tmp/* && rm -rf ~/.cache/ # Install IM -RUN apt-get update && apt-get install --no-install-recommends -y gcc git python python-dev python-pip libmysqld-dev libssl-dev libffi-dev libsqlite3-dev && \ - pip install setuptools --upgrade -I && \ - pip install pip --upgrade -I && \ - /usr/local/bin/pip install msrest msrestazure azure-common azure-mgmt-storage azure-mgmt-compute azure-mgmt-network azure-mgmt-resource azure-mgmt-dns && \ - /usr/local/bin/pip install MySQL-python pyOpenSSL pycrypto xmltodict pymongo && \ - cd tmp && git clone https://github.com/grycap/im.git -b $BRANCH && cd im && pip install /tmp/im && \ - /usr/local/bin/pip uninstall pip -y && \ - apt-get purge -y gcc git python-dev python-pip libmysqld-dev libssl-dev libffi-dev libsqlite3-dev && \ +RUN apt-get update && apt-get install --no-install-recommends -y git python3-pip && \ + pip3 install pip setuptools --upgrade && \ + /usr/local/bin/pip3 install msrest msrestazure azure-common azure-mgmt-storage azure-mgmt-compute azure-mgmt-network azure-mgmt-resource azure-mgmt-dns && \ + /usr/local/bin/pip3 install pyOpenSSL cheroot xmltodict pymongo ansible==2.9.10&& \ + cd tmp && git clone https://github.com/grycap/im.git -b $BRANCH && cd im && /usr/local/bin/pip3 install /tmp/im && \ + /usr/local/bin/pip3 uninstall pip -y && \ + apt-get purge -y git python3-pip && \ apt-get autoremove -y && apt-get clean && rm -rf /var/lib/apt/lists/* /tmp/* /var/tmp/* && rm -rf ~/.cache/ # Set the VM_NUM_USE_CTXT_DIST to 3 for the tests From 0a8f015057f0dd135728f238beb294da0076c662 Mon Sep 17 00:00:00 2001 From: micafer Date: Thu, 23 Jul 2020 13:28:12 +0200 Subject: [PATCH 07/18] Update test VMIs --- test/files/test.radl | 4 +--- test/files/test_cont_dist.radl | 4 +--- 
test/files/test_simple.json | 6 ++---- 3 files changed, 4 insertions(+), 10 deletions(-) diff --git a/test/files/test.radl b/test/files/test.radl index b154d510c..46e033b46 100644 --- a/test/files/test.radl +++ b/test/files/test.radl @@ -26,9 +26,7 @@ cpu.count>=1 and memory.size>=512m and net_interface.0.connection = 'privada' and disk.0.os.name='linux' and -disk.0.image.url = 'one://ramses.i3m.upv.es/1145' and -disk.0.os.credentials.username = 'ubuntu' and -disk.0.os.credentials.password = 'yoyoyo' and +disk.0.image.url = 'one://ramses.i3m.upv.es/1129' and disk.0.os.credentials.new.password = 'Tututu+01' and disk.0.applications contains (name='ganglia') and disk.1.size=1GB and diff --git a/test/files/test_cont_dist.radl b/test/files/test_cont_dist.radl index 789fef22d..53a87b88f 100644 --- a/test/files/test_cont_dist.radl +++ b/test/files/test_cont_dist.radl @@ -20,9 +20,7 @@ cpu.count>=1 and memory.size>=512m and net_interface.0.connection = 'privada' and disk.0.os.name='linux' and -disk.0.image.url = 'one://ramses.i3m.upv.es/1145' and -disk.0.os.credentials.username = 'ubuntu' and -disk.0.os.credentials.password = 'yoyoyo' and +disk.0.image.url = 'one://ramses.i3m.upv.es/1129' and disk.0.os.credentials.new.password = 'Tututu+01' ) diff --git a/test/files/test_simple.json b/test/files/test_simple.json index b03e121f6..514c0e89c 100644 --- a/test/files/test_simple.json +++ b/test/files/test_simple.json @@ -12,9 +12,7 @@ "class": "system", "cpu.arch": "x86_64", "cpu.count_min": 1, - "disk.0.image.url": "one://ramses.i3m.upv.es/1145", - "disk.0.os.credentials.password": "yoyoyo", - "disk.0.os.credentials.username": "ubuntu", + "disk.0.image.url": "one://ramses.i3m.upv.es/1129", "disk.0.os.name": "linux", "id": "front", "memory.size_min": 536870912, @@ -25,7 +23,7 @@ "class": "system", "cpu.arch": "x86_64", "cpu.count_min": 1, - "disk.0.image.url": "one://ramses.i3m.upv.es/1145", + "disk.0.image.url": "one://ramses.i3m.upv.es/1269", "disk.0.os.credentials.password": "yoyoyo", "disk.0.os.credentials.username": "ubuntu", "disk.0.os.name": "linux", From a4eea7f01292c3f161f2bd2a23a4dc2d9e17e601 Mon Sep 17 00:00:00 2001 From: micafer Date: Thu, 23 Jul 2020 13:55:32 +0200 Subject: [PATCH 08/18] Update test VMIs --- test/files/test_simple.json | 2 -- 1 file changed, 2 deletions(-) diff --git a/test/files/test_simple.json b/test/files/test_simple.json index 514c0e89c..b27d1071b 100644 --- a/test/files/test_simple.json +++ b/test/files/test_simple.json @@ -24,8 +24,6 @@ "cpu.arch": "x86_64", "cpu.count_min": 1, "disk.0.image.url": "one://ramses.i3m.upv.es/1269", - "disk.0.os.credentials.password": "yoyoyo", - "disk.0.os.credentials.username": "ubuntu", "disk.0.os.name": "linux", "id": "wn", "memory.size_min": 536870912, From b53a7eb2e18b067b47ecc797a0bf9194f8db8eb8 Mon Sep 17 00:00:00 2001 From: micafer Date: Fri, 24 Jul 2020 08:10:24 +0200 Subject: [PATCH 09/18] Update test VMIs --- test/files/test_simple.json | 2 ++ 1 file changed, 2 insertions(+) diff --git a/test/files/test_simple.json b/test/files/test_simple.json index b27d1071b..95382e0a8 100644 --- a/test/files/test_simple.json +++ b/test/files/test_simple.json @@ -24,6 +24,8 @@ "cpu.arch": "x86_64", "cpu.count_min": 1, "disk.0.image.url": "one://ramses.i3m.upv.es/1269", + "disk.0.os.credentials.password": "GRyCAP01", + "disk.0.os.credentials.username": "ubuntu", "disk.0.os.name": "linux", "id": "wn", "memory.size_min": 536870912, From 6d586cf1e035eee023f780a9c22943692b147442 Mon Sep 17 00:00:00 2001 From: micafer Date: Fri, 24 Jul 2020 
09:25:09 +0200 Subject: [PATCH 10/18] Update test VMIs --- test/files/tosca_create.yml | 1 + 1 file changed, 1 insertion(+) diff --git a/test/files/tosca_create.yml b/test/files/tosca_create.yml index a4d5d9b8e..d3c352fa3 100644 --- a/test/files/tosca_create.yml +++ b/test/files/tosca_create.yml @@ -105,6 +105,7 @@ topology_template: architecture: x86_64 type: linux distribution: ubuntu + version: 16.04 outputs: server_url: From 09a6df7b1a2ac2306ce35c58a631558a58f72c9c Mon Sep 17 00:00:00 2001 From: micafer Date: Fri, 24 Jul 2020 10:04:38 +0200 Subject: [PATCH 11/18] Update test VMIs --- contextualization/conf-ansible.yml | 4 ++++ test/files/tosca_create.yml | 1 - 2 files changed, 4 insertions(+), 1 deletion(-) diff --git a/contextualization/conf-ansible.yml b/contextualization/conf-ansible.yml index b954ee972..95ed41dc1 100644 --- a/contextualization/conf-ansible.yml +++ b/contextualization/conf-ansible.yml @@ -118,6 +118,10 @@ pip: name=pyOpenSSL state=latest when: ansible_os_family != "RedHat" or ansible_distribution_major_version|int > 6 + - name: Ubuntu 14 pip cryptography + pip: name=cryptography version=2.9.2 + when: ansible_distribution == "Ubuntu" and ansible_distribution_major_version|int <= 14 + - name: Install pkgs with Pip in RH6 pip: name="{{ item.name }}" version="{{ item.version }}" when: ansible_os_family == "RedHat" and ansible_distribution_major_version|int <= 6 diff --git a/test/files/tosca_create.yml b/test/files/tosca_create.yml index d3c352fa3..a4d5d9b8e 100644 --- a/test/files/tosca_create.yml +++ b/test/files/tosca_create.yml @@ -105,7 +105,6 @@ topology_template: architecture: x86_64 type: linux distribution: ubuntu - version: 16.04 outputs: server_url: From e51cac7a6c93bc67a2aff01a03603def52307f93 Mon Sep 17 00:00:00 2001 From: micafer Date: Fri, 24 Jul 2020 12:32:36 +0200 Subject: [PATCH 12/18] Update test radl --- test/files/test.radl | 8 +++++--- 1 file changed, 5 insertions(+), 3 deletions(-) diff --git a/test/files/test.radl b/test/files/test.radl index 46e033b46..f2a0befb6 100644 --- a/test/files/test.radl +++ b/test/files/test.radl @@ -61,11 +61,13 @@ configure test ( - vars: NODENAME: '{{IM_INFRASTRUCTURE_RADL|json_query("[?id == ''front''].net_interface_0_dns_name|[0]")}}' pre_tasks: - - apt: name=python-setuptools update_cache=yes cache_valid_time=3600 + - apt: name=python-setuptools,python-pip update_cache=yes cache_valid_time=3600 when: ansible_os_family == "Debian" - - yum: name=python-setuptools + - yum: name=epel-release when: ansible_os_family == "RedHat" - - easy_install: name=jmespath + - yum: name=python-setuptools,python-pip + when: ansible_os_family == "RedHat" + - pip: name=jmespath tasks: - debug: msg="NODENAME = {{NODENAME}}" - debug: msg="VERSION = {{ansible_version.major}}" From 826272b0b4089c9dc50acf4f23196fdc5cb76341 Mon Sep 17 00:00:00 2001 From: micafer Date: Mon, 27 Jul 2020 10:56:18 +0200 Subject: [PATCH 13/18] Fix test --- test/integration/TestIM.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/test/integration/TestIM.py b/test/integration/TestIM.py index 43cefa8d5..2f8941276 100755 --- a/test/integration/TestIM.py +++ b/test/integration/TestIM.py @@ -730,7 +730,7 @@ def test_95_destroy(self): # self.assertTrue( # success, msg="ERROR calling DestroyInfrastructure: " + str(res)) - def test_100_proxy(self): + def test_97_proxy(self): """ Test connecting a VM using a proxy host """ @@ -785,7 +785,7 @@ def test_100_proxy(self): all_configured = self.wait_inf_state(inf_id2, VirtualMachine.CONFIGURED, 600) 
self.assertTrue(all_configured, msg="ERROR waiting the infrastructure to be configured (timeout).") - def test_110_destroy(self): + def test_99_destroy(self): """ Test DestroyInfrastructure function """ From 162739d2093da920a8a071b09d410270a9fa04e1 Mon Sep 17 00:00:00 2001 From: micafer Date: Mon, 27 Jul 2020 11:22:28 +0200 Subject: [PATCH 14/18] Fix issue --- IM/ConfManager.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/IM/ConfManager.py b/IM/ConfManager.py index 3cc39291b..7a6457459 100644 --- a/IM/ConfManager.py +++ b/IM/ConfManager.py @@ -774,7 +774,7 @@ def generate_playbook(self, vm, ctxt_elem, tmp_dir): conf_content = merge_recipes(conf_content, configure.recipes) conf_out = open(conf_filename, 'w') - conf_out.write(conf_content) + conf_out.write(str(conf_content)) conf_out.close() recipe_files.append(ctxt_elem.configure + "_" + ctxt_elem.system + "_task.yml") From f37858393a9fd05b47e34e0ee2b4b7055cb8685d Mon Sep 17 00:00:00 2001 From: micafer Date: Mon, 27 Jul 2020 11:47:12 +0200 Subject: [PATCH 15/18] Fix issue --- IM/VirtualMachine.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/IM/VirtualMachine.py b/IM/VirtualMachine.py index 083bd0338..cf90dd072 100644 --- a/IM/VirtualMachine.py +++ b/IM/VirtualMachine.py @@ -913,7 +913,6 @@ def is_configured(self): def get_ctxt_log(self, remote_dir, ssh, delete=False): tmp_dir = tempfile.mkdtemp() - conf_out = "" # Download the contextualization agent log try: @@ -933,6 +932,7 @@ def get_ctxt_log(self, remote_dir, ssh, delete=False): self.log_exception( "Error deleting remote contextualization process log: " + remote_dir + '/ctxt_agent.log') except Exception: + conf_out = "" self.log_exception( "Error getting contextualization process log: " + remote_dir + '/ctxt_agent.log') self.configured = False From f60c4151026fcc87942d794df9381c728183c3ae Mon Sep 17 00:00:00 2001 From: micafer Date: Mon, 27 Jul 2020 12:22:30 +0200 Subject: [PATCH 16/18] Fix py3 issue --- IM/ConfManager.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/IM/ConfManager.py b/IM/ConfManager.py index 7a6457459..3fc71c2de 100644 --- a/IM/ConfManager.py +++ b/IM/ConfManager.py @@ -765,7 +765,7 @@ def generate_playbook(self, vm, ctxt_elem, tmp_dir): if vault_password: vault_edit = self.get_vault_editor(vault_password) if configure.recipes.strip().startswith("$ANSIBLE_VAULT"): - recipes = vault_edit.vault.decrypt(configure.recipes.strip()) + recipes = str(vault_edit.vault.decrypt(configure.recipes.strip())) else: recipes = configure.recipes conf_content = merge_recipes(conf_content, recipes) From 080ab44ead0ab935c97ff35479e23ca2e019b97f Mon Sep 17 00:00:00 2001 From: micafer Date: Tue, 28 Jul 2020 08:20:31 +0200 Subject: [PATCH 17/18] Fix py3 issue --- IM/ConfManager.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/IM/ConfManager.py b/IM/ConfManager.py index 3fc71c2de..4700d812b 100644 --- a/IM/ConfManager.py +++ b/IM/ConfManager.py @@ -769,7 +769,7 @@ def generate_playbook(self, vm, ctxt_elem, tmp_dir): else: recipes = configure.recipes conf_content = merge_recipes(conf_content, recipes) - conf_content = vault_edit.vault.encrypt(conf_content) + conf_content = str(vault_edit.vault.encrypt(conf_content)) else: conf_content = merge_recipes(conf_content, configure.recipes) From db8eb4e9de0c9869785b725a5d23f5cf01be544b Mon Sep 17 00:00:00 2001 From: micafer Date: Tue, 28 Jul 2020 11:23:18 +0200 Subject: [PATCH 18/18] Fix py3 issue --- IM/ConfManager.py | 7 +++---- 1 file changed, 3 insertions(+), 4 
deletions(-) diff --git a/IM/ConfManager.py b/IM/ConfManager.py index 4700d812b..75fc4954a 100644 --- a/IM/ConfManager.py +++ b/IM/ConfManager.py @@ -765,11 +765,11 @@ def generate_playbook(self, vm, ctxt_elem, tmp_dir): if vault_password: vault_edit = self.get_vault_editor(vault_password) if configure.recipes.strip().startswith("$ANSIBLE_VAULT"): - recipes = str(vault_edit.vault.decrypt(configure.recipes.strip())) + recipes = vault_edit.vault.decrypt(configure.recipes.strip()).decode() else: recipes = configure.recipes conf_content = merge_recipes(conf_content, recipes) - conf_content = str(vault_edit.vault.encrypt(conf_content)) + conf_content = vault_edit.vault.encrypt(conf_content).decode() else: conf_content = merge_recipes(conf_content, configure.recipes) @@ -1026,8 +1026,7 @@ def generate_playbooks_and_hosts(self): for ctxt_elem in contextualizes[ctxt_num]: if ctxt_elem.system in vm_group and ctxt_elem.get_ctxt_tool() == "Ansible": vm = vm_group[ctxt_elem.system][0] - filenames.extend(self.generate_playbook( - vm, ctxt_elem, tmp_dir)) + filenames.extend(self.generate_playbook(vm, ctxt_elem, tmp_dir)) filenames.append(self.generate_etc_hosts(tmp_dir)) filenames.append(self.generate_inventory(tmp_dir))
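
Taken together, the central technical change in this series is PATCH 03/18 ("Decrease SSH logins"): the SSH wrapper caches its paramiko client, per-operation cleanup becomes conditional on a new auto_close flag, and long-running callers such as check_ctxt_process() and launch_ctxt_agent() pass auto_close=False and close the connection once in a finally block, so a whole polling loop costs a single SSH login. The sketch below illustrates that pattern in isolation; it is a simplified stand-in rather than the project's IM/SSH.py class, and the ReusableSSH name, host and credentials are illustrative only.

import paramiko


class ReusableSSH:
    """Minimal sketch of the auto_close / connection-reuse pattern.

    auto_close=True  -> every call closes the connection afterwards
                        (one SSH login per operation, the old behaviour).
    auto_close=False -> the paramiko client is cached and reused; the
                        caller closes it once, typically in a finally block.
    """

    def __init__(self, host, user, passwd, port=22, auto_close=True):
        self.host = host
        self.user = user
        self.passwd = passwd
        self.port = port
        self.auto_close = auto_close
        self.client = None

    def connect(self, timeout=10):
        # Reuse the cached client while its transport is still authenticated,
        # mirroring the check added at the top of SSH.connect() in the patch.
        if (self.client and self.client.get_transport() and
                self.client.get_transport().is_authenticated()):
            return self.client
        client = paramiko.SSHClient()
        client.set_missing_host_key_policy(paramiko.AutoAddPolicy())
        client.connect(self.host, port=self.port, username=self.user,
                       password=self.passwd, timeout=timeout)
        self.client = client
        return client

    def execute(self, command):
        client = self.connect()
        _, stdout, stderr = client.exec_command(command)
        out = stdout.read().decode()
        err = stderr.read().decode()
        status = stdout.channel.recv_exit_status()
        if self.auto_close:
            self.close()
        return out, err, status

    def close(self):
        if self.client:
            self.client.close()
            self.client = None


# Usage in the spirit of check_ctxt_process(): one SSH login serves the
# whole polling loop instead of one login per "ps <pid>" call.
# ssh = ReusableSSH("vm.example.com", "cloudadm", "secret", auto_close=False)
# try:
#     for _ in range(10):
#         out, err, status = ssh.execute("ps 1234")
# finally:
#     ssh.close()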
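
The last three patches (16/18 to 18/18, "Fix py3 issue") converge on a Python 3 bytes-versus-str detail: the Ansible vault encrypt/decrypt results are bytes, so wrapping them in str() embeds a literal b'...' representation into the generated playbook, while .decode() recovers the intended text. The snippet below demonstrates only that language-level difference with a made-up payload; it is not the ConfManager code itself and does not depend on Ansible.

# Why the final patches replace str(...) with .decode(): under Python 3,
# str() on bytes keeps the b'...' repr instead of the text, which corrupts
# the playbook content that gets written to disk.
payload = "---\n- debug: msg=hello\n".encode("utf-8")  # stand-in for vault output (bytes)

as_str = str(payload)        # "b'---\\n- debug: msg=hello\\n'"  -> repr, wrong
as_text = payload.decode()   # "---\n- debug: msg=hello\n"       -> actual text

print(as_str)
print(as_text)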