diff --git a/web/celery-entrypoint.sh b/web/celery-entrypoint.sh
index 6a1f7c837..d4226326c 100755
--- a/web/celery-entrypoint.sh
+++ b/web/celery-entrypoint.sh
@@ -136,12 +136,6 @@
 then
   pip install -r /usr/src/github/CMSeeK/requirements.txt
 fi
-if [ ! -d "/usr/src/github/Infoga" ]
-then
-  echo "Cloning Infoga"
-  git clone https://github.com/m4ll0k/Infoga /usr/src/github/Infoga
-fi
-
 # clone ctfr
 if [ ! -d "/usr/src/github/ctfr" ]
 then
diff --git a/web/reNgine/tasks.py b/web/reNgine/tasks.py
index 3f5edc656..c2b11728e 100644
--- a/web/reNgine/tasks.py
+++ b/web/reNgine/tasks.py
@@ -689,10 +689,6 @@ def osint_discovery(config, host, scan_history_id, activity_id, results_dir, ctx
     grouped_tasks = []

     if 'emails' in osint_lookup:
-        emails = get_and_save_emails(scan_history, activity_id, results_dir)
-        emails_str = '\n'.join([f'• `{email}`' for email in emails])
-        # self.notify(fields={'Emails': emails_str})
-        # ctx['track'] = False
         _task = h8mail.si(
             config=config,
             host=host,
@@ -4329,56 +4325,6 @@ def get_and_save_dork_results(lookup_target, results_dir, type, lookup_keywords=
     return results


-
-def get_and_save_emails(scan_history, activity_id, results_dir):
-    """Get and save emails from Google, Bing and Baidu.
-
-    Args:
-        scan_history (startScan.ScanHistory): Scan history object.
-        activity_id: ScanActivity Object
-        results_dir (str): Results directory.
-
-    Returns:
-        list: List of emails found.
-    """
-    emails = []
-
-    # Proxy settings
-    # get_random_proxy()
-
-    # Gather emails from Google, Bing and Baidu
-    output_file = f'{results_dir}/emails_tmp.txt'
-    history_file = f'{results_dir}/commands.txt'
-    command = f'python3 /usr/src/github/Infoga/infoga.py --domain {scan_history.domain.name} --source all --report {output_file}'
-    try:
-        run_command(
-            command,
-            shell=False,
-            history_file=history_file,
-            scan_id=scan_history.id,
-            activity_id=activity_id)
-
-        if not os.path.isfile(output_file):
-            logger.info('No Email results')
-            return []
-
-        with open(output_file) as f:
-            for line in f.readlines():
-                if 'Email' in line:
-                    split_email = line.split(' ')[2]
-                    emails.append(split_email)
-
-        output_path = f'{results_dir}/emails.txt'
-        with open(output_path, 'w') as output_file:
-            for email_address in emails:
-                save_email(email_address, scan_history)
-                output_file.write(f'{email_address}\n')
-
-    except Exception as e:
-        logger.exception(e)
-    return emails
-
-
 def save_metadata_info(meta_dict):
     """Extract metadata from Google Search.