diff --git a/.github/codeql-config.yml b/.github/codeql-config.yml new file mode 100644 index 0000000000..2372524493 --- /dev/null +++ b/.github/codeql-config.yml @@ -0,0 +1,12 @@ +name: "CodeQL config" + +queries: + - uses: security-extended + +query-filters: + - exclude: + id: py/path-injection # Too many false positives + +paths-ignore: + - "**/.git/**" + - "**/.github/**" diff --git a/.github/workflows/codeql-analysis.yml b/.github/workflows/codeql-analysis.yml index ce094bd8d7..b9fb2f4e3a 100644 --- a/.github/workflows/codeql-analysis.yml +++ b/.github/workflows/codeql-analysis.yml @@ -1,61 +1,76 @@ -name: "CodeQL" +name: "CodeQL Advanced" on: push: branches: [ "master" ] pull_request: - # The branches below must be a subset of the branches above branches: [ "master" ] schedule: - - cron: '17 16 * * 0' + - cron: '18 14 * * 3' jobs: analyze: - name: Analyze - runs-on: ubuntu-latest + name: Analyze (${{ matrix.language }}) + # Runner size impacts CodeQL analysis time. To learn more, please see: + # - https://gh.io/recommended-hardware-resources-for-running-codeql + # - https://gh.io/supported-runners-and-hardware-resources + # - https://gh.io/using-larger-runners (GitHub.com only) + # Consider using larger runners or machines with greater resources for possible analysis time improvements. + runs-on: ${{ (matrix.language == 'swift' && 'macos-latest') || 'ubuntu-latest' }} permissions: + # required for all workflows + security-events: write + + # required to fetch internal or private CodeQL packs + packages: read + + # only required for workflows in private repositories actions: read contents: read - security-events: write strategy: fail-fast: false matrix: - language: [ 'python' ] - # CodeQL supports [ 'cpp', 'csharp', 'go', 'java', 'javascript', 'python', 'ruby' ] - # Learn more about CodeQL language support at https://aka.ms/codeql-docs/language-support - + include: + - language: python + build-mode: none + # CodeQL supports the following values keywords for 'language': 'c-cpp', 'csharp', 'go', 'java-kotlin', 'javascript-typescript', 'python', 'ruby', 'swift' + # Use `c-cpp` to analyze code written in C, C++ or both + # Use 'java-kotlin' to analyze code written in Java, Kotlin or both + # Use 'javascript-typescript' to analyze code written in JavaScript, TypeScript or both + # To learn more about changing the languages that are analyzed or customizing the build mode for your analysis, + # see https://docs.github.com/en/code-security/code-scanning/creating-an-advanced-setup-for-code-scanning/customizing-your-advanced-setup-for-code-scanning. + # If you are analyzing a compiled language, you can modify the 'build-mode' for that language to customize how + # your codebase is analyzed, see https://docs.github.com/en/code-security/code-scanning/creating-an-advanced-setup-for-code-scanning/codeql-code-scanning-for-compiled-languages steps: - name: Checkout repository - uses: actions/checkout@v3 + uses: actions/checkout@v4 # Initializes the CodeQL tools for scanning. - name: Initialize CodeQL - uses: github/codeql-action/init@v2 + uses: github/codeql-action/init@v3 with: languages: ${{ matrix.language }} - # If you wish to specify custom queries, you can do so here or in a config file. - # By default, queries listed here will override any specified in a config file. - # Prefix the list here with "+" to use these queries and those in the config file. 
- - # Details on CodeQL's query packs refer to : https://docs.github.com/en/code-security/code-scanning/automatically-scanning-your-code-for-vulnerabilities-and-errors/configuring-code-scanning#using-queries-in-ql-packs - # queries: security-extended,security-and-quality - - - # Autobuild attempts to build any compiled languages (C/C++, C#, or Java). - # If this step fails, then you should remove it and run the build manually (see below) - - name: Autobuild - uses: github/codeql-action/autobuild@v2 - + build-mode: ${{ matrix.build-mode }} + config-file: .github/codeql-config.yml + + # If the analyze step fails for one of the languages you are analyzing with + # "We were unable to automatically build your code", modify the matrix above + # to set the build mode to "manual" for that language. Then modify this step + # to build your code. # ℹī¸ Command-line programs to run using the OS shell. # 📚 See https://docs.github.com/en/actions/using-workflows/workflow-syntax-for-github-actions#jobsjob_idstepsrun - - # If the Autobuild fails above, remove it and uncomment the following three lines. - # modify them (or add more) to build your code if your project, please refer to the EXAMPLE below for guidance. - - # - run: | - # echo "Run, Build Application using script" - # ./location_of_script_within_repo/buildscript.sh + - if: matrix.build-mode == 'manual' + shell: bash + run: | + echo 'If you are using a "manual" build mode for one or more of the' \ + 'languages you are analyzing, replace this with the commands to build' \ + 'your code, for example:' + echo ' make bootstrap' + echo ' make release' + exit 1 - name: Perform CodeQL Analysis - uses: github/codeql-action/analyze@v2 + uses: github/codeql-action/analyze@v3 + with: + category: "/language:${{matrix.language}}" diff --git a/docker/docker-compose.yml b/docker/docker-compose.yml index b9bb7c4968..a5f5dd6864 100644 --- a/docker/docker-compose.yml +++ b/docker/docker-compose.yml @@ -3,6 +3,12 @@ services: postgres: image: "postgres:13" restart: always + deploy: + resources: + limits: + memory: 1G + reservations: + memory: 512M volumes: - $HOME/MobSF/postgresql_data:/var/lib/postgresql/data environment: @@ -15,6 +21,12 @@ services: nginx: image: nginx:latest restart: always + deploy: + resources: + limits: + memory: 1G + reservations: + memory: 256M ports: - "80:4000" - "1337:4001" @@ -25,8 +37,31 @@ services: networks: - mobsf_network + djangoq: + image: opensecurity/mobile-security-framework-mobsf:latest + build: + context: .. + dockerfile: Dockerfile + restart: unless-stopped + command: /home/mobsf/Mobile-Security-Framework-MobSF/scripts/qcluster.sh + volumes: + - $HOME/MobSF/mobsf_data:/home/mobsf/.MobSF + environment: + - POSTGRES_USER=postgres + - POSTGRES_PASSWORD=password + - POSTGRES_DB=mobsf + - POSTGRES_HOST=postgres + - POSTGRES_PORT=5432 + depends_on: + - postgres + networks: + - mobsf_network + mobsf: image: opensecurity/mobile-security-framework-mobsf:latest + build: + context: .. 
+ dockerfile: Dockerfile restart: always volumes: - $HOME/MobSF/mobsf_data:/home/mobsf/.MobSF @@ -36,10 +71,15 @@ services: - POSTGRES_DB=mobsf - POSTGRES_HOST=postgres - POSTGRES_PORT=5432 + - MOBSF_ASYNC_ANALYSIS=1 healthcheck: test: curl -f http://localhost:8000/login/ || exit 1 + interval: 30s + timeout: 10s + retries: 5 depends_on: - postgres + - djangoq networks: - mobsf_network extra_hosts: diff --git a/mobsf/DynamicAnalyzer/tools/frida_scripts/android/default/ssl_pinning_bypass.js b/mobsf/DynamicAnalyzer/tools/frida_scripts/android/default/ssl_pinning_bypass.js index 7793712af1..723381bf99 100644 --- a/mobsf/DynamicAnalyzer/tools/frida_scripts/android/default/ssl_pinning_bypass.js +++ b/mobsf/DynamicAnalyzer/tools/frida_scripts/android/default/ssl_pinning_bypass.js @@ -241,15 +241,82 @@ Java.perform(function() { } catch (err) { send('[SSL Pinning Bypass] Cronet not found'); } - /* Certificate Transparency Bypass - Ajin Abraham - opensecurity.in */ - try{ + /* Boye AbstractVerifier */ + try { + Java.use("ch.boye.httpclientandroidlib.conn.ssl.AbstractVerifier").verify.implementation = function(host, ssl) { + send("[SSL Pinning Bypass] Bypassing Boye AbstractVerifier" + host); + }; + } catch (err) { + send("[SSL Pinning Bypass] Boye AbstractVerifier not found"); + } + /* Appmattus */ + try { + /* Certificate Transparency Bypass Ajin Abraham - opensecurity.in */ Java.use('com.babylon.certificatetransparency.CTInterceptorBuilder').includeHost.overload('java.lang.String').implementation = function(host) { send('[SSL Pinning Bypass] Bypassing Certificate Transparency check'); return this.includeHost('nonexistent.domain'); }; + } catch (err) { + send('[SSL Pinning Bypass] babylon certificatetransparency.CTInterceptorBuilder not found'); + } + try { + Java.use("com.appmattus.certificatetransparency.internal.verifier.CertificateTransparencyInterceptor")["intercept"].implementation = function(a) { + send("[SSL Pinning Bypass] Appmattus Certificate Transparency"); + return a.proceed(a.request()); + }; + } catch (err) { + send("[SSL Pinning Bypass] Appmattus CertificateTransparencyInterceptor not found"); + } + try{ + bypassOkHttp3CertificateTransparency(); } catch (err) { send('[SSL Pinning Bypass] certificatetransparency.CTInterceptorBuilder not found'); } - }, 0); + + +function bypassOkHttp3CertificateTransparency() { + // https://gist.github.com/m-rey/f2a235123908ca42395b6d3c5fe1128e + var CertificateTransparencyInterceptor = Java.use('com.appmattus.certificatetransparency.internal.verifier.CertificateTransparencyInterceptor'); + var OkHttpClientBuilder = Java.use('okhttp3.OkHttpClient$Builder'); + + CertificateTransparencyInterceptor.intercept.implementation = function (chain) { + var request = chain.request(); + var url = request.url(); + var host = url.host(); + + // Dynamically access the VerificationResult classes + var VerificationResult = Java.use('com.appmattus.certificatetransparency.VerificationResult'); + var VerificationResultSuccessInsecureConnection = Java.use('com.appmattus.certificatetransparency.VerificationResult$Success$InsecureConnection'); + var VerificationResultFailureNoCertificates = Java.use('com.appmattus.certificatetransparency.VerificationResult$Failure$NoCertificates'); + + // Create instances of the desired VerificationResult classes + var success = VerificationResultSuccessInsecureConnection.$new(host); + var failureNoCertificates = VerificationResultFailureNoCertificates.$new(); + + // Bypass certificate transparency verification + var certs = 
chain.connection().handshake().peerCertificates(); + if (certs.length === 0) { + send('[SSL Pinning Bypass] Certificate transparency bypassed.'); + return failureNoCertificates; + } + + try { + // Proceed with the original request + return chain.proceed(request); + } catch (e) { + // Catch SSLPeerUnverifiedException and return intercepted response + if (e.toString().includes('SSLPeerUnverifiedException')) { + send('[SSL Pinning Bypass] Certificate transparency failed.'); + return failureNoCertificates; + } + throw e; + } + }; + + OkHttpClientBuilder.build.implementation = function () { + // Intercept the OkHttpClient creation + var client = this.build(); + return client; + }; +} \ No newline at end of file diff --git a/mobsf/DynamicAnalyzer/tools/frida_scripts/android/others/audit-webview.js b/mobsf/DynamicAnalyzer/tools/frida_scripts/android/others/audit-webview.js new file mode 100644 index 0000000000..c9e0b2810a --- /dev/null +++ b/mobsf/DynamicAnalyzer/tools/frida_scripts/android/others/audit-webview.js @@ -0,0 +1,53 @@ +Java.perform(function () { + send("Starting WebView configuration dump..."); + + const WebView = Java.use('android.webkit.WebView'); + + // Hook the first overload: loadUrl(String) + WebView.loadUrl.overload('java.lang.String').implementation = function (url) { + send("[+] WebView.loadUrl(String) called: " + url); + + // Dump WebSettings after loading a URL + dumpWebSettingsSafely(this); + + // Call the original method + this.loadUrl(url); + }; + + // Hook the second overload: loadUrl(String, Map) + WebView.loadUrl.overload('java.lang.String', 'java.util.Map').implementation = function (url, additionalHttpHeaders) { + send("[+] WebView.loadUrl(String, Map) called: " + url); + send(" Additional HTTP Headers: " + additionalHttpHeaders); + + // Dump WebSettings after loading a URL + dumpWebSettingsSafely(this); + + // Call the original method + this.loadUrl(url, additionalHttpHeaders); + }; + + function dumpWebSettingsSafely(webView) { + try { + const webSettings = webView.getSettings(); + send("\n[+] Dumping WebSettings:"); + + // Security-sensitive settings + send(" JavaScript Enabled: " + webSettings.getJavaScriptEnabled()); + send(" Allow File Access: " + webSettings.getAllowFileAccess()); + send(" Allow Content Access: " + webSettings.getAllowContentAccess()); + send(" Mixed Content Mode: " + webSettings.getMixedContentMode()); + send(" Safe Browsing Enabled: " + webSettings.getSafeBrowsingEnabled()); + send(" Dom Storage Enabled: " + webSettings.getDomStorageEnabled()); + send(" Allow Universal Access From File URLs: " + webSettings.getAllowUniversalAccessFromFileURLs()); + send(" Allow File Access From File URLs: " + webSettings.getAllowFileAccessFromFileURLs()); + // Caching and storage + send(" Cache Mode: " + webSettings.getCacheMode()); + // User agent and other information + send(" User Agent String: " + webSettings.getUserAgentString()); + } catch (err) { + send("Error while dumping WebView configuration: " + err); + } + } + + send("Hooks installed for WebView."); +}); diff --git a/mobsf/DynamicAnalyzer/tools/frida_scripts/android/others/detect-ssl-pinning.js b/mobsf/DynamicAnalyzer/tools/frida_scripts/android/others/detect-ssl-pinning.js new file mode 100644 index 0000000000..d5efa7a525 --- /dev/null +++ b/mobsf/DynamicAnalyzer/tools/frida_scripts/android/others/detect-ssl-pinning.js @@ -0,0 +1,20 @@ +try { + var UnverifiedCertError = Java.use('javax.net.ssl.SSLPeerUnverifiedException'); + UnverifiedCertError.$init.implementation = function(str) { + 
send('Unexpected SSLPeerUnverifiedException occurred'); + try { + var stackTrace = Java.use('java.lang.Thread').currentThread().getStackTrace(); + var exceptionStackIndex = stackTrace.findIndex(stack => stack.getClassName() === "javax.net.ssl.SSLPeerUnverifiedException"); + var callingFunctionStack = stackTrace[exceptionStackIndex + 1]; + var className = callingFunctionStack.getClassName(); + var methodName = callingFunctionStack.getMethodName(); + var callingClass = Java.use(className); + var callingMethod = callingClass[methodName]; + send('SSL exception caused: ' + className + '.' + methodName + '. Patch this method to bypass pinning.'); + if (callingMethod.implementation) { + return; + } + } catch (e) {} + return this.$init(str); + }; +} catch (err) {} diff --git a/mobsf/DynamicAnalyzer/tools/frida_scripts/android/others/dump-intent.js b/mobsf/DynamicAnalyzer/tools/frida_scripts/android/others/dump-intent.js index 189c6dffbf..2fd465e47f 100644 --- a/mobsf/DynamicAnalyzer/tools/frida_scripts/android/others/dump-intent.js +++ b/mobsf/DynamicAnalyzer/tools/frida_scripts/android/others/dump-intent.js @@ -1,21 +1,93 @@ -// https://gist.github.com/bet4it/b62ac2d5bd45b8cb699905fa498baf5e Java.perform(function () { - var act = Java.use("android.app.Activity"); - act.getIntent.overload().implementation = function () { - var intent = this.getIntent() - var cp = intent.getComponent() - send("[Intent Dumper] Starting " + cp.getPackageName() + "/" + cp.getClassName()) - var ext = intent.getExtras(); - if (ext) { - var keys = ext.keySet() - var iterator = keys.iterator() - while (iterator.hasNext()) { - var k = iterator.next().toString() - var v = ext.get(k) - send("\t" + v.getClass().getName()) - send("\t" + k + ' : ' + v.toString()) - } + var Activity = Java.use("android.app.Activity"); + + Activity.getIntent.overload().implementation = function () { + var intent = this.getIntent(); + var component = intent.getComponent(); + + send("[Intent Dumper] Captured Intent for Activity:"); + + // Component (target package and class) + if (component) { + send(" Component:"); + send(" Package: " + component.getPackageName()); + send(" Class: " + component.getClassName()); + } else { + send(" Component: None"); } - return intent; - }; - }) \ No newline at end of file + + // Action + var action = intent.getAction(); + send(" Action: " + (action ? action : "None")); + + // Data URI + var dataUri = intent.getDataString(); + send(" Data URI: " + (dataUri ? 
dataUri : "None")); + + // Flags + var flags = intent.getFlags(); + send(" Flags: " + flags); + + // Dumping extras in the Intent + var extras = intent.getExtras(); + if (extras) { + send(" Extras:"); + var iterator = extras.keySet().iterator(); + while (iterator.hasNext()) { + var key = iterator.next(); + var value = extras.get(key); + if (value !== null) { + send(" " + key + " (" + value.getClass().getName() + "): " + valueToString(value)); + } + } + } else { + send(" Extras: None"); + } + + return intent; + }; + + // Helper function to convert intent extras to a readable string + function valueToString(value) { + var valueType = value.getClass().getName(); + + if (valueType === "android.os.Bundle") { + return bundleToString(Java.cast(value, Java.use("android.os.Bundle"))); + } else if (valueType === "java.lang.String") { + return '"' + value + '"'; + } else if (valueType === "java.lang.Integer" || valueType === "java.lang.Float" || valueType === "java.lang.Boolean") { + return value.toString(); + } else if (valueType === "java.util.ArrayList") { + return arrayListToString(Java.cast(value, Java.use("java.util.ArrayList"))); + } else { + send("Unsupported extra type for key. Type: " + valueType); + return value.toString(); + } + } + + // Function to handle nested Bundles + function bundleToString(bundle) { + var result = "{"; + var iterator = bundle.keySet().iterator(); + while (iterator.hasNext()) { + var key = iterator.next(); + var value = bundle.get(key); + result += key + ": " + (value !== null ? valueToString(value) : "null") + ", "; + } + result = result.slice(0, -2); // Remove trailing comma and space + result += "}"; + return result; + } + + // Function to handle ArrayLists (if any) + function arrayListToString(arrayList) { + var result = "["; + for (var i = 0; i < arrayList.size(); i++) { + var item = arrayList.get(i); + result += valueToString(item) + ", "; + } + result = result.slice(0, -2); // Remove trailing comma and space + result += "]"; + return result; + } +}); diff --git a/mobsf/DynamicAnalyzer/tools/frida_scripts/android/others/ssl-pinning-bypass.js b/mobsf/DynamicAnalyzer/tools/frida_scripts/android/others/ssl-pinning-bypass.js index 42f380076c..0b97998426 100644 --- a/mobsf/DynamicAnalyzer/tools/frida_scripts/android/others/ssl-pinning-bypass.js +++ b/mobsf/DynamicAnalyzer/tools/frida_scripts/android/others/ssl-pinning-bypass.js @@ -720,69 +720,70 @@ function dynamicPatching() { return null; } } - try { - var UnverifiedCertError = Java.use('javax.net.ssl.SSLPeerUnverifiedException'); - UnverifiedCertError.$init.implementation = function(str) { - console.log('[!] Unexpected SSLPeerUnverifiedException occurred, trying to patch it dynamically...!'); - try { - var stackTrace = Java.use('java.lang.Thread').currentThread().getStackTrace(); - var exceptionStackIndex = stackTrace.findIndex(stack => stack.getClassName() === "javax.net.ssl.SSLPeerUnverifiedException"); - var callingFunctionStack = stackTrace[exceptionStackIndex + 1]; - var className = callingFunctionStack.getClassName(); - var methodName = callingFunctionStack.getMethodName(); - var callingClass = Java.use(className); - var callingMethod = callingClass[methodName]; - console.log('[!] Attempting to bypass uncommon SSL Pinning method on: ' + className + '.' 
+ methodName + '!'); - if (callingMethod.implementation) { - return; - } - var returnTypeName = callingMethod.returnType.type; - callingMethod.implementation = function() { - rudimentaryFix(returnTypeName); - }; - } catch (e) { - if (String(e).includes(".overload")) { - var splittedList = String(e).split(".overload"); - for (let i = 2; i < splittedList.length; i++) { - var extractedOverload = splittedList[i].trim().split("(")[1].slice(0, -1).replaceAll("'", ""); - if (extractedOverload.includes(",")) { - var argList = extractedOverload.split(", "); - console.log('[!] Attempting overload of ' + className + '.' + methodName + ' with arguments: ' + extractedOverload + '!'); - if (argList.length == 2) { - callingMethod.overload(argList[0], argList[1]).implementation = function(a, b) { - rudimentaryFix(returnTypeName); - } - } else if (argNum == 3) { - callingMethod.overload(argList[0], argList[1], argList[2]).implementation = function(a, b, c) { - rudimentaryFix(returnTypeName); - } - } else if (argNum == 4) { - callingMethod.overload(argList[0], argList[1], argList[2], argList[3]).implementation = function(a, b, c, d) { - rudimentaryFix(returnTypeName); - } - } else if (argNum == 5) { - callingMethod.overload(argList[0], argList[1], argList[2], argList[3], argList[4]).implementation = function(a, b, c, d, e) { - rudimentaryFix(returnTypeName); - } - } else if (argNum == 6) { - callingMethod.overload(argList[0], argList[1], argList[2], argList[3], argList[4], argList[5]).implementation = function(a, b, c, d, e, f) { - rudimentaryFix(returnTypeName); - } - } - } else { - callingMethod.overload(extractedOverload).implementation = function(a) { - rudimentaryFix(returnTypeName); - } - } - } - } else { - console.log('[-] Failed to dynamically patch SSLPeerUnverifiedException ' + e + '!'); - } - } - return this.$init(str); - }; - } catch (err) {} + // try { + // var UnverifiedCertError = Java.use('javax.net.ssl.SSLPeerUnverifiedException'); + // UnverifiedCertError.$init.implementation = function(str) { + // console.log('[!] Unexpected SSLPeerUnverifiedException occurred, trying to patch it dynamically...!'); + // try { + // var stackTrace = Java.use('java.lang.Thread').currentThread().getStackTrace(); + // var exceptionStackIndex = stackTrace.findIndex(stack => stack.getClassName() === "javax.net.ssl.SSLPeerUnverifiedException"); + // var callingFunctionStack = stackTrace[exceptionStackIndex + 1]; + // var className = callingFunctionStack.getClassName(); + // var methodName = callingFunctionStack.getMethodName(); + // var callingClass = Java.use(className); + // var callingMethod = callingClass[methodName]; + // console.log('[!] Attempting to bypass uncommon SSL Pinning method on: ' + className + '.' + methodName + '!'); + // if (callingMethod.implementation) { + // return; + // } + // var returnTypeName = callingMethod.returnType.type; + // callingMethod.implementation = function() { + // rudimentaryFix(returnTypeName); + // }; + // } catch (e) { + // if (String(e).includes(".overload")) { + // var splittedList = String(e).split(".overload"); + // for (let i = 2; i < splittedList.length; i++) { + // var extractedOverload = splittedList[i].trim().split("(")[1].slice(0, -1).replaceAll("'", ""); + // if (extractedOverload.includes(",")) { + // var argList = extractedOverload.split(", "); + // console.log('[!] Attempting overload of ' + className + '.' 
+ methodName + ' with arguments: ' + extractedOverload + '!'); + // if (argList.length == 2) { + // callingMethod.overload(argList[0], argList[1]).implementation = function(a, b) { + // rudimentaryFix(returnTypeName); + // } + // } else if (argNum == 3) { + // callingMethod.overload(argList[0], argList[1], argList[2]).implementation = function(a, b, c) { + // rudimentaryFix(returnTypeName); + // } + // } else if (argNum == 4) { + // callingMethod.overload(argList[0], argList[1], argList[2], argList[3]).implementation = function(a, b, c, d) { + // rudimentaryFix(returnTypeName); + // } + // } else if (argNum == 5) { + // callingMethod.overload(argList[0], argList[1], argList[2], argList[3], argList[4]).implementation = function(a, b, c, d, e) { + // rudimentaryFix(returnTypeName); + // } + // } else if (argNum == 6) { + // callingMethod.overload(argList[0], argList[1], argList[2], argList[3], argList[4], argList[5]).implementation = function(a, b, c, d, e, f) { + // rudimentaryFix(returnTypeName); + // } + // } + // } else { + // callingMethod.overload(extractedOverload).implementation = function(a) { + // rudimentaryFix(returnTypeName); + // } + // } + // } + // } else { + // console.log('[-] Failed to dynamically patch SSLPeerUnverifiedException ' + e + '!'); + // } + // } + // return this.$init(str); + // }; + // } catch (err) {} } + setTimeout(function() { Java.perform(function() { var X509TrustManager = Java.use('javax.net.ssl.X509TrustManager'); diff --git a/mobsf/DynamicAnalyzer/tools/frida_scripts/android/others/trace-intent.js b/mobsf/DynamicAnalyzer/tools/frida_scripts/android/others/trace-intent.js new file mode 100644 index 0000000000..5495153105 --- /dev/null +++ b/mobsf/DynamicAnalyzer/tools/frida_scripts/android/others/trace-intent.js @@ -0,0 +1,78 @@ +Java.perform(function () { + // Hook the startActivity method in the Activity class + var Activity = Java.use("android.app.Activity"); + + Activity.startActivity.overload("android.content.Intent").implementation = function (intent) { + send("Intercepted startActivity with Intent:"); + + // Dump the Intent details + dumpIntent(intent); + + // Call the original startActivity method to ensure normal behavior + this.startActivity(intent); + }; + + // Function to dump intent details + function dumpIntent(intent) { + // Action + var action = intent.getAction(); + send(" Action: " + (action ? action : "None")); + + // Data URI + var dataUri = intent.getDataString(); + send(" Data URI: " + (dataUri ? 
dataUri : "None")); + + // Component (target package and class) + var component = intent.getComponent(); + if (component) { + send(" Component:"); + send(" Package: " + component.getPackageName()); + send(" Class: " + component.getClassName()); + } else { + send(" Component: None"); + } + + // Flags + var flags = intent.getFlags(); + send(" Flags: " + flags); + + // Extras + var extras = intent.getExtras(); + if (extras) { + send(" Extras:"); + var iterator = extras.keySet().iterator(); + while (iterator.hasNext()) { + var key = iterator.next(); + var value = extras.get(key); + if (value !== null) { + send(" " + key + ": " + valueToString(value)); + } + } + } else { + send(" Extras: None"); + } + } + + // Helper function to convert intent extras to string for logging + function valueToString(value) { + // Check if the value is a Bundle and handle it accordingly + if (value.getClass().getName() === "android.os.Bundle") { + return bundleToString(Java.cast(value, Java.use("android.os.Bundle"))); + } + return value.toString(); + } + + // Function to handle nested Bundles (if any) + function bundleToString(bundle) { + var result = "{"; + var iterator = bundle.keySet().iterator(); + while (iterator.hasNext()) { + var key = iterator.next(); + var value = bundle.get(key); + result += key + ": " + (value !== null ? value.toString() : "null") + ", "; + } + result = result.slice(0, -2); // Remove trailing comma and space + result += "}"; + return result; + } +}); diff --git a/mobsf/DynamicAnalyzer/tools/frida_scripts/android/others/trace-javascript-interface.js b/mobsf/DynamicAnalyzer/tools/frida_scripts/android/others/trace-javascript-interface.js new file mode 100644 index 0000000000..3905230e1a --- /dev/null +++ b/mobsf/DynamicAnalyzer/tools/frida_scripts/android/others/trace-javascript-interface.js @@ -0,0 +1,25 @@ +Java.perform(function () { + send("Starting JavaScript bridge enumeration..."); + + // Hook the WebView class + const WebView = Java.use('android.webkit.WebView'); + + // Hook the addJavascriptInterface method + WebView.addJavascriptInterface.overload('java.lang.Object', 'java.lang.String').implementation = function (obj, interfaceName) { + send("[+] addJavascriptInterface called"); + send(" Interface Name: " + interfaceName); + send(" Methods exposed:"); + + // Reflect on the object to enumerate methods + const objectClass = obj.getClass(); + const methods = objectClass.getDeclaredMethods(); + for (let i = 0; i < methods.length; i++) { + send(" - " + methods[i].getName()); + } + + // Call the original method + this.addJavascriptInterface(obj, interfaceName); + }; + + send("Hook installed for WebView.addJavascriptInterface."); +}); diff --git a/mobsf/DynamicAnalyzer/views/common/shared.py b/mobsf/DynamicAnalyzer/views/common/shared.py index bfc8d9875b..a87edfb2e7 100644 --- a/mobsf/DynamicAnalyzer/views/common/shared.py +++ b/mobsf/DynamicAnalyzer/views/common/shared.py @@ -3,6 +3,7 @@ import logging import os import re +import errno import json import tarfile import shutil @@ -54,18 +55,35 @@ def safe_paths(tar_meta): yield fh +def onerror(func, path, exc_info): + _, exc, _ = exc_info + if exc.errno == errno.EACCES: # Permission error + try: + os.chmod(path, 0o777) + func(path) + except Exception: + pass + elif exc.errno == errno.ENOTEMPTY: # Directory not empty + try: + func(path) + except Exception: + pass + else: + raise + + def untar_files(tar_loc, untar_dir): """Untar files.""" logger.info('Extracting Tar files') - # Extract Device Data - if not tar_loc.exists(): - return False 
- if untar_dir.exists(): - # fix for permission errors - shutil.rmtree(untar_dir) - else: - os.makedirs(untar_dir) try: + # Extract Device Data + if not tar_loc.exists(): + return False + if untar_dir.exists(): + # fix for permission errors + shutil.rmtree(untar_dir, onerror=onerror) + else: + os.makedirs(untar_dir) with tarfile.open(tar_loc.as_posix(), errorlevel=1) as tar: def is_within_directory(directory, target): diff --git a/mobsf/DynamicAnalyzer/views/ios/corellium_apis.py b/mobsf/DynamicAnalyzer/views/ios/corellium_apis.py index e4ce98f8e8..ecfc80cf6d 100644 --- a/mobsf/DynamicAnalyzer/views/ios/corellium_apis.py +++ b/mobsf/DynamicAnalyzer/views/ios/corellium_apis.py @@ -29,6 +29,7 @@ settings, 'CORELLIUM_API_KEY', '') logger = logging.getLogger(__name__) +TIMEOUT = 20 class CorelliumInit: @@ -54,7 +55,7 @@ def api_ready(self): """Check API Availability.""" try: r = requests.get(f'{self.api}/ready', - timeout=5, + timeout=TIMEOUT, proxies=self.proxies, verify=self.verify) if r.status_code in SUCCESS_RESP: @@ -74,7 +75,7 @@ def api_auth(self): return False r = requests.get( f'{self.api}/projects', - timeout=5, + timeout=TIMEOUT, headers=self.headers, proxies=self.proxies, verify=self.verify) @@ -91,7 +92,7 @@ def get_projects(self): ids = [] r = requests.get( f'{self.api}/projects?ids_only=true', - timeout=5, + timeout=TIMEOUT, headers=self.headers, proxies=self.proxies, verify=self.verify) @@ -107,7 +108,7 @@ def get_authorized_keys(self): """Get SSH public keys associated with a project.""" r = requests.get( f'{self.api}/projects/{self.project_id}/keys', - timeout=5, + timeout=TIMEOUT, headers=self.headers, proxies=self.proxies, verify=self.verify) @@ -128,7 +129,7 @@ def add_authorized_key(self, key): } r = requests.post( f'{self.api}/projects/{self.project_id}/keys', - timeout=5, + timeout=TIMEOUT, headers=self.headers, json=data, proxies=self.proxies, @@ -154,7 +155,7 @@ def get_instances(self): instances = [] r = requests.get( f'{self.api}/instances', - timeout=5, + timeout=TIMEOUT, headers=self.headers, proxies=self.proxies, verify=self.verify) @@ -174,7 +175,7 @@ def create_ios_instance(self, name, flavor, version): } r = requests.post( f'{self.api}/instances', - timeout=5, + timeout=TIMEOUT, headers=self.headers, json=data, proxies=self.proxies, @@ -189,7 +190,7 @@ class CorelliumModelsAPI(CorelliumInit): def get_models(self): r = requests.get( f'{self.api}/models', - timeout=5, + timeout=TIMEOUT, headers=self.headers, proxies=self.proxies, verify=self.verify) @@ -210,7 +211,7 @@ def get_supported_os(self, model): return False r = requests.get( f'{self.api}/models/{model}/software', - timeout=5, + timeout=TIMEOUT, headers=self.headers, proxies=self.proxies, verify=self.verify) @@ -232,7 +233,7 @@ def start_instance(self): data = {'paused': False} r = requests.post( f'{self.api}/instances/{self.instance_id}/start', - timeout=5, + timeout=TIMEOUT, headers=self.headers, json=data, proxies=self.proxies, @@ -248,7 +249,7 @@ def stop_instance(self): data = {'soft': True} r = requests.post( f'{self.api}/instances/{self.instance_id}/stop', - timeout=5, + timeout=TIMEOUT, headers=self.headers, json=data, proxies=self.proxies, @@ -263,7 +264,7 @@ def unpause_instance(self): """Unpause instance.""" r = requests.post( f'{self.api}/instances/{self.instance_id}/unpause', - timeout=5, + timeout=TIMEOUT, headers=self.headers, proxies=self.proxies, verify=self.verify) @@ -277,7 +278,7 @@ def reboot_instance(self): """Reboot instance.""" r = requests.post( 
f'{self.api}/instances/{self.instance_id}/reboot', - timeout=5, + timeout=TIMEOUT, headers=self.headers, proxies=self.proxies, verify=self.verify) @@ -304,7 +305,7 @@ def poll_instance(self): """Check instance status.""" r = requests.get( f'{self.api}/instances/{self.instance_id}', - timeout=5, + timeout=TIMEOUT, headers=self.headers, proxies=self.proxies, verify=self.verify) @@ -320,7 +321,7 @@ def screenshot(self): r = requests.get( (f'{self.api}/instances/{self.instance_id}' '/screenshot.png?scale=1'), - timeout=5, + timeout=TIMEOUT, headers=self.headers, stream=True, proxies=self.proxies, @@ -337,7 +338,7 @@ def start_network_capture(self): """Start network capture.""" r = requests.post( f'{self.api}/instances/{self.instance_id}/sslsplit/enable', - timeout=5, + timeout=TIMEOUT, headers=self.headers, proxies=self.proxies, verify=self.verify) @@ -354,7 +355,7 @@ def stop_network_capture(self): """Stop network capture.""" r = requests.post( f'{self.api}/instances/{self.instance_id}/sslsplit/disable', - timeout=5, + timeout=TIMEOUT, headers=self.headers, proxies=self.proxies, verify=self.verify) @@ -368,7 +369,7 @@ def download_network_capture(self): """Download network capture.""" r = requests.get( f'{self.api}/instances/{self.instance_id}/networkMonitor.pcap', - timeout=5, + timeout=TIMEOUT, headers=self.headers, proxies=self.proxies, verify=self.verify) @@ -382,7 +383,7 @@ def console_log(self): """Get Console Log.""" r = requests.get( f'{self.api}/instances/{self.instance_id}/consoleLog', - timeout=5, + timeout=TIMEOUT, headers=self.headers, proxies=self.proxies, verify=self.verify) @@ -396,7 +397,7 @@ def get_ssh_connection_string(self): """Get SSH connection string.""" r = requests.get( f'{self.api}/instances/{self.instance_id}/quickConnectCommand', - timeout=5, + timeout=TIMEOUT, headers=self.headers, proxies=self.proxies, verify=self.verify) @@ -484,7 +485,7 @@ def device_input(self, event, x, y, max_x, max_y): {'buttons': [], 'wait': 100}] r = requests.post( f'{self.api}/instances/{self.instance_id}/input', - timeout=5, + timeout=TIMEOUT, headers=self.headers, json=data, proxies=self.proxies, @@ -506,7 +507,7 @@ def agent_ready(self): """Agent ready.""" r = requests.get( f'{self.api}/instances/{self.instance_id}/agent/v1/app/ready', - timeout=5, + timeout=TIMEOUT, headers=self.headers, proxies=self.proxies, verify=self.verify) @@ -522,7 +523,7 @@ def unlock_device(self): """Unlock iOS device.""" r = requests.post( f'{self.api}/instances/{self.instance_id}/agent/v1/system/unlock', - timeout=5, + timeout=TIMEOUT, headers=self.headers, proxies=self.proxies, verify=self.verify) @@ -556,7 +557,7 @@ def install_ipa(self): """Install IPA.""" r = requests.post( f'{self.api}/instances/{self.instance_id}/agent/v1/app/install', - timeout=5, + timeout=TIMEOUT, headers=self.headers, json={'path': '/tmp/app.ipa'}, proxies=self.proxies, @@ -572,7 +573,7 @@ def run_app(self, bundle_id): r = requests.post( (f'{self.api}/instances/{self.instance_id}' f'/agent/v1/app/apps/{bundle_id}/run'), - timeout=5, + timeout=TIMEOUT, headers=self.headers, proxies=self.proxies, verify=self.verify) @@ -587,7 +588,7 @@ def stop_app(self, bundle_id): r = requests.post( (f'{self.api}/instances/{self.instance_id}' f'/agent/v1/app/apps/{bundle_id}/kill'), - timeout=5, + timeout=TIMEOUT, headers=self.headers, proxies=self.proxies, verify=self.verify) @@ -602,7 +603,7 @@ def remove_app(self, bundle_id): r = requests.post( (f'{self.api}/instances/{self.instance_id}' f'/agent/v1/app/apps/{bundle_id}/uninstall'), - 
timeout=5, + timeout=TIMEOUT, headers=self.headers, proxies=self.proxies, verify=self.verify) @@ -616,7 +617,7 @@ def list_apps(self): """List all apps installed.""" r = requests.get( f'{self.api}/instances/{self.instance_id}/agent/v1/app/apps', - timeout=5, + timeout=TIMEOUT, headers=self.headers, proxies=self.proxies, verify=self.verify) @@ -631,7 +632,7 @@ def get_icons(self, bundleids): r = requests.get( (f'{self.api}/instances/{self.instance_id}' f'/agent/v1/app/icons?{bundleids}'), - timeout=5, + timeout=TIMEOUT, headers=self.headers, proxies=self.proxies, verify=self.verify) diff --git a/mobsf/MalwareAnalyzer/views/android/behaviour_analysis.py b/mobsf/MalwareAnalyzer/views/android/behaviour_analysis.py index a7613e86e4..9425bf6bbf 100644 --- a/mobsf/MalwareAnalyzer/views/android/behaviour_analysis.py +++ b/mobsf/MalwareAnalyzer/views/android/behaviour_analysis.py @@ -7,37 +7,24 @@ from mobsf.MobSF.utils import ( append_scan_status, - get_android_src_dir, -) -from mobsf.StaticAnalyzer.views.sast_engine import ( - scan, ) logger = logging.getLogger(__name__) -def analyze(checksum, app_dir, typ): +def analyze(checksum, sast, data): """Perform behaviour analysis.""" try: root = Path(settings.BASE_DIR) / 'MalwareAnalyzer' / 'views' rules = root / 'android' / 'rules' / 'behaviour_rules.yaml' - app_dir = Path(app_dir) - src = get_android_src_dir(app_dir, typ) - skp = settings.SKIP_CLASS_PATH msg = 'Android Behaviour Analysis Started' logger.info(msg) append_scan_status(checksum, msg) - # Behaviour Analysis - findings = scan( - checksum, - rules.as_posix(), - {'.java', '.kt'}, - [src.as_posix() + '/'], - skp) + behaviour_finds = sast.run_rules(data, rules.as_posix()) msg = 'Android Behaviour Analysis Completed' logger.info(msg) append_scan_status(checksum, msg) - return findings + return behaviour_finds except Exception as exp: msg = 'Failed to perform behaviour analysis' logger.exception(msg) diff --git a/mobsf/MobSF/init.py b/mobsf/MobSF/init.py index b15da72d68..92b7101d85 100644 --- a/mobsf/MobSF/init.py +++ b/mobsf/MobSF/init.py @@ -18,13 +18,13 @@ logger = logging.getLogger(__name__) -VERSION = '4.1.9' +VERSION = '4.2.0' BANNER = r""" - __ __ _ ____ _____ _ _ _ - | \/ | ___ | |__/ ___|| ___|_ _| || | / | - | |\/| |/ _ \| '_ \___ \| |_ \ \ / / || |_ | | - | | | | (_) | |_) |__) | _| \ V /|__ _|| | - |_| |_|\___/|_.__/____/|_| \_/ |_|(_)_| + __ __ _ ____ _____ _ _ ____ + | \/ | ___ | |__/ ___|| ___|_ _| || | |___ \ + | |\/| |/ _ \| '_ \___ \| |_ \ \ / / || |_ __) | + | | | | (_) | |_) |__) | _| \ V /|__ _| / __/ + |_| |_|\___/|_.__/____/|_| \_/ |_|(_)_____| """ # noqa: W291 # ASCII Font: Standard diff --git a/mobsf/MobSF/management/commands/clear_tasks.py b/mobsf/MobSF/management/commands/clear_tasks.py new file mode 100644 index 0000000000..3e436f0384 --- /dev/null +++ b/mobsf/MobSF/management/commands/clear_tasks.py @@ -0,0 +1,18 @@ +from django.core.management.base import BaseCommand + +from django_q.models import ( + OrmQ, + Task, +) + +from mobsf.StaticAnalyzer.models import EnqueuedTask + + +class Command(BaseCommand): + help = 'Deletes all tasks in Django Q' # noqa: A003 + + def handle(self, *args, **kwargs): + Task.objects.all().delete() + OrmQ.objects.all().delete() + EnqueuedTask.objects.all().delete() + self.stdout.write(self.style.SUCCESS('Successfully deleted all Django Q tasks')) diff --git a/mobsf/MobSF/security.py b/mobsf/MobSF/security.py index 257e692606..243b7207c9 100644 --- a/mobsf/MobSF/security.py +++ b/mobsf/MobSF/security.py @@ -2,6 +2,7 @@ import subprocess 
import functools import logging +import re import sys from shutil import which from pathlib import Path @@ -209,3 +210,15 @@ def sanitize_redirect(url): elif url.startswith('/'): return url return root + + +def sanitize_filename(filename): + """Sanitize Filename.""" + # Remove any characters + # that are not alphanumeric, hyphens, underscores, or dots + safe_filename = re.sub(r'[^a-zA-Z0-9._-]', '_', filename) + # Merge multiple underscores into one + safe_filename = re.sub(r'__+', '_', safe_filename) + # Remove leading and trailing underscores + safe_filename = safe_filename.strip('_') + return safe_filename diff --git a/mobsf/MobSF/settings.py b/mobsf/MobSF/settings.py index 919466346b..7b2f8fc40e 100644 --- a/mobsf/MobSF/settings.py +++ b/mobsf/MobSF/settings.py @@ -180,6 +180,7 @@ # Application definition INSTALLED_APPS = ( # 'django.contrib.admin', + 'django_q', 'django.contrib.auth', 'django.contrib.contenttypes', 'django.contrib.sessions', @@ -307,6 +308,11 @@ 'level': 'DEBUG', 'propagate': True, }, + 'django_q': { + 'handlers': ['console', 'logfile'], + 'level': 'DEBUG', + 'propagate': True, + }, 'django.db.backends': { 'handlers': ['console', 'logfile'], # DEBUG will log all queries, so change it to WARNING. @@ -335,6 +341,20 @@ }, }, } +Q_CLUSTER = { + 'name': 'scan_queue', + 'workers': int(os.getenv('MOBSF_ASYNC_WORKERS', 3)), + 'recycle': 10, + 'timeout': 3600, + 'retry': 3700, + 'compress': True, + 'label': 'scan_queue', + 'orm': 'default', + 'max_attempts': 2, +} +QUEUE_MAX_SIZE = 100 +ASYNC_ANALYSIS = bool(os.getenv('MOBSF_ASYNC_ANALYSIS', '0') == '1') +MULTIPROCESSING = os.getenv('MOBSF_MULTIPROCESSING') JADX_TIMEOUT = int(os.getenv('MOBSF_JADX_TIMEOUT', 1000)) SAST_TIMEOUT = int(os.getenv('MOBSF_SAST_TIMEOUT', 1000)) BINARY_ANALYSIS_TIMEOUT = int(os.getenv('MOBSF_BINARY_ANALYSIS_TIMEOUT', 600)) diff --git a/mobsf/MobSF/urls.py b/mobsf/MobSF/urls.py index 1076ffd821..8f54a7b341 100755 --- a/mobsf/MobSF/urls.py +++ b/mobsf/MobSF/urls.py @@ -34,6 +34,7 @@ from mobsf.StaticAnalyzer import tests from mobsf.StaticAnalyzer.views.common import ( appsec, + async_task, pdf, shared_func, suppression, @@ -204,7 +205,9 @@ re_path(r'^error/$', home.error, name='error'), re_path(r'^zip_format/$', home.zip_format), re_path(r'^dynamic_analysis/$', home.dynamic_analysis, name='dynamic'), - + re_path(r'^tasks$', + async_task.list_tasks, + name='list_tasks'), # Static Analysis # Android re_path(fr'^static_analyzer/{checksum_regex}/$', diff --git a/mobsf/MobSF/utils.py b/mobsf/MobSF/utils.py index 46ce6fbd50..e6edf739e2 100755 --- a/mobsf/MobSF/utils.py +++ b/mobsf/MobSF/utils.py @@ -22,6 +22,10 @@ import threading from urllib.parse import urlparse from pathlib import Path +from concurrent.futures import ( + ThreadPoolExecutor, + TimeoutError as ThreadPoolTimeoutError, +) from packaging.version import Version @@ -948,21 +952,28 @@ class TaskTimeoutError(Exception): def run_with_timeout(func, limit, *args, **kwargs): - def run_func(result, *args, **kwargs): - result.append(func(*args, **kwargs)) - - result = [] - thread = threading.Thread( - target=run_func, - args=(result, *args), - kwargs=kwargs) - thread.start() - thread.join(limit) - - if thread.is_alive(): - msg = (f'function <{func.__name__}> ' - f'timed out after {limit} seconds') - raise TaskTimeoutError(msg) - if result: - return result[0] - return None + with ThreadPoolExecutor(max_workers=1) as executor: + future = executor.submit(func, *args, **kwargs) + try: + return future.result(timeout=limit) + except ThreadPoolTimeoutError: + 
msg = f'function <{func.__name__}> timed out after {limit} seconds' + raise TaskTimeoutError(msg) + + +def set_permissions(path): + base_path = Path(path) + # Read/Write for directories without execute + perm_dir = stat.S_IRUSR | stat.S_IWUSR | stat.S_IRGRP | stat.S_IROTH + # Read/Write for files + perm_file = stat.S_IRUSR | stat.S_IWUSR | stat.S_IRGRP | stat.S_IROTH + + # Set permissions for directories and files + for item in base_path.rglob('*'): + try: + if item.is_dir(): + item.chmod(perm_dir) + elif item.is_file(): + item.chmod(perm_file) + except Exception: + pass diff --git a/mobsf/MobSF/views/home.py b/mobsf/MobSF/views/home.py index 84a1568124..6f30af3e3b 100755 --- a/mobsf/MobSF/views/home.py +++ b/mobsf/MobSF/views/home.py @@ -33,6 +33,7 @@ python_dict, ) from mobsf.MobSF.init import api_key +from mobsf.MobSF.security import sanitize_filename from mobsf.MobSF.views.helpers import FileType from mobsf.MobSF.views.scanning import Scanning from mobsf.MobSF.views.apk_downloader import apk_download @@ -430,7 +431,7 @@ def download_binary(request, checksum, api=False): status=HTTP_STATUS_404) return file_download( dwd_file, - filename, + sanitize_filename(robj.FILE_NAME), allowed_exts[file_ext]) except Exception: logger.exception('Download Binary Failed') diff --git a/mobsf/StaticAnalyzer/models.py b/mobsf/StaticAnalyzer/models.py index f3f5fc23d0..ad5a2cca24 100755 --- a/mobsf/StaticAnalyzer/models.py +++ b/mobsf/StaticAnalyzer/models.py @@ -2,6 +2,7 @@ from enum import Enum from django.db import models +from django.utils import timezone class DjangoPermissions(Enum): @@ -167,3 +168,16 @@ class Meta: SUPPRESS_RULE_ID = models.TextField(default=[]) SUPPRESS_FILES = models.TextField(default={}) SUPPRESS_TYPE = models.TextField(default='') + + +class EnqueuedTask(models.Model): + task_id = models.CharField(max_length=255) + checksum = models.CharField(max_length=255) + file_name = models.CharField(max_length=255) + created_at = models.DateTimeField(default=timezone.now) + status = models.CharField(max_length=50, default='Enqueued') + completed_at = models.DateTimeField(null=True) + app_name = models.CharField(max_length=255, default='') + + def __str__(self): + return f'{self.file_name} ({self.status})' diff --git a/mobsf/StaticAnalyzer/views/android/apk.py b/mobsf/StaticAnalyzer/views/android/apk.py new file mode 100644 index 0000000000..303ce0bdd9 --- /dev/null +++ b/mobsf/StaticAnalyzer/views/android/apk.py @@ -0,0 +1,518 @@ + +"""Android APK and Source Analysis.""" +import logging +import shutil +from pathlib import Path + +import mobsf.MalwareAnalyzer.views.Trackers as Trackers +import mobsf.MalwareAnalyzer.views.VirusTotal as VirusTotal +from mobsf.MalwareAnalyzer.views.android import ( + apkid, + permissions, +) +from mobsf.MalwareAnalyzer.views.MalwareDomainCheck import MalwareDomainCheck + +from django.conf import settings +from django.http import HttpResponseRedirect +from django.shortcuts import render + +from mobsf.MobSF.utils import ( + append_scan_status, + file_size, + print_n_send_error_response, +) +from mobsf.StaticAnalyzer.models import ( + StaticAnalyzerAndroid, + StaticAnalyzerIOS, +) +from mobsf.StaticAnalyzer.views.common.binary.lib_analysis import ( + library_analysis, +) +from mobsf.StaticAnalyzer.views.android.app import ( + get_app_name, + parse_apk, +) +from mobsf.StaticAnalyzer.views.android.cert_analysis import ( + cert_info, + get_hardcoded_cert_keystore, +) +from mobsf.StaticAnalyzer.views.android.code_analysis import code_analysis +from 
mobsf.StaticAnalyzer.views.android.converter import ( + apk_2_java, + dex_2_smali, +) +from mobsf.StaticAnalyzer.views.android.db_interaction import ( + get_context_from_db_entry, + save_get_ctx, +) +from mobsf.StaticAnalyzer.views.android.icon_analysis import ( + get_icon_apk, + get_icon_from_src, +) +from mobsf.StaticAnalyzer.views.android.manifest_analysis import ( + manifest_analysis, +) +from mobsf.StaticAnalyzer.views.android.manifest_utils import ( + get_manifest, + manifest_data, +) +from mobsf.StaticAnalyzer.views.android.playstore import get_app_details +from mobsf.StaticAnalyzer.views.android.strings import ( + get_strings_metadata, +) +from mobsf.StaticAnalyzer.views.common.shared_func import ( + get_avg_cvss, + hash_gen, + unzip, +) +from mobsf.StaticAnalyzer.views.common.firebase import ( + firebase_analysis, +) +from mobsf.StaticAnalyzer.views.common.appsec import ( + get_android_dashboard, +) +from mobsf.StaticAnalyzer.views.common.async_task import ( + async_analysis, + update_enqueued_task, +) +from mobsf.MobSF.views.authorization import ( + Permissions, + has_permission, +) + +logger = logging.getLogger(__name__) + + +def initialize_app_dic(checksum, app_dic, file_ext): + app_dic['app_file'] = f'{checksum}.{file_ext}' + app_dic['app_path'] = (app_dic['app_dir'] / app_dic['app_file']).as_posix() + app_dic['app_dir'] = app_dic['app_dir'].as_posix() + '/' + app_dic['size'] = str(file_size(app_dic['app_path'])) + 'MB' + app_dic['sha1'], app_dic['sha256'] = hash_gen(checksum, app_dic['app_path']) + return app_dic + + +def get_manifest_data(checksum, app_dic, andro_apk=None): + """Get Manifest Data.""" + # Manifest XML + mani_file, ns, mani_xml = get_manifest( + checksum, + app_dic['app_path'], + app_dic['app_dir'], + app_dic['tools_dir'], + app_dic['zipped'], + andro_apk, + ) + app_dic['manifest_file'] = mani_file + app_dic['parsed_xml'] = mani_xml + # Manifest data extraction + man_data = manifest_data( + checksum, + app_dic['parsed_xml'], + ns) + # Manifest Analysis + man_analysis = manifest_analysis( + checksum, + app_dic['parsed_xml'], + ns, + man_data, + app_dic['zipped'], + app_dic['app_dir']) + return man_data, man_analysis + + +def print_scan_subject(checksum, app_dic, man_data): + """Log scan subject.""" + app_name = app_dic['real_name'] + pkg_name = man_data['packagename'] + subject = '' + if app_name and pkg_name: + subject = f'{app_name} ({pkg_name})' + elif pkg_name: + subject = pkg_name + elif app_name: + subject = app_name + msg = f'Performing Static Analysis on: {subject}' + logger.info(msg) + append_scan_status(checksum, msg) + return subject + + +def apk_analysis_task(checksum, app_dic, rescan, queue=False): + """APK Analysis Task.""" + if queue: + settings.ASYNC_ANALYSIS = True + append_scan_status(checksum, 'init') + initialize_app_dic(checksum, app_dic, 'apk') + msg = 'Extracting APK' + logger.info(msg) + append_scan_status(checksum, msg) + app_dic['files'] = unzip( + checksum, + app_dic['app_path'], + app_dic['app_dir']) + logger.info('APK Extracted') + if not app_dic['files']: + # Can't Analyze APK, bail out. 
+ msg = 'APK file is invalid or corrupt' + logger.error(msg) + append_scan_status(checksum, msg) + if queue: + return update_enqueued_task( + checksum, 'Failed', 'Failed') + return None, msg + app_dic['zipped'] = 'apk' + app_dic['certz'] = get_hardcoded_cert_keystore( + checksum, + app_dic['files']) + # Parse APK with Androguard + andro_apk = parse_apk( + checksum, + app_dic['app_path']) + # Manifest Data + man_data, man_analysis = get_manifest_data( + checksum, + app_dic, + andro_apk) + # Get App name + app_dic['real_name'] = get_app_name( + andro_apk, + app_dic['app_dir'], + True) + # Print scan subject + subject = print_scan_subject(checksum, app_dic, man_data) + app_dic['playstore'] = get_app_details( + checksum, + man_data['packagename']) + # Malware Permission check + mal_perms = permissions.check_malware_permission( + checksum, + man_data['perm']) + man_analysis['malware_permissions'] = mal_perms + # Get icon + # apktool should run before this + get_icon_apk(andro_apk, app_dic) + elf_dict = library_analysis( + checksum, + app_dic['app_dir'], + 'elf') + cert_dic = cert_info( + andro_apk, + app_dic, + man_data) + apkid_results = apkid.apkid_analysis( + checksum, + app_dic['app_path']) + trackers = Trackers.Trackers( + checksum, + app_dic['app_dir'], + app_dic['tools_dir']).get_trackers() + apk_2_java( + checksum, + app_dic['app_path'], + app_dic['app_dir'], + settings.DOWNLOADED_TOOLS_DIR) + dex_2_smali( + checksum, + app_dic['app_dir'], + app_dic['tools_dir']) + code_an_dic = code_analysis( + checksum, + app_dic['app_dir'], + app_dic['zipped'], + app_dic['manifest_file'], + man_data['perm']) + # Get the strings and metadata + get_strings_metadata( + checksum, + andro_apk, + app_dic['app_dir'], + elf_dict['elf_strings'], + app_dic['zipped'], + ['.java'], + code_an_dic) + # Firebase DB Check + code_an_dic['firebase'] = firebase_analysis( + checksum, + code_an_dic) + # Domain Extraction and Malware Check + code_an_dic['domains'] = MalwareDomainCheck().scan( + checksum, + code_an_dic['urls_list']) + context = save_get_ctx( + app_dic, + man_data, + man_analysis, + code_an_dic, + cert_dic, + elf_dict['elf_analysis'], + apkid_results, + trackers, + rescan, + ) + if queue: + return update_enqueued_task( + checksum, subject, 'Success') + return context, None + + +def generate_dynamic_context(request, app_dic, checksum, context, api): + """Generate Dynamic Context.""" + context['appsec'] = get_android_dashboard(context, True) + context['average_cvss'] = get_avg_cvss(context['code_analysis']) + logcat_file = Path(app_dic['app_dir']) / 'logcat.txt' + context['dynamic_analysis_done'] = logcat_file.exists() + context['virus_total'] = None + if settings.VT_ENABLED: + vt = VirusTotal.VirusTotal(checksum) + context['virus_total'] = vt.get_result(app_dic['app_path']) + template = 'static_analysis/android_binary_analysis.html' + return context if api else render(request, template, context) + + +def apk_analysis(request, app_dic, rescan, api): + """APK Analysis.""" + checksum = app_dic['md5'] + db_entry = StaticAnalyzerAndroid.objects.filter(MD5=checksum) + if db_entry.exists() and not rescan: + context = get_context_from_db_entry(db_entry) + return generate_dynamic_context(request, app_dic, checksum, context, api) + else: + # APK Analysis + if not has_permission(request, Permissions.SCAN, api): + return print_n_send_error_response(request, 'Permission Denied', False) + if settings.ASYNC_ANALYSIS: + return async_analysis( + checksum, + app_dic.get('app_name', ''), + apk_analysis_task, checksum, app_dic, 
rescan) + context, err = apk_analysis_task(checksum, app_dic, rescan) + if err: + return print_n_send_error_response(request, err, api) + return generate_dynamic_context(request, app_dic, checksum, context, api) + + +def src_analysis_task(checksum, app_dic, rescan, pro_type, queue=False): + """Android ZIP Source Code Analysis Begins.""" + if queue: + settings.ASYNC_ANALYSIS = True + cert_dic = { + 'certificate_info': '', + 'certificate_status': '', + 'description': '', + } + app_dic['strings'] = [] + app_dic['secrets'] = [] + # Above fields are only available for APK and not ZIP + app_dic['zipped'] = pro_type + app_dic['certz'] = get_hardcoded_cert_keystore( + checksum, + app_dic['files']) + # Manifest Data + man_data, man_analysis = get_manifest_data( + checksum, + app_dic) + # Get app name + app_dic['real_name'] = get_app_name( + None, + app_dic['app_dir'], + False) + # Print scan subject + subject = print_scan_subject(checksum, app_dic, man_data) + app_dic['playstore'] = get_app_details( + checksum, + man_data['packagename']) + # Malware Permission check + mal_perms = permissions.check_malware_permission( + checksum, + man_data['perm']) + man_analysis['malware_permissions'] = mal_perms + # Get icon + get_icon_from_src( + app_dic, + man_data['icons']) + code_an_dic = code_analysis( + checksum, + app_dic['app_dir'], + app_dic['zipped'], + app_dic['manifest_file'], + man_data['perm']) + # Get the strings and metadata + get_strings_metadata( + checksum, + None, + app_dic['app_dir'], + None, + app_dic['zipped'], + ['.java', '.kt'], + code_an_dic) + # Firebase DB Check + code_an_dic['firebase'] = firebase_analysis( + checksum, + code_an_dic) + # Domain Extraction and Malware Check + code_an_dic['domains'] = MalwareDomainCheck().scan( + checksum, + code_an_dic['urls_list']) + # Extract Trackers from Domains + trackers = Trackers.Trackers( + checksum, + None, + app_dic['tools_dir']).get_trackers_domains_or_deps( + code_an_dic['domains'], []) + context = save_get_ctx( + app_dic, + man_data, + man_analysis, + code_an_dic, + cert_dic, + [], + {}, + trackers, + rescan, + ) + if queue: + return update_enqueued_task( + checksum, subject, 'Success') + return context + + +def generate_dynamic_src_context(request, context, api): + """Generate Dynamic Source Context.""" + context['appsec'] = get_android_dashboard(context, True) + context['average_cvss'] = get_avg_cvss(context['code_analysis']) + template = 'static_analysis/android_source_analysis.html' + return context if api else render(request, template, context) + + +def src_analysis(request, app_dic, rescan, api): + """Source Code Analysis.""" + checksum = app_dic['md5'] + ret = f'/static_analyzer_ios/{checksum}/' + db_entry = StaticAnalyzerAndroid.objects.filter( + MD5=checksum) + ios_db_entry = StaticAnalyzerIOS.objects.filter( + MD5=checksum) + if db_entry.exists() and not rescan: + context = get_context_from_db_entry(db_entry) + return generate_dynamic_src_context(request, context, api) + elif ios_db_entry.exists() and not rescan: + return {'type': 'ios'} if api else HttpResponseRedirect(ret) + else: + # Initialize for both Android and iOS Source Analysis + append_scan_status(checksum, 'init') + initialize_app_dic(checksum, app_dic, 'zip') + msg = 'Extracting ZIP' + logger.info(msg) + append_scan_status(checksum, msg) + app_dic['files'] = unzip( + checksum, + app_dic['app_path'], + app_dic['app_dir']) + # Check if Valid Directory Structure and get ZIP Type + pro_type, valid = valid_source_code( + checksum, + app_dic['app_dir']) + msg = f'Source 
code type - {pro_type}' + logger.info(msg) + append_scan_status(checksum, msg) + # Handle iOS Source Code + if valid and pro_type == 'ios': + msg = 'Redirecting to iOS Source Code Analyzer' + logger.info(msg) + append_scan_status(checksum, msg) + ret = f'{ret}?rescan={str(int(rescan))}' + return {'type': 'ios'} if api else HttpResponseRedirect(ret) + # Android Source Code Analysis + if not has_permission(request, Permissions.SCAN, api): + return print_n_send_error_response( + request, + 'Permission Denied', + False) + if valid and (pro_type in ['eclipse', 'studio']): + if settings.ASYNC_ANALYSIS: + return async_analysis( + checksum, + app_dic.get('app_name', ''), + src_analysis_task, checksum, app_dic, rescan, pro_type) + context = src_analysis_task(checksum, app_dic, rescan, pro_type) + return generate_dynamic_src_context(request, context, api) + else: + msg = 'This ZIP Format is not supported' + if api: + return print_n_send_error_response( + request, + msg, + True) + else: + print_n_send_error_response(request, msg, False) + ctx = { + 'title': 'Invalid ZIP archive', + 'version': settings.MOBSF_VER, + } + template = 'general/zip.html' + return render(request, template, ctx) + + +def is_android_source(app_path): + """Detect Android Source and IDE Type.""" + # Eclipse + man = app_path / 'AndroidManifest.xml' + src = app_path / 'src' + if man.is_file() and src.exists(): + return 'eclipse', True + + # Studio + man = app_path / 'app' / 'src' / 'main' / 'AndroidManifest.xml' + java = app_path / 'app' / 'src' / 'main' / 'java' + kotlin = app_path / 'app' / 'src' / 'main' / 'kotlin' + if man.is_file() and (java.exists() or kotlin.exists()): + return 'studio', True + + return None, False + + +def move_to_parent(inside_path, app_path): + """Move contents of inside to app dir.""" + for item in inside_path.iterdir(): + shutil.move(str(item), str(app_path)) + shutil.rmtree(inside_path) + + +def valid_source_code(checksum, app_dir): + """Test if this is a valid source code zip.""" + try: + msg = 'Detecting source code type' + logger.info(msg) + append_scan_status(checksum, msg) + + app_path = Path(app_dir) + ide, is_and = is_android_source(app_path) + + if ide: + return ide, is_and + + # Relaxed Android Source check, one level down + for subdir in app_path.iterdir(): + if subdir.is_dir() and subdir.exists(): + ide, is_and = is_android_source(subdir) + if ide: + move_to_parent(subdir, app_path) + return ide, is_and + + # iOS Source + xcode = [f for f in app_path.iterdir() if f.suffix == '.xcodeproj'] + if xcode: + return 'ios', True + + # Relaxed iOS Source Check + for subdir in app_path.iterdir(): + if subdir.is_dir() and subdir.exists(): + if any(f.suffix == '.xcodeproj' for f in subdir.iterdir()): + return 'ios', True + + return '', False + except Exception as exp: + msg = 'Error identifying source code type from zip' + logger.exception(msg) + append_scan_status(checksum, msg, repr(exp)) diff --git a/mobsf/StaticAnalyzer/views/android/app.py b/mobsf/StaticAnalyzer/views/android/app.py index 76b2b230ed..fc841b23de 100644 --- a/mobsf/StaticAnalyzer/views/android/app.py +++ b/mobsf/StaticAnalyzer/views/android/app.py @@ -33,12 +33,15 @@ def get_app_name(a, app_dir, is_apk): base = Path(app_dir) if is_apk: if a: + # Parsed Androguard APK Object return a.get_app_name() else: + # Look for app_name in values folder. 
val = base / 'apktool_out' / 'res' / 'values' if val.exists(): return get_app_name_from_values_folder(val.as_posix()) else: + # For source code strings_path = base / 'app' / 'src' / 'main' / 'res' / 'values' eclipse_path = base / 'res' / 'values' if strings_path.exists(): diff --git a/mobsf/StaticAnalyzer/views/android/cert_analysis.py b/mobsf/StaticAnalyzer/views/android/cert_analysis.py index 81f5764195..f60beaee64 100755 --- a/mobsf/StaticAnalyzer/views/android/cert_analysis.py +++ b/mobsf/StaticAnalyzer/views/android/cert_analysis.py @@ -167,6 +167,7 @@ def apksigtool_cert(checksum, apk_path, tools_dir): certs_no = 0 min_sdk = None av1, av2, av3, av4 = None, None, None, None + v1, v2, v3, v4 = None, None, None, None try: from apksigtool import ( APKSignatureSchemeBlock, diff --git a/mobsf/StaticAnalyzer/views/android/code_analysis.py b/mobsf/StaticAnalyzer/views/android/code_analysis.py index 22ba2a1a69..f5d57a40f2 100755 --- a/mobsf/StaticAnalyzer/views/android/code_analysis.py +++ b/mobsf/StaticAnalyzer/views/android/code_analysis.py @@ -20,8 +20,11 @@ url_n_email_extract, ) from mobsf.StaticAnalyzer.views.sast_engine import ( - niap_scan, - scan, + ChoiceEngine, + SastEngine, +) +from mobsf.MalwareAnalyzer.views.android import ( + behaviour_analysis, ) logger = logging.getLogger(__name__) @@ -75,6 +78,8 @@ def code_analysis(checksum, app_dir, typ, manifest_file, android_permissions): code_findings = {} api_findings = {} perm_mappings = {} + behaviour_findings = {} + niap_findings = {} email_n_file = [] url_n_file = [] url_list = [] @@ -84,59 +89,78 @@ def code_analysis(checksum, app_dir, typ, manifest_file, android_permissions): msg = f'Code Analysis Started on - {filename_from_path(src)}' logger.info(msg) append_scan_status(checksum, msg) + + options = { + 'match_rules': code_rules.as_posix(), + 'match_extensions': {'.java', '.kt'}, + 'ignore_paths': skp, + } # Code Analysis - code_findings = scan( - checksum, - code_rules.as_posix(), - {'.java', '.kt'}, - [src], - skp) + sast = SastEngine(options, src) + # Read data once and pass it to all the analysis + file_data = sast.read_files() + code_findings = sast.run_rules(file_data, code_rules.as_posix()) msg = 'Android SAST Completed' logger.info(msg) append_scan_status(checksum, msg) + # API Analysis msg = 'Android API Analysis Started' logger.info(msg) append_scan_status(checksum, msg) - api_findings = scan( - checksum, - api_rules.as_posix(), - {'.java', '.kt'}, - [src], - skp) + sast = SastEngine(options, src) + api_findings = sast.run_rules(file_data, api_rules.as_posix()) msg = 'Android API Analysis Completed' logger.info(msg) append_scan_status(checksum, msg) + # Permission Mapping rule_file = get_perm_rules(checksum, perm_rules, android_permissions) if rule_file: msg = 'Android Permission Mapping Started' logger.info(msg) append_scan_status(checksum, msg) - perm_mappings = permission_transform(scan( - checksum, - rule_file.name, - {'.java', '.kt'}, - [src], - {})) + sast = SastEngine(options, src) + perm_mappings = permission_transform( + sast.run_rules(file_data, rule_file.name)) msg = 'Android Permission Mapping Completed' logger.info(msg) append_scan_status(checksum, msg) os.unlink(rule_file.name) + + # Behavior Analysis + sast = SastEngine(options, src) + behaviour_findings = behaviour_analysis.analyze( + checksum, sast, file_data) + # NIAP Scan - niap_findings = niap_scan( - checksum, - niap_rules.as_posix(), - {'.java', '.xml'}, - [src], - manifest_file, - None) + if settings_enabled('NIAP_ENABLED'): + msg = 'Running NIAP 
Analyzer' + logger.info(msg) + append_scan_status(checksum, msg) + niap_options = { + 'choice_rules': niap_rules.as_posix(), + 'alternative_path': manifest_file if manifest_file else '', + 'choice_extensions': {'.java', '.xml'}, + 'ignore_paths': skp, + } + cengine = ChoiceEngine(niap_options, src) + file_data = cengine.read_files() + niap_findings = cengine.run_rules(file_data, niap_rules.as_posix()) + msg = 'NIAP Analysis Completed' + logger.info(msg) + append_scan_status(checksum, msg) + # Extract URLs and Emails + msg = 'Extracting Emails and URLs from Source Code' + logger.info(msg) + append_scan_status(checksum, msg) for pfile in Path(src).rglob('*'): if ( (pfile.suffix in ('.java', '.kt') and any(skip_path in pfile.as_posix() - for skip_path in skp) is False) + for skip_path in skp) is False + and pfile.is_file()) ): content = None try: @@ -155,6 +179,7 @@ def code_analysis(checksum, app_dir, typ, manifest_file, android_permissions): append_scan_status(checksum, msg) code_an_dic = { 'api': api_findings, + 'behaviour': behaviour_findings, 'perm_mappings': perm_mappings, 'findings': code_findings, 'niap': niap_findings, diff --git a/mobsf/StaticAnalyzer/views/android/db_interaction.py b/mobsf/StaticAnalyzer/views/android/db_interaction.py index 2d4a7f8767..e95419ac8c 100755 --- a/mobsf/StaticAnalyzer/views/android/db_interaction.py +++ b/mobsf/StaticAnalyzer/views/android/db_interaction.py @@ -102,7 +102,6 @@ def get_context_from_analysis(app_dic, cert_dic, bin_anal, apk_id, - behaviour_an, trackers) -> dict: """Get the context for APK/ZIP from analysis results.""" try: @@ -157,7 +156,7 @@ def get_context_from_analysis(app_dic, 'files': app_dic['files'], 'exported_count': man_an_dic['exported_cnt'], 'apkid': apk_id, - 'behaviour': behaviour_an, + 'behaviour': code_an_dic['behaviour'], 'trackers': trackers, 'playstore_details': app_dic['playstore'], 'secrets': code_an_dic['secrets'], @@ -178,7 +177,6 @@ def save_or_update(update_type, cert_dic, bin_anal, apk_id, - behaviour_an, trackers) -> None: """Save/Update an APK/ZIP DB entry.""" try: @@ -223,7 +221,7 @@ def save_or_update(update_type, 'FILES': app_dic['files'], 'EXPORTED_COUNT': man_an_dic['exported_cnt'], 'APKID': apk_id, - 'QUARK': behaviour_an, + 'QUARK': code_an_dic['behaviour'], 'TRACKERS': trackers, 'PLAYSTORE_DETAILS': app_dic['playstore'], 'NETWORK_SECURITY': man_an_dic['network_security'], @@ -255,7 +253,7 @@ def save_or_update(update_type, append_scan_status(app_dic['md5'], msg, repr(exp)) -def save_get_ctx(app, man, m_anal, code, cert, elf, apkid, behaviour, trk, rscn): +def save_get_ctx(app, man, m_anal, code, cert, elf, apkid, trk, rscn): # SAVE TO DB if rscn: msg = 'Updating Database...' 
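# A condensed sketch of the refactored scan flow above: build one options dict,
# read the sources a single time with SastEngine, then run several rule packs
# against the same in-memory file data. The source root and rule file names are
# hypothetical placeholders; the SastEngine API (read_files, run_rules) is the
# one used by the call sites added in code_analysis.py.
from django.conf import settings
from mobsf.StaticAnalyzer.views.sast_engine import SastEngine

src = '/path/to/decompiled/java_source/'        # hypothetical source root
options = {
    'match_rules': 'rules/android_rules.yaml',  # hypothetical rule pack path
    'match_extensions': {'.java', '.kt'},
    'ignore_paths': settings.SKIP_CLASS_PATH,
}
sast = SastEngine(options, src)
file_data = sast.read_files()                   # read sources once
code_findings = sast.run_rules(file_data, 'rules/android_rules.yaml')
# A second engine instance reuses the already-read file data
api_findings = SastEngine(options, src).run_rules(
    file_data, 'rules/android_apis.yaml')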
@@ -277,7 +275,6 @@ def save_get_ctx(app, man, m_anal, code, cert, elf, apkid, behaviour, trk, rscn) cert, elf, apkid, - behaviour, trk, ) return get_context_from_analysis( @@ -288,6 +285,5 @@ def save_get_ctx(app, man, m_anal, code, cert, elf, apkid, behaviour, trk, rscn) cert, elf, apkid, - behaviour, trk, ) diff --git a/mobsf/StaticAnalyzer/views/android/jar_aar.py b/mobsf/StaticAnalyzer/views/android/jar_aar.py index 4b32c7c23e..6f22e1eec8 100644 --- a/mobsf/StaticAnalyzer/views/android/jar_aar.py +++ b/mobsf/StaticAnalyzer/views/android/jar_aar.py @@ -8,7 +8,6 @@ import mobsf.MalwareAnalyzer.views.Trackers as Trackers import mobsf.MalwareAnalyzer.views.VirusTotal as VirusTotal from mobsf.MalwareAnalyzer.views.android import ( - behaviour_analysis, permissions, ) from mobsf.MobSF.utils import ( @@ -209,10 +208,6 @@ def common_analysis(request, app_dic, rescan, api, analysis_type): checksum, app_dic['app_dir'], code_an_dic) - behaviour_an = behaviour_analysis.analyze( - checksum, - app_dic['app_dir'], - APK_TYPE) # Get the strings and metadata get_strings_metadata( checksum, @@ -240,7 +235,6 @@ def common_analysis(request, app_dic, rescan, api, analysis_type): cert_dic, elf_dict['elf_analysis'], {}, - behaviour_an, tracker_res, rescan, ) diff --git a/mobsf/StaticAnalyzer/views/android/manifest_utils.py b/mobsf/StaticAnalyzer/views/android/manifest_utils.py index eac3d08c64..d2fbdcc3d9 100644 --- a/mobsf/StaticAnalyzer/views/android/manifest_utils.py +++ b/mobsf/StaticAnalyzer/views/android/manifest_utils.py @@ -58,6 +58,9 @@ def get_android_manifest_androguard(apk, app_dir): """Get AndroidManifest.xml using Androguard.""" try: logger.info('Extracting AndroidManifest.xml with Androguard') + if not apk: + logger.warning('Androguard APK parsing failed') + return manifest = apk.get_android_manifest_axml() if not manifest: return diff --git a/mobsf/StaticAnalyzer/views/android/network_security.py b/mobsf/StaticAnalyzer/views/android/network_security.py index 4fe0f69746..af5d119122 100644 --- a/mobsf/StaticAnalyzer/views/android/network_security.py +++ b/mobsf/StaticAnalyzer/views/android/network_security.py @@ -23,7 +23,7 @@ def read_netsec_config(checksum, app_dir, config, src_type): config_file = None config = config.replace('@xml/', '', 1) base = Path(app_dir) - if src_type: + if src_type == 'studio': # Support only android studio source files xml_dir = base / 'app' / 'src' / 'main' / 'res' / 'xml' else: diff --git a/mobsf/StaticAnalyzer/views/android/so.py b/mobsf/StaticAnalyzer/views/android/so.py index 072ce83280..b6916939a0 100644 --- a/mobsf/StaticAnalyzer/views/android/so.py +++ b/mobsf/StaticAnalyzer/views/android/so.py @@ -116,6 +116,7 @@ def so_analysis(request, app_dic, rescan, api): apkid_results = {} code_an_dic = { 'api': {}, + 'behaviour': {}, 'perm_mappings': {}, 'findings': {}, 'niap': {}, @@ -123,7 +124,6 @@ def so_analysis(request, app_dic, rescan, api): 'urls': [], 'emails': [], } - behaviour_an = [] # Get the strings and metadata from shared object get_strings_metadata( checksum, @@ -157,7 +157,6 @@ def so_analysis(request, app_dic, rescan, api): cert_dic, elf_dict['elf_analysis'], apkid_results, - behaviour_an, trackers, rescan, ) diff --git a/mobsf/StaticAnalyzer/views/android/static_analyzer.py b/mobsf/StaticAnalyzer/views/android/static_analyzer.py index 6e0868b4ad..4b5f26a73a 100755 --- a/mobsf/StaticAnalyzer/views/android/static_analyzer.py +++ b/mobsf/StaticAnalyzer/views/android/static_analyzer.py @@ -2,30 +2,14 @@ """Android Static Code Analysis.""" import 
logging -import os -import shutil from pathlib import Path -import mobsf.MalwareAnalyzer.views.Trackers as Trackers -import mobsf.MalwareAnalyzer.views.VirusTotal as VirusTotal -from mobsf.MalwareAnalyzer.views.android import ( - apkid, - behaviour_analysis, - permissions, -) -from mobsf.MalwareAnalyzer.views.MalwareDomainCheck import MalwareDomainCheck - from django.conf import settings -from django.http import HttpResponseRedirect -from django.shortcuts import render from django.template.defaulttags import register from mobsf.MobSF.utils import ( android_component, append_scan_status, - file_size, - is_dir_exists, - is_file_exists, is_md5, key, pathify, @@ -34,49 +18,16 @@ ) from mobsf.StaticAnalyzer.models import ( RecentScansDB, - StaticAnalyzerAndroid, - StaticAnalyzerIOS, -) -from mobsf.StaticAnalyzer.views.common.binary.lib_analysis import ( - library_analysis, -) -from mobsf.StaticAnalyzer.views.android.app import ( - get_app_name, - parse_apk, -) -from mobsf.StaticAnalyzer.views.android.cert_analysis import ( - cert_info, - get_hardcoded_cert_keystore, -) -from mobsf.StaticAnalyzer.views.android.code_analysis import code_analysis -from mobsf.StaticAnalyzer.views.android.converter import ( - apk_2_java, - dex_2_smali, -) -from mobsf.StaticAnalyzer.views.android.db_interaction import ( - get_context_from_db_entry, - save_get_ctx, -) -from mobsf.StaticAnalyzer.views.android.icon_analysis import ( - get_icon_apk, - get_icon_from_src, -) -from mobsf.StaticAnalyzer.views.android.manifest_analysis import ( - manifest_analysis, -) -from mobsf.StaticAnalyzer.views.android.manifest_utils import ( - get_manifest, - manifest_data, -) -from mobsf.StaticAnalyzer.views.android.playstore import get_app_details -from mobsf.StaticAnalyzer.views.android.strings import ( - get_strings_metadata, ) from mobsf.StaticAnalyzer.views.android.xapk import ( handle_aab, handle_split_apk, handle_xapk, ) +from mobsf.StaticAnalyzer.views.android.apk import ( + apk_analysis, + src_analysis, +) from mobsf.StaticAnalyzer.views.android.jar_aar import ( aar_analysis, jar_analysis, @@ -84,24 +35,9 @@ from mobsf.StaticAnalyzer.views.android.so import ( so_analysis, ) -from mobsf.StaticAnalyzer.views.common.shared_func import ( - get_avg_cvss, - hash_gen, - unzip, -) -from mobsf.StaticAnalyzer.views.common.firebase import ( - firebase_analysis, -) -from mobsf.StaticAnalyzer.views.common.appsec import ( - get_android_dashboard, -) from mobsf.MobSF.views.authentication import ( login_required, ) -from mobsf.MobSF.views.authorization import ( - Permissions, - has_permission, -) APK_TYPE = 'apk' logger = logging.getLogger(__name__) @@ -172,187 +108,9 @@ def static_analyzer(request, checksum, api=False): if not handle_aab(app_dic): raise Exception('Invalid AAB File') typ = APK_TYPE + # Route to respective analysis if typ == APK_TYPE: - app_dic['app_file'] = f'{checksum}.apk' - app_dic['app_path'] = ( - app_dic['app_dir'] / app_dic['app_file']).as_posix() - app_dic['app_dir'] = app_dic['app_dir'].as_posix() + '/' - # Check if in DB - # pylint: disable=E1101 - db_entry = StaticAnalyzerAndroid.objects.filter(MD5=checksum) - if db_entry.exists() and not rescan: - context = get_context_from_db_entry(db_entry) - else: - if not has_permission(request, Permissions.SCAN, api): - return print_n_send_error_response( - request, - 'Permission Denied', - False) - # ANALYSIS BEGINS - append_scan_status(checksum, 'init') - app_dic['size'] = str( - file_size(app_dic['app_path'])) + 'MB' # FILE SIZE - app_dic['sha1'], app_dic['sha256'] = 
hash_gen( - checksum, - app_dic['app_path']) - msg = 'Extracting APK' - logger.info(msg) - append_scan_status(checksum, msg) - app_dic['files'] = unzip( - checksum, - app_dic['app_path'], - app_dic['app_dir']) - logger.info('APK Extracted') - if not app_dic['files']: - # Can't Analyze APK, bail out. - msg = 'APK file is invalid or corrupt' - append_scan_status(checksum, msg) - return print_n_send_error_response( - request, - msg, - api) - app_dic['certz'] = get_hardcoded_cert_keystore( - checksum, - app_dic['files']) - # Parse APK with Androguard - apk = parse_apk( - checksum, - app_dic['app_path']) - # get app_name - app_dic['real_name'] = get_app_name( - apk, - app_dic['app_dir'], - True, - ) - # Manifest XML - mani_file, ns, mani_xml = get_manifest( - checksum, - app_dic['app_path'], - app_dic['app_dir'], - app_dic['tools_dir'], - APK_TYPE, - apk, - ) - app_dic['manifest_file'] = mani_file - app_dic['parsed_xml'] = mani_xml - # Manifest data extraction - man_data_dic = manifest_data( - checksum, - app_dic['parsed_xml'], - ns) - # Get App name - app_name = app_dic['real_name'] - pkg_name = man_data_dic['packagename'] - if app_name or pkg_name: - if app_name and pkg_name: - subject = f'{app_name} ({pkg_name})' - elif app_name: - subject = app_name - elif pkg_name: - subject = pkg_name - msg = f'Performing Static Analysis on: {subject}' - logger.info(msg) - append_scan_status(checksum, msg) - app_dic['playstore'] = get_app_details( - checksum, - man_data_dic['packagename']) - man_an_dic = manifest_analysis( - checksum, - app_dic['parsed_xml'], - ns, - man_data_dic, - '', - app_dic['app_dir']) - # Malware Permission check - mal_perms = permissions.check_malware_permission( - checksum, - man_data_dic['perm']) - man_an_dic['malware_permissions'] = mal_perms - # Get icon - # apktool should run before this - get_icon_apk(apk, app_dic) - elf_dict = library_analysis( - checksum, - app_dic['app_dir'], - 'elf') - cert_dic = cert_info( - apk, - app_dic, - man_data_dic) - apkid_results = apkid.apkid_analysis( - checksum, - app_dic['app_path']) - tracker = Trackers.Trackers( - checksum, - app_dic['app_dir'], - app_dic['tools_dir']) - tracker_res = tracker.get_trackers() - apk_2_java( - checksum, - app_dic['app_path'], - app_dic['app_dir'], - settings.DOWNLOADED_TOOLS_DIR) - dex_2_smali( - checksum, - app_dic['app_dir'], - app_dic['tools_dir']) - code_an_dic = code_analysis( - checksum, - app_dic['app_dir'], - APK_TYPE, - app_dic['manifest_file'], - man_data_dic['perm']) - behaviour_an = behaviour_analysis.analyze( - checksum, - app_dic['app_dir'], - APK_TYPE) - # Get the strings and metadata - get_strings_metadata( - checksum, - apk, - app_dic['app_dir'], - elf_dict['elf_strings'], - APK_TYPE, - ['.java'], - code_an_dic) - # Firebase DB Check - code_an_dic['firebase'] = firebase_analysis( - checksum, - code_an_dic) - # Domain Extraction and Malware Check - code_an_dic['domains'] = MalwareDomainCheck().scan( - checksum, - code_an_dic['urls_list']) - - app_dic['zipped'] = APK_TYPE - context = save_get_ctx( - app_dic, - man_data_dic, - man_an_dic, - code_an_dic, - cert_dic, - elf_dict['elf_analysis'], - apkid_results, - behaviour_an, - tracker_res, - rescan, - ) - context['appsec'] = get_android_dashboard(context, True) - context['average_cvss'] = get_avg_cvss( - context['code_analysis']) - context['dynamic_analysis_done'] = is_file_exists( - os.path.join(app_dic['app_dir'], 'logcat.txt')) - - context['virus_total'] = None - if settings.VT_ENABLED: - vt = VirusTotal.VirusTotal(checksum) - 
context['virus_total'] = vt.get_result( - app_dic['app_path']) - template = 'static_analysis/android_binary_analysis.html' - if api: - return context - else: - return render(request, template, context) + return apk_analysis(request, app_dic, rescan, api) elif typ == 'jar': return jar_analysis(request, app_dic, rescan, api) elif typ == 'aar': @@ -360,195 +118,7 @@ def static_analyzer(request, checksum, api=False): elif typ == 'so': return so_analysis(request, app_dic, rescan, api) elif typ == 'zip': - ret = f'/static_analyzer_ios/{checksum}/' - app_dic['app_file'] = f'{checksum}.zip' - app_dic['app_path'] = ( - app_dic['app_dir'] / app_dic['app_file']).as_posix() - app_dic['app_dir'] = app_dic['app_dir'].as_posix() + '/' - db_entry = StaticAnalyzerAndroid.objects.filter( - MD5=checksum) - ios_db_entry = StaticAnalyzerIOS.objects.filter( - MD5=checksum) - if db_entry.exists() and not rescan: - context = get_context_from_db_entry(db_entry) - elif ios_db_entry.exists() and not rescan: - if api: - return {'type': 'ios'} - else: - return HttpResponseRedirect(ret) - else: - append_scan_status(checksum, 'init') - msg = 'Extracting ZIP' - logger.info(msg) - append_scan_status(checksum, msg) - app_dic['files'] = unzip( - checksum, - app_dic['app_path'], - app_dic['app_dir']) - # Check if Valid Directory Structure and get ZIP Type - pro_type, valid = valid_source_code( - checksum, - app_dic['app_dir']) - msg = f'Source code type - {pro_type}' - logger.info(msg) - append_scan_status(checksum, msg) - if valid and pro_type == 'ios': - msg = 'Redirecting to iOS Source Code Analyzer' - logger.info(msg) - append_scan_status(checksum, msg) - if api: - return {'type': 'ios'} - else: - ret += f'?rescan={str(int(rescan))}' - return HttpResponseRedirect(ret) - if not has_permission(request, Permissions.SCAN, api): - return print_n_send_error_response( - request, - 'Permission Denied', - False) - # Android ZIP Source Code Analysis Begins - if valid and (pro_type in ['eclipse', 'studio']): - cert_dic = { - 'certificate_info': '', - 'certificate_status': '', - 'description': '', - } - app_dic['strings'] = [] - app_dic['secrets'] = [] - # Above fields are only available for APK and not ZIP - app_dic['zipped'] = pro_type - app_dic['size'] = str( - file_size(app_dic['app_path'])) + 'MB' # FILE SIZE - app_dic['sha1'], app_dic['sha256'] = hash_gen( - checksum, - app_dic['app_path']) - app_dic['certz'] = get_hardcoded_cert_keystore( - checksum, - app_dic['files']) - # get app_name - app_dic['real_name'] = get_app_name( - app_dic['app_path'], - app_dic['app_dir'], - False, - ) - # Manifest XML - mani_file, ns, mani_xml = get_manifest( - checksum, - '', - app_dic['app_dir'], - app_dic['tools_dir'], - pro_type, - None, - ) - app_dic['manifest_file'] = mani_file - app_dic['parsed_xml'] = mani_xml - # Get manifest data - man_data_dic = manifest_data( - checksum, - app_dic['parsed_xml'], - ns) - # Get app name - app_name = app_dic['real_name'] - pkg_name = man_data_dic['packagename'] - if app_name or pkg_name: - if app_name and pkg_name: - subject = f'{app_name} ({pkg_name})' - elif app_name: - subject = app_name - elif pkg_name: - subject = pkg_name - msg = f'Performing Static Analysis on: {subject}' - logger.info(msg) - - app_dic['playstore'] = get_app_details( - checksum, - man_data_dic['packagename']) - man_an_dic = manifest_analysis( - checksum, - app_dic['parsed_xml'], - ns, - man_data_dic, - pro_type, - app_dic['app_dir'], - ) - # Malware Permission check - mal_perms = permissions.check_malware_permission( - checksum, 
- man_data_dic['perm']) - man_an_dic['malware_permissions'] = mal_perms - # Get icon - get_icon_from_src( - app_dic, - man_data_dic['icons']) - code_an_dic = code_analysis( - checksum, - app_dic['app_dir'], - pro_type, - app_dic['manifest_file'], - man_data_dic['perm']) - behaviour_an = behaviour_analysis.analyze( - checksum, - app_dic['app_dir'], - pro_type) - # Get the strings and metadata - get_strings_metadata( - checksum, - None, - app_dic['app_dir'], - None, - pro_type, - ['.java', '.kt'], - code_an_dic) - # Firebase DB Check - code_an_dic['firebase'] = firebase_analysis( - checksum, - code_an_dic) - # Domain Extraction and Malware Check - code_an_dic['domains'] = MalwareDomainCheck().scan( - checksum, - code_an_dic['urls_list']) - # Extract Trackers from Domains - trk = Trackers.Trackers( - checksum, - None, - app_dic['tools_dir']) - trackers = trk.get_trackers_domains_or_deps( - code_an_dic['domains'], []) - context = save_get_ctx( - app_dic, - man_data_dic, - man_an_dic, - code_an_dic, - cert_dic, - [], - {}, - behaviour_an, - trackers, - rescan, - ) - else: - msg = 'This ZIP Format is not supported' - if api: - return print_n_send_error_response( - request, - msg, - True) - else: - print_n_send_error_response(request, msg, False) - ctx = { - 'title': 'Invalid ZIP archive', - 'version': settings.MOBSF_VER, - } - template = 'general/zip.html' - return render(request, template, ctx) - context['appsec'] = get_android_dashboard(context, True) - context['average_cvss'] = get_avg_cvss( - context['code_analysis']) - template = 'static_analysis/android_source_analysis.html' - if api: - return context - else: - return render(request, template, context) + return src_analysis(request, app_dic, rescan, api) else: err = ('Only APK, JAR, AAR, SO and Zipped ' 'Android/iOS Source code supported now!') @@ -561,65 +131,3 @@ def static_analyzer(request, checksum, api=False): exp = exp.__doc__ append_scan_status(checksum, errmsg, repr(exp)) return print_n_send_error_response(request, repr(exp), api, exp) - - -def is_android_source(app_dir): - """Detect Android Source and IDE Type.""" - # Eclipse - man = os.path.isfile(os.path.join(app_dir, 'AndroidManifest.xml')) - src = os.path.exists(os.path.join(app_dir, 'src/')) - if man and src: - return 'eclipse', True - # Studio - man = os.path.isfile( - os.path.join(app_dir, 'app/src/main/AndroidManifest.xml'), - ) - java = os.path.exists(os.path.join(app_dir, 'app/src/main/java/')) - kotlin = os.path.exists(os.path.join(app_dir, 'app/src/main/kotlin/')) - if man and (java or kotlin): - return 'studio', True - return None, False - - -def valid_source_code(checksum, app_dir): - """Test if this is an valid source code zip.""" - try: - msg = 'Detecting source code type' - logger.info(msg) - append_scan_status(checksum, msg) - ide, is_and = is_android_source(app_dir) - if ide: - return ide, is_and - # Relaxed Android Source check, one level down - for x in os.listdir(app_dir): - obj = os.path.join(app_dir, x) - if not is_dir_exists(obj): - continue - ide, is_and = is_android_source(obj) - if ide: - move_to_parent(obj, app_dir) - return ide, is_and - # iOS Source - xcode = [f for f in os.listdir(app_dir) if f.endswith('.xcodeproj')] - if xcode: - return 'ios', True - # Relaxed iOS Source Check - for x in os.listdir(app_dir): - obj = os.path.join(app_dir, x) - if not is_dir_exists(obj): - continue - if [f for f in os.listdir(obj) if f.endswith('.xcodeproj')]: - return 'ios', True - return '', False - except Exception as exp: - msg = 'Error identifying source code 
type from zip' - logger.exception(msg) - append_scan_status(checksum, msg, repr(exp)) - - -def move_to_parent(inside, app_dir): - """Move contents of inside to app dir.""" - for x in os.listdir(inside): - full_path = os.path.join(inside, x) - shutil.move(full_path, app_dir) - shutil.rmtree(inside) diff --git a/mobsf/StaticAnalyzer/views/common/a.py b/mobsf/StaticAnalyzer/views/common/a.py index 2bbf2965d6..535ab9fbf9 100644 --- a/mobsf/StaticAnalyzer/views/common/a.py +++ b/mobsf/StaticAnalyzer/views/common/a.py @@ -114,6 +114,8 @@ def a_analysis(request, app_dict, rescan, api): 'bundle_supported_platforms': [], 'bundle_version_name': '', } + app_dict['infoplist'] = infoplist_dict + app_dict['all_files'] = all_files app_dict['appstore'] = '' app_dict['secrets'] = [] bin_dict = { @@ -172,10 +174,8 @@ def a_analysis(request, app_dict, rescan, api): code_dict['trackers'] = trackers context = save_get_ctx( app_dict, - infoplist_dict, code_dict, bin_dict, - all_files, rescan) context['virus_total'] = None if settings.VT_ENABLED: diff --git a/mobsf/StaticAnalyzer/views/common/async_task.py b/mobsf/StaticAnalyzer/views/common/async_task.py new file mode 100644 index 0000000000..0fb580d4e8 --- /dev/null +++ b/mobsf/StaticAnalyzer/views/common/async_task.py @@ -0,0 +1,106 @@ +"""Views to handle asynchronous tasks.""" +import logging +from datetime import timedelta + +from django.utils import timezone +from django.shortcuts import render +from django.conf import settings +from django.http import ( + HttpResponseRedirect, + JsonResponse, +) +from django.views.decorators.http import require_http_methods + +from django_q.tasks import async_task + +from mobsf.StaticAnalyzer.models import EnqueuedTask +from mobsf.MobSF.views.authentication import ( + login_required, +) +from mobsf.MobSF.utils import ( + append_scan_status, + get_scan_logs, +) + +logger = logging.getLogger(__name__) + + +def async_analysis(checksum, app_name, func, *args): + # Check if there is any task with the same checksum + # created within the last 1 minute + recent_task_exists = EnqueuedTask.objects.filter( + checksum=checksum, + created_at__gte=timezone.now() - timedelta(minutes=1), + ).exists() + if recent_task_exists: + logger.info('Analysis already in progress') + return HttpResponseRedirect('/tasks') + # Clear old tasks + queue_size = settings.QUEUE_MAX_SIZE + task_count = EnqueuedTask.objects.count() + if task_count > queue_size: + logger.info('Deleting oldest enqueued tasks') + # Get IDs of tasks to delete (keep the latest queue_size) + oldest_task_ids = list( + EnqueuedTask.objects.order_by('created_at') + .values_list('id', flat=True)[:task_count - queue_size]) + # Delete tasks by IDs + EnqueuedTask.objects.filter(id__in=oldest_task_ids).delete() + # Enqueue the task + task_id = async_task( + func, + *args, + queue=True, + save=False) + EnqueuedTask.objects.create( + task_id=task_id, + checksum=checksum, + file_name=app_name[:254]) + msg = f'Scan Queued with ID: {task_id}' + logger.info(msg) + append_scan_status(checksum, msg) + return HttpResponseRedirect('/tasks') + + +def update_enqueued_task(checksum, app_name, status): + """Update the Enqueued Task and others that matches the checksum.""" + EnqueuedTask.objects.filter(checksum=checksum).update( + app_name=app_name, + completed_at=timezone.now(), + status=status, + ) + return True + + +def get_live_status(enq): + """Get Live Status of the Task.""" + if enq.status not in {'Success', 'Failed'}: + logs = get_scan_logs(enq.checksum) + if logs: + return logs[-1] + return 
enq.status + + +@login_required +@require_http_methods(['POST', 'GET']) +def list_tasks(request): + if request.method == 'POST': + enqueued = EnqueuedTask.objects.all().order_by('-created_at') + task_data = [] + for enq in enqueued: + # Enqueued task is not in the completed tasks + task_data.append({ + 'task_id': enq.task_id, + 'file_name': enq.file_name, + 'app_name': enq.app_name, + 'checksum': enq.checksum, + 'created_at': enq.created_at, + 'completed_at': enq.completed_at, + 'status': get_live_status(enq), + }) + return JsonResponse(task_data, safe=False) + context = { + 'title': 'Scan Tasks', + 'version': settings.MOBSF_VER, + } + return render(request, 'general/tasks.html', context) diff --git a/mobsf/StaticAnalyzer/views/common/binary/strings.py b/mobsf/StaticAnalyzer/views/common/binary/strings.py index 13ad86e79d..c1d1694f34 100644 --- a/mobsf/StaticAnalyzer/views/common/binary/strings.py +++ b/mobsf/StaticAnalyzer/views/common/binary/strings.py @@ -34,3 +34,4 @@ def strings_on_binary(bin_path): return list(set(strings_util(bin_path))) except Exception: logger.exception('Extracting strings from binary') + return [] diff --git a/mobsf/StaticAnalyzer/views/common/shared_func.py b/mobsf/StaticAnalyzer/views/common/shared_func.py index 13b6d30fbf..d7939d650a 100755 --- a/mobsf/StaticAnalyzer/views/common/shared_func.py +++ b/mobsf/StaticAnalyzer/views/common/shared_func.py @@ -4,7 +4,6 @@ Module providing the shared functions for iOS and Android """ -import io import hashlib import logging import os @@ -30,6 +29,7 @@ is_path_traversal, is_safe_path, print_n_send_error_response, + set_permissions, ) from mobsf.MobSF.views.scanning import ( add_to_recent_scan, @@ -62,7 +62,7 @@ def hash_gen(checksum, app_path) -> tuple: sha1 = hashlib.sha1() sha256 = hashlib.sha256() block_size = 65536 - with io.open(app_path, mode='rb') as afile: + with open(app_path, mode='rb') as afile: buf = afile.read(block_size) while buf: sha1.update(buf) @@ -108,6 +108,9 @@ def unzip(checksum, app_path, ext_path): unzip_b = shutil.which('unzip') subprocess.call( [unzip_b, '-o', '-q', app_path, '-d', ext_path]) + # Set permissions, packed files + # may not have proper permissions + set_permissions(ext_path) dat = subprocess.check_output([unzip_b, '-qq', '-l', app_path]) dat = dat.decode('utf-8').split('\n') files_det = ['Length Date Time Name'] diff --git a/mobsf/StaticAnalyzer/views/ios/binary_analysis.py b/mobsf/StaticAnalyzer/views/ios/binary_analysis.py index 6b3675dffb..62d30f523d 100755 --- a/mobsf/StaticAnalyzer/views/ios/binary_analysis.py +++ b/mobsf/StaticAnalyzer/views/ios/binary_analysis.py @@ -84,6 +84,7 @@ def binary_analysis(checksum, src, tools_dir, app_dir, executable_name): 'strings': [], 'bin_info': {}, 'bin_type': '', + 'bin_path': None, } try: binary_findings = {} @@ -133,8 +134,8 @@ def binary_analysis(checksum, src, tools_dir, app_dir, executable_name): bin_dict['bin_info'] = bin_info bin_dict['bin_type'] = bin_type logger.info('Running strings against the Binary') - bin_dict['strings'] = strings_on_binary( - bin_path.as_posix()) + bin_dict['strings'] = strings_on_binary(bin_path.as_posix()) + bin_dict['bin_path'] = bin_path except Exception as exp: msg = 'Failed to run IPA Binary Analysis' logger.exception(msg) diff --git a/mobsf/StaticAnalyzer/views/ios/code_analysis.py b/mobsf/StaticAnalyzer/views/ios/code_analysis.py index d29a923a36..1a55567f94 100755 --- a/mobsf/StaticAnalyzer/views/ios/code_analysis.py +++ b/mobsf/StaticAnalyzer/views/ios/code_analysis.py @@ -10,7 +10,7 @@ from 
mobsf.StaticAnalyzer.views.common.shared_func import ( url_n_email_extract, ) -from mobsf.StaticAnalyzer.views.sast_engine import scan +from mobsf.StaticAnalyzer.views.sast_engine import SastEngine from mobsf.MobSF.utils import ( append_scan_status, ) @@ -54,49 +54,75 @@ def ios_source_analysis(checksum, src): domains = {} source_type = '' source_types = set() + skp = settings.SKIP_CLASS_PATH + msg = 'iOS Source Code Analysis Started' + logger.info(msg) + append_scan_status(checksum, msg) - # Code and API Analysis - objc_findings = scan( - checksum, - objective_c_rules.as_posix(), - {'.m'}, - [src], - settings.SKIP_CLASS_PATH) + # Objective C Analysis + options = { + 'match_rules': objective_c_rules.as_posix(), + 'match_extensions': {'.m'}, + 'ignore_paths': skp, + } + objc_findings = SastEngine(options, src).scan() if objc_findings: source_types.add(_SourceType.objc) - swift_findings = scan( - checksum, - swift_rules.as_posix(), - {'.swift'}, - [src], - settings.SKIP_CLASS_PATH) + + # Swift Analysis + options = { + 'match_rules': swift_rules.as_posix(), + 'match_extensions': {'.swift'}, + 'ignore_paths': skp, + } + swift_findings = SastEngine(options, src).scan() if swift_findings: source_types.add(_SourceType.swift) code_findings = merge_findings(swift_findings, objc_findings) + msg = 'iOS Source Code Analysis Completed' + logger.info(msg) + append_scan_status(checksum, msg) + # API Analysis - api_findings = scan( - checksum, - api_rules.as_posix(), - {'.m', '.swift'}, - [src], - settings.SKIP_CLASS_PATH) + msg = 'iOS API Analysis Started' + logger.info(msg) + append_scan_status(checksum, msg) + options = { + 'match_rules': api_rules.as_posix(), + 'match_extensions': {'.m', '.swift'}, + 'ignore_paths': skp, + } + api_findings = SastEngine(options, src).scan() + msg = 'iOS API Analysis Completed' + logger.info(msg) + append_scan_status(checksum, msg) # Extract URLs and Emails - skp = settings.SKIP_CLASS_PATH + msg = 'Extracting Emails and URLs from Source Code' + logger.info(msg) + append_scan_status(checksum, msg) for pfile in Path(src).rglob('*'): if ( (pfile.suffix in ('.m', '.swift') and any(skip_path in pfile.as_posix() for skip_path in skp) is False - and pfile.is_dir() is False) + and pfile.is_file()) ): - relative_java_path = pfile.as_posix().replace(src, '') + content = None + try: + content = pfile.read_text('utf-8', 'ignore') + # Certain file path cannot be read in windows + except Exception: + continue + relative_src_path = pfile.as_posix().replace(src, '') urls, urls_nf, emails_nf = url_n_email_extract( - pfile.read_text('utf-8', 'ignore'), relative_java_path) + content, relative_src_path) url_list.extend(urls) url_n_file.extend(urls_nf) email_n_file.extend(emails_nf) - + msg = 'Email and URL Extraction Completed' + logger.info(msg) + append_scan_status(checksum, msg) if not source_types: source_type = _SourceType.nocode.value elif len(source_types) > 1: diff --git a/mobsf/StaticAnalyzer/views/ios/db_interaction.py b/mobsf/StaticAnalyzer/views/ios/db_interaction.py index 541b989f96..9de946b028 100755 --- a/mobsf/StaticAnalyzer/views/ios/db_interaction.py +++ b/mobsf/StaticAnalyzer/views/ios/db_interaction.py @@ -81,12 +81,12 @@ def get_context_from_db_entry(db_entry): def get_context_from_analysis(app_dict, - info_dict, code_dict, - bin_dict, - all_files): + bin_dict): """Get the context for IPA/ZIP from analysis results.""" try: + info_dict = app_dict['infoplist'] + all_files = app_dict['all_files'] bundle_id = info_dict['id'] code = process_suppression( 
code_dict['code_anal'], @@ -146,12 +146,12 @@ def get_context_from_analysis(app_dict, def save_or_update(update_type, app_dict, - info_dict, code_dict, - bin_dict, - all_files): + bin_dict): """Save/Update an IPA/ZIP DB entry.""" try: + info_dict = app_dict['infoplist'] + all_files = app_dict['all_files'] values = { 'FILE_NAME': app_dict['file_name'], 'APP_NAME': info_dict['bin_name'], @@ -218,7 +218,7 @@ def save_or_update(update_type, append_scan_status(app_dict['md5_hash'], msg, repr(exp)) -def save_get_ctx(app_dict, pdict, code_dict, bin_dict, all_files, rescan): +def save_get_ctx(app_dict, code_dict, bin_dict, rescan): # Saving to DB logger.info('Connecting to DB') if rescan: @@ -235,13 +235,9 @@ def save_get_ctx(app_dict, pdict, code_dict, bin_dict, all_files, rescan): save_or_update( action, app_dict, - pdict, code_dict, - bin_dict, - all_files) + bin_dict) return get_context_from_analysis( app_dict, - pdict, code_dict, - bin_dict, - all_files) + bin_dict) diff --git a/mobsf/StaticAnalyzer/views/ios/dylib.py b/mobsf/StaticAnalyzer/views/ios/dylib.py index e52955ec42..6180233988 100644 --- a/mobsf/StaticAnalyzer/views/ios/dylib.py +++ b/mobsf/StaticAnalyzer/views/ios/dylib.py @@ -101,6 +101,8 @@ def dylib_analysis(request, app_dict, rescan, api): 'bundle_supported_platforms': [], 'bundle_version_name': '', } + app_dict['infoplist'] = infoplist_dict + app_dict['all_files'] = all_files app_dict['appstore'] = '' app_dict['secrets'] = [] bin_dict = { @@ -154,10 +156,8 @@ def dylib_analysis(request, app_dict, rescan, api): code_dict['trackers'] = trackers context = save_get_ctx( app_dict, - infoplist_dict, code_dict, bin_dict, - all_files, rescan) context['virus_total'] = None if settings.VT_ENABLED: diff --git a/mobsf/StaticAnalyzer/views/ios/file_analysis.py b/mobsf/StaticAnalyzer/views/ios/file_analysis.py index 40a924938c..ea0248dd6a 100644 --- a/mobsf/StaticAnalyzer/views/ios/file_analysis.py +++ b/mobsf/StaticAnalyzer/views/ios/file_analysis.py @@ -1,7 +1,6 @@ # -*- coding: utf_8 -*- """iOS File Analysis.""" -import os import shutil import logging from pathlib import Path @@ -18,7 +17,7 @@ logger = logging.getLogger(__name__) -def ios_list_files(md5_hash, src, binary_form, mode): +def ios_list_files(md5_hash, src, mode): """List iOS files.""" try: msg = 'iOS File Analysis and Normalization' @@ -31,63 +30,73 @@ def ios_list_files(md5_hash, src, binary_form, mode): full_paths = [] database = [] plist = [] - for dirname, _, files in os.walk(src): - for jfile in files: - if not jfile.endswith('.DS_Store'): - file_path = os.path.join(src, dirname, jfile) - if '__MACOSX' in file_path: - continue - if '+' in jfile: - plus2x = os.path.join( - src, dirname, jfile.replace('+', 'x')) - shutil.move(file_path, plus2x) - file_path = plus2x - fileparam = file_path.replace(src, '') - filez.append(fileparam) - full_paths.append(file_path) - ext = Path(jfile).suffix - if ext in ('.cer', '.pem', '.cert', '.crt', - '.pub', '.key', '.pfx', '.p12', '.der'): - certz.append({ - 'file_path': escape(file_path.replace(src, '')), - 'type': None, - 'hash': None, - }) - if ext in ('.db', '.sqlitedb', '.sqlite', '.sqlite3'): - database.append({ - 'file_path': escape(fileparam), - 'type': mode, - 'hash': md5_hash, - }) + mode = 'ios' if mode == 'zip' else 'ipa' - if jfile.endswith('.plist'): - if binary_form: - convert_bin_xml(file_path) - plist.append({ - 'file_path': escape(fileparam), - 'type': mode, - 'hash': md5_hash, - }) + # Walk through the directory + for file_path in Path(src).rglob('*'): + if 
(file_path.is_file() + and not (file_path.name.endswith('.DS_Store') + or '__MACOSX' in str(file_path))): + # Normalize '+' in file names + if '+' in file_path.name: + normalized_path = file_path.with_name( + file_path.name.replace('+', 'x')) + shutil.move(file_path, normalized_path) + file_path = normalized_path - if len(database) > 0: + # Append file details + relative_path = file_path.relative_to(src) + filez.append(str(relative_path)) + full_paths.append(str(file_path)) + + ext = file_path.suffix.lower() + + # Categorize files by type + if ext in {'.cer', '.pem', '.cert', '.crt', '.pub', + '.key', '.pfx', '.p12', '.der'}: + certz.append({ + 'file_path': escape(str(relative_path)), + 'type': None, + 'hash': None, + }) + elif ext in {'.db', '.sqlitedb', '.sqlite', '.sqlite3'}: + database.append({ + 'file_path': escape(str(relative_path)), + 'type': mode, + 'hash': md5_hash, + }) + elif ext in {'.plist', '.json'}: + if mode == 'ipa' and ext == '.plist': + convert_bin_xml(file_path.as_posix()) + plist.append({ + 'file_path': escape(str(relative_path)), + 'type': mode, + 'hash': md5_hash, + }) + + # Group special files + if database: sfiles.append({ 'issue': 'SQLite Files', 'files': database, }) - if len(plist) > 0: + if plist: sfiles.append({ 'issue': 'Plist Files', 'files': plist, }) - if len(certz) > 0: + if certz: sfiles.append({ 'issue': 'Certificate/Key Files Hardcoded inside the App.', 'files': certz, }) - return {'files_short': filez, - 'files_long': full_paths, - 'special_files': sfiles} + + return { + 'files_short': filez, + 'files_long': full_paths, + 'special_files': sfiles, + } except Exception as exp: msg = 'iOS File Analysis' logger.exception(msg) diff --git a/mobsf/StaticAnalyzer/views/ios/icon_analysis.py b/mobsf/StaticAnalyzer/views/ios/icon_analysis.py index bec66a398a..ff0c00f33d 100644 --- a/mobsf/StaticAnalyzer/views/ios/icon_analysis.py +++ b/mobsf/StaticAnalyzer/views/ios/icon_analysis.py @@ -19,9 +19,10 @@ logger = logging.getLogger(__name__) -def get_icon_from_ipa(app_dict, binary): +def get_icon_from_ipa(app_dict): """Get app icon from IPA.""" try: + binary = app_dict['infoplist'].get('bin') md5 = app_dict['md5_hash'] bin_dir = app_dict['bin_dir'] msg = 'Fetching IPA icon path' @@ -37,8 +38,8 @@ def get_icon_from_ipa(app_dict, binary): return icon_file = icons.pop() outfile = Path(settings.DWD_DIR) / f'{md5}-icon.png' + app_dict['icon_path'] = outfile.name tools_dir = Path(settings.BASE_DIR) / 'StaticAnalyzer' / 'tools' / 'ios' - cgbipng_bin = None arch = platform.machine() system = platform.system() # Uncrush PNG. 
CgBI -> PNG @@ -54,24 +55,27 @@ def get_icon_from_ipa(app_dict, binary): raise ValueError('PNG is not CgBI') except Exception: shutil.copy2(icon_file, outfile.as_posix()) - elif system == 'Windows' and arch in ('AMD64', 'x86'): - cgbipng_bin = 'CgbiPngFix.exe' - elif system == 'Linux' and arch == 'x86_64': - cgbipng_bin = 'CgbiPngFix_amd64' - elif system == 'Linux' and arch == 'aarch64': - cgbipng_bin = 'CgbiPngFix_arm64' - if cgbipng_bin: - cbin = tools_dir / 'CgbiPngFix' / cgbipng_bin - args = [cbin.as_posix(), '-i', - icon_file, '-o', outfile.as_posix()] - try: - out = subprocess.run(args, capture_output=True) - except Exception: - # Fails or PNG is not crushed - shutil.copy2(icon_file, outfile.as_posix()) else: - shutil.copy2(icon_file, outfile.as_posix()) - app_dict['icon_path'] = outfile.name + # Windows/Linux + cgbipng_bin = None + if system == 'Windows' and arch in ('AMD64', 'x86'): + cgbipng_bin = 'CgbiPngFix.exe' + elif system == 'Linux' and arch == 'x86_64': + cgbipng_bin = 'CgbiPngFix_amd64' + elif system == 'Linux' and arch == 'aarch64': + cgbipng_bin = 'CgbiPngFix_arm64' + if cgbipng_bin: + cbin = tools_dir / 'CgbiPngFix' / cgbipng_bin + args = [cbin.as_posix(), '-i', + icon_file, '-o', outfile.as_posix()] + try: + out = subprocess.run(args, capture_output=True) + except Exception: + # Fails or PNG is not crushed + shutil.copy2(icon_file, outfile.as_posix()) + else: + logger.warning('CgbiPngFix not available for %s %s', system, arch) + shutil.copy2(icon_file, outfile.as_posix()) except Exception as exp: msg = 'Error Fetching IPA icon' logger.exception(msg) diff --git a/mobsf/StaticAnalyzer/views/ios/ipa.py b/mobsf/StaticAnalyzer/views/ios/ipa.py new file mode 100644 index 0000000000..9822eb5020 --- /dev/null +++ b/mobsf/StaticAnalyzer/views/ios/ipa.py @@ -0,0 +1,330 @@ +# -*- coding: utf_8 -*- +"""iOS Analysis.""" +import logging +from pathlib import Path + +import mobsf.MalwareAnalyzer.views.Trackers as Trackers +import mobsf.MalwareAnalyzer.views.VirusTotal as VirusTotal + +from django.conf import settings +from django.shortcuts import render + +from mobsf.MobSF.utils import ( + append_scan_status, + file_size, + print_n_send_error_response, +) +from mobsf.StaticAnalyzer.models import ( + StaticAnalyzerIOS, +) +from mobsf.StaticAnalyzer.views.ios.appstore import app_search +from mobsf.StaticAnalyzer.views.ios.binary_analysis import ( + binary_analysis, +) +from mobsf.StaticAnalyzer.views.common.binary.lib_analysis import ( + library_analysis, +) +from mobsf.StaticAnalyzer.views.ios.code_analysis import ios_source_analysis +from mobsf.StaticAnalyzer.views.ios.db_interaction import ( + get_context_from_db_entry, + save_get_ctx, +) +from mobsf.StaticAnalyzer.views.ios.file_analysis import ios_list_files +from mobsf.StaticAnalyzer.views.ios.icon_analysis import ( + get_icon_from_ipa, + get_icon_source, +) +from mobsf.StaticAnalyzer.views.ios.plist_analysis import ( + get_plist_secrets, + plist_analysis, +) +from mobsf.StaticAnalyzer.views.ios.strings import ( + get_strings_metadata, +) +from mobsf.StaticAnalyzer.views.common.shared_func import ( + get_avg_cvss, + hash_gen, + strings_and_entropies, + unzip, +) +from mobsf.StaticAnalyzer.views.common.firebase import ( + firebase_analysis, +) +from mobsf.StaticAnalyzer.views.common.appsec import ( + get_ios_dashboard, +) +from mobsf.StaticAnalyzer.views.common.async_task import ( + async_analysis, + update_enqueued_task, +) +from mobsf.MalwareAnalyzer.views.MalwareDomainCheck import ( + MalwareDomainCheck, +) +from 
mobsf.MobSF.views.authorization import ( + Permissions, + has_permission, +) + +logger = logging.getLogger(__name__) + + +def initialize_app_dic(app_dic, checksum, scan_type): + """Initialize App Dictionary.""" + app_dic['app_file'] = f'{checksum}.{scan_type}' + app_dic['app_path'] = (app_dic['app_dirp'] / app_dic['app_file']).as_posix() + app_dic['size'] = str(file_size(app_dic['app_path'])) + 'MB' + app_dic['sha1'], app_dic['sha256'] = hash_gen(checksum, app_dic['app_path']) + + +def extract_and_check_ipa(checksum, app_dic): + """Extract and Check IPA.""" + # EXTRACT IPA + msg = 'Extracting IPA' + logger.info(msg) + append_scan_status(checksum, msg) + unzip( + checksum, + app_dic['app_path'], + app_dic['app_dir']) + # Identify Payload directory + dirs = app_dic['app_dirp'].glob('**/*') + for _dir in dirs: + if 'payload' in _dir.as_posix().lower(): + app_dic['bin_dir'] = app_dic['app_dirp'] / _dir + break + else: + return False + app_dic['bin_dir'] = app_dic['bin_dir'].as_posix() + '/' + return True + + +def common_analysis(scan_type, app_dic, checksum): + """Common Analysis for ipa and zip.""" + location = app_dic['app_dir'] + if scan_type == 'ipa': + location = app_dic['bin_dir'] + # Get Files + app_dic['all_files'] = ios_list_files( + checksum, + location, + scan_type) + # Plist files are converted to xml/readable for ipa + app_dic['infoplist'] = plist_analysis( + checksum, + location, + scan_type) + app_dic['appstore'] = app_search( + checksum, + app_dic['infoplist'].get('id')) + app_dic['secrets'] = get_plist_secrets( + checksum, + location) + + +def common_firebase_and_trackers(code_dict, app_dic, checksum): + """Common Firebase and Trackers.""" + # Firebase Analysis + code_dict['firebase'] = firebase_analysis( + checksum, + code_dict) + # Extract Trackers from Domains + trk = Trackers.Trackers( + checksum, + None, + app_dic['tools_dir']) + code_dict['trackers'] = trk.get_trackers_domains_or_deps( + code_dict['domains'], []) + + +def get_scan_subject(app_dic, bin_dict): + """Get Scan Subject.""" + app_name = None + pkg_name = None + if bin_dict.get('bin_path'): + app_name = bin_dict['bin_path'].name if bin_dict['bin_path'] else None + if app_dic.get('infoplist'): + pkg_name = app_dic['infoplist'].get('id') + + if app_name and pkg_name: + return f'{app_name} ({pkg_name})' + elif pkg_name: + return pkg_name + elif app_name: + return app_name + else: + return 'iOS Binary' + + +def ipa_analysis_task(checksum, app_dic, rescan, queue=False): + """IPA Analysis Task.""" + if queue: + settings.ASYNC_ANALYSIS = True + scan_type = 'ipa' + append_scan_status(checksum, 'init') + msg = 'iOS Binary (IPA) Analysis Started' + logger.info(msg) + append_scan_status(checksum, msg) + initialize_app_dic(app_dic, checksum, scan_type) + + if not extract_and_check_ipa(checksum, app_dic): + msg = ('IPA is malformed! 
MobSF cannot find Payload directory') + append_scan_status(checksum, 'IPA is malformed', msg) + if queue: + return update_enqueued_task( + checksum, 'Failed', 'Failed') + return None, msg + common_analysis(scan_type, app_dic, checksum) + + # IPA Binary Analysis + bin_dict = binary_analysis( + checksum, + app_dic['bin_dir'], + app_dic['tools_dir'], + app_dic['app_dir'], + app_dic['infoplist'].get('bin')) + # Analyze dylibs and frameworks + lb = library_analysis( + checksum, + app_dic['bin_dir'], + 'macho') + bin_dict['dylib_analysis'] = lb['macho_analysis'] + bin_dict['framework_analysis'] = lb['framework_analysis'] + # Extract String metadata from binary + code_dict = get_strings_metadata( + app_dic, + bin_dict, + app_dic['all_files'], + lb['macho_strings']) + # Domain Extraction and Malware Check + code_dict['domains'] = MalwareDomainCheck().scan( + checksum, + code_dict['urls_list']) + # Get Icon + get_icon_from_ipa(app_dic) + # Firebase and Trackers + common_firebase_and_trackers(code_dict, app_dic, checksum) + + code_dict['api'] = {} + code_dict['code_anal'] = {} + context = save_get_ctx( + app_dic, + code_dict, + bin_dict, + rescan) + if queue: + subject = get_scan_subject(app_dic, bin_dict) + return update_enqueued_task( + checksum, subject, 'Success') + return context, None + + +def generate_dynamic_context(request, app_dic, context, checksum, api): + """Generate Dynamic Context.""" + context['virus_total'] = None + if settings.VT_ENABLED: + vt = VirusTotal.VirusTotal(checksum) + context['virus_total'] = vt.get_result(app_dic['app_path']) + context['appsec'] = get_ios_dashboard(context, True) + context['average_cvss'] = get_avg_cvss(context['binary_analysis']) + template = 'static_analysis/ios_binary_analysis.html' + return context if api else render(request, template, context) + + +def ipa_analysis(request, app_dic, rescan, api): + """IPA Analysis.""" + checksum = app_dic['md5_hash'] + ipa_db = StaticAnalyzerIOS.objects.filter(MD5=checksum) + if ipa_db.exists() and not rescan: + context = get_context_from_db_entry(ipa_db) + return generate_dynamic_context(request, app_dic, context, checksum, api) + else: + # IPA Analysis + if not has_permission(request, Permissions.SCAN, api): + return print_n_send_error_response(request, 'Permission Denied', False) + if settings.ASYNC_ANALYSIS: + return async_analysis( + checksum, + app_dic.get('file_name', ''), + ipa_analysis_task, checksum, app_dic, rescan) + context, err = ipa_analysis_task(checksum, app_dic, rescan) + if err: + return print_n_send_error_response(request, err, api) + return generate_dynamic_context(request, app_dic, context, checksum, api) + + +def ios_analysis_task(checksum, app_dic, rescan, queue=False): + """IOS Analysis Task.""" + if queue: + settings.ASYNC_ANALYSIS = True + scan_type = 'zip' + logger.info('iOS Source Code Analysis Started') + initialize_app_dic(app_dic, checksum, scan_type) + + # ANALYSIS BEGINS - Already Unzipped + # append_scan_status init done in android static analyzer + common_analysis(scan_type, app_dic, checksum) + + # IOS Source Code Analysis + code_dict = ios_source_analysis( + checksum, + app_dic['app_dir']) + # Extract Strings and entropies from source code + ios_strs = strings_and_entropies( + checksum, + Path(app_dic['app_dir']), + ['.swift', '.m', '.h', '.plist', '.json']) + if ios_strs['secrets']: + app_dic['secrets'].extend(list(ios_strs['secrets'])) + # Get App Icon + get_icon_source(app_dic) + # Firebase and Trackers + common_firebase_and_trackers(code_dict, app_dic, checksum) + + 
bin_dict = { + 'checksec': {}, + 'libraries': [], + 'bin_code_analysis': {}, + 'strings': list(ios_strs['strings']), + 'bin_info': {}, + 'bin_type': code_dict['source_type'], + 'dylib_analysis': {}, + 'framework_analysis': {}, + } + context = save_get_ctx( + app_dic, + code_dict, + bin_dict, + rescan) + if queue: + subject = get_scan_subject(app_dic, bin_dict) + return update_enqueued_task( + checksum, subject, 'Success') + return context + + +def generate_dynamic_ios_context(request, context, api): + """Generate Dynamic Context for IOS.""" + context['appsec'] = get_ios_dashboard(context, True) + context['average_cvss'] = get_avg_cvss(context['code_analysis']) + template = 'static_analysis/ios_source_analysis.html' + return context if api else render(request, template, context) + + +def ios_analysis(request, app_dic, rescan, api): + """IOS Source Code Analysis.""" + checksum = app_dic['md5_hash'] + ios_zip_db = StaticAnalyzerIOS.objects.filter(MD5=checksum) + if ios_zip_db.exists() and not rescan: + context = get_context_from_db_entry(ios_zip_db) + return generate_dynamic_ios_context(request, context, api) + else: + # IOS Source Analysis + if not has_permission(request, Permissions.SCAN, api): + return print_n_send_error_response(request, 'Permission Denied', False) + if settings.ASYNC_ANALYSIS: + return async_analysis( + checksum, + app_dic.get('file_name', ''), + ios_analysis_task, checksum, app_dic, rescan) + context = ios_analysis_task(checksum, app_dic, rescan) + return generate_dynamic_ios_context(request, context, api) diff --git a/mobsf/StaticAnalyzer/views/ios/plist_analysis.py b/mobsf/StaticAnalyzer/views/ios/plist_analysis.py index 862a124c9a..60b3a16db2 100755 --- a/mobsf/StaticAnalyzer/views/ios/plist_analysis.py +++ b/mobsf/StaticAnalyzer/views/ios/plist_analysis.py @@ -108,7 +108,7 @@ def convert_bin_xml(bin_xml_file): logger.warning('Failed to convert plist') -def plist_analysis(checksum, src, is_source): +def plist_analysis(checksum, src, scan_type): """Plist Analysis.""" try: msg = 'iOS Info.plist Analysis Started' @@ -134,7 +134,7 @@ def plist_analysis(checksum, src, is_source): } plist_file = None plist_files = [] - if is_source: + if scan_type == 'zip': msg = 'Finding Info.plist in iOS Source' logger.info(msg) append_scan_status(checksum, msg) @@ -174,7 +174,7 @@ def plist_analysis(checksum, src, is_source): plist_obj).decode('utf-8', 'ignore') plist_info['bin_name'] = (plist_obj.get('CFBundleDisplayName', '') or plist_obj.get('CFBundleName', '')) - if not plist_info['bin_name'] and not is_source: + if not plist_info['bin_name'] and scan_type == 'ipa': # For iOS IPA plist_info['bin_name'] = dot_app_dir.replace('.app', '') plist_info['bin'] = plist_obj.get('CFBundleExecutable', '') diff --git a/mobsf/StaticAnalyzer/views/ios/static_analyzer.py b/mobsf/StaticAnalyzer/views/ios/static_analyzer.py index cea57ae21b..e339980dce 100755 --- a/mobsf/StaticAnalyzer/views/ios/static_analyzer.py +++ b/mobsf/StaticAnalyzer/views/ios/static_analyzer.py @@ -3,82 +3,35 @@ import logging from pathlib import Path -import mobsf.MalwareAnalyzer.views.Trackers as Trackers -import mobsf.MalwareAnalyzer.views.VirusTotal as VirusTotal - from django.conf import settings -from django.shortcuts import render from django.template.defaulttags import register from mobsf.MobSF.utils import ( append_scan_status, - file_size, is_md5, print_n_send_error_response, relative_path, ) from mobsf.StaticAnalyzer.models import ( RecentScansDB, - StaticAnalyzerIOS, -) -from 
mobsf.StaticAnalyzer.views.ios.appstore import app_search -from mobsf.StaticAnalyzer.views.ios.binary_analysis import ( - binary_analysis, -) -from mobsf.StaticAnalyzer.views.common.binary.lib_analysis import ( - library_analysis, -) -from mobsf.StaticAnalyzer.views.ios.code_analysis import ios_source_analysis -from mobsf.StaticAnalyzer.views.ios.db_interaction import ( - get_context_from_db_entry, - save_get_ctx, -) -from mobsf.StaticAnalyzer.views.ios.dylib import dylib_analysis -from mobsf.StaticAnalyzer.views.ios.file_analysis import ios_list_files -from mobsf.StaticAnalyzer.views.ios.icon_analysis import ( - get_icon_from_ipa, - get_icon_source, -) -from mobsf.StaticAnalyzer.views.ios.plist_analysis import ( - get_plist_secrets, - plist_analysis, -) -from mobsf.StaticAnalyzer.views.ios.strings import ( - get_strings_metadata, ) from mobsf.StaticAnalyzer.views.common.a import ( a_analysis, ) -from mobsf.StaticAnalyzer.views.common.shared_func import ( - get_avg_cvss, - hash_gen, - strings_and_entropies, - unzip, -) -from mobsf.StaticAnalyzer.views.common.firebase import ( - firebase_analysis, -) -from mobsf.StaticAnalyzer.views.common.appsec import ( - get_ios_dashboard, +from mobsf.StaticAnalyzer.views.ios.dylib import ( + dylib_analysis, ) -from mobsf.MalwareAnalyzer.views.MalwareDomainCheck import ( - MalwareDomainCheck, +from mobsf.StaticAnalyzer.views.ios.ipa import ( + ios_analysis, + ipa_analysis, ) from mobsf.MobSF.views.authentication import ( login_required, ) -from mobsf.MobSF.views.authorization import ( - Permissions, - has_permission, -) logger = logging.getLogger(__name__) register.filter('relative_path', relative_path) -############################################################## -# iOS Static Code Analysis IPA and Source Code -############################################################## - @login_required def static_analyzer_ios(request, checksum, api=False): @@ -120,225 +73,20 @@ def static_analyzer_ios(request, checksum, api=False): app_dict['directory'] = Path(settings.BASE_DIR) # BASE DIR app_dict['file_name'] = filename # APP ORIGINAL NAME app_dict['md5_hash'] = checksum # MD5 - app_dir = Path(settings.UPLD_DIR) / checksum - app_dict['app_dir'] = app_dir.as_posix() + '/' + app_dict['app_dirp'] = Path(settings.UPLD_DIR) / checksum + app_dict['app_dir'] = app_dict['app_dirp'].as_posix() + '/' tools_dir = app_dict[ 'directory'] / 'StaticAnalyzer' / 'tools' / 'ios' app_dict['tools_dir'] = tools_dir.as_posix() app_dict['icon_path'] = '' if file_type == 'ipa': - app_dict['app_file'] = f'{checksum}.ipa' - app_dict['app_path'] = app_dir / app_dict['app_file'] - app_dict['app_path'] = app_dict['app_path'].as_posix() - # DB - ipa_db = StaticAnalyzerIOS.objects.filter(MD5=checksum) - if ipa_db.exists() and not rescan: - context = get_context_from_db_entry(ipa_db) - else: - if not has_permission(request, Permissions.SCAN, api): - return print_n_send_error_response( - request, - 'Permission Denied', - False) - append_scan_status(checksum, 'init') - msg = 'iOS Binary (IPA) Analysis Started' - logger.info(msg) - append_scan_status(checksum, msg) - app_dict['size'] = str( - file_size(app_dict['app_path'])) + 'MB' # FILE SIZE - app_dict['sha1'], app_dict['sha256'] = hash_gen( - checksum, - app_dict['app_path']) # SHA1 & SHA256 HASHES - msg = 'Extracting IPA' - logger.info(msg) - append_scan_status(checksum, msg) - # EXTRACT IPA - unzip( - checksum, - app_dict['app_path'], - app_dict['app_dir']) - # Identify Payload directory - dirs = app_dir.glob('**/*') - for _dir in dirs: - 
if 'payload' in _dir.as_posix().lower(): - app_dict['bin_dir'] = app_dict['app_dir'] / _dir - break - else: - msg = ('IPA is malformed! ' - 'MobSF cannot find Payload directory') - append_scan_status(checksum, 'IPA is malformed', msg) - return print_n_send_error_response( - request, - msg, - api) - app_dict['bin_dir'] = app_dict['bin_dir'].as_posix() + '/' - # Get Files - all_files = ios_list_files( - checksum, - app_dict['bin_dir'], - True, - 'ipa') - # Plist files are converted to xml/readable - infoplist_dict = plist_analysis( - checksum, - app_dict['bin_dir'], - False) - app_dict['appstore'] = app_search( - checksum, - infoplist_dict.get('id')) - app_dict['secrets'] = get_plist_secrets( - checksum, - app_dict['bin_dir']) - bin_dict = binary_analysis( - checksum, - app_dict['bin_dir'], - app_dict['tools_dir'], - app_dict['app_dir'], - infoplist_dict.get('bin')) - # Analyze dylibs and frameworks - lb = library_analysis( - checksum, - app_dict['bin_dir'], - 'macho') - bin_dict['dylib_analysis'] = lb['macho_analysis'] - bin_dict['framework_analysis'] = lb['framework_analysis'] - # Get Icon - get_icon_from_ipa( - app_dict, - infoplist_dict.get('bin')) - # Extract String metadata - code_dict = get_strings_metadata( - app_dict, - bin_dict, - all_files, - lb['macho_strings']) - # Domain Extraction and Malware Check - code_dict['domains'] = MalwareDomainCheck().scan( - checksum, - code_dict['urls_list']) - # Extract Trackers from Domains - trk = Trackers.Trackers( - checksum, - None, - app_dict['tools_dir']) - trackers = trk.get_trackers_domains_or_deps( - code_dict['domains'], []) - code_dict['api'] = {} - code_dict['code_anal'] = {} - code_dict['firebase'] = firebase_analysis( - checksum, - code_dict) - code_dict['trackers'] = trackers - context = save_get_ctx( - app_dict, - infoplist_dict, - code_dict, - bin_dict, - all_files, - rescan) - context['virus_total'] = None - if settings.VT_ENABLED: - vt = VirusTotal.VirusTotal(checksum) - context['virus_total'] = vt.get_result( - app_dict['app_path']) - context['appsec'] = get_ios_dashboard(context, True) - context['average_cvss'] = get_avg_cvss( - context['binary_analysis']) - template = 'static_analysis/ios_binary_analysis.html' - if api: - return context - else: - return render(request, template, context) + return ipa_analysis(request, app_dict, rescan, api) elif file_type == 'dylib': return dylib_analysis(request, app_dict, rescan, api) elif file_type == 'a': return a_analysis(request, app_dict, rescan, api) elif file_type in ('ios', 'zip'): - ios_zip_db = StaticAnalyzerIOS.objects.filter( - MD5=checksum) - if ios_zip_db.exists() and not rescan: - context = get_context_from_db_entry(ios_zip_db) - else: - if not has_permission(request, Permissions.SCAN, api): - return print_n_send_error_response( - request, - 'Permission Denied', - False) - logger.info('iOS Source Code Analysis Started') - app_dict['app_file'] = app_dict[ - 'md5_hash'] + '.zip' # NEW FILENAME - app_dict['app_path'] = app_dir / app_dict['app_file'] - app_dict['app_path'] = app_dict['app_path'].as_posix() - # ANALYSIS BEGINS - Already Unzipped - # append_scan_status init done in android static analyzer - app_dict['size'] = str( - file_size(app_dict['app_path'])) + 'MB' # FILE SIZE - app_dict['sha1'], app_dict['sha256'] = hash_gen( - checksum, - app_dict['app_path']) # SHA1 & SHA256 HASHES - all_files = ios_list_files( - checksum, - app_dict['app_dir'], - False, - 'ios') - infoplist_dict = plist_analysis( - checksum, - app_dict['app_dir'], - True) - app_dict['appstore'] = 
app_search( - checksum, - infoplist_dict.get('id')) - app_dict['secrets'] = get_plist_secrets( - checksum, - app_dict['app_dir']) - code_analysis_dic = ios_source_analysis( - checksum, - app_dict['app_dir']) - ios_strs = strings_and_entropies( - checksum, - Path(app_dict['app_dir']), - ['.swift', '.m', '.h', '.plist']) - if ios_strs['secrets']: - app_dict['secrets'].extend(list(ios_strs['secrets'])) - # Get App Icon - get_icon_source(app_dict) - # Firebase DB Check - code_analysis_dic['firebase'] = firebase_analysis( - checksum, - code_analysis_dic) - # Extract Trackers from Domains - trk = Trackers.Trackers( - checksum, - None, - app_dict['tools_dir']) - trackers = trk.get_trackers_domains_or_deps( - code_analysis_dic['domains'], []) - code_analysis_dic['trackers'] = trackers - fake_bin_dict = { - 'checksec': {}, - 'libraries': [], - 'bin_code_analysis': {}, - 'strings': list(ios_strs['strings']), - 'bin_info': {}, - 'bin_type': code_analysis_dic['source_type'], - 'dylib_analysis': {}, - 'framework_analysis': {}, - } - context = save_get_ctx( - app_dict, - infoplist_dict, - code_analysis_dic, - fake_bin_dict, - all_files, - rescan) - context['appsec'] = get_ios_dashboard(context, True) - context['average_cvss'] = get_avg_cvss( - context['code_analysis']) - template = 'static_analysis/ios_source_analysis.html' - if api: - return context - else: - return render(request, template, context) + return ios_analysis(request, app_dict, rescan, api) else: err = ('File Type not supported, ' 'Only IPA, A, DYLIB and ZIP are supported') diff --git a/mobsf/StaticAnalyzer/views/sast_engine.py b/mobsf/StaticAnalyzer/views/sast_engine.py index 8fc1a65e4a..cef56f57ca 100644 --- a/mobsf/StaticAnalyzer/views/sast_engine.py +++ b/mobsf/StaticAnalyzer/views/sast_engine.py @@ -1,88 +1,118 @@ # -*- coding: utf_8 -*- """SAST engine.""" import logging +import platform from libsast import Scanner +from libsast.core_matcher.pattern_matcher import PatternMatcher +from libsast.core_matcher.choice_matcher import ChoiceMatcher +from libsast.common import get_worker_count from django.conf import settings from mobsf.MobSF.utils import ( - append_scan_status, run_with_timeout, - settings_enabled, ) logger = logging.getLogger(__name__) -def scan(checksum, rule, extensions, paths, ignore_paths=None): - """The libsast scan.""" - try: - options = { - 'match_rules': rule, - 'match_extensions': extensions, - 'ignore_paths': ignore_paths, - 'show_progress': False} - scanner = Scanner(options, paths) - res = run_with_timeout(scanner.scan, settings.SAST_TIMEOUT) - if res and res.get('pattern_matcher'): - return format_findings(res['pattern_matcher'], paths[0]) - except Exception as exp: - msg = 'libsast scan failed' - logger.exception(msg) - append_scan_status(checksum, msg, repr(exp)) - return {} - - -def niap_scan(checksum, rule, extensions, paths, apath, ignore_paths=None): - """NIAP scan.""" - if not settings_enabled('NIAP_ENABLED'): - return {} - try: - msg = 'Running NIAP Analyzer' - logger.info(msg) - append_scan_status(checksum, msg) - if not apath: - apath = '' - options = { - 'choice_rules': rule, - 'alternative_path': apath, - 'choice_extensions': extensions, - 'ignore_paths': ignore_paths, - 'show_progress': False} - scanner = Scanner(options, paths) - res = run_with_timeout(scanner.scan, settings.SAST_TIMEOUT) - if res and res.get('choice_matcher'): - return res['choice_matcher'] - except Exception as exp: - msg = 'NIAP Analyzer Failed' - logger.exception(msg) - append_scan_status(checksum, msg, repr(exp)) - return {} - 
- -def format_findings(findings, root): - """Format findings.""" - for details in findings.values(): - tmp_dict = {} - for file_meta in details['files']: - file_meta['file_path'] = file_meta[ - 'file_path'].replace(root, '', 1) - file_path = file_meta['file_path'] - start = file_meta['match_lines'][0] - end = file_meta['match_lines'][1] - if start == end: - match_lines = start - else: - exp_lines = [] - for i in range(start, end + 1): - exp_lines.append(i) - match_lines = ','.join(str(m) for m in exp_lines) - if file_path not in tmp_dict: - tmp_dict[file_path] = str(match_lines) - elif tmp_dict[file_path].endswith(','): - tmp_dict[file_path] += str(match_lines) - else: - tmp_dict[file_path] += ',' + str(match_lines) - details['files'] = tmp_dict - return findings +def get_multiprocessing_strategy(): + """Get the multiprocessing strategy.""" + if settings.MULTIPROCESSING: + # Settings take precedence + mp = settings.MULTIPROCESSING + elif platform.system() == 'Windows' and settings.ASYNC_ANALYSIS: + # Set to thread on Windows for async analysis + mp = 'thread' + elif settings.ASYNC_ANALYSIS: + # Set to billiard for async analysis + mp = 'billiard' + else: + # Defaults to processpoolexecutor for sync analysis + mp = 'default' + return mp + + +class SastEngine: + def __init__(self, options, path): + self.root = path + mp = get_multiprocessing_strategy() + cpu_core = get_worker_count() + options['cpu_core'] = cpu_core + options['multiprocessing'] = mp + if mp != 'default': + logger.debug( + 'Multiprocessing strategy set to %s with (%d) CPU cores', mp, cpu_core) + self.scan_paths = Scanner(options, [path]).get_scan_files() + self.pattern_matcher = PatternMatcher(options) + + def scan(self): + """Scan the files with given rules.""" + finds = run_with_timeout( + self.pattern_matcher.scan, + settings.SAST_TIMEOUT, + self.scan_paths) + return self.format_findings(finds) + + def read_files(self): + """Read the files.""" + logger.info('Reading file contents for SAST') + return self.pattern_matcher.read_file_contents(self.scan_paths) + + def run_rules(self, file_contents, rule_path): + """Run the rules.""" + finds = run_with_timeout( + self.pattern_matcher.regex_scan, + settings.SAST_TIMEOUT, + file_contents, + rule_path) + return self.format_findings(finds) + + def format_findings(self, findings): + """Format the findings.""" + for details in findings.values(): + tmp_dict = {} + for file_meta in details['files']: + file_meta['file_path'] = file_meta[ + 'file_path'].replace(self.root, '', 1) + file_path = file_meta['file_path'] + start = file_meta['match_lines'][0] + end = file_meta['match_lines'][1] + if start == end: + match_lines = start + else: + exp_lines = [] + for i in range(start, end + 1): + exp_lines.append(i) + match_lines = ','.join(str(m) for m in exp_lines) + if file_path not in tmp_dict: + tmp_dict[file_path] = str(match_lines) + elif tmp_dict[file_path].endswith(','): + tmp_dict[file_path] += str(match_lines) + else: + tmp_dict[file_path] += ',' + str(match_lines) + details['files'] = tmp_dict + return findings + + +class ChoiceEngine: + def __init__(self, options, path): + self.root = path + options['cpu_core'] = get_worker_count() + options['multiprocessing'] = get_multiprocessing_strategy() + self.scan_paths = Scanner(options, [path]).get_scan_files() + self.choice_matcher = ChoiceMatcher(options) + + def read_files(self): + """Read the files.""" + logger.info('Reading file contents for NIAP Scan') + return self.choice_matcher.read_file_contents(self.scan_paths) + + def 
run_rules(self, file_contents, rule_path): + """Run the rules.""" + return run_with_timeout( + self.choice_matcher.regex_scan, + settings.SAST_TIMEOUT, + file_contents, + rule_path) diff --git a/mobsf/static/landing/css/home.css b/mobsf/static/landing/css/home.css index c1b17a80da..a50c9b5a7f 100644 --- a/mobsf/static/landing/css/home.css +++ b/mobsf/static/landing/css/home.css @@ -194,41 +194,56 @@ progress::-webkit-progress-value { /* The snackbar - position it at the bottom and in the middle of the screen */ #snackbar { visibility: hidden; /* Hidden by default. Visible on click */ - min-width: 250px; /* Set a default minimum width */ + width: 100%; /* Make the snackbar full width */ background-color: #333; /* Black background color */ color: #fff; /* White text color */ text-align: center; /* Centered text */ - border-radius: 2px; /* Rounded borders */ + border-radius: 0; /* Remove rounded borders for full width */ padding: 16px; /* Padding */ z-index: 1; /* Add a z-index if needed */ - left: 50%; /* Center the snackbar */ + left: 0; /* Align to the left edge of the screen */ bottom: 30px; /* 30px from the bottom */ + position: fixed; /* Ensure it stays at the bottom of the viewport */ } -/* Show the snackbar when clicking on a button (class added with JavaScript) */ -#snackbar.show { - visibility: visible; /* Show the snackbar */ - /* Add animation: Take 0.5 seconds to fade in and out the snackbar. - However, delay the fade out process for 2.5 seconds */ - -webkit-animation: fadein 0.5s, fadeout 0.5s 2.5s; - animation: fadein 0.5s, fadeout 0.5s 2.5s; +/* Alert bar class */ +.alert-bar { + position: fixed; /* Sticks to the bottom of the screen */ + bottom: 0; /* Aligns to the bottom */ + left: 0; /* Aligns to the left edge */ + width: 100%; /* Spans the full width of the screen */ + background-color: #00000021; /* Red background (customize as needed) */ + color: #fff; /* White text color */ + text-align: center; /* Centered text */ + padding: 16px; /* Padding inside the bar */ + font-size: 16px; /* Font size for the text */ + z-index: 9999; /* Ensure it appears above other content */ + box-shadow: 0 -2px 5px rgba(0, 0, 0, 0.2); /* Adds a subtle shadow */ + visibility: hidden; /* Hidden by default */ + opacity: 0; /* Fully transparent */ + transition: visibility 0s, opacity 0.5s ease-in-out; /* Smooth fade-in/out */ } -/* Animations to fade the snackbar in and out */ - - -@keyframes fadein { - from {bottom: 0; opacity: 0;} - to {bottom: 30px; opacity: 1;} +/* Show the alert bar */ +.alert-bar.show { + visibility: visible; /* Make it visible */ + opacity: 1; /* Fully opaque */ } -/* -@keyframes fadeout { - from {bottom: 30px; opacity: 1;} - to {bottom: 0; opacity: 0;} -} */ +/* Optional: Keyframes for additional animation */ +@keyframes slideUp { + from { + transform: translateY(100%); + } + to { + transform: translateY(0); + } +} -/* Snackbar ends */ +.alert-bar.slide-up { + animation: slideUp 0.5s ease-in-out; +} +/* Alert bar ends */ /* Logo padding ^*/ diff --git a/mobsf/templates/base/base_layout.html b/mobsf/templates/base/base_layout.html index 1ac24f0360..d9d16f2361 100644 --- a/mobsf/templates/base/base_layout.html +++ b/mobsf/templates/base/base_layout.html @@ -22,8 +22,13 @@ + {% block extra_css %} - + {% endblock %} diff --git a/mobsf/templates/general/home.html b/mobsf/templates/general/home.html index fdd5d9976b..5388643184 100644 --- a/mobsf/templates/general/home.html +++ b/mobsf/templates/general/home.html @@ -81,7 +81,7 @@
RECENT SCANS | -
Analysis started! Please wait to be redirected or check recent scans after sometime.
+
Analysis started! Please wait to be redirected or check Recent Scans after some time.
@@ -99,17 +99,17 @@
RECENT SCANS | + /* Loader Spinner */ + .loader { + border: 2px solid #f3f3f3; /* Light gray */ + border-top: 2px solid #3498db; /* Blue */ + border-radius: 50%; + width: 12px; + height: 12px; + animation: spin 1s linear infinite; + display: inline-block; + vertical-align: middle; + } + + @keyframes spin { + 0% { + transform: rotate(0deg); + } + 100% { + transform: rotate(360deg); + } + } + +{% endblock %} +{% block content %} + +{% endblock %} +{% block extra_scripts %} + +{% endblock %} \ No newline at end of file diff --git a/mobsf/templates/static_analysis/android_binary_analysis.html b/mobsf/templates/static_analysis/android_binary_analysis.html index 5f8def15bc..c5133f86e5 100755 --- a/mobsf/templates/static_analysis/android_binary_analysis.html +++ b/mobsf/templates/static_analysis/android_binary_analysis.html @@ -184,6 +184,12 @@
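
As a minimal call sketch for the updated `plist_analysis()` signature: the former `is_source` boolean is replaced by a `scan_type` string, where `'ipa'` selects the binary branch and `'zip'` the source-tree branch shown in the hunk above. The checksum and directory paths here are hypothetical, purely for illustration.

```python
# Hedged sketch of calling the refactored plist_analysis(); assumes a working
# MobSF install. Checksum and paths are hypothetical placeholders.
from mobsf.StaticAnalyzer.views.ios.plist_analysis import plist_analysis

checksum = 'd41d8cd98f00b204e9800998ecf8427e'  # hypothetical scan MD5

# Binary (IPA) scan: looks for Info.plist inside the extracted Payload dir.
ipa_plist = plist_analysis(checksum, '/uploads/app/Payload/', 'ipa')

# Source (ZIP) scan: walks the unzipped source tree for Info.plist.
src_plist = plist_analysis(checksum, '/uploads/source_tree/', 'zip')
```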
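The sast_engine refactor replaces the one-shot `scan()`/`niap_scan()` helpers with `SastEngine` and `ChoiceEngine` classes that select a multiprocessing strategy up front and let callers reuse file contents across rule sets. A minimal usage sketch follows, assuming MobSF and libsast are installed, Django settings (`SAST_TIMEOUT`, `MULTIPROCESSING`, `ASYNC_ANALYSIS`) are configured, and that the option keys mirror the previous `scan()` helper; the rule files and source path are hypothetical.

```python
# Illustrative sketch only; MobSF's real call sites may pass different options.
from mobsf.StaticAnalyzer.views.sast_engine import SastEngine

options = {
    'match_rules': '/path/to/rules/code_rules.yaml',   # hypothetical rule file
    'match_extensions': {'.java', '.kt'},              # hypothetical extensions
    'ignore_paths': set(),
    'show_progress': False,
}

# Construction resolves worker count and multiprocessing strategy
# (settings.MULTIPROCESSING wins; else 'thread' on Windows async runs,
# 'billiard' on other async runs, 'default' otherwise) and collects the
# files to scan via libsast's Scanner.
engine = SastEngine(options, '/path/to/decompiled_src/')

# One-shot scan: pattern-matches the collected files under SAST_TIMEOUT and
# normalizes paths/match lines via format_findings().
findings = engine.scan()

# Or read the file contents once and run additional rule sets over them.
contents = engine.read_files()
extra_findings = engine.run_rules(contents, '/path/to/rules/extra_rules.yaml')
```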
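For reference, the match-line flattening retained in `SastEngine.format_findings` can be shown in isolation. This standalone sketch is not MobSF code and uses hypothetical file paths; it reproduces how a `[start, end]` pair becomes a comma-separated line list and how repeated findings in the same file are appended.

```python
# Standalone sketch of the line flattening done by format_findings().
def flatten_match_lines(start, end):
    # A single-line match stays as one number; a range expands to a
    # comma-separated list of every line it covers.
    if start == end:
        return str(start)
    return ','.join(str(i) for i in range(start, end + 1))


findings_for_file = {}
for file_path, (start, end) in [
    ('src/MainActivity.java', (12, 12)),   # hypothetical findings
    ('src/MainActivity.java', (40, 42)),
]:
    lines = flatten_match_lines(start, end)
    if file_path in findings_for_file:
        findings_for_file[file_path] += ',' + lines
    else:
        findings_for_file[file_path] = lines

print(findings_for_file)  # {'src/MainActivity.java': '12,40,41,42'}
```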