diff --git a/actions/http.py b/actions/http.py
new file mode 100644
index 0000000..7a4da28
--- /dev/null
+++ b/actions/http.py
@@ -0,0 +1,602 @@
+
+# This file was gutted from scapy, as the module shipped there doesn't quite work for this project's needs.
+
+# This file is part of Scapy
+# See http://www.secdev.org/projects/scapy for more information
+# Copyright (C) 2019 Gabriel Potter
+# Copyright (C) 2012 Luca Invernizzi
+# Copyright (C) 2012 Steeve Barbeau
+# This file is a modified version of the former scapy_http plugin.
+# It was reimplemented for scapy 2.4.3+ using sessions, stream handling.
+# Original Authors : Steeve Barbeau, Luca Invernizzi
+# Originally published under a GPLv2 license
+
+import os
+import re
+import struct
+import subprocess
+
+from scapy.compat import plain_str, bytes_encode, \
+    gzip_compress, gzip_decompress
+from scapy.config import conf
+from scapy.consts import WINDOWS
+from scapy.error import warning
+from scapy.fields import StrField
+from scapy.packet import Packet, bind_layers, bind_bottom_up, Raw
+from scapy.utils import get_temp_file, ContextManagerSubprocess
+
+from scapy.layers.inet import TCP, TCP_client
+
+from scapy.modules import six
+
+if "http" not in conf.contribs:
+    conf.contribs["http"] = {}
+    conf.contribs["http"]["auto_compression"] = True
+
+# https://en.wikipedia.org/wiki/List_of_HTTP_header_fields
+
+GENERAL_HEADERS = [
+    "Cache-Control",
+    "Connection",
+    "Permanent",
+    "Content-Length",
+    "Content-MD5",
+    "Content-Type",
+    "Date",
+    "Keep-Alive",
+    "Pragma",
+    "Upgrade",
+    "Via",
+    "Warning"
+]
+
+COMMON_UNSTANDARD_GENERAL_HEADERS = [
+    "X-Request-ID",
+    "X-Correlation-ID"
+]
+
+REQUEST_HEADERS = [
+    "A-IM",
+    "Accept",
+    "Accept-Charset",
+    "Accept-Encoding",
+    "Accept-Language",
+    "Accept-Datetime",
+    "Access-Control-Request-Method",
+    "Access-Control-Request-Headers",
+    "Authorization",
+    "Cookie",
+    "Expect",
+    "Forwarded",
+    "From",
+    "Host",
+    "HTTP2-Settings",
+    "If-Match",
+    "If-Modified-Since",
+    "If-None-Match",
+    "If-Range",
+    "If-Unmodified-Since",
+    "Max-Forwards",
+    "Origin",
+    "Proxy-Authorization",
+    "Range",
+    "Referer",
+    "TE",
+    "User-Agent"
+]
+
+COMMON_UNSTANDARD_REQUEST_HEADERS = [
+    "Upgrade-Insecure-Requests",
+    "X-Requested-With",
+    "DNT",
+    "X-Forwarded-For",
+    "X-Forwarded-Host",
+    "X-Forwarded-Proto",
+    "Front-End-Https",
+    "X-Http-Method-Override",
+    "X-ATT-DeviceId",
+    "X-Wap-Profile",
+    "Proxy-Connection",
+    "X-UIDH",
+    "X-Csrf-Token",
+    "Save-Data",
+]
+
+RESPONSE_HEADERS = [
+    "Access-Control-Allow-Origin",
+    "Access-Control-Allow-Credentials",
+    "Access-Control-Expose-Headers",
+    "Access-Control-Max-Age",
+    "Access-Control-Allow-Methods",
+    "Access-Control-Allow-Headers",
+    "Accept-Patch",
+    "Accept-Ranges",
+    "Age",
+    "Allow",
+    "Alt-Svc",
+    "Content-Disposition",
+    "Content-Encoding",
+    "Content-Language",
+    "Content-Location",
+    "Content-Range",
+    "Delta-Base",
+    "ETag",
+    "Expires",
+    "IM",
+    "Last-Modified",
+    "Link",
+    "Location",
+    "Permanent",
+    "P3P",
+    "Proxy-Authenticate",
+    "Public-Key-Pins",
+    "Retry-After",
+    "Server",
+    "Set-Cookie",
+    "Strict-Transport-Security",
+    "Trailer",
+    "Transfer-Encoding",
+    "Tk",
+    "Vary",
+    "WWW-Authenticate",
+    "X-Frame-Options",
+]
+
+COMMON_UNSTANDARD_RESPONSE_HEADERS = [
+    "Content-Security-Policy",
+    "X-Content-Security-Policy",
+    "X-WebKit-CSP",
+    "Refresh",
+    "Status",
+    "Timing-Allow-Origin",
+    "X-Content-Duration",
+    "X-Content-Type-Options",
+    "X-Powered-By",
+    "X-UA-Compatible",
+    "X-XSS-Protection",
+]
+
+def _strip_header_name(name):
+    
"""Takes a header key (i.e., "Host" in "Host: www.google.com", + and returns a stripped representation of it + """ + return plain_str(name.strip()).replace("-", "_") + +def _header_line(name, val): + """Creates a HTTP header line""" + # Python 3.4 doesn't support % on bytes + return bytes_encode(name) + b": " + bytes_encode(val) + +def _parse_headers(s): + headers = s.split(b"\r\n") + headers_found = {} + for header_line in headers: + try: + key, value = header_line.split(b':', 1) + except ValueError: + continue + header_key = _strip_header_name(key).lower() + # headers_found[header_key] = (key, value.strip()) # The first big change occurs here, using the header_line instead of value.strip() + headers_found[header_key] = (key, header_line .strip()) + return headers_found + +def _parse_headers_and_body(s): + ''' Takes a HTTP packet, and returns a tuple containing: + _ the first line (e.g., "GET ...") + _ the headers in a dictionary + _ the body + ''' + crlfcrlf = b"\r\n\r\n" + crlfcrlfIndex = s.find(crlfcrlf) + if crlfcrlfIndex != -1: + headers = s[:crlfcrlfIndex + len(crlfcrlf)] + body = s[crlfcrlfIndex + len(crlfcrlf):] + else: + headers = s + body = b'' + first_line, headers = headers.split(b"\r\n", 1) + return first_line.strip(), _parse_headers(headers), body + +def _dissect_headers(obj, s): + """Takes a HTTP packet as the string s, and populates the scapy layer obj + (either HTTPResponse or HTTPRequest). Returns the first line of the + HTTP packet, and the body + """ + first_line, headers, body = _parse_headers_and_body(s) + for f in obj.fields_desc: + # We want to still parse wrongly capitalized fields + stripped_name = _strip_header_name(f.name).lower() + try: + _, value = headers.pop(stripped_name) + except KeyError: + continue + obj.setfieldval(f.name, value) + if headers: + headers = {key: value for key, value in six.itervalues(headers)} + obj.setfieldval('Unknown_Headers', headers) + return first_line, body + +class _HTTPContent(Packet): + # https://developer.mozilla.org/fr/docs/Web/HTTP/Headers/Transfer-Encoding + def _get_encodings(self): + encodings = [] + if isinstance(self, HTTPResponse): + if self.Transfer_Encoding: + encodings += [plain_str(x).strip().lower() for x in + plain_str(self.Transfer_Encoding).split(",")] + if self.Content_Encoding: + encodings += [plain_str(x).strip().lower() for x in + plain_str(self.Content_Encoding).split(",")] + return encodings + + def hashret(self): + # The only field both Answers and Responses have in common + return self.Http_Version + + def post_dissect(self, s): + if not conf.contribs["http"]["auto_compression"]: + return s + encodings = self._get_encodings() + # Un-chunkify + if "chunked" in encodings: + data = b"" + while s: + length, _, body = s.partition(b"\r\n") + try: + length = int(length, 16) + except ValueError: + # Not a valid chunk. Ignore + break + else: + load = body[:length] + if body[length:length + 2] != b"\r\n": + # Invalid chunk. 
Ignore + break + s = body[length + 2:] + data += load + if not s: + s = data + # Decompress + try: + if "deflate" in encodings: + import zlib + s = zlib.decompress(s) + elif "gzip" in encodings: + s = gzip_decompress(s) + elif "compress" in encodings: + import lzw + s = lzw.decompress(s) + except Exception: + # Cannot decompress - probably incomplete data + pass + return s + + def post_build(self, pkt, pay): + if not conf.contribs["http"]["auto_compression"]: + return pkt + pay + encodings = self._get_encodings() + # Compress + if "deflate" in encodings: + import zlib + pay = zlib.compress(pay) + elif "gzip" in encodings: + pay = gzip_compress(pay) + elif "compress" in encodings: + import lzw + pay = lzw.compress(pay) + return pkt + pay + + def self_build(self, field_pos_list=None): + ''' Takes an HTTPRequest or HTTPResponse object, and creates its + string representation.''' + if not isinstance(self.underlayer, HTTP): + warning( + "An HTTPResponse/HTTPRequest should always be below an HTTP" + ) + p = b"" + # Walk all the fields, in order + for f in self.fields_desc: + if f.name == "Unknown_Headers": + continue + # Get the field value + val = self.getfieldval(f.name) + if not val: + # Not specified. Skip + continue + # Fields used in the first line have a space as a separator, + # whereas headers are terminated by a new line + if isinstance(self, HTTPRequest): + if f.name in ['Method', 'Path']: + separator = b' ' + else: + separator = b'\r\n' + elif isinstance(self, HTTPResponse): + if f.name in ['Http_Version', 'Status_Code']: + separator = b' ' + else: + separator = b'\r\n' + # Add the field into the packet + p = f.addfield(self, p, val + separator) + # Handle Unknown_Headers + if self.Unknown_Headers: + headers_text = b"" + for name, value in six.iteritems(self.Unknown_Headers): + headers_text += _header_line(name, value) + b"\r\n" + p = self.get_field("Unknown_Headers").addfield( + self, p, headers_text + ) + # The packet might be empty, and in that case it should stay empty. 
+ if p: + # Add an additional line after the last header + p = f.addfield(self, p, b'\r\n') + return p + + def guess_payload_class(self, payload): + """Detect potential payloads + """ + if self.Connection and b"Upgrade" in self.Connection: + from scapy.contrib.http2 import H2Frame + return H2Frame + return super(_HTTPContent, self).guess_payload_class(payload) + +class _HTTPHeaderField(StrField): + """Modified StrField to handle HTTP Header names""" + __slots__ = ["real_name"] + + def __init__(self, name, default): + self.real_name = name + name = _strip_header_name(name) + StrField.__init__(self, name, default, fmt="H") + +def _generate_headers(*args): + """Generate the header fields based on their name""" + # Order headers + all_headers = [] + for headers in args: + all_headers += headers + # Generate header fields + results = [] + for h in sorted(all_headers): + results.append(_HTTPHeaderField(h, None)) + return results + +# Create Request and Response packets +class HTTPRequest(_HTTPContent): + name = "HTTPRequest" + fields_desc = [ + # First line + _HTTPHeaderField("Method", "GET"), + _HTTPHeaderField("Path", "/"), + _HTTPHeaderField("Http-Version", "HTTP/1.1"), + # Headers + ] + ( + _generate_headers( + GENERAL_HEADERS, + REQUEST_HEADERS, + COMMON_UNSTANDARD_GENERAL_HEADERS, + COMMON_UNSTANDARD_REQUEST_HEADERS + ) + ) + [ + _HTTPHeaderField("Unknown-Headers", None), + ] + + def do_dissect(self, s): + """From the HTTP packet string, populate the scapy object""" + first_line, body = _dissect_headers(self, s) + try: + Method, Path, HTTPVersion = re.split(br"\s+", first_line, 2) + self.setfieldval('Method', Method) + self.setfieldval('Path', Path) + self.setfieldval('Http_Version', HTTPVersion) + except ValueError: + pass + if body: + self.raw_packet_cache = s[:-len(body)] + else: + self.raw_packet_cache = s + return body + + def mysummary(self): + return self.sprintf( + "%HTTPRequest.Method% %HTTPRequest.Path% " + "%HTTPRequest.Http_Version%" + ) + +# TODO: decide to keep or not +class HTTPResponse(_HTTPContent): + name = "HTTP Response" + fields_desc = [ + # First line + _HTTPHeaderField("Http-Version", "HTTP/1.1"), + _HTTPHeaderField("Status-Code", "200"), + _HTTPHeaderField("Reason-Phrase", "OK"), + # Headers + ] + ( + _generate_headers( + GENERAL_HEADERS, + RESPONSE_HEADERS, + COMMON_UNSTANDARD_GENERAL_HEADERS, + COMMON_UNSTANDARD_RESPONSE_HEADERS + ) + ) + [ + _HTTPHeaderField("Unknown-Headers", None), + ] + + def answers(self, other): + return HTTPRequest in other + + def do_dissect(self, s): + ''' From the HTTP packet string, populate the scapy object ''' + first_line, body = _dissect_headers(self, s) + try: + HTTPVersion, Status, Reason = re.split(br"\s+", first_line, 2) + self.setfieldval('Http_Version', HTTPVersion) + self.setfieldval('Status_Code', Status) + self.setfieldval('Reason_Phrase', Reason) + except ValueError: + pass + if body: + self.raw_packet_cache = s[:-len(body)] + else: + self.raw_packet_cache = s + return body + + def mysummary(self): + return self.sprintf( + "%HTTPResponse.Http_Version% %HTTPResponse.Status_Code% " + "%HTTPResponse.Reason_Phrase%" + ) + +class HTTP(Packet): + name = "HTTP 1" + fields_desc = [] + show_indent = 0 + + @classmethod + def dispatch_hook(cls, _pkt=None, *args, **kargs): + if _pkt and len(_pkt) >= 9: + from scapy.contrib.http2 import _HTTP2_types, H2Frame + # To detect a valid HTTP2, we check that the type is correct + # that the Reserved bit is set and length makes sense. 
+ while _pkt: + if len(_pkt) < 9: + # Invalid total length + return cls + if ord(_pkt[3:4]) not in _HTTP2_types: + # Invalid type + return cls + length = struct.unpack("!I", b"\0" + _pkt[:3])[0] + 9 + if length > len(_pkt): + # Invalid length + return cls + sid = struct.unpack("!I", _pkt[5:9])[0] + if sid >> 31 != 0: + # Invalid Reserved bit + return cls + _pkt = _pkt[length:] + return H2Frame + return cls + + # tcp_reassemble is used by TCPSession in session.py + @classmethod + def tcp_reassemble(cls, data, metadata): + detect_end = metadata.get("detect_end", None) + is_unknown = metadata.get("detect_unknown", True) + if not detect_end or is_unknown: + metadata["detect_unknown"] = False + http_packet = HTTP(data) + # Detect packing method + if not isinstance(http_packet.payload, _HTTPContent): + return http_packet + length = http_packet.Content_Length + if length is not None: + # The packet provides a Content-Length attribute: let's + # use it. When the total size of the frags is high enough, + # we have the packet + length = int(length) + # Subtract the length of the "HTTP*" layer + if http_packet.payload.payload or length == 0: + http_length = len(data) - len(http_packet.payload.payload) + detect_end = lambda dat: len(dat) - http_length >= length + else: + # The HTTP layer isn't fully received. + detect_end = lambda dat: False + metadata["detect_unknown"] = True + else: + # It's not Content-Length based. It could be chunked + encodings = http_packet[HTTP].payload._get_encodings() + chunked = ("chunked" in encodings) + if chunked: + detect_end = lambda dat: dat.endswith(b"\r\n\r\n") + else: + # If neither Content-Length nor chunked is specified, + # it means it's the TCP packet that contains the data, + # or that the information hasn't been given yet. + detect_end = lambda dat: metadata.get("tcp_end", False) + metadata["detect_unknown"] = True + metadata["detect_end"] = detect_end + if detect_end(data): + return http_packet + else: + if detect_end(data): + http_packet = HTTP(data) + return http_packet + + def guess_payload_class(self, payload): + """Decides if the payload is an HTTP Request or Response, or + something else. + """ + try: + prog = re.compile( + br"^(?:OPTIONS|GET|HEAD|POST|PUT|DELETE|TRACE|CONNECT) " + br"(?:.+?) " + br"HTTP/\d\.\d$" + ) + crlfIndex = payload.index(b"\r\n") + req = payload[:crlfIndex] + result = prog.match(req) + if result: + return HTTPRequest + else: + prog = re.compile(br"^HTTP/\d\.\d \d\d\d .*$") + result = prog.match(req) + if result: + return HTTPResponse + except ValueError: + # Anything that isn't HTTP but on port 80 + pass + return Raw + + +class GenevaHTTPRequest(): + """ + Defines a Geneva HTTP request, where we can replace, set, delete, or insert to existing fields. 
+    """
+    def __init__(self, content):
+        """
+        content: Raw string of the request (Bytes)
+        Creates a Geneva HTTP request
+        """
+        self.original_content = content # Save off the original just in case
+        self.parsed_content = HTTPRequest(content)
+
+    def replace(self, header, index, content):
+        # replace contents at index
+        if header not in self.parsed_content["HTTPRequest"].fields:
+            # TODO: throw some sort of error, this header doesn't exist
+            return None
+
+        if index+len(content) > len(self.parsed_content["HTTPRequest"].fields[header]):
+            # TODO: throw some sort of error, this index is too large
+            return None
+
+        self.parsed_content["HTTPRequest"].fields[header] = self.parsed_content["HTTPRequest"].fields[header][0:index] \
+                                                            + content + \
+                                                            self.parsed_content["HTTPRequest"].fields[header][index+len(content):]
+        return str(self)
+
+    def delete(self, header, index, num):
+        # delete num characters from header beginning at index
+        if header not in self.parsed_content["HTTPRequest"].fields:
+            # TODO: throw some sort of error, this header doesn't exist
+            return None
+
+        if index+num > len(self.parsed_content["HTTPRequest"].fields[header]):
+            # TODO: throw some sort of error, this index is too large
+            return None
+
+        self.parsed_content["HTTPRequest"].fields[header] = self.parsed_content["HTTPRequest"].fields[header][0:index] \
+                                                            + self.parsed_content["HTTPRequest"].fields[header][index+num:]
+        return str(self)
+
+    def set_header(self, header, content):
+        # Completely replaces a header with content. We don't care if the
+        # field already exists.
+
+        self.parsed_content["HTTPRequest"].fields[header] = content
+
+        return str(self)
+
+    def __str__(self):
+        return str(self.parsed_content)
diff --git a/actions/layer.py b/actions/layer.py
index 1f16991..be8e99a 100644
--- a/actions/layer.py
+++ b/actions/layer.py
@@ -6,6 +6,8 @@
 import urllib.parse
 
 from scapy.all import IP, RandIP, UDP, DNS, DNSQR, Raw, TCP, fuzz
 
+from actions.http import HTTPRequest
+
 class Layer():
     """
@@ -601,6 +603,81 @@ class UDPLayer(Layer):
         }
 
 
+class HTTPRequestLayer(Layer):
+    """
+    Defines an interface to access parsed HTTP fields
+    """
+
+    name = "HTTPRequest"
+    protocol = HTTPRequest
+
+    _fields = [
+        "Method",
+        "Path",
+        "Http_Version",
+        "Host"
+    ]
+    fields = _fields
+
+    def __init__(self, layer):
+        """
+        Initializes the HTTP layer.
+        """
+        Layer.__init__(self, layer)
+        self.request = HTTPRequest(bytes(layer)) # TODO: I don't like this
+        self.getters = {
+            'load' : self.get_load,
+        }
+        self.setters = {
+            'load' : self.set_load,
+        }
+        self.generators = {
+            'load' : self.gen_load,
+        }
+
+    def get(self, field):
+        """
+        Override get, since the HTTPRequest doesn't immediately make its fields known
+        """
+        assert field in self.fields
+        if field in self.getters:
+            return self.getters[field](field)
+        return getattr(self.request, field)
+
+    def set(self, packet, field, value):
+        """
+        Override set, since the HTTPRequest doesn't immediately make its fields known
+        """
+        print('entering set, but I dont like this')
+        print(packet)
+        print(field)
+        print(value)
+        assert field in self.fields
+        base = field.split("-")[0]
+        if field in self.setters:
+            self.setters[field](packet, field, value)
+
+        # Dual field accessors are fields that require two pieces of information
+        # to retrieve them (for example, "options-eol"). These are delimited by
+        # a dash "-".
+ elif "-" in field and base in self.setters: + self.setters[base](packet, field, value) + else: + setattr(self.layer, field, value) + + # Request the packet be reparsed to confirm the value is stable + # XXX Temporarily disabling the reconstitution check due to scapy bug (#2034) + #assert bytes(self.protocol(bytes(self.layer))) == bytes(self.layer) + + + @classmethod + def name_matches(cls, name): + """ + Override the name parsing to check for HTTP REQUEST here. + """ + return name.upper() in ["HTTPREQUEST"] + + class DNSLayer(Layer): """ Defines an interface to access DNS header fields. diff --git a/actions/packet.py b/actions/packet.py index f40b6d1..5b8bc90 100644 --- a/actions/packet.py +++ b/actions/packet.py @@ -3,13 +3,18 @@ import random import actions.layer +from scapy.layers.http import HTTP as ScapyHTTP, HTTPRequest as ScapyHTTPRequest +from scapy.all import IP, TCP +from actions.http import HTTPRequest as GenevaHTTPRequest _SUPPORTED_LAYERS = [ actions.layer.IPLayer, actions.layer.TCPLayer, actions.layer.UDPLayer, actions.layer.DNSLayer, - actions.layer.DNSQRLayer + actions.layer.DNSQRLayer, + #actions.layer.HTTPLayer, + actions.layer.HTTPRequestLayer ] SUPPORTED_LAYERS = _SUPPORTED_LAYERS @@ -23,7 +28,13 @@ class Packet(): """ Initializes the packet object. """ - self.packet = packet + if(ScapyHTTPRequest in packet.layers()): + # We have a ScapyHTTPRequest. Instead, let's convert to our HttpRequest. + # Overwrite the current ScapyHTTPRequest with a GenevaHTTPRequest + self.packet = packet + self.packet[ScapyHTTPRequest] = GenevaHTTPRequest(bytes(packet[ScapyHTTPRequest])) + else: + self.packet = packet self.layers = self.setup_layers() self.sleep = 0 @@ -131,7 +142,7 @@ class Packet(): """ layers = {} for layer in self.read_layers(): - layers[layer.name.upper()] = layer + layers[layer.name] = layer return layers def copy(self): @@ -179,7 +190,11 @@ class Packet(): if self.haslayer("TCP"): del self.packet["TCP"].chksum - return self.layers[str_protocol].set(self.packet, field, value) + lay = self.layers[str_protocol].set(self.packet, field, value) + + print(lay) + + return lay def get(self, str_protocol, field): """ diff --git a/actions/strategy.py b/actions/strategy.py index 613f029..c7330d7 100644 --- a/actions/strategy.py +++ b/actions/strategy.py @@ -85,4 +85,5 @@ class Strategy(object): # If no action tree was applicable, send the packet unimpeded if not ran: packets_to_send = [packet] + return packets_to_send diff --git a/actions/tamper.py b/actions/tamper.py index 480cb33..696e901 100644 --- a/actions/tamper.py +++ b/actions/tamper.py @@ -15,44 +15,82 @@ import actions.utils from actions.layer import DNSLayer import random +from actions.http import HTTPRequest as HTTPRequest +import urllib.parse +import string # All supported tamper primitives -SUPPORTED_PRIMITIVES = ["corrupt", "replace", "add", "compress"] +SUPPORTED_PRIMITIVES = ["corrupt", "replace", "add", "compress", "insert", "delete"] class TamperAction(Action): """ Defines the TamperAction for Geneva. 
""" - def __init__(self, environment_id=None, field=None, tamper_type=None, tamper_value=None, tamper_proto="TCP"): + def __init__(self, environment_id=None, field=None, tamper_type=None, tamper_value=None, tamper_proto="TCP", start_index=None, end_index=None, encoded_payload=None): Action.__init__(self, "tamper", "both") self.field = field self.tamper_value = tamper_value self.tamper_proto = actions.utils.string_to_protocol(tamper_proto) self.tamper_proto_str = tamper_proto self.tamper_type = tamper_type + self.start_index = start_index + self.end_index = end_index + self.encoded_payload = encoded_payload + if encoded_payload: + self.decoded_payload = bytes(urllib.parse.unquote(encoded_payload), "UTF-8") def tamper(self, packet, logger): """ Edits a given packet according to the action settings. """ + # Return packet untouched if not applicable if not packet.haslayer(self.tamper_proto_str): return packet - + # Retrieve the old value of the field for logging purposes old_value = packet.get(self.tamper_proto_str, self.field) - + new_value = self.tamper_value # If corrupting the packet field, generate a value for it try: if self.tamper_type == "corrupt": - new_value = packet.gen(self.tamper_proto_str, self.field) + if self.tamper_proto == HTTPRequest: + packet = corrupt(packet, self.field, self.start_index, self.end_index) + del packet["IP"].chksum + del packet["IP"].len + del packet["TCP"].chksum + del packet["TCP"].dataofs + return packet + else: + new_value = packet.gen(self.tamper_proto_str, self.field) elif self.tamper_type == "add": new_value = int(self.tamper_value) + int(old_value) elif self.tamper_type == "compress": return packet.dns_decompress(logger) + elif self.tamper_type == "insert": + packet = insert(packet, self.field, self.start_index, self.decoded_payload) + + del packet["IP"].chksum + del packet["IP"].len + del packet["TCP"].chksum + del packet["TCP"].dataofs + return packet + elif self.tamper_type == "replace": + packet = replace(packet, self.field, self.start_index, self.decoded_payload) + + del packet["IP"].chksum + del packet["IP"].len + del packet["TCP"].chksum + del packet["TCP"].dataofs + + return packet + elif self.tamper_type == "delete": + packet = delete(packet, self.field, self.start_index, self.end_index) + return packet + except NotImplementedError: # If a primitive does not support the type of packet given return packet @@ -62,7 +100,7 @@ class TamperAction(Action): logger.debug(" - Tampering %s field `%s` (%s) by %s (to %s)" % (self.tamper_proto_str, self.field, str(old_value), self.tamper_type, str(new_value))) - + print("about to call set") packet.set(self.tamper_proto_str, self.field, new_value) return packet @@ -82,11 +120,19 @@ class TamperAction(Action): """ s = Action.__str__(self) if self.tamper_type == "corrupt": - s += "{%s:%s:%s}" % (self.tamper_proto_str, self.field, self.tamper_type) - elif self.tamper_type in ["replace", "add"]: - s += "{%s:%s:%s:%s}" % (self.tamper_proto_str, self.field, self.tamper_type, self.tamper_value) + if self.tamper_proto == HTTPRequest: + s += "{%s:%s:%s:%s}" % (self.tamper_proto_str, self.field, self.tamper_type, str(self.start_index) + "-" + str(self.end_index)) + else: + s += "{%s:%s:%s}" % (self.tamper_proto_str, self.field, self.tamper_type) + elif self.tamper_type in ["replace", "add", "insert"]: + if self.tamper_proto == HTTPRequest: + s += "{%s:%s:%s:%s:%s}" % (self.tamper_proto_str, self.field, self.tamper_type, str(self.start_index), self.encoded_payload) + else: + s += "{%s:%s:%s:%s}" % 
(self.tamper_proto_str, self.field, self.tamper_type, self.tamper_value) elif self.tamper_type == "compress": s += "{%s:%s:compress}" % ("DNS", "qd", ) + elif self.tamper_type == "delete": + s += "{%s:%s:%s:%s}" % (self.tamper_proto_str, self.field, self.tamper_type, str(self.start_index) + "-" + str(self.end_index)) return s @@ -105,28 +151,129 @@ class TamperAction(Action): # Count the number of params in this given string num_parameters = string.count(":") - # If num_parameters is greater than 3, it's not a valid tamper action - if num_parameters > 3 or num_parameters < 2: + # If num_parameters is greater than 4, it's not a valid tamper action + if num_parameters > 4 or num_parameters < 2: msg = "Cannot parse tamper action %s" % string logger.error(msg) raise Exception(msg) params = string.split(":") - if num_parameters == 3: - self.tamper_proto_str, self.field, self.tamper_type, self.tamper_value = params + if num_parameters == 4: + # HTTP replace or insert + self.tamper_proto_str, self.field, self.tamper_type, self.start_index, self.encoded_payload = params + self.start_index = int(self.start_index) self.tamper_proto = actions.utils.string_to_protocol(self.tamper_proto_str) - if "options" in self.field: - if not self.tamper_value: - self.tamper_value = '' # An empty string instead of an empty byte literal + self.decoded_payload = bytes(urllib.parse.unquote(self.encoded_payload), "UTF-8") - # tamper_value might be parsed as a string despite being an integer in most cases. - # Try to parse it out here - try: - if "load" not in self.field: - self.tamper_value = int(self.tamper_value) - except: - pass - else: + elif num_parameters == 3: + # HTTP corrupt or delete could be here, check for those first + self.tamper_proto_str = params[0] + self.tamper_proto = actions.utils.string_to_protocol(self.tamper_proto_str) + if self.tamper_proto_str == "HTTPRequest": + self.field = params[1] + self.tamper_type = params[2] + indices = params[3].split('-') + self.start_index = int(indices[0]) + self.end_index = int(indices[1]) + else: + self.tamper_proto_str, self.field, self.tamper_type, self.tamper_value = params + if "options" in self.field: + if not self.tamper_value: + self.tamper_value = '' # An empty string instead of an empty byte literal + # tamper_value might be parsed as a string despite being an integer in most cases. 
+                # Try to parse it out here
+                try:
+                    if "load" not in self.field:
+                        self.tamper_value = int(self.tamper_value)
+                except:
+                    pass
+
+        elif num_parameters == 2:
             self.tamper_proto_str, self.field, self.tamper_type = params
             self.tamper_proto = actions.utils.string_to_protocol(self.tamper_proto_str)
         return True
+
+
+def insert(packet, header, index, content):
+    """
+    Helper method to insert content into packet[header][index]
+    """
+    if header not in packet["HTTPRequest"].fields:
+        # TODO: throw some sort of error, this header doesn't exist
+        return None
+
+    if index > len(packet["HTTPRequest"].fields[header]):
+        # TODO: throw some sort of error, this index is too large
+        return None
+    packet["HTTPRequest"].fields[header] = packet["HTTPRequest"].fields[header][0:index] \
+                                           + content + \
+                                           packet["HTTPRequest"].fields[header][index:]
+
+    return packet
+
+def replace(packet, header, index, content):
+    """
+    Helper method to replace packet[header][index] with content
+    """
+    if header not in packet["HTTPRequest"].fields:
+        # TODO: throw some sort of error, this header doesn't exist
+        return None
+
+    if index+len(content) > len(packet["HTTPRequest"].fields[header]):
+        # TODO: throw some sort of error, this index is too large
+        return None
+
+    packet["HTTPRequest"].fields[header] = packet["HTTPRequest"].fields[header][0:index] \
+                                           + content + \
+                                           packet["HTTPRequest"].fields[header][index+len(content):]
+    return packet
+
+
+def delete(packet, header, start_index, end_index):
+    """
+    Helper method to remove the characters at header[start_index] to header[end_index]
+    """
+    if header not in packet["HTTPRequest"].fields:
+        # TODO: throw some sort of error, this header doesn't exist
+        return None
+
+    if end_index+1 > len(packet["HTTPRequest"].fields[header]):
+        # TODO: throw some sort of error, this index is too large
+        return None
+
+    packet["HTTPRequest"].fields[header] = packet["HTTPRequest"].fields[header][0:start_index] \
+                                           + packet["HTTPRequest"].fields[header][end_index+1:]
+    return packet
+
+def corrupt(packet, header, start_index, end_index):
+    """
+    Helper method to replace the characters at header[start_index] through header[end_index]
+    with random printable characters
+    """
+    if header not in packet["HTTPRequest"].fields:
+        # TODO: throw some sort of error, this header doesn't exist
+        return None
+
+    if end_index+1 > len(packet["HTTPRequest"].fields[header]):
+        # TODO: throw some sort of error, this index is too large
+        return None
+
+    old_field = packet["HTTPRequest"].fields[header]
+    tampered_field = old_field[0:start_index]
+
+    for i in range(start_index, end_index + 1):
+        # Ensure we're getting a new character at each corrupted position
+        new_character = bytes(random.choice(string.printable), "UTF-8")
+        while new_character == old_field[i:i+1]:
+            new_character = bytes(random.choice(string.printable), "UTF-8")
+        tampered_field = tampered_field + new_character
+
+    # tampered_field is already a bytes object - do not re-encode it
+    packet["HTTPRequest"].fields[header] = tampered_field + old_field[end_index+1:]
+
+    return packet
diff --git a/actions/utils.py b/actions/utils.py
index a795442..b39834c 100644
--- a/actions/utils.py
+++ b/actions/utils.py
@@ -14,6 +14,7 @@
 import actions.trigger
 import actions.packet
 
 from scapy.all import TCP, IP, UDP, rdpcap
+from actions.http import HTTPRequest
 
 import netifaces
@@ -153,6 +154,8 @@ def string_to_protocol(protocol):
         return IP
     elif protocol.upper() == "UDP":
         return UDP
+    elif protocol.upper() == "HTTPREQUEST":
+        return HTTPRequest
 
 
 def get_id():
diff --git a/engine.py b/engine.py
index 0f6cf08..fbecd03 100644
--- a/engine.py
+++ b/engine.py
@@ -177,10 +177,14 @@ class WindowsEngine(GenericEngine):
         try:
             self.logger.debug("Sending packet %s", str(packet))
             # Convert the packet to a bytearray so memoryview can edit the underlying memory
+            print("About to convert")
             pack = bytearray(bytes(packet.packet))
+            print("about to send")
+            print(pack)
             # Don't recalculate checksum since sometimes we will have already changed it
             self.divert.send(pydivert.Packet(memoryview(pack), self.interface, dir), recalculate_checksum=False)
-        except Exception:
+        except Exception as e:
+            print(e)
             self.logger.exception("Error in engine mysend.")
 
     def handle_outbound_packet(self, divert_packet):
@@ -197,7 +201,7 @@ class WindowsEngine(GenericEngine):
 
         # Send all of the packets we've collected to send
         for out_packet in packets_to_send:
-            self.mysend(out_packet, Direction.OUTBOUND)
+            self.mysend(out_packet, Direction.OUTBOUND)
 
     def handle_inbound_packet(self, divert_packet):
         """
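
For reference, a minimal usage sketch of the GenevaHTTPRequest wrapper added in actions/http.py; it is not part of the diff. The raw request bytes and the "Host" header below are made-up examples, and the sketch assumes the dissected header value is stored as the full stripped header line, which is the behaviour introduced in _parse_headers above.

    from actions.http import GenevaHTTPRequest

    # Hypothetical request, for illustration only
    raw = b"GET / HTTP/1.1\r\nHost: example.com\r\nAccept: */*\r\n\r\n"
    req = GenevaHTTPRequest(raw)

    # Overwrite the entire stored "Host" header line
    req.set_header("Host", b"Host: example.org")

    # Patch 3 bytes of the stored header line in place, starting at index 6
    req.replace("Host", 6, b"EXA")

    # Drop 2 characters from the stored header line, starting at index 0
    req.delete("Host", 0, 2)

    # Each mutator returns the re-serialized request; str(req) does the same
    print(str(req))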
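
The serialized grammar for the new HTTPRequest tamper primitives is easiest to see by example. A small sketch follows (also not part of the diff); the header names, indices, and payload are invented, and the exact prefix of the serialized string comes from the base Action.__str__, so it is only indicated in the comments.

    from actions.tamper import TamperAction

    # corrupt/delete carry a "start-end" index range as the final element,
    # serializing to something like {HTTPRequest:Host:corrupt:0-4}
    corrupt_host = TamperAction(field="Host", tamper_type="corrupt",
                                tamper_proto="HTTPRequest",
                                start_index=0, end_index=4)

    # insert/replace carry a start index plus a URL-encoded payload,
    # serializing to something like {HTTPRequest:Path:insert:1:%2E%2F}
    insert_path = TamperAction(field="Path", tamper_type="insert",
                               tamper_proto="HTTPRequest",
                               start_index=1, encoded_payload="%2E%2F")

    print(corrupt_host)
    print(insert_path)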