"""
                     The LLVM Compiler Infrastructure

This file is distributed under the University of Illinois Open Source
License. See LICENSE.TXT for details.

This module provides asyncore channels used within the LLDB test
framework.
"""

from __future__ import print_function
from __future__ import absolute_import


# System modules
import asyncore
import socket
import struct

# Third-party modules
from six.moves import cPickle

# LLDB modules

class UnpicklingForwardingReaderChannel(asyncore.dispatcher):
    """Provides an unpickling, forwarding asyncore dispatch channel reader.

    Inferior dotest.py processes with side-channel-based test results will
    send test result event data in a pickled format, one event at a time.
    This class supports reconstructing the pickled data and forwarding it
    on to its final destination.

    Each packet on the channel is framed as a 4-byte, network-byte-order
    ("!I") unsigned integer giving the payload length, followed by that
    many bytes of pickled payload (see the illustrative sender sketch
    after this class).

    The bulk of this class is devoted to reading and parsing out
    the payload bytes.
    """
    def __init__(self, file_object, async_map, forwarding_func):
        asyncore.dispatcher.__init__(self, sock=file_object, map=async_map)

        self.header_contents = b""
        self.packet_bytes_remaining = 0
        self.reading_header = True
        self.ibuffer = b''
        self.forwarding_func = forwarding_func
        if forwarding_func is None:
            # This whole class is useless if we do nothing with the
            # unpickled results.
            raise Exception("forwarding function must be set")

    def deserialize_payload(self):
        """Unpickles the collected input buffer bytes and forwards."""
        if len(self.ibuffer) > 0:
            self.forwarding_func(cPickle.loads(self.ibuffer))
            self.ibuffer = b''
    def consume_header_bytes(self, data):
        """Consumes header bytes from the front of data.
        @param data the incoming data stream bytes
        @return any data leftover after consuming header bytes.
        """
        # We're done if there is no content.
        if not data or (len(data) == 0):
            return None

        full_header_len = 4

        assert len(self.header_contents) < full_header_len

        bytes_avail = len(data)
        bytes_needed = full_header_len - len(self.header_contents)
        header_bytes_avail = min(bytes_needed, bytes_avail)
        self.header_contents += data[:header_bytes_avail]
        if len(self.header_contents) == full_header_len:
            # End of header: the header is a network-byte-order ("!I")
            # unsigned int giving the payload length in bytes.
            self.packet_bytes_remaining = struct.unpack(
                "!I", self.header_contents)[0]
            self.header_contents = b""
            self.reading_header = False
            return data[header_bytes_avail:]

        # If we made it here, we've exhausted the data and
        # we're still parsing header content.
        return None
    def consume_payload_bytes(self, data):
        """Consumes payload bytes from the front of data.
        @param data the incoming data stream bytes
        @return any data leftover after consuming remaining payload bytes.
        """
        if not data or (len(data) == 0):
            # We're done and there's nothing to do.
            return None

        data_len = len(data)
        if data_len <= self.packet_bytes_remaining:
            # We're consuming all the data provided.
            self.ibuffer += data
            self.packet_bytes_remaining -= data_len

            # If we're no longer waiting for payload bytes,
            # we flip back to parsing header bytes and we
            # unpickle the payload contents.
            if self.packet_bytes_remaining < 1:
                self.reading_header = True
                self.deserialize_payload()

            # We're done, no more data left.
            return None
        else:
            # We're only consuming a portion of the data since
            # the data contains more than the payload amount.
            self.ibuffer += data[:self.packet_bytes_remaining]
            data = data[self.packet_bytes_remaining:]

            # We now move on to reading the header.
            self.reading_header = True
            self.packet_bytes_remaining = 0

            # And we can deserialize the payload.
            self.deserialize_payload()

            # Return the remaining data.
            return data
    def handle_read(self):
        data = self.recv(8192)
        # print('driver socket READ: %d bytes' % len(data))

        while data and (len(data) > 0):
            # If we're reading the header, gather header bytes.
            if self.reading_header:
                data = self.consume_header_bytes(data)
            else:
                data = self.consume_payload_bytes(data)

    def handle_close(self):
        # print("socket reader: closing port")
        self.close()

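
# The helper below is an illustrative sketch only, not part of the LLDB test
# framework API. It shows how an inferior-side sender could frame a single
# pickled test event in the format the reader above expects: a 4-byte,
# network-byte-order ("!I") length header followed by the pickled payload.
# The function name and the use of a plain socket here are assumptions made
# for illustration.
def _example_send_event(sock, event):
    """Pickles event and writes it to sock with the length-prefixed framing."""
    payload = cPickle.dumps(event)
    sock.sendall(struct.pack("!I", len(payload)) + payload)
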
class UnpicklingForwardingListenerChannel(asyncore.dispatcher):
    """Provides a socket listener asyncore channel for unpickling/forwarding.

    This channel listens on a socket port (pass port 0 to let the OS select
    an available port). Any client that connects will have an
    UnpicklingForwardingReaderChannel handle communication over the
    connection.

    The dotest parallel test runners, when collecting test results, open the
    test results side channel over a socket. This channel handles connections
    from inferiors back to the test runner. Each worker fires up a listener
    for each inferior invocation. This keeps the asyncore.loop() usage
    simple, which is one of the reasons for implementing with asyncore.
    This listener shuts down once a single connection is made to it.
    (See the illustrative usage sketch at the end of this module.)
    """
    def __init__(self, async_map, host, port, backlog_count, forwarding_func):
        asyncore.dispatcher.__init__(self, map=async_map)
        self.create_socket(socket.AF_INET, socket.SOCK_STREAM)
        self.set_reuse_addr()
        self.bind((host, port))
        self.address = self.socket.getsockname()
        self.listen(backlog_count)
        self.handler = None
        self.async_map = async_map
        self.forwarding_func = forwarding_func
        if forwarding_func is None:
            # This whole class is useless if we do nothing with the
            # unpickled results.
            raise Exception("forwarding function must be set")

    def handle_accept(self):
        (sock, addr) = self.socket.accept()
        if sock and addr:
            # print('Incoming connection from %s' % repr(addr))
            self.handler = UnpicklingForwardingReaderChannel(
                sock, self.async_map, self.forwarding_func)

    def handle_close(self):
        self.close()
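
# Illustrative usage sketch only: how a test-runner side might wire up the
# listener and pump events. The function name, the localhost/port-0 choice,
# and the backlog of 5 are assumptions for illustration; the real runner
# integrates this channel into its own asyncore map and loop.
def _example_run_listener(forwarding_func):
    """Listens on an OS-selected port and forwards each unpickled event."""
    async_map = {}
    listener = UnpicklingForwardingListenerChannel(
        async_map, "localhost", 0, 5, forwarding_func)
    # listener.address holds the (host, port) actually bound; a runner would
    # pass this port to the inferior dotest.py invocation so it can connect.
    print("side channel listening on %s:%d" % listener.address)
    asyncore.loop(map=async_map)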