in http/get_simple/python/server/http.server/server.py [0:0]
def do_GET(self):
    ### given a source of record batches, this function sends them to the
    ### client using HTTP chunked transfer encoding (or as a single
    ### non-chunked response when the client only speaks HTTP/1.0 or when
    ### chunked encoding is disabled); a client-side sketch of reading the
    ### resulting stream follows the code below.
    source = self.resolve_batches()
    if self.request_version == 'HTTP/1.0':
        self.protocol_version = 'HTTP/1.0'
        chunked = False
    else:
        self.protocol_version = 'HTTP/1.1'
        chunked = chunked_encoding  # flag defined elsewhere in this file
    self.close_connection = True
    self.send_response(200)
    self.send_header('Content-Type', 'application/vnd.apache.arrow.stream')
    ### set these headers if testing with a local browser-based client:
    #self.send_header('Access-Control-Allow-Origin', 'http://localhost:8008')
    #self.send_header('Access-Control-Allow-Methods', 'GET')
    #self.send_header('Access-Control-Allow-Headers', 'Content-Type')
    ### set this header to make browsers download the file with a name and extension:
    #self.send_header('Content-Disposition', 'attachment; filename="data.arrows"')
    if chunked:
        self.send_header('Transfer-Encoding', 'chunked')
    self.end_headers()
    ### if any record batch could be larger than 2 GB, Python's
    ### http.server will error when calling self.wfile.write(),
    ### so you will need to split them into smaller chunks by
    ### using the generate_chunked_buffers() function instead
    ### of generate_buffers() (a sketch of both generator
    ### functions follows this method).
    # for buffer in generate_chunked_buffers(schema, source, int(2e9)):
    for buffer in generate_buffers(schema, source):
        if chunked:
            ### chunked framing: write the chunk size in hexadecimal and a
            ### CRLF, then the chunk data, then a trailing CRLF
            self.wfile.write('{:X}\r\n'.format(len(buffer)).encode('utf-8'))
        self.wfile.write(buffer)
        if chunked:
            self.wfile.write('\r\n'.encode('utf-8'))
        self.wfile.flush()
    if chunked:
        ### signal the end of the chunked response with a zero-length chunk
        self.wfile.write('0\r\n\r\n'.encode('utf-8'))
        self.wfile.flush()
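
### The generate_buffers() and generate_chunked_buffers() generators are
### defined elsewhere in this file and are not shown above. A minimal
### sketch of what they could look like, assuming `schema` is a
### pyarrow.Schema and `source` yields pyarrow.RecordBatch objects (the
### real implementations may differ):

def generate_buffers(schema, source):
    ### yield the Arrow IPC stream piece by piece: the schema message
    ### first, then one encapsulated message per record batch, then the
    ### end-of-stream marker
    yield schema.serialize()
    for batch in source:
        yield batch.serialize()
    yield b'\xff\xff\xff\xff\x00\x00\x00\x00'  # IPC end-of-stream marker

def generate_chunked_buffers(schema, source, max_chunksize):
    ### like generate_buffers(), but slice large record batches into
    ### smaller batches so each yielded buffer stays roughly below
    ### max_chunksize bytes (estimated from the average row size)
    yield schema.serialize()
    for batch in source:
        if batch.nbytes <= max_chunksize or batch.num_rows <= 1:
            yield batch.serialize()
            continue
        rows_per_slice = max(1, int(max_chunksize / (batch.nbytes / batch.num_rows)))
        for offset in range(0, batch.num_rows, rows_per_slice):
            yield batch.slice(offset, rows_per_slice).serialize()
    yield b'\xff\xff\xff\xff\x00\x00\x00\x00'  # IPC end-of-stream marker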
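
### For reference, a client can read the response as a standard Arrow IPC
### stream; urllib transparently decodes the chunked transfer encoding. A
### minimal sketch, assuming pyarrow is installed and the server is
### listening at http://localhost:8000 (adjust the URL to the address this
### server actually binds to):

# import urllib.request
# import pyarrow as pa
#
# with urllib.request.urlopen('http://localhost:8000') as response:
#     reader = pa.ipc.open_stream(response)
#     table = reader.read_all()
# print(table.num_rows)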