mirror of
git://xwords.git.sourceforge.net/gitroot/xwords/xwords
synced 2025-01-03 23:04:08 +01:00
add test method and implement query() endpoint
This commit is contained in:
parent
e6e93c09ab
commit
fbaa1f139e
2 changed files with 69 additions and 7 deletions
|
@ -1,13 +1,15 @@
|
|||
#!/usr/bin/python
|
||||
|
||||
import mod_python, json, socket, base64
|
||||
import base64, json, mod_python, socket, struct, sys
|
||||
|
||||
PROTOCOL_VERSION = 0
|
||||
PRX_GET_MSGS = 4
|
||||
|
||||
# Figure out whether we're running under Apache/mod_python or standalone
# from the command line; the rest of the module keys off apacheAvailable.
try:
    from mod_python import apache
except ImportError:
    apacheAvailable = False
    print('failed')
else:
    apacheAvailable = True
|
||||
|
||||
def post(req, params):
|
||||
err = 'none'
|
||||
|
@ -16,7 +18,7 @@ def post(req, params):
|
|||
data = base64.b64decode(jobj['data'])
|
||||
|
||||
sock = socket.socket(socket.AF_INET, socket.SOCK_DGRAM)
|
||||
sock.settimeout(3) # seconds
|
||||
sock.settimeout(1) # seconds
|
||||
addr = ("127.0.0.1", 10997)
|
||||
sock.sendto(data, addr)
|
||||
|
||||
|
@ -33,10 +35,70 @@ def post(req, params):
|
|||
jobj = {'err' : err, 'data' : responses}
|
||||
return json.dumps(jobj)
|
||||
|
||||
def query(req, ids):
    """Ask the local relay proxy for pending messages for each connection id.

    req: mod_python request object (unused; kept for the handler signature).
    ids: sequence of connection-name strings.

    Speaks the proxy's framed binary protocol over TCP to 127.0.0.1:10998:
    a header (total length, PROTOCOL_VERSION, PRX_GET_MSGS, id count, all
    multi-byte fields in network byte order), then each id newline-terminated.
    The reply is length-prefixed per game and per message.

    Returns a JSON-encoded dict mapping each id to a list of per-message
    lists, each message base64-encoded.  The dict is empty when the proxy's
    reply doesn't cover every requested id.
    """
    idsLen = sum(len(connName) for connName in ids)

    sock = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
    try:
        sock.settimeout(5)      # seconds
        sock.connect(('127.0.0.1', 10998))

        def recvExactly(nBytes):
            # TCP recv() may return fewer bytes than requested; loop until
            # we have the whole field or the peer closes the stream.
            chunks = []
            remaining = nBytes
            while remaining > 0:
                chunk = sock.recv(remaining)
                if not chunk:
                    raise RuntimeError('socket closed mid-message')
                chunks.append(chunk)
                remaining -= len(chunk)
            return ''.join(chunks)

        # '!' (network order) replaces the old native-order format paired
        # with htons(), which double-swapped on big-endian hosts; sendall()
        # replaces send(), which may write only part of the buffer.
        lenShort = 2 + idsLen + len(ids) + 1
        sock.sendall(struct.pack('!HBBH', lenShort,
                                 PROTOCOL_VERSION, PRX_GET_MSGS,
                                 len(ids)))

        for connName in ids:
            sock.sendall(connName + '\n')

        headerUnpacker = struct.Struct('!2H')
        resLen, nameCount = headerUnpacker.unpack(recvExactly(headerUnpacker.size))
        print('resLen:', resLen, 'nameCount:', nameCount)

        shortUnpacker = struct.Struct('!H')   # hoisted: reused for every field
        msgsLists = {}
        if nameCount == len(ids):
            for ii in range(nameCount):
                perGame = []
                countsThisGame, = shortUnpacker.unpack(recvExactly(shortUnpacker.size))
                print('countsThisGame:', countsThisGame)
                for jj in range(countsThisGame):
                    msgLen, = shortUnpacker.unpack(recvExactly(shortUnpacker.size))
                    print('msgLen:', msgLen)
                    # Each message is wrapped in its own one-element list,
                    # matching the shape the original code produced.
                    msgs = []
                    if msgLen > 0:
                        msg = recvExactly(msgLen)
                        print('msg len:', len(msg))
                        msgs.append(base64.b64encode(msg))
                    perGame.append(msgs)
                msgsLists[ids[ii]] = perGame

        return json.dumps(msgsLists)
    finally:
        sock.close()    # was leaked on every call and exception path before
|
||||
|
||||
|
||||
def dosend(sock, bytes):
    """Write all of *bytes* to *sock*, retrying after partial sends.

    Raises RuntimeError if send() reports 0 bytes written, i.e. the
    connection has gone away before the buffer was fully delivered.
    """
    total = len(bytes)
    offset = 0
    while offset < total:
        nSent = sock.send(bytes[offset:])
        if nSent == 0:
            raise RuntimeError("socket connection broken")
        offset += nSent
|
||||
|
||||
|
||||
def main():
    """Command-line test driver.

    Exercises post() with a canned base64 payload, then query() with the
    connection names supplied on the command line (sys.argv[1:]).
    """
    params = {'data': 'V2VkIE9jdCAxOCAwNjowNDo0OCBQRFQgMjAxNwo='}
    params = json.dumps(params)
    print(post(None, params))
    print(query(None, sys.argv[1:]))
|
||||
|
||||
##############################################################################
|
||||
if __name__ == '__main__':
|
||||
|
|
|
@ -452,7 +452,7 @@ post( RelayConStorage* storage, const XP_U8* msgbuf, XP_U16 len )
|
|||
XP_ASSERT(res == CURLE_OK);
|
||||
CURL* curl = curl_easy_init();
|
||||
|
||||
curl_easy_setopt(curl, CURLOPT_URL, "http://localhost/relay.py/post");
|
||||
curl_easy_setopt(curl, CURLOPT_URL, "http://localhost/xw4/relay.py/post");
|
||||
curl_easy_setopt(curl, CURLOPT_POST, 1L);
|
||||
|
||||
char* curl_params = curl_easy_escape( curl, asStr, strlen(asStr) );
|
||||
|
|
Loading…
Reference in a new issue