2018-09-30 14:12:02 +00:00
|
|
|
import os
|
2018-10-06 15:01:03 +00:00
|
|
|
import asyncio
|
2018-10-09 01:29:51 +00:00
|
|
|
import pytest
|
2018-10-13 20:32:56 +00:00
|
|
|
from p2pfs import *
|
2018-10-09 05:27:01 +00:00
|
|
|
import time
|
2018-10-11 00:28:54 +00:00
|
|
|
from tests.conftest import fmd5, setup_tracker_and_peers, TEST_SMALL_FILE, TEST_LARGE_FILE, \
|
2018-10-10 03:53:03 +00:00
|
|
|
TEST_SMALL_FILE_SIZE, TEST_LARGE_FILE_SIZE, TEST_SMALL_FILE_1
|
2018-10-09 01:29:51 +00:00
|
|
|
# Apply the pytest-asyncio marker to every coroutine test in this module,
# so each `async def test_*` below runs inside an event loop.
pytestmark = pytest.mark.asyncio
|
2018-09-30 14:12:02 +00:00
|
|
|
|
2018-10-08 17:51:02 +00:00
|
|
|
|
2018-10-10 23:34:52 +00:00
|
|
|
def cleanup_files(files):
    """Best-effort deletion of every path in *files*.

    Paths that do not exist are silently skipped, so this is safe to call
    from a ``finally`` block regardless of how far a test progressed.
    """
    for path in files:
        try:
            os.remove(path)
        except FileNotFoundError:
            # The file was never created (or already removed) -- ignore.
            pass
|
|
|
|
|
|
|
|
|
2018-10-13 21:35:25 +00:00
|
|
|
async def test_connect_refused(unused_tcp_port):
    """Connecting to a port with no listener must raise ConnectionRefusedError."""
    peer = Peer()
    # Bind to an OS-assigned free port; the peer only needs to be running.
    await peer.start(('localhost', 0))
    try:
        with pytest.raises(ConnectionRefusedError):
            # unused_tcp_port is guaranteed free by the pytest fixture,
            # so nothing is accepting connections there.
            await peer.connect(('localhost', unused_tcp_port))
    finally:
        # Fix: the original never stopped the peer, leaking its server
        # socket; every other test in this module stops its peers.
        await peer.stop()
|
2018-10-02 01:47:53 +00:00
|
|
|
|
|
|
|
|
2018-10-09 01:29:51 +00:00
|
|
|
async def test_start_stop(unused_tcp_port):
    """Smoke test: a tracker plus one peer start up and shut down cleanly."""
    tracker, peers = await setup_tracker_and_peers(1, unused_tcp_port)
    await tracker.stop()
    await asyncio.gather(*(p.stop() for p in peers))
|
2018-10-08 18:00:45 +00:00
|
|
|
|
2018-10-09 01:29:51 +00:00
|
|
|
|
|
|
|
async def test_publish_refuse(unused_tcp_port):
    """publish() must reject missing files and duplicate publications.

    Expects FileNotFoundError for a nonexistent path and FileExistsError
    when the same file is published twice.
    """
    tracker, peers = await setup_tracker_and_peers(1, unused_tcp_port)
    try:
        # Create a small scratch file to publish.
        with open('test_publish_refuse', 'wb') as fout:
            fout.write(os.urandom(100))
        await peers[0].publish('test_publish_refuse')
        with pytest.raises(FileNotFoundError):
            await peers[0].publish('__not_existed_file')
        with pytest.raises(FileExistsError):
            await peers[0].publish('test_publish_refuse')
    finally:
        # Fix: the original deleted the scratch file only on the success
        # path, leaving it behind whenever an assertion above failed.
        cleanup_files(['test_publish_refuse'])
        await tracker.stop()
        await asyncio.gather(*[peer.stop() for peer in peers])
|
2018-10-08 18:10:39 +00:00
|
|
|
|
|
|
|
|
2018-10-09 01:29:51 +00:00
|
|
|
async def test_publish(unused_tcp_port):
    """Two peers publish files; tracker and peer catalogues must reflect both."""
    tracker, peers = await setup_tracker_and_peers(2, unused_tcp_port)
    try:
        # First, peer0 publishes the small file.
        await peers[0].publish(TEST_SMALL_FILE)
        catalogue = tracker.file_list()
        assert TEST_SMALL_FILE in catalogue
        assert catalogue[TEST_SMALL_FILE]['size'] == TEST_SMALL_FILE_SIZE
        # The other peer sees it through list_file().
        catalogue = await peers[1].list_file()
        assert TEST_SMALL_FILE in catalogue

        # Next, peer1 publishes the large file; both entries must coexist.
        await peers[1].publish(TEST_LARGE_FILE)
        catalogue = tracker.file_list()
        assert TEST_LARGE_FILE in catalogue and TEST_SMALL_FILE in catalogue
        assert catalogue[TEST_LARGE_FILE]['size'] == TEST_LARGE_FILE_SIZE
        catalogue = await peers[0].list_file()
        assert TEST_LARGE_FILE in catalogue and TEST_SMALL_FILE in catalogue
    finally:
        await tracker.stop()
        await asyncio.gather(*(peer.stop() for peer in peers))
|
2018-10-09 01:29:51 +00:00
|
|
|
|
|
|
|
|
|
|
|
async def test_download(unused_tcp_port):
    """End-to-end download scenarios with five peers.

    Covers: downloading a small file (with a progress hook), a large file
    from a single source, a large file once multiple peers hold it, and
    two concurrent downloads of the large file.
    """
    tracker, peers = await setup_tracker_and_peers(5, unused_tcp_port)
    # Files created during the test; removed in the finally block.
    to_cleanup = set()
    try:
        # peer0 publishes a small_file and peer1 publishes a large file
        await peers[0].publish(TEST_SMALL_FILE)
        file_list = tracker.file_list()
        assert TEST_SMALL_FILE in file_list
        assert file_list[TEST_SMALL_FILE]['size'] == TEST_SMALL_FILE_SIZE
        file_list = await peers[1].list_file()
        assert TEST_SMALL_FILE in file_list

        await peers[1].publish(TEST_LARGE_FILE)
        file_list = tracker.file_list()
        assert TEST_LARGE_FILE in file_list and TEST_SMALL_FILE in file_list
        assert file_list[TEST_LARGE_FILE]['size'] == TEST_LARGE_FILE_SIZE
        file_list = await peers[0].list_file()
        assert TEST_LARGE_FILE in file_list and TEST_SMALL_FILE in file_list

        # Progress callback handed to download(); stores the most recent
        # (chunk_num, total_size) pair on the function object so the test
        # can inspect the final report after the download completes.
        def reporthook(chunk_num, chunk_size, total_size):
            reporthook.value = (chunk_num, total_size)

        # download small file
        to_cleanup.add('downloaded_' + TEST_SMALL_FILE)
        await peers[1].download(TEST_SMALL_FILE, 'downloaded_' + TEST_SMALL_FILE, reporthook=reporthook)
        assert os.path.exists('downloaded_' + TEST_SMALL_FILE)
        assert fmd5(TEST_SMALL_FILE) == fmd5('downloaded_' + TEST_SMALL_FILE)
        # Small file is expected to arrive as a single chunk with reported
        # total 1000 -- presumably TEST_SMALL_FILE_SIZE; confirm in conftest.
        assert reporthook.value == (1, 1000)

        # download large file from single source
        to_cleanup.add('downloaded_' + TEST_LARGE_FILE + '_0')
        await peers[0].download(TEST_LARGE_FILE, 'downloaded_' + TEST_LARGE_FILE + '_0')
        assert os.path.exists('downloaded_' + TEST_LARGE_FILE + '_0')
        assert fmd5(TEST_LARGE_FILE) == fmd5('downloaded_' + TEST_LARGE_FILE + '_0')

        # download large file from multiple sources
        # (peers 0 and 1 both hold the full file at this point)
        to_cleanup.add('downloaded_' + TEST_LARGE_FILE + '_2')
        await peers[2].download(TEST_LARGE_FILE, 'downloaded_' + TEST_LARGE_FILE + '_2')
        assert os.path.exists('downloaded_' + TEST_LARGE_FILE + '_2')
        assert fmd5(TEST_LARGE_FILE) == fmd5('downloaded_' + TEST_LARGE_FILE + '_2')

        # download large file concurrently
        to_cleanup.add('downloaded_' + TEST_LARGE_FILE + '_3')
        to_cleanup.add('downloaded_' + TEST_LARGE_FILE + '_4')
        download_task_1 = peers[3].download(TEST_LARGE_FILE, 'downloaded_' + TEST_LARGE_FILE + '_3')
        download_task_2 = peers[4].download(TEST_LARGE_FILE, 'downloaded_' + TEST_LARGE_FILE + '_4')
        await asyncio.gather(download_task_1, download_task_2)
        assert os.path.exists('downloaded_' + TEST_LARGE_FILE + '_3')
        assert fmd5(TEST_LARGE_FILE) == fmd5('downloaded_' + TEST_LARGE_FILE + '_3')
        assert os.path.exists('downloaded_' + TEST_LARGE_FILE + '_4')
        assert fmd5(TEST_LARGE_FILE) == fmd5('downloaded_' + TEST_LARGE_FILE + '_4')
    finally:
        # Remove downloaded artifacts, then shut everything down.
        cleanup_files(to_cleanup)
        await tracker.stop()
        await asyncio.gather(*[peer.stop() for peer in peers])
|
2018-10-09 04:26:42 +00:00
|
|
|
|
|
|
|
|
2018-10-09 05:27:01 +00:00
|
|
|
async def test_delay(unused_tcp_port):
    """An artificial serving delay must make a download measurably slower.

    Downloads the same-sized file twice from peer0 -- once without delay,
    once with a 1s delay -- and asserts the delayed transfer takes longer.
    """
    tracker, peers = await setup_tracker_and_peers(2, unused_tcp_port)
    to_cleanup = set()
    try:
        # Fix: the publish/assert preamble originally ran BEFORE the try
        # block, so an early failure leaked the tracker and both peers.
        # peer0 publishes two equally sized small files.
        await peers[0].publish(TEST_SMALL_FILE)
        file_list = tracker.file_list()
        assert TEST_SMALL_FILE in file_list
        assert file_list[TEST_SMALL_FILE]['size'] == TEST_SMALL_FILE_SIZE
        file_list = await peers[1].list_file()
        assert TEST_SMALL_FILE in file_list

        await peers[0].publish(TEST_SMALL_FILE_1)
        file_list = tracker.file_list()
        assert TEST_SMALL_FILE in file_list
        assert file_list[TEST_SMALL_FILE_1]['size'] == TEST_SMALL_FILE_SIZE
        file_list = await peers[1].list_file()
        assert TEST_SMALL_FILE_1 in file_list

        # Baseline: time an undelayed download. time.monotonic() is used
        # for intervals -- unlike time.time() it cannot jump backwards.
        start = time.monotonic()
        to_cleanup.add('downloaded_' + TEST_SMALL_FILE)
        await peers[1].download(TEST_SMALL_FILE, 'downloaded_' + TEST_SMALL_FILE)
        assert os.path.exists('downloaded_' + TEST_SMALL_FILE)
        assert fmd5(TEST_SMALL_FILE) == fmd5('downloaded_' + TEST_SMALL_FILE)
        download_time = time.monotonic() - start

        # Repeat with a 1-second artificial delay on the serving peer.
        start = time.monotonic()
        peers[0].set_delay(1)
        to_cleanup.add('downloaded_' + TEST_SMALL_FILE_1)
        await peers[1].download(TEST_SMALL_FILE_1, 'downloaded_' + TEST_SMALL_FILE_1)
        download_time_with_delay = time.monotonic() - start
        assert download_time_with_delay > download_time
        # Reset the delay so teardown is not slowed down.
        peers[0].set_delay(0)
    finally:
        cleanup_files(to_cleanup)
        await tracker.stop()
        await asyncio.gather(*[peer.stop() for peer in peers])
|
2018-10-09 05:27:01 +00:00
|
|
|
|
|
|
|
|
2018-10-09 04:26:42 +00:00
|
|
|
async def test_peer_disconnect(unused_tcp_port):
    """After a peer stops, its published file must vanish from the tracker."""
    tracker, peers = await setup_tracker_and_peers(1, unused_tcp_port)
    peer = peers[0]
    try:
        await peer.publish(TEST_SMALL_FILE)
        assert TEST_SMALL_FILE in tracker.file_list()

        # Shut the peer down, then yield to the event loop long enough
        # for the tracker to observe the disconnect before we re-check.
        await peer.stop()
        await asyncio.sleep(1)
        assert TEST_SMALL_FILE not in tracker.file_list()
    finally:
        await tracker.stop()
|
2018-10-10 20:42:41 +00:00
|
|
|
|
|
|
|
|
|
|
|
async def test_peer_download_disconnect(unused_tcp_port):
    """A download must survive the original seeder disconnecting mid-transfer.

    peer0 publishes the large file, peer1 fetches a full copy, then peer0
    is stopped while peer2 downloads -- peer2 must complete via peer1.
    """
    tracker, peers = await setup_tracker_and_peers(3, unused_tcp_port)
    # Downloaded files to remove in the finally block.
    to_cleanup = set()
    try:
        await peers[0].publish(TEST_LARGE_FILE)
        assert TEST_LARGE_FILE in tracker.file_list()

        # download large file from single source, giving peer_1 a complete copy
        to_cleanup.add('downloaded_' + TEST_LARGE_FILE + '_1')
        await peers[1].download(TEST_LARGE_FILE, 'downloaded_' + TEST_LARGE_FILE + '_1')
        assert os.path.exists('downloaded_' + TEST_LARGE_FILE + '_1')
        assert fmd5(TEST_LARGE_FILE) == fmd5('downloaded_' + TEST_LARGE_FILE + '_1')

        # Slow peer_1 down so the fallback path is observably slower.
        peers[1].set_delay(0.1)

        # stop the peer_0, peer_2 should continue to download because peer_1 has all chunks of the file
        # but the speed will be noticeably slower because peer_1 has delay
        async def stop_peer_after(peer, delay):
            # Helper: stop *peer* after *delay* seconds, concurrently with a download.
            await asyncio.sleep(delay)
            await peer.stop()
        # run download and stop peer task concurrently
        to_cleanup.add('downloaded_' + TEST_LARGE_FILE + '_2')
        await asyncio.gather(peers[2].download(TEST_LARGE_FILE, 'downloaded_' + TEST_LARGE_FILE + '_2'),
                             stop_peer_after(peers[0], 1))
        assert os.path.exists('downloaded_' + TEST_LARGE_FILE + '_2')
        assert fmd5(TEST_LARGE_FILE) == fmd5('downloaded_' + TEST_LARGE_FILE + '_2')
    finally:
        cleanup_files(to_cleanup)
        await tracker.stop()
        # peers[0] was already stopped by stop_peer_after, so skip it here.
        await asyncio.gather(*[peer.stop() for peer in peers[1:]])
|
2018-10-11 00:28:54 +00:00
|
|
|
|
|
|
|
|
2018-10-11 22:08:39 +00:00
|
|
|
async def test_tracker_download_disconnect(unused_tcp_port):
    """A download already in progress must survive the tracker going away.

    The tracker is stopped mid-transfer; since the serving peer stays
    alive, peer1's download should still complete and verify by checksum.
    """
    tracker, peers = await setup_tracker_and_peers(2, unused_tcp_port)
    # Downloaded files to remove in the finally block.
    to_cleanup = set()
    try:
        await peers[0].publish(TEST_LARGE_FILE)
        assert TEST_LARGE_FILE in tracker.file_list()

        # stop tracker in the download process, should still successfully download since peer is still alive
        async def stop_after(obj, delay):
            # Helper: stop *obj* (the tracker) after *delay* seconds.
            await asyncio.sleep(delay)
            await obj.stop()
        # run download and stop task concurrently
        to_cleanup.add('downloaded_' + TEST_LARGE_FILE + '_2')
        await asyncio.gather(peers[1].download(TEST_LARGE_FILE, 'downloaded_' + TEST_LARGE_FILE + '_2'),
                             stop_after(tracker, 1))
        assert os.path.exists('downloaded_' + TEST_LARGE_FILE + '_2')
        assert fmd5(TEST_LARGE_FILE) == fmd5('downloaded_' + TEST_LARGE_FILE + '_2')
    finally:
        cleanup_files(to_cleanup)
        # NOTE(review): the tracker is stopped by stop_after only when the
        # gather actually runs; if publish or the first assert fails, the
        # tracker is left running here -- confirm whether that is intended.
        await asyncio.gather(*[peer.stop() for peer in peers])
|
2018-10-11 22:08:39 +00:00
|
|
|
|
|
|
|
|
2018-10-11 00:28:54 +00:00
|
|
|
async def test_peer_restart(unused_tcp_port):
    """A peer can disconnect, reconnect and republish the same file.

    The tracker must drop the file while the peer is away and list it
    again after the peer reconnects and republishes.
    """
    tracker, peers = await setup_tracker_and_peers(1, unused_tcp_port)
    try:
        await peers[0].publish(TEST_SMALL_FILE)
        assert TEST_SMALL_FILE in tracker.file_list()
        await peers[0].disconnect()
        # Yield to the event loop so the tracker can register the disconnect.
        await asyncio.sleep(0.5)
        assert TEST_SMALL_FILE not in tracker.file_list()
        await peers[0].connect(('localhost', unused_tcp_port))
        await peers[0].publish(TEST_SMALL_FILE)
        assert TEST_SMALL_FILE in tracker.file_list()
    finally:
        # Fix: teardown was missing entirely -- the tracker and peer were
        # never stopped. Shut down as every other test in this module does.
        await tracker.stop()
        await asyncio.gather(*[peer.stop() for peer in peers])
|
2018-10-11 00:32:45 +00:00
|
|
|
|
|
|
|
|
|
|
|
async def test_tracker_restart(unused_tcp_port):
|
|
|
|
tracker, peers = await setup_tracker_and_peers(2, unused_tcp_port)
|
|
|
|
await tracker.stop()
|
2018-10-11 00:34:52 +00:00
|
|
|
assert not tracker.is_running()
|
2018-10-11 01:02:52 +00:00
|
|
|
await asyncio.sleep(0.5)
|
|
|
|
is_all_connected = await asyncio.gather(*[peer.is_connected() for peer in peers])
|
|
|
|
assert not any(is_all_connected)
|
2018-10-13 20:49:12 +00:00
|
|
|
with pytest.raises(TrackerNotConnectedError):
|
|
|
|
await peers[0].publish(TEST_SMALL_FILE)
|
|
|
|
with pytest.raises(TrackerNotConnectedError):
|
|
|
|
await peers[1].list_file()
|
2018-10-11 00:32:45 +00:00
|
|
|
await tracker.start(('localhost', unused_tcp_port))
|
2018-10-11 00:34:52 +00:00
|
|
|
assert tracker.is_running()
|