// scrape.js — tests for single, multi, and full-tracker scrape over http/udp/ws
import bencode from 'bencode'
import Client from '../index.js'
import common from './common.js'
import commonLib from '../lib/common.js'
import fixtures from 'webtorrent-fixtures'
import fetch from 'cross-fetch-ponyfill'
import test from 'tape'
import { hex2bin } from 'uint8-util'

// Fixed peer id shared by every client in this suite — exactly 20 bytes,
// as the BitTorrent protocol requires.
const peerId = Buffer.from('01234567890123456789')

/**
 * Scrape a single info hash through a Client instance and verify the
 * payload of the emitted 'scrape' event.
 * @param {Object} t - tape test object
 * @param {string} serverType - 'http' | 'udp' | 'ws'
 */
function testSingle (t, serverType) {
  common.createServer(t, serverType, (server, announceUrl) => {
    const client = new Client({
      infoHash: fixtures.leaves.parsedTorrent.infoHash,
      announce: announceUrl,
      peerId,
      port: 6881,
      wrtc: {}
    })

    if (serverType === 'ws') common.mockWebsocketTracker(client)
    // Any error or warning from the client fails the test.
    client.on('error', err => { t.error(err) })
    client.on('warning', err => { t.error(err) })

    client.scrape()

    client.on('scrape', data => {
      t.equal(data.announce, announceUrl)
      t.equal(data.infoHash, fixtures.leaves.parsedTorrent.infoHash)
      t.equal(typeof data.complete, 'number')
      t.equal(typeof data.incomplete, 'number')
      t.equal(typeof data.downloaded, 'number')
      client.destroy()
      server.close(() => {
        t.end()
      })
    })
  })
}

test('http: single info_hash scrape', t => {
  testSingle(t, 'http')
})

test('udp: single info_hash scrape', t => {
  testSingle(t, 'udp')
})

test('ws: single info_hash scrape', t => {
  testSingle(t, 'ws')
})

/**
 * Scrape a single info hash via the static Client.scrape() helper and
 * verify the callback result.
 * @param {Object} t - tape test object
 * @param {string} serverType - 'http' | 'udp' | 'ws'
 */
function clientScrapeStatic (t, serverType) {
  common.createServer(t, serverType, (server, announceUrl) => {
    const client = Client.scrape({
      announce: announceUrl,
      infoHash: fixtures.leaves.parsedTorrent.infoHash,
      wrtc: {}
    }, (err, data) => {
      t.error(err)
      t.equal(data.announce, announceUrl)
      t.equal(data.infoHash, fixtures.leaves.parsedTorrent.infoHash)
      t.equal(typeof data.complete, 'number')
      t.equal(typeof data.incomplete, 'number')
      t.equal(typeof data.downloaded, 'number')
      server.close(() => {
        t.end()
      })
    })
    if (serverType === 'ws') common.mockWebsocketTracker(client)
  })
}

test('http: scrape using Client.scrape static method', t => {
  clientScrapeStatic(t, 'http')
})

test('udp: scrape using Client.scrape static method', t => {
  clientScrapeStatic(t, 'udp')
})

test('ws: scrape using Client.scrape static method', t => {
  clientScrapeStatic(t, 'ws')
})

/**
 * Ensure the callback gets called with an Error when an invalid
 * (unresolvable) announce url is passed to Client.scrape().
 * @param {Object} t - tape test object
 * @param {string} serverType - 'http' | 'udp' | 'ws'
 */
function clientScrapeStaticInvalid (t, serverType) {
  let announceUrl = `${serverType}://invalid.lol`
  if (serverType === 'http') announceUrl += '/announce'

  const client = Client.scrape({
    announce: announceUrl,
    infoHash: fixtures.leaves.parsedTorrent.infoHash,
    wrtc: {}
  }, (err, data) => {
    t.ok(err instanceof Error)
    t.end()
  })
  if (serverType === 'ws') common.mockWebsocketTracker(client)
}

test('http: scrape using Client.scrape static method (invalid url)', t => {
  clientScrapeStaticInvalid(t, 'http')
})

test('udp: scrape using Client.scrape static method (invalid url)', t => {
  clientScrapeStaticInvalid(t, 'udp')
})

test('ws: scrape using Client.scrape static method (invalid url)', t => {
  clientScrapeStaticInvalid(t, 'ws')
})

/**
 * Scrape two info hashes in one Client.scrape() call and verify that the
 * results object is keyed by info hash.
 * @param {Object} t - tape test object
 * @param {string} serverType - 'http' | 'udp'
 */
function clientScrapeMulti (t, serverType) {
  const infoHash1 = fixtures.leaves.parsedTorrent.infoHash
  const infoHash2 = fixtures.alice.parsedTorrent.infoHash

  common.createServer(t, serverType, (server, announceUrl) => {
    Client.scrape({
      infoHash: [infoHash1, infoHash2],
      announce: announceUrl
    }, (err, results) => {
      t.error(err)

      t.equal(results[infoHash1].announce, announceUrl)
      t.equal(results[infoHash1].infoHash, infoHash1)
      t.equal(typeof results[infoHash1].complete, 'number')
      t.equal(typeof results[infoHash1].incomplete, 'number')
      t.equal(typeof results[infoHash1].downloaded, 'number')

      t.equal(results[infoHash2].announce, announceUrl)
      t.equal(results[infoHash2].infoHash, infoHash2)
      t.equal(typeof results[infoHash2].complete, 'number')
      t.equal(typeof results[infoHash2].incomplete, 'number')
      t.equal(typeof results[infoHash2].downloaded, 'number')

      server.close(() => {
        t.end()
      })
    })
  })
}

test('http: MULTI scrape using Client.scrape static method', t => {
  clientScrapeMulti(t, 'http')
})

test('udp: MULTI scrape using Client.scrape static method', t => {
  clientScrapeMulti(t, 'udp')
})

test('server: multiple info_hash scrape (manual http request)', t => {
  t.plan(12)

  // The scrape endpoint expects raw binary (not hex) info hashes in the
  // query string.
  const binaryInfoHash1 = hex2bin(fixtures.leaves.parsedTorrent.infoHash)
  const binaryInfoHash2 = hex2bin(fixtures.alice.parsedTorrent.infoHash)

  common.createServer(t, 'http', async (server, announceUrl) => {
    const scrapeUrl = announceUrl.replace('/announce', '/scrape')

    const url = `${scrapeUrl}?${commonLib.querystringStringify({
      info_hash: [binaryInfoHash1, binaryInfoHash2]
    })}`
    let res
    try {
      res = await fetch(url)
    } catch (err) {
      t.error(err)
      // Bail out: `res` is undefined here, so continuing would throw a
      // TypeError instead of reporting a clean test failure.
      return
    }
    let data = Buffer.from(await res.arrayBuffer())

    t.equal(res.status, 200)

    data = bencode.decode(data)
    t.ok(data.files)
    t.equal(Object.keys(data.files).length, 2)

    t.ok(data.files[binaryInfoHash1])
    t.equal(typeof data.files[binaryInfoHash1].complete, 'number')
    t.equal(typeof data.files[binaryInfoHash1].incomplete, 'number')
    t.equal(typeof data.files[binaryInfoHash1].downloaded, 'number')

    t.ok(data.files[binaryInfoHash2])
    t.equal(typeof data.files[binaryInfoHash2].complete, 'number')
    t.equal(typeof data.files[binaryInfoHash2].incomplete, 'number')
    t.equal(typeof data.files[binaryInfoHash2].downloaded, 'number')

    server.close(() => { t.pass('server closed') })
  })
})

test('server: all info_hash scrape (manual http request)', t => {
  t.plan(9)

  const binaryInfoHash = hex2bin(fixtures.leaves.parsedTorrent.infoHash)

  common.createServer(t, 'http', (server, announceUrl) => {
    const scrapeUrl = announceUrl.replace('/announce', '/scrape')

    // announce a torrent to the tracker
    const client = new Client({
      infoHash: fixtures.leaves.parsedTorrent.infoHash,
      announce: announceUrl,
      peerId,
      port: 6881
    })
    client.on('error', err => { t.error(err) })
    client.on('warning', err => { t.error(err) })

    client.start()

    server.once('start', async () => {
      // now do a scrape of everything by omitting the info_hash param
      let res
      try {
        res = await fetch(scrapeUrl)
      } catch (err) {
        t.error(err)
        // Bail out: `res` is undefined here, so continuing would throw a
        // TypeError instead of reporting a clean test failure.
        return
      }
      let data = Buffer.from(await res.arrayBuffer())

      t.equal(res.status, 200)
      data = bencode.decode(data)
      t.ok(data.files)
      t.equal(Object.keys(data.files).length, 1)

      t.ok(data.files[binaryInfoHash])
      t.equal(typeof data.files[binaryInfoHash].complete, 'number')
      t.equal(typeof data.files[binaryInfoHash].incomplete, 'number')
      t.equal(typeof data.files[binaryInfoHash].downloaded, 'number')

      client.destroy(() => { t.pass('client destroyed') })
      server.close(() => { t.pass('server closed') })
    })
  })
})