Commit 2c34583

http tracker: support multiple info_hash scrapes
1 parent 0a51e59 commit 2c34583

1 file changed (+32 -17)

server.js

Lines changed: 32 additions & 17 deletions
@@ -114,23 +114,22 @@ Server.prototype._onHttpRequest = function (req, res) {
   var s = req.url.split('?')
   var params = querystring.parse(s[1])
 
-  // TODO: support multiple info_hash parameters as a concatenation of individual requests
-  var infoHash = params.info_hash && bytewiseDecodeURIComponent(params.info_hash).toString('hex')
+  if (s[0] === '/announce') {
+    var infoHash = typeof params.info_hash === 'string' &&
+      bytewiseDecodeURIComponent(params.info_hash).toString('hex')
+    var port = Number(params.port)
+    var peerId = typeof params.peer_id === 'string' &&
+      bytewiseDecodeURIComponent(params.peer_id).toString('utf8')
 
-  if (!infoHash) return error('missing info_hash')
-  if (infoHash.length !== 40) return error('invalid info_hash')
+    if (!infoHash) return error('invalid info_hash')
+    if (infoHash.length !== 40) return error('invalid info_hash')
+    if (!port) return error('invalid port')
+    if (!peerId) return error('invalid peer_id')
 
-  if (s[0] === '/announce' || s[0] === '/') {
     var ip = self._trustProxy
       ? req.headers['x-forwarded-for'] || req.connection.remoteAddress
       : req.connection.remoteAddress.replace(REMOVE_IPV6_RE, '') // force ipv4
-    var port = Number(params.port)
     var addr = ip + ':' + port
-    var peerId = params.peer_id && bytewiseDecodeURIComponent(params.peer_id).toString('utf8')
-
-    if (!port) return error('missing port')
-    if (!peerId) return error('missing peer_id')
-
     var swarm = self._getSwarm(infoHash)
     var peer = swarm.peers[addr]
 
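A quick sketch (not part of the commit) of what the new typeof checks in the announce branch guard against: Node's querystring.parse returns a string when a parameter appears once and an array when it is repeated, so the old truthy check on params.info_hash would have passed an array straight to bytewiseDecodeURIComponent. Parameter values below are placeholders.

var querystring = require('querystring')

// A well-formed announce: each parameter appears once, so every value is a string.
var ok = querystring.parse('info_hash=aaa&port=6881&peer_id=bbb')
console.log(typeof ok.info_hash)          // 'string'

// A request that repeats info_hash: the value is now an array, so the new
// typeof guard short-circuits to false and the handler replies error('invalid info_hash').
var bad = querystring.parse('info_hash=aaa&info_hash=bbb&port=6881')
console.log(typeof bad.info_hash)         // 'object' (an array)
console.log(Array.isArray(bad.info_hash)) // true
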
@@ -219,18 +218,34 @@ Server.prototype._onHttpRequest = function (req, res) {
     res.end(bencode.encode(response))
 
   } else if (s[0] === '/scrape') { // unofficial scrape message
-    var swarm = self._getSwarm(infoHash)
-    var response = { files : { } }
+    if (typeof params.info_hash === 'string') {
+      params.info_hash = [ params.info_hash ]
+    }
+    if (!Array.isArray(params.info_hash)) return error('invalid info_hash')
 
-    response.files[params.info_hash] = {
-      complete: swarm.complete,
-      incomplete: swarm.incomplete,
-      downloaded: swarm.complete, // TODO: this only provides a lower-bound
+    var response = {
+      files: {},
       flags: {
         min_request_interval: self._intervalMs
       }
     }
 
+    params.info_hash.some(function (infoHash) {
+      var infoHashHex = bytewiseDecodeURIComponent(infoHash).toString('hex')
+      if (infoHashHex.length !== 40) {
+        error('invalid info_hash')
+        return true // early return
+      }
+
+      var swarm = self._getSwarm(infoHashHex)
+
+      response.files[infoHash] = {
+        complete: swarm.complete,
+        incomplete: swarm.incomplete,
+        downloaded: swarm.complete // TODO: this only provides a lower-bound
+      }
+    })
+
     res.end(bencode.encode(response))
   }
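
For illustration (assumed client behaviour, not code from this commit): a multi-hash scrape simply repeats the info_hash query parameter, so querystring.parse yields an array, which is what the Array.isArray guard and the .some() loop above expect; a single-hash scrape still arrives as a string and is wrapped in a one-element array first. The hash values below are placeholders; real clients send 20 URL-encoded bytes per info_hash.

var querystring = require('querystring')

// Single-hash scrape: info_hash parses to a string and gets wrapped in an array.
var single = querystring.parse('info_hash=onlyhash')
if (typeof single.info_hash === 'string') single.info_hash = [ single.info_hash ]
console.log(single.info_hash) // [ 'onlyhash' ]

// Multi-hash scrape: the repeated parameter is already an array, so the
// Array.isArray guard passes and the .some() callback runs once per hash,
// bailing out early only when a hash fails the 40-character hex check.
var multi = querystring.parse('info_hash=hashA&info_hash=hashB')
console.log(multi.info_hash)  // [ 'hashA', 'hashB' ]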
