Skip to content

Commit 227ddb2

Browse files
committed
perf: drop simple-get
1 parent 6864ef9 commit 227ddb2

File tree

4 files changed

+196
-175
lines changed

4 files changed

+196
-175
lines changed

lib/client/http-tracker.js

Lines changed: 56 additions & 45 deletions
Original file line numberDiff line numberDiff line change
@@ -2,7 +2,7 @@ import arrayRemove from 'unordered-array-remove'
22
import bencode from 'bencode'
33
import clone from 'clone'
44
import Debug from 'debug'
5-
import get from 'simple-get'
5+
import fetch from 'cross-fetch-ponyfill'
66
import Socks from 'socks'
77

88
import common from '../common.js'
@@ -12,6 +12,14 @@ import compact2string from 'compact2string'
1212
const debug = Debug('bittorrent-tracker:http-tracker')
1313
const HTTP_SCRAPE_SUPPORT = /\/(announce)[^/]*$/
1414

15+
// Build an AbortController whose signal fires after `ms` milliseconds.
// The timer is unref'd (when the runtime supports it, i.e. Node) so a
// pending timeout never keeps the process alive on its own.
function abortTimeout (ms) {
  const controller = new AbortController()
  const timer = setTimeout(() => controller.abort(), ms)
  timer.unref?.() // no-op in browsers, where timers have no unref
  return controller
}
22+
1523
/**
1624
* HTTP torrent tracker client (for an individual tracker)
1725
*
@@ -111,7 +119,7 @@ class HTTPTracker extends Tracker {
111119
}
112120
}
113121

114-
_request (requestUrl, params, cb) {
122+
async _request (requestUrl, params, cb) {
115123
const self = this
116124
const parsedUrl = new URL(requestUrl + (requestUrl.indexOf('?') === -1 ? '?' : '&') + common.querystringStringify(params))
117125
let agent
@@ -124,56 +132,59 @@ class HTTPTracker extends Tracker {
124132

125133
this.cleanupFns.push(cleanup)
126134

127-
let request = get.concat({
128-
url: parsedUrl.toString(),
129-
agent,
130-
timeout: common.REQUEST_TIMEOUT,
131-
headers: {
132-
'user-agent': this.client._userAgent || ''
133-
}
134-
}, onResponse)
135-
136-
function cleanup () {
137-
if (request) {
138-
arrayRemove(self.cleanupFns, self.cleanupFns.indexOf(cleanup))
139-
request.abort()
140-
request = null
141-
}
142-
if (self.maybeDestroyCleanup) self.maybeDestroyCleanup()
135+
let res
136+
let controller = abortTimeout(common.REQUEST_TIMEOUT)
137+
try {
138+
res = await fetch(parsedUrl.toString(), {
139+
agent,
140+
signal: controller.signal,
141+
dispatcher: agent,
142+
headers: {
143+
'user-agent': this.client._userAgent || ''
144+
}
145+
})
146+
} catch (err) {
147+
if (err) return cb(err)
143148
}
149+
let data = new Uint8Array(await res.arrayBuffer())
150+
cleanup()
151+
if (this.destroyed) return
144152

145-
function onResponse (err, res, data) {
146-
cleanup()
147-
if (self.destroyed) return
153+
if (res.status !== 200) {
154+
return cb(new Error(`Non-200 response code ${res.statusCode} from ${this.announceUrl}`))
155+
}
156+
if (!data || data.length === 0) {
157+
return cb(new Error(`Invalid tracker response from${this.announceUrl}`))
158+
}
148159

149-
if (err) return cb(err)
150-
if (res.statusCode !== 200) {
151-
return cb(new Error(`Non-200 response code ${res.statusCode} from ${self.announceUrl}`))
152-
}
153-
if (!data || data.length === 0) {
154-
return cb(new Error(`Invalid tracker response from${self.announceUrl}`))
155-
}
160+
try {
161+
data = bencode.decode(data)
162+
} catch (err) {
163+
return cb(new Error(`Error decoding tracker response: ${err.message}`))
164+
}
165+
const failure = data['failure reason'] && Buffer.from(data['failure reason']).toString()
166+
if (failure) {
167+
debug(`failure from ${requestUrl} (${failure})`)
168+
return cb(new Error(failure))
169+
}
156170

157-
try {
158-
data = bencode.decode(data)
159-
} catch (err) {
160-
return cb(new Error(`Error decoding tracker response: ${err.message}`))
161-
}
162-
const failure = data['failure reason'] && Buffer.from(data['failure reason']).toString()
163-
if (failure) {
164-
debug(`failure from ${requestUrl} (${failure})`)
165-
return cb(new Error(failure))
166-
}
171+
const warning = data['warning message'] && Buffer.from(data['warning message']).toString()
172+
if (warning) {
173+
debug(`warning from ${requestUrl} (${warning})`)
174+
this.client.emit('warning', new Error(warning))
175+
}
167176

168-
const warning = data['warning message'] && Buffer.from(data['warning message']).toString()
169-
if (warning) {
170-
debug(`warning from ${requestUrl} (${warning})`)
171-
self.client.emit('warning', new Error(warning))
172-
}
177+
debug(`response from ${requestUrl}`)
173178

174-
debug(`response from ${requestUrl}`)
179+
cb(null, data)
175180

176-
cb(null, data)
181+
function cleanup () {
182+
if (!controller.signal.aborted) {
183+
arrayRemove(self.cleanupFns, self.cleanupFns.indexOf(cleanup))
184+
controller.abort()
185+
controller = null
186+
}
187+
if (self.maybeDestroyCleanup) self.maybeDestroyCleanup()
177188
}
178189
}
179190

package.json

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -34,6 +34,7 @@
3434
"chrome-dgram": "^3.0.6",
3535
"clone": "^2.0.0",
3636
"compact2string": "^1.4.1",
37+
"cross-fetch-ponyfill": "^1.0.1",
3738
"debug": "^4.1.1",
3839
"ip": "^1.1.5",
3940
"lru": "^3.1.0",
@@ -44,7 +45,6 @@
4445
"randombytes": "^2.1.0",
4546
"run-parallel": "^1.2.0",
4647
"run-series": "^1.1.9",
47-
"simple-get": "^4.0.0",
4848
"simple-peer": "^9.11.0",
4949
"socks": "^2.0.0",
5050
"string2compact": "^2.0.0",

test/scrape.js

Lines changed: 40 additions & 33 deletions
Original file line numberDiff line numberDiff line change
@@ -3,7 +3,7 @@ import Client from '../index.js'
33
import common from './common.js'
44
import commonLib from '../lib/common.js'
55
import fixtures from 'webtorrent-fixtures'
6-
import get from 'simple-get'
6+
import fetch from 'cross-fetch-ponyfill'
77
import test from 'tape'
88

99
const peerId = Buffer.from('01234567890123456789')
@@ -150,44 +150,47 @@ test('udp: MULTI scrape using Client.scrape static method', t => {
150150
})
151151

152152
// Scrape two torrents in one manual HTTP request and verify the bencoded
// response contains per-infohash stats for both.
test('server: multiple info_hash scrape (manual http request)', t => {
  t.plan(12)

  const binaryInfoHash1 = commonLib.hexToBinary(fixtures.leaves.parsedTorrent.infoHash)
  const binaryInfoHash2 = commonLib.hexToBinary(fixtures.alice.parsedTorrent.infoHash)

  common.createServer(t, 'http', async (server, announceUrl) => {
    const scrapeUrl = announceUrl.replace('/announce', '/scrape')

    // The tracker protocol allows repeating info_hash to scrape several
    // torrents in a single request.
    const url = `${scrapeUrl}?${commonLib.querystringStringify({
      info_hash: [binaryInfoHash1, binaryInfoHash2]
    })}`
    let res
    try {
      res = await fetch(url)
    } catch (err) {
      // Fix: must return here — without it, execution falls through and
      // `res.arrayBuffer()` below throws a TypeError on undefined `res`,
      // masking the real network error.
      t.error(err)
      return
    }
    let data = Buffer.from(await res.arrayBuffer())

    t.equal(res.status, 200)

    data = bencode.decode(data)
    t.ok(data.files)
    t.equal(Object.keys(data.files).length, 2)

    t.ok(data.files[binaryInfoHash1])
    t.equal(typeof data.files[binaryInfoHash1].complete, 'number')
    t.equal(typeof data.files[binaryInfoHash1].incomplete, 'number')
    t.equal(typeof data.files[binaryInfoHash1].downloaded, 'number')

    t.ok(data.files[binaryInfoHash2])
    t.equal(typeof data.files[binaryInfoHash2].complete, 'number')
    t.equal(typeof data.files[binaryInfoHash2].incomplete, 'number')
    t.equal(typeof data.files[binaryInfoHash2].downloaded, 'number')

    server.close(() => { t.pass('server closed') })
  })
})
188191

189192
test('server: all info_hash scrape (manual http request)', t => {
190-
t.plan(10)
193+
t.plan(9)
191194

192195
const binaryInfoHash = commonLib.hexToBinary(fixtures.leaves.parsedTorrent.infoHash)
193196

@@ -206,24 +209,28 @@ test('server: all info_hash scrape (manual http request)', t => {
206209

207210
client.start()
208211

209-
server.once('start', () => {
212+
server.once('start', async () => {
210213
// now do a scrape of everything by omitting the info_hash param
211-
get.concat(scrapeUrl, (err, res, data) => {
214+
let res
215+
try {
216+
res = await fetch(scrapeUrl)
217+
} catch (err) {
212218
t.error(err)
219+
}
220+
let data = Buffer.from(await res.arrayBuffer())
213221

214-
t.equal(res.statusCode, 200)
215-
data = bencode.decode(data)
216-
t.ok(data.files)
217-
t.equal(Object.keys(data.files).length, 1)
222+
t.equal(res.status, 200)
223+
data = bencode.decode(data)
224+
t.ok(data.files)
225+
t.equal(Object.keys(data.files).length, 1)
218226

219-
t.ok(data.files[binaryInfoHash])
220-
t.equal(typeof data.files[binaryInfoHash].complete, 'number')
221-
t.equal(typeof data.files[binaryInfoHash].incomplete, 'number')
222-
t.equal(typeof data.files[binaryInfoHash].downloaded, 'number')
227+
t.ok(data.files[binaryInfoHash])
228+
t.equal(typeof data.files[binaryInfoHash].complete, 'number')
229+
t.equal(typeof data.files[binaryInfoHash].incomplete, 'number')
230+
t.equal(typeof data.files[binaryInfoHash].downloaded, 'number')
223231

224-
client.destroy(() => { t.pass('client destroyed') })
225-
server.close(() => { t.pass('server closed') })
226-
})
232+
client.destroy(() => { t.pass('client destroyed') })
233+
server.close(() => { t.pass('server closed') })
227234
})
228235
})
229236
})

0 commit comments

Comments
 (0)