# Written by Bram Cohen
# see LICENSE.txt for license information

  1
  2
  3
  4
  5
  6
  7
  8
  9
 10
 11
 12
 13
 14
 15
 16
 17
 18
 19
 20
 21
 22
 23
 24
 25
 26
 27
 28
 29
 30
 31
 32
 33
 34
 35
 36
 37
 38
 39
 40
 41
 42
 43
 44
 45
 46
 47
 48
 49
 50
 51
 52
 53
 54
 55
 56
 57
 58
 59
 60
 61
 62
 63
 64
 65
 66
 67
 68
 69
 70
 71
 72
 73
 74
 75
 76
 77
 78
 79
 80
 81
 82
 83
 84
 85
 86
 87
 88
 89
 90
 91
 92
 93
 94
 95
 96
 97
 98
 99
100
101
102
103
104
105
106
107
108
109
110
111
112
113
114
115
116
117
118
119
120
121
122
123
124
125
126
127
128
129
130
131
132
133
134
135
136
137
138
139
140
141
142
143
144
145
146
147
148
149
150
151
152
153
154
155
156
157
158
159
160
161
162
163
164
165
166
167
168
169
170
171
172
173
174
175
176
177
178
179
180
181
182
183
184
185
186
187
188
189
190
191
192
193
194
195
196
197
198
199
200
201
202
203
204
205
206
207
208
209
210
211
212
213
214
215
216
217
218
219
220
221
222
223
224
225
226
227
228
229
230
231
232
233
234
235
236
237
238
239
240
241
242
243
244
245
246
247
248
249
250
251
252
253
254
255
256
257
258
259
260
261
262
263
264
265
266
267
268
269
270
271
272
273
274
275
276
277
278
279
280
281
282
283
284
285
286
287
288
289
290
291
292
293
294
295
296
297
298
299
300
301
302
303
304
305
306
307
308
309
310
311
312
313
314
315
316
317
318
319
320
321
322
323
324
325
326
327
328
329
330
331
332
333
334
335
336
337
338
339
340
341
342
343
344
345
346
347
348
349
350
351
352
353
354
355
356
357
358
359
360
361
362
363
364
365
366
367
368
369
370
371
372
373
374
375
376
377
378
379
380
381
382
383
384
385
386
387
388
389
390
391
392
393
394
395
396
397
398
399
400
401
402
403
404
405
406
407
408
409
410
411
412
413
414
415
416
417
418
419
420
421
422
423
424
425
426
427
428
429
430
431
432
433
434
435
436
437
438
439
440
441
442
443
444
445
446
447
448
449
450
451
452
453
454
455
456
457
458
459
460
461
462
463
464
465
466
467
468
469
470
471
472
473
474
475
476
477
478
479
480
481
482
483
484
485
486
487
488
489
490
491
492
493
494
495
496
497
498
499
500
501
502
503
504
505
506
507
508
509
510
511
512
513
514
515
516
517
518
519
520
521
522
523
524
525
526
527
528
529
530
531
532
533
534
535
536
537
538
539
540
541
542
543
544
545
546
547
548
549
550
551
552
553
554
555
556
557
558
559
560
561
562
563
564
565
566
567
568
569
570
571
572
573
574
575
576
577
578
579
580
581
582
583
584
585
586
587
588
589
590
591
592
593
594
595
596
597
598
599
600
601
602
603
604
605
606
607
608
609
610
611
612
613
614
615
616
617
618
619
620
621
622
623
624
625
626
627
628
629
630
631
632
633
634
635
636
637
638
639
640
641
642
643
644
645
646
647
648
649
650
651
652
653
654
655
656
657
658
659
660
661
662
663
664
665
666
667
668
669
670
671
672
673
674
675
676
677
678
679
680
681
682
683
684
685
686
687
688
689
690
691
692
693
694
695
696
697
698
699
700
701
702
703
704
705
706
707
708
709
710
711
712
713
714
715
716
717
718
719
720
721
722
723
724
725
726
727
728
729
730
731
732
733
734
735
736
737
738
739
740
741
742
743
744
745
746
747
748
749
750
751
752
753
754
755
756
757
758
759
760
761
762
763
764
765
766
767
768
769
770
771
772
773
774
775
776
777
778
779
780
781
782
783
784
785
786
787
788
789
790
791
792
793
794
795
796
797
798
799
800
801
802
803
804
805
806
807
808
809
810
811
812
813
814
815
816
817
818
819
820
821
822
823
824
825
826
827
828
829
830
831
832
833
834
835
836
837
838
839
840
841
842
843
844
845
846
847
848
849
850
851
852
853
854
855
856
857
858
859
860
861
862
863
864
865
866
867
868
869
870
871
872
873
874
875
876
# Written by Bram Cohen
# see LICENSE.txt for license information
from zurllib import urlopen
from urlparse import urlparse
from BT1.btformats import check_message
from BT1.Choker import Choker
from BT1.Storage import Storage
from BT1.StorageWrapper import StorageWrapper
from BT1.FileSelector import FileSelector
from BT1.Uploader import Upload
from BT1.Downloader import Downloader
from BT1.HTTPDownloader import HTTPDownloader
from BT1.Connecter import Connecter
from RateLimiter import RateLimiter
from BT1.Encrypter import Encoder
from RawServer import RawServer, autodetect_ipv6, autodetect_socket_style
from BT1.Rerequester import Rerequester
from BT1.DownloaderFeedback import DownloaderFeedback
from RateMeasure import RateMeasure
from CurrentRateMeasure import Measure
from BT1.PiecePicker import PiecePicker
from BT1.Statistics import Statistics
from ConfigDir import ConfigDir
from bencode import bencode, bdecode
from natpunch import UPnP_test
from sha import sha
from os import path, makedirs, listdir
from parseargs import parseargs, formatDefinitions, defaultargs
from socket import error as socketerror
from random import seed
from threading import Thread, Event
from clock import clock
from BTcrypto import CRYPTO_OK
from __init__ import createPeerID
# Compatibility shim: Python versions before 2.3 have no True/False
# builtins, so define them as plain integers when they are missing.
try:
    True
except:
    True = 1
    False = 0
# Option table consumed by parseargs()/formatDefinitions():
# each entry is (option name, default value, help text).
defaults = [
    ('max_uploads', 7,
        "the maximum number of uploads to allow at once."),
    ('keepalive_interval', 120.0,
        'number of seconds to pause between sending keepalives'),
    ('download_slice_size', 2 ** 14,
        "How many bytes to query for per request."),
    ('upload_unit_size', 1460,
        "when limiting upload rate, how many bytes to send at a time"),
    ('request_backlog', 10,
        "maximum number of requests to keep in a single pipe at once."),
    ('max_message_length', 2 ** 23,
        "maximum length prefix encoding you'll accept over the wire - larger values get the connection dropped."),
    ('ip', '',
        "ip to report you have to the tracker."),
    ('minport', 10000, 'minimum port to listen on, counts up if unavailable'),
    ('maxport', 60000, 'maximum port to listen on'),
    ('random_port', 1, 'whether to choose randomly inside the port range ' +
        'instead of counting up linearly'),
    ('responsefile', '',
        'file the server response was stored in, alternative to url'),
    ('url', '',
        'url to get file from, alternative to responsefile'),
    ('crypto_allowed', int(CRYPTO_OK),
        'whether to allow the client to accept encrypted connections'),
    ('crypto_only', 0,
        'whether to only create or allow encrypted connections'),
    ('crypto_stealth', 0,
        'whether to prevent all non-encrypted connection attempts; ' +
        'will result in an effectively firewalled state on older trackers'),
    ('selector_enabled', 1,
        'whether to enable the file selector and fast resume function'),
    ('expire_cache_data', 10,
        'the number of days after which you wish to expire old cache data ' +
        '(0 = disabled)'),
    ('priority', '',
        'a list of file priorities separated by commas, must be one per file, ' +
        '0 = highest, 1 = normal, 2 = lowest, -1 = download disabled'),
    ('saveas', '',
        'local file name to save the file as, null indicates query user'),
    ('timeout', 300.0,
        'time to wait between closing sockets which nothing has been received on'),
    ('timeout_check_interval', 60.0,
        'time to wait between checking if any connections have timed out'),
    ('max_slice_length', 2 ** 17,
        "maximum length slice to send to peers, larger requests are ignored"),
    ('max_rate_period', 20.0,
        "maximum amount of time to guess the current rate estimate represents"),
    ('bind', '',
        'comma-separated list of ips/hostnames to bind to locally'),
    # IPv6 autodetection is disabled by default; the commented entry below
    # shows the autodetecting alternative.
#    ('ipv6_enabled', autodetect_ipv6(),
    ('ipv6_enabled', 0,
        'allow the client to connect to peers via IPv6'),
    ('ipv6_binds_v4', autodetect_socket_style(),
        "set if an IPv6 server socket won't also field IPv4 connections"),
    ('upnp_nat_access', 1,
        'attempt to autoconfigure a UPnP router to forward a server port ' +
        '(0 = disabled, 1 = mode 1 [fast], 2 = mode 2 [slow])'),
    ('upload_rate_fudge', 5.0,
        'time equivalent of writing to kernel-level TCP buffer, for rate adjustment'),
    ('tcp_ack_fudge', 0.03,
        'how much TCP ACK download overhead to add to upload rate calculations ' +
        '(0 = disabled)'),
    ('display_interval', .5,
        'time between updates of displayed information'),
    ('rerequest_interval', 5 * 60,
        'time to wait between requesting more peers'),
    ('min_peers', 20,
        'minimum number of peers to not do rerequesting'),
    ('http_timeout', 60,
        'number of seconds to wait before assuming that an http connection has timed out'),
    ('max_initiate', 40,
        'number of peers at which to stop initiating new connections'),
    ('check_hashes', 1,
        'whether to check hashes on disk'),
    ('max_upload_rate', 0,
        'maximum kB/s to upload at (0 = no limit, -1 = automatic)'),
    ('max_download_rate', 0,
        'maximum kB/s to download at (0 = no limit)'),
    ('alloc_type', 'normal',
        'allocation type (may be normal, background, pre-allocate or sparse)'),
    ('alloc_rate', 2.0,
        'rate (in MiB/s) to allocate space at using background allocation'),
    ('buffer_reads', 1,
        'whether to buffer disk reads'),
    ('write_buffer_size', 4,
        'the maximum amount of space to use for buffering disk writes ' +
        '(in megabytes, 0 = disabled)'),
    ('breakup_seed_bitfield', 1,
        'sends an incomplete bitfield and then fills with have messages, '
        'in order to get around stupid ISP manipulation'),
    ('snub_time', 30.0,
        "seconds to wait for data to come in over a connection before assuming it's semi-permanently choked"),
    ('spew', 0,
        "whether to display diagnostic info to stdout"),
    ('rarest_first_cutoff', 2,
        "number of downloads at which to switch from random to rarest first"),
    ('rarest_first_priority_cutoff', 5,
        'the number of peers which need to have a piece before other partials take priority over rarest first'),
    ('min_uploads', 4,
        "the number of uploads to fill out to with extra optimistic unchokes"),
    ('max_files_open', 50,
        'the maximum number of files to keep open at a time, 0 means no limit'),
    ('round_robin_period', 30,
        "the number of seconds between the client's switching upload targets"),
    ('super_seeder', 0,
        "whether to use special upload-efficiency-maximizing routines (only for dedicated seeds)"),
    ('security', 1,
        "whether to enable extra security features intended to prevent abuse"),
    ('max_connections', 0,
        "the absolute maximum number of peers to connect with (0 = no limit)"),
    ('auto_kick', 1,
        "whether to allow the client to automatically kick/ban peers that send bad data"),
    ('double_check', 1,
        "whether to double-check data being written to the disk for errors (may increase CPU load)"),
    ('triple_check', 0,
        "whether to thoroughly check data being written to the disk (may slow disk access)"),
    ('lock_files', 1,
        "whether to lock files the client is working with"),
    ('lock_while_reading', 0,
        "whether to lock access to files being read"),
    ('auto_flush', 0,
        "minutes between automatic flushes to disk (0 = disabled)"),
    ('dedicated_seed_id', '',
        "code to send to tracker identifying as a dedicated seed"),
    ]

# Header prepended to the formatted option list (see get_usage()).
argslistheader = 'Arguments are:\n\n'
def _failfunc(x):
    # Default failure handler for download(): just print the message.
    print x
# old-style downloader
# old-style downloader
def download(params, filefunc, statusfunc, finfunc, errorfunc, doneflag, cols,
             pathFunc = None, presets = {}, exchandler = None,
             failed = _failfunc, paramfunc = None):
    """Run a complete torrent download using the old callback-based API.

    Parses `params`, binds a listening port, fetches and decodes the
    torrent metainfo, then drives a BT1Download instance inside the
    rawserver event loop until `doneflag` is set.  Progress/errors are
    reported through the supplied callbacks; on any setup failure the
    function reports via `failed`/`errorfunc` and returns early.
    """
    try:
        config = parse_params(params, presets)
    except ValueError, e:
        failed('error: ' + str(e) + '\nrun with no args for parameter explanations')
        return
    if not config:
        # no arguments given at all: show usage text instead of starting
        errorfunc(get_usage())
        return

    myid = createPeerID()
    seed(myid)  # seed the RNG with the (partly random) peer id

    rawserver = RawServer(doneflag, config['timeout_check_interval'],
                          config['timeout'], ipv6_enable = config['ipv6_enabled'],
                          failfunc = failed, errorfunc = exchandler)

    upnp_type = UPnP_test(config['upnp_nat_access'])
    try:
        listen_port = rawserver.find_and_bind(config['minport'], config['maxport'],
                          config['bind'], ipv6_socket_style = config['ipv6_binds_v4'],
                          upnp = upnp_type, randomizer = config['random_port'])
    except socketerror, e:
        failed("Couldn't listen - " + str(e))
        return

    response = get_response(config['responsefile'], config['url'], failed)
    if not response:
        return

    # infohash identifies the torrent: SHA1 of the bencoded 'info' dict
    infohash = sha(bencode(response['info'])).digest()

    d = BT1Download(statusfunc, finfunc, errorfunc, exchandler, doneflag,
                    config, response, infohash, myid, rawserver, listen_port)

    if not d.saveAs(filefunc):
        return

    if pathFunc:
        pathFunc(d.getFilename())

    # old_style init returns a callable that performs the hash check
    hashcheck = d.initFiles(old_style = True)
    if not hashcheck:
        return
    if not hashcheck():
        return
    if not d.startEngine():
        return
    d.startRerequester()
    d.autoStats()

    statusfunc(activity = 'connecting to peers')

    if paramfunc:
        # hand run-time control hooks back to the caller
        paramfunc({ 'max_upload_rate' : d.setUploadRate,  # change_max_upload_rate(<int KiB/sec>)
                    'max_uploads': d.setConns, # change_max_uploads(<int max uploads>)
                    'listen_port' : listen_port, # int
                    'peer_id' : myid, # string
                    'info_hash' : infohash, # string
                    'start_connection' : d._startConnection, # start_connection((<string ip>, <int port>), <peer id>)
                    })

    # blocks until doneflag is set
    rawserver.listen_forever(d.getPortHandler())

    d.shutdown()
def parse_params(params, presets = {}):
    """Parse command-line `params` against the module's option table.

    Returns the config dict, or None when `params` is empty (caller then
    shows usage).  A single positional argument is interpreted as either
    a responsefile path (if it exists on disk) or a metainfo URL.
    Raises ValueError when the responsefile/url arguments conflict or
    when neither is supplied.
    """
    if len(params) == 0:
        return None
    config, args = parseargs(params, defaults, 0, 1, presets = presets)
    if args:
        if config['responsefile'] or config['url']:
            raise ValueError,'must have responsefile or url as arg or parameter, not both'
        if path.isfile(args[0]):
            config['responsefile'] = args[0]
        else:
            try:
                # sanity-check that the argument at least parses as a URL
                urlparse(args[0])
            except:
                raise ValueError, 'bad filename or url'
            config['url'] = args[0]
    elif (config['responsefile'] == '') == (config['url'] == ''):
        # either both empty or both set: exactly one must be given
        raise ValueError, 'need responsefile or url, must have one, cannot have both'
    return config
def get_usage(defaults = defaults, cols = 100, presets = {}):
    """Return the full usage text: the arguments header followed by the
    formatted description of every supported option."""
    body = formatDefinitions(defaults, cols, presets)
    return argslistheader + body
def get_response(file, url, errorfunc):
    """Load and bdecode torrent metainfo from `file` or, failing that, `url`.

    Returns the decoded response dict, or None after reporting the
    problem through `errorfunc`.  Read errors, malformed data and bad
    URLs are all handled here; a sloppy re-decode is attempted when the
    strict bdecode fails.
    """
    try:
        if file:
            h = open(file, 'rb')
            try:
                line = h.read(10)   # quick test to see if responsefile contains a dict
                front,garbage = line.split(':',1)
                assert front[0] == 'd'
                int(front[1:])
            except:
                errorfunc(file+' is not a valid responsefile')
                return None
            # rewind so the full contents can be read below; reopen if
            # the stream turns out not to be seekable
            try:
                h.seek(0)
            except:
                try:
                    h.close()
                except:
                    pass
                h = open(file, 'rb')
        else:
            try:
                h = urlopen(url)
            except:
                errorfunc(url+' bad url')
                return None
        response = h.read()
    except IOError, e:
        errorfunc('problem getting response info - ' + str(e))
        return None
    try:
        h.close()
    except:
        pass
    try:
        try:
            response = bdecode(response)
        except:
            # strict decode failed; warn and retry in lenient mode
            errorfunc("warning: bad data in responsefile")
            response = bdecode(response, sloppy=1)
        check_message(response)
    except ValueError, e:
        errorfunc("got bad file info - " + str(e))
        return None
    return response
class BT1Download:
    """Engine object coordinating one torrent download.

    Wires together storage, piece picking, choking, peer connections,
    HTTP seeding, tracker rerequests and statistics around a shared
    RawServer event loop.  Typical lifecycle: saveAs() -> initFiles()
    -> startEngine() -> startRerequester() -> (event loop) -> shutdown().
    """

    def __init__(self, statusfunc, finfunc, errorfunc, excfunc, doneflag,
                 config, response, infohash, id, rawserver, port,
                 appdataobj = None):
        # UI/control callbacks and session parameters
        self.statusfunc = statusfunc
        self.finfunc = finfunc
        self.errorfunc = errorfunc
        self.excfunc = excfunc
        self.doneflag = doneflag
        self.config = config
        self.response = response
        self.infohash = infohash
        self.myid = id
        self.rawserver = rawserver
        self.port = port

        self.info = self.response['info']
        # 'pieces' is a flat string of concatenated 20-byte SHA1 digests
        self.pieces = [self.info['pieces'][x:x+20]
                       for x in xrange(0, len(self.info['pieces']), 20)]
        self.len_pieces = len(self.pieces)
        self.argslistheader = argslistheader
        # unpauseflag set == running; cleared while paused
        self.unpauseflag = Event()
        self.unpauseflag.set()
        self.downloader = None
        self.storagewrapper = None
        self.fileselector = None
        self.super_seeding_active = False
        self.filedatflag = Event()
        self.spewflag = Event()
        self.superseedflag = Event()
        self.whenpaused = None
        # finflag set once the download is complete
        self.finflag = Event()
        self.rerequest = None
        self.tcp_ack_fudge = config['tcp_ack_fudge']

        self.selector_enabled = config['selector_enabled']
        if appdataobj:
            self.appdataobj = appdataobj
        elif self.selector_enabled:
            # default config dir; also purge stale cached torrent data
            self.appdataobj = ConfigDir()
            self.appdataobj.deleteOldCacheData( config['expire_cache_data'],
                                                [self.infohash] )

        self.excflag = self.rawserver.get_exception_flag()
        self.failed = False
        self.checking = False
        self.started = False

        self.picker = PiecePicker(self.len_pieces, config['rarest_first_cutoff'],
                                  config['rarest_first_priority_cutoff'])
        self.choker = Choker(config, rawserver.add_task,
                             self.picker, self.finflag.isSet)

    def checkSaveLocation(self, loc):
        """Return true if anything already exists at the save location."""
        if self.info.has_key('length'):
            # single-file torrent: just test the target path
            return path.exists(loc)
        for x in self.info['files']:
            if path.exists(path.join(loc, x['path'][0])):
                return True
        return False

    def saveAs(self, filefunc, pathfunc = None):
        """Pick the on-disk location via `filefunc` and create directories.

        Sets self.filename, self.files and self.datalength.  Returns the
        chosen path, or None if the user cancelled or creation failed.
        """
        try:
            def make(f, forcedir = False):
                # create the directory for f (or f itself if forcedir)
                if not forcedir:
                    f = path.split(f)[0]
                if f != '' and not path.exists(f):
                    makedirs(f)
            if self.info.has_key('length'):
                # single-file torrent
                file_length = self.info['length']
                file = filefunc(self.info['name'], file_length,
                                self.config['saveas'], False)
                if file is None:
                    return None
                make(file)
                files = [(file, file_length)]
            else:
                # multi-file torrent: total length is the sum of all files
                file_length = 0L
                for x in self.info['files']:
                    file_length += x['length']
                file = filefunc(self.info['name'], file_length,
                                self.config['saveas'], True)
                if file is None:
                    return None

                # if this path exists, and no files from the info dict exist, we assume it's a new download and
                # the user wants to create a new directory with the default name
                existing = 0
                if path.exists(file):
                    if not path.isdir(file):
                        self.errorfunc(file + 'is not a dir')
                        return None
                    if len(listdir(file)) > 0:  # if it's not empty
                        for x in self.info['files']:
                            if path.exists(path.join(file, x['path'][0])):
                                existing = 1
                        if not existing:
                            file = path.join(file, self.info['name'])
                            if path.exists(file) and not path.isdir(file):
                                # strip a stray .torrent suffix and retry
                                if file[-8:] == '.torrent':
                                    file = file[:-8]
                                if path.exists(file) and not path.isdir(file):
                                    self.errorfunc("Can't create dir - " + self.info['name'])
                                    return None
                make(file, True)

                # alert the UI to any possible change in path
                if pathfunc != None:
                    pathfunc(file)

                files = []
                for x in self.info['files']:
                    n = file
                    for i in x['path']:
                        n = path.join(n, i)
                    files.append((n, x['length']))
                    make(n)
        except OSError, e:
            self.errorfunc("Couldn't allocate dir - " + str(e))
            return None

        self.filename = file
        self.files = files
        self.datalength = file_length

        return file

    def getFilename(self):
        """Return the save path chosen by saveAs()."""
        return self.filename

    def _finished(self):
        """Called by the storage wrapper when the download completes."""
        self.finflag.set()
        try:
            self.storage.set_readonly()
        except (IOError, OSError), e:
            self.errorfunc('trouble setting readonly at end - ' + str(e))
        if self.superseedflag.isSet():
            self._set_super_seed()
        # slow down upload-target rotation for large pieces
        self.choker.set_round_robin_period(
            max( self.config['round_robin_period'],
                 self.config['round_robin_period'] *
                 self.info['piece length'] / 200000 ) )
        self.rerequest_complete()
        self.finfunc()

    def _data_flunked(self, amount, index):
        """Called when a downloaded piece fails its hash check."""
        self.ratemeasure_datarejected(amount)
        if not self.doneflag.isSet():
            self.errorfunc('piece %d failed hash check, re-downloading it' % index)

    def _failed(self, reason):
        """Mark the download failed and stop the engine."""
        self.failed = True
        self.doneflag.set()
        if reason is not None:
            self.errorfunc(reason)

    def initFiles(self, old_style = False, statusfunc = None):
        """Set up Storage/StorageWrapper (and the file selector).

        Returns None on failure or shutdown; otherwise a hash-check
        callable (old_style) or the storagewrapper initializer.
        """
        if self.doneflag.isSet():
            return None
        if not statusfunc:
            statusfunc = self.statusfunc

        disabled_files = None
        if self.selector_enabled:
            # parse the per-file priority list from the config, if given
            self.priority = self.config['priority']
            if self.priority:
                try:
                    self.priority = self.priority.split(',')
                    assert len(self.priority) == len(self.files)
                    self.priority = [int(p) for p in self.priority]
                    for p in self.priority:
                        assert p >= -1
                        assert p <= 2
                except:
                    self.errorfunc('bad priority list given, ignored')
                    self.priority = None

            # prefer disabled-file info from cached resume data,
            # fall back to the priority list (-1 == disabled)
            data = self.appdataobj.getTorrentData(self.infohash)
            try:
                d = data['resume data']['priority']
                assert len(d) == len(self.files)
                disabled_files = [x == -1 for x in d]
            except:
                try:
                    disabled_files = [x == -1 for x in self.priority]
                except:
                    pass

        try:
            try:
                self.storage = Storage(self.files, self.info['piece length'],
                                       self.doneflag, self.config, disabled_files)
            except IOError, e:
                self.errorfunc('trouble accessing files - ' + str(e))
                return None
            if self.doneflag.isSet():
                return None

            self.storagewrapper = StorageWrapper(self.storage, self.config['download_slice_size'],
                self.pieces, self.info['piece length'], self._finished, self._failed,
                statusfunc, self.doneflag, self.config['check_hashes'],
                self._data_flunked, self.rawserver.add_task,
                self.config, self.unpauseflag)

        except ValueError, e:
            self._failed('bad data - ' + str(e))
        except IOError, e:
            self._failed('IOError - ' + str(e))
        if self.doneflag.isSet():
            return None

        if self.selector_enabled:
            self.fileselector = FileSelector(self.files, self.info['piece length'],
                                             self.appdataobj.getPieceDir(self.infohash),
                                             self.storage, self.storagewrapper,
                                             self.rawserver.add_task,
                                             self._failed)
            if data:
                data = data.get('resume data')
                if data:
                    # restore previous file selection state
                    self.fileselector.unpickle(data)

        self.checking = True
        if old_style:
            return self.storagewrapper.old_style_init()
        return self.storagewrapper.initialize

    def getCachedTorrentData(self):
        """Return cached (resume) data for this torrent, if any."""
        return self.appdataobj.getTorrentData(self.infohash)

    def _make_upload(self, connection, ratelimiter, totalup):
        # factory handed to the Connecter for new peer connections
        return Upload(connection, ratelimiter, totalup,
                      self.choker, self.storagewrapper, self.picker,
                      self.config)

    def _kick_peer(self, connection):
        # close the connection from inside the event loop
        def k(connection = connection):
            connection.close()
        self.rawserver.add_task(k,0)

    def _ban_peer(self, ip):
        self.encoder_ban(ip)

    def _received_raw_data(self, x):
        # account for TCP ACK overhead against the upload rate limiter
        if self.tcp_ack_fudge:
            x = int(x*self.tcp_ack_fudge)
            self.ratelimiter.adjust_sent(x)

    def _received_data(self, x):
        # peer-protocol payload arrived: update rate measurements
        self.downmeasure.update_rate(x)
        self.ratemeasure.data_came_in(x)

    def _received_http_data(self, x):
        # HTTP-seed payload arrived: also notify the downloader
        self.downmeasure.update_rate(x)
        self.ratemeasure.data_came_in(x)
        self.downloader.external_data_received(x)

    def _cancelfunc(self, pieces):
        # cancel in-flight downloads of the given pieces everywhere
        self.downloader.cancel_piece_download(pieces)
        self.httpdownloader.cancel_piece_download(pieces)

    def _reqmorefunc(self, pieces):
        self.downloader.requeue_piece_download(pieces)

    def startEngine(self, ratelimiter = None, statusfunc = None):
        """Create the transfer machinery (up/downloaders, encoder, etc.).

        Must be called after initFiles() has completed.  Returns True on
        success, False if shutdown was already requested.
        """
        if self.doneflag.isSet():
            return False
        if not statusfunc:
            statusfunc = self.statusfunc

        self.checking = False

        if not CRYPTO_OK:
            # crypto support unavailable: force all crypto options off
            if self.config['crypto_allowed']:
                self.errorfunc('warning - crypto library not installed')
            self.config['crypto_allowed'] = 0
            self.config['crypto_only'] = 0
            self.config['crypto_stealth'] = 0

        # tell the piece picker which pieces we already have
        for i in xrange(self.len_pieces):
            if self.storagewrapper.do_I_have(i):
                self.picker.complete(i)
        self.upmeasure = Measure(self.config['max_rate_period'],
                                 self.config['upload_rate_fudge'])
        self.downmeasure = Measure(self.config['max_rate_period'])

        if ratelimiter:
            self.ratelimiter = ratelimiter
        else:
            self.ratelimiter = RateLimiter(self.rawserver.add_task,
                                           self.config['upload_unit_size'],
                                           self.setConns)
            self.ratelimiter.set_upload_rate(self.config['max_upload_rate'])

        self.ratemeasure = RateMeasure()
        self.ratemeasure_datarejected = self.ratemeasure.data_rejected

        self.downloader = Downloader(self.storagewrapper, self.picker,
            self.config['request_backlog'], self.config['max_rate_period'],
            self.len_pieces, self.config['download_slice_size'],
            self._received_data, self.config['snub_time'], self.config['auto_kick'],
            self._kick_peer, self._ban_peer)
        self.downloader.set_download_rate(self.config['max_download_rate'])
        self.connecter = Connecter(self._make_upload, self.downloader, self.choker,
                            self.len_pieces, self.upmeasure, self.config,
                            self.ratelimiter, self.rawserver.add_task)
        self.encoder = Encoder(self.connecter, self.rawserver,
            self.myid, self.config['max_message_length'], self.rawserver.add_task,
            self.config['keepalive_interval'], self.infohash,
            self._received_raw_data, self.config)
        self.encoder_ban = self.encoder.ban

        self.httpdownloader = HTTPDownloader(self.storagewrapper, self.picker,
            self.rawserver, self.finflag, self.errorfunc, self.downloader,
            self.config['max_rate_period'], self.infohash, self._received_http_data,
            self.connecter.got_piece)
        if self.response.has_key('httpseeds') and not self.finflag.isSet():
            for u in self.response['httpseeds']:
                self.httpdownloader.make_download(u)

        if self.selector_enabled:
            self.fileselector.tie_in(self.picker, self._cancelfunc,
                                     self._reqmorefunc, self.rerequest_ondownloadmore)
            if self.priority:
                self.fileselector.set_priorities_now(self.priority)
            self.appdataobj.deleteTorrentData(self.infohash)
                                # erase old data once you've started modifying it

        if self.config['super_seeder']:
            self.set_super_seed()

        self.started = True
        return True

    def rerequest_complete(self):
        """Announce 'completed' to the tracker."""
        if self.rerequest:
            self.rerequest.announce(1)

    def rerequest_stopped(self):
        """Announce 'stopped' to the tracker."""
        if self.rerequest:
            self.rerequest.announce(2)

    def rerequest_lastfailed(self):
        """Return whether the last tracker request failed."""
        if self.rerequest:
            return self.rerequest.last_failed
        return False

    def rerequest_ondownloadmore(self):
        # poke the tracker when more file data is requested
        if self.rerequest:
            self.rerequest.hit()

    def startRerequester(self, seededfunc = None, force_rapid_update = False):
        """Create and start the tracker Rerequester."""
        if self.response.has_key('announce-list'):
            trackerlist = self.response['announce-list']
        else:
            trackerlist = [[self.response['announce']]]

        self.rerequest = Rerequester(self.port, self.myid, self.infohash,
            trackerlist, self.config,
            self.rawserver.add_task, self.rawserver.add_task,
            self.errorfunc, self.excfunc,
            self.encoder.start_connections,
            self.connecter.how_many_connections,
            self.storagewrapper.get_amount_left,
            self.upmeasure.get_total, self.downmeasure.get_total,
            self.upmeasure.get_rate, self.downmeasure.get_rate,
            self.doneflag, self.unpauseflag, seededfunc, force_rapid_update )

        self.rerequest.start()

    def _init_stats(self):
        # build the Statistics object used by the feedback displays
        self.statistics = Statistics(self.upmeasure, self.downmeasure,
                    self.connecter, self.httpdownloader, self.ratelimiter,
                    self.rerequest_lastfailed, self.filedatflag)
        if self.info.has_key('files'):
            self.statistics.set_dirstats(self.files, self.info['piece length'])
        if self.config['spew']:
            self.spewflag.set()

    def autoStats(self, displayfunc = None):
        """Start periodic statistics display via DownloaderFeedback."""
        if not displayfunc:
            displayfunc = self.statusfunc

        self._init_stats()
        DownloaderFeedback(self.choker, self.httpdownloader, self.rawserver.add_task,
            self.upmeasure.get_rate, self.downmeasure.get_rate,
            self.ratemeasure, self.storagewrapper.get_stats,
            self.datalength, self.finflag, self.spewflag, self.statistics,
            displayfunc, self.config['display_interval'])

    def startStats(self):
        """Return a callable that gathers statistics on demand."""
        self._init_stats()
        d = DownloaderFeedback(self.choker, self.httpdownloader, self.rawserver.add_task,
            self.upmeasure.get_rate, self.downmeasure.get_rate,
            self.ratemeasure, self.storagewrapper.get_stats,
            self.datalength, self.finflag, self.spewflag, self.statistics)
        return d.gather

    def getPortHandler(self):
        """Return the object that handles incoming peer connections."""
        return self.encoder

    def shutdown(self, torrentdata = {}):
        """Flush and close storage, stop the tracker, save resume data.

        Returns true on a clean shutdown; false suggests the torrent may
        need to be auto-restarted.
        """
        if self.checking or self.started:
            self.storagewrapper.sync()
            self.storage.close()
            self.rerequest_stopped()
        if self.fileselector and self.started:
            if not self.failed:
                self.fileselector.finish()
                torrentdata['resume data'] = self.fileselector.pickle()
            try:
                self.appdataobj.writeTorrentData(self.infohash,torrentdata)
            except:
                self.appdataobj.deleteTorrentData(self.infohash) # clear it
        return not self.failed and not self.excflag.isSet()
        # if returns false, you may wish to auto-restart the torrent

    def setUploadRate(self, rate):
        """Change the maximum upload rate (applied inside the event loop)."""
        try:
            def s(self = self, rate = rate):
                self.config['max_upload_rate'] = rate
                self.ratelimiter.set_upload_rate(rate)
            self.rawserver.add_task(s)
        except AttributeError:
            pass

    def setConns(self, conns, conns2 = None):
        """Change min/max upload slot counts (applied in the event loop)."""
        if not conns2:
            conns2 = conns
        try:
            def s(self = self, conns = conns, conns2 = conns2):
                self.config['min_uploads'] = conns
                self.config['max_uploads'] = conns2
                if (conns > 30):
                    self.config['max_initiate'] = conns + 10
            self.rawserver.add_task(s)
        except AttributeError:
            pass

    def setDownloadRate(self, rate):
        """Change the maximum download rate (applied in the event loop)."""
        try:
            def s(self = self, rate = rate):
                self.config['max_download_rate'] = rate
                self.downloader.set_download_rate(rate)
            self.rawserver.add_task(s)
        except AttributeError:
            pass

    def startConnection(self, ip, port, id):
        """Initiate an outgoing connection to a specific peer."""
        self.encoder._start_connection((ip, port), id)

    def _startConnection(self, ipandport, id):
        # variant taking a pre-built (ip, port) tuple (see download())
        self.encoder._start_connection(ipandport, id)

    def setInitiate(self, initiate):
        """Change the max number of initiated connections."""
        try:
            def s(self = self, initiate = initiate):
                self.config['max_initiate'] = initiate
            self.rawserver.add_task(s)
        except AttributeError:
            pass

    def getConfig(self):
        return self.config

    def getDefaults(self):
        """Return a fresh config dict of the module defaults."""
        return defaultargs(defaults)

    def getUsageText(self):
        return self.argslistheader

    def reannounce(self, special = None):
        """Force a tracker announce (optionally to a special URL)."""
        try:
            def r(self = self, special = special):
                if special is None:
                    self.rerequest.announce()
                else:
                    self.rerequest.announce(specialurl = special)
            self.rawserver.add_task(r)
        except AttributeError:
            pass

    def getResponse(self):
        """Return the decoded metainfo dict (None if unavailable)."""
        try:
            return self.response
        except:
            return None

    def Pause(self):
        """Pause the download; returns False if not yet initialized."""
        if not self.storagewrapper:
            return False
        self.unpauseflag.clear()
        self.rawserver.add_task(self.onPause)
        return True

    def onPause(self):
        # runs inside the event loop
        self.whenpaused = clock()
        if not self.downloader:
            return
        self.downloader.pause(True)
        self.encoder.pause(True)
        self.choker.pause(True)

    def Unpause(self):
        """Resume a paused download."""
        self.unpauseflag.set()
        self.rawserver.add_task(self.onUnpause)

    def onUnpause(self):
        # runs inside the event loop
        if not self.downloader:
            return
        self.downloader.pause(False)
        self.encoder.pause(False)
        self.choker.pause(False)
        if self.rerequest and self.whenpaused and clock()-self.whenpaused > 60:
            self.rerequest.announce(3)      # rerequest automatically if paused for >60 seconds

    def set_super_seed(self):
        """Request super-seed mode; takes effect once the download is done."""
        try:
            self.superseedflag.set()
            def s(self = self):
                if self.finflag.isSet():
                    self._set_super_seed()
            self.rawserver.add_task(s)
        except AttributeError:
            pass

    def _set_super_seed(self):
        # actually switch the downloader/choker into super-seed mode
        if not self.super_seeding_active:
            self.super_seeding_active = True
            self.errorfunc('  ** SUPER-SEED OPERATION ACTIVE **\n' +
                           '  please set Max uploads so each peer gets 6-8 kB/s')
            def s(self = self):
                self.downloader.set_super_seed()
                self.choker.set_super_seed()
            self.rawserver.add_task(s)
            if self.finflag.isSet():        # mode started when already finished
                def r(self = self):
                    self.rerequest.announce(3)  # so after kicking everyone off, reannounce
                self.rawserver.add_task(r)

    def am_I_finished(self):
        """Return whether the download has completed."""
        return self.finflag.isSet()

    def get_transfer_stats(self):
        """Return (total uploaded, total downloaded) byte counts."""
        return self.upmeasure.get_total(), self.downmeasure.get_total()