I would like to programmatically download some files, but I am getting a MemoryError exception for the larger ones: a small file downloads fine, but when I try a larger file I catch a MemoryError.
Here is my code:
import gzip
import urllib2
from StringIO import StringIO

def __download_gpl_file(accession):
    try:
        bin_string = __get_response(accession)
        if bin_string is None:
            return False
        string = __unzip(bin_string)
    except MemoryError:
        print 'Out of memory for: ' + accession
        return False

    if string:
        filename = DOWNLOADED + accession + '.txt'
        with open(filename, 'w+') as f:
            f.write(string)
        return True
    return False
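
My guess is that the real fix is to stop building the whole file as a single string at all, and instead write decompressed pieces to disk as they arrive. This is only a rough sketch of what I have in mind (untested); __download_gpl_file_streamed, __iter_response and __unzip_stream are hypothetical names of mine, and the two helpers are sketched further down:

def __download_gpl_file_streamed(accession):
    # Write decompressed pieces as they arrive, so neither the compressed
    # nor the decompressed data has to fit in memory in one piece.
    filename = DOWNLOADED + accession + '.txt'
    with open(filename, 'w+') as f:
        for piece in __unzip_stream(__iter_response(accession)):
            f.write(piece)
    return True

(Error handling and the retry logic are left out here just to keep the sketch short.)
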
def __get_response(accession, attempts=5):
    url = __construct_gpl_url(accession)  # Not shown
    response = None
    while attempts > 0:
        try:
            response = urllib2.urlopen(url)
            if response and response.getcode() < 201:
                break
            else:
                attempts -= 1
        except urllib2.URLError:
            print 'URLError with: ' + url
            attempts -= 1
    if response is None:
        return None
    return response.read()
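
I suspect response.read() above is the first place the whole file lands in memory at once. A chunked reader I am considering instead, written as a generator (sketch only; the 1 MB chunk size is an arbitrary value I picked, and the retry/status-code handling is omitted):

def __iter_response(accession, chunk_size=1024 * 1024):
    # Yield the raw, still-gzipped body piece by piece.
    url = __construct_gpl_url(accession)  # Not shown
    response = urllib2.urlopen(url)
    while True:
        chunk = response.read(chunk_size)
        if not chunk:
            break
        yield chunk
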
def __unzip(bin_string):
    f = StringIO(bin_string)
    decompressed = gzip.GzipFile(fileobj=f)
    return decompressed.read()
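
__unzip has the same issue: it holds both the compressed and the decompressed data as full strings. As far as I can tell, zlib.decompressobj with wbits = 16 + zlib.MAX_WBITS decodes the gzip format incrementally, so a streaming version might look like this (again just a sketch, built on the __iter_response generator above):

import zlib

def __unzip_stream(chunks):
    # wbits = 16 + MAX_WBITS tells zlib to expect a gzip header and trailer.
    decompressor = zlib.decompressobj(16 + zlib.MAX_WBITS)
    for chunk in chunks:
        yield decompressor.decompress(chunk)
    tail = decompressor.flush()
    if tail:
        yield tail
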
Is something like that streaming approach the right way to go, or is there anything else I can do to download larger files? Thanks in advance.