Skip to content

Commit dd0c799

Browse files
committed
Remove some syntax warnings. Fix the new algorithm for fixing chunks; it was trying to read chunks outside the region file.
1 parent 10e4229 commit dd0c799

File tree

1 file changed

+30
-20
lines changed

1 file changed

+30
-20
lines changed

regionfixer_core/world.py

Lines changed: 30 additions & 20 deletions
Original file line numberDiff line numberDiff line change
@@ -377,26 +377,36 @@ def fix_problematic_chunks(self, status):
377377
# read the data raw
378378
m = region_file.metadata[local_coords[0], local_coords[1]]
379379
region_file.file.seek(m.blockstart * region.SECTOR_LENGTH + 5)
380-
raw_chunk = region_file.file.read(m.length - 1)
381-
# decompress byte by byte so we can get as much as we can before the error happens
382-
try:
380+
# these status doesn't provide a good enough data, we could end up reading garbage
381+
if m.status not in (region.STATUS_CHUNK_IN_HEADER, region.STATUS_CHUNK_MISMATCHED_LENGTHS,
382+
region.STATUS_CHUNK_OUT_OF_FILE, region.STATUS_CHUNK_OVERLAPPING,
383+
region.STATUS_CHUNK_ZERO_LENGTH):
384+
# get the raw data of the chunk
385+
raw_chunk = region_file.file.read(m.length - 1)
386+
# decompress byte by byte so we can get as much as we can before the error happens
383387
dc = zlib.decompressobj()
384388
out = ""
385389
for i in raw_chunk:
386390
out += dc.decompress(i)
387-
except:
388-
pass
389-
# compare the sizes of the new compressed strem and the old one to see if we've got something good
390-
cdata = zlib.compress(out.encode())
391-
if len(cdata) == len(raw_chunk):
392-
# the chunk is probably good, write it in the region file
393-
region_file.write_blockdata(local_coords[0], local_coords[1], out)
394-
print("The chunk {0},{1} in region file {2} was fixed successfully.".format(local_coords[0], local_coords[1], self.filename))
395-
else:
396-
print("The chunk {0},{1} in region file {2} couldn't be fixed.".format(local_coords[0], local_coords[1], self.filename))
397-
#print("Extracted: " + str(len(out)))
398-
#print("Size of the compressed stream: " + str(len(raw_chunk)))
399-
391+
# compare the sizes of the new compressed strem and the old one to see if we've got something good
392+
cdata = zlib.compress(out.encode())
393+
if len(cdata) == len(raw_chunk):
394+
# the chunk is probably good, write it in the region file
395+
region_file.write_blockdata(local_coords[0], local_coords[1], out)
396+
print("The chunk {0},{1} in region file {2} was fixed successfully.".format(local_coords[0], local_coords[1], self.filename))
397+
else:
398+
print("The chunk {0},{1} in region file {2} couldn't be fixed.".format(local_coords[0], local_coords[1], self.filename))
399+
#=======================================================
400+
# print("Extracted: " + str(len(out)))
401+
# print("Size of the compressed stream: " + str(len(raw_chunk)))
402+
#=======================================================
403+
except region.ChunkHeaderError:
404+
# usually a chunk with zero length, pass
405+
print("The chunk {0},{1} in region file {2} couldn't be fixed.".format(local_coords[0], local_coords[1], self.filename))
406+
except region.RegionHeaderError:
407+
# usually a chunk with zero length, pass
408+
print("The chunk {0},{1} in region file {2} couldn't be fixed.".format(local_coords[0], local_coords[1], self.filename))
409+
400410
if status == c.CHUNK_MISSING_ENTITIES_TAG:
401411
# The arguments to create the empty TAG_List have been somehow extracted by comparing
402412
# the tag list from a healthy chunk with the one created by nbt
@@ -994,7 +1004,7 @@ def generate_report(self, standalone):
9941004
table_data = []
9951005
table_data.append(['Problem', 'Count'])
9961006
for p in c.CHUNK_PROBLEMS:
997-
if chunk_counts[p] is not 0:
1007+
if chunk_counts[p] != 0:
9981008
table_data.append([c.CHUNK_STATUS_TEXT[p], chunk_counts[p]])
9991009
table_data.append(['Total', chunk_counts['TOTAL']])
10001010
text += table(table_data)
@@ -1007,7 +1017,7 @@ def generate_report(self, standalone):
10071017
table_data = []
10081018
table_data.append(['Problem', 'Count'])
10091019
for p in c.REGION_PROBLEMS:
1010-
if region_counts[p] is not 0:
1020+
if region_counts[p] != 0:
10111021
table_data.append([c.REGION_STATUS_TEXT[p], region_counts[p]])
10121022
table_data.append(['Total', region_counts['TOTAL']])
10131023
text += table(table_data)
@@ -1561,7 +1571,7 @@ def generate_report(self, standalone):
15611571
table_data = []
15621572
table_data.append(['Problem', 'Count'])
15631573
for p in c.CHUNK_PROBLEMS:
1564-
if chunk_counts[p] is not 0:
1574+
if chunk_counts[p] != 0:
15651575
table_data.append([c.CHUNK_STATUS_TEXT[p], chunk_counts[p]])
15661576
table_data.append(['Total', chunk_counts['TOTAL']])
15671577
text += table(table_data)
@@ -1574,7 +1584,7 @@ def generate_report(self, standalone):
15741584
table_data = []
15751585
table_data.append(['Problem', 'Count'])
15761586
for p in c.REGION_PROBLEMS:
1577-
if region_counts[p] is not 0:
1587+
if region_counts[p] != 0:
15781588
table_data.append([c.REGION_STATUS_TEXT[p], region_counts[p]])
15791589
table_data.append(['Total', region_counts['TOTAL']])
15801590
text += table(table_data)

0 commit comments

Comments
 (0)