From 638b95dd462a81c16a1e01eb24fb5a51765fcbdf Mon Sep 17 00:00:00 2001 From: Alejandro Aguilera Date: Sat, 18 Jan 2014 16:54:04 +0100 Subject: [PATCH 001/151] Huge commit with: -A first version of the gui working -Moved sources to a python package -Better OOP in scan.py -Cleaning and commenting --- gui/__init__.py | 6 + gui/about.py | 53 ++ gui/backups.py | 178 +++++++ gui/main.py | 363 +++++++++++++ gui/starter.py | 28 ++ nbt/setup.py | 28 ++ region-fixer.py | 305 ----------- regionfixer.py | 475 ++++++++++++++++++ regionfixer_core/__init__.py | 2 + .../interactive.py | 70 +-- .../progressbar.py | 0 scan.py => regionfixer_core/scan.py | 459 +++++++++++------ util.py => regionfixer_core/util.py | 6 +- world.py => regionfixer_core/world.py | 148 +++--- regionfixer_gui.py | 12 + 15 files changed, 1600 insertions(+), 533 deletions(-) create mode 100644 gui/__init__.py create mode 100644 gui/about.py create mode 100644 gui/backups.py create mode 100644 gui/main.py create mode 100644 gui/starter.py create mode 100755 nbt/setup.py delete mode 100644 region-fixer.py create mode 100644 regionfixer.py create mode 100644 regionfixer_core/__init__.py rename interactive.py => regionfixer_core/interactive.py (91%) rename progressbar.py => regionfixer_core/progressbar.py (100%) rename scan.py => regionfixer_core/scan.py (54%) rename util.py => regionfixer_core/util.py (99%) rename world.py => regionfixer_core/world.py (90%) create mode 100644 regionfixer_gui.py diff --git a/gui/__init__.py b/gui/__init__.py new file mode 100644 index 0000000..b8fa9d7 --- /dev/null +++ b/gui/__init__.py @@ -0,0 +1,6 @@ +#!/usr/bin/env python +# -*- coding: utf-8 -*- + +from main import MainWindow +from backups import BackupsWindow +from starter import Starter diff --git a/gui/about.py b/gui/about.py new file mode 100644 index 0000000..1fee7b0 --- /dev/null +++ b/gui/about.py @@ -0,0 +1,53 @@ +#!/usr/bin/env python +# -*- coding: utf-8 -*- + +import wx + + +class AboutWindow(wx.Frame): + def __init__(self, parent, title = "About"): + wx.Frame.__init__(self, parent, title=title, style = wx.CLOSE_BOX | wx.RESIZE_BORDER | wx.CAPTION) + + self.about1 = wx.StaticText(self, style=wx.ALIGN_CENTER, label="Minecraft Region-Fixer (GUI)") + self.about2 = wx.StaticText(self, style=wx.ALIGN_CENTER, label="Fix problems in Minecraft worlds.") + self.about3 = wx.StaticText(self, style=wx.ALIGN_CENTER, label="Official-web:") + self.link_github = \ + wx.HyperlinkCtrl(self, wx.ID_ABOUT, + "https://github.com/Fenixin/Minecraft-Region-Fixer", + "https://github.com/Fenixin/Minecraft-Region-Fixer", + style = wx.ALIGN_CENTER) + self.about4 = wx.StaticText(self, style=wx.TE_MULTILINE | wx.ALIGN_CENTER, label="Minecraft forums post:") + self.link_minecraft_forums = \ + wx.HyperlinkCtrl(self, wx.ID_ABOUT, + "http://www.minecraftforum.net/topic/302380-minecraft-region-fixer/", + "http://www.minecraftforum.net/topic/302380-minecraft-region-fixer/", + style = wx.ALIGN_CENTER) + + self.close_button = wx.Button(self, wx.ID_CLOSE) + + + self.sizer = wx.BoxSizer(wx.VERTICAL) + self.sizer.Add(self.about1, 0, wx.ALIGN_CENTER | wx.TOP, 10) + self.sizer.Add(self.about2, 0, wx.ALIGN_CENTER) + self.sizer.Add(self.about3, 0, wx.ALIGN_CENTER| wx.TOP, 20) + self.sizer.Add(self.link_github, 0, wx.ALIGN_CENTER | wx.ALL, 5) + self.sizer.Add(self.about4, 0, wx.ALIGN_CENTER | wx.TOP, 20) + self.sizer.Add(self.link_minecraft_forums, 0, wx.ALIGN_CENTER | wx.ALL, 5) + self.sizer.Add(self.close_button, 0, wx.ALIGN_CENTER | wx.ALL, 10) + + self.SetSizerAndFit(self.sizer) + 
size = self.sizer.GetMinSize()
+        self.SetMinSize(size)
+        self.SetMaxSize(size)
+
+
+        self.Bind(wx.EVT_BUTTON, self.OnClose, self.close_button)
+
+    def OnClose(self, e):
+        self.Show(False)
+
+
+
+
+
+
diff --git a/gui/backups.py b/gui/backups.py
new file mode 100644
index 0000000..badb76c
--- /dev/null
+++ b/gui/backups.py
+#!/usr/bin/env python
+# -*- coding: utf-8 -*-
+
+import wx
+import os
+
+# TODO: this file was just copied into this module, which is a quick and
+# dirty solution. Improve it! See "Importing python modules from relative
+# paths", or organize this in a better way.
+from world import World
+
+class BackupsWindow(wx.Frame):
+    def __init__(self, parent, title):
+        wx.Frame.__init__(self, parent, title=title, size = (100,500))
+
+        # Sizer with all the elements in the window
+        self.all_sizer = wx.BoxSizer(wx.VERTICAL)
+
+        # Text with help in the top
+        self.help_text = wx.StaticText(self, style=wx.TE_MULTILINE, label="Region-Fixer will use the worlds in\nthis list in top-down order.")
+
+        # List of worlds to use as backups
+        self.world_list_box = wx.ListBox(self, size = (80, 100) )
+        #~ test_list = ["world1", "world2", "world3"]
+        test_list = []
+        self.world_list_box.Set(test_list)
+        # Here will be the worlds to use as backup
+        self.world_list = test_list[:]
+        self.world_list_text = test_list[:]
+        # Last path we used in the file dialog
+        self.last_path = ""
+
+        # Buttons
+        self.buttons_sizer = wx.BoxSizer(wx.HORIZONTAL)
+        self.add = wx.Button(self, label = "Add")
+        self.move_up = wx.Button(self, label = "Move up")
+        self.move_down = wx.Button(self, label = "Move down")
+        self.buttons_sizer.Add(self.add, 0, 0)
+        self.buttons_sizer.Add(self.move_up, 0, 0)
+        self.buttons_sizer.Add(self.move_down, 0, 0)
+
+        # Add things to the general sizer
+        self.all_sizer.Add(self.help_text, 0, wx.GROW | wx.ALL, 10)
+        self.all_sizer.Add(self.world_list_box, 1, wx.EXPAND | wx.ALL, 10)
+        self.all_sizer.Add(self.buttons_sizer, 0, wx.ALIGN_CENTER | wx.ALL, 10)
+
+        # Layout sizers
+        self.SetSizerAndFit(self.all_sizer)
+
+        # Bindings
+        self.Bind(wx.EVT_CLOSE, self.OnClose)
+        self.Bind(wx.EVT_BUTTON, self.OnAddWorld, self.add)
+        self.Bind(wx.EVT_BUTTON, self.OnMoveUp, self.move_up)
+        self.Bind(wx.EVT_BUTTON, self.OnMoveDown, self.move_down)
+
+        # Show the window, usually False, True for fast testing
+        self.Show(False)
+
+    def get_dirs(self, list_dirs):
+        """ From a list of paths return only the directories. """
+
+        tmp = []
+        for p in list_dirs:
+            if os.path.isdir(p):
+                tmp.append(p)
+        return tmp
+
+
+    def are_there_files(self, list_dirs):
+        """ Given a list of paths return True if there are
+        any files.
+
+        """
+
+        for d in list_dirs:
+            if not os.path.isdir(d):
+                return True
+        return False
+
+    def OnAddWorld(self, e):
+        """ Called when the button Add is clicked.
""" + + dlg = wx.DirDialog(self, "Choose a Minecraf world folder") + # Set the last path used + dlg.SetPath(self.last_path) + if dlg.ShowModal() == wx.ID_OK: + self.dirname = dlg.GetPath() + # Check if it's a minecraft world + w = World(self.dirname) + if not w.isworld: + error = wx.MessageDialog(self, "This directory doesn't look like a Minecraft world", "Error", wx.ICON_EXCLAMATION) + error.ShowModal() + error.Destroy() + else: + # Insert it in the ListBox + self.world_list.append(w) + index = self.world_list.index(w) + # TODO check if it's a minecraft world + self.world_list_box.InsertItems([w.name], pos = index) + + # Properly recover the last path used + self.last_path = os.path.split(dlg.GetPath())[0] + dlg.Destroy() + + def get_selected_index(self, list_box): + """ Returns the index of the selected item in a list_box. """ + + index = None + for i in range(len(self.world_list)): + if list_box.IsSelected(i): + index = i + return index + + def move_left_inlist(self, l, index): + """ Move the element in the list with index to the left. + + Return the index where the moved element is. + + """ + + tmp = l.pop(index) + index = index - 1 if index != 0 else 0 + l.insert(index, tmp) + + return index + + def move_right_inlist(self, l, index): + """ Move the element in the list with index to the right. + + Return the index where the moved element is. + + """ + + len_l = len(l) + tmp = l.pop(index) + index = index + 1 + if index == len_l: + l.append(tmp) + index = len_l - 1 + else: + l.insert(index, tmp) + + return index + + def get_names_from_worlds(self, world_list): + """ Return a list of names from a list of worlds in order. """ + + t = [] + for i in world_list: + t.append(i.name) + return t + + def OnMoveUp(self, e): + """ Move up in the world list the selected item. """ + + index = self.get_selected_index(self.world_list_box) + + if index is not None: + index = self.move_left_inlist(self.world_list, index) + #~ self.world_list_box.Set(self.world_list) + self.world_list_box.Set(self.get_names_from_worlds(self.world_list)) + self.world_list_box.Select(index) + + def OnMoveDown(self, e): + """ Move down in the world list the selected item. """ + + index = self.get_selected_index(self.world_list_box) + len_world_list = len(self.world_list) + + if index is not None: + index = self.move_right_inlist(self.world_list, index) + self.world_list_box.Set(self.get_names_from_worlds(self.world_list)) + #~ self.world_list_box.Set(self.world_list) + self.world_list_box.Select(index) + + def OnClose(self, e): + """ Ran when the user closes this window. 
""" + self.Show(False) diff --git a/gui/main.py b/gui/main.py new file mode 100644 index 0000000..cb4ea94 --- /dev/null +++ b/gui/main.py @@ -0,0 +1,363 @@ +#!/usr/bin/env python +# -*- coding: utf-8 -*- + +import wx +import os + +from backups import BackupsWindow +from scan import AsyncWorldScanner, AsyncPlayerScanner +import world +from world import World +from time import sleep +from os.path import split + +class MainWindow(wx.Frame): + def __init__(self, parent, title, backups = None): + wx.Frame.__init__(self, parent, title=title, size = (300,400)) + + self.backups = backups + + # Variables + self.last_path = "" # Last path opened + self.world = None # World to scan + + # Status bar + self.CreateStatusBar() + + # Create menu + filemenu=wx.Menu() + windowsmenu = wx.Menu() + helpmenu = wx.Menu() + + # Add elements to filemenu + menuOpen = filemenu.Append(wx.ID_OPEN, "&Open", "Open a Minecraft world") + filemenu.AppendSeparator() + menuExit = filemenu.Append(wx.ID_EXIT, "E&xit","Terminate program") + + # Add elements to helpmenu + menuAbout = helpmenu.Append(wx.ID_ABOUT, "&About", "Information about this program") + + # Add elements to windowsmenu + menuBackups = windowsmenu.Append(-1, "&Backups", "Manage list of backups") + menuAdvanced = windowsmenu.Append(-1, "A&dvanced actions", "Manage list of backups") + + # Create a menu bar + menuBar = wx.MenuBar() + menuBar.Append(filemenu,"&File") + menuBar.Append(windowsmenu,"&View") + menuBar.Append(helpmenu,"&Help") + self.SetMenuBar(menuBar) + + # Create elements in the window + # First row: + + self.status_text = wx.StaticText(self, style=wx.TE_MULTILINE, label="test") + self.open_button = wx.Button(self, label="Open") + self.scan_button = wx.Button(self, label="Scan") + self.scan_button.Disable() + self.firstrow_sizer = wx.BoxSizer(wx.HORIZONTAL) + self.firstrow_sizer.Add(self.status_text, 1, wx.ALIGN_CENTER) + self.firstrow_sizer.Add(self.open_button, 0, wx.EXPAND) + self.firstrow_sizer.Add(self.scan_button, 0, wx.EXPAND) + self.firstrow_static_box = wx.StaticBox(self, label = "World loaded") + self.firstrow_static_box_sizer = wx.StaticBoxSizer(self.firstrow_static_box) + self.firstrow_static_box_sizer.Add(self.firstrow_sizer, 1, wx.EXPAND) + + + # Second row: + self.proc_info_text = wx.StaticText(self, label="Threads to use: ") + self.proc_text = wx.TextCtrl(self, value="1") + self.el_info_text = wx.StaticText(self, label="Entity limit: " ) + self.el_text = wx.TextCtrl(self, value="150") + self.secondrow_sizer = wx.BoxSizer(wx.HORIZONTAL) + self.secondrow_sizer.Add(self.proc_info_text, 0, wx.ALIGN_CENTER) + self.secondrow_sizer.Add(self.proc_text, 0, wx.ALIGN_LEFT) + self.secondrow_sizer.Add(self.el_info_text, 0, wx.ALIGN_CENTER) + self.secondrow_sizer.Add(self.el_text, 0, wx.ALIGN_RIGHT) + self.secondrow_static_box_sizer = wx.StaticBoxSizer(wx.StaticBox(self, label = "Scan options")) + self.secondrow_static_box_sizer.Add(self.secondrow_sizer, 1, wx.EXPAND) + + # Third row: + # Note: In order to use a static box add it directly to a + # static box sizer and add to the same sizer it's contents + self.results_text = wx.TextCtrl(self, style=wx.TE_READONLY | wx.TE_MULTILINE, value="Scan the world to get results", size = (500,200)) + # Lets try to create a monospaced font: + ffont = wx.Font(9, wx.FONTFAMILY_MODERN, wx.FONTSTYLE_NORMAL, wx.FONTWEIGHT_NORMAL) +# print ffont.IsFixedWidth() + textattr = wx.TextAttr(font = ffont) + self.results_text.SetFont(ffont) + self.results_text_box = wx.StaticBox(self, label="Results", size = (100,100)) + 
self.results_text_box_sizer = wx.StaticBoxSizer(self.results_text_box) + self.results_text_box_sizer.Add(self.results_text, 1, wx.EXPAND) + + self.delete_all_chunks_button = wx.Button(self, label = "Delete all bad chunks") + self.replace_all_chunks_button = wx.Button(self, label = "Replace all bad chunks (using backups)") + self.delete_all_regions_button = wx.Button(self, label = "Delete all bad regions") + self.replace_all_regions_button = wx.Button(self, label = "Replace all bad regions (using backups)") + self.update_delete_buttons_status(False) + self.update_replace_buttons_status(False) + + self.thirdrow_sizer = wx.BoxSizer(wx.HORIZONTAL) + self.thirdrow_actions_box = wx.StaticBox(self, label="Actions", size = (-1,-1)) + self.thirdrow_buttons_box_sizer = wx.StaticBoxSizer(self.thirdrow_actions_box) + self.thirdrow_buttons_sizer = wx.BoxSizer(wx.VERTICAL) + self.thirdrow_buttons_sizer.Add(self.delete_all_chunks_button, 1, wx.EXPAND) + self.thirdrow_buttons_sizer.Add(self.replace_all_chunks_button, 1, wx.EXPAND) + self.thirdrow_buttons_sizer.Add(self.delete_all_regions_button, 1, wx.EXPAND) + self.thirdrow_buttons_sizer.Add(self.replace_all_regions_button, 1, wx.EXPAND) + self.thirdrow_buttons_box_sizer.Add(self.thirdrow_buttons_sizer, 1, wx.EXPAND) + self.thirdrow_sizer.Add(self.results_text_box_sizer, 1, wx.EXPAND) + self.thirdrow_sizer.Add(self.thirdrow_buttons_box_sizer, 0, wx.EXPAND) + + # All together now + self.frame_sizer = wx.BoxSizer(wx.VERTICAL) + self.frame_sizer.Add(self.firstrow_static_box_sizer, 0, wx.EXPAND) + self.frame_sizer.Add(self.secondrow_static_box_sizer, 0, wx.EXPAND) + self.frame_sizer.Add(self.thirdrow_sizer, 1, wx.EXPAND) + + # Layout sizers + self.SetSizerAndFit(self.frame_sizer) + + self.frame_sizer.Fit(self) + + # Bindings + self.Bind(wx.EVT_MENU, self.OnAbout, menuAbout) + self.Bind(wx.EVT_MENU, self.OnOpen, menuOpen) + self.Bind(wx.EVT_MENU, self.OnBackups, menuBackups) + self.Bind(wx.EVT_MENU, self.OnExit, menuExit) + self.Bind(wx.EVT_BUTTON, self.OnScan, self.scan_button) + self.Bind(wx.EVT_BUTTON, self.OnOpen, self.open_button) + self.Bind(wx.EVT_BUTTON, self.OnDeleteChunks, self.delete_all_chunks_button) + self.Bind(wx.EVT_BUTTON, self.OnReplaceChunks, self.replace_all_chunks_button) + self.Bind(wx.EVT_BUTTON, self.OnDeleteRegions, self.delete_all_regions_button) + self.Bind(wx.EVT_BUTTON, self.OnReplaceRegions, self.replace_all_regions_button) + + self.Show(True) + + def OnExit(self, e): + self.Close(True) + + def OnBackups(self, e): + self.backups.Show(True) + + def OnAbout(self, e): + self.about.Show(True) + + def OnOpen(self, e): + dlg = wx.DirDialog(self, "Choose a Minecraf world folder") + # Set the last path used + dlg.SetPath(self.last_path) + if dlg.ShowModal() == wx.ID_OK: + self.dirname = dlg.GetPath() + # Check if it's a minecraft world + w = World(self.dirname) + if not w.isworld: + error = wx.MessageDialog(self, "This directory doesn't look like a Minecraft world", "Error", wx.ICON_EXCLAMATION) + error.ShowModal() + error.Destroy() + else: + # Insert it in the ListBox + self.world = w + self.update_world_status(self.world) + + # Properly recover the last path used + self.last_path = os.path.split(dlg.GetPath())[0] + dlg.Destroy() + + # Rest the results textctrl + self.results_text.SetValue("") + + def OnDeleteChunks(self, e): + progressdlg = wx.ProgressDialog("Removing chunks", "Removing...", + self.world.count_regions(), self, + style = wx.PD_ELAPSED_TIME | + wx.PD_ESTIMATED_TIME | + wx.PD_REMAINING_TIME | + wx.PD_CAN_SKIP | + 
wx.PD_CAN_ABORT |
+                                        wx.PD_AUTO_HIDE |
+                                        wx.PD_SMOOTH
+                                        )
+        progressdlg.Pulse()
+        self.world.remove_problematic_chunks(world.CHUNK_CORRUPTED)
+        progressdlg.Pulse()
+        print "1"
+        self.world.remove_problematic_chunks(world.CHUNK_SHARED_OFFSET)
+        progressdlg.Pulse()
+        print "2"
+        self.world.remove_problematic_chunks(world.CHUNK_WRONG_LOCATED)
+        progressdlg.Pulse()
+        print "3"
+        self.world.remove_problematic_chunks(world.CHUNK_TOO_MANY_ENTITIES)
+        progressdlg.Pulse()
+        print "4"
+        progressdlg.Destroy()
+
+        self.update_delete_buttons_status(False)
+
+    def OnDeleteRegions(self, e):
+        progressdlg = wx.ProgressDialog("Removing regions", "Removing...",
+                                        self.world.count_regions(), self,
+                                        style = wx.PD_ELAPSED_TIME |
+                                        wx.PD_ESTIMATED_TIME |
+                                        wx.PD_REMAINING_TIME |
+                                        #~ wx.PD_CAN_SKIP |
+                                        #~ wx.PD_CAN_ABORT |
+                                        wx.PD_AUTO_HIDE |
+                                        wx.PD_SMOOTH
+                                        )
+
+        self.world.remove_problematic_regions(world.REGION_TOO_SMALL)
+        progressdlg.Pulse()
+        self.world.remove_problematic_regions(world.REGION_UNREADABLE)
+        progressdlg.Pulse()
+        progressdlg.Destroy()
+
+        self.update_delete_buttons_status(False)
+        self.update_replace_buttons_status(False)
+
+    def OnReplaceChunks(self, e):
+        progressdlg = wx.ProgressDialog("Replacing chunks", "Replacing...",
+                                        self.world.count_regions(), self,
+                                        style = wx.PD_ELAPSED_TIME |
+                                        wx.PD_ESTIMATED_TIME |
+                                        wx.PD_REMAINING_TIME |
+                                        #~ wx.PD_CAN_SKIP |
+                                        #~ wx.PD_CAN_ABORT |
+                                        wx.PD_AUTO_HIDE |
+                                        wx.PD_SMOOTH
+                                        )
+
+        backups = self.backups.world_list
+
+        self.world.replace_problematic_chunks(world.CHUNK_CORRUPTED, backups)
+        progressdlg.Pulse()
+        self.world.replace_problematic_chunks(world.CHUNK_SHARED_OFFSET, backups)
+        progressdlg.Pulse()
+        self.world.replace_problematic_chunks(world.CHUNK_WRONG_LOCATED, backups)
+        progressdlg.Pulse()
+        self.world.replace_problematic_chunks(world.CHUNK_TOO_MANY_ENTITIES, backups)
+        progressdlg.Pulse()
+        progressdlg.Destroy()
+
+        self.update_delete_buttons_status(False)
+        self.update_replace_buttons_status(False)
+
+    def OnReplaceRegions(self, e):
+        progressdlg = wx.ProgressDialog("Replacing regions", "Replacing...",
+                                        self.world.count_regions(), self,
+                                        style = wx.PD_ELAPSED_TIME |
+                                        wx.PD_ESTIMATED_TIME |
+                                        wx.PD_REMAINING_TIME |
+                                        #~ wx.PD_CAN_SKIP |
+                                        #~ wx.PD_CAN_ABORT |
+                                        wx.PD_AUTO_HIDE |
+                                        wx.PD_SMOOTH
+                                        )
+
+        self.world.remove_problematic_regions(world.REGION_TOO_SMALL)
+        progressdlg.Pulse()
+        self.world.remove_problematic_regions(world.REGION_UNREADABLE)
+        progressdlg.Pulse()
+        progressdlg.Destroy()
+
+        self.update_delete_buttons_status(False)
+        self.update_replace_buttons_status(False)
+
+
+    def OnScan(self, e):
+        # Let's simulate the options stuff
+        class Options(object):
+            def __init__(self, main):
+                self.entity_limit = int(main.el_text.GetValue())
+                self.processes = int(main.proc_text.GetValue())
+                self.verbose = True
+                self.delete_entities = False
+                self.gui = True
+
+        options = Options(self)
+        progressdlg = wx.ProgressDialog("Scanning...", "Scanning...",
+                                        self.world.count_regions(), self,
+                                        style = wx.PD_ELAPSED_TIME |
+                                        wx.PD_ESTIMATED_TIME |
+                                        wx.PD_REMAINING_TIME |
+                                        wx.PD_CAN_SKIP |
+                                        wx.PD_CAN_ABORT |
+                                        wx.PD_AUTO_HIDE |
+                                        wx.PD_SMOOTH)
+        options.progressdlg = progressdlg
+
+        ws = AsyncWorldScanner(self.world, options.processes,
+                               options.entity_limit,
+                               options.delete_entities)
+        ws.scan()
+        counter = 0
+        while not ws.finished:
+            sleep(0.01)
+            result = ws.get_last_result()
+            rs = ws.current_regionset
+            if result:
counter += 1 + progressdlg.Update(counter, + "Scanning regions from: " + rs.get_name()) + + progressdlg.Destroy() + + progressdlg = wx.ProgressDialog("Scanning...", "Scanning...", + len(self.world.players), self, + style = wx.PD_ELAPSED_TIME | + wx.PD_ESTIMATED_TIME | + wx.PD_REMAINING_TIME | + wx.PD_CAN_SKIP | + wx.PD_CAN_ABORT | + wx.PD_AUTO_HIDE | + wx.PD_SMOOTH) + + ps = AsyncPlayerScanner(self.world.players, options.processes) + ps.scan() + counter = 0 + last_player = "" + while not ps.finished: + sleep(0.001) + result = ps.get_last_result() + if result: + counter += 1 + last_player = result.filename.split('.')[0] + progressdlg.Update(counter, + "Last player scanned: " + last_player) + + progressdlg.Destroy() + + # TODO! We need to make every module truly independent. + # We need to make better way to separate print of text to + # console and scanning + self.results_text.SetValue(self.world.generate_report(True)) + self.update_delete_buttons_status(True) + + + def update_delete_buttons_status(self, status): + + if status: + self.delete_all_chunks_button.Enable() + self.delete_all_regions_button.Enable() + else: + self.delete_all_chunks_button.Disable() + self.delete_all_regions_button.Disable() + + def update_replace_buttons_status(self, status): + + if status: + self.replace_all_chunks_button.Enable() + self.replace_all_regions_button.Enable() + else: + self.replace_all_chunks_button.Disable() + self.replace_all_regions_button.Disable() + + + def update_world_status(self, world): + self.status_text.SetLabel(world.path) + self.scan_button.Enable() diff --git a/gui/starter.py b/gui/starter.py new file mode 100644 index 0000000..7353136 --- /dev/null +++ b/gui/starter.py @@ -0,0 +1,28 @@ +#!/usr/bin/env python +# -*- coding: utf-8 -*- + +import wx + +from main import MainWindow +from backups import BackupsWindow +from about import AboutWindow + + +class Starter(object): + def __init__(self): + """ Create the windows and set some variables. """ + + self.app = wx.App(False) + + self.frame = MainWindow(None, "Region-Fixer-GUI") + # NOTE: It's very important that the MainWindow is parent of all others windows + self.backups = BackupsWindow(self.frame, "Backups") + self.about = AboutWindow(self.frame, "About") + self.frame.backups = self.backups + self.frame.about = self.about + + + def run(self): + """ Run the app main loop. 
""" + + self.app.MainLoop() diff --git a/nbt/setup.py b/nbt/setup.py new file mode 100755 index 0000000..2aaea80 --- /dev/null +++ b/nbt/setup.py @@ -0,0 +1,28 @@ +#!/usr/bin/env python + +from setuptools import setup +from nbt import VERSION + +setup( + name = 'NBT', + version = ".".join(str(x) for x in VERSION), + description = 'Named Binary Tag Reader/Writer', + author = 'Thomas Woolford', + author_email = 'woolford.thomas@gmail.com', + url = 'http://github.com/twoolie/NBT', + license = open("LICENSE.txt").read(), + long_description = open("README.txt").read(), + packages = ['nbt'], + classifiers = [ + "Development Status :: 5 - Production/Stable", + "Intended Audience :: Developers", + "License :: OSI Approved :: MIT License", + "Operating System :: OS Independent", + "Programming Language :: Python", + "Programming Language :: Python :: 2.6", + "Programming Language :: Python :: 2.7", + "Programming Language :: Python :: 3", + "Topic :: Games/Entertainment", + "Topic :: Software Development :: Libraries :: Python Modules" + ] +) diff --git a/region-fixer.py b/region-fixer.py deleted file mode 100644 index 911f87d..0000000 --- a/region-fixer.py +++ /dev/null @@ -1,305 +0,0 @@ -#!/usr/bin/env python -# -*- coding: utf-8 -*- - -# -# Region Fixer. -# Fix your region files with a backup copy of your Minecraft world. -# Copyright (C) 2011 Alejandro Aguilera (Fenixin) -# https://github.com/Fenixin/Minecraft-Region-Fixer -# -# This program is free software: you can redistribute it and/or modify -# it under the terms of the GNU General Public License as published by -# the Free Software Foundation, either version 3 of the License, or -# (at your option) any later version. -# -# This program is distributed in the hope that it will be useful, -# but WITHOUT ANY WARRANTY; without even the implied warranty of -# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the -# GNU General Public License for more details. -# -# You should have received a copy of the GNU General Public License -# along with this program. If not, see . -# - -from multiprocessing import freeze_support -from optparse import OptionParser, OptionGroup -from getpass import getpass -import sys - -import world -from scan import scan_regionset, scan_world -from interactive import interactive_loop -from util import entitle, is_bare_console, parse_world_list, parse_paths, parse_backup_list - -def delete_bad_chunks(options, scanned_obj): - """ Takes a scanned object (world object or regionset object) and - the options given to region-fixer, it deletes all the chunks with - problems iterating through all the possible problems. """ - print # a blank line - options_delete = [options.delete_corrupted, options.delete_wrong_located, options.delete_entities, options.delete_shared_offset] - deleting = zip(options_delete, world.CHUNK_PROBLEMS) - for delete, problem in deleting: - status = world.CHUNK_STATUS_TEXT[problem] - total = scanned_obj.count_chunks(problem) - if delete: - if total: - text = ' Deleting chunks with status: {0} '.format(status) - print "{0:#^60}".format(text) - counter = scanned_obj.remove_problematic_chunks(problem) - - print "\nDeleted {0} chunks with status: {1}".format(counter,status) - else: - print "No chunks to delete with status: {0}".format(status) - -def delete_bad_regions(options, scanned_obj): - """ Takes an scanned object (world object or regionset object) and - the options give to region-fixer, it deletes all the region files - with problems iterating through all the possible problems. 
""" - print # a blank line - options_delete = [options.delete_too_small] - deleting = zip(options_delete, world.REGION_PROBLEMS) - for delete, problem in deleting: - status = world.REGION_STATUS_TEXT[problem] - total = scanned_obj.count_regions(problem) - if delete: - if total: - text = ' Deleting regions with status: {0} '.format(status) - print "{0:#^60}".format(text) - counter = scanned_obj.remove_problematic_regions(problem) - - print "Deleted {0} regions with status: {1}".format(counter,status) - else: - print "No regions to delete with status: {0}".format(status) - -def main(): - - usage = 'usage: %prog [options] ... ...' - epilog = 'Copyright (C) 2011 Alejandro Aguilera (Fenixin) \ - https://github.com/Fenixin/Minecraft-Region-Fixer \ - This program comes with ABSOLUTELY NO WARRANTY; for details see COPYING.txt. This is free software, and you are welcome to redistribute it under certain conditions; see COPYING.txt for details.' - - parser = OptionParser(description='Script to check the integrity of Minecraft worlds and fix them when possible. It uses NBT by twoolie. Author: Alejandro Aguilera (Fenixin)',\ - prog = 'region-fixer', version='0.1.3', usage=usage, epilog=epilog) - - parser.add_option('--backups', '-b', help = 'List of backup directories of the Minecraft world to use to fix corrupted chunks and/or wrong located chunks. Warning! Region-Fixer is not going to check if it\'s the same world, be careful! This argument can be a comma separated list (but never with spaces between elements!). This option can be only used scanning one world.',\ - metavar = '', type = str, dest = 'backups', default = None) - - parser.add_option('--replace-corrupted','--rc', help = 'Tries to replace the corrupted chunks using the backup directories. This option can be only used scanning one world.',\ - default = False, dest = 'replace_corrupted', action='store_true') - - parser.add_option('--replace-wrong-located','--rw', help = 'Tries to replace the wrong located chunks using the backup directories. This option can be only used scanning one world.',\ - default = False, dest = 'replace_wrong_located', action='store_true') - - parser.add_option('--replace-entities','--re', help = 'Tries to replace the chunks with too many entities using the backup directories. This option can be only used scanning one world.',\ - default = False, dest = 'replace_entities', action='store_true') - - parser.add_option('--replace-shared-offset','--rs', help = 'Tries to replace the chunks with a shared offset using the backup directories. This option can be only used scanning one world.',\ - default = False, dest = 'replace_shared_offset', action='store_true') - - parser.add_option('--replace-too-small','--rt', help = 'Tries to replace the region files that are too small to be actually be a region file using the backup directories. This option can be only used scanning one world.',\ - default = False, dest = 'replace_too_small', action='store_true') - - parser.add_option('--delete-corrupted', '--dc', help = '[WARNING!] This option deletes! This option will delete all the corrupted chunks. Used with --replace-corrupted or --replace-wrong-located it will delete all the non-replaced chunks.',\ - action = 'store_true', default = False) - - parser.add_option('--delete-wrong-located', '--dw', help = '[WARNING!] This option deletes! 
The same as --delete-corrupted but for wrong located chunks',\ - action = 'store_true', default = False, dest='delete_wrong_located') - - parser.add_option('--delete-entities', '--de', help = '[WARNING!] This option deletes! This option deletes ALL the entities in chunks with more entities than --entity-limit (300 by default). In a Minecraft entities are mostly mobs and items dropped in the grond, items in chests and other stuff won\'t be touched. Read the README for more info. Region-Fixer will delete the entities while scanning so you can stop and resume the process',\ - action = 'store_true', default = False, dest = 'delete_entities') - - parser.add_option('--delete-shared-offset', '--ds', help = '[WARNING!] This option deletes! This option will delete all the chunk with status shared offset. It will remove the region header for the false chunk, note that you don\'t loos any chunk doing this.',\ - action = 'store_true', default = False, dest = 'delete_shared_offset') - - parser.add_option('--delete-too-small', '--dt', help = '[WARNING!] This option deletes! Removes any region files found to be too small to actually be a region file.',\ - dest ='delete_too_small', default = False, action = 'store_true') - - parser.add_option('--entity-limit', '--el', help = 'Specify the limit for the --delete-entities option (default = 300).',\ - dest = 'entity_limit', default = 300, action = 'store', type = int) - - parser.add_option('--processes', '-p', help = 'Set the number of workers to use for scanning. (defaulta = 1, not use multiprocessing at all)',\ - action = 'store', type = int, default = 1) - - parser.add_option('--verbose', '-v', help='Don\'t use a progress bar, instead print a line per scanned region file with results information. The letters mean c: corrupted; w: wrong located; t: total of chunksm; tme: too many entities problem',\ - action='store_true', default = False) - - parser.add_option('--interactive', '-i',help='Enter in interactive mode, where you can scan, see the problems, and fix them in a terminal like mode',\ - dest = 'interactive',default = False, action='store_true',) - - parser.add_option('--log', '-l',help='Saves a log of all the problems found in the spicifyed file. The log file contains all the problems found with this information: region file, chunk coordinates and problem. Use \'-\' as name to show the log at the end of the scan.',\ - type = str, default = None, dest = 'summary') - - (options, args) = parser.parse_args() - - if is_bare_console(): - print - print "Minecraft Region Fixer is a command line aplication, if you want to run it" - print "you need to open a command line (cmd.exe in the start menu in windows 7)." - print - getpass("Press enter to continue:") - return 1 - - # Args are world_paths and region files - if not args: - parser.error("No world paths or region files specified! 
Use --help for a complete list of options.") - - world_list, region_list = parse_paths(args) - - if not (world_list or region_list): - print ("Error: No worlds or region files to scan!") - return 1 - - # Check basic options compatibilities - any_chunk_replace_option = options.replace_corrupted or options.replace_wrong_located or options.replace_entities or options.replace_shared_offset - any_chunk_delete_option = options.delete_corrupted or options.delete_wrong_located or options.delete_entities or options.delete_shared_offset - any_region_replace_option = options.replace_too_small - any_region_delete_option = options.delete_too_small - - if options.interactive or options.summary: - if any_chunk_replace_option or any_region_replace_option: - parser.error("Can't use the options --replace-* , --delete-* and --log with --interactive. You can choose all this while in the interactive mode.") - - else: # not options.interactive - if options.backups: - if not any_chunk_replace_option and not any_region_replace_option: - parser.error("The option --backups needs at least one of the --replace-* options") - else: - if (len(region_list.regions) > 0): - parser.error("You can't use the replace options while scanning sparate region files. The input should be only one world and you intruduced {0} individual region files.".format(len(region_list.regions))) - elif (len(world_list) > 1): - parser.error("You can't use the replace options while scanning multiple worlds. The input should be only one world and you intruduced {0} worlds.".format(len(world_list))) - - if not options.backups and any_chunk_replace_option: - parser.error("The options --replace-* need the --backups option") - - if options.entity_limit < 0: - parser.error("The entity limit must be at least 0!") - - print "\nWelcome to Region Fixer!" - print "(version: {0})".format(parser.version) - - # do things with the option options args - if options.backups: # create a list of worlds containing the backups of the region files - backup_worlds = parse_backup_list(options.backups) - if not backup_worlds: - print "[WARNING] No valid backup directories found, won't fix any chunk." - else: - backup_worlds = [] - - - # The program starts - if options.interactive: - # TODO: WARNING, NEEDS CHANGES FOR WINDOWS. 
check while making the windows exe - c = interactive_loop(world_list, region_list, options, backup_worlds) - c.cmdloop() - - else: - summary_text = "" - # scan the separate region files - if len(region_list.regions) > 0: - print entitle("Scanning separate region files", 0) - scan_regionset(region_list, options) - - print region_list.generate_report(True) - - # delete chunks - delete_bad_chunks(options, region_list) - - # delete region files - delete_bad_regions(options, region_list) - - # verbose log - if options.summary: - summary_text += "\n" - summary_text += entitle("Separate region files") - summary_text += "\n" - t = region_list.summary() - if t: - summary_text += t - else: - summary_text += "No problems found.\n\n" - - # scan all the world folders - for world_obj in world_list: - print entitle(' Scanning world: {0} '.format(world_obj.get_name()),0) - - scan_world(world_obj, options) - - print world_obj.generate_report(standalone = True) - corrupted, wrong_located, entities_prob, shared_prob, total_chunks, too_small_region, unreadable_region, total_regions = world_obj.generate_report(standalone = False) - print - - # replace chunks - if backup_worlds and not len(world_list) > 1: - options_replace = [options.replace_corrupted, options.replace_wrong_located, options.replace_entities, options.replace_shared_offset] - replacing = zip(options_replace, world.CHUNK_PROBLEMS_ITERATOR) - for replace, (problem, status, arg) in replacing: - if replace: - total = world_obj.count_chunks(problem) - if total: - text = " Replacing chunks with status: {0} ".format(status) - print "{0:#^60}".format(text) - fixed = world_obj.replace_problematic_chunks(backup_worlds, problem, options) - print "\n{0} replaced of a total of {1} chunks with status: {2}".format(fixed, total, status) - else: print "No chunks to replace with status: {0}".format(status) - - elif any_chunk_replace_option and not backup_worlds: - print "Info: Won't replace any chunk." - print "No backup worlds found, won't replace any chunks/region files!" - elif any_chunk_replace_option and backup_worlds and len(world_list) > 1: - print "Info: Won't replace any chunk." - print "Can't use the replace options while scanning more than one world!" - - # replace region files - if backup_worlds and not len(world_list) > 1: - options_replace = [options.replace_too_small] - replacing = zip(options_replace, world.REGION_PROBLEMS_ITERATOR) - for replace, (problem, status, arg) in replacing: - if replace: - total = world_obj.count_regions(problem) - if total: - text = " Replacing regions with status: {0} ".format(status) - print "{0:#^60}".format(text) - fixed = world_obj.replace_problematic_regions(backup_worlds, problem, options) - print "\n{0} replaced of a total of {1} regions with status: {2}".format(fixed, total, status) - else: print "No region to replace with status: {0}".format(status) - - elif any_region_replace_option and not backup_worlds: - print "Info: Won't replace any regions." - print "No valid backup worlds found, won't replace any chunks/region files!" - print "Note: You probably inserted some backup worlds with the backup option but they are probably no valid worlds, the most common issue is wrong path." - elif any_region_replace_option and backup_worlds and len(world_list) > 1: - print "Info: Won't replace any regions." - print "Can't use the replace options while scanning more than one world!" 
- - # delete chunks - delete_bad_chunks(options, world_obj) - - # delete region files - delete_bad_regions(options, world_obj) - - # print a summary for this world - if options.summary: - summary_text += world_obj.summary() - - # verbose log text - if options.summary == '-': - print "\nPrinting log:\n" - print summary_text - elif options.summary != None: - try: - f = open(options.summary, 'w') - f.write(summary_text) - f.write('\n') - f.close() - print "Log file saved in \'{0}\'.".format(options.summary) - except: - print "Something went wrong while saving the log file!" - - return 0 - - -if __name__ == '__main__': - freeze_support() - value = main() - sys.exit(value) diff --git a/regionfixer.py b/regionfixer.py new file mode 100644 index 0000000..a995f53 --- /dev/null +++ b/regionfixer.py @@ -0,0 +1,475 @@ +#!/usr/bin/env python +# -*- coding: utf-8 -*- + +# +# Region Fixer. +# Fix your region files with a backup copy of your Minecraft world. +# Copyright (C) 2011 Alejandro Aguilera (Fenixin) +# https://github.com/Fenixin/Minecraft-Region-Fixer +# +# This program is free software: you can redistribute it and/or modify +# it under the terms of the GNU General Public License as published by +# the Free Software Foundation, either version 3 of the License, or +# (at your option) any later version. +# +# This program is distributed in the hope that it will be useful, +# but WITHOUT ANY WARRANTY; without even the implied warranty of +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the +# GNU General Public License for more details. +# +# You should have received a copy of the GNU General Public License +# along with this program. If not, see . +# + +from multiprocessing import freeze_support +from optparse import OptionParser, OptionGroup +from getpass import getpass +import sys + +import world +from scan import console_scan_world, AsyncRegionsetScanner,\ + console_scan_regionset +from interactive import InteractiveLoop +from util import entitle, is_bare_console, parse_world_list, parse_paths, parse_backup_list +from time import sleep +import progressbar + + +class FractionWidget(progressbar.ProgressBarWidget): + """ Convenience class to use the progressbar.py """ + def __init__(self, sep=' / '): + self.sep = sep + + def update(self, pbar): + return '%2d%s%2d' % (pbar.currval, self.sep, pbar.maxval) + + +def delete_bad_chunks(options, scanned_obj): + """ Takes a scanned object (world object or regionset object) and + the options given to region-fixer, it deletes all the chunks with + problems iterating through all the possible problems. """ + print # a blank line + options_delete = [options.delete_corrupted, options.delete_wrong_located, options.delete_entities, options.delete_shared_offset] + deleting = zip(options_delete, world.CHUNK_PROBLEMS) + for delete, problem in deleting: + status = world.CHUNK_STATUS_TEXT[problem] + total = scanned_obj.count_chunks(problem) + if delete: + if total: + text = ' Deleting chunks with status: {0} '.format(status) + print "{0:#^60}".format(text) + counter = scanned_obj.remove_problematic_chunks(problem) + + print "\nDeleted {0} chunks with status: {1}".format(counter,status) + else: + print "No chunks to delete with status: {0}".format(status) + +def delete_bad_regions(options, scanned_obj): + """ Takes an scanned object (world object or regionset object) and + the options give to region-fixer, it deletes all the region files + with problems iterating through all the possible problems. 
""" + print # a blank line + options_delete = [options.delete_too_small] + deleting = zip(options_delete, world.REGION_PROBLEMS) + for delete, problem in deleting: + status = world.REGION_STATUS_TEXT[problem] + total = scanned_obj.count_regions(problem) + if delete: + if total: + text = ' Deleting regions with status: {0} '.format(status) + print "{0:#^60}".format(text) + counter = scanned_obj.remove_problematic_regions(problem) + + print "Deleted {0} regions with status: {1}".format(counter,status) + else: + print "No regions to delete with status: {0}".format(status) + + +def main(): + + usage = ('usage: \n%prog [options] ' + ' ... ...') + epilog = ('Copyright (C) 2011 Alejandro Aguilera (Fenixin)\n' + 'https://github.com/Fenixin/Minecraft-Region-Fixer\n' + 'This program comes with ABSOLUTELY NO WARRANTY; for ' + 'details see COPYING.txt. This is free software, and you ' + 'are welcome to redistribute it under certain conditions; ' + 'see COPYING.txt for details.') + + parser = OptionParser(description=('Program to check the integrity of ' + 'Minecraft worlds and fix them when ' + 'possible. It uses NBT by twoolie. ' + 'Author: Alejandro Aguilera (Fenixin)'), + prog='region_fixer', + version='0.1.3', + usage=usage, + epilog=epilog) + + add_option = parser.add_option + + add_option('--backups', + '-b', + help=('List of backup directories of the Minecraft world ' + 'to use to fix corrupted chunks and/or wrong located ' + 'chunks. Warning! Region-Fixer is not going to check if' + 'it\'s the same world, be careful! This argument can be a' + ' comma separated list (but never with spaces between ' + 'elements!). This option can be only used scanning one ' + 'world.'), + metavar='', + type=str, + dest='backups', + default=None) + + add_option('--replace-corrupted', + '--rc', + help='Tries to replace the corrupted chunks using the backup' + ' directories. This option can be only used scanning one' + ' world.', + default=False, + dest='replace_corrupted', + action='store_true') + + add_option('--replace-wrong-located', + '--rw', + help='Tries to replace the wrong located chunks using the ' + 'backup directories. This option can be only used scanning' + ' one world.', + default=False, + dest='replace_wrong_located', + action='store_true') + + add_option('--replace-entities', + '--re', + help='Tries to replace the chunks with too many entities using ' + 'the backup directories. This option can be only used ' + 'scanning one world.', + default=False, + dest='replace_entities', + action='store_true') + + add_option('--replace-shared-offset', + '--rs', + help='Tries to replace the chunks with a shared offset using ' + 'the backup directories. This option can be only used' + 'scanning one world.', + default=False, + dest='replace_shared_offset', + action='store_true') + + add_option('--replace-too-small', + '--rt', + help='Tries to replace the region files that are too small to ' + 'be actually be a region file using the backup ' + 'directories. This option can be only used scanning one ' + 'world.', + default=False, + dest='replace_too_small', + action='store_true') + + add_option('--delete-corrupted', + '--dc', + help='[WARNING!] This option deletes! This option will delete ' + 'all the corrupted chunks. Used with --replace-corrupted ' + 'or --replace-wrong-located it will delete all the ' + 'non-replaced chunks.', + action='store_true', + default=False) + + add_option('--delete-wrong-located', + '--dw', + help=('[WARNING!] This option deletes!' 
+ 'The same as --delete-corrupted but for wrong ' + 'located chunks'), + action='store_true', + default=False, + dest='delete_wrong_located') + + add_option('--delete-entities', + '--de', + help='[WARNING!] This option deletes! This option deletes ALL ' + 'the entities in chunks with more entities than ' + '--entity-limit (300 by default). In a Minecraft ' + 'entities are mostly mobs and items dropped in the ' + 'grond, items in chests and other stuff won\'t be ' + 'touched. Read the README for more info. Region-Fixer ' + 'will delete the entities while scanning so you can ' + 'stop and resume the process', + action='store_true', + default=False, + dest='delete_entities') + + add_option('--delete-shared-offset', + '--ds', + help='[WARNING!] This option deletes! This option will delete ' + 'all the chunk with status shared offset. It will remove ' + 'the region header for the false chunk, note that you ' + 'don\'t loos any chunk doing this.', + action='store_true', + default=False, + dest='delete_shared_offset') + + add_option('--delete-too-small', + '--dt', + help='[WARNING!] This option deletes! Removes any region files ' + 'found to be too small to actually be a region file.', + dest='delete_too_small', + default=False, + action='store_true') + + add_option('--entity-limit', + '--el', + help='Specify the limit for the --delete-entities option ' + '(default = 300).', + dest='entity_limit', + default=300, + action='store', + type=int) + + add_option('--processes', + '-p', + help='Set the number of workers to use for scanning. (defaulta ' + '= 1, not use multiprocessing at all)', + action='store', + type=int, + default=1) + + add_option('--verbose', + '-v', + help='Don\'t use a progress bar, instead print a line per ' + 'scanned region file with results information. The ' + 'letters mean c: corrupted; w: wrong located; t: total of ' + 'chunksm; tme: too many entities problem', + action='store_true', + default=False) + + add_option('--interactive', + '-i', + help='Enter in interactive mode, where you can scan, see the ' + 'problems, and fix them in a terminal like mode', + dest='interactive', + default=False, + action='store_true',) + + add_option('--log', + '-l', + help='Saves a log of all the problems found in the spicifyed ' + 'file. The log file contains all the problems found with ' + 'this information: region file, chunk coordinates and ' + 'problem. Use \'-\' as name to show the log at the end ' + 'of the scan.', + type=str, + default=None, + dest='summary') + + (options, args) = parser.parse_args() + o = options + + if is_bare_console(): + print + print "Minecraft Region Fixer is a command line aplication, if you want to run it" + print "you need to open a command line (cmd.exe in the start menu in windows 7)." + print + getpass("Press enter to continue:") + return 1 + + # Args are world_paths and region files + if not args: + parser.error('No world paths or region files specified! 
Use '
+                     '--help for a complete list of options.')
+
+    world_list, regionset = parse_paths(args)
+
+    if not (world_list or regionset):
+        print ("Error: No worlds or region files to scan!")
+        return 1
+
+    # Check basic options compatibilities
+    any_chunk_replace_option = o.replace_corrupted or \
+                               o.replace_wrong_located or \
+                               o.replace_entities or \
+                               o.replace_shared_offset
+    any_chunk_delete_option = o.delete_corrupted or \
+                              o.delete_wrong_located or \
+                              o.delete_entities or \
+                              o.delete_shared_offset
+    any_region_replace_option = o.replace_too_small
+    any_region_delete_option = o.delete_too_small
+
+    error = parser.error
+
+    # All scanners will use this progress bar
+    widgets = ['Scanning: ',
+               FractionWidget(),
+               ' ',
+               progressbar.Percentage(),
+               ' ',
+               progressbar.Bar(left='[', right=']'),
+               ' ',
+               progressbar.ETA()]
+
+    if o.interactive or o.summary:
+        if any_chunk_replace_option or any_region_replace_option:
+            error('Can\'t use the options --replace-* , --delete-* and '
+                  '--log with --interactive. You can choose all this '
+                  'while in the interactive mode.')
+
+    else:
+        # Not options.interactive
+        if o.backups:
+            if not any_chunk_replace_option and not any_region_replace_option:
+                error('The option --backups needs at least one of the '
+                      '--replace-* options')
+            else:
+                if (len(regionset.regions) > 0):
+                    error('You can\'t use the replace options while scanning '
+                          'separate region files. The input should be only '
+                          'one world, but you provided {0} individual '
+                          'region files.'.format(len(regionset.regions)))
+                elif (len(world_list) > 1):
+                    error('You can\'t use the replace options while scanning '
+                          'multiple worlds. The input should be only one '
+                          'world, but you provided {0} '
+                          'worlds.'.format(len(world_list)))
+
+        if not o.backups and any_chunk_replace_option:
+            error("The options --replace-* need the --backups option")
+
+    if o.entity_limit < 0:
+        error("The entity limit must be at least 0!")
+
+    print "\nWelcome to Region Fixer!"
+ print "(version: {0})".format(parser.version) + + # Do things with the option options args + # Create a list of worlds containing the backups of the region files + if o.backups: + backup_worlds = parse_backup_list(o.backups) + if not backup_worlds: + print ('[WARNING] No valid backup directories found, won\'t fix ' + 'any chunk.') + else: + backup_worlds = [] + + # The program starts + if o.interactive: + c = InteractiveLoop(world_list, regionset, o, backup_worlds) + c.cmdloop() + else: + summary_text = "" + # scan the separate region files + if len(regionset.regions) > 0: + print entitle("Scanning separate region files", 0) + + console_scan_regionset(regionset, o.processes, o.entity_limit, + o.delete_entities) + print regionset.generate_report(True) + + # delete chunks + delete_bad_chunks(options, regionset) + + # delete region files + delete_bad_regions(options, regionset) + + # verbose log + if options.summary: + summary_text += "\n" + summary_text += entitle("Separate region files") + summary_text += "\n" + t = regionset.summary() + if t: + summary_text += t + else: + summary_text += "No problems found.\n\n" + + # scan all the world folders + for w in world_list: + print entitle(' Scanning world: {0} '.format(w.get_name()), 0) + + console_scan_world(w, o.processes, o.entity_limit, + o.delete_entities) + + print w.generate_report(True) + + corrupted, wrong_located, entities_prob, shared_prob, \ + total_chunks, too_small_region, unreadable_region, total_regions \ + = w.generate_report(standalone = False) + + print + # replace chunks + if backup_worlds and not len(world_list) > 1: + options_replace = [o.replace_corrupted, + o.replace_wrong_located, + o.replace_entities, + o.replace_shared_offset] + replacing = zip(options_replace, world.CHUNK_PROBLEMS_ITERATOR) + for replace, (problem, status, arg) in replacing: + if replace: + total = w.count_chunks(problem) + if total: + text = " Replacing chunks with status: {0} ".format(status) + print "{0:#^60}".format(text) + fixed = w.replace_problematic_chunks(backup_worlds, problem, options) + print "\n{0} replaced of a total of {1} chunks with status: {2}".format(fixed, total, status) + else: print "No chunks to replace with status: {0}".format(status) + + elif any_chunk_replace_option and not backup_worlds: + print "Info: Won't replace any chunk." + print "No backup worlds found, won't replace any chunks/region files!" + elif any_chunk_replace_option and backup_worlds and len(world_list) > 1: + print "Info: Won't replace any chunk." + print "Can't use the replace options while scanning more than one world!" + + # replace region files + if backup_worlds and not len(world_list) > 1: + options_replace = [o.replace_too_small] + replacing = zip(options_replace, world.REGION_PROBLEMS_ITERATOR) + for replace, (problem, status, arg) in replacing: + if replace: + total = w.count_regions(problem) + if total: + text = " Replacing regions with status: {0} ".format(status) + print "{0:#^60}".format(text) + fixed = w.replace_problematic_regions(backup_worlds, problem, options) + print "\n{0} replaced of a total of {1} regions with status: {2}".format(fixed, total, status) + else: print "No region to replace with status: {0}".format(status) + + elif any_region_replace_option and not backup_worlds: + print "Info: Won't replace any regions." + print "No valid backup worlds found, won't replace any chunks/region files!" 
+ print "Note: You probably inserted some backup worlds with the backup option but they are probably no valid worlds, the most common issue is wrong path." + elif any_region_replace_option and backup_worlds and len(world_list) > 1: + print "Info: Won't replace any regions." + print "Can't use the replace options while scanning more than one world!" + + # delete chunks + delete_bad_chunks(options, w) + + # delete region files + delete_bad_regions(options, w) + + # print a summary for this world + if options.summary: + summary_text += w.summary() + + # verbose log text + if options.summary == '-': + print "\nPrinting log:\n" + print summary_text + elif options.summary != None: + try: + f = open(options.summary, 'w') + f.write(summary_text) + f.write('\n') + f.close() + print "Log file saved in \'{0}\'.".format(options.summary) + except: + print "Something went wrong while saving the log file!" + + return 0 + + +if __name__ == '__main__': + freeze_support() + value = main() + sys.exit(value) diff --git a/regionfixer_core/__init__.py b/regionfixer_core/__init__.py new file mode 100644 index 0000000..a5682fb --- /dev/null +++ b/regionfixer_core/__init__.py @@ -0,0 +1,2 @@ +#!/usr/bin/env python +# -*- coding: utf-8 -*- \ No newline at end of file diff --git a/interactive.py b/regionfixer_core/interactive.py similarity index 91% rename from interactive.py rename to regionfixer_core/interactive.py index 80f88d8..975cb25 100644 --- a/interactive.py +++ b/regionfixer_core/interactive.py @@ -22,19 +22,19 @@ # -# TODO needs big update! import world from cmd import Cmd -from scan import scan_world, scan_regionset +from scan import console_scan_world, console_scan_regionset -class interactive_loop(Cmd): + +class InteractiveLoop(Cmd): def __init__(self, world_list, regionset, options, backup_worlds): Cmd.__init__(self) self.world_list = world_list self.regionset = regionset self.world_names = [str(i.name) for i in self.world_list] - # if there's only one world use it + # if there's only one world use it if len(self.world_list) == 1 and len(self.regionset) == 0: self.current = world_list[0] elif len(self.world_list) == 0 and len(self.regionset) > 0: @@ -44,32 +44,32 @@ def __init__(self, world_list, regionset, options, backup_worlds): self.options = options self.backup_worlds = backup_worlds self.prompt = "#-> " - self.intro = "Minecraft Region-Fixer interactive mode.\n(Use tab to autocomplete. Autocomplete doens't work on Windows. Type help for a list of commands.)\n" - - # other region-fixer stuff + self.intro = ("Minecraft Region-Fixer interactive mode.\n(Use tab to " + "autocomplete. 
Type help for a list of commands.)\n") - # possible args for chunks stuff + # Possible args for chunks stuff possible_args = "" first = True for i in world.CHUNK_PROBLEMS_ARGS.values() + ['all']: if not first: possible_args += ", " - possible_args += i + possible_args += i first = False self.possible_chunk_args_text = possible_args - - # possible args for region stuff + + # Possible args for region stuff possible_args = "" first = True for i in world.REGION_PROBLEMS_ARGS.values() + ['all']: if not first: possible_args += ", " - possible_args += i + possible_args += i first = False self.possible_region_args_text = possible_args - - - # do + + ################################################# + # Do methods + ################################################# def do_set(self,arg): """ Command to change some options and variables in interactive mode """ @@ -191,19 +191,22 @@ def do_current_workload(self, arg): print "This command doesn't use any arguments." def do_scan(self, arg): + """ Scans the current workload. """ # TODO: what about scanning while deleting entities as done in non-interactive mode? # this would need an option to choose which of the two methods use - """ Scans the current workload. """ + o = self.options if len(arg.split()) > 0: print "Error: too many parameters." else: if self.current: if isinstance(self.current, world.World): self.current = world.World(self.current.path) - scan_world(self.current, self.options) + console_scan_world(self.current, o.processes, + o.entity_limit, o.delete_entities) elif isinstance(self.current, world.RegionSet): print "\n{0:-^60}".format(' Scanning region files ') - scan_regionset(self.current, self.options) + console_scan_regionset(self.current, o.processes, + o.entity_limit, o.delete_entities) else: print "No world set! Use \'set workload\'" @@ -371,7 +374,9 @@ def do_EOF(self, arg): print "Quitting." return True - # complete + ################################################# + # Complete methods + ################################################# def complete_arg(self, text, possible_args): l = [] for arg in possible_args: @@ -413,19 +418,26 @@ def complete_replace_regions(self, text, line, begidx, endidx): possible_args = world.REGION_PROBLEMS_ARGS.values() + ['all'] return self.complete_arg(text, possible_args) - # help + ################################################# + # Help methods + ################################################# # TODO sería una buena idea poner un artículo de ayuda de como usar el programa en un caso típico. # TODO: the help texts need a normalize def help_set(self): - print "\nSets some variables used for the scan in interactive mode. If you run this command without an argument for a variable you can see the current state of the variable. You can set:" - print " verbose" - print "If True prints a line per scanned region file instead of showing a progress bar." - print "\n entity-limit" - print "If a chunk has more than this number of entities it will be added to the list of chunks with too many entities problem." - print "\n processes" - print "Number of cores used while scanning the world." - print "\n workload" - print "If you input a few worlds you can choose wich one will be scanned using this command.\n" + print ("\nSets some variables used for the scan in interactive mode. " + "If you run this command without an argument for a variable " + "you can see the current state of the variable. 
You can set:\n" + " verbose\n" + "If True prints a line per scanned region file instead of " + "showing a progress bar.\n" + " entity-limit\n" + "If a chunk has more than this number of entities it will be " + "added to the list of chunks with too many entities problem.\n" + " processes" + "Number of cores used while scanning the world.\n" + " workload\n" + "If you input a few worlds you can choose wich one will be " + "scanned using this command.\n") def help_current_workload(self): print "\nPrints information of the current region-set/world. This will be the region-set/world to scan and fix.\n" def help_scan(self): diff --git a/progressbar.py b/regionfixer_core/progressbar.py similarity index 100% rename from progressbar.py rename to regionfixer_core/progressbar.py diff --git a/scan.py b/regionfixer_core/scan.py similarity index 54% rename from scan.py rename to regionfixer_core/scan.py index 324126f..78c6b96 100644 --- a/scan.py +++ b/regionfixer_core/scan.py @@ -38,6 +38,20 @@ import sys import traceback +from copy import copy +import logging +from time import sleep + + +#~ TUPLE_COORDS = 0 +#~ TUPLE_DATA_COORDS = 0 +#~ TUPLE_GLOBAL_COORDS = 2 +TUPLE_NUM_ENTITIES = 0 +TUPLE_STATUS = 1 + + +logging.basicConfig(filename='scan.log', level=logging.DEBUG) + class ChildProcessException(Exception): """Takes the child process traceback text and prints it as a @@ -45,16 +59,17 @@ class ChildProcessException(Exception): def __init__(self, error): # Helps to see wich one is the child process traceback traceback = error[2] - print "*"*10 + print "*" * 10 print "*** Error while scanning:" print "*** ", error[0] - print "*"*10 + print "*" * 10 print "*** Printing the child's Traceback:" print "*** Exception:", traceback[0], traceback[1] for tb in traceback[2]: - print "*"*10 + print "*" * 10 print "*** File {0}, line {1}, in {2} \n*** {3}".format(*tb) - print "*"*10 + print "*" * 10 + class FractionWidget(progressbar.ProgressBarWidget): """ Convenience class to use the progressbar.py """ @@ -64,19 +79,198 @@ def __init__(self, sep=' / '): def update(self, pbar): return '%2d%s%2d' % (pbar.currval, self.sep, pbar.maxval) -def scan_world(world_obj, options): - """ Scans a world folder including players, region folders and - level.dat. While scanning prints status messages. """ - w = world_obj - # scan the world dir - print "Scanning directory..." - if not w.scanned_level.path: - print "Warning: No \'level.dat\' file found!" +class AsyncRegionsetScanner(object): + def __init__(self, regionset, processes, entity_limit, + remove_entities=False): + + self._regionset = regionset + self.processes = processes + self.entity_limit = entity_limit + self.remove_entities = remove_entities + + # Queue used by processes to pass results + self.queue = q = queues.SimpleQueue() + self.pool = multiprocessing.Pool(processes=processes, + initializer=_mp_pool_init, + initargs=(regionset, entity_limit, remove_entities, q)) + + def scan(self): + """ Scan and fill the given regionset. """ + total_regions = len(self._regionset.regions) + self._results = self.pool.map_async(multithread_scan_regionfile, + self._regionset.list_regions(None), + max(1,total_regions//self.processes)) + + def get_last_result(self): + """ Return results of last region file scanned. + + If there are left no scanned region files return None. The + ScannedRegionFile returned is the same instance in the regionset, + don't modify it or you will modify the regionset results. 
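+
+        A minimal polling sketch (illustrative only; 'regionset' is an
+        already constructed RegionSet and the numbers are placeholders):
+
+            scanner = AsyncRegionsetScanner(regionset, processes=2,
+                                            entity_limit=500)
+            scanner.scan()
+            while not scanner.finished:
+                sleep(0.01)
+                r = scanner.get_last_result()
+                if r:
+                    print "Scanned", r.filename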
+ """ + + q = self.queue + logging.debug("AsyncRegionsetScanner: starting get_last_result") + logging.debug("AsyncRegionsetScanner: queue empty: {0}".format(q.empty())) + if not q.empty(): + logging.debug("AsyncRegionsetScanner: queue not empty") + r = q.get() + logging.debug("AsyncRegionsetScanner: result: {0}".format(r)) + if r is None: + # Something went wrong scanning! + raise ChildProcessException("Something went wrong \ + scanning a region-file.") + # Overwrite it in the regionset + self._regionset[r.get_coords()] = r + return r + else: + return None + + @property + def finished(self): + """ Finished the operation. The queue could have elements """ + return self._results.ready() and self.queue.empty() + + @property + def regionset(self): + return self._regionset + + +class AsyncWorldScanner(object): + def __init__(self, world_obj, processes, entity_limit, + remove_entities=False): + + self._world_obj = world_obj + self.processes = processes + self.entity_limit = entity_limit + self.remove_entities = remove_entities + + self.regionsets = copy(world_obj.regionsets) + + self._current_regionset = None + + def scan(self): + """ Scan and fill the given regionset. """ + cr = AsyncRegionsetScanner(self.regionsets.pop(0), + self.processes, + self.entity_limit, + self.remove_entities) + self._current_regionset = cr + cr.scan() + + def get_last_result(self): + """ Return results of last region file scanned. + + If there are left no scanned region files return None. The + ScannedRegionFile returned is the same instance in the regionset, + don't modify it or you will modify the regionset results. + """ + cr = self._current_regionset + logging.debug("AsyncWorldScanner: current_regionset {0}".format(cr)) + if cr is not None: + logging.debug("AsyncWorldScanner: cr.finished {0}".format(cr.finished)) + if not cr.finished: + return cr.get_last_result() + elif self.regionsets: + self.scan() + return None + else: + return None + + else: + return None + + @property + def current_regionset(self): + return self._current_regionset.regionset + + @property + def finished(self): + """ Finished the operation. The queue could have elements """ + return not self.regionsets and self._current_regionset.finished + + @property + def world_obj(self): + return self._world_obj + + +class AsyncPlayerScanner(object): + def __init__(self, player_dict, processes): + + self._player_dict = player_dict + self.processes = processes + + self.queue = q = queues.SimpleQueue() + self.pool = multiprocessing.Pool(processes=processes, + initializer=_mp_player_pool_init, + initargs=(q,)) + + def scan(self): + """ Scan and fill the given player_dict generated by world.py. """ + total_players = len(self._player_dict) + player_list = self._player_dict.values() + self._results = self.pool.map_async(multiprocess_scan_player, + player_list, + max(1, total_players//self.processes)) + + def get_last_result(self): + """ Return results of last player scanned. """ + + q = self.queue + logging.debug("AsyncPlayerScanner: starting get_last_result") + logging.debug("AsyncPlayerScanner: queue empty: {0}".format(q.empty())) + if not q.empty(): + logging.debug("AsyncPlayerScanner: queue not empty") + p = q.get() + logging.debug("AsyncPlayerScanner: result: {0}".format(p)) +# if p is None: +# # Something went wrong scanning! 
+# raise ChildProcessException("Something went wrong \ +# scanning a player-file.") + # Overwrite it in the regionset + self._player_dict[p.filename.split('.')[0]] = p + return p + else: + return None + + @property + def finished(self): + """ Have the scan finished? """ + return self._results.ready() and self.queue.empty() + + @property + def player_dict(self): + return self._player_dict + + + +# All scanners will use this progress bar +widgets = ['Scanning: ', + FractionWidget(), + ' ', + progressbar.Percentage(), + ' ', + progressbar.Bar(left='[', right=']'), + ' ', + progressbar.ETA()] + + +def console_scan_world(world_obj, processes, entity_limit, remove_entities): + """ Scans a world folder including players and prints status to console. + + This functions uses AsyncPlayerScanner and AsyncWorldScanner. + """ + + w = world_obj + # Scan the world directory + print "World info:" if w.players: - print "There are {0} region files and {1} player files in the world directory.".format(\ - w.get_number_regions(), len(w.players)) + print ("There are {0} region files and {1} player files " + "in the world directory.").format( + w.get_number_regions(), + len(w.players)) else: print "There are {0} region files in the world directory.".format(\ w.get_number_regions()) @@ -93,32 +287,81 @@ def scan_world(world_obj, options): print "[WARNING!]: \'level.dat\' is corrupted with the following error/s:" print "\t {0}".format(w.scanned_level.status_text) - print "\n{0:-^60}".format(' Checking player files ') - # TODO multiprocessing! - # Probably, create a scanner object with a nice buffer of logger for text and logs and debugs + # Scan player files + print "\n{0:-^60}".format(' Scanning player files ') if not w.players: print "Info: No player files to scan." else: - scan_all_players(w) - all_ok = True - for name in w.players: - if w.players[name].readable == False: - print "[WARNING]: Player file {0} has problems.\n\tError: {1}".format(w.players[name].filename, w.players[name].status_text) - all_ok = False - if all_ok: - print "All player files are readable." + total_players = len(w.players) + pbar = progressbar.ProgressBar(widgets=widgets, + maxval=total_players) + + ps = AsyncPlayerScanner(w.players, processes) + ps.scan() + counter = 0 + while not ps.finished: + sleep(0.001) + result = ps.get_last_result() + if result: + counter += 1 + pbar.update(counter) # SCAN ALL THE CHUNKS! if w.get_number_regions == 0: print "No region files to scan!" else: - for r in w.regionsets: - if r.regions: - print "\n{0:-^60}".format(' Scanning the {0} '.format(r.get_name())) - scan_regionset(r, options) + print "\n{0:-^60}".format(' Scanning region files ') + #Scan world regionsets + ws = AsyncWorldScanner(w, processes, entity_limit, + remove_entities) + + total_regions = ws.world_obj.count_regions() + pbar = progressbar.ProgressBar(widgets=widgets, + maxval=total_regions) + pbar = progressbar.ProgressBar( + widgets=widgets, + maxval=total_regions) + pbar.start() + ws.scan() + + counter = 0 + while not ws.finished: + sleep(0.01) + result = ws.get_last_result() + if result: + counter += 1 + pbar.update(counter) + + pbar.finish() + w.scanned = True +def console_scan_regionset(regionset, processes, entity_limit, + remove_entities): + """ Scan a regionset printing status to console. + + Uses AsyncRegionsetScanner. 
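+
+    A hedged usage sketch (the folder path and the numbers are just
+    placeholders):
+
+        rset = world.RegionSet(regionset_path="/path/to/world/region")
+        console_scan_regionset(rset, processes=2, entity_limit=500,
+                               remove_entities=False)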
+ """ + + total_regions = len(regionset) + pbar = progressbar.ProgressBar(widgets=widgets, + maxval=total_regions) + pbar.start() + rs = AsyncRegionsetScanner(regionset, processes, entity_limit, + remove_entities) + rs.scan() + counter = 0 + while not rs.finished: + sleep(0.01) + result = rs.get_last_result() + if result: + counter += 1 + pbar.update(counter) + + pbar.finish() + + def scan_player(scanned_dat_file): """ At the moment only tries to read a .dat player file. It returns 0 if it's ok and 1 if has some problem """ @@ -130,6 +373,20 @@ def scan_player(scanned_dat_file): except Exception, e: s.readable = False s.status_text = e + return s + + +def multiprocess_scan_player(player): + """ Does the multithread stuff for scan_region_file """ + p = player + p = scan_player(p) + multiprocess_scan_player.q.put(p) + + +def _mp_player_pool_init(q): + """ Function to initialize the multiprocessing in scan_regionset. + Is used to pass values to the child process. """ + multiprocess_scan_player.q = q def scan_all_players(world_obj): @@ -139,20 +396,18 @@ def scan_all_players(world_obj): scan_player(world_obj.players[name]) -def scan_region_file(scanned_regionfile_obj, options): +def scan_region_file(scanned_regionfile_obj, entity_limit, delete_entities): """ Given a scanned region file object with the information of a region files scans it and returns the same obj filled with the results. - + If delete_entities is True it will delete entities while scanning - + entiti_limit is the threshold tof entities to conisder a chunk with too much entities problems. """ - o = options - delete_entities = o.delete_entities - entity_limit = o.entity_limit + try: r = scanned_regionfile_obj # counters of problems @@ -191,7 +446,7 @@ def scan_region_file(scanned_regionfile_obj, options): # start the actual chunk scanning g_coords = r.get_global_chunk_coords(x, z) - chunk, c = scan_chunk(region_file, (x,z), g_coords, o) + chunk, c = scan_chunk(region_file, (x,z), g_coords, entity_limit) if c != None: # chunk not created r.chunks[(x,z)] = c chunk_count += 1 @@ -253,27 +508,20 @@ def scan_region_file(scanned_regionfile_obj, options): # Fatal exceptions: except: # anything else is a ChildProcessException - except_type, except_class, tb = sys.exc_info() - r = (r.path, r.coords, (except_type, except_class, traceback.extract_tb(tb))) + try: + # Not even r was created, something went really wrong + except_type, except_class, tb = sys.exc_info() + r = (r.path, r.coords, (except_type, except_class, traceback.extract_tb(tb))) + except NameError: + r = (None, None, (except_type, except_class, traceback.extract_tb(tb))) + return r -def multithread_scan_regionfile(region_file): - """ Does the multithread stuff for scan_region_file """ - r = region_file - o = multithread_scan_regionfile.options - # call the normal scan_region_file with this parameters - r = scan_region_file(r,o) - - # exceptions will be handled in scan_region_file which is in the - # single thread land - multithread_scan_regionfile.q.put(r) - - - -def scan_chunk(region_file, coords, global_coords, options): +def scan_chunk(region_file, coords, global_coords, entity_limit): """ Takes a RegionFile obj and the local coordinatesof the chunk as inputs, then scans the chunk and returns all the data.""" + el = entity_limit try: chunk = region_file.get_chunk(*coords) data_coords = world.get_chunk_data_coords(chunk) @@ -282,9 +530,9 @@ def scan_chunk(region_file, coords, global_coords, options): status = world.CHUNK_WRONG_LOCATED status_text = "Mismatched coordinates 
(wrong located chunk)." scan_time = time.time() - elif num_entities > options.entity_limit: + elif num_entities > el: status = world.CHUNK_TOO_MANY_ENTITIES - status_text = "The chunks has too many entities (it has {0}, and it's more than the limit {1})".format(num_entities, options.entity_limit) + status_text = "The chunks has too many entities (it has {0}, and it's more than the limit {1})".format(num_entities, entity_limit) scan_time = time.time() else: status = world.CHUNK_OK @@ -331,100 +579,29 @@ def scan_chunk(region_file, coords, global_coords, options): return chunk, (num_entities, status) if status != world.CHUNK_NOT_CREATED else None -#~ TUPLE_COORDS = 0 -#~ TUPLE_DATA_COORDS = 0 -#~ TUPLE_GLOBAL_COORDS = 2 -TUPLE_NUM_ENTITIES = 0 -TUPLE_STATUS = 1 - -#~ def scan_and_fill_chunk(region_file, scanned_chunk_obj, options): - #~ """ Takes a RegionFile obj and a ScannedChunk obj as inputs, - #~ scans the chunk, fills the ScannedChunk obj and returns the chunk - #~ as a NBT object.""" -#~ - #~ c = scanned_chunk_obj - #~ chunk, region_file, c.h_coords, c.d_coords, c.g_coords, c.num_entities, c.status, c.status_text, c.scan_time, c.region_path = scan_chunk(region_file, c.h_coords, options) - #~ return chunk -def _mp_pool_init(regionset,options,q): +def _mp_pool_init(regionset, entity_limit, remove_entities, q): """ Function to initialize the multiprocessing in scan_regionset. Is used to pass values to the child process. """ multithread_scan_regionfile.regionset = regionset multithread_scan_regionfile.q = q - multithread_scan_regionfile.options = options - - -def scan_regionset(regionset, options): - """ This function scans all te region files in a regionset object - and fills the ScannedRegionFile obj with the results - """ - - total_regions = len(regionset.regions) - total_chunks = 0 - corrupted_total = 0 - wrong_total = 0 - entities_total = 0 - too_small_total = 0 - unreadable = 0 + multithread_scan_regionfile.entity_limit = entity_limit + multithread_scan_regionfile.remove_entities = remove_entities - # init progress bar - if not options.verbose: - pbar = progressbar.ProgressBar( - widgets=['Scanning: ', FractionWidget(), ' ', progressbar.Percentage(), ' ', progressbar.Bar(left='[',right=']'), ' ', progressbar.ETA()], - maxval=total_regions) - # queue used by processes to pass finished stuff - q = queues.SimpleQueue() - pool = multiprocessing.Pool(processes=options.processes, - initializer=_mp_pool_init,initargs=(regionset,options,q)) - - if not options.verbose: - pbar.start() +def multithread_scan_regionfile(region_file): + """ Does the multithread stuff for scan_region_file """ + r = region_file + entity_limit = multithread_scan_regionfile.entity_limit + remove_entities = multithread_scan_regionfile.remove_entities + # call the normal scan_region_file with this parameters + r = scan_region_file(r, entity_limit, remove_entities) - # start the pool - # Note to self: every child process has his own memory space, - # that means every obj recived by them will be a copy of the - # main obj - result = pool.map_async(multithread_scan_regionfile, regionset.list_regions(None), max(1,total_regions//options.processes)) + # exceptions will be handled in scan_region_file which is in the + # single thread land + + multithread_scan_regionfile.q.put(r) - # printing status - region_counter = 0 - while not result.ready() or not q.empty(): - time.sleep(0.01) - if not q.empty(): - r = q.get() - if r == None: # something went wrong scanning this region file - # probably a bug... 
don't know if it's a good - # idea to skip it - continue - if not isinstance(r,world.ScannedRegionFile): - raise ChildProcessException(r) - else: - corrupted, wrong, entities_prob, shared_offset, num_chunks = r.get_counters() - filename = r.filename - # the obj returned is a copy, overwrite it in regionset - regionset[r.get_coords()] = r - corrupted_total += corrupted - wrong_total += wrong - total_chunks += num_chunks - entities_total += entities_prob - if r.status == world.REGION_TOO_SMALL: - too_small_total += 1 - elif r.status == world.REGION_UNREADABLE: - unreadable += 1 - region_counter += 1 - if options.verbose: - if r.status == world.REGION_OK: - stats = "(c: {0}, w: {1}, tme: {2}, so: {3}, t: {4})".format( corrupted, wrong, entities_prob, shared_offset, num_chunks) - elif r.status == world.REGION_TOO_SMALL: - stats = "(Error: not a region file)" - elif r.status == world.REGION_UNREADABLE: - stats = "(Error: unreadable region file)" - print "Scanned {0: <12} {1:.<43} {2}/{3}".format(filename, stats, region_counter, total_regions) - else: - pbar.update(region_counter) - - if not options.verbose: pbar.finish() - - regionset.scanned = True +if __name__ == '__main__': + pass diff --git a/util.py b/regionfixer_core/util.py similarity index 99% rename from util.py rename to regionfixer_core/util.py index 2715bd0..27d9b76 100644 --- a/util.py +++ b/regionfixer_core/util.py @@ -44,6 +44,7 @@ def is_bare_console(): pass return False + def entitle(text, level = 0): """ Put the text in a title with lot's of hashes everywhere. """ t = '' @@ -53,7 +54,7 @@ def entitle(text, level = 0): t += "{0:#^60}\n".format(' ' + text + ' ') t += "{0:#^60}\n".format('') return t - + def table(columns): """ Gets a list with lists in which each list is a column, @@ -128,6 +129,7 @@ def parse_chunk_list(chunk_list, world_obj): return parsed_list + def parse_paths(args): """ Parse the list of args passed to region-fixer.py and returns a RegionSet object with the list of regions and a list of World @@ -163,6 +165,7 @@ def parse_paths(args): return world_list, world.RegionSet(region_list = region_list) + def parse_world_list(world_path_list): """ Parses a world list checking if they exists and are a minecraft world folders. Returns a list of World objects. 
""" @@ -180,7 +183,6 @@ def parse_world_list(world_path_list): return tmp - def parse_backup_list(world_backup_dirs): """ Generates a list with the input of backup dirs containing the world objects of valid world directories.""" diff --git a/world.py b/regionfixer_core/world.py similarity index 90% rename from world.py rename to regionfixer_core/world.py index e169c1d..43e7972 100644 --- a/world.py +++ b/regionfixer_core/world.py @@ -33,6 +33,9 @@ import time # Constants: + +# Chunk related: +# -------------- # Used to mark the status of a chunks: CHUNK_NOT_CREATED = -1 CHUNK_OK = 0 @@ -40,41 +43,53 @@ CHUNK_WRONG_LOCATED = 2 CHUNK_TOO_MANY_ENTITIES = 3 CHUNK_SHARED_OFFSET = 4 -CHUNK_STATUS_TEXT = {CHUNK_NOT_CREATED:"Not created", - CHUNK_OK:"OK", - CHUNK_CORRUPTED:"Corrupted", - CHUNK_WRONG_LOCATED:"Wrong located", - CHUNK_TOO_MANY_ENTITIES:"Too many entities", - CHUNK_SHARED_OFFSET:"Sharing offset"} - -CHUNK_PROBLEMS = [CHUNK_CORRUPTED, CHUNK_WRONG_LOCATED, CHUNK_TOO_MANY_ENTITIES, CHUNK_SHARED_OFFSET] - -CHUNK_PROBLEMS_ARGS = {CHUNK_CORRUPTED:'corrupted',CHUNK_WRONG_LOCATED:'wrong',CHUNK_TOO_MANY_ENTITIES:'entities',CHUNK_SHARED_OFFSET:'sharing'} -# list with problem status-text tuples +CHUNK_STATUS_TEXT = {CHUNK_NOT_CREATED: "Not created", + CHUNK_OK: "OK", + CHUNK_CORRUPTED: "Corrupted", + CHUNK_WRONG_LOCATED: "Wrong located", + CHUNK_TOO_MANY_ENTITIES: "Too many entities", + CHUNK_SHARED_OFFSET: "Sharing offset"} + +CHUNK_PROBLEMS = [CHUNK_CORRUPTED, + CHUNK_WRONG_LOCATED, + CHUNK_TOO_MANY_ENTITIES, + CHUNK_SHARED_OFFSET] + +CHUNK_PROBLEMS_ARGS = {CHUNK_CORRUPTED: 'corrupted', + CHUNK_WRONG_LOCATED: 'wrong', + CHUNK_TOO_MANY_ENTITIES: 'entities', + CHUNK_SHARED_OFFSET: 'sharing'} +# list with problem, status-text, problem arg tuples CHUNK_PROBLEMS_ITERATOR = [] for problem in CHUNK_PROBLEMS: - CHUNK_PROBLEMS_ITERATOR.append((problem, CHUNK_STATUS_TEXT[problem], CHUNK_PROBLEMS_ARGS[problem])) - - + CHUNK_PROBLEMS_ITERATOR.append((problem, + CHUNK_STATUS_TEXT[problem], + CHUNK_PROBLEMS_ARGS[problem])) +# Region related: +# --------------- # Used to mark the status of region files: REGION_OK = 10 REGION_TOO_SMALL = 11 REGION_UNREADABLE = 12 -REGION_STATUS_TEXT = {REGION_OK: "Ok", REGION_TOO_SMALL: "Too small", REGION_UNREADABLE: "Unreadable"} +REGION_STATUS_TEXT = {REGION_OK: "Ok", + REGION_TOO_SMALL: "Too small", + REGION_UNREADABLE: "Unreadable"} REGION_PROBLEMS = [REGION_TOO_SMALL] -REGION_PROBLEMS_ARGS = {REGION_TOO_SMALL: 'too-small'} +REGION_PROBLEMS_ARGS = {REGION_TOO_SMALL: 'too small'} -# list with problem status-text tuples +# list with problem, status-text, problem arg tuples REGION_PROBLEMS_ITERATOR = [] for problem in REGION_PROBLEMS: try: - REGION_PROBLEMS_ITERATOR.append((problem, REGION_STATUS_TEXT[problem], REGION_PROBLEMS_ARGS[problem])) + REGION_PROBLEMS_ITERATOR.append((problem, + REGION_STATUS_TEXT[problem], + REGION_PROBLEMS_ARGS[problem])) except KeyError: pass -REGION_PROBLEMS_ARGS = {REGION_TOO_SMALL:'too-small'} +REGION_PROBLEMS_ARGS = {REGION_TOO_SMALL: 'too-small'} # Used to know where to look in a chunk status tuple #~ TUPLE_COORDS = 0 @@ -84,10 +99,13 @@ TUPLE_STATUS = 1 # Dimension names: -DIMENSION_NAMES = { "region":"Overworld", "DIM1":"The End", "DIM-1":"Nether" } +DIMENSION_NAMES = {"region": "Overworld", + "DIM1": "The End", + "DIM-1": "Nether"} + class ScannedDatFile(object): - def __init__(self, path = None, readable = None, status_text = None): + def __init__(self, path=None, readable=None, status_text=None): self.path = path if self.path and 
exists(self.path): self.filename = split(path)[1] @@ -101,6 +119,7 @@ def __str__(self): text += "\tReadable:" + str(self.readable) + "\n" return text + class ScannedChunk(object): """ Stores all the results of the scan. Not used at the moment, it prette nice but takes an huge amount of memory. """ @@ -108,7 +127,9 @@ class ScannedChunk(object): # outdated # The problem with it was it took too much memory. It has been # remplaced with a tuple - def __init__(self, header_coords, global_coords = None, data_coords = None, status = None, num_entities = None, scan_time = None, region_path = None): + def __init__(self, header_coords, global_coords=None, data_coords=None, + status=None, num_entities=None, scan_time=None, + region_path=None): """ Inits the object with all the scan information. """ self.h_coords = header_coords self.g_coords = global_coords @@ -122,13 +143,13 @@ def __init__(self, header_coords, global_coords = None, data_coords = None, stat def __str__(self): text = "Chunk with header coordinates:" + str(self.h_coords) + "\n" text += "\tData coordinates:" + str(self.d_coords) + "\n" - text +="\tGlobal coordinates:" + str(self.g_coords) + "\n" + text += "\tGlobal coordinates:" + str(self.g_coords) + "\n" text += "\tStatus:" + str(self.status_text) + "\n" text += "\tNumber of entities:" + str(self.num_entities) + "\n" text += "\tScan time:" + time.ctime(self.scan_time) + "\n" return text - def get_path(): + def get_path(self): """ Returns the path of the region file. """ return self.region_path @@ -142,9 +163,11 @@ def rescan_entities(self, options): self.status = CHUNK_OK self.status_text = CHUNK_STATUS_TEXT[CHUNK_OK] + class ScannedRegionFile(object): """ Stores all the scan information for a region file """ - def __init__(self, filename, corrupted = 0, wrong = 0, entities_prob = 0, shared_offset = 0, chunks = 0, status = 0, time = None): + def __init__(self, filename, corrupted=0, wrong=0, entities_prob=0, + shared_offset=0, chunks=0, status=0, time=None): # general region file info self.path = filename self.filename = split(filename)[1] @@ -196,13 +219,14 @@ def get_counters(self): """ Returns integers with all the problem counters in this region file. The order is corrupted, wrong located, entities shared header, total chunks """ - return self.corrupted_chunks, self.wrong_located_chunks, self.entities_prob, self.shared_offset, self.count_chunks() + return self.corrupted_chunks, self.wrong_located_chunks,\ + self.entities_prob, self.shared_offset, self.count_chunks() def get_path(self): """ Returns the path of the region file. """ return self.path - def count_chunks(self, problem = None): + def count_chunks(self, problem=None): """ Counts chunks in the region file with the given problem. If problem is omited or None, counts all the chunks. Returns an integer with the counter. """ @@ -218,8 +242,8 @@ def get_global_chunk_coords(self, chunkX, chunkZ): coords and returns the global chunkcoords as integerss """ regionX, regionZ = self.get_coords() - chunkX += regionX*32 - chunkZ += regionZ*32 + chunkX += regionX * 32 + chunkZ += regionZ * 32 return chunkX, chunkZ @@ -237,7 +261,9 @@ def get_coords(self): return coordX, coordZ - def list_chunks(self, status = None): +# TODO TODO TODO: This is dangerous! Running the method remove_problematic_chunks +# without a problem will remove all the chunks in the region file!! 
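+# (Note: list_chunks(None), defined just below, returns every chunk in the
+# region file rather than an empty list, so a missing problem argument
+# selects everything.)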
+ def list_chunks(self, status=None): """ Returns a list of all the ScannedChunk objects of the chunks with the given status, if no status is omited or None, returns all the existent chunks in the region file """ @@ -246,9 +272,9 @@ def list_chunks(self, status = None): for c in self.keys(): t = self[c] if status == t[TUPLE_STATUS]: - l.append((self.get_global_chunk_coords(*c),t)) + l.append((self.get_global_chunk_coords(*c), t)) elif status == None: - l.append((self.get_global_chunk_coords(*c),t)) + l.append((self.get_global_chunk_coords(*c), t)) return l def summary(self): @@ -347,7 +373,7 @@ class RegionSet(object): """Stores an arbitrary number of region files and the scan results. Inits with a list of region files. The regions dict is filled while scanning with ScannedRegionFiles and ScannedChunks.""" - def __init__(self, regionset_path = None, region_list = []): + def __init__(self, regionset_path=None, region_list=[]): if regionset_path: self.path = regionset_path self.region_list = glob(join(self.path, "r.*.*.mca")) @@ -371,8 +397,10 @@ def get_name(self): dim_directory = self._get_dimension_directory() if dim_directory: - try: return DIMENSION_NAMES[dim_directory] - except: return dim_directory + try: + return DIMENSION_NAMES[dim_directory] + except: + return dim_directory else: return "" @@ -383,10 +411,11 @@ def _get_dimension_directory(self): if self.path: rest, region = split(self.path) rest, dim_path = split(rest) - if dim_path == "": dim_path = split(rest)[1] + if dim_path == "": + dim_path = split(rest)[1] return dim_path - - else: return None + else: + return None def __str__(self): text = "Region-set information:\n" @@ -411,7 +440,7 @@ def __len__(self): def keys(self): return self.regions.keys() - def list_regions(self, status = None): + def list_regions(self, status=None): """ Returns a list of all the ScannedRegionFile objects stored in the RegionSet with status. If status = None it returns all the objects.""" @@ -430,18 +459,20 @@ def list_regions(self, status = None): t.append(r) return t - def count_regions(self, status = None): + def count_regions(self, status=None): """ Return the number of region files with status. If none returns the number of region files in this regionset. Possible status are: empty, too_small """ counter = 0 for r in self.keys(): - if status == self[r].status: counter += 1 - elif status == None: counter += 1 + if status == self[r].status: + counter += 1 + elif status == None: + counter += 1 return counter - def count_chunks(self, problem = None): + def count_chunks(self, problem=None): """ Returns the number of chunks with the given problem. If problem is None returns the number of chunks. """ counter = 0 @@ -449,7 +480,7 @@ def count_chunks(self, problem = None): counter += self[r].count_chunks(problem) return counter - def list_chunks(self, status = None): + def list_chunks(self, status=None): """ Returns a list of the ScannedChunk objects of the chunks with the given status. If status = None returns all the chunks. """ @@ -459,12 +490,16 @@ def list_chunks(self, status = None): return l def summary(self): - """ Returns a summary of the problematic chunks in this + """ Returns a summary of the problematic chunks in this regionset. The summary is a string with global coords, local coords, data coords and status. 
""" text = "" for r in self.keys(): - if not (self[r].count_chunks(CHUNK_CORRUPTED) or self[r].count_chunks(CHUNK_TOO_MANY_ENTITIES) or self[r].count_chunks(CHUNK_WRONG_LOCATED) or self[r].count_chunks(CHUNK_SHARED_OFFSET) or self[r].status == REGION_TOO_SMALL): + if not (self[r].count_chunks(CHUNK_CORRUPTED) or \ + self[r].count_chunks(CHUNK_TOO_MANY_ENTITIES) or \ + self[r].count_chunks(CHUNK_WRONG_LOCATED) or \ + self[r].count_chunks(CHUNK_SHARED_OFFSET) or \ + self[r].status == REGION_TOO_SMALL): continue text += "Region file: {0}\n".format(self[r].filename) text += self[r].summary() @@ -490,7 +525,6 @@ def locate_region(self, coords): return region_name - def remove_problematic_chunks(self, problem): """ Removes all the chunks with the given problem, returns a counter with the number of deleted chunks. """ @@ -533,10 +567,10 @@ def generate_report(self, standalone): too_small_region = self.count_regions(REGION_TOO_SMALL) unreadable_region = self.count_regions(REGION_UNREADABLE) total_regions = self.count_regions() - + if standalone: text = "" - + # Print all this info in a table format # chunks chunk_errors = ("Problem","Corrupted","Wrong l.","Etities","Shared o.", "Total chunks") @@ -544,7 +578,7 @@ def generate_report(self, standalone): table_data = [] for i, j in zip(chunk_errors, chunk_counters): table_data.append([i,j]) - text += "\nChunk problems:" + text += "\nChunk problems:\n" if corrupted or wrong_located or entities_prob or shared_prob: text += table(table_data) else: @@ -577,6 +611,7 @@ def remove_problematic_regions(self, problem): counter += 1 return counter + class World(object): """ This class stores all the info needed of a world, and once scanned, stores all the problems found. It also has all the tools @@ -626,6 +661,7 @@ def __init__(self, world_path): self.isworld = True else: self.isworld = False + # TODO: Make a Exception for this! so we can use try/except # set in scan.py, used in interactive.py self.scanned = False @@ -643,7 +679,7 @@ def get_number_regions(self): counter = 0 for dim in self.regionsets: counter += len(dim) - + return counter def summary(self): @@ -675,9 +711,9 @@ def summary(self): # chunk info chunk_info = "" for regionset in self.regionsets: - + title = regionset.get_name() - + # don't add text if there aren't broken chunks text = regionset.summary() chunk_info += (title + text) if text else "" @@ -731,7 +767,7 @@ def replace_problematic_chunks(self, backup_worlds, problem, options): # this don't need to be aware of region status, it just # iterates the list returned by list_chunks() bad_chunks = regionset.list_chunks(problem) - + if bad_chunks and b_regionset._get_dimension_directory() != regionset._get_dimension_directory(): print "The regionset \'{0}\' doesn't exist in the backup directory. Skipping this backup directory.".format(regionset._get_dimension_directory()) else: @@ -746,7 +782,7 @@ def replace_problematic_chunks(self, backup_worlds, problem, options): tofix_region_path, _ = regionset.locate_chunk(global_coords) if exists(backup_region_path): print "Backup region file found in:\n {0}".format(backup_region_path) - + # scan the whole region file, pretty slow, but completely needed to detec sharing offset chunks from scan import scan_region_file r = scan_region_file(ScannedRegionFile(backup_region_path),options) @@ -863,7 +899,7 @@ def rescan_entities(self, options): option entity limit is changed. 
""" for regionset in self.regionsets: regionset.rescan_entities(options) - + def generate_report(self, standalone): # collect data diff --git a/regionfixer_gui.py b/regionfixer_gui.py new file mode 100644 index 0000000..cb0fbe9 --- /dev/null +++ b/regionfixer_gui.py @@ -0,0 +1,12 @@ +#!/usr/bin/env python +# -*- coding: utf-8 -*- + +# Needed for the gui +import scan +import world +import nbt + +from gui import Starter + +s = Starter() +s.run() From f9031b92570179aa20eb09b3776d6006a3b440bc Mon Sep 17 00:00:00 2001 From: Alejandro Aguilera Date: Mon, 9 Jun 2014 13:09:15 +0200 Subject: [PATCH 002/151] Update and clean the import statements. --- gui/backups.py | 3 ++- gui/main.py | 11 ++++++----- regionfixer.py | 16 ++++++++-------- regionfixer_gui.py | 3 +-- 4 files changed, 17 insertions(+), 16 deletions(-) diff --git a/gui/backups.py b/gui/backups.py index badb76c..84d3b07 100644 --- a/gui/backups.py +++ b/gui/backups.py @@ -7,7 +7,8 @@ # TODO: just copied this file to this module, is a cutre solution # improve it! See Importing python modules from relative paths, or # order this in a better way -from world import World +from regionfixer_core.world import World + class BackupsWindow(wx.Frame): def __init__(self, parent, title): diff --git a/gui/main.py b/gui/main.py index cb4ea94..a3862c2 100644 --- a/gui/main.py +++ b/gui/main.py @@ -3,14 +3,15 @@ import wx import os - -from backups import BackupsWindow -from scan import AsyncWorldScanner, AsyncPlayerScanner -import world -from world import World from time import sleep from os.path import split +from backups import BackupsWindow +from regionfixer_core.scan import AsyncWorldScanner, AsyncPlayerScanner +from regionfixer_core import world +from regionfixer_core.world import World + + class MainWindow(wx.Frame): def __init__(self, parent, title, backups = None): wx.Frame.__init__(self, parent, title=title, size = (300,400)) diff --git a/regionfixer.py b/regionfixer.py index a995f53..4bceeea 100644 --- a/regionfixer.py +++ b/regionfixer.py @@ -22,17 +22,16 @@ # from multiprocessing import freeze_support -from optparse import OptionParser, OptionGroup +from optparse import OptionParser from getpass import getpass import sys -import world -from scan import console_scan_world, AsyncRegionsetScanner,\ - console_scan_regionset -from interactive import InteractiveLoop -from util import entitle, is_bare_console, parse_world_list, parse_paths, parse_backup_list -from time import sleep -import progressbar +from regionfixer_core import world +from regionfixer_core.scan import console_scan_world, console_scan_regionset +from regionfixer_core.interactive import InteractiveLoop +from regionfixer_core.util import entitle, is_bare_console, parse_paths,\ + parse_backup_list +from regionfixer_core import progressbar class FractionWidget(progressbar.ProgressBarWidget): @@ -64,6 +63,7 @@ def delete_bad_chunks(options, scanned_obj): else: print "No chunks to delete with status: {0}".format(status) + def delete_bad_regions(options, scanned_obj): """ Takes an scanned object (world object or regionset object) and the options give to region-fixer, it deletes all the region files diff --git a/regionfixer_gui.py b/regionfixer_gui.py index cb0fbe9..1e3a354 100644 --- a/regionfixer_gui.py +++ b/regionfixer_gui.py @@ -2,8 +2,7 @@ # -*- coding: utf-8 -*- # Needed for the gui -import scan -import world +import regionfixer_core import nbt from gui import Starter From df98129e3c9b6d9863683dd2bd4065b2e76d5aca Mon Sep 17 00:00:00 2001 From: Alejandro Aguilera Date: Mon, 9 Jun 
2014 19:10:48 +0200 Subject: [PATCH 003/151] Update the donors list. --- DONORS.txt | 6 +++++- 1 file changed, 5 insertions(+), 1 deletion(-) diff --git a/DONORS.txt b/DONORS.txt index 699dca4..231adb5 100644 --- a/DONORS.txt +++ b/DONORS.txt @@ -4,6 +4,10 @@ Travis Wicks Nico van Duuren (Knights and Merchants) Diana Rotter Biocraft +Andrew Van Hise +Eugene Sterner +Udell Ross Burton +Powercraft Network Sponsors: -Intial development was sponsored by: NITRADO Servers (http://nitrado.net) +Initial development was sponsored by: NITRADO Servers (http://nitrado.net) From dd272e66c75cb5dbd84ef3f62dd12f761b5780ef Mon Sep 17 00:00:00 2001 From: Alejandro Aguilera Date: Mon, 16 Jun 2014 14:04:00 +0200 Subject: [PATCH 004/151] Some cleaning up. Update to properly scan the world new folders. --- README.rst | 13 +++-- gui/main.py | 64 +++++++++++++--------- regionfixer.py | 24 +++++---- regionfixer_core/scan.py | 100 ++++++++++++++++++++++------------ regionfixer_core/util.py | 2 +- regionfixer_core/world.py | 110 ++++++++++++++++++++++++++++++-------- 6 files changed, 215 insertions(+), 98 deletions(-) diff --git a/README.rst b/README.rst index 5130756..65b6a92 100644 --- a/README.rst +++ b/README.rst @@ -9,7 +9,7 @@ Locates problems and tries to fix Minecraft worlds (or region files). Tries to fix corrupted chunks in region files using old backup copies of the Minecraft world. If you don't have a copy, you can eliminate the -corrupted chunks making Minecraft recreate them. +corrupted chunks making Minecraft regenerate them. It also scans the 'level.dat' file and the player '\*.dat' and tries to read them. If there are any problems it prints warnings. At the moment @@ -18,6 +18,9 @@ it doesn't fix any problem in these files. Web page: https://github.com/Fenixin/Minecraft-Region-Fixer +Mincraft forums posts: +http://www.minecraftforum.net/topic/302380-minecraft-region-fixer/ +http://www.minecraftforum.net/topic/275730-minecraft-region-fixer/ Supported platforms =================== @@ -45,9 +48,11 @@ Notes Older versions of Minecraft had big problems when loading corrupted chunks. But in the latest versions of Minecraft (tested in 1.4.7) the server itself removes corrupted chunks (when loading them) and -regenerate those chunks. Region-Fixer still is useful for replacing -those chunks with a backup, removing entities, or trying to see what's -going wrong with your world. +regenerate those chunks. + +Region-Fixer still is useful for replacing those chunks with a +backup, removing entities, or trying to see what's going wrong +with your world. 
Usage diff --git a/gui/main.py b/gui/main.py index a3862c2..07591ad 100644 --- a/gui/main.py +++ b/gui/main.py @@ -265,10 +265,9 @@ def OnReplaceRegions(self, e): self.world.remove_problematic_regions(world.REGION_UNREADABLE) progressdlg.pulse() progressdlg.Destroy() - + self.update_delete_buttons_status(False) self.update_replace_buttons_status(False) - def OnScan(self, e): # Let's simulate the options stuff @@ -281,17 +280,13 @@ def __init__(self, main): self.gui = True options = Options(self) - progressdlg = wx.ProgressDialog("Scanning...", "Scanning...", - self.world.count_regions(), self, - style = wx.PD_ELAPSED_TIME | - wx.PD_ESTIMATED_TIME | - wx.PD_REMAINING_TIME | - wx.PD_CAN_SKIP | - wx.PD_CAN_ABORT | - wx.PD_AUTO_HIDE | - wx.PD_SMOOTH) + progressdlg = wx.ProgressDialog("Scanning...", "Scanning...", + self.world.count_regions(), self, + style=wx.PD_ELAPSED_TIME | wx.PD_ESTIMATED_TIME | + wx.PD_REMAINING_TIME | wx.PD_CAN_SKIP | wx.PD_CAN_ABORT | + wx.PD_AUTO_HIDE | wx.PD_SMOOTH) options.progressdlg = progressdlg - + ws = AsyncWorldScanner(self.world, options.processes, options.entity_limit, options.delete_entities) @@ -305,18 +300,15 @@ def __init__(self, main): counter += 1 progressdlg.Update(counter, "Scanning regions from: " + rs.get_name()) - + progressdlg.Destroy() - - progressdlg = wx.ProgressDialog("Scanning...", "Scanning...", - len(self.world.players), self, - style = wx.PD_ELAPSED_TIME | - wx.PD_ESTIMATED_TIME | - wx.PD_REMAINING_TIME | - wx.PD_CAN_SKIP | - wx.PD_CAN_ABORT | - wx.PD_AUTO_HIDE | - wx.PD_SMOOTH) + + # TODO: DATA files and old player files + progressdlg = wx.ProgressDialog("Scanning...", "Scanning...", + self.world.count_regions(), self, + style=wx.PD_ELAPSED_TIME | wx.PD_ESTIMATED_TIME | + wx.PD_REMAINING_TIME | wx.PD_CAN_SKIP | wx.PD_CAN_ABORT | + wx.PD_AUTO_HIDE | wx.PD_SMOOTH) ps = AsyncPlayerScanner(self.world.players, options.processes) ps.scan() @@ -333,9 +325,29 @@ def __init__(self, main): progressdlg.Destroy() - # TODO! We need to make every module truly independent. - # We need to make better way to separate print of text to - # console and scanning + # Data files + progressdlg = wx.ProgressDialog("Scanning...", "Scanning...", + self.world.count_regions(), self, + style=wx.PD_ELAPSED_TIME | wx.PD_ESTIMATED_TIME | + wx.PD_REMAINING_TIME | wx.PD_CAN_SKIP | wx.PD_CAN_ABORT | + wx.PD_AUTO_HIDE | wx.PD_SMOOTH) + + ps = AsyncPlayerScanner(self.world.players, options.processes) + ps.scan() + counter = 0 + last_player = "" + while not ps.finished: + sleep(0.001) + result = ps.get_last_result() + if result: + counter += 1 + last_player = result.filename.split('.')[0] + progressdlg.Update(counter, + "Last player scanned: " + last_player) + + progressdlg.Destroy() + + self.results_text.SetValue(self.world.generate_report(True)) self.update_delete_buttons_status(True) diff --git a/regionfixer.py b/regionfixer.py index 4bceeea..f52cfd0 100644 --- a/regionfixer.py +++ b/regionfixer.py @@ -269,7 +269,7 @@ def main(): print print "Minecraft Region Fixer is a command line aplication, if you want to run it" print "you need to open a command line (cmd.exe in the start menu in windows 7)." 
- print + print getpass("Press enter to continue:") return 1 @@ -357,7 +357,7 @@ def main(): c.cmdloop() else: summary_text = "" - # scan the separate region files + # Scan the separate region files if len(regionset.regions) > 0: print entitle("Scanning separate region files", 0) @@ -365,13 +365,13 @@ def main(): o.delete_entities) print regionset.generate_report(True) - # delete chunks + # Delete chunks delete_bad_chunks(options, regionset) - # delete region files + # Delete region files delete_bad_regions(options, regionset) - # verbose log + # Verbose log if options.summary: summary_text += "\n" summary_text += entitle("Separate region files") @@ -391,12 +391,12 @@ def main(): print w.generate_report(True) - corrupted, wrong_located, entities_prob, shared_prob, \ - total_chunks, too_small_region, unreadable_region, total_regions \ - = w.generate_report(standalone = False) +# corrupted, wrong_located, entities_prob, shared_prob, \ +# total_chunks, too_small_region, unreadable_region, total_regions \ +# = w.generate_report(standalone = False) print - # replace chunks + # Replace chunks if backup_worlds and not len(world_list) > 1: options_replace = [o.replace_corrupted, o.replace_wrong_located, @@ -411,7 +411,8 @@ def main(): print "{0:#^60}".format(text) fixed = w.replace_problematic_chunks(backup_worlds, problem, options) print "\n{0} replaced of a total of {1} chunks with status: {2}".format(fixed, total, status) - else: print "No chunks to replace with status: {0}".format(status) + else: + print "No chunks to replace with status: {0}".format(status) elif any_chunk_replace_option and not backup_worlds: print "Info: Won't replace any chunk." @@ -432,7 +433,8 @@ def main(): print "{0:#^60}".format(text) fixed = w.replace_problematic_regions(backup_worlds, problem, options) print "\n{0} replaced of a total of {1} regions with status: {2}".format(fixed, total, status) - else: print "No region to replace with status: {0}".format(status) + else: + print "No region to replace with status: {0}".format(status) elif any_region_replace_option and not backup_worlds: print "Info: Won't replace any regions." diff --git a/regionfixer_core/scan.py b/regionfixer_core/scan.py index 78c6b96..c7a197e 100644 --- a/regionfixer_core/scan.py +++ b/regionfixer_core/scan.py @@ -21,26 +21,21 @@ # along with this program. If not, see . 
# -import nbt.region as region -import nbt.nbt as nbt -#~ from nbt.region import STATUS_CHUNK_OVERLAPPING, STATUS_CHUNK_MISMATCHED_LENGTHS - #~ - STATUS_CHUNK_ZERO_LENGTH - #~ - STATUS_CHUNK_IN_HEADER - #~ - STATUS_CHUNK_OUT_OF_FILE - #~ - STATUS_CHUNK_OK - #~ - STATUS_CHUNK_NOT_CREATED -from os.path import split, join -import progressbar -import multiprocessing -from multiprocessing import queues -import world -import time +from os.path import split, join +from time import sleep, time import sys import traceback from copy import copy import logging -from time import sleep +import multiprocessing +from multiprocessing import queues + +import nbt.region as region +import nbt.nbt as nbt + +import progressbar +import world #~ TUPLE_COORDS = 0 @@ -50,7 +45,7 @@ TUPLE_STATUS = 1 -logging.basicConfig(filename='scan.log', level=logging.DEBUG) +# logging.basicConfig(filename='scan.log', level=logging.DEBUG) class ChildProcessException(Exception): @@ -266,16 +261,13 @@ def console_scan_world(world_obj, processes, entity_limit, remove_entities): # Scan the world directory print "World info:" - if w.players: - print ("There are {0} region files and {1} player files " - "in the world directory.").format( - w.get_number_regions(), - len(w.players)) - else: - print "There are {0} region files in the world directory.".format(\ - w.get_number_regions()) + print ("There are {0} region files, {1} player files and {2} data" + " files in the world directory.").format( + w.get_number_regions(), + len(w.players) + len(w.old_players), + len(w.data_files)) - # check the level.dat file and the *.dat files in players directory + # check the level.dat print "\n{0:-^60}".format(' Checking level.dat ') if not w.scanned_level.path: @@ -288,7 +280,7 @@ def console_scan_world(world_obj, processes, entity_limit, remove_entities): print "\t {0}".format(w.scanned_level.status_text) # Scan player files - print "\n{0:-^60}".format(' Scanning player files ') + print "\n{0:-^60}".format(' Scanning UUID player files ') if not w.players: print "Info: No player files to scan." else: @@ -306,6 +298,44 @@ def console_scan_world(world_obj, processes, entity_limit, remove_entities): counter += 1 pbar.update(counter) + # Scan old player files + print "\n{0:-^60}".format(' Scanning old format player files ') + if not w.old_players: + print "Info: No old format player files to scan." + else: + total_players = len(w.old_players) + pbar = progressbar.ProgressBar(widgets=widgets, + maxval=total_players) + + ps = AsyncPlayerScanner(w.old_players, processes) + ps.scan() + counter = 0 + while not ps.finished: + sleep(0.001) + result = ps.get_last_result() + if result: + counter += 1 + pbar.update(counter) + + # Scan dat files + print "\n{0:-^60}".format(' Scanning structures and map data files ') + if not w.data_files: + print "Info: No structures and map data files to scan." + else: + total_files = len(w.data_files) + pbar = progressbar.ProgressBar(widgets=widgets, + maxval=total_players) + + ps = AsyncPlayerScanner(w.data_files, processes) + ps.scan() + counter = 0 + while not ps.finished: + sleep(0.001) + result = ps.get_last_result() + if result: + counter += 1 + pbar.update(counter) + # SCAN ALL THE CHUNKS! if w.get_number_regions == 0: print "No region files to scan!" 
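+
+    # For reference, the per-file work behind these scanners is done by
+    # scan_region_file(); with the reworked signature a direct call would
+    # look roughly like this (the path and the numbers are placeholders):
+    #
+    #     sr = world.ScannedRegionFile("/some/world/region/r.0.0.mca")
+    #     sr = scan_region_file(sr, entity_limit=500, delete_entities=False)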
@@ -428,7 +458,7 @@ def scan_region_file(scanned_regionfile_obj, entity_limit, delete_entities): except IOError, e: print "\nWARNING: I can't open the file {0} !\nThe error is \"{1}\".\nTypical causes are file blocked or problems in the file system.\n".format(filename,e) r.status = world.REGION_UNREADABLE - r.scan_time = time.time() + r.scan_time = time() print "Note: this region file won't be scanned and won't be taken into acount in the summaries" # TODO count also this region files return r @@ -501,7 +531,7 @@ def scan_region_file(scanned_regionfile_obj, entity_limit, delete_entities): r.wrong_located_chunks = wrong r.entities_prob = entities_prob r.shared_offset = shared_counter - r.scan_time = time.time() + r.scan_time = time() r.status = world.REGION_OK return r @@ -529,15 +559,15 @@ def scan_chunk(region_file, coords, global_coords, entity_limit): if data_coords != global_coords: status = world.CHUNK_WRONG_LOCATED status_text = "Mismatched coordinates (wrong located chunk)." - scan_time = time.time() + scan_time = time() elif num_entities > el: status = world.CHUNK_TOO_MANY_ENTITIES status_text = "The chunks has too many entities (it has {0}, and it's more than the limit {1})".format(num_entities, entity_limit) - scan_time = time.time() + scan_time = time() else: status = world.CHUNK_OK status_text = "OK" - scan_time = time.time() + scan_time = time() except region.InconceivedChunk as e: chunk = None @@ -545,13 +575,13 @@ def scan_chunk(region_file, coords, global_coords, entity_limit): num_entities = None status = world.CHUNK_NOT_CREATED status_text = "The chunk doesn't exist" - scan_time = time.time() + scan_time = time() except region.RegionHeaderError as e: error = "Region header error: " + e.msg status = world.CHUNK_CORRUPTED status_text = error - scan_time = time.time() + scan_time = time() chunk = None data_coords = None global_coords = world.get_global_chunk_coords(split(region_file.filename)[1], coords[0], coords[1]) @@ -561,7 +591,7 @@ def scan_chunk(region_file, coords, global_coords, entity_limit): error = "Chunk data error: " + e.msg status = world.CHUNK_CORRUPTED status_text = error - scan_time = time.time() + scan_time = time() chunk = None data_coords = None global_coords = world.get_global_chunk_coords(split(region_file.filename)[1], coords[0], coords[1]) @@ -571,7 +601,7 @@ def scan_chunk(region_file, coords, global_coords, entity_limit): error = "Chunk herader error: " + e.msg status = world.CHUNK_CORRUPTED status_text = error - scan_time = time.time() + scan_time = time() chunk = None data_coords = None global_coords = world.get_global_chunk_coords(split(region_file.filename)[1], coords[0], coords[1]) diff --git a/regionfixer_core/util.py b/regionfixer_core/util.py index 27d9b76..b040c88 100644 --- a/regionfixer_core/util.py +++ b/regionfixer_core/util.py @@ -39,7 +39,7 @@ def is_bare_console(): num = GetConsoleProcessList(ctypes.byref(ctypes.c_int(0)), ctypes.c_int(1)) if (num == 1): return True - + except Exception: pass return False diff --git a/regionfixer_core/world.py b/regionfixer_core/world.py index 43e7972..1819d10 100644 --- a/regionfixer_core/world.py +++ b/regionfixer_core/world.py @@ -628,31 +628,48 @@ def __init__(self, world_path): self.regionsets.append(RegionSet(join(self.path, directory))) # level.dat - # let's scan level.dat here so we can extract the world name - # right now + # Let's scan level.dat here so we can extract the world name level_dat_path = join(self.path, "level.dat") if exists(level_dat_path): try: self.level_data = 
nbt.NBTFile(level_dat_path)["Data"] self.name = self.level_data["LevelName"].value - self.scanned_level = ScannedDatFile(level_dat_path, readable = True, status_text = "OK") + self.scanned_level = ScannedDatFile(level_dat_path, + readable=True, + status_text="OK") except Exception, e: self.name = None - self.scanned_level = ScannedDatFile(level_dat_path, readable = False, status_text = e) + self.scanned_level = ScannedDatFile(level_dat_path, + readable=False, + status_text=e) else: self.level_file = None self.level_data = None self.name = None self.scanned_level = ScannedDatFile(None, False, "The file doesn't exist") - # player files - player_paths = glob(join(join(self.path, "players"), "*.dat")) + # Player files + old_player_paths = glob(join(join(self.path, "players"), "*.dat")) + player_paths = glob(join(join(self.path, "playerdata"), "*.dat")) self.players = {} for path in player_paths: + uuid = split(path)[1].split(".")[0] + self.players[uuid] = ScannedDatFile(path) + + # Player files before 1.7.6 + self.old_players = {} + for path in old_player_paths: name = split(path)[1].split(".")[0] - self.players[name] = ScannedDatFile(path) + self.old_players[name] = ScannedDatFile(path) + + # Structures dat files + data_files_paths = glob(join(join(self.path, "data"), "*.dat")) + self.data_files = {} + for path in data_files_paths: + name = split(path)[1] + self.data_files[name] = ScannedDatFile(path) - # does it look like a world folder? + # Does it look like a world folder? region_files = False for region_directory in self.regionsets: if region_directory: @@ -663,7 +680,7 @@ def __init__(self, world_path): self.isworld = False # TODO: Make a Exception for this! so we can use try/except - # set in scan.py, used in interactive.py + # Set in scan.py, used in interactive.py self.scanned = False def __str__(self): @@ -692,7 +709,7 @@ def summary(self): final += "{0:#^60}\n".format(" World name: {0} ".format(self.name)) final += "{0:#^60}\n".format('') - # dat files info + # leve.dat files info final += "\nlevel.dat:\n" if self.scanned_level.readable: final += "\t\'level.dat\' is readable\n" @@ -700,14 +717,32 @@ def summary(self): final += "\t[WARNING]: \'level.dat\' isn't readable, error: {0}\n".format(self.scanned_level.status_text) all_ok = True - final += "\nPlayer files:\n" - for name in self.players: - if not self.players[name].readable: + final += "\nPlayer UUID files:\n" + for p in self.players.values(): + if not p.readable: + all_ok = False + final += "\t-[WARNING]: Player file {0} has problems.\n\t\tError: {1}\n\n".format(p.filename, p.status_text) + if all_ok: + final += "\tAll player files are readable.\n\n" + + all_ok = True + final += "\nOld format player files:\n" + for p in self.old_players.values(): + if not p.readable: all_ok = False - final += "\t-[WARNING]: Player file {0} has problems.\n\t\tError: {1}\n\n".format(self.players[name].filename, self.players[name].status_text) + final += "\t-[WARNING]: Player file {0} has problems.\n\t\tError: {1}\n\n".format(p.filename, p.status_text) if all_ok: final += "\tAll player files are readable.\n\n" + all_ok = True + final += "\nStructures and map data files:\n" + for d in self.data_files.values(): + if not d.readable: + all_ok = False + final += "\t-[WARNING]: File {0} has problems.\n\t\tError: {1}\n\n".format(d.filename, d.status_text) + if all_ok: + final += "\tAll data files are readable.\n\n" + # chunk info chunk_info = "" for regionset in self.regionsets: @@ -901,7 +936,7 @@ def rescan_entities(self, options): 
regionset.rescan_entities(options) def generate_report(self, standalone): - + # collect data corrupted = self.count_chunks(CHUNK_CORRUPTED) wrong_located = self.count_chunks(CHUNK_WRONG_LOCATED) @@ -912,13 +947,44 @@ def generate_report(self, standalone): too_small_region = self.count_regions(REGION_TOO_SMALL) unreadable_region = self.count_regions(REGION_UNREADABLE) total_regions = self.count_regions() - + if standalone: text = "" - - # Print all this info in a table format - chunk_errors = ("Problem","Corrupted","Wrong l.","Etities","Shared o.", "Total chunks") - chunk_counters = ("Counts",corrupted, wrong_located, entities_prob, shared_prob, total_chunks) + + # Print all the player files with problems + broken_players = [p for p in self.players.values() if not p.readable] + broken_players.extend([p for p in self.old_players.values() if not p.readable]) + if broken_players: + text += "\nUnreadable player files:\n" + broken_player_files = [p.filename for p in broken_players] + text += "\n".join(broken_player_files) + text += "\n" + else: + text += "\nAll player files are readable\n" + + # Now all the data files + broken_data_files = [d for d in self.data_files.values() if not d.readable] + if broken_data_files: + text += "\nUnreadable data files:\n" + broken_data_filenames = [p.filename for p in broken_players] + text += "\n".join(broken_data_filenames) + text += "\n" + else: + text += "\nAll data files are readable\n" + + # Print all chunk info in a table format + chunk_errors = ("Problem", + "Corrupted", + "Wrong l.", + "Entities", + "Shared o.", + "Total chunks") + chunk_counters = ("Counts", + corrupted, + wrong_located, + entities_prob, + shared_prob, + total_chunks) table_data = [] for i, j in zip(chunk_errors, chunk_counters): table_data.append([i,j]) @@ -942,7 +1008,9 @@ def generate_report(self, standalone): return text else: - return corrupted, wrong_located, entities_prob, shared_prob, total_chunks, too_small_region, unreadable_region, total_regions + return corrupted, wrong_located, entities_prob, shared_prob,\ + total_chunks, too_small_region, unreadable_region,\ + total_regions From eca3946ec2d8e36e6ae19b7d5d72867189499977 Mon Sep 17 00:00:00 2001 From: Alejandro Aguilera Date: Mon, 16 Jun 2014 14:15:52 +0200 Subject: [PATCH 005/151] temporal commit --- gui/main.py | 40 +++++++++++++++++++++------------------- regionfixer_gui.py | 11 ++++++++--- 2 files changed, 29 insertions(+), 22 deletions(-) diff --git a/gui/main.py b/gui/main.py index 07591ad..92ebb5e 100644 --- a/gui/main.py +++ b/gui/main.py @@ -16,6 +16,8 @@ class MainWindow(wx.Frame): def __init__(self, parent, title, backups = None): wx.Frame.__init__(self, parent, title=title, size = (300,400)) + panel = wx.Panel(self) + self.backups = backups # Variables @@ -52,54 +54,54 @@ def __init__(self, parent, title, backups = None): # Create elements in the window # First row: - self.status_text = wx.StaticText(self, style=wx.TE_MULTILINE, label="test") - self.open_button = wx.Button(self, label="Open") - self.scan_button = wx.Button(self, label="Scan") + self.status_text = wx.StaticText(panel, style=wx.TE_MULTILINE, label="test") + self.open_button = wx.Button(panel, label="Open") + self.scan_button = wx.Button(panel, label="Scan") self.scan_button.Disable() self.firstrow_sizer = wx.BoxSizer(wx.HORIZONTAL) self.firstrow_sizer.Add(self.status_text, 1, wx.ALIGN_CENTER) self.firstrow_sizer.Add(self.open_button, 0, wx.EXPAND) self.firstrow_sizer.Add(self.scan_button, 0, wx.EXPAND) - self.firstrow_static_box = 
wx.StaticBox(self, label = "World loaded") + self.firstrow_static_box = wx.StaticBox(panel, label = "World loaded") self.firstrow_static_box_sizer = wx.StaticBoxSizer(self.firstrow_static_box) self.firstrow_static_box_sizer.Add(self.firstrow_sizer, 1, wx.EXPAND) # Second row: - self.proc_info_text = wx.StaticText(self, label="Threads to use: ") - self.proc_text = wx.TextCtrl(self, value="1") - self.el_info_text = wx.StaticText(self, label="Entity limit: " ) - self.el_text = wx.TextCtrl(self, value="150") + self.proc_info_text = wx.StaticText(panel, label="Threads to use: ") + self.proc_text = wx.TextCtrl(panel, value="1") + self.el_info_text = wx.StaticText(panel, label="Entity limit: " ) + self.el_text = wx.TextCtrl(panel, value="150") self.secondrow_sizer = wx.BoxSizer(wx.HORIZONTAL) self.secondrow_sizer.Add(self.proc_info_text, 0, wx.ALIGN_CENTER) self.secondrow_sizer.Add(self.proc_text, 0, wx.ALIGN_LEFT) self.secondrow_sizer.Add(self.el_info_text, 0, wx.ALIGN_CENTER) self.secondrow_sizer.Add(self.el_text, 0, wx.ALIGN_RIGHT) - self.secondrow_static_box_sizer = wx.StaticBoxSizer(wx.StaticBox(self, label = "Scan options")) + self.secondrow_static_box_sizer = wx.StaticBoxSizer(wx.StaticBox(panel, label = "Scan options")) self.secondrow_static_box_sizer.Add(self.secondrow_sizer, 1, wx.EXPAND) # Third row: # Note: In order to use a static box add it directly to a # static box sizer and add to the same sizer it's contents - self.results_text = wx.TextCtrl(self, style=wx.TE_READONLY | wx.TE_MULTILINE, value="Scan the world to get results", size = (500,200)) + self.results_text = wx.TextCtrl(panel, style=wx.TE_READONLY | wx.TE_MULTILINE, value="Scan the world to get results", size = (500,200)) # Lets try to create a monospaced font: ffont = wx.Font(9, wx.FONTFAMILY_MODERN, wx.FONTSTYLE_NORMAL, wx.FONTWEIGHT_NORMAL) # print ffont.IsFixedWidth() textattr = wx.TextAttr(font = ffont) self.results_text.SetFont(ffont) - self.results_text_box = wx.StaticBox(self, label="Results", size = (100,100)) + self.results_text_box = wx.StaticBox(panel, label="Results", size = (100,100)) self.results_text_box_sizer = wx.StaticBoxSizer(self.results_text_box) self.results_text_box_sizer.Add(self.results_text, 1, wx.EXPAND) - self.delete_all_chunks_button = wx.Button(self, label = "Delete all bad chunks") - self.replace_all_chunks_button = wx.Button(self, label = "Replace all bad chunks (using backups)") - self.delete_all_regions_button = wx.Button(self, label = "Delete all bad regions") - self.replace_all_regions_button = wx.Button(self, label = "Replace all bad regions (using backups)") + self.delete_all_chunks_button = wx.Button(panel, label = "Delete all bad chunks") + self.replace_all_chunks_button = wx.Button(panel, label = "Replace all bad chunks (using backups)") + self.delete_all_regions_button = wx.Button(panel, label = "Delete all bad regions") + self.replace_all_regions_button = wx.Button(panel, label = "Replace all bad regions (using backups)") self.update_delete_buttons_status(False) self.update_replace_buttons_status(False) self.thirdrow_sizer = wx.BoxSizer(wx.HORIZONTAL) - self.thirdrow_actions_box = wx.StaticBox(self, label="Actions", size = (-1,-1)) + self.thirdrow_actions_box = wx.StaticBox(panel, label="Actions", size = (-1,-1)) self.thirdrow_buttons_box_sizer = wx.StaticBoxSizer(self.thirdrow_actions_box) self.thirdrow_buttons_sizer = wx.BoxSizer(wx.VERTICAL) self.thirdrow_buttons_sizer.Add(self.delete_all_chunks_button, 1, wx.EXPAND) @@ -117,7 +119,7 @@ def __init__(self, parent, title, 
backups = None): self.frame_sizer.Add(self.thirdrow_sizer, 1, wx.EXPAND) # Layout sizers - self.SetSizerAndFit(self.frame_sizer) + panel.SetSizerAndFit(self.frame_sizer) self.frame_sizer.Fit(self) @@ -293,7 +295,7 @@ def __init__(self, main): ws.scan() counter = 0 while not ws.finished: - sleep(0.01) + sleep(0.001) result = ws.get_last_result() rs = ws.current_regionset if result: @@ -315,7 +317,7 @@ def __init__(self, main): counter = 0 last_player = "" while not ps.finished: - sleep(0.001) + sleep(0.0001) result = ps.get_last_result() if result: counter += 1 diff --git a/regionfixer_gui.py b/regionfixer_gui.py index 1e3a354..9ab54f8 100644 --- a/regionfixer_gui.py +++ b/regionfixer_gui.py @@ -1,11 +1,16 @@ #!/usr/bin/env python # -*- coding: utf-8 -*- +from multiprocessing import freeze_support +import sys + # Needed for the gui import regionfixer_core import nbt from gui import Starter - -s = Starter() -s.run() +if __name__ == '__main__': + freeze_support() + s = Starter() + value = s.run() + sys.exit(value) From 29b6e8701968ae1703c02217480943831208f841 Mon Sep 17 00:00:00 2001 From: Alejandro Aguilera Date: Tue, 17 Jun 2014 08:15:04 +0200 Subject: [PATCH 006/151] Change contributors list format. --- CONTRIBUTORS.txt | 5 ++++- 1 file changed, 4 insertions(+), 1 deletion(-) diff --git a/CONTRIBUTORS.txt b/CONTRIBUTORS.txt index e4fc248..a35e716 100644 --- a/CONTRIBUTORS.txt +++ b/CONTRIBUTORS.txt @@ -1,6 +1,9 @@ -In no particular order: +Original author: Fenixin (Alejandro Aguilera) - Main developer + +Contributors (in no particular order): + aheadley (Alex Headley) - First multiprocessing version of Region Fixer. carlallen (Carl Allen) - Fix problem in MacOS kbn (Kristian Berge) - Small fixes From a9e7626db4792a920812372180f1d0deb26cb6f5 Mon Sep 17 00:00:00 2001 From: Alejandro Aguilera Date: Tue, 17 Jun 2014 09:51:38 +0200 Subject: [PATCH 007/151] Update the way to store player and data info. Fix problem while scanning data files. --- regionfixer.py | 33 ++++++++++++++++++++------------- regionfixer_core/scan.py | 14 ++++++++++---- regionfixer_core/world.py | 31 +++++++++++++++---------------- 3 files changed, 45 insertions(+), 33 deletions(-) diff --git a/regionfixer.py b/regionfixer.py index f52cfd0..bebce2d 100644 --- a/regionfixer.py +++ b/regionfixer.py @@ -44,11 +44,15 @@ def update(self, pbar): def delete_bad_chunks(options, scanned_obj): - """ Takes a scanned object (world object or regionset object) and + """ Takes a scanned object (world object or regionset object) and the options given to region-fixer, it deletes all the chunks with problems iterating through all the possible problems. 
""" - print # a blank line - options_delete = [options.delete_corrupted, options.delete_wrong_located, options.delete_entities, options.delete_shared_offset] + print + # In the same order as in CHUNK_PROBLEMS + options_delete = [options.delete_corrupted, + options.delete_wrong_located, + options.delete_entities, + options.delete_shared_offset] deleting = zip(options_delete, world.CHUNK_PROBLEMS) for delete, problem in deleting: status = world.CHUNK_STATUS_TEXT[problem] @@ -56,19 +60,19 @@ def delete_bad_chunks(options, scanned_obj): if delete: if total: text = ' Deleting chunks with status: {0} '.format(status) - print "{0:#^60}".format(text) + print "\n{0:#^60}".format(text) counter = scanned_obj.remove_problematic_chunks(problem) - - print "\nDeleted {0} chunks with status: {1}".format(counter,status) + print "\nDeleted {0} chunks with status: {1}".format(counter, + status) else: print "No chunks to delete with status: {0}".format(status) def delete_bad_regions(options, scanned_obj): - """ Takes an scanned object (world object or regionset object) and + """ Takes an scanned object (world object or regionset object) and the options give to region-fixer, it deletes all the region files with problems iterating through all the possible problems. """ - print # a blank line + print options_delete = [options.delete_too_small] deleting = zip(options_delete, world.REGION_PROBLEMS) for delete, problem in deleting: @@ -79,8 +83,8 @@ def delete_bad_regions(options, scanned_obj): text = ' Deleting regions with status: {0} '.format(status) print "{0:#^60}".format(text) counter = scanned_obj.remove_problematic_regions(problem) - - print "Deleted {0} regions with status: {1}".format(counter,status) + print "Deleted {0} regions with status: {1}".format(counter, + status) else: print "No regions to delete with status: {0}".format(status) @@ -384,15 +388,18 @@ def main(): # scan all the world folders for w in world_list: - print entitle(' Scanning world: {0} '.format(w.get_name()), 0) + w_name = w.get_name() + print entitle(' Scanning world: {0} '.format(w_name), 0) console_scan_world(w, o.processes, o.entity_limit, o.delete_entities) + print + print entitle('Scan results for: {0}'.format(w_name), 0) print w.generate_report(True) -# corrupted, wrong_located, entities_prob, shared_prob, \ -# total_chunks, too_small_region, unreadable_region, total_regions \ +# corrupted, wrong_located, entities_prob, shared_prob,\ +# total_chunks, too_small_region, unreadable_region, total_regions\ # = w.generate_report(standalone = False) print diff --git a/regionfixer_core/scan.py b/regionfixer_core/scan.py index c7a197e..575af00 100644 --- a/regionfixer_core/scan.py +++ b/regionfixer_core/scan.py @@ -45,7 +45,7 @@ TUPLE_STATUS = 1 -# logging.basicConfig(filename='scan.log', level=logging.DEBUG) +logging.basicConfig(filename='scan.log', level=logging.DEBUG) class ChildProcessException(Exception): @@ -224,7 +224,7 @@ def get_last_result(self): # raise ChildProcessException("Something went wrong \ # scanning a player-file.") # Overwrite it in the regionset - self._player_dict[p.filename.split('.')[0]] = p + self._player_dict[p.filename] = p return p else: return None @@ -297,6 +297,8 @@ def console_scan_world(world_obj, processes, entity_limit, remove_entities): if result: counter += 1 pbar.update(counter) + + pbar.finish() # Scan old player files print "\n{0:-^60}".format(' Scanning old format player files ') @@ -317,14 +319,16 @@ def console_scan_world(world_obj, processes, entity_limit, remove_entities): counter += 1 
pbar.update(counter) - # Scan dat files + pbar.finish() + + # Scan data files print "\n{0:-^60}".format(' Scanning structures and map data files ') if not w.data_files: print "Info: No structures and map data files to scan." else: total_files = len(w.data_files) pbar = progressbar.ProgressBar(widgets=widgets, - maxval=total_players) + maxval=total_files) ps = AsyncPlayerScanner(w.data_files, processes) ps.scan() @@ -336,6 +340,8 @@ def console_scan_world(world_obj, processes, entity_limit, remove_entities): counter += 1 pbar.update(counter) + pbar.finish() + # SCAN ALL THE CHUNKS! if w.get_number_regions == 0: print "No region files to scan!" diff --git a/regionfixer_core/world.py b/regionfixer_core/world.py index 1819d10..3b5048a 100644 --- a/regionfixer_core/world.py +++ b/regionfixer_core/world.py @@ -115,7 +115,7 @@ def __init__(self, path=None, readable=None, status_text=None): self.status_text = status_text def __str__(self): - text = "NBT file:" + str(self.path) + "\n" + text = "NBT file:" + str(self.filename) + "\n" text += "\tReadable:" + str(self.readable) + "\n" return text @@ -653,21 +653,21 @@ def __init__(self, world_path): player_paths = glob(join(join(self.path, "playerdata"), "*.dat")) self.players = {} for path in player_paths: - uuid = split(path)[1].split(".")[0] - self.players[uuid] = ScannedDatFile(path) + filename = split(path)[1] + self.players[filename] = ScannedDatFile(path) # Player files before 1.7.6 self.old_players = {} for path in old_player_paths: - name = split(path)[1].split(".")[0] - self.old_players[name] = ScannedDatFile(path) + filename = split(path)[1] + self.old_players[filename] = ScannedDatFile(path) # Structures dat files data_files_paths = glob(join(join(self.path, "data"), "*.dat")) self.data_files = {} for path in data_files_paths: - name = split(path)[1] - self.data_files[name] = ScannedDatFile(path) + filename = split(path)[1] + self.data_files[filename] = ScannedDatFile(path) # Does it look like a world folder? region_files = False @@ -876,7 +876,7 @@ def replace_problematic_regions(self, backup_worlds, problem, options): if temp_regionset._get_dimension_directory() == regionset._get_dimension_directory(): b_regionset = temp_regionset break - + bad_regions = regionset.list_regions(problem) if bad_regions and b_regionset._get_dimension_directory() != regionset._get_dimension_directory(): print "The regionset \'{0}\' doesn't exist in the backup directory. Skipping this backup directory.".format(regionset._get_dimension_directory()) @@ -885,13 +885,13 @@ def replace_problematic_regions(self, backup_worlds, problem, options): print "\n{0:-^60}".format(' New region file to replace! Coords {0} '.format(r.get_coords())) # search for the region file - + try: backup_region_path = b_regionset[r.get_coords()].get_path() except: backup_region_path = None tofix_region_path = r.get_path() - + if backup_region_path != None and exists(backup_region_path): print "Backup region file found in:\n {0}".format(backup_region_path) # check the region file, just open it. @@ -910,7 +910,6 @@ def replace_problematic_regions(self, backup_worlds, problem, options): print "The region file doesn't exist in the backup directory: {0}".format(backup_region_path) return counter - def remove_problematic_regions(self, problem): """ Removes all the regions files with the given problem. 
@@ -952,25 +951,25 @@ def generate_report(self, standalone): text = "" # Print all the player files with problems + text += "\nUnreadable player files:\n" broken_players = [p for p in self.players.values() if not p.readable] broken_players.extend([p for p in self.old_players.values() if not p.readable]) if broken_players: - text += "\nUnreadable player files:\n" broken_player_files = [p.filename for p in broken_players] text += "\n".join(broken_player_files) text += "\n" else: - text += "\nAll player files are readable\n" + text += "No problems found.\n" # Now all the data files + text += "\nUnreadable data files:\n" broken_data_files = [d for d in self.data_files.values() if not d.readable] if broken_data_files: - text += "\nUnreadable data files:\n" - broken_data_filenames = [p.filename for p in broken_players] + broken_data_filenames = [p.filename for p in broken_data_files] text += "\n".join(broken_data_filenames) text += "\n" else: - text += "\nAll data files are readable\n" + text += "No problems found.\n" # Print all chunk info in a table format chunk_errors = ("Problem", From 49ef1952d8887178be48c1651840854eecbc8d1c Mon Sep 17 00:00:00 2001 From: Alejandro Aguilera Date: Thu, 19 Jun 2014 09:20:32 +0200 Subject: [PATCH 008/151] Update scan methods. Properly finish child processes after scanning. --- regionfixer_core/scan.py | 284 ++++++++++++++++++++++++--------------- 1 file changed, 178 insertions(+), 106 deletions(-) diff --git a/regionfixer_core/scan.py b/regionfixer_core/scan.py index 575af00..9f6fac0 100644 --- a/regionfixer_core/scan.py +++ b/regionfixer_core/scan.py @@ -90,12 +90,20 @@ def __init__(self, regionset, processes, entity_limit, initializer=_mp_pool_init, initargs=(regionset, entity_limit, remove_entities, q)) + # Recommended time to sleep between polls for results + self.scan_wait_time = 0.001 + def scan(self): """ Scan and fill the given regionset. """ total_regions = len(self._regionset.regions) self._results = self.pool.map_async(multithread_scan_regionfile, self._regionset.list_regions(None), max(1,total_regions//self.processes)) + # No more tasks to the pool, exit the processes once the tasks are done + self.pool.close() + + # See method + self._str_last_scanned = "" def get_last_result(self): """ Return results of last region file scanned. @@ -117,11 +125,24 @@ def get_last_result(self): raise ChildProcessException("Something went wrong \ scanning a region-file.") # Overwrite it in the regionset + if isinstance(r, tuple): + raise ChildProcessException(r) self._regionset[r.get_coords()] = r + self._str_last_scanned = self._regionset.get_name() + ": " + r.filename return r else: return None + def terminate(self): + """ Terminate the pool, this will exit no matter what. + """ + self.pool.terminate() + + @property + def str_last_scanned(self): + """ A friendly string with last scanned thing. """ + return self._str_last_scanned if self._str_last_scanned else "Scanning..." + @property def finished(self): """ Finished the operation. The queue could have elements """ @@ -131,6 +152,35 @@ def finished(self): def regionset(self): return self._regionset + @property + def results(self): + """ Yield all the results from the scan. + + This is the preferred method to report scanning status. 
This way you + can iterate through all the results in easy way: + + for result in scanner.results: + # do things + """ + + q = self.queue + logging.debug("AsyncRegionsetScanner: starting yield results") + while not q.empty() or not self.finished: + sleep(0.0001) + logging.debug("AsyncRegionsetScanner: in while") + if not q.empty(): + r = q.get() + logging.debug("AsyncRegionsetScanner: result: {0}".format(r)) + # if r is None: + # # Something went wrong scanning! + # raise ChildProcessException("Something went wrong \ + # scanning a region-file.") + # Overwrite it in the regionset + self._regionset[r.get_coords()] = r + yield r + + def __len__(self): + return len(self._regionset) class AsyncWorldScanner(object): def __init__(self, world_obj, processes, entity_limit, @@ -144,6 +194,10 @@ def __init__(self, world_obj, processes, entity_limit, self.regionsets = copy(world_obj.regionsets) self._current_regionset = None + self._str_last_scanned = "" + + # Recommended time to sleep between polls for results + self.scan_wait_time = 0.001 def scan(self): """ Scan and fill the given regionset. """ @@ -160,22 +214,34 @@ def get_last_result(self): If there are left no scanned region files return None. The ScannedRegionFile returned is the same instance in the regionset, don't modify it or you will modify the regionset results. + + This method is better if you want to closely control the scan + process. """ cr = self._current_regionset logging.debug("AsyncWorldScanner: current_regionset {0}".format(cr)) if cr is not None: logging.debug("AsyncWorldScanner: cr.finished {0}".format(cr.finished)) if not cr.finished: - return cr.get_last_result() + r = cr.get_last_result() + self._str_last_scanned = cr.str_last_scanned + return r elif self.regionsets: self.scan() return None else: return None - else: return None + def terminate(self): + self._current_regionset.terminate() + + @property + def str_last_scanned(self): + """ A friendly string with last scanned thing. """ + return self._str_last_scanned + @property def current_regionset(self): return self._current_regionset.regionset @@ -189,42 +255,74 @@ def finished(self): def world_obj(self): return self._world_obj + @property + def results(self): + """ Yield all the results from the scan. + + This is the simpler method to control the scanning process, + but also the most slopy. If you want to closely control the + scan process (for example cancel the process in the middle, + whatever is happening) use get_last_result(). -class AsyncPlayerScanner(object): - def __init__(self, player_dict, processes): + Example using this method: - self._player_dict = player_dict + for result in scanner.results: + # do things + """ + + while not self.finished: + cr = self._current_regionset + if cr and not cr.finished: + for r in cr.results: + yield r + elif self.regionsets: + self.scan() + + def __len__(self): + l = 0 + for rs in self.regionsets: + l += len(rs) + return l + +class AsyncDataScanner(object): + def __init__(self, data_dict, processes): + + self._data_dict = data_dict self.processes = processes self.queue = q = queues.SimpleQueue() self.pool = multiprocessing.Pool(processes=processes, initializer=_mp_player_pool_init, initargs=(q,)) + # Recommended time to sleep between polls for results + self.scan_wait_time = 0.0001 def scan(self): - """ Scan and fill the given player_dict generated by world.py. """ - total_players = len(self._player_dict) - player_list = self._player_dict.values() + """ Scan and fill the given data_dict generated by world.py. 
""" + total_players = len(self._data_dict) + player_list = self._data_dict.values() self._results = self.pool.map_async(multiprocess_scan_player, player_list, max(1, total_players//self.processes)) + # No more tasks to the pool, exit the processes once the tasks are done + self.pool.close() def get_last_result(self): """ Return results of last player scanned. """ q = self.queue - logging.debug("AsyncPlayerScanner: starting get_last_result") - logging.debug("AsyncPlayerScanner: queue empty: {0}".format(q.empty())) + logging.debug("AsyncDataScanner: starting get_last_result") + logging.debug("AsyncDataScanner: queue empty: {0}".format(q.empty())) if not q.empty(): - logging.debug("AsyncPlayerScanner: queue not empty") + logging.debug("AsyncDataScanner: queue not empty") p = q.get() - logging.debug("AsyncPlayerScanner: result: {0}".format(p)) + logging.debug("AsyncDataScanner: result: {0}".format(p)) # if p is None: # # Something went wrong scanning! # raise ChildProcessException("Something went wrong \ # scanning a player-file.") # Overwrite it in the regionset - self._player_dict[p.filename] = p + self._data_dict[p.filename] = p return p else: return None @@ -235,10 +333,39 @@ def finished(self): return self._results.ready() and self.queue.empty() @property - def player_dict(self): - return self._player_dict + def data_dict(self): + return self._data_dict + @property + def results(self): + """ Yield all the results from the scan. + This is the preferred method to report scanning status. This way you + can iterate through all the results in easy way: + + for result in scanner.results: + # do things + """ + + q = self.queue + logging.debug("AsyncDataScanner: starting yield results") + logging.debug("AsyncDataScanner: queue empty: {0}".format(q.empty())) + while not q.empty() or not self.finished: + sleep(0.0001) + logging.debug("AsyncDataScanner: in while") + if not q.empty(): + p = q.get() + logging.debug("AsyncDataScanner: result: {0}".format(p)) + # if p is None: + # # Something went wrong scanning! + # raise ChildProcessException("Something went wrong \ + # scanning a player-file.") + # Overwrite it in the data dict + self._data_dict[p.filename] = p + yield p + + def __len__(self): + return len(self._data_dict) # All scanners will use this progress bar widgets = ['Scanning: ', @@ -254,9 +381,12 @@ def player_dict(self): def console_scan_world(world_obj, processes, entity_limit, remove_entities): """ Scans a world folder including players and prints status to console. - This functions uses AsyncPlayerScanner and AsyncWorldScanner. + This functions uses AsyncDataScanner and AsyncWorldScanner. """ + # Time to wait beween asking for results. Note that if the time is too big + # results will be waiting in the queue and the scan will take longer just + # because of this. w = world_obj # Scan the world directory print "World info:" @@ -279,98 +409,40 @@ def console_scan_world(world_obj, processes, entity_limit, remove_entities): print "[WARNING!]: \'level.dat\' is corrupted with the following error/s:" print "\t {0}".format(w.scanned_level.status_text) - # Scan player files - print "\n{0:-^60}".format(' Scanning UUID player files ') - if not w.players: - print "Info: No player files to scan." 
- else: - total_players = len(w.players) - pbar = progressbar.ProgressBar(widgets=widgets, - maxval=total_players) - - ps = AsyncPlayerScanner(w.players, processes) - ps.scan() - counter = 0 - while not ps.finished: - sleep(0.001) - result = ps.get_last_result() - if result: - counter += 1 - pbar.update(counter) - - pbar.finish() - - # Scan old player files - print "\n{0:-^60}".format(' Scanning old format player files ') - if not w.old_players: - print "Info: No old format player files to scan." - else: - total_players = len(w.old_players) - pbar = progressbar.ProgressBar(widgets=widgets, - maxval=total_players) - - ps = AsyncPlayerScanner(w.old_players, processes) - ps.scan() - counter = 0 - while not ps.finished: - sleep(0.001) - result = ps.get_last_result() - if result: - counter += 1 - pbar.update(counter) + ps = AsyncDataScanner(w.players, processes) + ops = AsyncDataScanner(w.old_players, processes) + ds = AsyncDataScanner(w.data_files, processes) + ws = AsyncWorldScanner(w, processes, entity_limit, remove_entities) - pbar.finish() + scanners = [ps, ops, ds, ws] - # Scan data files - print "\n{0:-^60}".format(' Scanning structures and map data files ') - if not w.data_files: - print "Info: No structures and map data files to scan." - else: - total_files = len(w.data_files) - pbar = progressbar.ProgressBar(widgets=widgets, - maxval=total_files) - - ps = AsyncPlayerScanner(w.data_files, processes) - ps.scan() - counter = 0 - while not ps.finished: - sleep(0.001) - result = ps.get_last_result() - if result: - counter += 1 - pbar.update(counter) - - pbar.finish() - - # SCAN ALL THE CHUNKS! - if w.get_number_regions == 0: - print "No region files to scan!" - else: - print "\n{0:-^60}".format(' Scanning region files ') - #Scan world regionsets - ws = AsyncWorldScanner(w, processes, entity_limit, - remove_entities) - - total_regions = ws.world_obj.count_regions() - pbar = progressbar.ProgressBar(widgets=widgets, - maxval=total_regions) - pbar = progressbar.ProgressBar( - widgets=widgets, - maxval=total_regions) - pbar.start() - ws.scan() - - counter = 0 - while not ws.finished: - sleep(0.01) - result = ws.get_last_result() - if result: - counter += 1 - pbar.update(counter) - - pbar.finish() - - w.scanned = True + scan_titles = [' Scanning UUID player files ', + ' Scanning old format player files ', + ' Scanning structures and map data files ', + ' Scanning region files '] + try: + for scanner, title in zip(scanners, scan_titles): + # Scan player files + print "\n{0:-^60}".format(title) + if not len(scanner): + print "Info: No files to scan." + else: + total = len(scanner) + pbar = progressbar.ProgressBar(widgets=widgets, maxval=total) + scanner.scan() + counter = 0 + while not scanner.finished: + sleep(scanner.scan_wait_time) + result = scanner.get_last_result() + if result: + counter += 1 + pbar.update(counter) + pbar.finish() + w.scanned = True + except ChildProcessException, e: + print "Something went really wrong scanning a region file." + print "See a log for more details!" + print e def console_scan_regionset(regionset, processes, entity_limit, From 36e48cfcc00399c399cb8ba395dad5beeaabb5d4 Mon Sep 17 00:00:00 2001 From: Alejandro Aguilera Date: Thu, 19 Jun 2014 09:21:11 +0200 Subject: [PATCH 009/151] Update scan mehotds. Move all windows to use panel which fixes bad windows colors in windows 8. 
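The panel change applied throughout this commit boils down to one pattern, shown here as a minimal sketch (not code from the patch; the class and widget names are invented): every widget is parented to a wx.Panel that is the frame's only child, the sizer is fitted to the panel, and the panel's sizer is then fitted to the frame. On Windows 8 this gives dialogs the native background colour instead of the dark frame background.

#!/usr/bin/env python
# -*- coding: utf-8 -*-
# Illustrative sketch of the frame -> panel -> widgets pattern adopted below.

import wx


class SketchFrame(wx.Frame):
    def __init__(self, parent, title="Sketch"):
        wx.Frame.__init__(self, parent, title=title)
        # The panel is the frame's only child; parenting the widgets to it
        # (instead of to the frame) gives them the native look on Windows.
        panel = wx.Panel(self)

        text = wx.StaticText(panel, label="Widgets live on the panel")
        close_button = wx.Button(panel, wx.ID_CLOSE)

        sizer = wx.BoxSizer(wx.VERTICAL)
        sizer.Add(text, 0, wx.ALIGN_CENTER | wx.ALL, 10)
        sizer.Add(close_button, 0, wx.ALIGN_CENTER | wx.ALL, 10)

        # Fit the sizer to the panel, then the frame to the sizer.
        panel.SetSizerAndFit(sizer)
        sizer.Fit(self)


if __name__ == '__main__':
    app = wx.App(False)
    SketchFrame(None).Show(True)
    app.MainLoop()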
--- gui/about.py | 71 ++++++++++----------- gui/backups.py | 51 ++++++++------- gui/main.py | 168 +++++++++++++++++++------------------------------ gui/starter.py | 3 +- 4 files changed, 128 insertions(+), 165 deletions(-) diff --git a/gui/about.py b/gui/about.py index 1fee7b0..c226c73 100644 --- a/gui/about.py +++ b/gui/about.py @@ -5,49 +5,50 @@ class AboutWindow(wx.Frame): - def __init__(self, parent, title = "About"): - wx.Frame.__init__(self, parent, title=title, style = wx.CLOSE_BOX | wx.RESIZE_BORDER | wx.CAPTION) - - self.about1 = wx.StaticText(self, style=wx.ALIGN_CENTER, label="Minecraft Region-Fixer (GUI)") - self.about2 = wx.StaticText(self, style=wx.ALIGN_CENTER, label="Fix problems in Minecraft worlds.") - self.about3 = wx.StaticText(self, style=wx.ALIGN_CENTER, label="Official-web:") - self.link_github = \ - wx.HyperlinkCtrl(self, wx.ID_ABOUT, - "https://github.com/Fenixin/Minecraft-Region-Fixer", - "https://github.com/Fenixin/Minecraft-Region-Fixer", - style = wx.ALIGN_CENTER) - self.about4 = wx.StaticText(self, style=wx.TE_MULTILINE | wx.ALIGN_CENTER, label="Minecraft forums post:") - self.link_minecraft_forums = \ - wx.HyperlinkCtrl(self, wx.ID_ABOUT, - "http://www.minecraftforum.net/topic/302380-minecraft-region-fixer/", - "http://www.minecraftforum.net/topic/302380-minecraft-region-fixer/", - style = wx.ALIGN_CENTER) - - self.close_button = wx.Button(self, wx.ID_CLOSE) - - + def __init__(self, parent, title="About"): + wx.Frame.__init__(self, parent, title=title, + style=wx.CLOSE_BOX | wx.RESIZE_BORDER | wx.CAPTION) + # Every windows should use panel as parent. Not doing so will + # make the windows look non-native (very ugly) + panel = wx.Panel(self) + + self.about1 = wx.StaticText(panel, style=wx.ALIGN_CENTER, + label="Minecraft Region-Fixer (GUI)") + self.about2 = wx.StaticText(panel, style=wx.ALIGN_CENTER, + label="Fix problems in Minecraft worlds.") + self.about3 = wx.StaticText(panel, style=wx.ALIGN_CENTER, + label="Official-web:") + self.link_github = wx.HyperlinkCtrl(panel, wx.ID_ABOUT, + "https://github.com/Fenixin/Minecraft-Region-Fixer", + "https://github.com/Fenixin/Minecraft-Region-Fixer", + style=wx.ALIGN_CENTER) + self.about4 = wx.StaticText(panel, + style=wx.TE_MULTILINE | wx.ALIGN_CENTER, + label="Minecraft forums post:") + self.link_minecraft_forums = wx.HyperlinkCtrl(panel, wx.ID_ABOUT, + "http://www.minecraftforum.net/topic/302380-minecraft-region-fixer/", + "http://www.minecraftforum.net/topic/302380-minecraft-region-fixer/", + style=wx.ALIGN_CENTER) + + self.close_button = wx.Button(panel, wx.ID_CLOSE) + self.sizer = wx.BoxSizer(wx.VERTICAL) self.sizer.Add(self.about1, 0, wx.ALIGN_CENTER | wx.TOP, 10) self.sizer.Add(self.about2, 0, wx.ALIGN_CENTER) - self.sizer.Add(self.about3, 0, wx.ALIGN_CENTER| wx.TOP, 20) + self.sizer.Add(self.about3, 0, wx.ALIGN_CENTER | wx.TOP, 20) self.sizer.Add(self.link_github, 0, wx.ALIGN_CENTER | wx.ALL, 5) self.sizer.Add(self.about4, 0, wx.ALIGN_CENTER | wx.TOP, 20) self.sizer.Add(self.link_minecraft_forums, 0, wx.ALIGN_CENTER | wx.ALL, 5) - self.sizer.Add(self.close_button, 0, wx.ALIGN_CENTER | wx.ALL, 10) - - self.SetSizerAndFit(self.sizer) - size = self.sizer.GetMinSize() + self.sizer.Add(self.close_button, 0, wx.ALIGN_CENTER | wx.ALL, 20) + + # Fit sizers and make the windows not resizable + panel.SetSizerAndFit(self.sizer) + self.sizer.Fit(self) + size = self.GetSize() self.SetMinSize(size) self.SetMaxSize(size) - - + self.Bind(wx.EVT_BUTTON, self.OnClose, self.close_button) - + def OnClose(self, e): 
self.Show(False) - - - - - - diff --git a/gui/backups.py b/gui/backups.py index 84d3b07..cc33218 100644 --- a/gui/backups.py +++ b/gui/backups.py @@ -12,17 +12,21 @@ class BackupsWindow(wx.Frame): def __init__(self, parent, title): - wx.Frame.__init__(self, parent, title=title, size = (100,500)) - + wx.Frame.__init__(self, parent, title=title) + # Every windows should use panel as parent. Not doing so will + # make the windows look non-native (very ugly) + panel = wx.Panel(self) + # Sizer with all the elements in the window self.all_sizer = wx.BoxSizer(wx.VERTICAL) - + # Text with help in the top - self.help_text = wx.StaticText(self, style=wx.TE_MULTILINE, label="Region-Fixer will use the worlds in\nthis list in top-down order.") - + self.help_text = wx.StaticText(panel, style=wx.TE_MULTILINE, + label=("Region-Fixer will use the worlds in\n" + "this list in top-down order.")) + # List of worlds to use as backups - self.world_list_box = wx.ListBox(self, size = (80, 100) ) - #~ test_list = ["world1", "world2", "world3"] + self.world_list_box = wx.ListBox(panel, size=(180, 100)) test_list = [] self.world_list_box.Set(test_list) # Here will be the worlds to use as backup @@ -30,30 +34,33 @@ def __init__(self, parent, title): self.world_list_text = test_list[:] # Last path we used in the file dialog self.last_path = "" - + # Buttons self.buttons_sizer = wx.BoxSizer(wx.HORIZONTAL) - self.add = wx.Button(self, label = "Add") - self.move_up = wx.Button(self, label = "Move up") - self.move_down = wx.Button(self, label = "Move down") + self.add = wx.Button(panel, label="Add") + self.move_up = wx.Button(panel, label="Move up") + self.move_down = wx.Button(panel, label="Move down") self.buttons_sizer.Add(self.add, 0, 0) self.buttons_sizer.Add(self.move_up, 0, 0) self.buttons_sizer.Add(self.move_down, 0, 0) - + # Add things to the general sizer - self.all_sizer.Add(self.help_text, 0, wx.GROW | wx.ALL, 10) - self.all_sizer.Add(self.world_list_box, 1, wx.EXPAND | wx.ALL, 10) - self.all_sizer.Add(self.buttons_sizer, 0, wx.ALIGN_CENTER | wx.ALL, 10) - + self.all_sizer.Add(self.help_text, proportion=0, + flag=wx.GROW | wx.ALL, border=10) + self.all_sizer.Add(self.world_list_box, proportion=1, + flag=wx.EXPAND | wx.ALL, border=10) + self.all_sizer.Add(self.buttons_sizer, proportion=0, + flag=wx.ALIGN_CENTER | wx.ALL, border=10) + # Layout sizers - self.SetSizerAndFit(self.all_sizer) + panel.SetSizerAndFit(self.all_sizer) # Bindings self.Bind(wx.EVT_CLOSE, self.OnClose) self.Bind(wx.EVT_BUTTON, self.OnAddWorld, self.add) self.Bind(wx.EVT_BUTTON, self.OnMoveUp, self.move_up) self.Bind(wx.EVT_BUTTON, self.OnMoveDown, self.move_down) - + # Show the window, usually False, True for fast testing self.Show(False) @@ -65,13 +72,9 @@ def get_dirs(self, list_dirs): if os.path.isdir(p): tmp.append(p) return tmp - - + def are_there_files(self, list_dirs): - """ Given a list of paths return True if there are - any files. - - """ + """ Given a list of paths return True if there are any files. 
""" for d in list_dirs: if not os.path.isdir(d): diff --git a/gui/main.py b/gui/main.py index 92ebb5e..bae5d0b 100644 --- a/gui/main.py +++ b/gui/main.py @@ -2,33 +2,33 @@ # -*- coding: utf-8 -*- import wx -import os from time import sleep from os.path import split from backups import BackupsWindow -from regionfixer_core.scan import AsyncWorldScanner, AsyncPlayerScanner +from regionfixer_core.scan import AsyncWorldScanner, AsyncDataScanner from regionfixer_core import world from regionfixer_core.world import World class MainWindow(wx.Frame): - def __init__(self, parent, title, backups = None): - wx.Frame.__init__(self, parent, title=title, size = (300,400)) - + def __init__(self, parent, title, backups=None): + wx.Frame.__init__(self, parent, title=title, size=(300, 400)) + # Every windows should use panel as parent. Not doing so will + # make the windows look non-native (very ugly) panel = wx.Panel(self) - + self.backups = backups - + # Variables - self.last_path = "" # Last path opened - self.world = None # World to scan - + self.last_path = "" # Last path opened + self.world = None # World to scan + # Status bar self.CreateStatusBar() - + # Create menu - filemenu=wx.Menu() + filemenu = wx.Menu() windowsmenu = wx.Menu() helpmenu = wx.Menu() @@ -36,24 +36,23 @@ def __init__(self, parent, title, backups = None): menuOpen = filemenu.Append(wx.ID_OPEN, "&Open", "Open a Minecraft world") filemenu.AppendSeparator() menuExit = filemenu.Append(wx.ID_EXIT, "E&xit","Terminate program") - + # Add elements to helpmenu menuAbout = helpmenu.Append(wx.ID_ABOUT, "&About", "Information about this program") - + # Add elements to windowsmenu menuBackups = windowsmenu.Append(-1, "&Backups", "Manage list of backups") menuAdvanced = windowsmenu.Append(-1, "A&dvanced actions", "Manage list of backups") - + # Create a menu bar menuBar = wx.MenuBar() menuBar.Append(filemenu,"&File") menuBar.Append(windowsmenu,"&View") menuBar.Append(helpmenu,"&Help") self.SetMenuBar(menuBar) - + # Create elements in the window # First row: - self.status_text = wx.StaticText(panel, style=wx.TE_MULTILINE, label="test") self.open_button = wx.Button(panel, label="Open") self.scan_button = wx.Button(panel, label="Scan") @@ -62,11 +61,10 @@ def __init__(self, parent, title, backups = None): self.firstrow_sizer.Add(self.status_text, 1, wx.ALIGN_CENTER) self.firstrow_sizer.Add(self.open_button, 0, wx.EXPAND) self.firstrow_sizer.Add(self.scan_button, 0, wx.EXPAND) - self.firstrow_static_box = wx.StaticBox(panel, label = "World loaded") + self.firstrow_static_box = wx.StaticBox(panel, label="World loaded") self.firstrow_static_box_sizer = wx.StaticBoxSizer(self.firstrow_static_box) self.firstrow_static_box_sizer.Add(self.firstrow_sizer, 1, wx.EXPAND) - - + # Second row: self.proc_info_text = wx.StaticText(panel, label="Threads to use: ") self.proc_text = wx.TextCtrl(panel, value="1") @@ -77,9 +75,9 @@ def __init__(self, parent, title, backups = None): self.secondrow_sizer.Add(self.proc_text, 0, wx.ALIGN_LEFT) self.secondrow_sizer.Add(self.el_info_text, 0, wx.ALIGN_CENTER) self.secondrow_sizer.Add(self.el_text, 0, wx.ALIGN_RIGHT) - self.secondrow_static_box_sizer = wx.StaticBoxSizer(wx.StaticBox(panel, label = "Scan options")) + self.secondrow_static_box_sizer = wx.StaticBoxSizer(wx.StaticBox(panel, label="Scan options")) self.secondrow_static_box_sizer.Add(self.secondrow_sizer, 1, wx.EXPAND) - + # Third row: # Note: In order to use a static box add it directly to a # static box sizer and add to the same sizer it's contents @@ -164,7 
+162,7 @@ def OnOpen(self, e): self.update_world_status(self.world) # Properly recover the last path used - self.last_path = os.path.split(dlg.GetPath())[0] + self.last_path = split(dlg.GetPath())[0] dlg.Destroy() # Rest the results textctrl @@ -272,87 +270,50 @@ def OnReplaceRegions(self, e): self.update_replace_buttons_status(False) def OnScan(self, e): - # Let's simulate the options stuff - class Options(object): - def __init__(self, main): - self.entity_limit = int(main.el_text.GetValue()) - self.processes = int(main.proc_text.GetValue()) - self.verbose = True - self.delete_entities = False - self.gui = True - - options = Options(self) - progressdlg = wx.ProgressDialog("Scanning...", "Scanning...", - self.world.count_regions(), self, - style=wx.PD_ELAPSED_TIME | wx.PD_ESTIMATED_TIME | - wx.PD_REMAINING_TIME | wx.PD_CAN_SKIP | wx.PD_CAN_ABORT | - wx.PD_AUTO_HIDE | wx.PD_SMOOTH) - options.progressdlg = progressdlg - - ws = AsyncWorldScanner(self.world, options.processes, - options.entity_limit, - options.delete_entities) - ws.scan() - counter = 0 - while not ws.finished: - sleep(0.001) - result = ws.get_last_result() - rs = ws.current_regionset - if result: - counter += 1 - progressdlg.Update(counter, - "Scanning regions from: " + rs.get_name()) - - progressdlg.Destroy() + processes = int(self.proc_text.GetValue()) + entity_limit = int(self.el_text.GetValue()) + delete_entities = False - # TODO: DATA files and old player files - progressdlg = wx.ProgressDialog("Scanning...", "Scanning...", - self.world.count_regions(), self, - style=wx.PD_ELAPSED_TIME | wx.PD_ESTIMATED_TIME | - wx.PD_REMAINING_TIME | wx.PD_CAN_SKIP | wx.PD_CAN_ABORT | - wx.PD_AUTO_HIDE | wx.PD_SMOOTH) - - ps = AsyncPlayerScanner(self.world.players, options.processes) - ps.scan() - counter = 0 - last_player = "" - while not ps.finished: - sleep(0.0001) - result = ps.get_last_result() - if result: - counter += 1 - last_player = result.filename.split('.')[0] - progressdlg.Update(counter, - "Last player scanned: " + last_player) + ps = AsyncDataScanner(self.world.players, processes) + ops = AsyncDataScanner(self.world.old_players, processes) + ds = AsyncDataScanner(self.world.data_files, processes) + ws = AsyncWorldScanner(self.world, processes, entity_limit, + delete_entities) - progressdlg.Destroy() - - # Data files - progressdlg = wx.ProgressDialog("Scanning...", "Scanning...", - self.world.count_regions(), self, - style=wx.PD_ELAPSED_TIME | wx.PD_ESTIMATED_TIME | - wx.PD_REMAINING_TIME | wx.PD_CAN_SKIP | wx.PD_CAN_ABORT | - wx.PD_AUTO_HIDE | wx.PD_SMOOTH) - - ps = AsyncPlayerScanner(self.world.players, options.processes) - ps.scan() - counter = 0 - last_player = "" - while not ps.finished: - sleep(0.001) - result = ps.get_last_result() - if result: - counter += 1 - last_player = result.filename.split('.')[0] - progressdlg.Update(counter, - "Last player scanned: " + last_player) - - progressdlg.Destroy() - - - self.results_text.SetValue(self.world.generate_report(True)) - self.update_delete_buttons_status(True) - + things_to_scan = [ws, ops, ps, ds] + dialog_texts = ["Scanning region files", + "Scanning old format player files", + "Scanning players", + "Scanning data files"] + for scanner, dialog_title in zip(things_to_scan, dialog_texts): + progressdlg = wx.ProgressDialog( + dialog_title, + "Last scanned:\n starting...", + len(scanner), self, + style=wx.PD_ELAPSED_TIME | wx.PD_ESTIMATED_TIME | + wx.PD_REMAINING_TIME | wx.PD_CAN_ABORT | + wx.PD_AUTO_HIDE | wx.PD_SMOOTH) + scanner.scan() + counter = 0 + while not 
scanner.finished: + sleep(0.001) + result = scanner.get_last_result() + if result: + counter += 1 + not_cancelled, not_skipped = progressdlg.Update(counter, + "Last scanned:\n" + ws.str_last_scanned) + if not not_cancelled: + # User pressed cancel + scanner.terminate() + break + progressdlg.Destroy() + if not not_cancelled: + break + else: + # The scan finished successfully + self.world.scanned = True + self.results_text.SetValue(self.world.generate_report(True)) + self.update_delete_buttons_status(True) def update_delete_buttons_status(self, status): @@ -362,7 +323,7 @@ def update_delete_buttons_status(self, status): else: self.delete_all_chunks_button.Disable() self.delete_all_regions_button.Disable() - + def update_replace_buttons_status(self, status): if status: @@ -371,7 +332,6 @@ def update_replace_buttons_status(self, status): else: self.replace_all_chunks_button.Disable() self.replace_all_regions_button.Disable() - def update_world_status(self, world): self.status_text.SetLabel(world.path) diff --git a/gui/starter.py b/gui/starter.py index 7353136..2acb2d4 100644 --- a/gui/starter.py +++ b/gui/starter.py @@ -20,8 +20,7 @@ def __init__(self): self.about = AboutWindow(self.frame, "About") self.frame.backups = self.backups self.frame.about = self.about - - + def run(self): """ Run the app main loop. """ From d23c2d709785ca98473624949b94ea5342e382de Mon Sep 17 00:00:00 2001 From: Alejandro Aguilera Date: Tue, 24 Jun 2014 22:41:25 +0200 Subject: [PATCH 010/151] Add icon to gui. Add version in about. --- gui/about.py | 9 ++++++--- gui/main.py | 15 +++++++++++++-- regionfixer.py | 3 ++- regionfixer_core/scan.py | 2 +- 4 files changed, 22 insertions(+), 7 deletions(-) diff --git a/gui/about.py b/gui/about.py index c226c73..d635510 100644 --- a/gui/about.py +++ b/gui/about.py @@ -3,6 +3,9 @@ import wx +from regionfixer_core.version import version_string as rf_ver +from gui.version import version_string as gui_ver + class AboutWindow(wx.Frame): def __init__(self, parent, title="About"): @@ -13,7 +16,7 @@ def __init__(self, parent, title="About"): panel = wx.Panel(self) self.about1 = wx.StaticText(panel, style=wx.ALIGN_CENTER, - label="Minecraft Region-Fixer (GUI)") + label="Minecraft Region-Fixer (GUI) (ver. {0})\n(using Region-Fixer ver. 
{1})".format(gui_ver,rf_ver)) self.about2 = wx.StaticText(panel, style=wx.ALIGN_CENTER, label="Fix problems in Minecraft worlds.") self.about3 = wx.StaticText(panel, style=wx.ALIGN_CENTER, @@ -34,11 +37,11 @@ def __init__(self, parent, title="About"): self.sizer = wx.BoxSizer(wx.VERTICAL) self.sizer.Add(self.about1, 0, wx.ALIGN_CENTER | wx.TOP, 10) - self.sizer.Add(self.about2, 0, wx.ALIGN_CENTER) + self.sizer.Add(self.about2, 0, wx.ALIGN_CENTER| wx.TOP, 20) self.sizer.Add(self.about3, 0, wx.ALIGN_CENTER | wx.TOP, 20) self.sizer.Add(self.link_github, 0, wx.ALIGN_CENTER | wx.ALL, 5) self.sizer.Add(self.about4, 0, wx.ALIGN_CENTER | wx.TOP, 20) - self.sizer.Add(self.link_minecraft_forums, 0, wx.ALIGN_CENTER | wx.ALL, 5) + self.sizer.Add(self.link_minecraft_forums, 0,wx.ALIGN_CENTER | wx.ALL, 5) self.sizer.Add(self.close_button, 0, wx.ALIGN_CENTER | wx.ALL, 20) # Fit sizers and make the windows not resizable diff --git a/gui/main.py b/gui/main.py index bae5d0b..67a1cd3 100644 --- a/gui/main.py +++ b/gui/main.py @@ -9,6 +9,13 @@ from regionfixer_core.scan import AsyncWorldScanner, AsyncDataScanner from regionfixer_core import world from regionfixer_core.world import World +from regionfixer_core.version import version_string + +# Proper way to set an icon in windows 7 and above +# Thanks to http://stackoverflow.com/a/15923439 +import ctypes +myappid = 'Fenixin.region-fixer.gui.100' # arbitrary string +ctypes.windll.shell32.SetCurrentProcessExplicitAppUserModelID(myappid) class MainWindow(wx.Frame): @@ -20,7 +27,11 @@ def __init__(self, parent, title, backups=None): self.backups = backups - # Variables + # Icon + ico = wx.Icon('icon.ico', wx.BITMAP_TYPE_ICO) + self.SetIcon(ico) + + # Open world stuff self.last_path = "" # Last path opened self.world = None # World to scan @@ -53,7 +64,7 @@ def __init__(self, parent, title, backups=None): # Create elements in the window # First row: - self.status_text = wx.StaticText(panel, style=wx.TE_MULTILINE, label="test") + self.status_text = wx.StaticText(panel, style=wx.TE_MULTILINE, label="No world loaded") self.open_button = wx.Button(panel, label="Open") self.scan_button = wx.Button(panel, label="Scan") self.scan_button.Disable() diff --git a/regionfixer.py b/regionfixer.py index bebce2d..082e2d3 100644 --- a/regionfixer.py +++ b/regionfixer.py @@ -32,6 +32,7 @@ from regionfixer_core.util import entitle, is_bare_console, parse_paths,\ parse_backup_list from regionfixer_core import progressbar +from regionfixer_core.version import version_string class FractionWidget(progressbar.ProgressBarWidget): @@ -105,7 +106,7 @@ def main(): 'possible. It uses NBT by twoolie. ' 'Author: Alejandro Aguilera (Fenixin)'), prog='region_fixer', - version='0.1.3', + version=version_string, usage=usage, epilog=epilog) diff --git a/regionfixer_core/scan.py b/regionfixer_core/scan.py index 9f6fac0..d82acba 100644 --- a/regionfixer_core/scan.py +++ b/regionfixer_core/scan.py @@ -707,7 +707,7 @@ def multithread_scan_regionfile(region_file): # exceptions will be handled in scan_region_file which is in the # single thread land - + multithread_scan_regionfile.q.put(r) From 8781e7900382a387744b29b693e7b755307dc111 Mon Sep 17 00:00:00 2001 From: Alejandro Aguilera Date: Tue, 24 Jun 2014 22:54:20 +0200 Subject: [PATCH 011/151] Remove, for now, the advanced menu. 
--- gui/main.py | 35 +++++++++++++++++------------------ 1 file changed, 17 insertions(+), 18 deletions(-) diff --git a/gui/main.py b/gui/main.py index 67a1cd3..9f39991 100644 --- a/gui/main.py +++ b/gui/main.py @@ -9,7 +9,6 @@ from regionfixer_core.scan import AsyncWorldScanner, AsyncDataScanner from regionfixer_core import world from regionfixer_core.world import World -from regionfixer_core.version import version_string # Proper way to set an icon in windows 7 and above # Thanks to http://stackoverflow.com/a/15923439 @@ -53,7 +52,7 @@ def __init__(self, parent, title, backups=None): # Add elements to windowsmenu menuBackups = windowsmenu.Append(-1, "&Backups", "Manage list of backups") - menuAdvanced = windowsmenu.Append(-1, "A&dvanced actions", "Manage list of backups") +# menuAdvanced = windowsmenu.Append(-1, "A&dvanced actions", "Manage list of backups") # Create a menu bar menuBar = wx.MenuBar() @@ -120,18 +119,18 @@ def __init__(self, parent, title, backups=None): self.thirdrow_buttons_box_sizer.Add(self.thirdrow_buttons_sizer, 1, wx.EXPAND) self.thirdrow_sizer.Add(self.results_text_box_sizer, 1, wx.EXPAND) self.thirdrow_sizer.Add(self.thirdrow_buttons_box_sizer, 0, wx.EXPAND) - + # All together now self.frame_sizer = wx.BoxSizer(wx.VERTICAL) self.frame_sizer.Add(self.firstrow_static_box_sizer, 0, wx.EXPAND) self.frame_sizer.Add(self.secondrow_static_box_sizer, 0, wx.EXPAND) self.frame_sizer.Add(self.thirdrow_sizer, 1, wx.EXPAND) - + # Layout sizers panel.SetSizerAndFit(self.frame_sizer) self.frame_sizer.Fit(self) - + # Bindings self.Bind(wx.EVT_MENU, self.OnAbout, menuAbout) self.Bind(wx.EVT_MENU, self.OnOpen, menuOpen) @@ -143,18 +142,18 @@ def __init__(self, parent, title, backups=None): self.Bind(wx.EVT_BUTTON, self.OnReplaceChunks, self.replace_all_chunks_button) self.Bind(wx.EVT_BUTTON, self.OnDeleteRegions, self.delete_all_regions_button) self.Bind(wx.EVT_BUTTON, self.OnReplaceRegions, self.replace_all_regions_button) - + self.Show(True) - + def OnExit(self, e): self.Close(True) - + def OnBackups(self, e): self.backups.Show(True) - + def OnAbout(self, e): self.about.Show(True) - + def OnOpen(self, e): dlg = wx.DirDialog(self, "Choose a Minecraf world folder") # Set the last path used @@ -175,7 +174,7 @@ def OnOpen(self, e): # Properly recover the last path used self.last_path = split(dlg.GetPath())[0] dlg.Destroy() - + # Rest the results textctrl self.results_text.SetValue("") @@ -205,7 +204,7 @@ def OnDeleteChunks(self, e): progressdlg.Pulse() print "4" progressdlg.Destroy() - + self.update_delete_buttons_status(False) def OnDeleteRegions(self, e): @@ -220,13 +219,13 @@ def OnDeleteRegions(self, e): wx.PD_SMOOTH ) progressdlg = progressdlg - + self.world.remove_problematic_regions(world.REGION_TOO_SMALL) progressdlg.pulse() self.world.remove_problematic_regions(world.REGION_UNREADABLE) progressdlg.pulse() progressdlg.Destroy() - + self.update_delete_buttons_status(False) self.update_replace_buttons_status(False) @@ -242,9 +241,9 @@ def OnReplaceChunks(self, e): wx.PD_SMOOTH ) progressdlg = progressdlg - + backups = self.backups.world_list - + self.world.replace_problematic_chunks(world.CHUNK_CORRUPTED, backups) progressdlg.pulse() self.world.replace_problematic_chunks(world.CHUNK_SHARED_OFFSET, backups) @@ -254,7 +253,7 @@ def OnReplaceChunks(self, e): self.world.replace_problematic_chunks(world.CHUNK_TOO_MANY_ENTITIES, backups) progressdlg.pulse() progressdlg.Destroy() - + self.update_delete_buttons_status(False) self.update_replace_buttons_status(False) @@ -270,7 +269,7 
@@ def OnReplaceRegions(self, e): wx.PD_SMOOTH ) progressdlg = progressdlg - + self.world.remove_problematic_regions(world.REGION_TOO_SMALL) progressdlg.pulse() self.world.remove_problematic_regions(world.REGION_UNREADABLE) From e4ddf6c8481c53e0534533d4ebb747bfc0ce78c4 Mon Sep 17 00:00:00 2001 From: Alejandro Aguilera Date: Wed, 25 Jun 2014 14:03:02 +0200 Subject: [PATCH 012/151] Rename player to data en scan.py. --- regionfixer_core/scan.py | 44 ++++++++++++++++++++-------------------- 1 file changed, 22 insertions(+), 22 deletions(-) diff --git a/regionfixer_core/scan.py b/regionfixer_core/scan.py index d82acba..86e598e 100644 --- a/regionfixer_core/scan.py +++ b/regionfixer_core/scan.py @@ -292,23 +292,23 @@ def __init__(self, data_dict, processes): self.queue = q = queues.SimpleQueue() self.pool = multiprocessing.Pool(processes=processes, - initializer=_mp_player_pool_init, + initializer=_mp_data_pool_init, initargs=(q,)) # Recommended time to sleep between polls for results self.scan_wait_time = 0.0001 def scan(self): """ Scan and fill the given data_dict generated by world.py. """ - total_players = len(self._data_dict) - player_list = self._data_dict.values() - self._results = self.pool.map_async(multiprocess_scan_player, - player_list, - max(1, total_players//self.processes)) + total_datas = len(self._data_dict) + data_list = self._data_dict.values() + self._results = self.pool.map_async(multiprocess_scan_data, + data_list, + max(1, total_datas//self.processes)) # No more tasks to the pool, exit the processes once the tasks are done self.pool.close() def get_last_result(self): - """ Return results of last player scanned. """ + """ Return results of last data file scanned. """ q = self.queue logging.debug("AsyncDataScanner: starting get_last_result") @@ -320,7 +320,7 @@ def get_last_result(self): # if p is None: # # Something went wrong scanning! # raise ChildProcessException("Something went wrong \ -# scanning a player-file.") +# scanning a data-file.") # Overwrite it in the regionset self._data_dict[p.filename] = p return p @@ -379,12 +379,12 @@ def __len__(self): def console_scan_world(world_obj, processes, entity_limit, remove_entities): - """ Scans a world folder including players and prints status to console. + """ Scans a world folder prints status to console. - This functions uses AsyncDataScanner and AsyncWorldScanner. + It will scan region files and data files (includes players). """ - # Time to wait beween asking for results. Note that if the time is too big + # Time to wait between asking for results. Note that if the time is too big # results will be waiting in the queue and the scan will take longer just # because of this. w = world_obj @@ -470,13 +470,13 @@ def console_scan_regionset(regionset, processes, entity_limit, pbar.finish() -def scan_player(scanned_dat_file): +def scan_data(scanned_dat_file): """ At the moment only tries to read a .dat player file. 
It returns 0 if it's ok and 1 if has some problem """ s = scanned_dat_file try: - player_dat = nbt.NBTFile(filename = s.path) + nbt_data = nbt.NBTFile(filename=s.path) s.readable = True except Exception, e: s.readable = False @@ -484,24 +484,24 @@ def scan_player(scanned_dat_file): return s -def multiprocess_scan_player(player): - """ Does the multithread stuff for scan_region_file """ - p = player - p = scan_player(p) - multiprocess_scan_player.q.put(p) +def multiprocess_scan_data(data): + """ Does the multithread stuff for scan_data """ + d = data + d = scan_data(d) + multiprocess_scan_data.q.put(d) -def _mp_player_pool_init(q): +def _mp_data_pool_init(q): """ Function to initialize the multiprocessing in scan_regionset. Is used to pass values to the child process. """ - multiprocess_scan_player.q = q + multiprocess_scan_data.q = q def scan_all_players(world_obj): - """ Scans all the players using the scan_player function. """ + """ Scans all the players using the scan_data function. """ for name in world_obj.players: - scan_player(world_obj.players[name]) + scan_data(world_obj.players[name]) def scan_region_file(scanned_regionfile_obj, entity_limit, delete_entities): From 6245d8cc77b47a6b7ee416f2da17b8ee01708b3b Mon Sep 17 00:00:00 2001 From: Alejandro Aguilera Date: Thu, 26 Jun 2014 21:27:07 +0200 Subject: [PATCH 013/151] Proper handling of exceptions for multiprocess scan in gui and in terminal. Better summary in interactive mode. --- gui/main.py | 72 ++++--- regionfixer_core/interactive.py | 8 +- regionfixer_core/scan.py | 356 +++++++++++++++++--------------- 3 files changed, 244 insertions(+), 192 deletions(-) diff --git a/gui/main.py b/gui/main.py index 9f39991..7aae9d4 100644 --- a/gui/main.py +++ b/gui/main.py @@ -3,10 +3,11 @@ import wx from time import sleep -from os.path import split +from os.path import split, abspath from backups import BackupsWindow -from regionfixer_core.scan import AsyncWorldScanner, AsyncDataScanner +from regionfixer_core.scan import AsyncWorldScanner, AsyncDataScanner,\ + ChildProcessException from regionfixer_core import world from regionfixer_core.world import World @@ -295,35 +296,50 @@ def OnScan(self, e): "Scanning old format player files", "Scanning players", "Scanning data files"] - for scanner, dialog_title in zip(things_to_scan, dialog_texts): - progressdlg = wx.ProgressDialog( - dialog_title, - "Last scanned:\n starting...", - len(scanner), self, - style=wx.PD_ELAPSED_TIME | wx.PD_ESTIMATED_TIME | - wx.PD_REMAINING_TIME | wx.PD_CAN_ABORT | - wx.PD_AUTO_HIDE | wx.PD_SMOOTH) - scanner.scan() - counter = 0 - while not scanner.finished: - sleep(0.001) - result = scanner.get_last_result() - if result: - counter += 1 - not_cancelled, not_skipped = progressdlg.Update(counter, - "Last scanned:\n" + ws.str_last_scanned) + try: + for scanner, dialog_title in zip(things_to_scan, dialog_texts): + progressdlg = wx.ProgressDialog( + dialog_title, + "Last scanned:\n starting...", + len(scanner), self, + style=wx.PD_ELAPSED_TIME | wx.PD_ESTIMATED_TIME | + wx.PD_REMAINING_TIME | wx.PD_CAN_ABORT | + wx.PD_AUTO_HIDE | wx.PD_SMOOTH) + scanner.scan() + counter = 0 + while not scanner.finished: + sleep(0.001) + result = scanner.get_last_result() + if result: + counter += 1 + not_cancelled, not_skipped = progressdlg.Update(counter, + "Last scanned:\n" + ws.str_last_scanned) + if not not_cancelled: + # User pressed cancel + scanner.terminate() + break + progressdlg.Destroy() if not not_cancelled: - # User pressed cancel - scanner.terminate() break + else: + # The 
scan finished successfully + self.world.scanned = True + self.results_text.SetValue(self.world.generate_report(True)) + self.update_delete_buttons_status(True) + except ChildProcessException as e: + error_log_path = e.save_error_log() + filename = e.scanned_file.filename + scanner.terminate() progressdlg.Destroy() - if not not_cancelled: - break - else: - # The scan finished successfully - self.world.scanned = True - self.results_text.SetValue(self.world.generate_report(True)) - self.update_delete_buttons_status(True) + error = wx.MessageDialog(self, + ("Something went really wrong scanning {0}\n\n" + "This is probably an error in the code. Please, " + "if you have the time report it. " + "I have saved all the error information in:\n\n" + "{1}").format(filename, error_log_path), + "Error", + wx.ICON_ERROR) + error.ShowModal() def update_delete_buttons_status(self, status): diff --git a/regionfixer_core/interactive.py b/regionfixer_core/interactive.py index 975cb25..a184a6a 100644 --- a/regionfixer_core/interactive.py +++ b/regionfixer_core/interactive.py @@ -172,9 +172,11 @@ def do_summary(self, arg): if len(arg) == 0: if self.current: if self.current.scanned: - text = self.current.summary() - if text: print text - else: print "No problems found!" + text = self.current.generate_report(True) + if text: + print text + else: + print "No problems found!" else: print "The world hasn't be scanned (or it needs a rescan). Use \'scan\' to scan it." else: diff --git a/regionfixer_core/scan.py b/regionfixer_core/scan.py index 86e598e..809652e 100644 --- a/regionfixer_core/scan.py +++ b/regionfixer_core/scan.py @@ -22,18 +22,20 @@ # -from os.path import split, join -from time import sleep, time import sys -import traceback -from copy import copy import logging import multiprocessing +from os.path import split, abspath +from time import sleep, time +from copy import copy from multiprocessing import queues +from traceback import extract_tb import nbt.region as region import nbt.nbt as nbt - +from nbt.nbt import MalformedFileError +from nbt.region import ChunkDataError, ChunkHeaderError,\ + RegionHeaderError, InconceivedChunk import progressbar import world @@ -45,25 +47,54 @@ TUPLE_STATUS = 1 -logging.basicConfig(filename='scan.log', level=logging.DEBUG) +# logging.basicConfig(filename='scan.log', level=logging.DEBUG) class ChildProcessException(Exception): - """Takes the child process traceback text and prints it as a - real traceback with asterisks everywhere.""" - def __init__(self, error): - # Helps to see wich one is the child process traceback - traceback = error[2] - print "*" * 10 - print "*** Error while scanning:" - print "*** ", error[0] - print "*" * 10 - print "*** Printing the child's Traceback:" - print "*** Exception:", traceback[0], traceback[1] - for tb in traceback[2]: - print "*" * 10 - print "*** File {0}, line {1}, in {2} \n*** {3}".format(*tb) - print "*" * 10 + """ Raised when a child process has problems. + + Stores all the info given by sys.exc_info() and the + scanned file object which is probably partially filled. + """ + def __init__(self, partial_scanned_file, exc_type, exc_class, tb_text): + self.scanned_file = partial_scanned_file + self.exc_type = exc_type + self.exc_class = exc_class + self.tb_text = tb_text + + @property + def printable_traceback(self): + """ Returns a nice printable traceback. + + It uses a lot of asteriks to ensure it doesn't mix with + the main process traceback. 
+ """ + text = "" + scanned_file = self.scanned_file + text += "*" * 10 + "\n" + text += "*** Exception while scanning:" + "\n" + text += "*** " + str(scanned_file.filename) + "\n" + text += "*" * 10 + "\n" + text += "*** Printing the child's traceback:" + "\n" + text += "*** Exception:" + str(self.exc_type) + str(self.exc_class) + "\n" + for tb in self.tb_text: + text += "*" * 10 + "\n" + text += "*** File {0}, line {1}, in {2} \n*** {3}".format(*tb) + text += "\n" + "*" * 10 + "\n" + + return text + + def save_error_log(self, filename='error.log'): + """ Save the error in filename, return the path of saved file. """ + f = open(filename, 'w') + error_log_path = abspath(f.name) + filename = self.scanned_file.filename + f.write("Error while scanning: {0}\n".format(filename)) + f.write(self.printable_traceback) + f.write('\n') + f.close() + + return error_log_path class FractionWidget(progressbar.ProgressBarWidget): @@ -96,7 +127,7 @@ def __init__(self, regionset, processes, entity_limit, def scan(self): """ Scan and fill the given regionset. """ total_regions = len(self._regionset.regions) - self._results = self.pool.map_async(multithread_scan_regionfile, + self._results = self.pool.map_async(multiprocess_scan_regionfile, self._regionset.list_regions(None), max(1,total_regions//self.processes)) # No more tasks to the pool, exit the processes once the tasks are done @@ -117,16 +148,12 @@ def get_last_result(self): logging.debug("AsyncRegionsetScanner: starting get_last_result") logging.debug("AsyncRegionsetScanner: queue empty: {0}".format(q.empty())) if not q.empty(): - logging.debug("AsyncRegionsetScanner: queue not empty") r = q.get() logging.debug("AsyncRegionsetScanner: result: {0}".format(r)) - if r is None: - # Something went wrong scanning! - raise ChildProcessException("Something went wrong \ - scanning a region-file.") - # Overwrite it in the regionset if isinstance(r, tuple): - raise ChildProcessException(r) + logging.debug("AsyncRegionsetScanner: Something went wrong handling error") + raise ChildProcessException(r[0], r[1][0], r[1][1], r[1][2]) + # Overwrite it in the regionset self._regionset[r.get_coords()] = r self._str_last_scanned = self._regionset.get_name() + ": " + r.filename return r @@ -156,8 +183,10 @@ def regionset(self): def results(self): """ Yield all the results from the scan. - This is the preferred method to report scanning status. This way you - can iterate through all the results in easy way: + This is the simpler method to control the scanning process, + but also the most sloppy. If you want to closely control the + scan process (for example cancel the process in the middle, + whatever is happening) use get_last_result(). for result in scanner.results: # do things @@ -171,10 +200,8 @@ def results(self): if not q.empty(): r = q.get() logging.debug("AsyncRegionsetScanner: result: {0}".format(r)) - # if r is None: - # # Something went wrong scanning! - # raise ChildProcessException("Something went wrong \ - # scanning a region-file.") + if isinstance(r, tuple): + raise ChildProcessException(r[0], r[1][0], r[1][1], r[1][2]) # Overwrite it in the regionset self._regionset[r.get_coords()] = r yield r @@ -182,6 +209,7 @@ def results(self): def __len__(self): return len(self._regionset) + class AsyncWorldScanner(object): def __init__(self, world_obj, processes, entity_limit, remove_entities=False): @@ -260,7 +288,7 @@ def results(self): """ Yield all the results from the scan. This is the simpler method to control the scanning process, - but also the most slopy. 
If you want to closely control the + but also the most sloppy. If you want to closely control the scan process (for example cancel the process in the middle, whatever is happening) use get_last_result(). @@ -284,6 +312,7 @@ def __len__(self): l += len(rs) return l + class AsyncDataScanner(object): def __init__(self, data_dict, processes): @@ -316,11 +345,9 @@ def get_last_result(self): if not q.empty(): logging.debug("AsyncDataScanner: queue not empty") p = q.get() + if isinstance(p, tuple): + raise ChildProcessException(p[0], p[1][0], p[1][1], p[1][2]) logging.debug("AsyncDataScanner: result: {0}".format(p)) -# if p is None: -# # Something went wrong scanning! -# raise ChildProcessException("Something went wrong \ -# scanning a data-file.") # Overwrite it in the regionset self._data_dict[p.filename] = p return p @@ -340,8 +367,10 @@ def data_dict(self): def results(self): """ Yield all the results from the scan. - This is the preferred method to report scanning status. This way you - can iterate through all the results in easy way: + This is the simpler method to control the scanning process, + but also the most sloppy. If you want to closely control the + scan process (for example cancel the process in the middle, + whatever is happening) use get_last_result(). for result in scanner.results: # do things @@ -356,10 +385,8 @@ def results(self): if not q.empty(): p = q.get() logging.debug("AsyncDataScanner: result: {0}".format(p)) - # if p is None: - # # Something went wrong scanning! - # raise ChildProcessException("Something went wrong \ - # scanning a player-file.") + if isinstance(p, tuple): + raise ChildProcessException(p[0], p[1][0], p[1][1], p[1][2]) # Overwrite it in the data dict self._data_dict[p.filename] = p yield p @@ -367,6 +394,7 @@ def results(self): def __len__(self): return len(self._data_dict) + # All scanners will use this progress bar widgets = ['Scanning: ', FractionWidget(), @@ -439,10 +467,13 @@ def console_scan_world(world_obj, processes, entity_limit, remove_entities): pbar.update(counter) pbar.finish() w.scanned = True - except ChildProcessException, e: - print "Something went really wrong scanning a region file." - print "See a log for more details!" - print e + except ChildProcessException as e: + print "\n\nSomething went really wrong scanning a file." + print ("This is probably a bug! If you have the time, please report " + "it to the region-fixer github or in the region fixer post " + "in minecraft forums") + print e.printable_traceback + raise e def console_scan_regionset(regionset, processes, entity_limit, @@ -460,27 +491,45 @@ def console_scan_regionset(regionset, processes, entity_limit, remove_entities) rs.scan() counter = 0 - while not rs.finished: - sleep(0.01) - result = rs.get_last_result() - if result: - counter += 1 - pbar.update(counter) - - pbar.finish() + try: + while not rs.finished: + sleep(0.01) + result = rs.get_last_result() + if result: + counter += 1 + pbar.update(counter) + pbar.finish() + except ChildProcessException as e: + print "\n\nSomething went really wrong scanning a file." + print ("This is probably a bug! If you have the time, please report " + "it to the region-fixer github or in the region fixer post " + "in minecraft forums") + print e.printable_traceback + raise e def scan_data(scanned_dat_file): - """ At the moment only tries to read a .dat player file. It returns - 0 if it's ok and 1 if has some problem """ + """ Try to parse the nbd data file, and fill the scanned object. 
+ + If something is wrong it will return a tuple with useful info + to debug the problem. + """ s = scanned_dat_file try: - nbt_data = nbt.NBTFile(filename=s.path) + _ = nbt.NBTFile(filename=s.path) s.readable = True - except Exception, e: + except MalformedFileError as e: + s.readable = False + s.status_text = str(e) + except IOError: s.readable = False - s.status_text = e + s.status_text = str(e) + except: + s.readable = False + except_type, except_class, tb = sys.exc_info() + s = (s, (except_type, except_class, extract_tb(tb))) + return s @@ -497,17 +546,8 @@ def _mp_data_pool_init(q): multiprocess_scan_data.q = q -def scan_all_players(world_obj): - """ Scans all the players using the scan_data function. """ - - for name in world_obj.players: - scan_data(world_obj.players[name]) - - def scan_region_file(scanned_regionfile_obj, entity_limit, delete_entities): - """ Given a scanned region file object with the information of a - region files scans it and returns the same obj filled with the - results. + """ Scan a region file filling the ScannedRegionFile If delete_entities is True it will delete entities while scanning @@ -530,79 +570,70 @@ def scan_region_file(scanned_regionfile_obj, entity_limit, delete_entities): # try to open the file and see if we can parse the header try: region_file = region.RegionFile(r.path) - except region.NoRegionHeader: # the region has no header + except region.NoRegionHeader: # The region has no header r.status = world.REGION_TOO_SMALL return r except IOError, e: - print "\nWARNING: I can't open the file {0} !\nThe error is \"{1}\".\nTypical causes are file blocked or problems in the file system.\n".format(filename,e) r.status = world.REGION_UNREADABLE r.scan_time = time() - print "Note: this region file won't be scanned and won't be taken into acount in the summaries" - # TODO count also this region files - return r - except: # whatever else print an error and ignore for the scan - # not really sure if this is a good solution... - print "\nWARNING: The region file \'{0}\' had an error and couldn't be parsed as region file!\nError:{1}\n".format(join(split(split(r.path)[0])[1], split(r.path)[1]),sys.exc_info()[0]) - print "Note: this region file won't be scanned and won't be taken into acount." - print "Also, this may be a bug. Please, report it if you have the time.\n" - return None - try:# start the scanning of chunks - - for x in range(32): - for z in range(32): - - # start the actual chunk scanning - g_coords = r.get_global_chunk_coords(x, z) - chunk, c = scan_chunk(region_file, (x,z), g_coords, entity_limit) - if c != None: # chunk not created - r.chunks[(x,z)] = c - chunk_count += 1 - else: continue - if c[TUPLE_STATUS] == world.CHUNK_OK: - continue - elif c[TUPLE_STATUS] == world.CHUNK_TOO_MANY_ENTITIES: - # deleting entities is in here because parsing a chunk with thousands of wrong entities - # takes a long time, and once detected is better to fix it at once. - if delete_entities: - world.delete_entities(region_file, x, z) - print "Deleted {0} entities in chunk ({1},{2}) of the region file: {3}".format(c[TUPLE_NUM_ENTITIES], x, z, r.filename) - # entities removed, change chunk status to OK - r.chunks[(x,z)] = (0, world.CHUNK_OK) - - else: - entities_prob += 1 - # This stores all the entities in a file, - # comes handy sometimes. 
- #~ pretty_tree = chunk['Level']['Entities'].pretty_tree() - #~ name = "{2}.chunk.{0}.{1}.txt".format(x,z,split(region_file.filename)[1]) - #~ archivo = open(name,'w') - #~ archivo.write(pretty_tree) - - elif c[TUPLE_STATUS] == world.CHUNK_CORRUPTED: - corrupted += 1 - elif c[TUPLE_STATUS] == world.CHUNK_WRONG_LOCATED: - wrong += 1 - - # Now check for chunks sharing offsets: - # Please note! region.py will mark both overlapping chunks - # as bad (the one stepping outside his territory and the - # good one). Only wrong located chunk with a overlapping - # flag are really BAD chunks! Use this criterion to - # discriminate - metadata = region_file.metadata - sharing = [k for k in metadata if ( - metadata[k].status == region.STATUS_CHUNK_OVERLAPPING and - r[k][TUPLE_STATUS] == world.CHUNK_WRONG_LOCATED)] - shared_counter = 0 - for k in sharing: - r[k] = (r[k][TUPLE_NUM_ENTITIES], world.CHUNK_SHARED_OFFSET) - shared_counter += 1 - - except KeyboardInterrupt: - print "\nInterrupted by user\n" - # TODO this should't exit - sys.exit(1) + for x in range(32): + for z in range(32): + # start the actual chunk scanning + g_coords = r.get_global_chunk_coords(x, z) + chunk, c = scan_chunk(region_file, + (x, z), + g_coords, + entity_limit) + if c: + r.chunks[(x, z)] = c + chunk_count += 1 + else: + # chunk not created + continue + + if c[TUPLE_STATUS] == world.CHUNK_OK: + continue + elif c[TUPLE_STATUS] == world.CHUNK_TOO_MANY_ENTITIES: + # Deleting entities is in here because parsing a chunk + # with thousands of wrong entities takes a long time, + # and once detected is better to fix it at once. + if delete_entities: + world.delete_entities(region_file, x, z) + print ("Deleted {0} entities in chunk" + " ({1},{2}) of the region file: {3}").format( + c[TUPLE_NUM_ENTITIES], x, z, r.filename) + # entities removed, change chunk status to OK + r.chunks[(x, z)] = (0, world.CHUNK_OK) + + else: + entities_prob += 1 + # This stores all the entities in a file, + # comes handy sometimes. + #~ pretty_tree = chunk['Level']['Entities'].pretty_tree() + #~ name = "{2}.chunk.{0}.{1}.txt".format(x,z,split(region_file.filename)[1]) + #~ archivo = open(name,'w') + #~ archivo.write(pretty_tree) + + elif c[TUPLE_STATUS] == world.CHUNK_CORRUPTED: + corrupted += 1 + elif c[TUPLE_STATUS] == world.CHUNK_WRONG_LOCATED: + wrong += 1 + + # Now check for chunks sharing offsets: + # Please note! region.py will mark both overlapping chunks + # as bad (the one stepping outside his territory and the + # good one). Only wrong located chunk with a overlapping + # flag are really BAD chunks! 
Use this criterion to + # discriminate + metadata = region_file.metadata + sharing = [k for k in metadata if ( + metadata[k].status == region.STATUS_CHUNK_OVERLAPPING and + r[k][TUPLE_STATUS] == world.CHUNK_WRONG_LOCATED)] + shared_counter = 0 + for k in sharing: + r[k] = (r[k][TUPLE_NUM_ENTITIES], world.CHUNK_SHARED_OFFSET) + shared_counter += 1 r.chunk_count = chunk_count r.corrupted_chunks = corrupted @@ -611,18 +642,21 @@ def scan_region_file(scanned_regionfile_obj, entity_limit, delete_entities): r.shared_offset = shared_counter r.scan_time = time() r.status = world.REGION_OK - return r + return r + + except KeyboardInterrupt: + print "\nInterrupted by user\n" + # TODO this should't exit + sys.exit(1) # Fatal exceptions: except: - # anything else is a ChildProcessException - try: - # Not even r was created, something went really wrong - except_type, except_class, tb = sys.exc_info() - r = (r.path, r.coords, (except_type, except_class, traceback.extract_tb(tb))) - except NameError: - r = (None, None, (except_type, except_class, traceback.extract_tb(tb))) - + # Anything else is a ChildProcessException + # NOTE TO SELF: do not try to return the traceback object directly! + # A multiprocess pythonic hell comes to earth if you do so. + except_type, except_class, tb = sys.exc_info() + r = (scanned_regionfile_obj, (except_type, except_class, extract_tb(tb))) + return r @@ -647,7 +681,7 @@ def scan_chunk(region_file, coords, global_coords, entity_limit): status_text = "OK" scan_time = time() - except region.InconceivedChunk as e: + except InconceivedChunk as e: chunk = None data_coords = None num_entities = None @@ -655,7 +689,7 @@ def scan_chunk(region_file, coords, global_coords, entity_limit): status_text = "The chunk doesn't exist" scan_time = time() - except region.RegionHeaderError as e: + except RegionHeaderError as e: error = "Region header error: " + e.msg status = world.CHUNK_CORRUPTED status_text = error @@ -665,7 +699,7 @@ def scan_chunk(region_file, coords, global_coords, entity_limit): global_coords = world.get_global_chunk_coords(split(region_file.filename)[1], coords[0], coords[1]) num_entities = None - except region.ChunkDataError as e: + except ChunkDataError as e: error = "Chunk data error: " + e.msg status = world.CHUNK_CORRUPTED status_text = error @@ -675,7 +709,7 @@ def scan_chunk(region_file, coords, global_coords, entity_limit): global_coords = world.get_global_chunk_coords(split(region_file.filename)[1], coords[0], coords[1]) num_entities = None - except region.ChunkHeaderError as e: + except ChunkHeaderError as e: error = "Chunk herader error: " + e.msg status = world.CHUNK_CORRUPTED status_text = error @@ -691,24 +725,24 @@ def scan_chunk(region_file, coords, global_coords, entity_limit): def _mp_pool_init(regionset, entity_limit, remove_entities, q): """ Function to initialize the multiprocessing in scan_regionset. Is used to pass values to the child process. 
""" - multithread_scan_regionfile.regionset = regionset - multithread_scan_regionfile.q = q - multithread_scan_regionfile.entity_limit = entity_limit - multithread_scan_regionfile.remove_entities = remove_entities + multiprocess_scan_regionfile.regionset = regionset + multiprocess_scan_regionfile.q = q + multiprocess_scan_regionfile.entity_limit = entity_limit + multiprocess_scan_regionfile.remove_entities = remove_entities -def multithread_scan_regionfile(region_file): +def multiprocess_scan_regionfile(region_file): """ Does the multithread stuff for scan_region_file """ r = region_file - entity_limit = multithread_scan_regionfile.entity_limit - remove_entities = multithread_scan_regionfile.remove_entities + entity_limit = multiprocess_scan_regionfile.entity_limit + remove_entities = multiprocess_scan_regionfile.remove_entities # call the normal scan_region_file with this parameters r = scan_region_file(r, entity_limit, remove_entities) # exceptions will be handled in scan_region_file which is in the # single thread land - multithread_scan_regionfile.q.put(r) + multiprocess_scan_regionfile.q.put(r) if __name__ == '__main__': From 86e50fc44296e4373518f898106156f4e951d450 Mon Sep 17 00:00:00 2001 From: Alejandro Aguilera Date: Thu, 26 Jun 2014 21:51:45 +0200 Subject: [PATCH 014/151] Fix no counting unreadable region-files. --- regionfixer_core/scan.py | 5 ++++- 1 file changed, 4 insertions(+), 1 deletion(-) diff --git a/regionfixer_core/scan.py b/regionfixer_core/scan.py index 809652e..d65899b 100644 --- a/regionfixer_core/scan.py +++ b/regionfixer_core/scan.py @@ -572,10 +572,12 @@ def scan_region_file(scanned_regionfile_obj, entity_limit, delete_entities): region_file = region.RegionFile(r.path) except region.NoRegionHeader: # The region has no header r.status = world.REGION_TOO_SMALL + r.scan_time = time() return r except IOError, e: r.status = world.REGION_UNREADABLE r.scan_time = time() + return r for x in range(32): for z in range(32): @@ -646,7 +648,8 @@ def scan_region_file(scanned_regionfile_obj, entity_limit, delete_entities): except KeyboardInterrupt: print "\nInterrupted by user\n" - # TODO this should't exit + # TODO this should't exit. It should return to interactive + # mode if we are in it. sys.exit(1) # Fatal exceptions: From f9ed670c33c193e2657743a3f50b0311c5cb32e9 Mon Sep 17 00:00:00 2001 From: Alejandro Aguilera Date: Thu, 26 Jun 2014 22:25:48 +0200 Subject: [PATCH 015/151] Improve gui. 
--- gui/main.py | 12 ++++++------ 1 file changed, 6 insertions(+), 6 deletions(-) diff --git a/gui/main.py b/gui/main.py index 7aae9d4..72f18aa 100644 --- a/gui/main.py +++ b/gui/main.py @@ -77,17 +77,17 @@ def __init__(self, parent, title, backups=None): self.firstrow_static_box_sizer.Add(self.firstrow_sizer, 1, wx.EXPAND) # Second row: - self.proc_info_text = wx.StaticText(panel, label="Threads to use: ") - self.proc_text = wx.TextCtrl(panel, value="1") + self.proc_info_text = wx.StaticText(panel, label="Processes to use: ") + self.proc_text = wx.TextCtrl(panel, value="1", size=(30, 24), style=wx.TE_CENTER) self.el_info_text = wx.StaticText(panel, label="Entity limit: " ) - self.el_text = wx.TextCtrl(panel, value="150") + self.el_text = wx.TextCtrl(panel, value="150", size=(50, 24), style=wx.TE_CENTER) self.secondrow_sizer = wx.BoxSizer(wx.HORIZONTAL) - self.secondrow_sizer.Add(self.proc_info_text, 0, wx.ALIGN_CENTER) - self.secondrow_sizer.Add(self.proc_text, 0, wx.ALIGN_LEFT) + self.secondrow_sizer.Add(self.proc_info_text, flag=wx.ALIGN_CENTER) + self.secondrow_sizer.Add(self.proc_text, 0, flag=wx.RIGHT | wx.ALIGN_LEFT, border=15) self.secondrow_sizer.Add(self.el_info_text, 0, wx.ALIGN_CENTER) self.secondrow_sizer.Add(self.el_text, 0, wx.ALIGN_RIGHT) self.secondrow_static_box_sizer = wx.StaticBoxSizer(wx.StaticBox(panel, label="Scan options")) - self.secondrow_static_box_sizer.Add(self.secondrow_sizer, 1, wx.EXPAND) + self.secondrow_static_box_sizer.Add(self.secondrow_sizer, 1, flag=wx.EXPAND) # Third row: # Note: In order to use a static box add it directly to a From 809339e01b59a21003385442036743f93cbcc597 Mon Sep 17 00:00:00 2001 From: Alejandro Aguilera Date: Tue, 1 Jul 2014 11:19:20 +0200 Subject: [PATCH 016/151] Implement replace options in GUI. 
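
Note on the core API change in this patch: World.replace_problematic_chunks() and World.replace_problematic_regions() no longer take the whole optparse options object; they now take the entity limit and the delete-entities flag as plain arguments, so the GUI (which has no options object) can call them too. A minimal usage sketch of the new call pattern, assuming the world has already been scanned; the paths, the backup list and the 150 limit are placeholders (150 is just the GUI's default entity limit):

    from regionfixer_core import world
    from regionfixer_core.world import World

    w = World("/path/to/world")                 # assumed already scanned
    backups = [World("/path/to/backup/world")]  # worlds to take healthy chunks from

    entity_limit = 150       # placeholder, same default the GUI text box uses
    delete_entities = False

    # Replace corrupted chunks using the first backup that has a good copy
    fixed = w.replace_problematic_chunks(backups, world.CHUNK_CORRUPTED,
                                         entity_limit, delete_entities)
    print "Replaced {0} chunks".format(fixed)
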
--- gui/main.py | 195 ++++++++++++++++---------------- regionfixer.py | 8 +- regionfixer_core/interactive.py | 8 +- regionfixer_core/scan.py | 3 +- regionfixer_core/world.py | 26 +++-- 5 files changed, 125 insertions(+), 115 deletions(-) diff --git a/gui/main.py b/gui/main.py index 72f18aa..2dcb332 100644 --- a/gui/main.py +++ b/gui/main.py @@ -179,106 +179,6 @@ def OnOpen(self, e): # Rest the results textctrl self.results_text.SetValue("") - def OnDeleteChunks(self, e): - progressdlg = wx.ProgressDialog("Removing chunks", "Removing...", - self.world.count_regions(), self, - style = wx.PD_ELAPSED_TIME | - wx.PD_ESTIMATED_TIME | - wx.PD_REMAINING_TIME | - wx.PD_CAN_SKIP | - wx.PD_CAN_ABORT | - wx.PD_AUTO_HIDE | - wx.PD_SMOOTH - ) - progressdlg = progressdlg - progressdlg.Pulse() - self.world.remove_problematic_chunks(world.CHUNK_CORRUPTED) - progressdlg.Pulse() - print "1" - self.world.remove_problematic_chunks(world.CHUNK_SHARED_OFFSET) - progressdlg.Pulse() - print "2" - self.world.remove_problematic_chunks(world.CHUNK_WRONG_LOCATED) - progressdlg.Pulse() - print "3" - self.world.remove_problematic_chunks(world.CHUNK_TOO_MANY_ENTITIES) - progressdlg.Pulse() - print "4" - progressdlg.Destroy() - - self.update_delete_buttons_status(False) - - def OnDeleteRegions(self, e): - progressdlg = wx.ProgressDialog("Removing regions", "Removing...", - self.world.count_regions(), self, - style = wx.PD_ELAPSED_TIME | - wx.PD_ESTIMATED_TIME | - wx.PD_REMAINING_TIME | - #~ wx.PD_CAN_SKIP | - #~ wx.PD_CAN_ABORT | - wx.PD_AUTO_HIDE | - wx.PD_SMOOTH - ) - progressdlg = progressdlg - - self.world.remove_problematic_regions(world.REGION_TOO_SMALL) - progressdlg.pulse() - self.world.remove_problematic_regions(world.REGION_UNREADABLE) - progressdlg.pulse() - progressdlg.Destroy() - - self.update_delete_buttons_status(False) - self.update_replace_buttons_status(False) - - def OnReplaceChunks(self, e): - progressdlg = wx.ProgressDialog("Removing chunks", "Removing...", - self.world.count_regions(), self, - style = wx.PD_ELAPSED_TIME | - wx.PD_ESTIMATED_TIME | - wx.PD_REMAINING_TIME | - #~ wx.PD_CAN_SKIP | - #~ wx.PD_CAN_ABORT | - wx.PD_AUTO_HIDE | - wx.PD_SMOOTH - ) - progressdlg = progressdlg - - backups = self.backups.world_list - - self.world.replace_problematic_chunks(world.CHUNK_CORRUPTED, backups) - progressdlg.pulse() - self.world.replace_problematic_chunks(world.CHUNK_SHARED_OFFSET, backups) - progressdlg.pulse() - self.world.replace_problematic_chunks(world.CHUNK_WRONG_LOCATED, backups) - progressdlg.pulse() - self.world.replace_problematic_chunks(world.CHUNK_TOO_MANY_ENTITIES, backups) - progressdlg.pulse() - progressdlg.Destroy() - - self.update_delete_buttons_status(False) - self.update_replace_buttons_status(False) - - def OnReplaceRegions(self, e): - progressdlg = wx.ProgressDialog("Removing regions", "Removing...", - self.world.count_regions(), self, - style = wx.PD_ELAPSED_TIME | - wx.PD_ESTIMATED_TIME | - wx.PD_REMAINING_TIME | - #~ wx.PD_CAN_SKIP | - #~ wx.PD_CAN_ABORT | - wx.PD_AUTO_HIDE | - wx.PD_SMOOTH - ) - progressdlg = progressdlg - - self.world.remove_problematic_regions(world.REGION_TOO_SMALL) - progressdlg.pulse() - self.world.remove_problematic_regions(world.REGION_UNREADABLE) - progressdlg.pulse() - progressdlg.Destroy() - - self.update_delete_buttons_status(False) - self.update_replace_buttons_status(False) def OnScan(self, e): processes = int(self.proc_text.GetValue()) @@ -307,6 +207,7 @@ def OnScan(self, e): wx.PD_AUTO_HIDE | wx.PD_SMOOTH) scanner.scan() counter = 0 + 
progressdlg.ShowModal() while not scanner.finished: sleep(0.001) result = scanner.get_last_result() @@ -326,6 +227,7 @@ def OnScan(self, e): self.world.scanned = True self.results_text.SetValue(self.world.generate_report(True)) self.update_delete_buttons_status(True) + self.update_replace_buttons_status(True) except ChildProcessException as e: error_log_path = e.save_error_log() filename = e.scanned_file.filename @@ -341,6 +243,99 @@ def OnScan(self, e): wx.ICON_ERROR) error.ShowModal() + def OnDeleteChunks(self, e): + progressdlg = wx.ProgressDialog("Removing chunks", "This may take a while", + self.world.count_regions(), self, + style=wx.PD_ELAPSED_TIME | + wx.PD_ESTIMATED_TIME | + wx.PD_REMAINING_TIME | + wx.PD_CAN_SKIP | + wx.PD_CAN_ABORT | + wx.PD_AUTO_HIDE | + wx.PD_SMOOTH + ) + progressdlg = progressdlg + progressdlg.Pulse() + remove_chunks = self.world.remove_problematic_chunks + for problem in world.CHUNK_PROBLEMS: + progressdlg.Pulse("Removing chunks with problem: {}".format(world.CHUNK_STATUS_TEXT[problem])) + remove_chunks(problem) + progressdlg.Destroy() + progressdlg.Destroy() + + self.update_delete_buttons_status(False) + + def OnDeleteRegions(self, e): + progressdlg = wx.ProgressDialog("Removing regions", "This may take a while...", + self.world.count_regions(), self, + style=wx.PD_ELAPSED_TIME | + wx.PD_ESTIMATED_TIME | + wx.PD_REMAINING_TIME | + wx.PD_AUTO_HIDE | + wx.PD_SMOOTH + ) + progressdlg = progressdlg + progressdlg.Pulse() + remove_regions = self.world.remove_problematic_regions + for problem in world.REGION_PROBLEMS: + progressdlg.Pulse("Removing regions with problem: {}".format(world.REGION_STATUS_TEXT[problem])) + remove_regions(problem) + progressdlg.Destroy() + + self.update_delete_buttons_status(False) + self.update_replace_buttons_status(False) + + def OnReplaceChunks(self, e): + # Get options + entity_limit = int(self.el_text.GetValue()) + delete_entities = False + + progressdlg = wx.ProgressDialog("Removing chunks", "Removing...", + self.world.count_regions(), self, + style=wx.PD_ELAPSED_TIME | + wx.PD_ESTIMATED_TIME | + wx.PD_REMAINING_TIME | + wx.PD_AUTO_HIDE | + wx.PD_SMOOTH + ) + progressdlg = progressdlg + backups = self.backups.world_list + progressdlg.Pulse() + replace_chunks = self.world.replace_problematic_chunks + for problem in world.CHUNK_PROBLEMS: + progressdlg.Pulse("Replacing chunks with problem: {}".format(world.CHUNK_STATUS_TEXT[problem])) + replace_chunks(backups, problem, entity_limit, delete_entities) + progressdlg.Destroy() + + self.update_delete_buttons_status(False) + self.update_replace_buttons_status(False) + + def OnReplaceRegions(self, e): + # Get options + entity_limit = int(self.el_text.GetValue()) + delete_entities = False + progressdlg = wx.ProgressDialog("Removing regions", "Removing...", + self.world.count_regions(), self, + style = wx.PD_ELAPSED_TIME | + wx.PD_ESTIMATED_TIME | + wx.PD_REMAINING_TIME | + #~ wx.PD_CAN_SKIP | + #~ wx.PD_CAN_ABORT | + wx.PD_AUTO_HIDE | + wx.PD_SMOOTH + ) + progressdlg = progressdlg + backups = self.backups.world_list + progressdlg.Pulse() + replace_regions = self.world.replace_problematic_regions + for problem in world.REGION_PROBLEMS: + progressdlg.Pulse("Replacing regions with problem: {}".format(world.REGION_STATUS_TEXT[problem])) + replace_regions(backups, problem, entity_limit, delete_entities) + progressdlg.Destroy() + + self.update_delete_buttons_status(False) + self.update_replace_buttons_status(False) + def update_delete_buttons_status(self, status): if status: diff --git 
a/regionfixer.py b/regionfixer.py index 082e2d3..7b12c8c 100644 --- a/regionfixer.py +++ b/regionfixer.py @@ -406,6 +406,8 @@ def main(): print # Replace chunks if backup_worlds and not len(world_list) > 1: + del_ent = options.delete_entities + ent_lim = options.entity_limit options_replace = [o.replace_corrupted, o.replace_wrong_located, o.replace_entities, @@ -417,7 +419,7 @@ def main(): if total: text = " Replacing chunks with status: {0} ".format(status) print "{0:#^60}".format(text) - fixed = w.replace_problematic_chunks(backup_worlds, problem, options) + fixed = w.replace_problematic_chunks(backup_worlds, problem, ent_lim, del_ent) print "\n{0} replaced of a total of {1} chunks with status: {2}".format(fixed, total, status) else: print "No chunks to replace with status: {0}".format(status) @@ -431,6 +433,8 @@ def main(): # replace region files if backup_worlds and not len(world_list) > 1: + del_ent = options.delete_entities + ent_lim = options.entity_limit options_replace = [o.replace_too_small] replacing = zip(options_replace, world.REGION_PROBLEMS_ITERATOR) for replace, (problem, status, arg) in replacing: @@ -439,7 +443,7 @@ def main(): if total: text = " Replacing regions with status: {0} ".format(status) print "{0:#^60}".format(text) - fixed = w.replace_problematic_regions(backup_worlds, problem, options) + fixed = w.replace_problematic_regions(backup_worlds, problem, ent_lim, del_ent) print "\n{0} replaced of a total of {1} regions with status: {2}".format(fixed, total, status) else: print "No region to replace with status: {0}".format(status) diff --git a/regionfixer_core/interactive.py b/regionfixer_core/interactive.py index a184a6a..f6fb098 100644 --- a/regionfixer_core/interactive.py +++ b/regionfixer_core/interactive.py @@ -307,6 +307,8 @@ def do_remove_chunks(self, arg): print "The world hasn't be scanned (or it needs a rescan). Use \'scan\' to scan it." def do_replace_chunks(self, arg): + el = self.options.entity_limit + de = self.options.delete_entities if self.current and self.current.scanned: if len(arg.split()) == 0: print "Possible arguments are: {0}".format(self.possible_chunk_args_text) @@ -316,7 +318,7 @@ def do_replace_chunks(self, arg): if arg in world.CHUNK_PROBLEMS_ARGS.values() or arg == 'all': for problem, status_text, a in world.CHUNK_PROBLEMS_ITERATOR: if arg == 'all' or arg == a: - n = self.current.replace_problematic_chunks(self.backup_worlds, problem, self.options) + n = self.current.replace_problematic_chunks(self.backup_worlds, problem, el, de) if n: self.current.scanned = False print "\nReplaced {0} chunks with status \'{1}\'.".format(n, status_text) @@ -326,6 +328,8 @@ def do_replace_chunks(self, arg): print "The world hasn't be scanned (or it needs a rescan). Use \'scan\' to scan it." 
def do_replace_regions(self, arg): + el = self.options.entity_limit + de = self.options.delete_entities if self.current and self.current.scanned: if len(arg.split()) == 0: print "Possible arguments are: {0}".format(self.possible_region_args_text) @@ -335,7 +339,7 @@ def do_replace_regions(self, arg): if arg in world.REGION_PROBLEMS_ARGS.values() or arg == 'all': for problem, status_text, a in world.REGION_PROBLEMS_ITERATOR: if arg == 'all' or arg == a: - n = self.current.replace_problematic_regions(self.backup_worlds, problem, self.options) + n = self.current.replace_problematic_regions(self.backup_worlds, problem, el, de) if n: self.current.scanned = False print "\nReplaced {0} regions with status \'{1}\'.".format(n, status_text) diff --git a/regionfixer_core/scan.py b/regionfixer_core/scan.py index d65899b..70a0a8c 100644 --- a/regionfixer_core/scan.py +++ b/regionfixer_core/scan.py @@ -151,7 +151,7 @@ def get_last_result(self): r = q.get() logging.debug("AsyncRegionsetScanner: result: {0}".format(r)) if isinstance(r, tuple): - logging.debug("AsyncRegionsetScanner: Something went wrong handling error") + logging.debug("AsyncRegionsetScanner: Something went wrong, handling error") raise ChildProcessException(r[0], r[1][0], r[1][1], r[1][2]) # Overwrite it in the regionset self._regionset[r.get_coords()] = r @@ -343,7 +343,6 @@ def get_last_result(self): logging.debug("AsyncDataScanner: starting get_last_result") logging.debug("AsyncDataScanner: queue empty: {0}".format(q.empty())) if not q.empty(): - logging.debug("AsyncDataScanner: queue not empty") p = q.get() if isinstance(p, tuple): raise ChildProcessException(p[0], p[1][0], p[1][1], p[1][2]) diff --git a/regionfixer_core/world.py b/regionfixer_core/world.py index 3b5048a..701977d 100644 --- a/regionfixer_core/world.py +++ b/regionfixer_core/world.py @@ -783,13 +783,14 @@ def count_chunks(self, status = None): counter += count return counter - def replace_problematic_chunks(self, backup_worlds, problem, options): + def replace_problematic_chunks(self, backup_worlds, problem, entity_limit, delete_entities): """ Takes a list of world objects and a problem value and try to replace every chunk with that problem using a working chunk from the list of world objects. It uses the world objects in left to riht order. 
""" counter = 0 + scanned_regions = {} for regionset in self.regionsets: for backup in backup_worlds: # choose the correct regionset based on the dimension @@ -817,21 +818,28 @@ def replace_problematic_chunks(self, backup_worlds, problem, options): tofix_region_path, _ = regionset.locate_chunk(global_coords) if exists(backup_region_path): print "Backup region file found in:\n {0}".format(backup_region_path) - - # scan the whole region file, pretty slow, but completely needed to detec sharing offset chunks - from scan import scan_region_file - r = scan_region_file(ScannedRegionFile(backup_region_path),options) + # Scan the whole region file, pretty slow, but + # absolutely needed to detect sharing offset chunks + # The backups world doesn't change, check if the + # region_file is already scanned: + try: + coords = get_region_coords(backup_region_path.split()[1]) + r = scanned_regions[coords] + except KeyError: + from scan import scan_region_file + r = scan_region_file(ScannedRegionFile(backup_region_path), entity_limit, delete_entities) + scanned_regions[r.coords] = r try: status_tuple = r[local_coords] except KeyError: status_tuple = None - - # retrive the status from status_tuple + + # Retrive the status from status_tuple if status_tuple == None: status = CHUNK_NOT_CREATED else: status = status_tuple[TUPLE_STATUS] - + if status == CHUNK_OK: backup_region_file = region.RegionFile(backup_region_path) working_chunk = backup_region_file.get_chunk(local_coords[0],local_coords[1]) @@ -864,7 +872,7 @@ def remove_problematic_chunks(self, problem): counter += regionset.remove_problematic_chunks(problem) return counter - def replace_problematic_regions(self, backup_worlds, problem, options): + def replace_problematic_regions(self, backup_worlds, problem, entity_limit, delete_entities): """ Replaces region files with the given problem using a backup directory. """ counter = 0 From ab174710b813f00279eab2a93fea19a9ed66490c Mon Sep 17 00:00:00 2001 From: Alejandro Aguilera Date: Tue, 1 Jul 2014 11:20:25 +0200 Subject: [PATCH 017/151] Ooops... add version files. --- gui/version.py | 8 ++++++++ regionfixer_core/version.py | 8 ++++++++ 2 files changed, 16 insertions(+) create mode 100644 gui/version.py create mode 100644 regionfixer_core/version.py diff --git a/gui/version.py b/gui/version.py new file mode 100644 index 0000000..9364a6b --- /dev/null +++ b/gui/version.py @@ -0,0 +1,8 @@ +''' +Created on 24/06/2014 + +@author: Alejandro +''' + +version_string = "0.0.1" +version_numbers = version_string.split(".") diff --git a/regionfixer_core/version.py b/regionfixer_core/version.py new file mode 100644 index 0000000..3dd24ea --- /dev/null +++ b/regionfixer_core/version.py @@ -0,0 +1,8 @@ +''' +Created on 24/06/2014 + +@author: Alejandro +''' + +version_string = "0.2.0" +version_numbers = version_string.split('.') From a948339f27d76bd28b715f9ff1b8734ce8324844 Mon Sep 17 00:00:00 2001 From: Alejandro Aguilera Date: Tue, 1 Jul 2014 12:35:28 +0200 Subject: [PATCH 018/151] Fix problem scanning data files. Workaround idcounts.dat not using gzip compression. Fix calling windows dlls in linux. 
--- gui/main.py | 12 +++++++----- regionfixer_core/scan.py | 18 +++++++++++++++--- 2 files changed, 22 insertions(+), 8 deletions(-) diff --git a/gui/main.py b/gui/main.py index 2dcb332..c7ecaf5 100644 --- a/gui/main.py +++ b/gui/main.py @@ -4,6 +4,7 @@ import wx from time import sleep from os.path import split, abspath +from os import name as os_name from backups import BackupsWindow from regionfixer_core.scan import AsyncWorldScanner, AsyncDataScanner,\ @@ -11,11 +12,12 @@ from regionfixer_core import world from regionfixer_core.world import World -# Proper way to set an icon in windows 7 and above -# Thanks to http://stackoverflow.com/a/15923439 -import ctypes -myappid = 'Fenixin.region-fixer.gui.100' # arbitrary string -ctypes.windll.shell32.SetCurrentProcessExplicitAppUserModelID(myappid) +if os_name == 'nt': + # Proper way to set an icon in windows 7 and above + # Thanks to http://stackoverflow.com/a/15923439 + import ctypes + myappid = 'Fenixin.region-fixer.gui.100' # arbitrary string + ctypes.windll.shell32.SetCurrentProcessExplicitAppUserModelID(myappid) class MainWindow(wx.Frame): diff --git a/regionfixer_core/scan.py b/regionfixer_core/scan.py index 70a0a8c..05c3187 100644 --- a/regionfixer_core/scan.py +++ b/regionfixer_core/scan.py @@ -47,7 +47,7 @@ TUPLE_STATUS = 1 -# logging.basicConfig(filename='scan.log', level=logging.DEBUG) +logging.basicConfig(filename='scan.log', level=logging.DEBUG) class ChildProcessException(Exception): @@ -512,16 +512,28 @@ def scan_data(scanned_dat_file): If something is wrong it will return a tuple with useful info to debug the problem. + + NOTE: idcounts.dat (number of map files) is a nbt file and + is not compressed, we handle the special case here. + """ s = scanned_dat_file try: - _ = nbt.NBTFile(filename=s.path) + if s.filename == 'idcounts.dat': + # TODO: This is ugly + # Open the file and create a buffer, this way + # NBT won't try to de-gzip the file + f = open(s.path) + + _ = nbt.NBTFile(buffer=f) + else: + _ = nbt.NBTFile(filename=s.path) s.readable = True except MalformedFileError as e: s.readable = False s.status_text = str(e) - except IOError: + except IOError as e: s.readable = False s.status_text = str(e) except: From c0e233f5adbc929e774a8597a2c3080845620e94 Mon Sep 17 00:00:00 2001 From: Alejandro Aguilera Date: Tue, 1 Jul 2014 13:21:46 +0200 Subject: [PATCH 019/151] Fix problem with progressdlg in linux, it behaves different than in windows. Fix missing property in AsyncDataScanner. --- gui/main.py | 7 +++++-- regionfixer_core/scan.py | 23 ++++++++++++++++++++--- regionfixer_core/world.py | 2 +- 3 files changed, 26 insertions(+), 6 deletions(-) diff --git a/gui/main.py b/gui/main.py index c7ecaf5..e6edb47 100644 --- a/gui/main.py +++ b/gui/main.py @@ -209,14 +209,17 @@ def OnScan(self, e): wx.PD_AUTO_HIDE | wx.PD_SMOOTH) scanner.scan() counter = 0 - progressdlg.ShowModal() + # NOTE TO SELF: ShowModal behaves different in windows and Linux! + # Use it with care. 
+ progressdlg.Show() while not scanner.finished: sleep(0.001) result = scanner.get_last_result() + if result: counter += 1 not_cancelled, not_skipped = progressdlg.Update(counter, - "Last scanned:\n" + ws.str_last_scanned) + "Last scanned:\n" + scanner.str_last_scanned) if not not_cancelled: # User pressed cancel scanner.terminate() diff --git a/regionfixer_core/scan.py b/regionfixer_core/scan.py index 05c3187..867421c 100644 --- a/regionfixer_core/scan.py +++ b/regionfixer_core/scan.py @@ -47,7 +47,7 @@ TUPLE_STATUS = 1 -logging.basicConfig(filename='scan.log', level=logging.DEBUG) +# logging.basicConfig(filename='scan.log', level=logging.DEBUG) class ChildProcessException(Exception): @@ -124,6 +124,9 @@ def __init__(self, regionset, processes, entity_limit, # Recommended time to sleep between polls for results self.scan_wait_time = 0.001 + # Holds a friendly string with the name of the last file scanned + self._str_last_scanned = None + def scan(self): """ Scan and fill the given regionset. """ total_regions = len(self._regionset.regions) @@ -222,9 +225,9 @@ def __init__(self, world_obj, processes, entity_limit, self.regionsets = copy(world_obj.regionsets) self._current_regionset = None - self._str_last_scanned = "" + self._str_last_scanned = None - # Recommended time to sleep between polls for results + # Holds a friendly string with the name of the last file scanned self.scan_wait_time = 0.001 def scan(self): @@ -235,6 +238,9 @@ def scan(self): self.remove_entities) self._current_regionset = cr cr.scan() + + # See method + self._str_last_scanned = "" def get_last_result(self): """ Return results of last region file scanned. @@ -326,6 +332,9 @@ def __init__(self, data_dict, processes): # Recommended time to sleep between polls for results self.scan_wait_time = 0.0001 + # Holds a friendly string with the name of the last file scanned + self._str_last_scanned = None + def scan(self): """ Scan and fill the given data_dict generated by world.py. """ total_datas = len(self._data_dict) @@ -336,6 +345,9 @@ def scan(self): # No more tasks to the pool, exit the processes once the tasks are done self.pool.close() + # See method + self._str_last_scanned = "" + def get_last_result(self): """ Return results of last data file scanned. """ @@ -353,6 +365,11 @@ def get_last_result(self): else: return None + @property + def str_last_scanned(self): + """ A friendly string with last scanned thing. """ + return self._str_last_scanned if self._str_last_scanned else "Scanning..." + @property def finished(self): """ Have the scan finished? """ diff --git a/regionfixer_core/world.py b/regionfixer_core/world.py index 701977d..2e95aaf 100644 --- a/regionfixer_core/world.py +++ b/regionfixer_core/world.py @@ -585,7 +585,7 @@ def generate_report(self, standalone): text += "\nNo problems found.\n" # regions - text += "\n\nRegion problems:\n" + text += "\n\nRegion files problems:\n" region_errors = ("Problem","Too small","Unreadable","Total regions") region_counters = ("Counts", too_small_region,unreadable_region, total_regions) table_data = [] From a3b176d89580291e41c26f5c4bd795f0190b29af Mon Sep 17 00:00:00 2001 From: Alejandro Aguilera Date: Tue, 1 Jul 2014 17:12:15 +0200 Subject: [PATCH 020/151] Improve status printing. 
--- regionfixer_core/world.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/regionfixer_core/world.py b/regionfixer_core/world.py index 2e95aaf..29da91b 100644 --- a/regionfixer_core/world.py +++ b/regionfixer_core/world.py @@ -582,7 +582,7 @@ def generate_report(self, standalone): if corrupted or wrong_located or entities_prob or shared_prob: text += table(table_data) else: - text += "\nNo problems found.\n" + text += "No problems found.\n" # regions text += "\n\nRegion files problems:\n" From bad64bb40e8afb22e26ff9b5a3ad73e9333af5a4 Mon Sep 17 00:00:00 2001 From: Alejandro Aguilera Date: Fri, 4 Jul 2014 12:04:37 +0200 Subject: [PATCH 021/151] Add help window. --- gui/help.py | 46 ++++++++++++++++++++++++++++++++++++++++++++++ gui/main.py | 6 ++++++ gui/starter.py | 2 ++ 3 files changed, 54 insertions(+) create mode 100644 gui/help.py diff --git a/gui/help.py b/gui/help.py new file mode 100644 index 0000000..096f486 --- /dev/null +++ b/gui/help.py @@ -0,0 +1,46 @@ +#!/usr/bin/env python +# -*- coding: utf-8 -*- + +import wx + +class HelpWindow(wx.Frame): + def __init__(self, parent, title="Help"): + wx.Frame.__init__(self, parent, title=title, + style=wx.CLOSE_BOX | wx.RESIZE_BORDER | wx.CAPTION) + # Every windows should use panel as parent. Not doing so will + # make the windows look non-native (very ugly) + panel = wx.Panel(self) + + self.help1 = wx.StaticText(panel, style=wx.ALIGN_CENTER, + label="If you need help you can give a look to the wiki:") + self.link_github = wx.HyperlinkCtrl(panel, wx.ID_ABOUT, + "https://github.com/Fenixin/Minecraft-Region-Fixer/wiki", + style=wx.ALIGN_CENTER) + self.help2 = wx.StaticText(panel, + style=wx.TE_MULTILINE | wx.ALIGN_CENTER, + label="Or ask in the minecraft forums:") + self.link_minecraft_forums = wx.HyperlinkCtrl(panel, wx.ID_ABOUT, + "http://www.minecraftforum.net/topic/302380-minecraft-region-fixer/", + "http://www.minecraftforum.net/topic/302380-minecraft-region-fixer/", + style=wx.ALIGN_CENTER) + + self.close_button = wx.Button(panel, wx.ID_CLOSE) + + self.sizer = wx.BoxSizer(wx.VERTICAL) + self.sizer.Add(self.help1, 0, wx.ALIGN_CENTER | wx.TOP, 10) + self.sizer.Add(self.link_github, 0, wx.ALIGN_CENTER | wx.ALL, 5) + self.sizer.Add(self.help2, 0, wx.ALIGN_CENTER | wx.TOP, 20) + self.sizer.Add(self.link_minecraft_forums, 0, wx.ALIGN_CENTER | wx.ALL, 5) + self.sizer.Add(self.close_button, 0, wx.ALIGN_CENTER | wx.ALL, 20) + + # Fit sizers and make the windows not resizable + panel.SetSizerAndFit(self.sizer) + self.sizer.Fit(self) + size = self.GetSize() + self.SetMinSize(size) + self.SetMaxSize(size) + + self.Bind(wx.EVT_BUTTON, self.OnClose, self.close_button) + + def OnClose(self, e): + self.Show(False) diff --git a/gui/main.py b/gui/main.py index e6edb47..52b0962 100644 --- a/gui/main.py +++ b/gui/main.py @@ -51,6 +51,8 @@ def __init__(self, parent, title, backups=None): menuExit = filemenu.Append(wx.ID_EXIT, "E&xit","Terminate program") # Add elements to helpmenu + menuHelp = helpmenu.Append(wx.ID_HELP, "&Help", "Where to find help") + helpmenu.AppendSeparator() menuAbout = helpmenu.Append(wx.ID_ABOUT, "&About", "Information about this program") # Add elements to windowsmenu @@ -136,6 +138,7 @@ def __init__(self, parent, title, backups=None): # Bindings self.Bind(wx.EVT_MENU, self.OnAbout, menuAbout) + self.Bind(wx.EVT_MENU, self.OnHelp, menuHelp) self.Bind(wx.EVT_MENU, self.OnOpen, menuOpen) self.Bind(wx.EVT_MENU, self.OnBackups, menuBackups) self.Bind(wx.EVT_MENU, self.OnExit, menuExit) @@ -156,6 +159,9 @@ def 
OnBackups(self, e): def OnAbout(self, e): self.about.Show(True) + + def OnHelp(self, e): + self.help.Show(True) def OnOpen(self, e): dlg = wx.DirDialog(self, "Choose a Minecraf world folder") diff --git a/gui/starter.py b/gui/starter.py index 2acb2d4..fa5630d 100644 --- a/gui/starter.py +++ b/gui/starter.py @@ -6,6 +6,7 @@ from main import MainWindow from backups import BackupsWindow from about import AboutWindow +from help import HelpWindow class Starter(object): @@ -20,6 +21,7 @@ def __init__(self): self.about = AboutWindow(self.frame, "About") self.frame.backups = self.backups self.frame.about = self.about + self.frame.help = HelpWindow(self.frame, "Help") def run(self): """ Run the app main loop. """ From ecf2dcd3847f8a2c8876b710d2ca1db34c0f6a3b Mon Sep 17 00:00:00 2001 From: Alejandro Aguilera Date: Fri, 4 Jul 2014 13:07:23 +0200 Subject: [PATCH 022/151] Add verbose mode. --- regionfixer.py | 6 +- regionfixer_core/interactive.py | 3 +- regionfixer_core/scan.py | 100 ++++++++++++++++---------------- regionfixer_core/world.py | 28 ++++++++- 4 files changed, 79 insertions(+), 58 deletions(-) diff --git a/regionfixer.py b/regionfixer.py index 7b12c8c..5ae4352 100644 --- a/regionfixer.py +++ b/regionfixer.py @@ -364,10 +364,8 @@ def main(): summary_text = "" # Scan the separate region files if len(regionset.regions) > 0: - print entitle("Scanning separate region files", 0) - console_scan_regionset(regionset, o.processes, o.entity_limit, - o.delete_entities) + o.delete_entities, o.verbose) print regionset.generate_report(True) # Delete chunks @@ -393,7 +391,7 @@ def main(): print entitle(' Scanning world: {0} '.format(w_name), 0) console_scan_world(w, o.processes, o.entity_limit, - o.delete_entities) + o.delete_entities, o.verbose) print print entitle('Scan results for: {0}'.format(w_name), 0) diff --git a/regionfixer_core/interactive.py b/regionfixer_core/interactive.py index f6fb098..c91b87b 100644 --- a/regionfixer_core/interactive.py +++ b/regionfixer_core/interactive.py @@ -204,7 +204,8 @@ def do_scan(self, arg): if isinstance(self.current, world.World): self.current = world.World(self.current.path) console_scan_world(self.current, o.processes, - o.entity_limit, o.delete_entities) + o.entity_limit, o.delete_entities, + o.verbose) elif isinstance(self.current, world.RegionSet): print "\n{0:-^60}".format(' Scanning region files ') console_scan_regionset(self.current, o.processes, diff --git a/regionfixer_core/scan.py b/regionfixer_core/scan.py index 867421c..09aa32c 100644 --- a/regionfixer_core/scan.py +++ b/regionfixer_core/scan.py @@ -38,6 +38,9 @@ RegionHeaderError, InconceivedChunk import progressbar import world +from regionfixer_core.world import REGION_OK, REGION_TOO_SMALL,\ + REGION_UNREADABLE +from regionfixer_core.util import entitle #~ TUPLE_COORDS = 0 @@ -422,7 +425,44 @@ def __len__(self): progressbar.ETA()] -def console_scan_world(world_obj, processes, entity_limit, remove_entities): +def console_scan_loop(scanners, scan_titles, verbose): + try: + for scanner, title in zip(scanners, scan_titles): + # Scan player files + print "\n{0:-^60}".format(title) + if not len(scanner): + print "Info: No files to scan." 
+ else: + total = len(scanner) + if not verbose: + pbar = progressbar.ProgressBar(widgets=widgets, + maxval=total) + scanner.scan() + counter = 0 + while not scanner.finished: + sleep(scanner.scan_wait_time) + result = scanner.get_last_result() + if result: + counter += 1 + if not verbose: + pbar.update(counter) + else: + status = "(" + result.oneliner_status + ")" + fn = result.filename + print "Scanned {0: <12} {1:.<43} {2}/{3}".format(fn, status, counter, total) + if not verbose: + pbar.finish() + except ChildProcessException as e: + print "\n\nSomething went really wrong scanning a file." + print ("This is probably a bug! If you have the time, please report " + "it to the region-fixer github or in the region fixer post " + "in minecraft forums") + print e.printable_traceback + raise e + + +def console_scan_world(world_obj, processes, entity_limit, remove_entities, + verbose): """ Scans a world folder prints status to console. It will scan region files and data files (includes players). @@ -464,65 +504,22 @@ def console_scan_world(world_obj, processes, entity_limit, remove_entities): ' Scanning old format player files ', ' Scanning structures and map data files ', ' Scanning region files '] - try: - for scanner, title in zip(scanners, scan_titles): - # Scan player files - print "\n{0:-^60}".format(title) - if not len(scanner): - print "Info: No files to scan." - else: - total = len(scanner) - pbar = progressbar.ProgressBar(widgets=widgets, maxval=total) - scanner.scan() - counter = 0 - while not scanner.finished: - sleep(scanner.scan_wait_time) - result = scanner.get_last_result() - if result: - counter += 1 - pbar.update(counter) - pbar.finish() - w.scanned = True - except ChildProcessException as e: - print "\n\nSomething went really wrong scanning a file." - print ("This is probably a bug! If you have the time, please report " - "it to the region-fixer github or in the region fixer post " - "in minecraft forums") - print e.printable_traceback - raise e + console_scan_loop(scanners, scan_titles, verbose) + w.scanned = True def console_scan_regionset(regionset, processes, entity_limit, - remove_entities): + remove_entities, verbose): """ Scan a regionset printing status to console. Uses AsyncRegionsetScanner. """ - total_regions = len(regionset) - pbar = progressbar.ProgressBar(widgets=widgets, - maxval=total_regions) - pbar.start() rs = AsyncRegionsetScanner(regionset, processes, entity_limit, remove_entities) - rs.scan() - counter = 0 - try: - while not rs.finished: - sleep(0.01) - result = rs.get_last_result() - if result: - counter += 1 - pbar.update(counter) - pbar.finish() - except ChildProcessException as e: - print "\n\nSomething went really wrong scanning a file." - print ("This is probably a bug! If you have the time, please report " - "it to the region-fixer github or in the region fixer post " - "in minecraft forums") - print e.printable_traceback - raise e - + scanners = [rs] + titles = [entitle("Scanning separate region files", 0)] + console_scan_loop(scanners, titles, verbose) def scan_data(scanned_dat_file): """ Try to parse the nbd data file, and fill the scanned object. 
@@ -601,10 +598,12 @@ def scan_region_file(scanned_regionfile_obj, entity_limit, delete_entities): except region.NoRegionHeader: # The region has no header r.status = world.REGION_TOO_SMALL r.scan_time = time() + r.scanned = True return r except IOError, e: r.status = world.REGION_UNREADABLE r.scan_time = time() + r.scanned = True return r for x in range(32): @@ -672,6 +671,7 @@ def scan_region_file(scanned_regionfile_obj, entity_limit, delete_entities): r.shared_offset = shared_counter r.scan_time = time() r.status = world.REGION_OK + r.scanned = True return r except KeyboardInterrupt: diff --git a/regionfixer_core/world.py b/regionfixer_core/world.py index 29da91b..06e3720 100644 --- a/regionfixer_core/world.py +++ b/regionfixer_core/world.py @@ -119,6 +119,10 @@ def __str__(self): text += "\tReadable:" + str(self.readable) + "\n" return text + @property + def oneliner_status(self): + return "Readable" if self.readable else "Unreadable" + class ScannedChunk(object): """ Stores all the results of the scan. Not used at the moment, it @@ -180,9 +184,8 @@ def __init__(self, filename, corrupted=0, wrong=0, entities_prob=0, # in the region file self.chunks = {} - # TODO: these values aren't really used. - # count_chunks() is used instead. - # counters with the number of chunks + # Counters with the number of chunks + # Filled in scan.scan_region_file self.corrupted_chunks = corrupted self.wrong_located_chunks = wrong self.entities_prob = entities_prob @@ -196,6 +199,25 @@ def __init__(self, filename, corrupted=0, wrong=0, entities_prob=0, # TOO SMALL or UNREADABLE see the constants at the start # of the file. self.status = status + + self.scanned = False + + @property + def oneliner_status(self): + if self.scanned: + status = self.status + if status == REGION_OK: + stats = "c: {0}, w: {1}, tme: {2}, so: {3}, t: {4}".format(\ + self.corrupted_chunks, self.wrong_located_chunks,\ + self.entities_prob, self.shared_offset, self.chunk_count) + elif status == REGION_TOO_SMALL: + stats = "No header in the region file" + elif status == REGION_UNREADABLE: + stats = "Unreadable region file)" + else: + stats = "Not scanned" + + return stats def __str__(self): text = "Path: {0}".format(self.path) From b9bf4948a5b2759e9e7116b920afdb7eac5eb449 Mon Sep 17 00:00:00 2001 From: Alejandro Aguilera Date: Tue, 16 Sep 2014 00:31:28 +0200 Subject: [PATCH 023/151] Improve code in scan and world modules. --- regionfixer_core/scan.py | 368 ++++++++++++++++++-------------------- regionfixer_core/world.py | 95 +++++++--- 2 files changed, 247 insertions(+), 216 deletions(-) diff --git a/regionfixer_core/scan.py b/regionfixer_core/scan.py index 09aa32c..2d7e993 100644 --- a/regionfixer_core/scan.py +++ b/regionfixer_core/scan.py @@ -25,10 +25,10 @@ import sys import logging import multiprocessing +from multiprocessing.queues import SimpleQueue from os.path import split, abspath from time import sleep, time from copy import copy -from multiprocessing import queues from traceback import extract_tb import nbt.region as region @@ -38,8 +38,7 @@ RegionHeaderError, InconceivedChunk import progressbar import world -from regionfixer_core.world import REGION_OK, REGION_TOO_SMALL,\ - REGION_UNREADABLE + from regionfixer_core.util import entitle @@ -88,7 +87,7 @@ def printable_traceback(self): return text def save_error_log(self, filename='error.log'): - """ Save the error in filename, return the path of saved file. """ + """ Save the error in filename, return the absolute path of saved file. 
""" f = open(filename, 'w') error_log_path = abspath(f.name) filename = self.scanned_file.filename @@ -109,33 +108,107 @@ def update(self, pbar): return '%2d%s%2d' % (pbar.currval, self.sep, pbar.maxval) -class AsyncRegionsetScanner(object): - def __init__(self, regionset, processes, entity_limit, - remove_entities=False): +def multiprocess_scan_data(data): + """ Does the multithread stuff for scan_data """ + # Protect everything so an exception will be returned from the worker + try: + result = scan_data(data) + multiprocess_scan_data.q.put(result) + except KeyboardInterrupt as e: + raise e + except: + except_type, except_class, tb = sys.exc_info() + s = (data, (except_type, except_class, extract_tb(tb))) + multiprocess_scan_data.q.put(s) + + +def multiprocess_scan_regionfile(region_file): + """ Does the multithread stuff for scan_region_file """ + # Protect everything so an exception will be returned from the worker + try: + r = region_file + entity_limit = multiprocess_scan_regionfile.entity_limit + remove_entities = multiprocess_scan_regionfile.remove_entities + # call the normal scan_region_file with this parameters + r = scan_region_file(r, entity_limit, remove_entities) + multiprocess_scan_regionfile.q.put(r) + except KeyboardInterrupt as e: + raise e + except: + except_type, except_class, tb = sys.exc_info() + s = (region_file, (except_type, except_class, extract_tb(tb))) + multiprocess_scan_regionfile.q.put(s) + - self._regionset = regionset +def _mp_data_pool_init(d): + """ Function to initialize the multiprocessing in scan_regionset. + Is used to pass values to the child process. + + Requiere to pass the multiprocessing queue as argument. + """ + assert(type(d) == dict) + assert('queue' in d) + multiprocess_scan_data.q = d['queue'] + + +def _mp_regionset_pool_init(d): + """ Function to initialize the multiprocessing in scan_regionset. + Is used to pass values to the child process. """ + assert(type(d) == dict) + assert('regionset' in d) + assert('queue' in d) + assert('entity_limit' in d) + assert('remove_entities' in d) + multiprocess_scan_regionfile.regionset = d['regionset'] + multiprocess_scan_regionfile.q = d['queue'] + multiprocess_scan_regionfile.entity_limit = ['entity_limit'] + multiprocess_scan_regionfile.remove_entities = ['remove_entities'] + + +class AsyncScanner(object): + """ Class to derive all the scanner classes from. + + To implement a scanner you have to override: + update_str_last_scanned() + """ + def __init__(self, data_structure, processes, scan_function, init_args, + _mp_init_function): + """ Init the scanner. + + data_structure is a world.DataSet + processes is the number of child processes to use + scan_function is the function to use for scanning + init_args are the arguments passed to the init function + _mp_init_function is the function used to init the child processes + """ + assert(isinstance(data_structure, world.DataSet)) + self.data_structure = data_structure + self.list_files_to_scan = data_structure._get_list() self.processes = processes - self.entity_limit = entity_limit - self.remove_entities = remove_entities + self.scan_function = scan_function # Queue used by processes to pass results - self.queue = q = queues.SimpleQueue() + self.queue = SimpleQueue() + init_args.update({'queue': self.queue}) + # NOTE TO SELF: initargs doesn't handle kwargs, only args! 
+ # Pass a dict with all the args self.pool = multiprocessing.Pool(processes=processes, - initializer=_mp_pool_init, - initargs=(regionset, entity_limit, remove_entities, q)) + initializer=_mp_init_function, + initargs=(init_args,)) + # TODO: make this automatic amount # Recommended time to sleep between polls for results - self.scan_wait_time = 0.001 + self.SCAN_WAIT_TIME = 0.001 # Holds a friendly string with the name of the last file scanned self._str_last_scanned = None def scan(self): - """ Scan and fill the given regionset. """ - total_regions = len(self._regionset.regions) - self._results = self.pool.map_async(multiprocess_scan_regionfile, - self._regionset.list_regions(None), - max(1,total_regions//self.processes)) + """ Launch the child processes and scan all the files. """ + total_files = len(self.data_structure) + self._results = self.pool.map_async(self.scan_function, + self.list_files_to_scan, 5) + # max(1, total_files // self.processes)) # No more tasks to the pool, exit the processes once the tasks are done self.pool.close() @@ -143,26 +216,18 @@ def scan(self): self._str_last_scanned = "" def get_last_result(self): - """ Return results of last region file scanned. - - If there are left no scanned region files return None. The - ScannedRegionFile returned is the same instance in the regionset, - don't modify it or you will modify the regionset results. - """ + """ Return results of last file scanned. """ q = self.queue - logging.debug("AsyncRegionsetScanner: starting get_last_result") - logging.debug("AsyncRegionsetScanner: queue empty: {0}".format(q.empty())) + ds = self.data_structure if not q.empty(): - r = q.get() - logging.debug("AsyncRegionsetScanner: result: {0}".format(r)) - if isinstance(r, tuple): - logging.debug("AsyncRegionsetScanner: Something went wrong, handling error") - raise ChildProcessException(r[0], r[1][0], r[1][1], r[1][2]) - # Overwrite it in the regionset - self._regionset[r.get_coords()] = r - self._str_last_scanned = self._regionset.get_name() + ": " + r.filename - return r + d = q.get() + if isinstance(d, tuple): + self.raise_child_exception(d) + # Copy it to the father process + ds._replace_in_data_structure(d) + self.update_str_last_scanned(d) + return d else: return None @@ -171,20 +236,27 @@ def terminate(self): """ self.pool.terminate() + def raise_child_exception(self, exception_tuple): + """ Raises a ChildProcessException using the info + contained in the tuple returned by the child process. """ + e = exception_tuple + raise ChildProcessException(e[0], e[1][0], e[1][1], e[1][2]) + + def update_str_last_scanned(self): + """ Updates the string that represents the last file scanned. """ + raise NotImplemented + @property def str_last_scanned(self): """ A friendly string with last scanned thing. """ - return self._str_last_scanned if self._str_last_scanned else "Scanning..." + return self._str_last_scanned if self._str_last_scanned \ + else "Scanning..." @property def finished(self): """ Finished the operation. The queue could have elements """ return self._results.ready() and self.queue.empty() - @property - def regionset(self): - return self._regionset - @property def results(self): """ Yield all the results from the scan. 
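Editor's note (illustrative sketch, not part of the patch): the hunks above funnel everything the child processes need through a single dict passed to Pool's initargs (hence the "NOTE TO SELF" comment), and hand results back through a SimpleQueue that the pool initializer stashes on the worker function. Below is a minimal standalone version of that pattern; _pool_init, worker and run_scan are invented names for illustration only.

import multiprocessing
from multiprocessing.queues import SimpleQueue

def _pool_init(init_dict):
    # Runs once in every child process: keep the queue reachable from the
    # worker function, the same trick _mp_data_pool_init and
    # _mp_regionset_pool_init use above.
    worker.queue = init_dict['queue']

def worker(item):
    # Stand-in for scan_data()/scan_region_file(): do the work and push
    # the result back to the parent through the shared queue.
    worker.queue.put(item * item)

def run_scan(items, processes=2):
    queue = SimpleQueue()
    pool = multiprocessing.Pool(processes=processes,
                                initializer=_pool_init,
                                initargs=({'queue': queue},))
    pool.map_async(worker, items)
    pool.close()
    results = [queue.get() for _ in items]  # get() blocks until a result arrives
    pool.join()
    return results

if __name__ == '__main__':
    print(run_scan(range(10)))

Results arrive in completion order, not input order; the parent simply drains the queue, which is what get_last_result() and the results generator do in the class above.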
@@ -199,24 +271,67 @@ def results(self): """ q = self.queue - logging.debug("AsyncRegionsetScanner: starting yield results") + T = self.SCAN_WAIT_TIME while not q.empty() or not self.finished: - sleep(0.0001) - logging.debug("AsyncRegionsetScanner: in while") + sleep(T) if not q.empty(): - r = q.get() - logging.debug("AsyncRegionsetScanner: result: {0}".format(r)) - if isinstance(r, tuple): - raise ChildProcessException(r[0], r[1][0], r[1][1], r[1][2]) - # Overwrite it in the regionset - self._regionset[r.get_coords()] = r - yield r + d = q.get() + if isinstance(d, tuple): + self.raise_child_exception(d) + # Overwrite it in the data dict + self.replace_in_data_structure(d) + yield d def __len__(self): - return len(self._regionset) + return len(self.data_structure) + + +class AsyncDataScanner(AsyncScanner): + """ Scan a DataFileSet and fill the data structure. """ + def __init__(self, data_structure, processes): + scan_function = multiprocess_scan_data + init_args = {} + _mp_init_function = _mp_data_pool_init + + AsyncScanner.__init__(self, data_structure, processes, scan_function, + init_args, _mp_init_function) + + # Recommended time to sleep between polls for results + self.scan_wait_time = 0.0001 + + def update_str_last_scanned(self, data): + self._str_last_scanned = data.filename + + +class AsyncRegionsetScanner(AsyncScanner): + """ Scan a RegionSet and fill the data structure. """ + def __init__(self, regionset, processes, entity_limit, + remove_entities=False): + assert(isinstance(regionset, world.DataSet)) -class AsyncWorldScanner(object): + scan_function = multiprocess_scan_regionfile + _mp_init_function = _mp_regionset_pool_init + + init_args = {} + init_args['regionset'] = regionset + init_args['processes'] = processes + init_args['entity_limit'] = entity_limit + init_args['remove_entities'] = remove_entities + + AsyncScanner.__init__(self, regionset, processes, scan_function, + init_args, _mp_init_function) + + # Recommended time to sleep between polls for results + self.scan_wait_time = 0.001 + + def update_str_last_scanned(self, r): + self._str_last_scanned = self.data_structure.get_name() + ": " + r.filename + + +class AsyncWorldRegionScanner(object): + """ Wrapper around the calls of AsyncScanner to scan all the + regionsets of a world. """ def __init__(self, world_obj, processes, entity_limit, remove_entities=False): @@ -241,7 +356,7 @@ def scan(self): self.remove_entities) self._current_regionset = cr cr.scan() - + # See method self._str_last_scanned = "" @@ -322,98 +437,6 @@ def __len__(self): return l -class AsyncDataScanner(object): - def __init__(self, data_dict, processes): - - self._data_dict = data_dict - self.processes = processes - - self.queue = q = queues.SimpleQueue() - self.pool = multiprocessing.Pool(processes=processes, - initializer=_mp_data_pool_init, - initargs=(q,)) - # Recommended time to sleep between polls for results - self.scan_wait_time = 0.0001 - - # Holds a friendly string with the name of the last file scanned - self._str_last_scanned = None - - def scan(self): - """ Scan and fill the given data_dict generated by world.py. """ - total_datas = len(self._data_dict) - data_list = self._data_dict.values() - self._results = self.pool.map_async(multiprocess_scan_data, - data_list, - max(1, total_datas//self.processes)) - # No more tasks to the pool, exit the processes once the tasks are done - self.pool.close() - - # See method - self._str_last_scanned = "" - - def get_last_result(self): - """ Return results of last data file scanned. 
""" - - q = self.queue - logging.debug("AsyncDataScanner: starting get_last_result") - logging.debug("AsyncDataScanner: queue empty: {0}".format(q.empty())) - if not q.empty(): - p = q.get() - if isinstance(p, tuple): - raise ChildProcessException(p[0], p[1][0], p[1][1], p[1][2]) - logging.debug("AsyncDataScanner: result: {0}".format(p)) - # Overwrite it in the regionset - self._data_dict[p.filename] = p - return p - else: - return None - - @property - def str_last_scanned(self): - """ A friendly string with last scanned thing. """ - return self._str_last_scanned if self._str_last_scanned else "Scanning..." - - @property - def finished(self): - """ Have the scan finished? """ - return self._results.ready() and self.queue.empty() - - @property - def data_dict(self): - return self._data_dict - - @property - def results(self): - """ Yield all the results from the scan. - - This is the simpler method to control the scanning process, - but also the most sloppy. If you want to closely control the - scan process (for example cancel the process in the middle, - whatever is happening) use get_last_result(). - - for result in scanner.results: - # do things - """ - - q = self.queue - logging.debug("AsyncDataScanner: starting yield results") - logging.debug("AsyncDataScanner: queue empty: {0}".format(q.empty())) - while not q.empty() or not self.finished: - sleep(0.0001) - logging.debug("AsyncDataScanner: in while") - if not q.empty(): - p = q.get() - logging.debug("AsyncDataScanner: result: {0}".format(p)) - if isinstance(p, tuple): - raise ChildProcessException(p[0], p[1][0], p[1][1], p[1][2]) - # Overwrite it in the data dict - self._data_dict[p.filename] = p - yield p - - def __len__(self): - return len(self._data_dict) - - # All scanners will use this progress bar widgets = ['Scanning: ', FractionWidget(), @@ -426,9 +449,10 @@ def __len__(self): def console_scan_loop(scanners, scan_titles, verbose): + """ Uses all the AsyncScanner passed to scan the files and + print status text to the terminal. """ try: for scanner, title in zip(scanners, scan_titles): - # Scan player files print "\n{0:-^60}".format(title) if not len(scanner): print "Info: No files to scan." @@ -496,7 +520,7 @@ def console_scan_world(world_obj, processes, entity_limit, remove_entities, ps = AsyncDataScanner(w.players, processes) ops = AsyncDataScanner(w.old_players, processes) ds = AsyncDataScanner(w.data_files, processes) - ws = AsyncWorldScanner(w, processes, entity_limit, remove_entities) + ws = AsyncWorldRegionScanner(w, processes, entity_limit, remove_entities) scanners = [ps, ops, ds, ws] @@ -521,15 +545,16 @@ def console_scan_regionset(regionset, processes, entity_limit, titles = [entitle("Scanning separate region files", 0)] console_scan_loop(scanners, titles, verbose) + def scan_data(scanned_dat_file): """ Try to parse the nbd data file, and fill the scanned object. If something is wrong it will return a tuple with useful info to debug the problem. - + NOTE: idcounts.dat (number of map files) is a nbt file and is not compressed, we handle the special case here. 
- + """ s = scanned_dat_file @@ -539,7 +564,7 @@ def scan_data(scanned_dat_file): # Open the file and create a buffer, this way # NBT won't try to de-gzip the file f = open(s.path) - + _ = nbt.NBTFile(buffer=f) else: _ = nbt.NBTFile(filename=s.path) @@ -558,19 +583,6 @@ def scan_data(scanned_dat_file): return s -def multiprocess_scan_data(data): - """ Does the multithread stuff for scan_data """ - d = data - d = scan_data(d) - multiprocess_scan_data.q.put(d) - - -def _mp_data_pool_init(q): - """ Function to initialize the multiprocessing in scan_regionset. - Is used to pass values to the child process. """ - multiprocess_scan_data.q = q - - def scan_region_file(scanned_regionfile_obj, entity_limit, delete_entities): """ Scan a region file filling the ScannedRegionFile @@ -653,7 +665,7 @@ def scan_region_file(scanned_regionfile_obj, entity_limit, delete_entities): # Please note! region.py will mark both overlapping chunks # as bad (the one stepping outside his territory and the # good one). Only wrong located chunk with a overlapping - # flag are really BAD chunks! Use this criterion to + # flag are really BAD chunks! Use this criterion to # discriminate metadata = region_file.metadata sharing = [k for k in metadata if ( @@ -686,7 +698,8 @@ def scan_region_file(scanned_regionfile_obj, entity_limit, delete_entities): # NOTE TO SELF: do not try to return the traceback object directly! # A multiprocess pythonic hell comes to earth if you do so. except_type, except_class, tb = sys.exc_info() - r = (scanned_regionfile_obj, (except_type, except_class, extract_tb(tb))) + r = (scanned_regionfile_obj, + (except_type, except_class, extract_tb(tb))) return r @@ -753,28 +766,5 @@ def scan_chunk(region_file, coords, global_coords, entity_limit): return chunk, (num_entities, status) if status != world.CHUNK_NOT_CREATED else None -def _mp_pool_init(regionset, entity_limit, remove_entities, q): - """ Function to initialize the multiprocessing in scan_regionset. - Is used to pass values to the child process. """ - multiprocess_scan_regionfile.regionset = regionset - multiprocess_scan_regionfile.q = q - multiprocess_scan_regionfile.entity_limit = entity_limit - multiprocess_scan_regionfile.remove_entities = remove_entities - - -def multiprocess_scan_regionfile(region_file): - """ Does the multithread stuff for scan_region_file """ - r = region_file - entity_limit = multiprocess_scan_regionfile.entity_limit - remove_entities = multiprocess_scan_regionfile.remove_entities - # call the normal scan_region_file with this parameters - r = scan_region_file(r, entity_limit, remove_entities) - - # exceptions will be handled in scan_region_file which is in the - # single thread land - - multiprocess_scan_regionfile.q.put(r) - - if __name__ == '__main__': pass diff --git a/regionfixer_core/world.py b/regionfixer_core/world.py index 06e3720..b4c3a9f 100644 --- a/regionfixer_core/world.py +++ b/regionfixer_core/world.py @@ -104,7 +104,7 @@ "DIM-1": "Nether"} -class ScannedDatFile(object): +class ScannedDataFile(object): def __init__(self, path=None, readable=None, status_text=None): self.path = path if self.path and exists(self.path): @@ -391,7 +391,54 @@ def rescan_entities(self, options): self[c] = tuple(t) -class RegionSet(object): +class DataSet(object): + """ Stores data items to be scanned by AsyncScanner in scan.py. 
""" + + def _replace_in_data_structure(self, data): + raise NotImplemented + + def _get_list(self): + raise NotImplemented + + def __getitem__(self, key): + """ This and __setitem__ should use the path of the file as keys + not the filename. (I think) + TODO: Es realmente esto necesario? + """ + raise NotImplemented + + def __setitem__(self, key, value): + raise NotImplemented + + def __len__(self): + raise NotImplemented + + +class DataFileSet(DataSet): + """ Any scanneable set should derive from this. + + DataSets are scanned using scan.AsyncScanner + """ + def __init__(self, path, *args, **kwargs): + DataSet.__init__(self, *args, **kwargs) + + self.path = path + data_files_path = glob(join(path, "*.dat")) + self.data_files = d = {} + for path in data_files_path: + d[path] = ScannedDataFile(path) + + def _get_list(self): + return self.data_files.values() + + def _replace_in_data_structure(self, data): + self.data_files[data.path] = data + + def __len__(self): + return len(self.data_files) + + +class RegionSet(DataSet): """Stores an arbitrary number of region files and the scan results. Inits with a list of region files. The regions dict is filled while scanning with ScannedRegionFiles and ScannedChunks.""" @@ -459,6 +506,12 @@ def __delitem__(self, key): def __len__(self): return len(self.regions) + def _get_list(self): + return self.regions.values() + + def _replace_in_data_structure(self, data): + self.regions[data.get_coords()] = data + def keys(self): return self.regions.keys() @@ -656,40 +709,28 @@ def __init__(self, world_path): try: self.level_data = nbt.NBTFile(level_dat_path)["Data"] self.name = self.level_data["LevelName"].value - self.scanned_level = ScannedDatFile(level_dat_path, + self.scanned_level = ScannedDataFile(level_dat_path, readable=True, status_text="OK") except Exception, e: self.name = None - self.scanned_level = ScannedDatFile(level_dat_path, + self.scanned_level = ScannedDataFile(level_dat_path, readable=False, status_text=e) else: self.level_file = None self.level_data = None self.name = None - self.scanned_level = ScannedDatFile(None, False, "The file doesn't exist") + self.scanned_level = ScannedDataFile(None, False, + "The file doesn't exist") # Player files - old_player_paths = glob(join(join(self.path, "players"), "*.dat")) - player_paths = glob(join(join(self.path, "playerdata"), "*.dat")) - self.players = {} - for path in player_paths: - filename = split(path)[1] - self.players[filename] = ScannedDatFile(path) - - # Player files before 1.7.6 - self.old_players = {} - for path in old_player_paths: - filename = split(path)[1] - self.old_players[filename] = ScannedDatFile(path) - - # Structures dat files - data_files_paths = glob(join(join(self.path, "data"), "*.dat")) - self.data_files = {} - for path in data_files_paths: - filename = split(path)[1] - self.data_files[filename] = ScannedDatFile(path) + PLAYERS_DIRECTORY = 'players' + OLD_PLAYERS_DIRECTORY = ' playerdata' + STRUCTURES_DIRECTORY = 'data' + self.players = DataFileSet(join(self.path, PLAYERS_DIRECTORY)) + self.old_players = DataFileSet(join(self.path, OLD_PLAYERS_DIRECTORY)) + self.data_files = DataFileSet(join(self.path, STRUCTURES_DIRECTORY)) # Does it look like a world folder? 
region_files = False @@ -982,8 +1023,8 @@ def generate_report(self, standalone): # Print all the player files with problems text += "\nUnreadable player files:\n" - broken_players = [p for p in self.players.values() if not p.readable] - broken_players.extend([p for p in self.old_players.values() if not p.readable]) + broken_players = [p for p in self.players._get_list() if not p.readable] + broken_players.extend([p for p in self.old_players._get_list() if not p.readable]) if broken_players: broken_player_files = [p.filename for p in broken_players] text += "\n".join(broken_player_files) @@ -993,7 +1034,7 @@ def generate_report(self, standalone): # Now all the data files text += "\nUnreadable data files:\n" - broken_data_files = [d for d in self.data_files.values() if not d.readable] + broken_data_files = [d for d in self.data_files._get_list() if not d.readable] if broken_data_files: broken_data_filenames = [p.filename for p in broken_data_files] text += "\n".join(broken_data_filenames) From 767094f1fab3ecb7e054c71ed908d99b0dd58600 Mon Sep 17 00:00:00 2001 From: Alejandro Aguilera Date: Tue, 16 Sep 2014 12:35:33 +0200 Subject: [PATCH 024/151] Add automatic bug reporter. --- regionfixer.py | 28 ++++++++++++++--- regionfixer_core/bug_reporter.py | 53 ++++++++++++++++++++++++++++++++ regionfixer_core/util.py | 37 ++++++++++++++++++++++ 3 files changed, 114 insertions(+), 4 deletions(-) create mode 100644 regionfixer_core/bug_reporter.py diff --git a/regionfixer.py b/regionfixer.py index 5ae4352..696750a 100644 --- a/regionfixer.py +++ b/regionfixer.py @@ -25,14 +25,19 @@ from optparse import OptionParser from getpass import getpass import sys +import traceback +import StringIO from regionfixer_core import world -from regionfixer_core.scan import console_scan_world, console_scan_regionset +from regionfixer_core.scan import console_scan_world, console_scan_regionset,\ + ChildProcessException from regionfixer_core.interactive import InteractiveLoop from regionfixer_core.util import entitle, is_bare_console, parse_paths,\ parse_backup_list from regionfixer_core import progressbar from regionfixer_core.version import version_string +from regionfixer_core.bug_reporter import BugReporter + class FractionWidget(progressbar.ProgressBarWidget): @@ -482,6 +487,21 @@ def main(): if __name__ == '__main__': - freeze_support() - value = main() - sys.exit(value) + ERROR_MSG = "\n\nOps! Something went really wrong and regionfixer crashed. I can try to send an automatic bug rerpot if you wish.\n\n" + try: + freeze_support() + value = main() + sys.exit(value) + except ChildProcessException as e: + print(ERROR_MSG) + bug = BugReporter(StringIO.StringIO(e.printable_traceback())) + bug.ask_and_send() + except Exception as e: + print(ERROR_MSG) + f = StringIO.StringIO("") + (ty, value, tb) = sys.exc_info() + f.write(str(ty) + "\n") + f.write(str(value) + "\n") + traceback.print_tb(tb, None, f) + bug = BugReporter(f) + bug.ask_and_send() diff --git a/regionfixer_core/bug_reporter.py b/regionfixer_core/bug_reporter.py new file mode 100644 index 0000000..3ba8a33 --- /dev/null +++ b/regionfixer_core/bug_reporter.py @@ -0,0 +1,53 @@ +''' +Created on 16/09/2014 + +@author: Alejandro +''' + +import ftplib +import datetime +from StringIO import StringIO +from util import query_yes_no + + +SERVER = 'regionfixer.no-ip.org' +USER = 'regionfixer_bugreporter' +PASSWORD = 'supersecretpassword' +BUGREPORTS_DIR = 'bugreports' +QUESTION_TEXT = 'Do you want to send an anonymous bug report to the region fixer ftp?' 
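Editor's note (illustrative sketch, not part of the patch): the class that follows uploads the report with ftplib from an in-memory StringIO file object. A minimal standalone sketch of that call sequence; upload_report and its host/credential arguments are placeholders.

import ftplib
import datetime
from StringIO import StringIO

def upload_report(report_text, host, user, password, directory='bugreports'):
    # Wrap the report in an in-memory file object; storlines() reads it
    # line by line and stores it under a timestamp-based name.
    report = StringIO(report_text)
    report.seek(0)
    ftp = ftplib.FTP(host, user, password)  # connects and logs in
    try:
        ftp.cwd(directory)
        ftp.storlines("STOR " + str(datetime.datetime.now()), report)
    finally:
        ftp.quit()

Error handling is left to the caller in this sketch; BugReporter.send() below instead traps the exception, so a failed upload cannot crash the crash handler itself.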
+ +class BugReporter(object): + ''' + Reports a bug to a ftp + ''' + + def __init__(self, error, server=SERVER, + user=USER, password=PASSWORD): + ''' + Constructor + ''' + assert(isinstance(error, StringIO)) + error.seek(0) + self.error_file_obj = error + self.server = server + self.user = user + self.password = password + + def ask_and_send(self): + if query_yes_no(QUESTION_TEXT): + self.send() + + def send(self): + try: + s = ftplib.FTP(self.server, self.user, + self.password) + + s.cwd(BUGREPORTS_DIR) + + error_name = str(datetime.datetime.now()) + + s.storlines("STOR " + error_name, self.error_file_obj) + s.quit() + print "Bug report uploaded successfully!" + except Exception as e: + print "Couldn't send the bug report!" diff --git a/regionfixer_core/util.py b/regionfixer_core/util.py index b040c88..946b1a0 100644 --- a/regionfixer_core/util.py +++ b/regionfixer_core/util.py @@ -23,8 +23,45 @@ import platform from os.path import join, split, exists, isfile +import sys import world + +# Stolen from: +# http://stackoverflow.com/questions/3041986/python-command-line-yes-no-input +def query_yes_no(question, default="yes"): + """Ask a yes/no question via raw_input() and return their answer. + + "question" is a string that is presented to the user. + "default" is the presumed answer if the user just hits . + It must be "yes" (the default), "no" or None (meaning + an answer is required of the user). + + The "answer" return value is one of "yes" or "no". + """ + valid = {"yes": True, "y": True, "ye": True, + "no": False, "n": False} + if default is None: + prompt = " [y/n] " + elif default == "yes": + prompt = " [Y/n] " + elif default == "no": + prompt = " [y/N] " + else: + raise ValueError("invalid default answer: '%s'" % default) + + while True: + sys.stdout.write(question + prompt) + choice = raw_input().lower() + if default is not None and choice == '': + return valid[default] + elif choice in valid: + return valid[choice] + else: + sys.stdout.write("Please respond with 'yes' or 'no' " + "(or 'y' or 'n').\n") + + # stolen from minecraft overviewer # https://github.com/overviewer/Minecraft-Overviewer/ def is_bare_console(): From 0b46de2ccc1252882330f77b4ed3fa71e3219d52 Mon Sep 17 00:00:00 2001 From: Alejandro Aguilera Date: Tue, 16 Sep 2014 12:48:04 +0200 Subject: [PATCH 025/151] Some more improvements. --- regionfixer.py | 8 ++++++-- regionfixer_core/bug_reporter.py | 2 ++ 2 files changed, 8 insertions(+), 2 deletions(-) diff --git a/regionfixer.py b/regionfixer.py index 696750a..f4b192d 100644 --- a/regionfixer.py +++ b/regionfixer.py @@ -495,7 +495,8 @@ def main(): except ChildProcessException as e: print(ERROR_MSG) bug = BugReporter(StringIO.StringIO(e.printable_traceback())) - bug.ask_and_send() + if not bug.ask_and_send(): + print e.printable_traceback() except Exception as e: print(ERROR_MSG) f = StringIO.StringIO("") @@ -504,4 +505,7 @@ def main(): f.write(str(value) + "\n") traceback.print_tb(tb, None, f) bug = BugReporter(f) - bug.ask_and_send() + if not bug.ask_and_send(): + print + print "Here it is the bug report:" + print f.getvalue() diff --git a/regionfixer_core/bug_reporter.py b/regionfixer_core/bug_reporter.py index 3ba8a33..14b376b 100644 --- a/regionfixer_core/bug_reporter.py +++ b/regionfixer_core/bug_reporter.py @@ -49,5 +49,7 @@ def send(self): s.storlines("STOR " + error_name, self.error_file_obj) s.quit() print "Bug report uploaded successfully!" + return True except Exception as e: print "Couldn't send the bug report!" 
+ return False From 294a07f2618e0ea40f0dc55ad4133ffc4f6e5730 Mon Sep 17 00:00:00 2001 From: Alejandro Aguilera Date: Tue, 16 Sep 2014 23:57:48 +0200 Subject: [PATCH 026/151] Improve and fix the automatic bug reporter. --- regionfixer.py | 23 ++++++++++++++++++----- regionfixer_core/bug_reporter.py | 12 ++++++------ regionfixer_core/interactive.py | 3 ++- regionfixer_core/scan.py | 10 +++++----- 4 files changed, 31 insertions(+), 17 deletions(-) diff --git a/regionfixer.py b/regionfixer.py index f4b192d..f86d951 100644 --- a/regionfixer.py +++ b/regionfixer.py @@ -275,6 +275,13 @@ def main(): (options, args) = parser.parse_args() o = options + if sys.version_info[0] > 2: + print() + print("Minecraft Region Fixer only works with python 2.x") + print("(And you just tried to run it in python {0})".format(sys.version)) + print() + return 1 + if is_bare_console(): print print "Minecraft Region Fixer is a command line aplication, if you want to run it" @@ -488,15 +495,20 @@ def main(): if __name__ == '__main__': ERROR_MSG = "\n\nOps! Something went really wrong and regionfixer crashed. I can try to send an automatic bug rerpot if you wish.\n\n" + QUESTION_TEXT = ('Do you want to send an anonymous bug report to the region fixer ftp?\n' + '(Answering no will print the bug report)') try: freeze_support() value = main() sys.exit(value) except ChildProcessException as e: print(ERROR_MSG) - bug = BugReporter(StringIO.StringIO(e.printable_traceback())) - if not bug.ask_and_send(): - print e.printable_traceback() + bug = BugReporter(StringIO.StringIO(e.printable_traceback)) + if not bug.ask_and_send(QUESTION_TEXT): + print + print "Bug report:" + print + print e.printable_traceback except Exception as e: print(ERROR_MSG) f = StringIO.StringIO("") @@ -505,7 +517,8 @@ def main(): f.write(str(value) + "\n") traceback.print_tb(tb, None, f) bug = BugReporter(f) - if not bug.ask_and_send(): + if not bug.ask_and_send(QUESTION_TEXT): + print + print "Bug report:" print - print "Here it is the bug report:" print f.getvalue() diff --git a/regionfixer_core/bug_reporter.py b/regionfixer_core/bug_reporter.py index 14b376b..f83b63b 100644 --- a/regionfixer_core/bug_reporter.py +++ b/regionfixer_core/bug_reporter.py @@ -10,15 +10,15 @@ from util import query_yes_no -SERVER = 'regionfixer.no-ip.org' +SERVER = '192.168.1.3' USER = 'regionfixer_bugreporter' PASSWORD = 'supersecretpassword' BUGREPORTS_DIR = 'bugreports' -QUESTION_TEXT = 'Do you want to send an anonymous bug report to the region fixer ftp?' + class BugReporter(object): ''' - Reports a bug to a ftp + Reports a bug to the regionfixer ftp ''' def __init__(self, error, server=SERVER, @@ -33,9 +33,9 @@ def __init__(self, error, server=SERVER, self.user = user self.password = password - def ask_and_send(self): - if query_yes_no(QUESTION_TEXT): - self.send() + def ask_and_send(self, question_text): + if query_yes_no(question_text): + return self.send() def send(self): try: diff --git a/regionfixer_core/interactive.py b/regionfixer_core/interactive.py index c91b87b..5a7a23f 100644 --- a/regionfixer_core/interactive.py +++ b/regionfixer_core/interactive.py @@ -209,7 +209,8 @@ def do_scan(self, arg): elif isinstance(self.current, world.RegionSet): print "\n{0:-^60}".format(' Scanning region files ') console_scan_regionset(self.current, o.processes, - o.entity_limit, o.delete_entities) + o.entity_limit, o.delete_entities, + o.verbose) else: print "No world set! 
Use \'set workload\'" diff --git a/regionfixer_core/scan.py b/regionfixer_core/scan.py index 2d7e993..a3b4bf9 100644 --- a/regionfixer_core/scan.py +++ b/regionfixer_core/scan.py @@ -477,11 +477,11 @@ def console_scan_loop(scanners, scan_titles, verbose): if not verbose: pbar.finish() except ChildProcessException as e: - print "\n\nSomething went really wrong scanning a file." - print ("This is probably a bug! If you have the time, please report " - "it to the region-fixer github or in the region fixer post " - "in minecraft forums") - print e.printable_traceback +# print "\n\nSomething went really wrong scanning a file." +# print ("This is probably a bug! If you have the time, please report " +# "it to the region-fixer github or in the region fixer post " +# "in minecraft forums") +# print e.printable_traceback raise e From 199fc888df1d0717653897cd04a1b295a4061320 Mon Sep 17 00:00:00 2001 From: Alejandro Aguilera Date: Thu, 18 Sep 2014 23:43:41 +0200 Subject: [PATCH 027/151] Fixed not reporting tme problems. Changed print statements to use (). --- regionfixer.py | 128 ++++++++++++++++++++++----------------- regionfixer_core/scan.py | 105 +++++++++++++++++++++++++------- 2 files changed, 155 insertions(+), 78 deletions(-) diff --git a/regionfixer.py b/regionfixer.py index f86d951..aef6a89 100644 --- a/regionfixer.py +++ b/regionfixer.py @@ -26,7 +26,7 @@ from getpass import getpass import sys import traceback -import StringIO +from StringIO import StringIO from regionfixer_core import world from regionfixer_core.scan import console_scan_world, console_scan_regionset,\ @@ -53,7 +53,7 @@ def delete_bad_chunks(options, scanned_obj): """ Takes a scanned object (world object or regionset object) and the options given to region-fixer, it deletes all the chunks with problems iterating through all the possible problems. """ - print + print("") # In the same order as in CHUNK_PROBLEMS options_delete = [options.delete_corrupted, options.delete_wrong_located, @@ -66,19 +66,19 @@ def delete_bad_chunks(options, scanned_obj): if delete: if total: text = ' Deleting chunks with status: {0} '.format(status) - print "\n{0:#^60}".format(text) + print("\n{0:#^60}".format(text)) counter = scanned_obj.remove_problematic_chunks(problem) - print "\nDeleted {0} chunks with status: {1}".format(counter, - status) + print("\nDeleted {0} chunks with status: {1}".format(counter, + status)) else: - print "No chunks to delete with status: {0}".format(status) + print("No chunks to delete with status: {0}".format(status)) def delete_bad_regions(options, scanned_obj): """ Takes an scanned object (world object or regionset object) and the options give to region-fixer, it deletes all the region files with problems iterating through all the possible problems. 
""" - print + print("") options_delete = [options.delete_too_small] deleting = zip(options_delete, world.REGION_PROBLEMS) for delete, problem in deleting: @@ -87,12 +87,12 @@ def delete_bad_regions(options, scanned_obj): if delete: if total: text = ' Deleting regions with status: {0} '.format(status) - print "{0:#^60}".format(text) + print("{0:#^60}".format(text)) counter = scanned_obj.remove_problematic_regions(problem) - print "Deleted {0} regions with status: {1}".format(counter, - status) + print("Deleted {0} regions with status: {1}".format(counter, + status)) else: - print "No regions to delete with status: {0}".format(status) + print("No regions to delete with status: {0}".format(status)) def main(): @@ -276,17 +276,20 @@ def main(): o = options if sys.version_info[0] > 2: - print() + print("") print("Minecraft Region Fixer only works with python 2.x") print("(And you just tried to run it in python {0})".format(sys.version)) - print() + print("") return 1 if is_bare_console(): - print - print "Minecraft Region Fixer is a command line aplication, if you want to run it" - print "you need to open a command line (cmd.exe in the start menu in windows 7)." - print + print("") + print("Minecraft Region Fixer hast a command line aplication and a GUI\n" + "(Graphic User Interface) and you have just double clicked the\n" + "command line interface. If you really want to run the command line\n" + "interface you have to use a command prompt (cmd.exe)\n\n" + "You can also run the gui, double click regionfixer_gui.py instead!") + print("") getpass("Press enter to continue:") return 1 @@ -355,8 +358,8 @@ def main(): if o.entity_limit < 0: error("The entity limit must be at least 0!") - print "\nWelcome to Region Fixer!" - print "(version: {0})".format(parser.version) + print("\nWelcome to Region Fixer!") + print("(version: {0})".format(parser.version)) # Do things with the option options args # Create a list of worlds containing the backups of the region files @@ -378,7 +381,7 @@ def main(): if len(regionset.regions) > 0: console_scan_regionset(regionset, o.processes, o.entity_limit, o.delete_entities, o.verbose) - print regionset.generate_report(True) + print(regionset.generate_report(True)) # Delete chunks delete_bad_chunks(options, regionset) @@ -400,20 +403,20 @@ def main(): # scan all the world folders for w in world_list: w_name = w.get_name() - print entitle(' Scanning world: {0} '.format(w_name), 0) + print(entitle(' Scanning world: {0} '.format(w_name), 0)) console_scan_world(w, o.processes, o.entity_limit, o.delete_entities, o.verbose) - print - print entitle('Scan results for: {0}'.format(w_name), 0) - print w.generate_report(True) + print("") + print(entitle('Scan results for: {0}'.format(w_name), 0)) + print(w.generate_report(True)) # corrupted, wrong_located, entities_prob, shared_prob,\ # total_chunks, too_small_region, unreadable_region, total_regions\ # = w.generate_report(standalone = False) - print + print("") # Replace chunks if backup_worlds and not len(world_list) > 1: del_ent = options.delete_entities @@ -428,18 +431,18 @@ def main(): total = w.count_chunks(problem) if total: text = " Replacing chunks with status: {0} ".format(status) - print "{0:#^60}".format(text) + print("{0:#^60}".format(text)) fixed = w.replace_problematic_chunks(backup_worlds, problem, ent_lim, del_ent) - print "\n{0} replaced of a total of {1} chunks with status: {2}".format(fixed, total, status) + print("\n{0} replaced of a total of {1} chunks with status: {2}".format(fixed, total, status)) else: - 
print "No chunks to replace with status: {0}".format(status) + print("No chunks to replace with status: {0}".format(status)) elif any_chunk_replace_option and not backup_worlds: - print "Info: Won't replace any chunk." - print "No backup worlds found, won't replace any chunks/region files!" + print("Info: Won't replace any chunk.") + print("No backup worlds found, won't replace any chunks/region files!") elif any_chunk_replace_option and backup_worlds and len(world_list) > 1: - print "Info: Won't replace any chunk." - print "Can't use the replace options while scanning more than one world!" + print("Info: Won't replace any chunk.") + print("Can't use the replace options while scanning more than one world!") # replace region files if backup_worlds and not len(world_list) > 1: @@ -452,19 +455,19 @@ def main(): total = w.count_regions(problem) if total: text = " Replacing regions with status: {0} ".format(status) - print "{0:#^60}".format(text) + print("{0:#^60}".format(text)) fixed = w.replace_problematic_regions(backup_worlds, problem, ent_lim, del_ent) - print "\n{0} replaced of a total of {1} regions with status: {2}".format(fixed, total, status) + print("\n{0} replaced of a total of {1} regions with status: {2}".format(fixed, total, status)) else: - print "No region to replace with status: {0}".format(status) + print("No region to replace with status: {0}".format(status)) elif any_region_replace_option and not backup_worlds: - print "Info: Won't replace any regions." - print "No valid backup worlds found, won't replace any chunks/region files!" - print "Note: You probably inserted some backup worlds with the backup option but they are probably no valid worlds, the most common issue is wrong path." + print("Info: Won't replace any regions.") + print("No valid backup worlds found, won't replace any chunks/region files!") + print("Note: You probably inserted some backup worlds with the backup option but they are probably no valid worlds, the most common issue is wrong path.") elif any_region_replace_option and backup_worlds and len(world_list) > 1: - print "Info: Won't replace any regions." - print "Can't use the replace options while scanning more than one world!" + print("Info: Won't replace any regions.") + print("Can't use the replace options while scanning more than one world!") # delete chunks delete_bad_chunks(options, w) @@ -478,17 +481,17 @@ def main(): # verbose log text if options.summary == '-': - print "\nPrinting log:\n" - print summary_text + print("\nPrinting log:\n") + print(summary_text) elif options.summary != None: try: f = open(options.summary, 'w') f.write(summary_text) f.write('\n') f.close() - print "Log file saved in \'{0}\'.".format(options.summary) + print("Log file saved in \'{0}\'.".format(options.summary)) except: - print "Something went wrong while saving the log file!" + print("Something went wrong while saving the log file!") return 0 @@ -497,28 +500,39 @@ def main(): ERROR_MSG = "\n\nOps! Something went really wrong and regionfixer crashed. 
I can try to send an automatic bug rerpot if you wish.\n\n" QUESTION_TEXT = ('Do you want to send an anonymous bug report to the region fixer ftp?\n' '(Answering no will print the bug report)') + had_exception = False + auto_reported = False + try: freeze_support() value = main() sys.exit(value) + except ChildProcessException as e: + had_exception = True print(ERROR_MSG) - bug = BugReporter(StringIO.StringIO(e.printable_traceback)) - if not bug.ask_and_send(QUESTION_TEXT): - print - print "Bug report:" - print - print e.printable_traceback + + bug_report = e.printable_traceback + bug_sender = BugReporter(StringIO(bug_report)) + auto_reported = bug_sender.ask_and_send(QUESTION_TEXT) + except Exception as e: + had_exception = True print(ERROR_MSG) - f = StringIO.StringIO("") + + f = StringIO("") (ty, value, tb) = sys.exc_info() f.write(str(ty) + "\n") f.write(str(value) + "\n") traceback.print_tb(tb, None, f) - bug = BugReporter(f) - if not bug.ask_and_send(QUESTION_TEXT): - print - print "Bug report:" - print - print f.getvalue() + + bug_sender = BugReporter(f) + auto_reported = bug_sender.ask_and_send(QUESTION_TEXT) + bug_report = f.getvalue() + + finally: + if had_exception and not auto_reported: + print("") + print("Bug report:") + print("") + print(bug_report) diff --git a/regionfixer_core/scan.py b/regionfixer_core/scan.py index a3b4bf9..5b62cda 100644 --- a/regionfixer_core/scan.py +++ b/regionfixer_core/scan.py @@ -49,7 +49,7 @@ TUPLE_STATUS = 1 -# logging.basicConfig(filename='scan.log', level=logging.DEBUG) +logging.basicConfig(filename=None, level=logging.CRITICAL) class ChildProcessException(Exception): @@ -161,8 +161,8 @@ def _mp_regionset_pool_init(d): assert('remove_entities' in d) multiprocess_scan_regionfile.regionset = d['regionset'] multiprocess_scan_regionfile.q = d['queue'] - multiprocess_scan_regionfile.entity_limit = ['entity_limit'] - multiprocess_scan_regionfile.remove_entities = ['remove_entities'] + multiprocess_scan_regionfile.entity_limit = d['entity_limit'] + multiprocess_scan_regionfile.remove_entities = d['remove_entities'] class AsyncScanner(object): @@ -170,6 +170,8 @@ class AsyncScanner(object): To implement a scanner you have to override: update_str_last_scanned() + Use try-finally to call terminate, if not processes will be + hanging in the background """ def __init__(self, data_structure, processes, scan_function, init_args, _mp_init_function): @@ -198,13 +200,26 @@ def __init__(self, data_structure, processes, scan_function, init_args, # TODO: make this automatic amount # Recommended time to sleep between polls for results - self.SCAN_WAIT_TIME = 0.001 + self.SCAN_START_SLEEP_TIME = 0.001 + self.SCAN_MIN_SLEEP_TIME = 1e-6 + self.SCAN_MAX_SLEEP_TIME = 0.1 + self.scan_sleep_time = self.SCAN_START_SLEEP_TIME + self.queries_without_results = 0 + self.last_time = time() + self.MIN_QUERY_NUM = 1 + self.MAX_QUERY_NUM = 5 # Holds a friendly string with the name of the last file scanned self._str_last_scanned = None def scan(self): """ Launch the child processes and scan all the files. 
""" + + logging.debug("########################################################") + logging.debug("########################################################") + logging.debug("Starting scan in: " + str(self)) + logging.debug("########################################################") + logging.debug("########################################################") total_files = len(self.data_structure) self._results = self.pool.map_async(self.scan_function, self.list_files_to_scan, 5) @@ -227,8 +242,12 @@ def get_last_result(self): # Copy it to the father process ds._replace_in_data_structure(d) self.update_str_last_scanned(d) + # Got result! Reset it! + self.queries_without_results = 0 return d else: + # Count amount of queries without result + self.queries_without_results += 1 return None def terminate(self): @@ -246,6 +265,38 @@ def update_str_last_scanned(self): """ Updates the string that represents the last file scanned. """ raise NotImplemented + def sleep(self): + """ Sleep waiting for results. + + This method will sleep less when results arrive faster and + more when they arrive slower. + """ + # If the query number is outside of our range... + if not ((self.queries_without_results < self.MAX_QUERY_NUM) & + (self.queries_without_results > self.MIN_QUERY_NUM)): + # ... increase or decrease it to optimize queries + if (self.queries_without_results < self.MIN_QUERY_NUM): + self.scan_sleep_time *= 0.5 + elif (self.queries_without_results > self.MAX_QUERY_NUM): + self.scan_sleep_time *= 2.0 + # and don't go farther than max/min + if self.scan_sleep_time > self.SCAN_MAX_SLEEP_TIME: + logging.debug("Setting sleep time to MAX") + self.scan_sleep_time = self.SCAN_MAX_SLEEP_TIME + elif self.scan_sleep_time < self.SCAN_MIN_SLEEP_TIME: + logging.debug("Setting sleep time to MIN") + self.scan_sleep_time = self.SCAN_MIN_SLEEP_TIME + + # Log how it's going + logging.debug("") + logging.debug("Nº of queries without result: " + str(self.queries_without_results)) + logging.debug("Current sleep time: " + str(self.scan_sleep_time)) + logging.debug("Time between calls to sleep(): " + str(time() - self.last_time)) + self.last_time = time() + + # Sleep, let the other processes do their job + sleep(self.scan_sleep_time) + @property def str_last_scanned(self): """ A friendly string with last scanned thing. """ @@ -348,6 +399,14 @@ def __init__(self, world_obj, processes, entity_limit, # Holds a friendly string with the name of the last file scanned self.scan_wait_time = 0.001 + def sleep(self): + """ Sleep waiting for results. + + This method will sleep less when results arrive faster and + more when they arrive slower. + """ + self._current_regionset.sleep() + def scan(self): """ Scan and fill the given regionset. """ cr = AsyncRegionsetScanner(self.regionsets.pop(0), @@ -371,9 +430,8 @@ def get_last_result(self): process. 
""" cr = self._current_regionset - logging.debug("AsyncWorldScanner: current_regionset {0}".format(cr)) + if cr is not None: - logging.debug("AsyncWorldScanner: cr.finished {0}".format(cr.finished)) if not cr.finished: r = cr.get_last_result() self._str_last_scanned = cr.str_last_scanned @@ -461,21 +519,26 @@ def console_scan_loop(scanners, scan_titles, verbose): if not verbose: pbar = progressbar.ProgressBar(widgets=widgets, maxval=total) - scanner.scan() - counter = 0 - while not scanner.finished: - sleep(scanner.scan_wait_time) - result = scanner.get_last_result() - if result: - counter += 1 - if not verbose: - pbar.update(counter) - else: - status = "(" + result.oneliner_status + ")" - fn = result.filename - print "Scanned {0: <12} {1:.<43} {2}/{3}".format(fn, status, counter, total) - if not verbose: - pbar.finish() + try: + scanner.scan() + counter = 0 + while not scanner.finished: + scanner.sleep() + result = scanner.get_last_result() + if result: + counter += 1 + if not verbose: + pbar.update(counter) + else: + status = "(" + result.oneliner_status + ")" + fn = result.filename + print "Scanned {0: <12} {1:.<43} {2}/{3}".format(fn, status, counter, total) + if not verbose: + pbar.finish() + except KeyboardInterrupt as e: + # If not, dead processes will accumulate in windows + scanner.terminate() + raise e except ChildProcessException as e: # print "\n\nSomething went really wrong scanning a file." # print ("This is probably a bug! If you have the time, please report " From e51e7af4aa1fc3fc9c9f2a2964acdc3106d62628 Mon Sep 17 00:00:00 2001 From: Alejandro Aguilera Date: Fri, 19 Sep 2014 01:23:13 +0200 Subject: [PATCH 028/151] Improve bug_reporter. --- gui/main.py | 5 ++-- gui/starter.py | 41 ++++++++++++++++++++++++++++++- regionfixer.py | 21 +++++++--------- regionfixer_core/bug_reporter.py | 42 +++++++++++++++++++++++++++----- 4 files changed, 88 insertions(+), 21 deletions(-) diff --git a/gui/main.py b/gui/main.py index 52b0962..487c8c0 100644 --- a/gui/main.py +++ b/gui/main.py @@ -7,7 +7,7 @@ from os import name as os_name from backups import BackupsWindow -from regionfixer_core.scan import AsyncWorldScanner, AsyncDataScanner,\ +from regionfixer_core.scan import AsyncWorldRegionScanner, AsyncDataScanner,\ ChildProcessException from regionfixer_core import world from regionfixer_core.world import World @@ -164,6 +164,7 @@ def OnHelp(self, e): self.help.Show(True) def OnOpen(self, e): + raise KeyError dlg = wx.DirDialog(self, "Choose a Minecraf world folder") # Set the last path used dlg.SetPath(self.last_path) @@ -196,7 +197,7 @@ def OnScan(self, e): ps = AsyncDataScanner(self.world.players, processes) ops = AsyncDataScanner(self.world.old_players, processes) ds = AsyncDataScanner(self.world.data_files, processes) - ws = AsyncWorldScanner(self.world, processes, entity_limit, + ws = AsyncWorldRegionScanner(self.world, processes, entity_limit, delete_entities) things_to_scan = [ws, ops, ps, ds] diff --git a/gui/starter.py b/gui/starter.py index fa5630d..af8efdf 100644 --- a/gui/starter.py +++ b/gui/starter.py @@ -2,18 +2,54 @@ # -*- coding: utf-8 -*- import wx +import sys +import traceback from main import MainWindow from backups import BackupsWindow from about import AboutWindow from help import HelpWindow +from error import ErrorWindow + +from regionfixer_core.scan import ChildProcessException + + +ERROR_MSG = "\n\nOps! 
Something went really wrong and regionfixer crashed.\n\nI can try to send an automatic bug rerpot if you wish.\n" +QUESTION_TEXT = ('Do you want to send an anonymous bug report to the region fixer ftp?\n' + '(Answering no will print the bug report)') + +# Thanks to: +# http://wxpython-users.1045709.n5.nabble.com/Exception-handling-strategies-td2369185.html +# For a way to handle exceptions +class MyApp(wx.App): + def OnInit(self): + sys.excepthook = self._excepthook + return True + + def _excepthook(self, etype, value, tb): + if type is ChildProcessException: + print("OMG! A BUG! A BUGGGGGGGG!") + traceback.print_tb(tb) + dlg = wx.MessageDialog(self.main_window, + ERROR_MSG + "\n" + QUESTION_TEXT, + style=wx.ICON_ERROR | wx.YES_NO) + dlg.ShowModal() + # application error - display a wx.MessageBox with the error message + else: + print("OMG! A BUG! A BUGGGGGGGG!") + traceback.print_tb(tb) + dlg = wx.MessageDialog(self.main_window, + ERROR_MSG + "\n" + QUESTION_TEXT, + style=wx.ICON_ERROR | wx.YES_NO) + dlg.ShowModal() + # bug - display a dialog with the entire exception and traceback printed out class Starter(object): def __init__(self): """ Create the windows and set some variables. """ - self.app = wx.App(False) + self.app = MyApp(False) self.frame = MainWindow(None, "Region-Fixer-GUI") # NOTE: It's very important that the MainWindow is parent of all others windows @@ -22,6 +58,9 @@ def __init__(self): self.frame.backups = self.backups self.frame.about = self.about self.frame.help = HelpWindow(self.frame, "Help") + self.frame.error = ErrorWindow(self.frame, "Error") + + self.app.main_window = self.frame def run(self): """ Run the app main loop. """ diff --git a/regionfixer.py b/regionfixer.py index aef6a89..9a4bc91 100644 --- a/regionfixer.py +++ b/regionfixer.py @@ -504,6 +504,7 @@ def main(): auto_reported = False try: + raise KeyError freeze_support() value = main() sys.exit(value) @@ -511,28 +512,24 @@ def main(): except ChildProcessException as e: had_exception = True print(ERROR_MSG) - - bug_report = e.printable_traceback - bug_sender = BugReporter(StringIO(bug_report)) + bug_sender = BugReporter(e.printable_traceback) auto_reported = bug_sender.ask_and_send(QUESTION_TEXT) + bug_report = bug_sender.error_str except Exception as e: had_exception = True print(ERROR_MSG) - - f = StringIO("") - (ty, value, tb) = sys.exc_info() - f.write(str(ty) + "\n") - f.write(str(value) + "\n") - traceback.print_tb(tb, None, f) - - bug_sender = BugReporter(f) + bug_sender = BugReporter() auto_reported = bug_sender.ask_and_send(QUESTION_TEXT) - bug_report = f.getvalue() + bug_report = bug_sender.error_str finally: if had_exception and not auto_reported: + print("Couldn't upload the bug report. 
While uploading I encounter the next problem:") + print(bug_sender.exception) print("") print("Bug report:") print("") print(bug_report) + else: + print("Bug report uploaded successfully") diff --git a/regionfixer_core/bug_reporter.py b/regionfixer_core/bug_reporter.py index f83b63b..fcecf05 100644 --- a/regionfixer_core/bug_reporter.py +++ b/regionfixer_core/bug_reporter.py @@ -4,6 +4,8 @@ @author: Alejandro ''' +import traceback +import sys import ftplib import datetime from StringIO import StringIO @@ -21,17 +23,46 @@ class BugReporter(object): Reports a bug to the regionfixer ftp ''' - def __init__(self, error, server=SERVER, + def __init__(self, error_str=None, server=SERVER, user=USER, password=PASSWORD): ''' Constructor ''' - assert(isinstance(error, StringIO)) - error.seek(0) - self.error_file_obj = error + if error_str: + self.error_file_obj = self._get_fileobj_from_str(error_str) + else: + (ty, value, tb) = sys.exc_info() + self.error_file_obj = self._get_fileobj_from_tb(ty, value, tb) self.server = server self.user = user self.password = password + + self._exception = None + + def _get_fileobj_from_tb(self, ty, value, tb): + f = StringIO("") + f.write(str(ty) + "\n") + f.write(str(value) + "\n") + traceback.print_tb(tb, None, f) + f.seek(0) + return f + + def _get_fileobj_from_str(self, error_str): + bug_report = str + f = StringIO(bug_report) + f.seek(0) + return f + + @property + def error_str(self): + self.error_file_obj.seek(0) + s = self.error_file_obj.read() + self.error_file_obj.seek(0) + return s + + #property + def exception(self): + return self._exception def ask_and_send(self, question_text): if query_yes_no(question_text): @@ -48,8 +79,7 @@ def send(self): s.storlines("STOR " + error_name, self.error_file_obj) s.quit() - print "Bug report uploaded successfully!" return True except Exception as e: - print "Couldn't send the bug report!" + self.exception = e return False From 39bb7264afa961ca62eee9a7a407d00547920184 Mon Sep 17 00:00:00 2001 From: Alejandro Aguilera Date: Fri, 19 Sep 2014 01:31:36 +0200 Subject: [PATCH 029/151] Fix bug in bug-reporter. --- regionfixer.py | 8 ++------ regionfixer_core/bug_reporter.py | 11 +++++++---- 2 files changed, 9 insertions(+), 10 deletions(-) diff --git a/regionfixer.py b/regionfixer.py index 9a4bc91..5bccddc 100644 --- a/regionfixer.py +++ b/regionfixer.py @@ -25,8 +25,6 @@ from optparse import OptionParser from getpass import getpass import sys -import traceback -from StringIO import StringIO from regionfixer_core import world from regionfixer_core.scan import console_scan_world, console_scan_regionset,\ @@ -504,7 +502,6 @@ def main(): auto_reported = False try: - raise KeyError freeze_support() value = main() sys.exit(value) @@ -519,17 +516,16 @@ def main(): except Exception as e: had_exception = True print(ERROR_MSG) + # Traceback will be taken in init bug_sender = BugReporter() auto_reported = bug_sender.ask_and_send(QUESTION_TEXT) bug_report = bug_sender.error_str finally: if had_exception and not auto_reported: - print("Couldn't upload the bug report. 
While uploading I encounter the next problem:") - print(bug_sender.exception) print("") print("Bug report:") print("") print(bug_report) - else: + elif had_exception and auto_reported: print("Bug report uploaded successfully") diff --git a/regionfixer_core/bug_reporter.py b/regionfixer_core/bug_reporter.py index fcecf05..15402a5 100644 --- a/regionfixer_core/bug_reporter.py +++ b/regionfixer_core/bug_reporter.py @@ -60,9 +60,9 @@ def error_str(self): self.error_file_obj.seek(0) return s - #property - def exception(self): - return self._exception + @property + def exception_str(self): + return self._exception.message def ask_and_send(self, question_text): if query_yes_no(question_text): @@ -81,5 +81,8 @@ def send(self): s.quit() return True except Exception as e: - self.exception = e + # TODO: prints shouldn't be here! + print("Couldn't send the bug report!") + self._exception = e + print(e) return False From 0a3dd62f63d06d73bb9a7b8ee93c57fc22d5950a Mon Sep 17 00:00:00 2001 From: Alejandro Aguilera Date: Thu, 25 Sep 2014 10:25:47 +0200 Subject: [PATCH 030/151] Gui now also send bugreports to ftp. --- gui/main.py | 38 +++++++++++++++++--------- gui/starter.py | 46 ++++++++++++++++++++++---------- regionfixer_core/bug_reporter.py | 31 ++++++++++++++------- regionfixer_core/scan.py | 3 +-- regionfixer_core/util.py | 9 +++++++ 5 files changed, 88 insertions(+), 39 deletions(-) diff --git a/gui/main.py b/gui/main.py index 487c8c0..8c302b9 100644 --- a/gui/main.py +++ b/gui/main.py @@ -164,7 +164,7 @@ def OnHelp(self, e): self.help.Show(True) def OnOpen(self, e): - raise KeyError + """ Called when the open world button is pressed. """ dlg = wx.DirDialog(self, "Choose a Minecraf world folder") # Set the last path used dlg.SetPath(self.last_path) @@ -190,6 +190,7 @@ def OnOpen(self, e): def OnScan(self, e): + """ Called when the scan button is pressed. """ processes = int(self.proc_text.GetValue()) entity_limit = int(self.el_text.GetValue()) delete_entities = False @@ -222,7 +223,7 @@ def OnScan(self, e): while not scanner.finished: sleep(0.001) result = scanner.get_last_result() - + if result: counter += 1 not_cancelled, not_skipped = progressdlg.Update(counter, @@ -241,19 +242,25 @@ def OnScan(self, e): self.update_delete_buttons_status(True) self.update_replace_buttons_status(True) except ChildProcessException as e: - error_log_path = e.save_error_log() - filename = e.scanned_file.filename + # Will be handled in starter.py by _excepthook() scanner.terminate() progressdlg.Destroy() - error = wx.MessageDialog(self, - ("Something went really wrong scanning {0}\n\n" - "This is probably an error in the code. Please, " - "if you have the time report it. " - "I have saved all the error information in:\n\n" - "{1}").format(filename, error_log_path), - "Error", - wx.ICON_ERROR) - error.ShowModal() + raise e + #=================================================================== + # error_log_path = e.save_error_log() + # filename = e.scanned_file.filename + # scanner.terminate() + # progressdlg.Destroy() + # error = wx.MessageDialog(self, + # ("Something went really wrong scanning {0}\n\n" + # "This is probably an error in the code. Please, " + # "if you have the time report it. 
" + # "I have saved all the error information in:\n\n" + # "{1}").format(filename, error_log_path), + # "Error", + # wx.ICON_ERROR) + # error.ShowModal() + #=================================================================== def OnDeleteChunks(self, e): progressdlg = wx.ProgressDialog("Removing chunks", "This may take a while", @@ -275,6 +282,8 @@ def OnDeleteChunks(self, e): progressdlg.Destroy() progressdlg.Destroy() + self.results_text.SetValue("Scan again the world for results.") + self.update_delete_buttons_status(False) self.update_delete_buttons_status(False) def OnDeleteRegions(self, e): @@ -294,6 +303,7 @@ def OnDeleteRegions(self, e): remove_regions(problem) progressdlg.Destroy() + self.results_text.SetValue("Scan again the world for results.") self.update_delete_buttons_status(False) self.update_replace_buttons_status(False) @@ -319,6 +329,7 @@ def OnReplaceChunks(self, e): replace_chunks(backups, problem, entity_limit, delete_entities) progressdlg.Destroy() + self.results_text.SetValue("Scan again the world for results.") self.update_delete_buttons_status(False) self.update_replace_buttons_status(False) @@ -345,6 +356,7 @@ def OnReplaceRegions(self, e): replace_regions(backups, problem, entity_limit, delete_entities) progressdlg.Destroy() + self.results_text.SetValue("Scan again the world for results.") self.update_delete_buttons_status(False) self.update_replace_buttons_status(False) diff --git a/gui/starter.py b/gui/starter.py index af8efdf..e80bf45 100644 --- a/gui/starter.py +++ b/gui/starter.py @@ -4,6 +4,7 @@ import wx import sys import traceback +from StringIO import StringIO from main import MainWindow from backups import BackupsWindow @@ -12,7 +13,8 @@ from error import ErrorWindow from regionfixer_core.scan import ChildProcessException - +from regionfixer_core.bug_reporter import BugReporter +from regionfixer_core.util import get_str_from_traceback ERROR_MSG = "\n\nOps! Something went really wrong and regionfixer crashed.\n\nI can try to send an automatic bug rerpot if you wish.\n" QUESTION_TEXT = ('Do you want to send an anonymous bug report to the region fixer ftp?\n' @@ -27,22 +29,38 @@ def OnInit(self): return True def _excepthook(self, etype, value, tb): - if type is ChildProcessException: - print("OMG! A BUG! A BUGGGGGGGG!") - traceback.print_tb(tb) - dlg = wx.MessageDialog(self.main_window, - ERROR_MSG + "\n" + QUESTION_TEXT, - style=wx.ICON_ERROR | wx.YES_NO) + print etype + print value + print tb + if isinstance(etype, ChildProcessException): + s = "Using GUI:\n\n" + value.printable_traceback + else: + s = "Using GUI:\n\n" + get_str_from_traceback(etype, value, tb) + # bug - display a dialog with the entire exception and traceback printed out + traceback.print_tb(tb) + dlg = wx.MessageDialog(self.main_window, + ERROR_MSG + "\n" + QUESTION_TEXT, + style=wx.ICON_ERROR | wx.YES_NO) + # Get a string with the traceback and send it + + answer = dlg.ShowModal() + if answer == wx.ID_YES: + print "Sending bug report!" + bugsender = BugReporter(error_str=s) + success = bugsender.send() + # Dialog with success or not of the ftp uploading + if success: + msg = "The bug report was successfully uploaded." + style = 0 + else: + msg = "Couldn't upload the bug report!\n\nPlease, try again later." + style = wx.ICON_ERROR + dlg = wx.MessageDialog(self.main_window, msg, style=style) dlg.ShowModal() - # application error - display a wx.MessageBox with the error message else: - print("OMG! A BUG! 
A BUGGGGGGGG!") - traceback.print_tb(tb) - dlg = wx.MessageDialog(self.main_window, - ERROR_MSG + "\n" + QUESTION_TEXT, - style=wx.ICON_ERROR | wx.YES_NO) + dlg = wx.MessageDialog(self.main_window, "Error msg:\n\n" + s, + style=wx.ICON_ERROR) dlg.ShowModal() - # bug - display a dialog with the entire exception and traceback printed out class Starter(object): diff --git a/regionfixer_core/bug_reporter.py b/regionfixer_core/bug_reporter.py index 15402a5..9b4b906 100644 --- a/regionfixer_core/bug_reporter.py +++ b/regionfixer_core/bug_reporter.py @@ -4,12 +4,11 @@ @author: Alejandro ''' -import traceback import sys import ftplib import datetime from StringIO import StringIO -from util import query_yes_no +from util import query_yes_no, get_str_from_traceback SERVER = '192.168.1.3' @@ -20,7 +19,13 @@ class BugReporter(object): ''' - Reports a bug to the regionfixer ftp + Class to report bugs to region fixer ftp. + + You can init it without arguments and it will extract the traceback + directly from sys.exc_info(). The traceback will be formated and + uploaded as a text file. + Or you can init it using an error string (error_str). The string + will be uploaded as a text file. ''' def __init__(self, error_str=None, server=SERVER, @@ -36,25 +41,24 @@ def __init__(self, error_str=None, server=SERVER, self.server = server self.user = user self.password = password - + self._exception = None def _get_fileobj_from_tb(self, ty, value, tb): - f = StringIO("") - f.write(str(ty) + "\n") - f.write(str(value) + "\n") - traceback.print_tb(tb, None, f) + ''' Return a file obj from a traceback object. ''' + f = StringIO(get_str_from_traceback(ty, value, tb)) f.seek(0) return f def _get_fileobj_from_str(self, error_str): - bug_report = str - f = StringIO(bug_report) + ''' Return a file object from a string. ''' + f = StringIO(error_str) f.seek(0) return f @property def error_str(self): + ''' Return the string that is currently ready for upload. ''' self.error_file_obj.seek(0) s = self.error_file_obj.read() self.error_file_obj.seek(0) @@ -62,13 +66,20 @@ def error_str(self): @property def exception_str(self): + ''' Return the exception caused by uploading the file. ''' return self._exception.message def ask_and_send(self, question_text): + ''' Query the user yes/no to send the file and send it. ''' if query_yes_no(question_text): return self.send() def send(self): + ''' Send the file to the ftp. + + If an exception is thrown, you can retrieve it at + exception_str. + ''' try: s = ftplib.FTP(self.server, self.user, self.password) diff --git a/regionfixer_core/scan.py b/regionfixer_core/scan.py index 5b62cda..0b14675 100644 --- a/regionfixer_core/scan.py +++ b/regionfixer_core/scan.py @@ -619,7 +619,6 @@ def scan_data(scanned_dat_file): is not compressed, we handle the special case here. """ - s = scanned_dat_file try: if s.filename == 'idcounts.dat': @@ -655,7 +654,7 @@ def scan_region_file(scanned_regionfile_obj, entity_limit, delete_entities): entiti_limit is the threshold tof entities to conisder a chunk with too much entities problems. """ - + raise KeyError try: r = scanned_regionfile_obj # counters of problems diff --git a/regionfixer_core/util.py b/regionfixer_core/util.py index 946b1a0..ce48e8b 100644 --- a/regionfixer_core/util.py +++ b/regionfixer_core/util.py @@ -25,8 +25,17 @@ from os.path import join, split, exists, isfile import sys import world +import traceback +def get_str_from_traceback(ty, value, tb): + """ Return a string from a traceback + exception. 
""" + t = traceback.format_exception(ty, value, tb) + s = str(ty) + "\n" + for i in t: + s += i + return s + # Stolen from: # http://stackoverflow.com/questions/3041986/python-command-line-yes-no-input def query_yes_no(question, default="yes"): From 36747c5034581041fbac2e15180fc9550c7afcb6 Mon Sep 17 00:00:00 2001 From: Alejandro Aguilera Date: Thu, 2 Oct 2014 14:28:26 +0200 Subject: [PATCH 031/151] Remove testing raise. --- regionfixer_core/scan.py | 1 - 1 file changed, 1 deletion(-) diff --git a/regionfixer_core/scan.py b/regionfixer_core/scan.py index 0b14675..9d7edd5 100644 --- a/regionfixer_core/scan.py +++ b/regionfixer_core/scan.py @@ -654,7 +654,6 @@ def scan_region_file(scanned_regionfile_obj, entity_limit, delete_entities): entiti_limit is the threshold tof entities to conisder a chunk with too much entities problems. """ - raise KeyError try: r = scanned_regionfile_obj # counters of problems From 0c4031566e69b91a5f82ba34a39effc83ce56499 Mon Sep 17 00:00:00 2001 From: Alejandro Aguilera Date: Thu, 2 Oct 2014 14:31:51 +0200 Subject: [PATCH 032/151] Remove not used error window. --- gui/starter.py | 1 - 1 file changed, 1 deletion(-) diff --git a/gui/starter.py b/gui/starter.py index e80bf45..98d3870 100644 --- a/gui/starter.py +++ b/gui/starter.py @@ -10,7 +10,6 @@ from backups import BackupsWindow from about import AboutWindow from help import HelpWindow -from error import ErrorWindow from regionfixer_core.scan import ChildProcessException from regionfixer_core.bug_reporter import BugReporter From 012c0c9072d228d84d8fcdfa2a0ba5dd1346b7aa Mon Sep 17 00:00:00 2001 From: Alejandro Aguilera Date: Thu, 2 Oct 2014 14:34:22 +0200 Subject: [PATCH 033/151] Change server address. --- regionfixer_core/bug_reporter.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/regionfixer_core/bug_reporter.py b/regionfixer_core/bug_reporter.py index 9b4b906..ddbf8cb 100644 --- a/regionfixer_core/bug_reporter.py +++ b/regionfixer_core/bug_reporter.py @@ -11,7 +11,7 @@ from util import query_yes_no, get_str_from_traceback -SERVER = '192.168.1.3' +SERVER = 'regionfixer.no-ip.org' USER = 'regionfixer_bugreporter' PASSWORD = 'supersecretpassword' BUGREPORTS_DIR = 'bugreports' From f777c2a32bcd3a08739339db925b03937b0b6c3c Mon Sep 17 00:00:00 2001 From: Alejandro Aguilera Date: Thu, 25 Sep 2014 10:51:31 +0200 Subject: [PATCH 034/151] Added mandatory arg in help window. Set number of jobs pero worker to low value after testing in linux. 
--- gui/help.py | 3 ++- gui/starter.py | 5 +---- regionfixer_core/scan.py | 9 +++++++-- 3 files changed, 10 insertions(+), 7 deletions(-) diff --git a/gui/help.py b/gui/help.py index 096f486..e75db57 100644 --- a/gui/help.py +++ b/gui/help.py @@ -15,7 +15,8 @@ def __init__(self, parent, title="Help"): label="If you need help you can give a look to the wiki:") self.link_github = wx.HyperlinkCtrl(panel, wx.ID_ABOUT, "https://github.com/Fenixin/Minecraft-Region-Fixer/wiki", - style=wx.ALIGN_CENTER) + style=wx.ALIGN_CENTER, + url="https://github.com/Fenixin/Minecraft-Region-Fixer/wiki") self.help2 = wx.StaticText(panel, style=wx.TE_MULTILINE | wx.ALIGN_CENTER, label="Or ask in the minecraft forums:") diff --git a/gui/starter.py b/gui/starter.py index 98d3870..19cd5f5 100644 --- a/gui/starter.py +++ b/gui/starter.py @@ -28,9 +28,6 @@ def OnInit(self): return True def _excepthook(self, etype, value, tb): - print etype - print value - print tb if isinstance(etype, ChildProcessException): s = "Using GUI:\n\n" + value.printable_traceback else: @@ -75,7 +72,7 @@ def __init__(self): self.frame.backups = self.backups self.frame.about = self.about self.frame.help = HelpWindow(self.frame, "Help") - self.frame.error = ErrorWindow(self.frame, "Error") +# self.frame.error = ErrorWindow(self.frame, "Error") self.app.main_window = self.frame diff --git a/regionfixer_core/scan.py b/regionfixer_core/scan.py index 9d7edd5..8356b0e 100644 --- a/regionfixer_core/scan.py +++ b/regionfixer_core/scan.py @@ -221,9 +221,14 @@ def scan(self): logging.debug("########################################################") logging.debug("########################################################") total_files = len(self.data_structure) + # Tests indicate that smaller amount of jobs per worker make all type + # of scans faster + jobs_per_worker = 5 + #jobs_per_worker = max(1, total_files // self.processes self._results = self.pool.map_async(self.scan_function, - self.list_files_to_scan, 5) - # max(1, total_files // self.processes)) + self.list_files_to_scan, + jobs_per_worker) + # No more tasks to the pool, exit the processes once the tasks are done self.pool.close() From 9ca4f5d35f07c83a5a4536da739d2ccff05a7ecf Mon Sep 17 00:00:00 2001 From: Alejandro Aguilera Date: Thu, 25 Sep 2014 10:55:48 +0200 Subject: [PATCH 035/151] Add icon file. 
--- icon.ico | Bin 0 -> 1013990 bytes 1 file changed, 0 insertions(+), 0 deletions(-) create mode 100644 icon.ico diff --git a/icon.ico b/icon.ico new file mode 100644 index 0000000000000000000000000000000000000000..bdbc714fec4a664ddf4b1306534c2da5155c23f0 GIT binary patch literal 1013990 zcmeI5d9W4Lz3-1Eb#p6~N?xT>snq==q*6)U+)AqMtNUJJhyxn+dd={1bB!^^7$e9G zGDG6jpny0aQ3RZr2LZ*I$l>5QARx001{DY=IW1pO!Jgr=`GZTtNfn!c?6|CW9o)L*~$sq*)-yjD_5 zHSxj*XaEhM0W=^Q=r?x0I=j~-UIA?|Z|Qn%zGOAzNDT?s!TOVJL+p!v(Eu8VQv?0R zEr8bnc`e5{J)Bu;%d>lRQzWNHN7kO;`dN2!ZHH~KEgC=rNoW9OhZ|$)H*UTfFn)nL zdi0a8C($mlPDj7##Mn4CKm%w14WNON24Hr0X7?RCPmQ_b(Zsmw$ePh9z6uSX0W^RH z+BE>P!!uiA?cUeURS&gpO@ytEtd+h>Uqu6G01co4m>p>zUwG}ly6d^^ox5L;ov)6p zk-kb_MFVI64WI#-9UHTS-9v9$6gyuXStEUwzKRCW02)98FgsRe%iQ*jGnd86SVz`I zU!||20W^RH&;ZPimDv(&_Zl-twXAzKHqJV-Ci*IU6%C*PG=K(RcI?a+Zg;LD${IHrld9oTYVuZTnl1tP@7hR;fb?esQ_0ymJ zw6t7)Gi=x}86|1%$YOQqmMqS+S=OG?%li9kt0WXMe*yLHy-W0arZp5%Fj`g zmc+tZN7h1LHPTnHJNE@Z1I5$8+O=!d)mL9#;_>G{|9PeFC!NUa#1l_cU;EnE)OWx8 zU3Jbm=ag7KYSbuo!wol-7{7Y;YG2t=4XP$=B|Uu-K8(^wq@r zDz+_N97UVc=B@@NO`24Sb;B?<=`s>~yW8pggxSe3Te#hG^c=PSz+qq8d)vaBccdiD zhS_KU4WNP8G;r&!x2msv>!*>e5YRg)j|wY^`PxZgWclD-;S zU&WThSZJWQ8fb2AR{#31|C(u5yZn~9Z9lET?Btj&v9`=@&w6;Rul@bnMZR~W6wHR% zXaEhMfzTQ_di3bmFTC)=P^|vex4xyMtegWrefo4I+?VseH*DCTUU}t}W3OkPd8YKe zER)y#`SVNXlF9kzQnr8p{_5h3FD{+i_MPv1r_(rp;>3x*z8_{M%WUCx*ynaS(rWb8 z(0vu#6*s1$?P+__fSiB*)vtcFVP1dv%U>>?k9OaE_vvRg9!>0-jjlfT?c1kZ-p`&r zTgkY+FrM?q!*N-jlkN8s76*Ct9XnsW|Mw3kxNX(Qr=x$Q6U@Q}XaEhMfq)v2828hk z{&c6jmf!sR=RYr9Lv4eKevLCPI`yjlQ-^$$fwRb!gh{2BhI-pMZJ;yj`01Xsf z1A_()YKYZxuGfn%zNpP*k7i(YI?NVs%X2&LU%sVbeK_(cbPWs~IIwPf{q1jmTkYJrQ=7?dt-$Q` znJshMH_Yh#xt)%v%W{3O{dW5Y+K=|LK?C>Rdv9G{pL_1PdN=aXmM}YgW=pKy>)JW$ z@zu}N)sZ7hbD}f$BOm^PcBCD%rU8kaKli!M`7*mlj~;rH^V4oH+YGZybK9Lfx6=`& z7V|obU*OC>H=n)XLgC(HJ;n)Xwm6JoQVz402)986&jc_WlBw6OS~=5#m=+egxT3( zws8B3+a9RWnj^Uio$^1&FK7S_paIqNr7wM{CbQ2z`)qBVd}{<|XNTD`w>{&*HODmP z$bUK4AN!YY|3drHzHw{d>Z`A=$?FqOJTVvdOzF&CFgrWUmRS3;(X-WtXI|FL#`Ci~ z(=N;50U9Wl296v#(#!8!>^xrARZHJ6J6p^aZoALzbaaF1#5@)@Km%w14QLI>T($Qc zamoMlKmW7ZyLWFP{3XoJBD3eIYoBnyxt%95zRXuFt0Ji{19Z>dM$z zxcrAd{6Vc^{!o9F*wb~c$U-2Tq;%~hJsvkuv}Cv?C!C<>&YjEHc&~6A zJG(&TWb8WW7h~i2Gnj@3il70x->4s#8~I+Y&F}S>P-Mh7n@?l4q4YTv2W%eTN%2yB>L-NS-tWvtf3g z^vu2#&)?JL_P6=Kfddm^_C*(6q~&41eJb|>X2a}CJ@9|N`Zw)Kd)lCZ>#x5)c4mM3 z+uzo^p1<~o*)Thgx@Mp9=c{Pb{I;pwGsQcm9VP$y&wpO++O_rKBYhuJW@QLXU1V)z8wnRYgz z0eKE~7-pY$-g$a+i_>N>8)g?t@9b-Fd>d_TS6dGmG9)mwzxmB?>SnfUKcj9h%!b)f zHAMMh`3%~d_D(_rH{5VTAZE+6Kja#*B(^2X>x9`bJInfH+u#ElD24{+&z~QN+4B1$ zY6E85#q0%3H>gSTR;WMn!Ukvn4Fu4@jWe57mR~(bOlX+d-#O{*)b?e~))zB%Yyh+E zVs^iA^VQkC!fi9B)Q2B_nA-N= zzkG{&`L(}FeN9<-!R&B-SlF5L>pROgchCv#6|TKt6?QAS28P{ypZeg)M;MHEmIw)CsdKju(^WwX7dz!-m{FK_W|E*F!OK+e1+~<6`ed38Hs+N`(BbzJ7ICtIuq||Mk+r(|*`UK1lXE%H> zc0&VZG~hDZh2DM5o7MJR>Bg*X-F~0n`qsC6nLTdYIKN`4e81({y=u+V=I6FycDVin zv%}d9e~sPHfEf*VnJu7OxoMYra?1;;Ft3qH&N$ z>wkG$>M#~>!|ZVV2WE$}8~z%*p#d`*sKso7&#P}7ERB_a{IPi^UVQPz9hiOc$tSCS z{3AQ(%}rgn5@uUCuMM-!_#ON`c1Hu@G~mZ<0np)hKTtFDIpu%*;D}_6rBS0sRWVzh z)s^-+XPxc*d+#4nciv}iZksqeoc>{UIJ@C*u^SpNqk($N7EXQikB`-iht{gU9DJuU zY$djQ^5n_Km@Uuhdg6&E600l9ig&*Bx>~mO&$5ct$H-xJxc&pP!`Tggjor|I84YyC zY?op49$Tkg*!veZo9gG1B}}NmA#5rg6c1W(7o4T+~egd;CnBRuk zX8aER9=oG~a2g20Yyr`R9WSfpPnDj*A=y;Y-o1O3Jge*Nw@c?{r&4E>O5QsBH+9bg zt7R3baho_hoc>{UIJ@C*u^SpNqk*8z77#u6@*8TwpEk(9sU*24>DH}VQ>iPiO47b= z8)k>=KQKF--SF4g4GoylKp18V*W`L+xlg&AqeiN2f!X@H?fnN1%fiItZI~Ud|G?~U zcEev|H#A^I17VphAbRI-e^=8VSfdWV_kmyYlwOe0Oo1fhy-z3Ium>sVF!0d2#!(U@JG+;&paWPwfB=-|pw|%$# zOY&Gi&Le;SgAXU@uM^=m%nsLoV0JjW;jgh98Ze`QIGHV&+VtmF)MKl5c!)*z!;k)< zpSQhI3<0wu;(1mT#6XV&^t- 
zb~ydR>~MC&-(oj3U`7K;Fk7JX#$OMY&MBApl62exE$g0Da$j*gu?Q1Ctk zvxD0VonbRHU`_)`F$!hiXyYEYu)NX+!uK(Kb%qyzbn7OLk?`}{(`1M#` zjo9F#vFa!NZ&SUmovV^$_jhWHtuQWP&ga1!Y;Us$l4Q0ZO2)_Kd7gXrm!6vex3vZ2 zSmus|uG??aNQ>D5r`C4tZp(V*`nwB;O@rMH za9hSE&L4J1?A(UgFv4a=uZ<@>&IBk1=pL*a4yPo$U+nrRJx~NT7ofxy-b<6d4 z|1)@sX7^~~?mGPb-UXv$pW}wv+46ZXi1x5q18Fl`0JVGH0X6&KruL))gpbi{*w{)NK5B~n4 zWDA|PJ^!j&wRxA=AQf&)Jxf})s2}y61iS0R+%m`gn?FRKs@w$m_UacD8&T45B@3)_`edyNq7a^0eBj$B2uZ{&MhLef+XUOwbT#lla}t zht{eyd)%o0@A=mdcYEUQvo5fiOK(%V~3#xU)^YU+D zlROt~)6Q39|BZ25>Xzq_ozWe3d(YE5<>GOH=eS{ZwtOB8qCITZK$e&-pnB%T*VN+X z(sMK9*rt3JbsMmsh^pB7Ay-in%DU34PtzXzZX>N+0t)TnJuuA=XeUYA6>P*kgsc|Jgm0w+9#&S47X)l_8&N`ZkVwQcDHl8vpH^< zoh_dSgJ=(%HIQXy3#{Z`r!v3&##`?c?j>{kmruA?%#bN&yW8{HUk<5pcP&+?{AOI? zN$Fjl6mAXEKR%2&X@M9QMcT$PQIwP?5>>ShS}Nj zc`%6fuvr5z+YV+6x39SE0V|m89wP|52j93*b-n1iLdM8{5jHb|ZO@i|VYWTY z?yc{4J^cZ*_cnET9OP4;TQ*?)eDyBl&gyxy`o^Q{L}XRxBlH`$K;a9s$1_a)rK7}%LZ35fP8lC;Ye$F+6!jG2OIdnHfB$!FuUwf zv-n<%*~0F=W9O^)|Nh|wF9*nHliN;NTvf+dFgts`5I)f+HfR85mmO*r-~E|APs#mO zbgJS2`E1(5k(Tnb70iYYHt>OM%$`nRcG;n3@!gNv5^Kxc_M&B*In#K1<2r5%{yWdwP19CxI3U^ZjpjE$p#7&HL0%MLY*?~O29V(ngfZu>F)>@zPH$eZJI zrR^P^(Wz=IggD#2_=cF2Hp_wrV0PJ|X7RmIW(&7_IJ4BQJ#ThkgO?TLtJ#@`Jse$$ zPB1%$7$XZllWlDgqIuSt2vsKJsiDaonSWpI`=4#9v8EP+ug63#dAA7-1KrZsk~DbSJm+n%(mBO!aLf|qy}Ji*`a3fJx*o|w+D=0 zppG8>`=4#9yhajZfCM=)*7nfvYU6x z;;LE(v+ebn@Q$`KsR5W>cBol=kDJ-<-1bAQTV;2v7(zZddpKfkkG)|we&3|ur*3=o z2eZo#HH+^_Fx#Em=DD58G1|*#r!20jVcw4F%}!0as2y1Y$EF}9j$owB&9j*nory*?A( z(RL;^0JF;uHH+^_GTWWo=DO|V*$uPt@g{vdb>~ZeFguI1E^kxP%oc9>w4@w>`=4# zo))v+xoxi7j)&1+K09S`RUHq(Yd!rZ78SeiSa!o@r?SW|tjm7T?omws4#4wxjXc%VxVa75sM-^kphqs3#*7VpDX6MU~!bRFMEe*izvO~?{y9s7X ztlewO9M!V!*$$lXa)*3l9CyT$Fk7YN)2Ii2=2-)sG5dqRf2f2Vre19upI3{UHyf+p zZ@=|3Ez6!b!FN;4F3oN0=WW0I+Fv`c#>*b^opaj}E5dA;-3ylHnWb6Tw{!aZQT@38 zZAM=|>Swn8TKj+X?GLHHeQ>1gjT3q|$!y{Fs7XsYu*SRiC3TN=BmjHTH&@QMh<$M;*=Fqb$$Tz+4vSe z-@^B30}aHlfeT07UE*lZ(R0+g?YqlfIp%lMakd;MTsC^PTD$Ef*|RFD2?mx5z0g%$9wVx$PMbu91CWj69$XoU#(C?8ewQ{>!BQqHgq8 zG!4{Zwy;fJG6%eJ)2_0oj_{r(X3Ku@+|F>vD=-`XSTz60{-XJ;X`mjng>OCeeM#@n2c3#*;@+ zdqlnc?)#y*olR!*+|HoKComiTSTz60{-XJ;X`m5im*diTYS0Y})vi5n2Ih7anJxR& z``Wpsd(#DFS5bctv+-Y9^IvQank+&)i4osAu@IOSHZI=10jFMMXvd?x#g=1poKEVG5% z!n*$B7N}=le9dZRm&OjRouihn-XZ%@#U%2_?cs=ZJobUv_%DmIK-<+iQ*4a~-87R_g}zi8g12I63LIZmCcZkzvv z`sg1Ycc8W{%$EHaKJh-;{wgk!FWbWr%Xn-9v+-Xh{TFqkzoKa%E@lhwTm3p+e_jHonTFucB`BhXy98B$zGS>*35+kFMHY;d?p$|170hrwwvxVE; z_4R8zUMO96UdVIFVYb=6im$}}XaEhs?8ccb+%EESJ7G5d0%o8AG+vpkM7#@ zW})J3m~FPN;w!N~8bAXuI~=ox+eLeBC(OoQzzj5i2Fz*zW`}3C+?RFYoR-4G+A!N} zU&U8qe>8vwV0L(BORQa_=XSzu`~}QF18Bgk24HqHW(&73)6ebPyZ>PB<87F2wy)wV zu|FC>128)}vxVD5cy1@m#$Uh;G=K)oY5-=(!|dMI%~f|UUX|-u8)lpBtN2Rnj|R{H z%#MfIneHCAc{m}p# zfZ6dfJI~MUgxUBDn1Ke+fLRT|?0A{&#@h3iuGgV&-FgrPB=k2+j zFdKgXGtdAUFslKWogA~>x$TEqx9U*a{<&?KZMLuCE3rQsKm#y4S!S2!wy(*@b30)+ z{sLy80W@G%128*zW(&86-n7X2SQ}=W?W_1o?2iV}0L)H@*?D+wC(OoQzzj5i2Fz*z zW~ak!iM4x;nWI|PJ!@^e4YSSmReUA(M+0a8W~aw&;dT#awzbdggxUBDn1Ke+fLRT| z>~xte+#WS)iIuT7%r@Is@s-#g4WI#-oj$W|eQqbr#$Uh;G=K)oY5-=Z&uodcFB?5u zt=;yLRq-~=HrrS6mDnE*paGa|hS}x0ZM&}9hS~TFn1Ke+fLRT|Y;(*OZV$bAu{E(a z%r@Is@s-#g4WI#-ZI;68ob8Gyt=+ z!|beIw+*xL7cc`2paHWQfZ5q$c6p3^wpzD+x6-L8YcH5>wy)wVu|FC>128*V%oc9< zzILuUaOk~5Fq{5~k46J%01Z@X0A^>8*?q^(Q&&xXNFzI2u{LowzN6B2@IQJ$1LH17@8)m29KkN)5p5Y%*K8-QAg`wmtu9X1EQr@g0@Ega6S3 z8c1IQFgv@<7H$t1zd#*5`pMTdxQ(Ba2D8)epZG*Gw~3A~r_@XrNL9FgyFq7H;>@bKB3q z@`h1v!)$y^neD^*8t46gW1CEp*Jmp*}c;5Blsn3jt0;`Rx|*!?P0deZQner z*%-Hpv+-M5@mp*MS}dvtV75KXmRP%&er{*Wx@V1W8)oAti|QxYZ?vBk4Zv)>m@VAy ze$6Z+&+UZS_^qt?Ew%$K7F7c<+dgIsw@o~^6K3Nli|QxYZ?vBk4Zv(Wnce5QxoYa7 
zm8p)A!)*LkR{R#*ffkFZ0hnzkvnAHPZ1ilkcH2v-aT{jiCyVMQ*>AL;6%D{_dztNW zJFVw-!fgCjR{R#*ffkFZ0hnz!v&+xzd@z+*8)oAti|QxYZ?vBk4Zv)>nJuxl%x%wp zpDe1MWWUjVRy07I zZ9l6kkGBTjnA~$jU^ad$D}IaZK#N7y0L+HjIxuf)^IuaiUwdd%x;9& z@*9|q-^z;LVmr`cQ8fUwVK&Uh{}t8$vEOLlHVwdRm<_Y>TekTvwinG7RRb^^X2Wd! zUs3%Z`;GQ((*Vqd*)SWwWt-n(d(muBH2||=Hq6HV71jT--)P@94Zv)e4YToEw)ri# z7tIz`127wA!)*LtQT-qLjrMKR0L+HjFdM&Ro8Mx4(QHvQ0JC8>%*Ou})&H^IXx}ys zz-*Wev+-ND`7O2=%@$PyFdJsWZ2VtQ{U7^{_HEMu%!b)88^2|n-(q{wY*94;vtc&O z#{U)7|FPd_-!=`vY?uwR@msd}Ew&fU7F7c<8)n07{9jT1AN!5=ZPNhEhS@M1zh#@> zVtdhSQ8fUwVK&Uh{}t8$vEOLlHVwdRm<_Y>TekTvwinG7RRb^^X2Wd!Us3%Z`;GQ( z(*Vqd*)SWwWt-n(d(muBH2||=Hq6HV71jT--)P@94Zv)e4YToEw)ri#7tIz`127wA z!)*LtQT-qLjrMKR0L+HjFdM&Ro8Mx4(QHvQ0JC8>%*Ou})&H^IXx}ysz-*Wev+-ND z`7O2=%@$PyFdJsWZ2VtQ{U7^{_HEMu%!b)88^2|n-(q{wY*94;vtc&O#{U)7|FPd_ z-!=`vY?uwR@msd}Ew&fU7F7c<8)i4m?6x-jQ`J=Y>NjqIDr(O*sApsWFy4YR!*_RBkZfle?RW;3>e2GD>Z4Zv)e?d7mv z-q8zmg4u@pDscv}CK^BkFdJrjIqa8r^a7n=Hq0i*LIY^PkOp8j%=U8FFYo9DI>Bs1 zeU%uC7z+)c0hkT5y&U$-J9>dmFdJqQW1#^wU`PWn8)kbs?3Z`+0-a#Cp}tCtMT~_8 z&;ZPa*3FVG2Q z!)#(KG=K&SX#i%!Y%hoX@{V4h6U;W$SBbHRvCse-fY~tH%VEE~qZjA|vtc$d78*bU zhBN@PVYZjUetAbP&Z`<9#8_wm4Zv)e?d7mv-q8zmg4r;e7z+)c0Ye&q*)ZG7 zVZXei7w81D4fR!GEMhD)fCgYT%=U8FFYo9DI>Bt1O^k&G(10Ngz-*Z9<*;Ag(F=5f z*@pTmF%~fv8bAXu8)kbs?3Z`+0-azs%qGS{18BgI24FVK_Hx)S@8|_O!E8f)l^BZ{ z3k{$Fm<_YN9QMmQdVx+b8)g$@p#d~tNCPk%W_vm8mv{66onW@1zDkTmjD-f!0L+Hj zUJm=^9lby&m<_XuvCse-Fr)#P4YR!*_RBkZfle^nP+uj+BE~`kXaHuzY%hoX@{V4h z6U>I$#8_wm4H(h@%!b)s4*TUDy+9|JZK$sjV-aJa0W<)!VYZjUetAbP&%r?|liLr>W&;S~M*)ZG7VZXei7w81DVKy-q8bAYvGyt<< zwwJ?xc}FkM31%DWtHfBuSZDwZz-*Z9<*;Ag(F=5f*)W?J3k{$FLmGhDFx$&vzr3Rt z=mfJ3^;KdlVk|U(24FVK_Hx)S@8|_O!EBgKjD-f!fFTXQY?$rkuwUNM3v`0nhWaWo z7BLnYKm#xvW_vm8mv{66onSW1CdNVoXuyyLU^dM5a@a5L=mk2#Y(ssO7>gJS4WI#- z4YR!*_RBkZfle?RW)owf0W@Gp127wAdpYcvck}|CV78&YN{mH}g$B?7%!b)s4*TUD zy+9|J4YP@{&;S}Rqyd->v%MVl%R733PB7b0UnRyO#zF&V0A|B%FNgi|j$WV>%!b*- zSZDwZ7}5aDhS^>Y`{fo%DHq7>N*e~zs1v+HpaGZ-v%MVl z%R733PB0s06JwzPG+;;rFdJrjIqa8r^a7n=wxPaCj75xv2G9V^hS^>Y`{fdmFxyaHCB`DgLIY?3X2WbRhyC)7UZ4}qhS|heXaEfu z(g4hc*3FVG2Q z8|tgXSj1Rp01d!wnC<1TU*6FRbb{G1n-~iXpaDY~fY~tH%VEE~qZjA|vkmoCVk}}T zG=K(RHq7>N*e~zs1vZ4Zv)e?d7mv-q8zmg4u@pDlrx@78*bUFdJrj zIqa8r^a7n=Hq0i*LIY^PkOp8j%=U8FFYo9DI>Bs1eU%t1-WcnQ+pbin-#seU*IE5< zhGq1RL^S}jVYZjUetAbP&dId6y`Jc;&Kh!KE^L!l8&FU1 z<4~9lv%MVl%R733PB0s0=YmVNF#D{&6IHi`gH^Xh@xD&Kdt@%!v1r_f*)ZG7VZXei z7w81DQ(*R418-8NE$y#Pd%Rz4uWk#5;-f3t&KhRx@%B?&d&f5(&_1>x`(HHPN*e~zs1vc2^@7iPn3FNgi|j$WV> z%!b*yh|gd)%ubT6Nv#`Z!)z~y{ql}ppcBl7*)Y4-xoZiXgC^&qGd5m5Mv@c{ZCEeN zhS^>Y`{feSV6=A|VK&V6a@a5L=mk2#Y?z%3f6dr9W8+>P+K`9Y zFx$&vzr3Rt=mfK2Hq0i@&V>eSV6=A|VK&V6a@a5L=mk2#Y?z%3e@&cCobBbI4SARi zv%MVl%R733PB0s0!))U0Txh@sMtiprX2WbRhyC)7UZ4}qhS|CB*TmVx*N*e~zs1vdmFdJsWY~t)(Xut+Wd$$p0!)z~y{ql}ppcBl7*}3r7#M#8zULM+zhuJXO z%VEE~qZjA|vtc&OCeF@<25exocN<|g%=U8FFYo9DI>Bt1oeO_WoK2kV<)ICEm<_YN z9QMmQdVx+b8)hG4wmdU5jn~<|ZaG$GTz)kPW}kV*xY+G+=E&=lc}B>i17hFSk`@Nt z7<*gA)#iCv56p(yUJm=^9lby&E0}#&--*fiqm&pWzCY{F&pz`?y^XE<8Q=1p?<8Nk zjvrooZR{L~o!M=@lCy)XyViJol|u7FNwd_MSB;JBb=JU}V$u2;ldek6ek0?Xv%|Guvm|)3Q-!>d_jOcsmY_)m%sKTVnG!ar!Rbb^U&PthTsx-fe2| z;^}ZZC&w*mGy9j1jfvH73u|lk(}3gAC1&G8tJ(%;S3iRpW+xE4OP@=a*=Zgt#&R6A zqIWDl&Gn1I>`~9$u7*B#x9T^39_+T9-7vfCmtl5Ce+;u>b}S6)>{u9PR~;*#nh>*v z+rw7qaqi9_3Dfoi{`mJ%!b*1{ugG$Y(H(p`aR4x&+IyJpD?==AM4L3_bRVswybaX+NoS) zR>$ss#~m;mX8ZYHm<_Z2v=Qs~Fxv`dmpJ>RzE^q3Yq|SRnp0xGtjB$g*fd2AxP5ly z7~YR>dHfz`!)!nQ3$tOipEhFs9%g5o*-zYYjIqMn5gRpUL(FkY8JE?SpCh;4t_I%C zJyaV#zJS><+t2^PY?$q*jaa{j*+!W?a_f}3aqjT8YGw;>!*JG**(HAKd#J+g`E}dD zWjf4;*?#^PX2WbhZN&OL%r?tx{j3m|2em$HG27+rp!tnoyH%Ij`Z!?Fy))`=N3Cru 
z;&(6`X8ZYHm<_Z2v=Qs~FxyUMx6extdHC*zuHAy!jm6QO9f!bdnC<6(ZDw}2dxxmg z?;aKF>(u7H)xI^rePI1;%=h|UzA(F5S1Wp}vwKw^FGk5^L^=mUTaet?2xmayi$`{e7)Z;_Swl4zpplA2)1fw%4qsFM+AVojxBJ^#pD-I{2RSx9W71XC{e~Dy!R!q3Eswf{+10+Mt(WSyXmD(= z-?WUY%gPZkFYDn@ zULQ}K?Z(b>PWk2cbi8(}Q)bWM9JJ1Ood;&0+R{gz_ISU@FIi8lekkUCXJXr6Hq1^% zcQ89q-_sehhwHKLdaM|+ZfafTyR5GK9JxcE2fncSoN|~=oSlk4$pL%A+6iXE?6BGi zR8Eft2l{=1`(zk9MP{}hJFnz%kpG81cKn?3fp>K>pG}-y_t?ojuCje>JdB1Z)0mHh z*^Su=Jz=LTYrrJ4h3~`kebnT6AeT>v*)DgZe8Oz}ji0}P*=WO08+>n%HacatzRy`b zhR8W;L1OD#zm?~K3|cU~?s^6hXV+~9w~b)7J$)~2KMAwZM%tRAzW6m4nc17C)Mafg zM%VgI=D2Hp?|ygrteFil%Vk$lKVf$KeIze_2WF#x|hFJCD@k-vg)BevUQFhS~JX`1@rNHig-Cv{Mpo&pPwd_Kh&R#BKdN z%*$(>2R2fFpX8XhyKctD8#^{2&dyKYjN8w`Y_t)#rdVJ4nrf8UE<1-jsIRHk_sDc# z?)OQ5!tCQVcWqzm6K1F1hivg%FdJ>yqE)ujU#mfxEo?45@1uQ=J84$C>zDha)nXLi zRkIvs`)j@EegI~pjiUE~JnVlMW(!A4bKFzwKR?suZ5*F4yPC~@$0{%zzwGCiU^d$D z(+1z$qm8i4E^#}hxooK`X=eA=_hK8oxcVOGeq7A*_b}U@zL&P2gxP2#ZOu_%{F;lx z>^S1*M%R%Bvt?U`K6aPtH-4VAp!U@vt^wlx9aoX^s~K#tjpwYVK#o*&!53; zwBe@>zPCpkd1AIOc38`us$Yn?Xg_9`Vs3qYeE*y67$=9>_Vm59{Uppr8)<8f`r_AI z-k2?n9lm;+>K||}x-PTzI%T=Z__@)#VRrm|BsYGixvx6??oqM5y3HG=a$}p7=!Ukm zLj!qaws3pICOsE@d*kP=!EDu}RQ;Euf1p#LPYT1KHF-QJ5|4u6M0IbI_a;vSBv9EStWJcA;I0 zpaGcOe(zGB>$BZBUG9_C=(*b_f5&;__^l%NE%p(e7gYn+FuTt6(U&(cHr{ER{l_`J zbK+9AF0<>5xu?xP9F3n$K|zh|YtvmIy=4cM&#m|c#u2Tq+`_j<8<*ImPGyZZn>jP^qV zXuuW?*uZQ#2fN-p?x05F?1}#8jr%==+ntjhzILkWe@pG_u3DYH9l3(U5=58%USKQw>_Y|#MBZXX-h_ds!D>Uy80 z>#p^^QY+(rm`#7TrN6VCXcZ0EtpS)FjM>8Tk)G?Wi_PrO3Ljv%51{>MKQv&E2J*n{ z!He%{C`PXLY_3Me#=Y_P(4}{&e)>6_MP_!}Yg^T?T1Tr>ntQ7sEW3;sHpqnrx;~DU z>Cf5Jz)#zTs}VaUH^e+Iv(n0s>|{QB@cm|(E&D&L?T*sD#p~HZm|i)Sp5*^&-&4iC zmb>}Yjq3cBSE`emf2Y2??64c;~>v)mrBVQO9GnPj%GTAm3~ zbY{Eker(TL_4C$Iusfl?%JC4#gJ=K^hz3qsewpgC`G&gesmG-xm&L{Gde6uVGdAAn zJ{KX*cA4#dKKRO$>L)9P!*1A3tc3>9KtdY$-m**7#p|8SaXT(%kKC-EkvXm7wOTS( z4YO5~%Ui!sM=#K+)8AG8=KkF))qk}N*6hB7ew$FgCGH~bLIcItz<)nELXFsdTV~iD z7qcZU9=TmVBXiD-4rA??-_zK&UX8}qmA@-uoL$Ln`G3KSEhTo#aUuSt*#3p~r~T1D z{2Dm(37O-*B@^84jM+mTXzU!cN{$YB;O;7JUp^gXdpYaPJNgTq0>#Ebot$~x5_t10P6O40v#8dUpK^yYW-J!N|=<+*NzX|h7?1tH$v)B=0 zM`X5Je#Uc4)fvtGh`AXrM+0a84aBGciH9y)cTFfJR>rka{vW<(T2)N!y;dqLvt^rx zuhI7r*Y_b$lGy|AoK<)Gz55d-Z|Yhvm&HLp?|$x4bvo?EXT|8Vh&78HYtsJZ_W$oE z^>y5a=D3q&ws2eSBOYdaz0u$af`y@M$6j9Y?tv6!d185*7 z4VaqaPMX<`vNy`_O6R`K?s)E7qbzRpceXOyW%r~Vi`1#jz38(s^;u#LVoo$rYz>&0 zxQ|4nmakGb>2Y?s}yzjIJsw{@oafxeEx)gGVt zAvAyn&_Hkv{P2msYS7knuHzp1sJ^CJKS#?OXWGbY;rxj8Q`LY!#{2w`JTlv5_r7=j zszyIOU3G=s9J?^~fCkV&XbnjG(`(c9sl>Xq;!|6hE!-Z~dPj$25brTbkbIt*?Xvsj zxA&{7x7?-K-95BsIX(}5e2&eq85+o!27dk2)v0h>?u%A=KTw;QJzPJ_GYFGA`)v`J z?Xvs%gRiQQ8>gtQ&9EC^nlE2Ud(xh1Y2g2@8kWi&_sFgKzTq=wmUsuV=O5#Kk(lkW z``Nels3GfbRo`#El(?JmLNtH|&_F;9oRQKT_sAW$E4g+GW*3{;F1xoM*sTVw|0C=U z*k98+o~IsSM>LRK4Wv28E%z%4bMAH)$}l$G`F+k^CO7)I>CIiL?^8FyZrDw{g$B?- zKn&~9Y3>Owr_gldDUahc>U~d#>|OtvK!yfHndIFG?3ODcNk`xE(5cZ zW46ogwfq09E?G5}xI5tScGmm|+XLIsKzbUGIqu7!o@nhDc2aDI*~v58Wq0eor`2y( zjUn!a-Hex_0W{En27aKg$?3UqJlxjTWxK}!oiaP%Jx+^w-nh%;Mn79#-K>7uc9mxL z@94J;^jl(^^kN%qjjgk&fnPm!l^VHY@-c3?{3_ySXUvxC&`b9xuW|pkTGwjB?8f8v zWqUWO^IETf-Ef;@H#C3-I?=#SR}M|(I_|6=2QyKvFuQSPyK(tr zd)BI-w~m6{o%Go(9{;mV#HDB;Jq?`kM4!UXaW~3r;r8&=(^UVP>pxcnW(Q}s%kBqW zUZsArVmR!E-NamI01fzP;0OAi(M6u)4#I5Vc4yBOf!X1i?Xvs6m)g{SwG7tmzJz}3 z({G7w(u-}dHMY*827dL_m4%$+4$5rd_Q)+$)PSk8kKM1da(oS(JO8Mq;*bTfc@bzf<*Vzm6MbC&z4;-ExiCFStgGc*7HKWYwRtU9dha4HV&? 
zr*SY_xIKK;G}Z6Mxe8_{&uo|7TqA}rOv@Ki4_dQD13ygZxt^u5=JvQX&Hv+Kwrul= zr>3eAQx|EZB*QI7)|}9~ZC)?eh~ZCd@uzGjT1`U(@?6i}o3790ebeG(ws8B}<@YAW zRY%qgvzz>Eu=vF`b#BWLt`TF*n0W&@(Pn zql0{YboAr1Chb_PPHpa0$UaLvlYBgbjj?f-HDLR5Jx6ZU_Ya>wvjewbc0QTy^8Njf zj!d|D+d_46b5Gb!%!LNfK%q1s*KzmUG(ON+a*%Q7q<=ed$L(tHqUlxKhS~XMw#)7} z-aDiwY?}+a@!5s)*;$SQ*|zk?DQutP9{Q)dj@>^TW*39mF1ufU=b*Z7>ntV5pZJXQ zdmivdxxoI%FSj=|W{p#TZYV_07Rae-}u?u4lXrLe(uzikO&P}^~ z_RK(I{hT+RFUG(8_I`E6raNJGL3|dmp4nIr`L&Y-HAEiHyS_#x!1r2Yp+xzcT5fxvj$;hR(=b!^T}+N-G4stiW;*1R`vbn zOLO06iFwS&JhTCAU{V7=S~)}w+kR`Vxec@P&1{$5+Yjtk1E0DDb`x)L?1lz%t%1{5 z^i~6(zB#wthS|knw#)7;F&T`Xq1 z?B2NldDUahc-RfQVK*Actp@Tq#|^WK$!wS1t6$rpez$5I?8a~B)^8K{nvZ);wn5(J zxM6m&neDQ>b>GwKqLri7Nt~+&yEuM913A+`p60k=Hq3UpEuSr~ZdSi+y9##a%x9U5 zbEv!6ac)-HKQD9KFdJr9GTZ(CPkYv@U$kDK+5J0W4)~1*&_Hf9@XNJ-u=+Y~m<_XC zPS^T;bkAD#vlXLYH$FQzK07OMAKPJW{NYC{2dkmm?7WT}X2a}StoDBQz{{)DPgV?v z-LM;Wqk&v#pxcUGYJi2;al>qw?d5a5{Nfke)VVD~G`la!MZYE1F&FD#du(r719ETY z?ir{SNiRJ~R+b12)ZZ!)%!C$K^)8pS|mG zb!PJb*v)t;<1T2xY7Ly*X2%>i%!b*Gu(`9}%y@38I-|Lt)qR#&C0eY4{ji_y8tB%N z{W)%!4YNDraggPA?tDa@z6^HT?z`B2_*+yBbk+Bj%JLjH%!b)PnB3@ZCqJ`9owlqG zaW~_oXaEgZse$v?Wb-<1m<_WV<#5p7eRTBWvnK6WtWIg}ZDpS=YMjA-6Jw)+vIfp= z8>Fw}{$r-jD~H)IJ1C2T{{GMh?`^($+d_46b5Gb!%!LNffK?heP3E|_+>{w^!)%xx zl)+(t|HgZV)cCD)VK+Y8DxXcv$+&~eabLRrx=e5zX2a~T3=UiF^>+@c>$c8PKWOfO z?}p!K01a59fnTioz43dM!)%xxmc3ES?LB-zjedH%>be|u6U!0Hp#eKIV0w-lX2a~L z?2TIfr9-c)D>mH;yNS1~iMQaZ9ehoyolMSg!)%xxmAP@1-*s@G8nNMaCHDYMs;{x$ zV*6U!pZ1SS1Eo3c4e8Hu!)%xx7jxrU&z}#xqK2%$Rec|JbL_&{0~*M<2GXA6hS@MX zF4o4qo~Pe@Q4M_R7R~NU@mm@9TXDrMtcN(*Mh%?PcDWjo(skT08)nDN+PK%Z<;@pV zpEWnaZn({{8yd*022O41srqfcG4bocU^dK-o3Tl)Z{z;wRrfXHVK;s|vwqvg7?Ev` zZ)`)x1TR@XCQ)v~Y?z%CW0P9%llyk4->n)4yJ0u%Mgy7D!2hfsmDqLMFdJqk#nz

Llj7GwHX8-{Onku%&&r{Li+5iOg}sY?z%iTa#XY%d4B!`K?#NZelKu zUC=S=*m#KbBjAUPoM4QoOacLm#Ic}H@v(sW~ zTJ=2g%3Afa6{BD`F&D>fXdp8hI4`C-ZkP?T(_(4b^(@)FQvIl980^MxXU1>G6}zz> zJI4XabKH|6omURCVRqUqO}oCuFSe<3TZU+MUxM$3-)I007}dZjEtf^QjvHpf?6etb zs=j%XB;1_Q`?R4YOgkDR!E!cgAx|)#=Us@Y}E(4WI#I8aTUkfWD5q z@i}go4YN(N({%lJ?tDa@z6^Fpi5DnK%!mfUYe43>`!;y5a+nRXO*1nK^&kD@lds>l z<9>D8vOdO+xAA%5eIB;McD8Gv(K&9I4YRYr%&cs~M@K(C>z3_{)G5n)!EVM&(Eu7q zT?0SwWR4qV!|bf^GE3WW=!5q*-@I+1I;FX1>V4Msc!BLF=0yY1G*EAj8)n1oEU_|c z+w%H52i5qkbJfYPn`0MZPBf6B24s%A&!!tXoKp_7VRqIynZ@l9c8}RIQ~jX12YxF> zzeT(jEndTZcH8g!`a164th=U)+b|nuXOWRv-KM>V52(>kPgh-+!)|zBHxFn(d)n{6 zpFDmYH_V3FS>;ai-C{D_a@yKt-GS1PCO3USsl!c8fePy% zA20tI!{t8}vHlYxme;0>WWUJeorv{!i&(x((DF@9pQ+GKQ_%9-+D#`M*I&~Y0@gp; zQ~oo-%6}pqr?UP}1uI|L@(AVkcgtApSe?J_p{Pl;sl9FOB?E@E zzb@Y;V)Szd-hjSiYA32~vNw_TQrqy`tDJdiiMnV^6#M!nWU@i2Zj@ zd-n-nzZ22^<>DRc*9dGlb<^wc_WvN|yBIDn5{k0@(gmWFmo8|sytt4k^$YAHm3Iu6 z?-sHCE)mOX0Yj`FCKVw)eZzJFCMho0Kv;w8z5MD@xavv2w2|n8X#c# zZn}Jx0Ya3oGC;ucU3C2)S2ke4@^T2EqLyD-v4Vj0%OQjen;lD){~!C(|DV!Pe5C;b zmM=#U73&XGe&z8M1T0@3FQ_O~@$d1K*7?s71G-5U4qCpOTUI^|lC-Z5c?msd|ANZ>v!Y8(i%c+zZ-`dE8nGk-{m9B_E!opO!-Oy znkZi>;4t-93eZIPN&$zdzf!=V%6B_1AS30wM6AC;z@fIkLcm7KR|q&%{S^WZRlY)i zX39Iq1Q4$NZV}6OF#YCPFVoP%6E%cf0u~m+XWbU|D%>~7jWqM+XWoDe7gXRm2Vet==$4(FmvV0 z0uEn)S-|1Tmj!69d|ANZ>n{s9eEG6~&6O_;IDGwO0Y@lb5}?8IB>_jMza-!Ysb2&frMw8( zXn7}M{oNv#?_#*T7H}l{YXKWAuLT^be#&FN;PdA$zMABOa`YGY`~&RgS^)jffABxr zg@#P@ANmjdhyD}NeQ^B7@hivg9KVNkANUXa2mS;9f&U0;0pd5}H{v(qw~+3G<1dcC zIR4`JE3Es#f8amxANUXaM@S10zY)I?zY)KMbRQglas0*c7sp>=-3R^y|AGIo-hWiy z%Pp8ID90aFH?s@0g7)$&0~M<-;1lEg#kZZuzhVaLb1^ zz_Ie9D;hPxzsJh&2~eowr^m`y-4C+j2c7(TtbFwiF*_-BeDSgJo;!jb|3&@(j+ft4 z|A#(HkCzW;faB%E8Q^&Na0aL-AIbm~QFwQ0XmcqXn+po0~(-X`Cta< zSU#8mI+hP+fR5#Z8K6e_KnAE$K9B)wln-Qp8s!5SpjP=H2B=j&hyiMq4`P5?<%1ZY zX88aHs98RM0cw^HV1SzC0~nxo`O!^{rrPEAG+MlU{7>ISqAE3x(c1Mlj?voX8^@?m z`GyVPQ@&vX_>^zh06yg#Hh^FGMh)OszEK1Cm2cDle&riAfN%K*4d7e8K?C@fZ_og~ zn5d+jK--rRq<^MkwoSvcp literal 0 HcmV?d00001 From 42eefb0dc280cb268d09ab0d6125f0e0c3da4ecd Mon Sep 17 00:00:00 2001 From: Alejandro Aguilera Date: Wed, 26 Nov 2014 23:11:40 +0100 Subject: [PATCH 036/151] Fix and improve log generation --- regionfixer_core/world.py | 66 ++++++++++++++++++++------------------- 1 file changed, 34 insertions(+), 32 deletions(-) diff --git a/regionfixer_core/world.py b/regionfixer_core/world.py index b4c3a9f..f9a5f65 100644 --- a/regionfixer_core/world.py +++ b/regionfixer_core/world.py @@ -121,7 +121,7 @@ def __str__(self): @property def oneliner_status(self): - return "Readable" if self.readable else "Unreadable" + return "File: \"" + self.filename + "\"; status: " + ("Readable" if self.readable else "Unreadable") class ScannedChunk(object): @@ -394,6 +394,10 @@ def rescan_entities(self, options): class DataSet(object): """ Stores data items to be scanned by AsyncScanner in scan.py. """ + def summary(self): + """ Return a summary of problems found in this set. """ + raise NotImplemented + def _replace_in_data_structure(self, data): raise NotImplemented @@ -419,9 +423,10 @@ class DataFileSet(DataSet): DataSets are scanned using scan.AsyncScanner """ - def __init__(self, path, *args, **kwargs): + def __init__(self, path, title, *args, **kwargs): DataSet.__init__(self, *args, **kwargs) + self.title = title self.path = path data_files_path = glob(join(path, "*.dat")) self.data_files = d = {} @@ -437,6 +442,15 @@ def _replace_in_data_structure(self, data): def __len__(self): return len(self.data_files) + def summary(self): + """ Return a summary of problems found in this set. 
""" + text = "" + bad_data_files = [i for i in self.data_files.values() if not i.readable] + for f in bad_data_files: + text += "\t" + f.oneliner_status + text += "\n" + return text + class RegionSet(DataSet): """Stores an arbitrary number of region files and the scan results. @@ -728,9 +742,13 @@ def __init__(self, world_path): PLAYERS_DIRECTORY = 'players' OLD_PLAYERS_DIRECTORY = ' playerdata' STRUCTURES_DIRECTORY = 'data' - self.players = DataFileSet(join(self.path, PLAYERS_DIRECTORY)) - self.old_players = DataFileSet(join(self.path, OLD_PLAYERS_DIRECTORY)) - self.data_files = DataFileSet(join(self.path, STRUCTURES_DIRECTORY)) + + self.players = DataFileSet(join(self.path, PLAYERS_DIRECTORY), + "\nPlayer UUID files:\n") + self.old_players = DataFileSet(join(self.path, OLD_PLAYERS_DIRECTORY), + "\nOld format player files:\n") + self.data_files = DataFileSet(join(self.path, STRUCTURES_DIRECTORY), + "\nStructures and map data files:\n") # Does it look like a world folder? region_files = False @@ -772,39 +790,23 @@ def summary(self): final += "{0:#^60}\n".format(" World name: {0} ".format(self.name)) final += "{0:#^60}\n".format('') - # leve.dat files info + # leve.dat and data files final += "\nlevel.dat:\n" if self.scanned_level.readable: final += "\t\'level.dat\' is readable\n" else: final += "\t[WARNING]: \'level.dat\' isn't readable, error: {0}\n".format(self.scanned_level.status_text) - all_ok = True - final += "\nPlayer UUID files:\n" - for p in self.players.values(): - if not p.readable: - all_ok = False - final += "\t-[WARNING]: Player file {0} has problems.\n\t\tError: {1}\n\n".format(p.filename, p.status_text) - if all_ok: - final += "\tAll player files are readable.\n\n" - - all_ok = True - final += "\nOld format player files:\n" - for p in self.old_players.values(): - if not p.readable: - all_ok = False - final += "\t-[WARNING]: Player file {0} has problems.\n\t\tError: {1}\n\n".format(p.filename, p.status_text) - if all_ok: - final += "\tAll player files are readable.\n\n" - - all_ok = True - final += "\nStructures and map data files:\n" - for d in self.data_files.values(): - if not d.readable: - all_ok = False - final += "\t-[WARNING]: File {0} has problems.\n\t\tError: {1}\n\n".format(d.filename, d.status_text) - if all_ok: - final += "\tAll data files are readable.\n\n" + sets = [self.players, + self.old_players, + self.data_files] + + for set in sets: + final += set.title + text = set.summary() + final += text if text else "All files ok.\n" + + final += "\n" # chunk info chunk_info = "" From 20c40d8b048b264641147d74ba83dca5c06f1806 Mon Sep 17 00:00:00 2001 From: Alejandro Aguilera Date: Wed, 26 Nov 2014 23:45:08 +0100 Subject: [PATCH 037/151] Fix log file printing. --- regionfixer_core/world.py | 8 ++++++-- 1 file changed, 6 insertions(+), 2 deletions(-) diff --git a/regionfixer_core/world.py b/regionfixer_core/world.py index f9a5f65..9a110db 100644 --- a/regionfixer_core/world.py +++ b/regionfixer_core/world.py @@ -813,11 +813,15 @@ def summary(self): for regionset in self.regionsets: title = regionset.get_name() + final += "\n" + title + ":\n" # don't add text if there aren't broken chunks text = regionset.summary() - chunk_info += (title + text) if text else "" - final += chunk_info if chunk_info else "All the chunks are ok." + print len(text) + chunk_info += text if text else "" + final += chunk_info if chunk_info else "All the chunks are ok." 
+ + return final From 85dbae6d58df7f4f5e8bf523a49a4337f57f04cc Mon Sep 17 00:00:00 2001 From: Alejandro Aguilera Date: Wed, 26 Nov 2014 23:48:11 +0100 Subject: [PATCH 038/151] Remove debug printing --- regionfixer_core/world.py | 1 - 1 file changed, 1 deletion(-) diff --git a/regionfixer_core/world.py b/regionfixer_core/world.py index 9a110db..63dd9f0 100644 --- a/regionfixer_core/world.py +++ b/regionfixer_core/world.py @@ -817,7 +817,6 @@ def summary(self): # don't add text if there aren't broken chunks text = regionset.summary() - print len(text) chunk_info += text if text else "" final += chunk_info if chunk_info else "All the chunks are ok." From e8d4e3d95a8593b8fc29d8d476a5986f500e2419 Mon Sep 17 00:00:00 2001 From: Alejandro Aguilera Date: Thu, 8 Jan 2015 14:11:26 +0100 Subject: [PATCH 039/151] Update the reamde.rst --- README.rst | 262 +---------------------------------------------------- 1 file changed, 3 insertions(+), 259 deletions(-) diff --git a/README.rst b/README.rst index 65b6a92..10d826a 100644 --- a/README.rst +++ b/README.rst @@ -57,267 +57,11 @@ with your world. Usage ===== -You can read the program help running: “python region-fixer.py --help” +You can read the program help running: "python region-fixer.py --help" -(NOTE: if you downloaded the .exe version for windows, use - "region-fixer.exe" instead of "python region-fixer.py") +For usage examples and more info visit the wiki: -Here are some examples: - -From v0.1.0 Region-Fixer can scan single region files and arbitrary -region sets. For example, if you know where the problem is you could -scan a single region file instead of scanning the whole world. You -can also scan a few region files from different locations. Example:: - - $ python region-fixer.py ~/.minecraft/saves/World1/region/r.0.0.mca - - Welcome to Region Fixer! - - ############################################################ - ############## Scanning separate region files ############## - ############################################################ - Scanning: 1 / 1 100% [########################################] Time: 00:00:01 - - Found 0 corrupted, 0 wrong located chunks and 0 chunks with too many entities of a total of 976 - -The next example will scan your world and report any problems:: - - $ python region-fixer.py ~/.minecraft/saves/corrupted-world - - Welcome to Region Fixer! - - ############################################################ - ############ Scanning world: Testing corruption ############ - ############################################################ - Scanning directory... - Info: No nether dimension in the world directory. - Info: No end dimension in the world directory. - There are 1 region files and 1 player files in the world directory. - - -------------------- Checking level.dat -------------------- - 'level.dat' is redable - - ------------------ Checking player files ------------------- - All player files are readable. - - ------------------ Scanning the overworld ------------------ - Scanning: 1 / 1 100% [########################################] Time: 00:00:20 - - Found 19 corrupted, 0 wrong located chunks and 0 chunks with too many entities of a total of 625 - -You can use --verbose or -v option if you want more info. This option -will print a line per region file showing problems found in that region -file. - -To delete corrupted chunks you can use "--delete-corrupted" or "--dc":: - - $ python region-fixer.py --delete-corrupted ~/.minecraft/saves/corrupted-world - - Welcome to Region Fixer! 
- - ############################################################ - ############ Scanning world: Testing corruption ############ - ############################################################ - Scanning directory... - Info: No nether dimension in the world directory. - Info: No end dimension in the world directory. - There are 1 region files and 1 player files in the world directory. - - -------------------- Checking level.dat -------------------- - 'level.dat' is redable - - ------------------ Checking player files ------------------- - All player files are readable. - - ------------------ Scanning the overworld ------------------ - Scanning: 1 / 1 100% [########################################] Time: 00:00:19 - - Found 19 corrupted, 0 wrong located chunks and 0 chunks with too many entities of a total of 625 - - ################ Deleting corrupted chunks ################ - Deleting chunks in region set "/home/alejandro/.minecraft/saves/corrupted-world/region/": Done! Removed 19 chunks - Done! - Deleted 19 corrupted chunks - -If we have a backup of our world we can use them to fix the problems -found chunks, this method can spam a lot of output text, because writes -a log for every chunk that is trying to fix:: - - $ python region-fixer.py --backups ~/backup/2013.01.05/ --replace-corrupted ~/.minecraft/saves/corrupted-world - - Welcome to Region Fixer! - - ############################################################ - ############ Scanning world: Testing corruption ############ - ############################################################ - Scanning directory... - Info: No nether dimension in the world directory. - Info: No end dimension in the world directory. - There are 1 region files and 1 player files in the world directory. - - -------------------- Checking level.dat -------------------- - 'level.dat' is redable - - ------------------ Checking player files ------------------- - All player files are readable. - - ------------------ Scanning the overworld ------------------ - Scanning: 1 / 1 100% [########################################] Time: 00:00:19 - - Found 19 corrupted, 0 wrong located chunks and 0 chunks with too many entities of a total of 625 - - ############ Trying to replace corrupted chunks ############ - - ---------- New chunk to replace! Coords (-16, 9) ----------- - Backup region file found in: - ~/backup/2013.01.05/region/r.-1.0.mca - Replacing... - Chunk replaced using backup dir: ~/backup/2013.01.05/ - - ---------- New chunk to replace! Coords (-10, 19) ---------- - Backup region file found in: - ~/backup/2013.01.05/region/r.-1.0.mca - Replacing... - Chunk replaced using backup dir: ~/backup/2013.01.05/ - - ... long log of replaced chunks ... - - ---------- New chunk to replace! Coords (-13, 16) ---------- - Backup region file found in: - ~/backup/2013.01.05/region/r.-1.0.mca - Replacing... - Chunk replaced using backup dir: ~/backup/2013.01.05/ - - ---------- New chunk to replace! Coords (-13, 25) ---------- - Backup region file found in: - ~/backup/2013.01.05/region/r.-1.0.mca - Replacing... - Chunk replaced using backup dir: ~/backup/2013.01.05/ - - 19 replaced chunks of a total of 19 corrupted chunks - -These options have an equivalent for wrong located chunks. - -Another problem that Region Fixer can fix is an entity problem. -Sometimes worlds store thousands of entities in one chunk, hanging the -server when loaded. This can happen with squids, spiders, or even items. 
-A very common way to make this happen in your server is to ignite a few -thousands of TNTs at the same time. All those TNTs are entities and -the server will hang trying to move them all. - -This problem can be fixed with this method. Using the option -"--delete-entities" Region Fixer will delete all the entities in that -chunk if it does have more entities than entity-limit (see the help). -It doesn't touch TileEntities (chests, singposts, noteblocks, etc...). -At the moment of writing this Entities stored in chunks are: - -- mobs -- projectiles (arrows, snowballs...) -- primed TNT -- ender crystal -- paintings -- items on the ground (don't worry chests are safe) -- vehicles (boats and minecarts) -- dynamic tiles (falling sand and activated TNT) - -Note that you still need to load the chunk in Region Fixer to fix it, -and it may need GIGs of RAM and lot of time. You can use this in -combination with "--entity-limit" to set your limit (default 300 -entities, note that a chunk has 256 square meters of surface and if you -put a mob in every sun lighted block of a chunk that will make 256 -mobs, so it's a big limit!):: - - python region-fixer.py --entity-limit 50 --delete-entities ~/.minecraft/saves/corrupted-world - - Welcome to Region Fixer! - - ############################################################ - ############ Scanning world: Testing corruption ############ - ############################################################ - Scanning directory... - Info: No nether dimension in the world directory. - Info: No end dimension in the world directory. - There are 1 region files and 1 player files in the world directory. - - -------------------- Checking level.dat -------------------- - 'level.dat' is redable - - ------------------ Checking player files ------------------- - All player files are readable. - - ------------------ Scanning the overworld ------------------ - Deleted 102 entities in chunk (14,8) of the region file: r.-1.0.mca - Deleted 111 entities in chunk (14,10) of the region file: r.-1.0.mca - Deleted 84 entities in chunk (15,4) of the region file: r.-1.0.mca - Deleted 75 entities in chunk (21,4) of the region file: r.-1.0.mca - Scanning: 1 / 1 100% [########################################] Time: 00:00:20 - - Found 0 corrupted, 0 wrong located chunks and 0 chunks with too many entities of a total of 625 - - -From version v0.1.0 there is also an interactive mode for Region-Fixer. -If you don't know what's wrong with your world this mode can be very -useful. To start using the mode use the '--interactive' option:: - - $ python region-fixer.py --interactive ~/.minecraft/saves/corrutped-world - -In this mode the scan results are saved in memory, so one scanned you -can delete chunks, delete entities, replace chunks, replace chunks with -too many entities and read a summary of what's wrong without needing to -scan the world again. Example of usage:: - - $ python region-fixer.py --interactive ~/.minecraft/saves/corrupted-world - Welcome to Region Fixer! - Minecraft Region-Fixer interactive mode. - (Use tab to autocomplete. Type help for a list of commands.) - - #-> scan - Scanning directory... - Info: No nether dimension in the world directory. - Info: No end dimension in the world directory. - There are 1 region files and 1 player files in the world directory. - - -------------------- Checking level.dat -------------------- - 'level.dat' is redable - - ------------------ Checking player files ------------------- - All player files are readable. 
- - ------------------ Scanning the overworld ------------------ - Scanning: 1 / 1 100% [########################################] Time: 00:00:21 - - #-> summary - - ############################################################ - ############## World name: Testing corruption ############## - ############################################################ - - level.dat: - 'level.dat' is readable - - Player files: - All player files are readable. - - Overworld: - Region file: r.-1.0.mca - |-+-Chunk coords: header (16, 9), global (-16, 9). - | +-Status: Corrupted - - ... big summary... - - |-+-Chunk coords: header (19, 25), global (-13, 25). - | +-Status: Corrupted - | - + - - - #-> remove_chunks corrupted - Deleting chunks in region set "/home/alejandro/.minecraft/saves/corrupted-world/region/": Done! Removed 19 chunks - Done! Removed 19 chunks - #-> - - -For more info: “python region-fixer.py --help” +https://github.com/Fenixin/Minecraft-Region-Fixer/wiki/Usage Bugs, suggestions, feedback, questions From 692665de064639777a4b3e26af0b2876654aaf31 Mon Sep 17 00:00:00 2001 From: Alejandro Aguilera Date: Thu, 8 Jan 2015 14:21:39 +0100 Subject: [PATCH 040/151] Update NBT library fixing slow removing chunks. --- nbt/nbt.py | 11 ++++++- nbt/region.py | 80 +++++++++++++++++++++++++++++++++++++-------------- nbt/setup.py | 28 ------------------ 3 files changed, 68 insertions(+), 51 deletions(-) delete mode 100755 nbt/setup.py diff --git a/nbt/nbt.py b/nbt/nbt.py index e98cacb..312381b 100644 --- a/nbt/nbt.py +++ b/nbt/nbt.py @@ -484,6 +484,15 @@ def pretty_tree(self, indent=0): class NBTFile(TAG_Compound): """Represent an NBT file object.""" def __init__(self, filename=None, buffer=None, fileobj=None): + """ + Create a new NBTFile object. + Specify either a filename, file object or data buffer. + If filename of file object is specified, data should be GZip-compressed. + If a data buffer is specified, it is assumed to be uncompressed. + + If filename is specified, the file is closed after reading and writing. + If file object is specified, the caller is responsible for closing the file. + """ super(NBTFile, self).__init__() self.filename = filename self.type = TAG_Byte(self.id) @@ -508,7 +517,7 @@ def __init__(self, filename=None, buffer=None, fileobj=None): self.parse_file() if closefile: # Note: GzipFile().close() does NOT close the fileobj, - # So the caller is still responsible for closing that. + # So we are still responsible for closing that. try: self.file.close() except (AttributeError, IOError): diff --git a/nbt/region.py b/nbt/region.py index 803aa2d..ba8d9fa 100644 --- a/nbt/region.py +++ b/nbt/region.py @@ -118,7 +118,7 @@ def __init__(self, x, z): - STATUS_CHUNK_OK - STATUS_CHUNK_NOT_CREATED""" def __str__(self): - return "%s(%d, %d, sector=%s, length=%s, timestamp=%s, lenght=%s, compression=%s, status=%s)" % \ + return "%s(%d, %d, sector=%s, blocklength=%s, timestamp=%s, bytelength=%s, compression=%s, status=%s)" % \ (self.__class__.__name__, self.x, self.z, self.blockstart, self.blocklength, self.timestamp, \ self.length, self.compression, self.status) def __repr__(self): @@ -183,8 +183,8 @@ class RegionFile(object): def __init__(self, filename=None, fileobj=None): """ - Read a region file by filename of file object. - If a fileobj is specified, it is not closed after use; it is the callers responibility to close that. + Read a region file by filename or file object. + If a fileobj is specified, it is not closed after use; it is the callers responibility to close it. 
""" self.file = None self.filename = None @@ -263,9 +263,15 @@ def _bytes_to_sector(bsize, sectorlength=SECTOR_LENGTH): sectors, remainder = divmod(bsize, sectorlength) return sectors if remainder == 0 else sectors + 1 - def __del__(self): + def close(self): if self._closefile: - self.file.close() + try: + self.file.close() + except IOError: + pass + + def __del__(self): + self.close() # Parent object() has no __del__ method, otherwise it should be called here. def _init_file(self): @@ -302,7 +308,7 @@ def _parse_header(self): m = self.metadata[x, z] self.file.seek(index) - offset, length = unpack(">IB", b"\0"+self.file.read(4)) + offset, length = unpack(">IB", b"\0" + self.file.read(4)) m.blockstart, m.blocklength = offset, length self.file.seek(index + SECTOR_LENGTH) m.timestamp = unpack(">I", self.file.read(4))[0] @@ -335,6 +341,8 @@ def _parse_chunk_headers(self): m = self.metadata[x, z] if m.status not in (STATUS_CHUNK_OK, STATUS_CHUNK_OVERLAPPING, \ STATUS_CHUNK_MISMATCHED_LENGTHS): + # skip to next if status is NOT_CREATED, OUT_OF_FILE, IN_HEADER, + # ZERO_LENGTH or anything else. continue try: self.file.seek(m.blockstart*SECTOR_LENGTH) # offset comes in sectors of 4096 bytes @@ -345,7 +353,9 @@ def _parse_chunk_headers(self): except IOError: m.status = STATUS_CHUNK_OUT_OF_FILE continue - if m.length <= 1: # chunk can't be zero length + if m.blockstart*SECTOR_LENGTH + m.length + 4 > self.size: + m.status = STATUS_CHUNK_OUT_OF_FILE + elif m.length <= 1: # chunk can't be zero length m.status = STATUS_CHUNK_ZERO_LENGTH elif m.length + 4 > m.blocklength * SECTOR_LENGTH: # There are not enough sectors allocated for the whole block @@ -365,9 +375,10 @@ def _sectors(self, ignore_chunk=None): if ignore_chunk == m: continue if m.blocklength and m.blockstart: - for b in range(m.blockstart, m.blockstart + max(m.blocklength, m.requiredblocks())): - if 2 <= b < sectorsize: - sectors[b].append(m) + blockend = m.blockstart + max(m.blocklength, m.requiredblocks()) + # Ensure 2 <= b < sectorsize, as well as m.blockstart <= b < blockend + for b in range(max(m.blockstart, 2), min(blockend, sectorsize)): + sectors[b].append(m) return sectors def _locate_free_sectors(self, ignore_chunk=None): @@ -462,33 +473,45 @@ def chunk_count(self): return len(self.get_metadata()) def get_blockdata(self, x, z): - """Return the decompressed binary data representing a chunk.""" + """ + Return the decompressed binary data representing a chunk. + + May raise a RegionFileFormatError(). + If decompression of the data succeeds, all available data is returned, + even if it is shorter than what is specified in the header (e.g. in case + of a truncated while and non-compressed data). + """ # read metadata block m = self.metadata[x, z] if m.status == STATUS_CHUNK_NOT_CREATED: raise InconceivedChunk("Chunk is not created") elif m.status == STATUS_CHUNK_IN_HEADER: raise RegionHeaderError('Chunk %d,%d is in the region header' % (x,z)) - elif m.status == STATUS_CHUNK_OUT_OF_FILE: + elif m.status == STATUS_CHUNK_OUT_OF_FILE and (m.length <= 1 or m.compression == None): + # Chunk header is outside of the file. 
raise RegionHeaderError('Chunk %d,%d is partially/completely outside the file' % (x,z)) elif m.status == STATUS_CHUNK_ZERO_LENGTH: if m.blocklength == 0: raise RegionHeaderError('Chunk %d,%d has zero length' % (x,z)) else: raise ChunkHeaderError('Chunk %d,%d has zero length' % (x,z)) + elif m.blockstart * SECTOR_LENGTH + 5 >= self.size: + raise RegionHeaderError('Chunk %d,%d is partially/completely outside the file' % (x,z)) - # status is STATUS_CHUNK_OK, STATUS_CHUNK_MISMATCHED_LENGTHS or STATUS_CHUNK_OVERLAPPING. + # status is STATUS_CHUNK_OK, STATUS_CHUNK_MISMATCHED_LENGTHS, STATUS_CHUNK_OVERLAPPING + # or STATUS_CHUNK_OUT_OF_FILE. # The chunk is always read, but in case of an error, the exception may be different # based on the status. - # offset comes in sectors of 4096 bytes + length bytes + compression byte - self.file.seek(m.blockstart * SECTOR_LENGTH + 5) - chunk = self.file.read(m.length-1) # the length in the file includes the compression byte - err = None - if m.compression > 2: - raise ChunkDataError('Unknown chunk compression/format (%d)' % m.compression) try: + # offset comes in sectors of 4096 bytes + length bytes + compression byte + self.file.seek(m.blockstart * SECTOR_LENGTH + 5) + # Do not read past the length of the file. + # The length in the file includes the compression byte, hence the -1. + length = min(m.length - 1, self.size - (m.blockstart * SECTOR_LENGTH + 5)) + chunk = self.file.read(length) + if (m.compression == COMPRESSION_GZIP): # Python 3.1 and earlier do not yet support gzip.decompress(chunk) f = gzip.GzipFile(fileobj=BytesIO(chunk)) @@ -496,11 +519,16 @@ def get_blockdata(self, x, z): f.close() elif (m.compression == COMPRESSION_ZLIB): chunk = zlib.decompress(chunk) + elif m.compression != COMPRESSION_NONE: + raise ChunkDataError('Unknown chunk compression/format (%s)' % m.compression) + return chunk + except RegionFileFormatError: + raise except Exception as e: # Deliberately catch the Exception and re-raise. # The details in gzip/zlib/nbt are irrelevant, just that the data is garbled. - err = str(e) + err = '%s' % e # avoid str(e) due to Unicode issues in Python 2. if err: # don't raise during exception handling to avoid the warning # "During handling of the above exception, another exception occurred". @@ -524,7 +552,7 @@ def get_nbt(self, x, z): return NBTFile(buffer=data) # this may raise a MalformedFileError. Convert to ChunkDataError. except MalformedFileError as e: - err = str(e) + err = '%s' % e # avoid str(e) due to Unicode issues in Python 2. if err: raise ChunkDataError(err) @@ -561,6 +589,13 @@ def write_blockdata(self, x, z, data): free_sectors = self._locate_free_sectors(ignore_chunk=current) sector = self._find_free_location(free_sectors, nsectors, preferred=current.blockstart) + # If file is smaller than sector*SECTOR_LENGTH (it was truncated), pad it with zeroes. 
+ if self.size < sector*SECTOR_LENGTH: + # jump to end of file + self.file.seek(0, SEEK_END) + self.file.write((sector*SECTOR_LENGTH - self.size) * b"\x00") + assert self.file.tell() == sector*SECTOR_LENGTH + # write out chunk to region self.file.seek(sector*SECTOR_LENGTH) self.file.write(pack(">I", length + 1)) #length field @@ -601,7 +636,8 @@ def write_blockdata(self, x, z, data): self.file.write(SECTOR_LENGTH*b'\x00') # update file size and header information - self.size = self.get_size() + self.size = max((sector + nsectors)*SECTOR_LENGTH, self.size) + assert self.get_size() == self.size current.blockstart = sector current.blocklength = nsectors current.status = STATUS_CHUNK_OK diff --git a/nbt/setup.py b/nbt/setup.py deleted file mode 100755 index 2aaea80..0000000 --- a/nbt/setup.py +++ /dev/null @@ -1,28 +0,0 @@ -#!/usr/bin/env python - -from setuptools import setup -from nbt import VERSION - -setup( - name = 'NBT', - version = ".".join(str(x) for x in VERSION), - description = 'Named Binary Tag Reader/Writer', - author = 'Thomas Woolford', - author_email = 'woolford.thomas@gmail.com', - url = 'http://github.com/twoolie/NBT', - license = open("LICENSE.txt").read(), - long_description = open("README.txt").read(), - packages = ['nbt'], - classifiers = [ - "Development Status :: 5 - Production/Stable", - "Intended Audience :: Developers", - "License :: OSI Approved :: MIT License", - "Operating System :: OS Independent", - "Programming Language :: Python", - "Programming Language :: Python :: 2.6", - "Programming Language :: Python :: 2.7", - "Programming Language :: Python :: 3", - "Topic :: Games/Entertainment", - "Topic :: Software Development :: Libraries :: Python Modules" - ] -) From 996578a8ed2233138a97673fb338f15fa6dcda83 Mon Sep 17 00:00:00 2001 From: Freek Dijkstra Date: Fri, 14 Nov 2014 14:29:09 +0100 Subject: [PATCH 041/151] Skip incorrectly named files in a world. Fixes issue #36. Signed-off-by: Freek Dijkstra --- regionfixer_core/world.py | 16 +++++++++++++--- 1 file changed, 13 insertions(+), 3 deletions(-) diff --git a/regionfixer_core/world.py b/regionfixer_core/world.py index 63dd9f0..9f03a3b 100644 --- a/regionfixer_core/world.py +++ b/regionfixer_core/world.py @@ -104,6 +104,10 @@ "DIM-1": "Nether"} +class InvalidFileName(IOError): + pass + + class ScannedDataFile(object): def __init__(self, path=None, readable=None, status_text=None): self.path = path @@ -278,8 +282,11 @@ def get_coords(self): splited = split(self.filename) filename = splited[1] l = filename.split('.') - coordX = int(l[1]) - coordZ = int(l[2]) + try: + coordX = int(l[1]) + coordZ = int(l[2]) + except ValueError: + raise InvalidFileName() return coordX, coordZ @@ -465,7 +472,10 @@ def __init__(self, regionset_path=None, region_list=[]): self.region_list = region_list self.regions = {} for path in self.region_list: - r = ScannedRegionFile(path) + try: + r = ScannedRegionFile(path) + except InvalidFileName as e: + print "Warning: The file {0} is not a valid name for a region. I'll skip it.".format(path) self.regions[r.get_coords()] = r self.corrupted_chunks = 0 self.wrong_located_chunks = 0 From 8b5f28b570f890130aa87129a5b9b069438f676b Mon Sep 17 00:00:00 2001 From: Alejandro Aguilera Date: Fri, 30 Jan 2015 00:34:25 +0100 Subject: [PATCH 042/151] Only store regionf info if valid. 
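
This patch, like the previous one, deals with region files whose names do not follow the r.X.Z.mca pattern: get_coords() raises InvalidFileName for them, and the RegionSet constructor warns and skips them instead of crashing. A stand-alone sketch of that parsing and skipping logic, using an illustrative helper name and sample paths rather than the actual Region Fixer API::

    from os.path import split

    class InvalidFileName(IOError):
        pass

    def coords_from_filename(path):
        """ Return (X, Z) for a region file named like 'r.-3.2.mca'. """
        filename = split(path)[1]
        parts = filename.split('.')
        try:
            return int(parts[1]), int(parts[2])
        except (IndexError, ValueError):
            raise InvalidFileName()

    regions = {}
    for path in ['region/r.0.0.mca', 'region/r.-1.2.mca', 'region/r.0.copy.mca']:
        try:
            regions[coords_from_filename(path)] = path
        except InvalidFileName:
            # Same policy as the patch: warn about the odd file and keep going.
            print "Warning: {0} is not a valid region file name, skipping.".format(path)
    print sorted(regions.keys())
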
--- regionfixer_core/world.py | 16 +++++++++------- 1 file changed, 9 insertions(+), 7 deletions(-) diff --git a/regionfixer_core/world.py b/regionfixer_core/world.py index 9f03a3b..6e8b9ac 100644 --- a/regionfixer_core/world.py +++ b/regionfixer_core/world.py @@ -474,15 +474,17 @@ def __init__(self, regionset_path=None, region_list=[]): for path in self.region_list: try: r = ScannedRegionFile(path) + self.regions[r.get_coords()] = r + self.corrupted_chunks = 0 + self.wrong_located_chunks = 0 + self.entities_problems = 0 + self.shared_header = 0 + self.bad_list = [] + self.scanned = False + except InvalidFileName as e: print "Warning: The file {0} is not a valid name for a region. I'll skip it.".format(path) - self.regions[r.get_coords()] = r - self.corrupted_chunks = 0 - self.wrong_located_chunks = 0 - self.entities_problems = 0 - self.shared_header = 0 - self.bad_list = [] - self.scanned = False + def get_name(self): """ Return a string with the name of the dimension, the From b74e31c73030e5386d5463e822da402f55b294e0 Mon Sep 17 00:00:00 2001 From: Alejandro Aguilera Date: Thu, 5 Feb 2015 12:31:02 +0100 Subject: [PATCH 043/151] Update donors list. --- DONORS.txt | 1 + 1 file changed, 1 insertion(+) diff --git a/DONORS.txt b/DONORS.txt index 231adb5..0e35a92 100644 --- a/DONORS.txt +++ b/DONORS.txt @@ -8,6 +8,7 @@ Andrew Van Hise Eugene Sterner Udell Ross Burton Powercraft Network +David Wilczewski Sponsors: Initial development was sponsored by: NITRADO Servers (http://nitrado.net) From 16cef359030624e394ad84d0d6cb7bca5d357bb8 Mon Sep 17 00:00:00 2001 From: Alejandro Aguilera Date: Mon, 16 Mar 2015 23:48:10 +0100 Subject: [PATCH 044/151] New setup script that builds gui. --- setup.py | 194 +++++++++++++++++++++++++++++++++++++++++++++++++++++-- 1 file changed, 188 insertions(+), 6 deletions(-) diff --git a/setup.py b/setup.py index ff3a335..b827421 100644 --- a/setup.py +++ b/setup.py @@ -1,9 +1,191 @@ +# taken from: http://www.wiki.wxpython.org/py2exe-python26 + +# ======================================================# +# File automagically generated by GUI2Exe version 0.3 +# Andrea Gavana, 01 April 2007 +# ======================================================# + +# Let's start with some default (for me) imports... + from distutils.core import setup -import nbt import py2exe -import sys +import glob +import os +import zlib +import shutil + +from regionfixer_core import version as cli_version +from gui import version as gui_version + + +# Remove the build folder +shutil.rmtree("build", ignore_errors=True) + +# do the same for dist folder +shutil.rmtree("dist", ignore_errors=True) + +MANIFEST_TEMPLATE = """ + + + + %(prog)s + + + + + + + + + + + + + + + + + + + + +""" + +class Target(object): + """ A simple class that holds information on our executable file. """ + def __init__(self, **kw): + """ Default class constructor. Update as you need. """ + self.__dict__.update(kw) + + +# Ok, let's explain why I am doing that. +# Often, data_files, excludes and dll_excludes (but also resources) +# can be very long list of things, and this will clutter too much +# the setup call at the end of this file. So, I put all the big lists +# here and I wrap them using the textwrap module. 
+ +data_files = ['COPYING.txt', 'README.rst', 'CONTRIBUTORS.txt', 'DONORS.txt', 'icon.ico'] + +includes = [] +excludes = ['_gtkagg', '_tkagg', 'bsddb', 'curses', 'email', 'pywin.debugger', + 'pywin.debugger.dbgcon', 'pywin.dialogs', 'tcl', + 'Tkconstants', 'Tkinter'] +packages = [] +dll_excludes = ['libgdk-win32-2.0-0.dll', 'libgobject-2.0-0.dll', 'tcl84.dll', + 'tk84.dll', + 'MSVCP90.dll', 'mswsock.dll', 'powrprof.dll'] +icon_resources = [(1, 'icon.ico')] +bitmap_resources = [] +other_resources = [] +other_resources = [(24, 1, MANIFEST_TEMPLATE % dict(prog="MyAppName"))] + + +# This is a place where the user custom code may go. You can do almost +# whatever you want, even modify the data_files, includes and friends +# here as long as they have the same variable name that the setup call +# below is expecting. + + +# +# The following will copy the MSVC run time dll's +# (msvcm90.dll, msvcp90.dll and msvcr90.dll) and +# the Microsoft.VC90.CRT.manifest which I keep in the +# "Py26MSdlls" folder to the dist folder +# +# depending on wx widgets you use, you might need to add +# gdiplus.dll to the above collection + +py26MSdll = glob.glob(r"c:\Dev\Py26MSdlls-9.0.21022.8\msvc\*.*") + +# install the MSVC 9 runtime dll's into the application folder +data_files += [("", py26MSdll),] + +# I found on some systems one has to put them into sub-folders. +##data_files += [("Microsoft.VC90.CRT", py26MSdll), +## ("lib\Microsoft.VC90.CRT", py26MSdll)] + + + +# Ok, now we are going to build our target class. +# I chose this building strategy as it works perfectly for me :-D + +GUI_Target = Target( + # what to build + script = "regionfixer_gui.py", + icon_resources = icon_resources, + bitmap_resources = bitmap_resources, + other_resources = other_resources, + dest_base = "regionfixer_gui", + version = gui_version.version_string, + company_name = "No Company", + copyright = "Copyright (C) 2011 Alejandro Aguilera", + name = "Region Fixer GUI" + ) + +CLI_Target = Target( + # what to build + script = "regionfixer.py", + icon_resources = icon_resources, + bitmap_resources = bitmap_resources, + other_resources = other_resources, + dest_base = "regionfixer", + version = cli_version.version_string, + company_name = "No Company", + copyright = "Copyright (C) 2011 Alejandro Aguilera", + name = "Region Fixer" + ) + + +# That's serious now: we have all (or almost all) the options py2exe +# supports. I put them all even if some of them are usually defaulted +# and not used. Some of them I didn't even know about. + +setup( + + data_files = data_files, + + options = {"py2exe": {"compressed": 2, + "optimize": 2, + "includes": includes, + "excludes": excludes, + "packages": packages, + "dll_excludes": dll_excludes, + "bundle_files": 2, + "dist_dir": "dist", + "xref": False, + "skip_archive": False, + "ascii": False, + "custom_boot_script": '', + } + }, + + zipfile = "lib\library.zip", + console = [CLI_Target], + windows = [GUI_Target] + ) + +# This is a place where any post-compile code may go. +# You can add as much code as you want, which can be used, for example, +# to clean up your folders or to do some particular post-compilation +# actions. -if sys.argv[1] == "py2exe": - setup(console=['region-fixer.py'], data_files=['COPYING.txt','README.rst','CONTRIBUTORS.txt','DONORS.txt']) -else: - print "Use \'python setup.py py2exe\' to build a windows executable." +# And we are done. 
That's a setup script :-D \ No newline at end of file From 9df921cb0f130dcf4fc1ebe2f334b901d29e7e67 Mon Sep 17 00:00:00 2001 From: Alejandro Aguilera Date: Tue, 17 Mar 2015 23:18:30 +0100 Subject: [PATCH 045/151] Something went wrong with stashing. --- setup.py | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/setup.py b/setup.py index b827421..e06fdf0 100644 --- a/setup.py +++ b/setup.py @@ -188,4 +188,5 @@ def __init__(self, **kw): # to clean up your folders or to do some particular post-compilation # actions. -# And we are done. That's a setup script :-D \ No newline at end of file +# And we are done. That's a setup script :-D + From ec80f749cc069a11bf5885939c1695bf3445f075 Mon Sep 17 00:00:00 2001 From: Alejandro Aguilera Date: Tue, 17 Mar 2015 23:50:35 +0100 Subject: [PATCH 046/151] Update contributors list. --- CONTRIBUTORS.txt | 1 + 1 file changed, 1 insertion(+) diff --git a/CONTRIBUTORS.txt b/CONTRIBUTORS.txt index a35e716..69f06c6 100644 --- a/CONTRIBUTORS.txt +++ b/CONTRIBUTORS.txt @@ -7,3 +7,4 @@ Contributors (in no particular order): aheadley (Alex Headley) - First multiprocessing version of Region Fixer. carlallen (Carl Allen) - Fix problem in MacOS kbn (Kristian Berge) - Small fixes +macfreek (Freek Dijkstra) - Fixes and lots of help \ No newline at end of file From d778d0b7046e472c57fab4a158a91d2541b82154 Mon Sep 17 00:00:00 2001 From: Alejandro Aguilera Date: Fri, 27 Mar 2015 00:39:21 +0100 Subject: [PATCH 047/151] Fix not being able to remove, fix a regionset in interactive mode. --- regionfixer_core/scan.py | 1 + 1 file changed, 1 insertion(+) diff --git a/regionfixer_core/scan.py b/regionfixer_core/scan.py index 8356b0e..797e394 100644 --- a/regionfixer_core/scan.py +++ b/regionfixer_core/scan.py @@ -612,6 +612,7 @@ def console_scan_regionset(regionset, processes, entity_limit, scanners = [rs] titles = [entitle("Scanning separate region files", 0)] console_scan_loop(scanners, titles, verbose) + regionset.scanned = True def scan_data(scanned_dat_file): From be8093991a6eb1df769eaca1d69fa99392575b23 Mon Sep 17 00:00:00 2001 From: Alejandro Aguilera Date: Fri, 27 Mar 2015 00:39:58 +0100 Subject: [PATCH 048/151] Bump version number to 0.2.1. --- regionfixer_core/version.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/regionfixer_core/version.py b/regionfixer_core/version.py index 3dd24ea..935f5d8 100644 --- a/regionfixer_core/version.py +++ b/regionfixer_core/version.py @@ -4,5 +4,5 @@ @author: Alejandro ''' -version_string = "0.2.0" +version_string = "0.2.1" version_numbers = version_string.split('.') From ec120ae4fbc2f7b6650447f9080cf194f7bf9cce Mon Sep 17 00:00:00 2001 From: Alejandro Aguilera Date: Tue, 9 Jun 2015 18:11:58 +0200 Subject: [PATCH 049/151] Fix old and new player folders being swapped. --- regionfixer_core/world.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/regionfixer_core/world.py b/regionfixer_core/world.py index 6e8b9ac..2cb77db 100644 --- a/regionfixer_core/world.py +++ b/regionfixer_core/world.py @@ -751,8 +751,8 @@ def __init__(self, world_path): "The file doesn't exist") # Player files - PLAYERS_DIRECTORY = 'players' - OLD_PLAYERS_DIRECTORY = ' playerdata' + PLAYERS_DIRECTORY = 'playerdata' + OLD_PLAYERS_DIRECTORY = ' players' STRUCTURES_DIRECTORY = 'data' self.players = DataFileSet(join(self.path, PLAYERS_DIRECTORY), From 514f3e89569dd91a5049f35fa7a140e901523898 Mon Sep 17 00:00:00 2001 From: Simon Date: Sat, 30 Jul 2016 12:38:09 +0200 Subject: [PATCH 050/151] Fixed typo. 
--- gui/backups.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/gui/backups.py b/gui/backups.py index cc33218..968a67b 100644 --- a/gui/backups.py +++ b/gui/backups.py @@ -84,7 +84,7 @@ def are_there_files(self, list_dirs): def OnAddWorld(self, e): """ Called when the buttom Add is clicked. """ - dlg = wx.DirDialog(self, "Choose a Minecraf world folder") + dlg = wx.DirDialog(self, "Choose a Minecraft world folder") # Set the last path used dlg.SetPath(self.last_path) if dlg.ShowModal() == wx.ID_OK: From 852991650ec5cf3c00d601ad75ca0bc3df7387d4 Mon Sep 17 00:00:00 2001 From: Simon Date: Sat, 30 Jul 2016 12:38:56 +0200 Subject: [PATCH 051/151] Fixed typo. --- gui/main.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/gui/main.py b/gui/main.py index 8c302b9..88cfa6d 100644 --- a/gui/main.py +++ b/gui/main.py @@ -165,7 +165,7 @@ def OnHelp(self, e): def OnOpen(self, e): """ Called when the open world button is pressed. """ - dlg = wx.DirDialog(self, "Choose a Minecraf world folder") + dlg = wx.DirDialog(self, "Choose a Minecraft world folder") # Set the last path used dlg.SetPath(self.last_path) if dlg.ShowModal() == wx.ID_OK: From e8886e2cf92951353941049a99928db128f6d67b Mon Sep 17 00:00:00 2001 From: m Date: Wed, 12 Oct 2016 11:31:12 +0100 Subject: [PATCH 052/151] Spell change (resolves #50) Corrects Minecraf -> Minecraft in two files. --- gui/backups.py | 2 +- gui/main.py | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/gui/backups.py b/gui/backups.py index cc33218..968a67b 100644 --- a/gui/backups.py +++ b/gui/backups.py @@ -84,7 +84,7 @@ def are_there_files(self, list_dirs): def OnAddWorld(self, e): """ Called when the buttom Add is clicked. """ - dlg = wx.DirDialog(self, "Choose a Minecraf world folder") + dlg = wx.DirDialog(self, "Choose a Minecraft world folder") # Set the last path used dlg.SetPath(self.last_path) if dlg.ShowModal() == wx.ID_OK: diff --git a/gui/main.py b/gui/main.py index 8c302b9..88cfa6d 100644 --- a/gui/main.py +++ b/gui/main.py @@ -165,7 +165,7 @@ def OnHelp(self, e): def OnOpen(self, e): """ Called when the open world button is pressed. """ - dlg = wx.DirDialog(self, "Choose a Minecraf world folder") + dlg = wx.DirDialog(self, "Choose a Minecraft world folder") # Set the last path used dlg.SetPath(self.last_path) if dlg.ShowModal() == wx.ID_OK: From 7b8702259fc5741b310cf6131e41fa8ac2ac2c92 Mon Sep 17 00:00:00 2001 From: kasper Franz Date: Mon, 5 Mar 2018 15:05:16 +0000 Subject: [PATCH 053/151] Fixed pyhon file name --- README.rst | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/README.rst b/README.rst index 10d826a..0ca0f27 100644 --- a/README.rst +++ b/README.rst @@ -57,7 +57,7 @@ with your world. Usage ===== -You can read the program help running: "python region-fixer.py --help" +You can read the program help running: "python regionfixer.py --help" For usage examples and more info visit the wiki: From 3d11f02ed2412531c553e9f2a2b9c3943948d3fd Mon Sep 17 00:00:00 2001 From: Alejandro Aguilera Date: Tue, 28 Aug 2018 20:58:45 +0200 Subject: [PATCH 054/151] Old changes. 
--- regionfixer_core/scan.py | 1 + 1 file changed, 1 insertion(+) diff --git a/regionfixer_core/scan.py b/regionfixer_core/scan.py index 8356b0e..797e394 100644 --- a/regionfixer_core/scan.py +++ b/regionfixer_core/scan.py @@ -612,6 +612,7 @@ def console_scan_regionset(regionset, processes, entity_limit, scanners = [rs] titles = [entitle("Scanning separate region files", 0)] console_scan_loop(scanners, titles, verbose) + regionset.scanned = True def scan_data(scanned_dat_file): From 2c32e4cb6e7f047af833435d4b53f27ac905b784 Mon Sep 17 00:00:00 2001 From: Alejandro Aguilera Date: Wed, 29 Aug 2018 16:37:27 +0200 Subject: [PATCH 055/151] Update NBT library. --- nbt/CONTRIBUTORS.txt | 6 +- nbt/README.md | 1 + nbt/__init__.py | 2 +- nbt/chunk.py | 17 ++- nbt/nbt.py | 291 ++++++++++++++++++++++++++++++------------- nbt/region.py | 76 ++++++++--- nbt/world.py | 150 ++++++++++++++-------- 7 files changed, 386 insertions(+), 157 deletions(-) diff --git a/nbt/CONTRIBUTORS.txt b/nbt/CONTRIBUTORS.txt index 9857b67..a0ec799 100644 --- a/nbt/CONTRIBUTORS.txt +++ b/nbt/CONTRIBUTORS.txt @@ -1,11 +1,15 @@ d0sboots (David Walker) dtrauma (Thomas Roesner) Fenixin (Alejandro Aguilera) +fwaggle (Jamie Fraser) +k1988 (Terry Zhao) kamyu2 MacFreek (Freek Dijkstra) MidnightLightning (Brooks Boyd) MostAwesomeDude (Corbin Simpson) +psolyca (Damien) SBliven (Spencer Bliven) Stumpylog (Trenton Holmes) +suresttexas00 (Surest Texas) tWoolie (Thomas Woolford) -Xgkkp \ No newline at end of file +Xgkkp diff --git a/nbt/README.md b/nbt/README.md index 694b468..0083f5c 100644 --- a/nbt/README.md +++ b/nbt/README.md @@ -8,6 +8,7 @@ From The spec: read the full spec at http://www.minecraft.net/docs/NBT.txt [![Build Status](https://secure.travis-ci.org/twoolie/NBT.png?branch=master)](http://travis-ci.org/#!/twoolie/NBT) +[![Test Coverage Status](https://coveralls.io/repos/twoolie/NBT/badge.svg)](https://coveralls.io/r/twoolie/NBT) Usage: 1) Reading files. diff --git a/nbt/__init__.py b/nbt/__init__.py index 6fc5768..e0e92ad 100644 --- a/nbt/__init__.py +++ b/nbt/__init__.py @@ -4,7 +4,7 @@ # Documentation only automatically includes functions specified in __all__. # If you add more functions, please manually include them in doc/index.rst. -VERSION = (1, 4, 1) +VERSION = (1, 5, 0) """NBT version as tuple. Note that the major and minor revision number are always present, but the patch identifier (the 3rd number) is only used in 1.4.""" diff --git a/nbt/chunk.py b/nbt/chunk.py index 1897d14..484223d 100644 --- a/nbt/chunk.py +++ b/nbt/chunk.py @@ -1,6 +1,13 @@ """ Handles a single chunk of data (16x16x128 blocks) from a Minecraft save. -Chunk is currently McRegion only. + +WARNING: Chunk is currently McRegion only. +You likely should not use chunk, but instead just get the NBT datastructure, +and do the appropriate lookups and block conversions yourself. + +The authors decided to focus on NBT datastructure and Region files, +and are not actively working on chunk.py. +Code contributions to chunk.py are welcomed! 
""" from io import BytesIO from struct import pack, unpack @@ -11,7 +18,6 @@ class Chunk(object): def __init__(self, nbt): chunk_data = nbt['Level'] self.coords = chunk_data['xPos'],chunk_data['zPos'] - self.blocks = BlockArray(chunk_data['Blocks'].value, chunk_data['Data'].value) def get_coords(self): """Return the coordinates of this chunk.""" @@ -22,6 +28,13 @@ def __repr__(self): return "Chunk("+str(self.coords[0])+","+str(self.coords[1])+")" +class McRegionChunk(Chunk): + def __init__(self, nbt): + Chunk.__init__(self, nbt) + self.blocks = BlockArray(nbt['Level']['Blocks'].value, nbt['Level']['Data'].value) + +# TODO: Add class AnvilChunk(Chunk) + class BlockArray(object): """Convenience class for dealing with a Block/data byte array.""" def __init__(self, blocksBytes=None, dataBytes=None): diff --git a/nbt/nbt.py b/nbt/nbt.py index 312381b..46ccac1 100644 --- a/nbt/nbt.py +++ b/nbt/nbt.py @@ -4,17 +4,15 @@ from struct import Struct, error as StructError from gzip import GzipFile -import zlib from collections import MutableMapping, MutableSequence, Sequence -import os, io - -try: - unicode - basestring -except NameError: - unicode = str # compatibility for Python 3 - basestring = str # compatibility for Python 3 +import sys +_PY3 = sys.version_info >= (3,) +if _PY3: + unicode = str + basestring = str +else: + range = xrange TAG_END = 0 TAG_BYTE = 1 @@ -28,11 +26,14 @@ TAG_LIST = 9 TAG_COMPOUND = 10 TAG_INT_ARRAY = 11 +TAG_LONG_ARRAY = 12 + class MalformedFileError(Exception): """Exception raised on parse error.""" pass + class TAG(object): """TAG, a variable with an intrinsic name.""" id = None @@ -41,114 +42,141 @@ def __init__(self, value=None, name=None): self.name = name self.value = value - #Parsers and Generators + # Parsers and Generators def _parse_buffer(self, buffer): raise NotImplementedError(self.__class__.__name__) def _render_buffer(self, buffer): raise NotImplementedError(self.__class__.__name__) - #Printing and Formatting of tree + # Printing and Formatting of tree def tag_info(self): """Return Unicode string with class, name and unnested value.""" - return self.__class__.__name__ + \ - ('(%r)' % self.name if self.name else "") + \ - ": " + self.valuestr() + return self.__class__.__name__ + ( + '(%r)' % self.name if self.name + else "") + ": " + self.valuestr() + def valuestr(self): - """Return Unicode string of unnested value. For iterators, this returns a summary.""" + """Return Unicode string of unnested value. For iterators, this + returns a summary.""" return unicode(self.value) def pretty_tree(self, indent=0): - """Return formated Unicode string of self, where iterable items are recursively listed in detail.""" - return ("\t"*indent) + self.tag_info() + """Return formated Unicode string of self, where iterable items are + recursively listed in detail.""" + return ("\t" * indent) + self.tag_info() # Python 2 compatibility; Python 3 uses __str__ instead. def __unicode__(self): - """Return a unicode string with the result in human readable format. Unlike valuestr(), the result is recursive for iterators till at least one level deep.""" + """Return a unicode string with the result in human readable format. + Unlike valuestr(), the result is recursive for iterators till at least + one level deep.""" return unicode(self.value) def __str__(self): - """Return a string (ascii formated for Python 2, unicode for Python 3) with the result in human readable format. 
Unlike valuestr(), the result is recursive for iterators till at least one level deep.""" + """Return a string (ascii formated for Python 2, unicode for Python 3) + with the result in human readable format. Unlike valuestr(), the result + is recursive for iterators till at least one level deep.""" return str(self.value) + # Unlike regular iterators, __repr__() is not recursive. # Use pretty_tree for recursive results. - # iterators should use __repr__ or tag_info for each item, like regular iterators + # iterators should use __repr__ or tag_info for each item, like + # regular iterators def __repr__(self): - """Return a string (ascii formated for Python 2, unicode for Python 3) describing the class, name and id for debugging purposes.""" - return "<%s(%r) at 0x%x>" % (self.__class__.__name__,self.name,id(self)) + """Return a string (ascii formated for Python 2, unicode for Python 3) + describing the class, name and id for debugging purposes.""" + return "<%s(%r) at 0x%x>" % ( + self.__class__.__name__, self.name, id(self)) + class _TAG_Numeric(TAG): """_TAG_Numeric, comparable to int with an intrinsic name""" + def __init__(self, value=None, name=None, buffer=None): super(_TAG_Numeric, self).__init__(value, name) if buffer: self._parse_buffer(buffer) - #Parsers and Generators + # Parsers and Generators def _parse_buffer(self, buffer): - # Note: buffer.read() may raise an IOError, for example if buffer is a corrupt gzip.GzipFile + # Note: buffer.read() may raise an IOError, for example if buffer is a + # corrupt gzip.GzipFile self.value = self.fmt.unpack(buffer.read(self.fmt.size))[0] def _render_buffer(self, buffer): buffer.write(self.fmt.pack(self.value)) + class _TAG_End(TAG): id = TAG_END fmt = Struct(">b") def _parse_buffer(self, buffer): - # Note: buffer.read() may raise an IOError, for example if buffer is a corrupt gzip.GzipFile + # Note: buffer.read() may raise an IOError, for example if buffer is a + # corrupt gzip.GzipFile value = self.fmt.unpack(buffer.read(1))[0] if value != 0: - raise ValueError("A Tag End must be rendered as '0', not as '%d'." % (value)) + raise ValueError( + "A Tag End must be rendered as '0', not as '%d'." 
% value) def _render_buffer(self, buffer): buffer.write(b'\x00') -#== Value Tags ==# + +# == Value Tags ==# class TAG_Byte(_TAG_Numeric): """Represent a single tag storing 1 byte.""" id = TAG_BYTE fmt = Struct(">b") + class TAG_Short(_TAG_Numeric): """Represent a single tag storing 1 short.""" id = TAG_SHORT fmt = Struct(">h") + class TAG_Int(_TAG_Numeric): """Represent a single tag storing 1 int.""" id = TAG_INT fmt = Struct(">i") """Struct(">i"), 32-bits integer, big-endian""" + class TAG_Long(_TAG_Numeric): """Represent a single tag storing 1 long.""" id = TAG_LONG fmt = Struct(">q") + class TAG_Float(_TAG_Numeric): """Represent a single tag storing 1 IEEE-754 floating point number.""" id = TAG_FLOAT fmt = Struct(">f") + class TAG_Double(_TAG_Numeric): - """Represent a single tag storing 1 IEEE-754 double precision floating point number.""" + """Represent a single tag storing 1 IEEE-754 double precision floating + point number.""" id = TAG_DOUBLE fmt = Struct(">d") + class TAG_Byte_Array(TAG, MutableSequence): """ TAG_Byte_Array, comparable to a collections.UserList with an intrinsic name whose values must be bytes """ id = TAG_BYTE_ARRAY + def __init__(self, name=None, buffer=None): + # TODO: add a value parameter as well super(TAG_Byte_Array, self).__init__(name=name) if buffer: self._parse_buffer(buffer) - #Parsers and Generators + # Parsers and Generators def _parse_buffer(self, buffer): length = TAG_Int(buffer=buffer) self.value = bytearray(buffer.read(length.value)) @@ -176,20 +204,22 @@ def __setitem__(self, key, value): self.value[key] = value def __delitem__(self, key): - del(self.value[key]) + del (self.value[key]) def insert(self, key, value): # TODO: check type of value, or is this done by self.value already? self.value.insert(key, value) - #Printing and Formatting of tree + # Printing and Formatting of tree def valuestr(self): return "[%i byte(s)]" % len(self.value) def __unicode__(self): - return '['+",".join([unicode(x) for x in self.value])+']' + return '[' + ",".join([unicode(x) for x in self.value]) + ']' + def __str__(self): - return '['+",".join([str(x) for x in self.value])+']' + return '[' + ",".join([str(x) for x in self.value]) + ']' + class TAG_Int_Array(TAG, MutableSequence): """ @@ -197,7 +227,9 @@ class TAG_Int_Array(TAG, MutableSequence): an intrinsic name whose values must be integers """ id = TAG_INT_ARRAY + def __init__(self, name=None, buffer=None): + # TODO: add a value parameter as well super(TAG_Int_Array, self).__init__(name=name) if buffer: self._parse_buffer(buffer) @@ -206,7 +238,7 @@ def update_fmt(self, length): """ Adjust struct format description to length given """ self.fmt = Struct(">" + str(length) + "i") - #Parsers and Generators + # Parsers and Generators def _parse_buffer(self, buffer): length = TAG_Int(buffer=buffer).value self.update_fmt(length) @@ -235,28 +267,84 @@ def __setitem__(self, key, value): self.value[key] = value def __delitem__(self, key): - del(self.value[key]) + del (self.value[key]) def insert(self, key, value): self.value.insert(key, value) - #Printing and Formatting of tree + # Printing and Formatting of tree def valuestr(self): return "[%i int(s)]" % len(self.value) +class TAG_Long_Array(TAG, MutableSequence): + """ + TAG_Long_Array, comparable to a collections.UserList with + an intrinsic name whose values must be integers + """ + id = TAG_LONG_ARRAY + + def __init__(self, name=None, buffer=None): + super(TAG_Long_Array, self).__init__(name=name) + if buffer: + self._parse_buffer(buffer) + + def update_fmt(self, 
length): + """ Adjust struct format description to length given """ + self.fmt = Struct(">" + str(length) + "q") + + # Parsers and Generators + def _parse_buffer(self, buffer): + length = TAG_Int(buffer=buffer).value + self.update_fmt(length) + self.value = list(self.fmt.unpack(buffer.read(self.fmt.size))) + + def _render_buffer(self, buffer): + length = len(self.value) + self.update_fmt(length) + TAG_Int(length)._render_buffer(buffer) + buffer.write(self.fmt.pack(*self.value)) + + # Mixin methods + def __len__(self): + return len(self.value) + + def __iter__(self): + return iter(self.value) + + def __contains__(self, item): + return item in self.value + + def __getitem__(self, key): + return self.value[key] + + def __setitem__(self, key, value): + self.value[key] = value + + def __delitem__(self, key): + del (self.value[key]) + + def insert(self, key, value): + self.value.insert(key, value) + + # Printing and Formatting of tree + def valuestr(self): + return "[%i long(s)]" % len(self.value) + + class TAG_String(TAG, Sequence): """ TAG_String, comparable to a collections.UserString with an intrinsic name """ id = TAG_STRING + def __init__(self, value=None, name=None, buffer=None): super(TAG_String, self).__init__(value, name) if buffer: self._parse_buffer(buffer) - #Parsers and Generators + # Parsers and Generators def _parse_buffer(self, buffer): length = TAG_Short(buffer=buffer) read = buffer.read(length.value) @@ -283,16 +371,18 @@ def __contains__(self, item): def __getitem__(self, key): return self.value[key] - #Printing and Formatting of tree + # Printing and Formatting of tree def __repr__(self): return self.value -#== Collection Tags ==# + +# == Collection Tags ==# class TAG_List(TAG, MutableSequence): """ TAG_List, comparable to a collections.UserList with an intrinsic name """ id = TAG_LIST + def __init__(self, type=None, value=None, name=None, buffer=None): super(TAG_List, self).__init__(value, name) if type: @@ -302,10 +392,10 @@ def __init__(self, type=None, value=None, name=None, buffer=None): self.tags = [] if buffer: self._parse_buffer(buffer) - if self.tagID == None: - raise ValueError("No type specified for list: %s" % (name)) + # if self.tagID == None: + # raise ValueError("No type specified for list: %s" % (name)) - #Parsers and Generators + # Parsers and Generators def _parse_buffer(self, buffer): self.tagID = TAG_Byte(buffer=buffer).value self.tags = [] @@ -319,8 +409,9 @@ def _render_buffer(self, buffer): length._render_buffer(buffer) for i, tag in enumerate(self.tags): if tag.id != self.tagID: - raise ValueError("List element %d(%s) has type %d != container type %d" % - (i, tag, tag.id, self.tagID)) + raise ValueError( + "List element %d(%s) has type %d != container type %d" % + (i, tag, tag.id, self.tagID)) tag._render_buffer(buffer) # Mixin methods @@ -340,66 +431,73 @@ def __setitem__(self, key, value): self.tags[key] = value def __delitem__(self, key): - del(self.tags[key]) + del (self.tags[key]) def insert(self, key, value): self.tags.insert(key, value) - #Printing and Formatting of tree + # Printing and Formatting of tree def __repr__(self): - return "%i entries of type %s" % (len(self.tags), TAGLIST[self.tagID].__name__) + return "%i entries of type %s" % ( + len(self.tags), TAGLIST[self.tagID].__name__) - #Printing and Formatting of tree + # Printing and Formatting of tree def valuestr(self): return "[%i %s(s)]" % (len(self.tags), TAGLIST[self.tagID].__name__) + def __unicode__(self): - return "["+", ".join([tag.tag_info() for tag in self.tags])+"]" + return 
"[" + ", ".join([tag.tag_info() for tag in self.tags]) + "]" + def __str__(self): - return "["+", ".join([tag.tag_info() for tag in self.tags])+"]" + return "[" + ", ".join([tag.tag_info() for tag in self.tags]) + "]" def pretty_tree(self, indent=0): output = [super(TAG_List, self).pretty_tree(indent)] if len(self.tags): - output.append(("\t"*indent) + "{") + output.append(("\t" * indent) + "{") output.extend([tag.pretty_tree(indent + 1) for tag in self.tags]) - output.append(("\t"*indent) + "}") + output.append(("\t" * indent) + "}") return '\n'.join(output) + class TAG_Compound(TAG, MutableMapping): """ TAG_Compound, comparable to a collections.OrderedDict with an intrinsic name """ id = TAG_COMPOUND - def __init__(self, buffer=None): + + def __init__(self, buffer=None, name=None): + # TODO: add a value parameter as well super(TAG_Compound, self).__init__() self.tags = [] self.name = "" if buffer: self._parse_buffer(buffer) - #Parsers and Generators + # Parsers and Generators def _parse_buffer(self, buffer): while True: type = TAG_Byte(buffer=buffer) if type.value == TAG_END: - #print("found tag_end") + # print("found tag_end") break else: name = TAG_String(buffer=buffer).value try: - tag = TAGLIST[type.value](buffer=buffer) - tag.name = name - self.tags.append(tag) + tag = TAGLIST[type.value]() except KeyError: - raise ValueError("Unrecognised tag type") + raise ValueError("Unrecognised tag type %d" % type.value) + tag.name = name + self.tags.append(tag) + tag._parse_buffer(buffer) def _render_buffer(self, buffer): for tag in self.tags: TAG_Byte(tag.id)._render_buffer(buffer) TAG_String(tag.name)._render_buffer(buffer) tag._render_buffer(buffer) - buffer.write(b'\x00') #write TAG_END + buffer.write(b'\x00') # write TAG_END # Mixin methods def __len__(self): @@ -431,7 +529,9 @@ def __getitem__(self, key): else: raise KeyError("Tag %s does not exist" % key) else: - raise TypeError("key needs to be either name of tag, or index of tag, not a %s" % type(key).__name__) + raise TypeError( + "key needs to be either name of tag, or index of tag, " + "not a %s" % type(key).__name__) def __setitem__(self, key, value): assert isinstance(value, TAG), "value must be an nbt.TAG" @@ -448,11 +548,12 @@ def __setitem__(self, key, value): def __delitem__(self, key): if isinstance(key, int): - del(self.tags[key]) + del (self.tags[key]) elif isinstance(key, basestring): self.tags.remove(self.__getitem__(key)) else: - raise ValueError("key needs to be either name of tag, or index of tag") + raise ValueError( + "key needs to be either name of tag, or index of tag") def keys(self): return [tag.name for tag in self.tags] @@ -461,11 +562,12 @@ def iteritems(self): for tag in self.tags: yield (tag.name, tag) - #Printing and Formatting of tree + # Printing and Formatting of tree def __unicode__(self): - return "{"+", ".join([tag.tag_info() for tag in self.tags])+"}" + return "{" + ", ".join([tag.tag_info() for tag in self.tags]) + "}" + def __str__(self): - return "{"+", ".join([tag.tag_info() for tag in self.tags])+"}" + return "{" + ", ".join([tag.tag_info() for tag in self.tags]) + "}" def valuestr(self): return '{%i Entries}' % len(self.tags) @@ -473,32 +575,41 @@ def valuestr(self): def pretty_tree(self, indent=0): output = [super(TAG_Compound, self).pretty_tree(indent)] if len(self.tags): - output.append(("\t"*indent) + "{") + output.append(("\t" * indent) + "{") output.extend([tag.pretty_tree(indent + 1) for tag in self.tags]) - output.append(("\t"*indent) + "}") + output.append(("\t" * indent) + "}") 
return '\n'.join(output) -TAGLIST = {TAG_END: _TAG_End, TAG_BYTE:TAG_Byte, TAG_SHORT:TAG_Short, TAG_INT:TAG_Int, TAG_LONG:TAG_Long, TAG_FLOAT:TAG_Float, TAG_DOUBLE:TAG_Double, TAG_BYTE_ARRAY:TAG_Byte_Array, TAG_STRING:TAG_String, TAG_LIST:TAG_List, TAG_COMPOUND:TAG_Compound, TAG_INT_ARRAY:TAG_Int_Array} +TAGLIST = {TAG_END: _TAG_End, TAG_BYTE: TAG_Byte, TAG_SHORT: TAG_Short, + TAG_INT: TAG_Int, TAG_LONG: TAG_Long, TAG_FLOAT: TAG_Float, + TAG_DOUBLE: TAG_Double, TAG_BYTE_ARRAY: TAG_Byte_Array, + TAG_STRING: TAG_String, TAG_LIST: TAG_List, + TAG_COMPOUND: TAG_Compound, TAG_INT_ARRAY: TAG_Int_Array, + TAG_LONG_ARRAY: TAG_Long_Array} + class NBTFile(TAG_Compound): """Represent an NBT file object.""" + def __init__(self, filename=None, buffer=None, fileobj=None): """ Create a new NBTFile object. Specify either a filename, file object or data buffer. If filename of file object is specified, data should be GZip-compressed. If a data buffer is specified, it is assumed to be uncompressed. - + If filename is specified, the file is closed after reading and writing. - If file object is specified, the caller is responsible for closing the file. + If file object is specified, the caller is responsible for closing the + file. """ super(NBTFile, self).__init__() self.filename = filename self.type = TAG_Byte(self.id) closefile = True - #make a file object + # make a file object if filename: + self.filename = filename self.file = GzipFile(filename, 'rb') elif buffer: if hasattr(buffer, 'name'): @@ -512,11 +623,11 @@ def __init__(self, filename=None, buffer=None, fileobj=None): else: self.file = None closefile = False - #parse the file given initially + # parse the file given initially if self.file: self.parse_file() if closefile: - # Note: GzipFile().close() does NOT close the fileobj, + # Note: GzipFile().close() does NOT close the fileobj, # So we are still responsible for closing that. try: self.file.close() @@ -545,11 +656,16 @@ def parse_file(self, filename=None, buffer=None, fileobj=None): self.name = name self.file.close() else: - raise MalformedFileError("First record is not a Compound Tag") + raise MalformedFileError( + "First record is not a Compound Tag") except StructError as e: - raise MalformedFileError("Partial File Parse: file possibly truncated.") + raise MalformedFileError( + "Partial File Parse: file possibly truncated.") else: - raise ValueError("NBTFile.parse_file(): Need to specify either a filename or a file object") + raise ValueError( + "NBTFile.parse_file(): Need to specify either a " + "filename or a file object" + ) def write_file(self, filename=None, buffer=None, fileobj=None): """Write this NBT file to a file.""" @@ -567,12 +683,15 @@ def write_file(self, filename=None, buffer=None, fileobj=None): elif self.filename: self.file = GzipFile(self.filename, "wb") elif not self.file: - raise ValueError("NBTFile.write_file(): Need to specify either a filename or a file object") - #Render tree to file + raise ValueError( + "NBTFile.write_file(): Need to specify either a " + "filename or a file object" + ) + # Render tree to file TAG_Byte(self.id)._render_buffer(self.file) TAG_String(self.name)._render_buffer(self.file) self._render_buffer(self.file) - #make sure the file is complete + # make sure the file is complete try: self.file.flush() except (AttributeError, IOError): @@ -590,8 +709,12 @@ def __repr__(self): debugging purposes. 
""" if self.filename: - return "<%s(%r) with %s(%r) at 0x%x>" % (self.__class__.__name__, self.filename, \ - TAG_Compound.__name__, self.name, id(self)) + return "<%s(%r) with %s(%r) at 0x%x>" % ( + self.__class__.__name__, self.filename, + TAG_Compound.__name__, self.name, id(self) + ) else: - return "<%s with %s(%r) at 0x%x>" % (self.__class__.__name__, \ - TAG_Compound.__name__, self.name, id(self)) + return "<%s with %s(%r) at 0x%x>" % ( + self.__class__.__name__, TAG_Compound.__name__, + self.name, id(self) + ) diff --git a/nbt/region.py b/nbt/region.py index ba8d9fa..90338ac 100644 --- a/nbt/region.py +++ b/nbt/region.py @@ -6,7 +6,6 @@ from .nbt import NBTFile, MalformedFileError from struct import pack, unpack -from gzip import GzipFile from collections import Mapping import zlib import gzip @@ -20,6 +19,8 @@ SECTOR_LENGTH = 4096 """Constant indicating the length of a sector. A Region file is divided in sectors of 4096 bytes each.""" +# TODO: move status codes to an (Enum) object + # Status is a number representing: # -5 = Error, the chunk is overlapping with another chunk # -4 = Error, the chunk length is too large to fit in the sector length in the region header @@ -29,7 +30,7 @@ # 0 = Ok # 1 = Chunk non-existant yet STATUS_CHUNK_OVERLAPPING = -5 -"""Constant indicating an error status: the chunk is allocated a sector already occupied by another chunk""" +"""Constant indicating an error status: the chunk is allocated to a sector already occupied by another chunk""" STATUS_CHUNK_MISMATCHED_LENGTHS = -4 """Constant indicating an error status: the region header length and the chunk length are incompatible""" STATUS_CHUNK_ZERO_LENGTH = -3 @@ -44,11 +45,11 @@ """Constant indicating an normal status: the chunk does not exist""" COMPRESSION_NONE = 0 -"""Constant indicating tha tthe chunk is not compressed.""" +"""Constant indicating that the chunk is not compressed.""" COMPRESSION_GZIP = 1 -"""Constant indicating tha tthe chunk is GZip compressed.""" +"""Constant indicating that the chunk is GZip compressed.""" COMPRESSION_ZLIB = 2 -"""Constant indicating tha tthe chunk is zlib compressed.""" +"""Constant indicating that the chunk is zlib compressed.""" # TODO: reconsider these errors. where are they catched? Where would an implementation make a difference in handling the different exceptions. @@ -139,7 +140,7 @@ def __getitem__(self, xz): m = self.metadata[xz] return (m.blockstart, m.blocklength, m.timestamp, m.status) def __iter__(self): - return iter(self.metadata) # iterates of the keys + return iter(self.metadata) # iterates over the keys def __len__(self): return len(self.metadata) class _ChunkHeaderWrapper(Mapping): @@ -150,16 +151,24 @@ def __getitem__(self, xz): m = self.metadata[xz] return (m.length if m.length > 0 else None, m.compression, m.status) def __iter__(self): - return iter(self.metadata) # iterates of the keys + return iter(self.metadata) # iterates over the keys def __len__(self): return len(self.metadata) +class Location(object): + def __init__(self, x=None, y=None, z=None): + self.x = x + self.y = y + self.z = z + def __str__(self): + return "%s(x=%s, y=%s, z=%s)" % (self.__class__.__name__, self.x, self.y, self.z) + class RegionFile(object): """A convenience class for extracting NBT files from the Minecraft Beta Region Format.""" # Redefine constants for backward compatibility. 
STATUS_CHUNK_OVERLAPPING = STATUS_CHUNK_OVERLAPPING - """Constant indicating an error status: the chunk is allocated a sector + """Constant indicating an error status: the chunk is allocated to a sector already occupied by another chunk. Deprecated. Use :const:`nbt.region.STATUS_CHUNK_OVERLAPPING` instead.""" STATUS_CHUNK_MISMATCHED_LENGTHS = STATUS_CHUNK_MISMATCHED_LENGTHS @@ -244,6 +253,9 @@ def __init__(self, filename=None, fileobj=None): Deprecated. Use :attr:`metadata` instead. """ + self.loc = Location() + """Optional: x,z location of a region within a world.""" + self._init_header() self._parse_header() self._parse_chunk_headers() @@ -264,6 +276,13 @@ def _bytes_to_sector(bsize, sectorlength=SECTOR_LENGTH): return sectors if remainder == 0 else sectors + 1 def close(self): + """ + Clean up resources after use. + + Note that the instance is no longer readable nor writable after calling close(). + The method is automatically called by garbage collectors, but made public to + allow explicit cleanup. + """ if self._closefile: try: self.file.close() @@ -463,9 +482,15 @@ def __iter__(self): return self.iter_chunks() def get_timestamp(self, x, z): - """Return the timestamp of when this region file was last modified.""" - # TODO: raise an exception if chunk does not exist? - # TODO: return a datetime.datetime object using datetime.fromtimestamp() + """ + Return the timestamp of when this region file was last modified. + + Note that this returns the timestamp as-is. A timestamp may exist, + while the chunk does not, or it may return a timestamp of 0 even + while the chunk exists. + + To convert to an actual date, use `datetime.fromtimestamp()`. + """ return self.metadata[x,z].timestamp def chunk_count(self): @@ -484,7 +509,7 @@ def get_blockdata(self, x, z): # read metadata block m = self.metadata[x, z] if m.status == STATUS_CHUNK_NOT_CREATED: - raise InconceivedChunk("Chunk is not created") + raise InconceivedChunk("Chunk %d,%d is not present in region" % (x,z)) elif m.status == STATUS_CHUNK_IN_HEADER: raise RegionHeaderError('Chunk %d,%d is in the region header' % (x,z)) elif m.status == STATUS_CHUNK_OUT_OF_FILE and (m.length <= 1 or m.compression == None): @@ -545,11 +570,18 @@ def get_nbt(self, x, z): Return a NBTFile of the specified chunk. Raise InconceivedChunk if the chunk is not included in the file. """ + # TODO: cache results? data = self.get_blockdata(x, z) # This may raise a RegionFileFormatError. data = BytesIO(data) err = None try: - return NBTFile(buffer=data) + nbt = NBTFile(buffer=data) + if self.loc.x != None: + x += self.loc.x*32 + if self.loc.z != None: + z += self.loc.z*32 + nbt.loc = Location(x=x, z=z) + return nbt # this may raise a MalformedFileError. Convert to ChunkDataError. except MalformedFileError as e: err = '%s' % e # avoid str(e) due to Unicode issues in Python 2. @@ -566,12 +598,24 @@ def get_chunk(self, x, z): """ return self.get_nbt(x, z) - def write_blockdata(self, x, z, data): + def write_blockdata(self, x, z, data, compression=COMPRESSION_ZLIB): """ Compress the data, write it to file, and add pointers in the header so it can be found as chunk(x,z). """ - data = zlib.compress(data) # use zlib compression, rather than Gzip + if compression == COMPRESSION_GZIP: + # Python 3.1 and earlier do not yet support `data = gzip.compress(data)`. 
+ compressed_file = BytesIO() + f = gzip.GzipFile(fileobj=compressed_file) + f.write(data) + f.close() + compressed_file.seek(0) + data = compressed_file.read() + del compressed_file + elif compression == COMPRESSION_ZLIB: + data = zlib.compress(data) # use zlib compression, rather than Gzip + elif compression != COMPRESSION_NONE: + raise ValueError("Unknown compression type %d" % compression) length = len(data) # 5 extra bytes are required for the chunk block header @@ -599,7 +643,7 @@ def write_blockdata(self, x, z, data): # write out chunk to region self.file.seek(sector*SECTOR_LENGTH) self.file.write(pack(">I", length + 1)) #length field - self.file.write(pack(">B", COMPRESSION_ZLIB)) #compression field + self.file.write(pack(">B", compression)) #compression field self.file.write(data) #compressed data # Write zeros up to the end of the chunk diff --git a/nbt/world.py b/nbt/world.py index 0555fbc..3689dfa 100644 --- a/nbt/world.py +++ b/nbt/world.py @@ -5,7 +5,7 @@ import os, glob, re from . import region from . import chunk -from .region import InconceivedChunk +from .region import InconceivedChunk, Location class UnknownWorldFormat(Exception): """Unknown or invalid world folder.""" @@ -13,7 +13,6 @@ def __init__(self, msg=""): self.msg = msg - class _BaseWorldFolder(object): """ Abstract class, representing either a McRegion or Anvil world folder. @@ -21,6 +20,8 @@ class _BaseWorldFolder(object): Simply calling WorldFolder() will do this automatically. """ type = "Generic" + extension = '' + chunkclass = chunk.Chunk def __init__(self, world_folder): """Initialize a WorldFolder.""" @@ -34,6 +35,9 @@ def __init__(self, world_folder): self.set_regionfiles(self.get_filenames()) def get_filenames(self): + """Find all matching file names in the world folder. + + This method is private, and it's use it deprecated. Use get_regionfiles() instead.""" # Warning: glob returns a empty list if the directory is unreadable, without raising an Exception return list(glob.glob(os.path.join(self.worldfolder,'region','r.*.*.'+self.extension))) @@ -59,14 +63,14 @@ def set_regionfiles(self, filenames): pass self.regionfiles[(x,z)] = filename - def nonempty(self): - """Return True is the world is non-empty.""" - return len(self.regionfiles) > 0 - def get_regionfiles(self): """Return a list of full path of all region files.""" return list(self.regionfiles.values()) + def nonempty(self): + """Return True is the world is non-empty.""" + return len(self.regionfiles) > 0 + def get_region(self, x,z): """Get a region using x,z coordinates of a region. Cache results.""" if (x,z) not in self.regions: @@ -75,47 +79,62 @@ def get_region(self, x,z): else: # Return an empty RegionFile object # TODO: this does not yet allow for saving of the region file + # TODO: this currently fails with a ValueError! + # TODO: generate the correct name, and create the file + # and add the fie to self.regionfiles self.regions[(x,z)] = region.RegionFile() + self.regions[(x,z)].loc = Location(x=x,z=z) return self.regions[(x,z)] def iter_regions(self): - for x,z in self.regionfiles.keys(): - yield self.get_region(x,z) - - def iter_nbt(self): """ - Return an iterable list of all NBT. Use this function if you only - want to loop through the chunks once, and don't need the block or data arrays. + Return an iterable list of all region files. Use this function if you only + want to loop through each region files once, and do not want to cache the results. 
""" # TODO: Implement BoundingBox # TODO: Implement sort order - for region in self.iter_regions(): - for c in region.iter_chunks(): - yield c + for x,z in self.regionfiles.keys(): + close_after_use = False + if (x,z) in self.regions: + regionfile = self.regions[(x,z)] + else: + # It is not yet cached. + # Get file, but do not cache later. + regionfile = region.RegionFile(self.regionfiles[(x,z)]) + regionfile.loc = Location(x=x,z=z) + close_after_use = True + try: + yield regionfile + finally: + if close_after_use: + regionfile.close() - def iter_chunks(self): + def call_for_each_region(self, callback_function, boundingbox=None): """ - Return an iterable list of all chunks. Use this function if you only - want to loop through the chunks once or have a very large world. - Use get_chunks() if you access the chunk list frequently and want to cache - the results. Use iter_nbt() if you are concerned about speed and don't want - to parse the block data. + Return an iterable that calls callback_function for each region file + in the world. This is equivalent to: + ``` + for the_region in iter_regions(): + yield callback_function(the_region) + ```` + + This function is threaded. It uses pickle to pass values between threads. + See [What can be pickled and unpickled?](https://docs.python.org/library/pickle.html#what-can-be-pickled-and-unpickled) in the Python documentation + for limitation on the output of `callback_function()`. """ - # TODO: Implement BoundingBox - # TODO: Implement sort order - for c in self.iter_nbt(): - yield self.chunkclass(c) + raise NotImplemented() def get_nbt(self,x,z): """ Return a NBT specified by the chunk coordinates x,z. Raise InconceivedChunk if the NBT file is not yet generated. To get a Chunk object, use get_chunk. """ - rx,x = divmod(x,32) - rz,z = divmod(z,32) - nbt = self.get_region(rx,rz).get_chunk(x,z) - if nbt == None: - raise InconceivedChunk("Chunk %s,%s not present in world" % (32*rx+x,32*rz+z)) + rx,cx = divmod(x,32) + rz,cz = divmod(z,32) + if (rx,rz) not in self.regions and (rx,rz) not in self.regionfiles: + raise InconceivedChunk("Chunk %s,%s is not present in world" % (x,z)) + nbt = self.get_region(rx,rz).get_nbt(cx,cz) + assert nbt != None return nbt def set_nbt(self,x,z,nbt): @@ -127,6 +146,32 @@ def set_nbt(self,x,z,nbt): raise NotImplemented() # TODO: implement + def iter_nbt(self): + """ + Return an iterable list of all NBT. Use this function if you only + want to loop through the chunks once, and don't need the block or data arrays. + """ + # TODO: Implement BoundingBox + # TODO: Implement sort order + for region in self.iter_regions(): + for c in region.iter_chunks(): + yield c + + def call_for_each_nbt(self, callback_function, boundingbox=None): + """ + Return an iterable that calls callback_function for each NBT structure + in the world. This is equivalent to: + ``` + for the_nbt in iter_nbt(): + yield callback_function(the_nbt) + ```` + + This function is threaded. It uses pickle to pass values between threads. + See [What can be pickled and unpickled?](https://docs.python.org/library/pickle.html#what-can-be-pickled-and-unpickled) in the Python documentation + for limitation on the output of `callback_function()`. + """ + raise NotImplemented() + def get_chunk(self,x,z): """ Return a chunk specified by the chunk coordinates x,z. Raise InconceivedChunk @@ -145,6 +190,19 @@ def get_chunks(self, boundingbox=None): self.chunks = list(self.iter_chunks()) return self.chunks + def iter_chunks(self): + """ + Return an iterable list of all chunks. 
Use this function if you only + want to loop through the chunks once or have a very large world. + Use get_chunks() if you access the chunk list frequently and want to cache + the results. Use iter_nbt() if you are concerned about speed and don't want + to parse the block data. + """ + # TODO: Implement BoundingBox + # TODO: Implement sort order + for c in self.iter_nbt(): + yield self.chunkclass(c) + def chunk_count(self): """Return a count of the chunks in this world folder.""" c = 0 @@ -166,26 +224,6 @@ def get_boundingbox(self): b.expand(x,None,z) return b - def cache_test(self): - """ - Debug routine: loop through all chunks, fetch them again by coordinates, - and check if the same object is returned. - """ - # TODO: make sure this test succeeds (at least True,True,False, preferable True,True,True) - # TODO: Move this function to test class. - for rx,rz in self.regionfiles.keys(): - region = self.get_region(rx,rz) - rx,rz = 32*rx,32*rz - for cc in region.get_chunk_coords(): - x,z = (rx+cc['x'],rz+cc['z']) - c1 = self.chunkclass(region.get_chunk(cc['x'],cc['z'])) - c2 = self.get_chunk(x,z) - correct_coords = (c2.get_coords() == (x,z)) - is_comparable = (c1 == c2) # test __eq__ function - is_equal = (id(c1) == id(c2)) # test if they point to the same memory location - # DEBUG (prints a tuple) - print((x,z,c1,c2,correct_coords,is_comparable,is_equal)) - def __repr__(self): return "%s(%r)" % (self.__class__.__name__,self.worldfolder) @@ -194,8 +232,8 @@ class McRegionWorldFolder(_BaseWorldFolder): """Represents a world save using the old McRegion format.""" type = "McRegion" extension = 'mcr' - chunkclass = chunk.Chunk - # chunkclass = chunk.McRegionChunk # TODO: change to McRegionChunk when done + chunkclass = chunk.McRegionChunk + class AnvilWorldFolder(_BaseWorldFolder): """Represents a world save using the new Anvil format.""" @@ -205,7 +243,7 @@ class AnvilWorldFolder(_BaseWorldFolder): # chunkclass = chunk.AnvilChunk # TODO: change to AnvilChunk when done -class _WorldFolderFactory(): +class _WorldFolderFactory(object): """Factory class: instantiate the subclassses in order, and the first instance whose nonempty() method returns True is returned. If no nonempty() returns True, a UnknownWorldFormat exception is raised.""" @@ -252,10 +290,16 @@ def expand(self,x,y,z): if self.maxz is None or z > self.maxz: self.maxz = z def lenx(self): + if self.maxx is None or self.minx is None: + return 0 return self.maxx-self.minx+1 def leny(self): + if self.maxy is None or self.miny is None: + return 0 return self.maxy-self.miny+1 def lenz(self): + if self.maxz is None or self.minz is None: + return 0 return self.maxz-self.minz+1 def __repr__(self): return "%s(%s,%s,%s,%s,%s,%s)" % (self.__class__.__name__,self.minx,self.maxx, From c5e63e10461b31ee50d81a44700cbbdf4263a1d1 Mon Sep 17 00:00:00 2001 From: Alejandro Aguilera Date: Wed, 29 Aug 2018 16:41:56 +0200 Subject: [PATCH 056/151] Fix for IndexError while replacing chunks. 
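The IndexError came from using the string method split() on the backup region path: called without arguments it splits on whitespace, so a path containing no spaces yields a one-element list and indexing [1] raises IndexError. os.path.split(), which world.py already imports as split, always returns a (head, tail) pair, so element 1 is the file name that get_region_coords() expects. A small illustration, with a path invented for the example:

    from os.path import split

    # Invented example path; note that it contains no whitespace.
    backup_region_path = "backups/world/region/r.0.0.mca"

    # str.split() splits on whitespace: one element, so [1] would raise IndexError.
    print backup_region_path.split()    # ['backups/world/region/r.0.0.mca']

    # os.path.split() returns (head, tail); element 1 is always the file name.
    print split(backup_region_path)[1]  # r.0.0.mca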
--- regionfixer_core/world.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/regionfixer_core/world.py b/regionfixer_core/world.py index 6e8b9ac..a13b008 100644 --- a/regionfixer_core/world.py +++ b/regionfixer_core/world.py @@ -903,7 +903,7 @@ def replace_problematic_chunks(self, backup_worlds, problem, entity_limit, delet # The backups world doesn't change, check if the # region_file is already scanned: try: - coords = get_region_coords(backup_region_path.split()[1]) + coords = get_region_coords(split(backup_region_path)[1]) r = scanned_regions[coords] except KeyError: from scan import scan_region_file From cccc2f7ef63f67cea82c01f753f74f030d135a4c Mon Sep 17 00:00:00 2001 From: Alejandro Aguilera Date: Wed, 20 Feb 2019 22:41:27 +0100 Subject: [PATCH 057/151] Bump version number. --- regionfixer_core/version.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/regionfixer_core/version.py b/regionfixer_core/version.py index 935f5d8..e9e6338 100644 --- a/regionfixer_core/version.py +++ b/regionfixer_core/version.py @@ -4,5 +4,5 @@ @author: Alejandro ''' -version_string = "0.2.1" +version_string = "0.2.2" version_numbers = version_string.split('.') From 6eaf7a11773a6df5d963ac79faeea7c6f0187e85 Mon Sep 17 00:00:00 2001 From: Alejandro Aguilera Date: Wed, 20 Feb 2019 23:37:41 +0100 Subject: [PATCH 058/151] Use python 2to3 script. --- gui/__init__.py | 6 +- gui/main.py | 2 +- gui/starter.py | 12 +- regionfixer.py | 52 +-- regionfixer_core/bug_reporter.py | 4 +- regionfixer_core/interactive.py | 322 +++++++++--------- regionfixer_core/progressbar/__init__.py | 49 +++ regionfixer_core/progressbar/compat.py | 44 +++ regionfixer_core/progressbar/progressbar.py | 305 +++++++++++++++++ regionfixer_core/progressbar/widgets.py | 359 ++++++++++++++++++++ regionfixer_core/scan.py | 36 +- regionfixer_core/util.py | 18 +- regionfixer_core/world.py | 76 ++--- 13 files changed, 1021 insertions(+), 264 deletions(-) create mode 100644 regionfixer_core/progressbar/__init__.py create mode 100644 regionfixer_core/progressbar/compat.py create mode 100644 regionfixer_core/progressbar/progressbar.py create mode 100644 regionfixer_core/progressbar/widgets.py diff --git a/gui/__init__.py b/gui/__init__.py index b8fa9d7..54ac455 100644 --- a/gui/__init__.py +++ b/gui/__init__.py @@ -1,6 +1,6 @@ #!/usr/bin/env python # -*- coding: utf-8 -*- -from main import MainWindow -from backups import BackupsWindow -from starter import Starter +from .main import MainWindow +from .backups import BackupsWindow +from .starter import Starter diff --git a/gui/main.py b/gui/main.py index 8c302b9..80dab91 100644 --- a/gui/main.py +++ b/gui/main.py @@ -6,7 +6,7 @@ from os.path import split, abspath from os import name as os_name -from backups import BackupsWindow +from .backups import BackupsWindow from regionfixer_core.scan import AsyncWorldRegionScanner, AsyncDataScanner,\ ChildProcessException from regionfixer_core import world diff --git a/gui/starter.py b/gui/starter.py index 19cd5f5..b2d764e 100644 --- a/gui/starter.py +++ b/gui/starter.py @@ -4,12 +4,12 @@ import wx import sys import traceback -from StringIO import StringIO +from io import StringIO -from main import MainWindow -from backups import BackupsWindow -from about import AboutWindow -from help import HelpWindow +from .main import MainWindow +from .backups import BackupsWindow +from .about import AboutWindow +from .help import HelpWindow from regionfixer_core.scan import ChildProcessException from regionfixer_core.bug_reporter import 
BugReporter @@ -41,7 +41,7 @@ def _excepthook(self, etype, value, tb): answer = dlg.ShowModal() if answer == wx.ID_YES: - print "Sending bug report!" + print("Sending bug report!") bugsender = BugReporter(error_str=s) success = bugsender.send() # Dialog with success or not of the ftp uploading diff --git a/regionfixer.py b/regionfixer.py index 5bccddc..40d9e81 100644 --- a/regionfixer.py +++ b/regionfixer.py @@ -57,19 +57,19 @@ def delete_bad_chunks(options, scanned_obj): options.delete_wrong_located, options.delete_entities, options.delete_shared_offset] - deleting = zip(options_delete, world.CHUNK_PROBLEMS) + deleting = list(zip(options_delete, world.CHUNK_PROBLEMS)) for delete, problem in deleting: status = world.CHUNK_STATUS_TEXT[problem] total = scanned_obj.count_chunks(problem) if delete: if total: text = ' Deleting chunks with status: {0} '.format(status) - print("\n{0:#^60}".format(text)) + print(("\n{0:#^60}".format(text))) counter = scanned_obj.remove_problematic_chunks(problem) - print("\nDeleted {0} chunks with status: {1}".format(counter, - status)) + print(("\nDeleted {0} chunks with status: {1}".format(counter, + status))) else: - print("No chunks to delete with status: {0}".format(status)) + print(("No chunks to delete with status: {0}".format(status))) def delete_bad_regions(options, scanned_obj): @@ -78,19 +78,19 @@ def delete_bad_regions(options, scanned_obj): with problems iterating through all the possible problems. """ print("") options_delete = [options.delete_too_small] - deleting = zip(options_delete, world.REGION_PROBLEMS) + deleting = list(zip(options_delete, world.REGION_PROBLEMS)) for delete, problem in deleting: status = world.REGION_STATUS_TEXT[problem] total = scanned_obj.count_regions(problem) if delete: if total: text = ' Deleting regions with status: {0} '.format(status) - print("{0:#^60}".format(text)) + print(("{0:#^60}".format(text))) counter = scanned_obj.remove_problematic_regions(problem) - print("Deleted {0} regions with status: {1}".format(counter, - status)) + print(("Deleted {0} regions with status: {1}".format(counter, + status))) else: - print("No regions to delete with status: {0}".format(status)) + print(("No regions to delete with status: {0}".format(status))) def main(): @@ -273,10 +273,10 @@ def main(): (options, args) = parser.parse_args() o = options - if sys.version_info[0] > 2: + if sys.version_info[0] > 5: print("") print("Minecraft Region Fixer only works with python 2.x") - print("(And you just tried to run it in python {0})".format(sys.version)) + print(("(And you just tried to run it in python {0})".format(sys.version))) print("") return 1 @@ -357,7 +357,7 @@ def main(): error("The entity limit must be at least 0!") print("\nWelcome to Region Fixer!") - print("(version: {0})".format(parser.version)) + print(("(version: {0})".format(parser.version))) # Do things with the option options args # Create a list of worlds containing the backups of the region files @@ -379,7 +379,7 @@ def main(): if len(regionset.regions) > 0: console_scan_regionset(regionset, o.processes, o.entity_limit, o.delete_entities, o.verbose) - print(regionset.generate_report(True)) + print((regionset.generate_report(True))) # Delete chunks delete_bad_chunks(options, regionset) @@ -401,14 +401,14 @@ def main(): # scan all the world folders for w in world_list: w_name = w.get_name() - print(entitle(' Scanning world: {0} '.format(w_name), 0)) + print((entitle(' Scanning world: {0} '.format(w_name), 0))) console_scan_world(w, o.processes, o.entity_limit, 
o.delete_entities, o.verbose) print("") - print(entitle('Scan results for: {0}'.format(w_name), 0)) - print(w.generate_report(True)) + print((entitle('Scan results for: {0}'.format(w_name), 0))) + print((w.generate_report(True))) # corrupted, wrong_located, entities_prob, shared_prob,\ # total_chunks, too_small_region, unreadable_region, total_regions\ @@ -423,17 +423,17 @@ def main(): o.replace_wrong_located, o.replace_entities, o.replace_shared_offset] - replacing = zip(options_replace, world.CHUNK_PROBLEMS_ITERATOR) + replacing = list(zip(options_replace, world.CHUNK_PROBLEMS_ITERATOR)) for replace, (problem, status, arg) in replacing: if replace: total = w.count_chunks(problem) if total: text = " Replacing chunks with status: {0} ".format(status) - print("{0:#^60}".format(text)) + print(("{0:#^60}".format(text))) fixed = w.replace_problematic_chunks(backup_worlds, problem, ent_lim, del_ent) - print("\n{0} replaced of a total of {1} chunks with status: {2}".format(fixed, total, status)) + print(("\n{0} replaced of a total of {1} chunks with status: {2}".format(fixed, total, status))) else: - print("No chunks to replace with status: {0}".format(status)) + print(("No chunks to replace with status: {0}".format(status))) elif any_chunk_replace_option and not backup_worlds: print("Info: Won't replace any chunk.") @@ -447,17 +447,17 @@ def main(): del_ent = options.delete_entities ent_lim = options.entity_limit options_replace = [o.replace_too_small] - replacing = zip(options_replace, world.REGION_PROBLEMS_ITERATOR) + replacing = list(zip(options_replace, world.REGION_PROBLEMS_ITERATOR)) for replace, (problem, status, arg) in replacing: if replace: total = w.count_regions(problem) if total: text = " Replacing regions with status: {0} ".format(status) - print("{0:#^60}".format(text)) + print(("{0:#^60}".format(text))) fixed = w.replace_problematic_regions(backup_worlds, problem, ent_lim, del_ent) - print("\n{0} replaced of a total of {1} regions with status: {2}".format(fixed, total, status)) + print(("\n{0} replaced of a total of {1} regions with status: {2}".format(fixed, total, status))) else: - print("No region to replace with status: {0}".format(status)) + print(("No region to replace with status: {0}".format(status))) elif any_region_replace_option and not backup_worlds: print("Info: Won't replace any regions.") @@ -487,7 +487,7 @@ def main(): f.write(summary_text) f.write('\n') f.close() - print("Log file saved in \'{0}\'.".format(options.summary)) + print(("Log file saved in \'{0}\'.".format(options.summary))) except: print("Something went wrong while saving the log file!") diff --git a/regionfixer_core/bug_reporter.py b/regionfixer_core/bug_reporter.py index ddbf8cb..6fd2332 100644 --- a/regionfixer_core/bug_reporter.py +++ b/regionfixer_core/bug_reporter.py @@ -7,8 +7,8 @@ import sys import ftplib import datetime -from StringIO import StringIO -from util import query_yes_no, get_str_from_traceback +from io import StringIO +from .util import query_yes_no, get_str_from_traceback SERVER = 'regionfixer.no-ip.org' diff --git a/regionfixer_core/interactive.py b/regionfixer_core/interactive.py index 5a7a23f..ee8a0af 100644 --- a/regionfixer_core/interactive.py +++ b/regionfixer_core/interactive.py @@ -22,10 +22,10 @@ # -import world +from . 
import world from cmd import Cmd -from scan import console_scan_world, console_scan_regionset +from .scan import console_scan_world, console_scan_regionset class InteractiveLoop(Cmd): @@ -50,7 +50,7 @@ def __init__(self, world_list, regionset, options, backup_worlds): # Possible args for chunks stuff possible_args = "" first = True - for i in world.CHUNK_PROBLEMS_ARGS.values() + ['all']: + for i in list(world.CHUNK_PROBLEMS_ARGS.values()) + ['all']: if not first: possible_args += ", " possible_args += i @@ -60,7 +60,7 @@ def __init__(self, world_list, regionset, options, backup_worlds): # Possible args for region stuff possible_args = "" first = True - for i in world.REGION_PROBLEMS_ARGS.values() + ['all']: + for i in list(world.REGION_PROBLEMS_ARGS.values()) + ['all']: if not first: possible_args += ", " possible_args += i @@ -75,41 +75,41 @@ def do_set(self,arg): mode """ args = arg.split() if len(args) > 2: - print "Error: too many parameters." + print("Error: too many parameters.") elif len(args) == 0: - print "Write \'help set\' to see a list of all possible variables" + print("Write \'help set\' to see a list of all possible variables") else: if args[0] == "entity-limit": if len(args) == 1: - print "entity-limit = {0}".format(self.options.entity_limit) + print("entity-limit = {0}".format(self.options.entity_limit)) else: try: if int(args[1]) >= 0: self.options.entity_limit = int(args[1]) - print "entity-limit = {0}".format(args[1]) - print "Updating chunk status..." + print("entity-limit = {0}".format(args[1])) + print("Updating chunk status...") self.current.rescan_entities(self.options) else: - print "Invalid value. Valid values are positive integers and zero" + print("Invalid value. Valid values are positive integers and zero") except ValueError: - print "Invalid value. Valid values are positive integers and zero" + print("Invalid value. Valid values are positive integers and zero") elif args[0] == "workload": if len(args) == 1: if self.current: - print "Current workload:\n{0}\n".format(self.current.__str__()) - print "List of possible worlds and region-sets (determined by the command used to run region-fixer):" + print("Current workload:\n{0}\n".format(self.current.__str__())) + print("List of possible worlds and region-sets (determined by the command used to run region-fixer):") number = 1 for w in self.world_list: - print " ### world{0} ###".format(number) + print(" ### world{0} ###".format(number)) number += 1 # add a tab and print - for i in w.__str__().split("\n"): print "\t" + i - print - print " ### regionset ###" - for i in self.regionset.__str__().split("\n"): print "\t" + i - print "\n(Use \"set workload world1\" or name_of_the_world or regionset to choose one)" + for i in w.__str__().split("\n"): print("\t" + i) + print() + print(" ### regionset ###") + for i in self.regionset.__str__().split("\n"): print("\t" + i) + print("\n(Use \"set workload world1\" or name_of_the_world or regionset to choose one)") else: a = args[1] @@ -118,53 +118,53 @@ def do_set(self,arg): number = int(args[1][-1]) - 1 try: self.current = self.world_list[number] - print "workload = {0}".format(self.current.world_path) + print("workload = {0}".format(self.current.world_path)) except IndexError: - print "This world is not in the list!" 
+ print("This world is not in the list!") elif a in self.world_names: for w in self.world_list: if w.name == args[1]: self.current = w - print "workload = {0}".format(self.current.world_path) + print("workload = {0}".format(self.current.world_path)) break else: - print "This world name is not on the list!" + print("This world name is not on the list!") elif args[1] == "regionset": if len(self.regionset): self.current = self.regionset - print "workload = set of region files" + print("workload = set of region files") else: - print "The region set is empty!" + print("The region set is empty!") else: - print "Invalid world number, world name or regionset." + print("Invalid world number, world name or regionset.") elif args[0] == "processes": if len(args) == 1: - print "processes = {0}".format(self.options.processes) + print("processes = {0}".format(self.options.processes)) else: try: if int(args[1]) > 0: self.options.processes = int(args[1]) - print "processes = {0}".format(args[1]) + print("processes = {0}".format(args[1])) else: - print "Invalid value. Valid values are positive integers." + print("Invalid value. Valid values are positive integers.") except ValueError: - print "Invalid value. Valid values are positive integers." + print("Invalid value. Valid values are positive integers.") elif args[0] == "verbose": if len(args) == 1: - print "verbose = {0}".format(str(self.options.verbose)) + print("verbose = {0}".format(str(self.options.verbose))) else: if args[1] == "True": self.options.verbose = True - print "verbose = {0}".format(args[1]) + print("verbose = {0}".format(args[1])) elif args[1] == "False": self.options.verbose = False - print "verbose = {0}".format(args[1]) + print("verbose = {0}".format(args[1])) else: - print "Invalid value. Valid values are True and False." + print("Invalid value. Valid values are True and False.") else: - print "Invalid argument! Write \'help set\' to see a list of valid variables." + print("Invalid argument! Write \'help set\' to see a list of valid variables.") def do_summary(self, arg): """ Prints a summary of all the problems found in the region @@ -174,23 +174,23 @@ def do_summary(self, arg): if self.current.scanned: text = self.current.generate_report(True) if text: - print text + print(text) else: - print "No problems found!" + print("No problems found!") else: - print "The world hasn't be scanned (or it needs a rescan). Use \'scan\' to scan it." + print("The world hasn't be scanned (or it needs a rescan). Use \'scan\' to scan it.") else: - print "No world/region-set is set! Use \'set workload\' to set a world/regionset to work with." + print("No world/region-set is set! Use \'set workload\' to set a world/regionset to work with.") else: - print "This command doesn't use any arguments." + print("This command doesn't use any arguments.") def do_current_workload(self, arg): """ Prints the info of the current workload """ if len(arg) == 0: - if self.current: print self.current - else: print "No world/region-set is set! Use \'set workload\' to set a world/regionset to work with." + if self.current: print(self.current) + else: print("No world/region-set is set! Use \'set workload\' to set a world/regionset to work with.") else: - print "This command doesn't use any arguments." + print("This command doesn't use any arguments.") def do_scan(self, arg): """ Scans the current workload. 
""" @@ -198,7 +198,7 @@ def do_scan(self, arg): # this would need an option to choose which of the two methods use o = self.options if len(arg.split()) > 0: - print "Error: too many parameters." + print("Error: too many parameters.") else: if self.current: if isinstance(self.current, world.World): @@ -207,179 +207,179 @@ def do_scan(self, arg): o.entity_limit, o.delete_entities, o.verbose) elif isinstance(self.current, world.RegionSet): - print "\n{0:-^60}".format(' Scanning region files ') + print("\n{0:-^60}".format(' Scanning region files ')) console_scan_regionset(self.current, o.processes, o.entity_limit, o.delete_entities, o.verbose) else: - print "No world set! Use \'set workload\'" + print("No world set! Use \'set workload\'") def do_count_chunks(self, arg): """ Counts the number of chunks with the given problem and prints the result """ if self.current and self.current.scanned: if len(arg.split()) == 0: - print "Possible counters are: {0}".format(self.possible_chunk_args_text) + print("Possible counters are: {0}".format(self.possible_chunk_args_text)) elif len(arg.split()) > 1: - print "Error: too many parameters." + print("Error: too many parameters.") else: - if arg in world.CHUNK_PROBLEMS_ARGS.values() or arg == 'all': + if arg in list(world.CHUNK_PROBLEMS_ARGS.values()) or arg == 'all': total = self.current.count_chunks(None) for problem, status_text, a in world.CHUNK_PROBLEMS_ITERATOR: if arg == 'all' or arg == a: n = self.current.count_chunks(problem) - print "Chunks with status \'{0}\': {1}".format(status_text, n) - print "Total chunks: {0}".format(total) + print("Chunks with status \'{0}\': {1}".format(status_text, n)) + print("Total chunks: {0}".format(total)) else: - print "Unknown counter." + print("Unknown counter.") else: - print "The world hasn't be scanned (or it needs a rescan). Use \'scan\' to scan it." + print("The world hasn't be scanned (or it needs a rescan). Use \'scan\' to scan it.") def do_count_regions(self, arg): """ Counts the number of regions with the given problem and prints the result """ if self.current and self.current.scanned: if len(arg.split()) == 0: - print "Possible counters are: {0}".format(self.possible_region_args_text) + print("Possible counters are: {0}".format(self.possible_region_args_text)) elif len(arg.split()) > 1: - print "Error: too many parameters." + print("Error: too many parameters.") else: - if arg in world.REGION_PROBLEMS_ARGS.values() or arg == 'all': + if arg in list(world.REGION_PROBLEMS_ARGS.values()) or arg == 'all': total = self.current.count_regions(None) for problem, status_text, a in world.REGION_PROBLEMS_ITERATOR: if arg == 'all' or arg == a: n = self.current.count_regions(problem) - print "Regions with status \'{0}\': {1}".format(status_text, n) - print "Total regions: {0}".format(total) + print("Regions with status \'{0}\': {1}".format(status_text, n)) + print("Total regions: {0}".format(total)) else: - print "Unknown counter." + print("Unknown counter.") else: - print "The world hasn't be scanned (or it needs a rescan). Use \'scan\' to scan it." + print("The world hasn't be scanned (or it needs a rescan). Use \'scan\' to scan it.") def do_count_all(self, arg): """ Print all the counters for chunks and regions. 
""" if self.current and self.current.scanned: if len(arg.split()) > 0: - print "This command doesn't requiere any arguments" + print("This command doesn't requiere any arguments") else: - print "{0:#^60}".format("Chunk problems:") + print("{0:#^60}".format("Chunk problems:")) self.do_count_chunks('all') - print "\n" - print "{0:#^60}".format("Region problems:") + print("\n") + print("{0:#^60}".format("Region problems:")) self.do_count_regions('all') else: - print "The world hasn't be scanned (or it needs a rescan). Use \'scan\' to scan it." + print("The world hasn't be scanned (or it needs a rescan). Use \'scan\' to scan it.") def do_remove_entities(self, arg): if self.current and self.current.scanned: if len(arg.split()) > 0: - print "Error: too many parameters." + print("Error: too many parameters.") else: - print "WARNING: This will delete all the entities in the chunks that have more entities than entity-limit, make sure you know what entities are!.\nAre you sure you want to continue? (yes/no):" - answer = raw_input() + print("WARNING: This will delete all the entities in the chunks that have more entities than entity-limit, make sure you know what entities are!.\nAre you sure you want to continue? (yes/no):") + answer = input() if answer == 'yes': counter = self.current.remove_entities() - print "Deleted {0} entities.".format(counter) + print("Deleted {0} entities.".format(counter)) if counter: self.current.scanned = False self.current.rescan_entities(self.options) elif answer == 'no': - print "Ok!" - else: print "Invalid answer, use \'yes\' or \'no\' the next time!." + print("Ok!") + else: print("Invalid answer, use \'yes\' or \'no\' the next time!.") else: - print "The world hasn't be scanned (or it needs a rescan). Use \'scan\' to scan it." + print("The world hasn't be scanned (or it needs a rescan). Use \'scan\' to scan it.") def do_remove_chunks(self, arg): if self.current and self.current.scanned: if len(arg.split()) == 0: - print "Possible arguments are: {0}".format(self.possible_chunk_args_text) + print("Possible arguments are: {0}".format(self.possible_chunk_args_text)) elif len(arg.split()) > 1: - print "Error: too many parameters." + print("Error: too many parameters.") else: - if arg in world.CHUNK_PROBLEMS_ARGS.values() or arg == 'all': + if arg in list(world.CHUNK_PROBLEMS_ARGS.values()) or arg == 'all': for problem, status_text, a in world.CHUNK_PROBLEMS_ITERATOR: if arg == 'all' or arg == a: n = self.current.remove_problematic_chunks(problem) if n: self.current.scanned = False - print "Removed {0} chunks with status \'{1}\'.\n".format(n, status_text) + print("Removed {0} chunks with status \'{1}\'.\n".format(n, status_text)) else: - print "Unknown argument." + print("Unknown argument.") else: - print "The world hasn't be scanned (or it needs a rescan). Use \'scan\' to scan it." + print("The world hasn't be scanned (or it needs a rescan). Use \'scan\' to scan it.") def do_replace_chunks(self, arg): el = self.options.entity_limit de = self.options.delete_entities if self.current and self.current.scanned: if len(arg.split()) == 0: - print "Possible arguments are: {0}".format(self.possible_chunk_args_text) + print("Possible arguments are: {0}".format(self.possible_chunk_args_text)) elif len(arg.split()) > 1: - print "Error: too many parameters." 
+ print("Error: too many parameters.") else: - if arg in world.CHUNK_PROBLEMS_ARGS.values() or arg == 'all': + if arg in list(world.CHUNK_PROBLEMS_ARGS.values()) or arg == 'all': for problem, status_text, a in world.CHUNK_PROBLEMS_ITERATOR: if arg == 'all' or arg == a: n = self.current.replace_problematic_chunks(self.backup_worlds, problem, el, de) if n: self.current.scanned = False - print "\nReplaced {0} chunks with status \'{1}\'.".format(n, status_text) + print("\nReplaced {0} chunks with status \'{1}\'.".format(n, status_text)) else: - print "Unknown argument." + print("Unknown argument.") else: - print "The world hasn't be scanned (or it needs a rescan). Use \'scan\' to scan it." + print("The world hasn't be scanned (or it needs a rescan). Use \'scan\' to scan it.") def do_replace_regions(self, arg): el = self.options.entity_limit de = self.options.delete_entities if self.current and self.current.scanned: if len(arg.split()) == 0: - print "Possible arguments are: {0}".format(self.possible_region_args_text) + print("Possible arguments are: {0}".format(self.possible_region_args_text)) elif len(arg.split()) > 1: - print "Error: too many parameters." + print("Error: too many parameters.") else: - if arg in world.REGION_PROBLEMS_ARGS.values() or arg == 'all': + if arg in list(world.REGION_PROBLEMS_ARGS.values()) or arg == 'all': for problem, status_text, a in world.REGION_PROBLEMS_ITERATOR: if arg == 'all' or arg == a: n = self.current.replace_problematic_regions(self.backup_worlds, problem, el, de) if n: self.current.scanned = False - print "\nReplaced {0} regions with status \'{1}\'.".format(n, status_text) + print("\nReplaced {0} regions with status \'{1}\'.".format(n, status_text)) else: - print "Unknown argument." + print("Unknown argument.") else: - print "The world hasn't be scanned (or it needs a rescan). Use \'scan\' to scan it." + print("The world hasn't be scanned (or it needs a rescan). Use \'scan\' to scan it.") def do_remove_regions(self, arg): if self.current and self.current.scanned: if len(arg.split()) == 0: - print "Possible arguments are: {0}".format(self.possible_region_args_text) + print("Possible arguments are: {0}".format(self.possible_region_args_text)) elif len(arg.split()) > 1: - print "Error: too many parameters." + print("Error: too many parameters.") else: - if arg in world.REGION_PROBLEMS_ARGS.values() or arg == 'all': + if arg in list(world.REGION_PROBLEMS_ARGS.values()) or arg == 'all': for problem, status_text, a in world.REGION_PROBLEMS_ITERATOR: if arg == 'all' or arg == a: n = self.current.remove_problematic_regions(problem) if n: self.current.scanned = False - print "\nRemoved {0} regions with status \'{1}\'.".format(n, status_text) + print("\nRemoved {0} regions with status \'{1}\'.".format(n, status_text)) else: - print "Unknown argument." + print("Unknown argument.") else: - print "The world hasn't be scanned (or it needs a rescan). Use \'scan\' to scan it." + print("The world hasn't be scanned (or it needs a rescan). Use \'scan\' to scan it.") pass def do_quit(self, arg): - print "Quitting." + print("Quitting.") return True def do_exit(self, arg): - print "Exiting." + print("Exiting.") return True def do_EOF(self, arg): - print "Quitting." 
+ print("Quitting.") return True ################################################# @@ -403,27 +403,27 @@ def complete_set(self, text, line, begidx, endidx): return self.complete_arg(text, possible_args) def complete_count_chunks(self, text, line, begidx, endidx): - possible_args = world.CHUNK_PROBLEMS_ARGS.values() + ['all'] + possible_args = list(world.CHUNK_PROBLEMS_ARGS.values()) + ['all'] return self.complete_arg(text, possible_args) def complete_remove_chunks(self, text, line, begidx, endidx): - possible_args = world.CHUNK_PROBLEMS_ARGS.values() + ['all'] + possible_args = list(world.CHUNK_PROBLEMS_ARGS.values()) + ['all'] return self.complete_arg(text, possible_args) def complete_replace_chunks(self, text, line, begidx, endidx): - possible_args = world.CHUNK_PROBLEMS_ARGS.values() + ['all'] + possible_args = list(world.CHUNK_PROBLEMS_ARGS.values()) + ['all'] return self.complete_arg(text, possible_args) def complete_count_regions(self, text, line, begidx, endidx): - possible_args = world.REGION_PROBLEMS_ARGS.values() + ['all'] + possible_args = list(world.REGION_PROBLEMS_ARGS.values()) + ['all'] return self.complete_arg(text, possible_args) def complete_remove_regions(self, text, line, begidx, endidx): - possible_args = world.REGION_PROBLEMS_ARGS.values() + ['all'] + possible_args = list(world.REGION_PROBLEMS_ARGS.values()) + ['all'] return self.complete_arg(text, possible_args) def complete_replace_regions(self, text, line, begidx, endidx): - possible_args = world.REGION_PROBLEMS_ARGS.values() + ['all'] + possible_args = list(world.REGION_PROBLEMS_ARGS.values()) + ['all'] return self.complete_arg(text, possible_args) ################################################# @@ -447,75 +447,75 @@ def help_set(self): "If you input a few worlds you can choose wich one will be " "scanned using this command.\n") def help_current_workload(self): - print "\nPrints information of the current region-set/world. This will be the region-set/world to scan and fix.\n" + print("\nPrints information of the current region-set/world. This will be the region-set/world to scan and fix.\n") def help_scan(self): - print "\nScans the current world set or the region set.\n" + print("\nScans the current world set or the region set.\n") def help_count_chunks(self): - print "\n Prints out the number of chunks with the given status. For example" - print "\'count corrupted\' prints the number of corrupted chunks in the world." - print - print "Possible status are: {0}\n".format(self.possible_chunk_args_text) + print("\n Prints out the number of chunks with the given status. For example") + print("\'count corrupted\' prints the number of corrupted chunks in the world.") + print() + print("Possible status are: {0}\n".format(self.possible_chunk_args_text)) def help_remove_entities(self): - print "\nRemove all the entities in chunks that have more than entity-limit entities." - print - print "This chunks are the ones with status \'too many entities\'.\n" + print("\nRemove all the entities in chunks that have more than entity-limit entities.") + print() + print("This chunks are the ones with status \'too many entities\'.\n") def help_remove_chunks(self): - print "\nRemoves bad chunks with the given problem." - print - print "Please, be careful, when used with the status too-many-entities this will" - print "REMOVE THE CHUNKS with too many entities problems, not the entities." - print "To remove only the entities see the command remove_entities." 
- print - print "For example \'remove_chunks corrupted\' this will remove corrupted chunks." - print - print "Possible status are: {0}\n".format(self.possible_chunk_args_text) - print + print("\nRemoves bad chunks with the given problem.") + print() + print("Please, be careful, when used with the status too-many-entities this will") + print("REMOVE THE CHUNKS with too many entities problems, not the entities.") + print("To remove only the entities see the command remove_entities.") + print() + print("For example \'remove_chunks corrupted\' this will remove corrupted chunks.") + print() + print("Possible status are: {0}\n".format(self.possible_chunk_args_text)) + print() def help_replace_chunks(self): - print "\nReplaces bad chunks with the given status using the backups directories." - print - print "Exampe: \"replace_chunks corrupted\"" - print - print "this will replace the corrupted chunks with the given backups." - print - print "Possible status are: {0}\n".format(self.possible_chunk_args_text) - print - print "Note: after replacing any chunks you have to rescan the world.\n" + print("\nReplaces bad chunks with the given status using the backups directories.") + print() + print("Exampe: \"replace_chunks corrupted\"") + print() + print("this will replace the corrupted chunks with the given backups.") + print() + print("Possible status are: {0}\n".format(self.possible_chunk_args_text)) + print() + print("Note: after replacing any chunks you have to rescan the world.\n") def help_count_regions(self): - print "\n Prints out the number of regions with the given status. For example " - print "\'count_regions too-small\' prints the number of region with \'too-small\' status." - print - print "Possible status are: {0}\n".format(self.possible_region_args_text) + print("\n Prints out the number of regions with the given status. For example ") + print("\'count_regions too-small\' prints the number of region with \'too-small\' status.") + print() + print("Possible status are: {0}\n".format(self.possible_region_args_text)) def help_remove_regions(self): - print "\nRemoves regions with the given status." - print - print "Example: \'remove_regions too-small\'" - print - print "this will remove the region files with status \'too-small\'." - print - print "Possible status are: {0}".format(self.possible_region_args_text) - print - print "Note: after removing any regions you have to rescan the world.\n" + print("\nRemoves regions with the given status.") + print() + print("Example: \'remove_regions too-small\'") + print() + print("this will remove the region files with status \'too-small\'.") + print() + print("Possible status are: {0}".format(self.possible_region_args_text)) + print() + print("Note: after removing any regions you have to rescan the world.\n") def help_replace_regions(self): - print "\nReplaces regions with the given status." - print - print "Example: \"replace_regions too-small\"" - print - print "this will try to replace the region files with status \'too-small\'" - print "with the given backups." 
- print - print "Possible status are: {0}".format(self.possible_region_args_text) - print - print "Note: after replacing any regions you have to rescan the world.\n" + print("\nReplaces regions with the given status.") + print() + print("Example: \"replace_regions too-small\"") + print() + print("this will try to replace the region files with status \'too-small\'") + print("with the given backups.") + print() + print("Possible status are: {0}".format(self.possible_region_args_text)) + print() + print("Note: after replacing any regions you have to rescan the world.\n") def help_summary(self): - print "\nPrints a summary of all the problems found in the current workload.\n" + print("\nPrints a summary of all the problems found in the current workload.\n") def help_quit(self): - print "\nQuits interactive mode, exits region-fixer. Same as \'EOF\' and \'exit\' commands.\n" + print("\nQuits interactive mode, exits region-fixer. Same as \'EOF\' and \'exit\' commands.\n") def help_EOF(self): - print "\nQuits interactive mode, exits region-fixer. Same as \'quit\' and \'exit\' commands\n" + print("\nQuits interactive mode, exits region-fixer. Same as \'quit\' and \'exit\' commands\n") def help_exit(self): - print "\nQuits interactive mode, exits region-fixer. Same as \'quit\' and \'EOF\' commands\n" + print("\nQuits interactive mode, exits region-fixer. Same as \'quit\' and \'EOF\' commands\n") def help_help(self): - print "Prints help help." + print("Prints help help.") diff --git a/regionfixer_core/progressbar/__init__.py b/regionfixer_core/progressbar/__init__.py new file mode 100644 index 0000000..89daf46 --- /dev/null +++ b/regionfixer_core/progressbar/__init__.py @@ -0,0 +1,49 @@ +#!/usr/bin/python +# -*- coding: utf-8 -*- +# +# progressbar - Text progress bar library for Python. +# Copyright (c) 2005 Nilton Volpato +# +# This library is free software; you can redistribute it and/or +# modify it under the terms of the GNU Lesser General Public +# License as published by the Free Software Foundation; either +# version 2.1 of the License, or (at your option) any later version. +# +# This library is distributed in the hope that it will be useful, +# but WITHOUT ANY WARRANTY; without even the implied warranty of +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU +# Lesser General Public License for more details. +# +# You should have received a copy of the GNU Lesser General Public +# License along with this library; if not, write to the Free Software +# Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA + +"""Text progress bar library for Python. + +A text progress bar is typically used to display the progress of a long +running operation, providing a visual cue that processing is underway. + +The ProgressBar class manages the current progress, and the format of the line +is given by a number of widgets. A widget is an object that may display +differently depending on the state of the progress bar. There are three types +of widgets: + - a string, which always shows itself + + - a ProgressBarWidget, which may return a different value every time its + update method is called + + - a ProgressBarWidgetHFill, which is like ProgressBarWidget, except it + expands to fill the remaining width of the line. + +The progressbar module is very easy to use, yet very powerful. It will also +automatically enable features like auto-resizing when the system supports it. 
+""" + +__author__ = 'Nilton Volpato' +__author_email__ = 'nilton.volpato@gmail.com' +__date__ = '2011-05-14' +__version__ = '2.5' + +from .compat import * +from .widgets import * +from .progressbar import * diff --git a/regionfixer_core/progressbar/compat.py b/regionfixer_core/progressbar/compat.py new file mode 100644 index 0000000..a39f4a1 --- /dev/null +++ b/regionfixer_core/progressbar/compat.py @@ -0,0 +1,44 @@ +# -*- coding: utf-8 -*- +# +# progressbar - Text progress bar library for Python. +# Copyright (c) 2005 Nilton Volpato +# +# This library is free software; you can redistribute it and/or +# modify it under the terms of the GNU Lesser General Public +# License as published by the Free Software Foundation; either +# version 2.1 of the License, or (at your option) any later version. +# +# This library is distributed in the hope that it will be useful, +# but WITHOUT ANY WARRANTY; without even the implied warranty of +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU +# Lesser General Public License for more details. +# +# You should have received a copy of the GNU Lesser General Public +# License along with this library; if not, write to the Free Software +# Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA + +"""Compatibility methods and classes for the progressbar module.""" + + +# Python 3.x (and backports) use a modified iterator syntax +# This will allow 2.x to behave with 3.x iterators +try: + next +except NameError: + def next(iter): + try: + # Try new style iterators + return iter.__next__() + except AttributeError: + # Fallback in case of a "native" iterator + return iter.next() + + +# Python < 2.5 does not have "any" +try: + any +except NameError: + def any(iterator): + for item in iterator: + if item: return True + return False diff --git a/regionfixer_core/progressbar/progressbar.py b/regionfixer_core/progressbar/progressbar.py new file mode 100644 index 0000000..3baf530 --- /dev/null +++ b/regionfixer_core/progressbar/progressbar.py @@ -0,0 +1,305 @@ +# -*- coding: utf-8 -*- +# +# progressbar - Text progress bar library for Python. +# Copyright (c) 2005 Nilton Volpato +# +# This library is free software; you can redistribute it and/or +# modify it under the terms of the GNU Lesser General Public +# License as published by the Free Software Foundation; either +# version 2.1 of the License, or (at your option) any later version. +# +# This library is distributed in the hope that it will be useful, +# but WITHOUT ANY WARRANTY; without even the implied warranty of +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU +# Lesser General Public License for more details. +# +# You should have received a copy of the GNU Lesser General Public +# License along with this library; if not, write to the Free Software +# Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA + +"""Main ProgressBar class.""" + +from __future__ import division + +import math +import os +import signal +import sys +import time + +try: + from fcntl import ioctl + from array import array + import termios +except ImportError: + pass + +from .compat import * # for: any, next +from . import widgets + + +class ProgressBar(object): + """The ProgressBar class which updates and prints the bar. + + A common way of using it is like: + >>> pbar = ProgressBar().start() + >>> for i in range(100): + ... # do something + ... pbar.update(i+1) + ... 
+ >>> pbar.finish() + + You can also use a ProgressBar as an iterator: + >>> progress = ProgressBar() + >>> for i in progress(some_iterable): + ... # do something + ... + + Since the progress bar is incredibly customizable you can specify + different widgets of any type in any order. You can even write your own + widgets! However, since there are already a good number of widgets you + should probably play around with them before moving on to create your own + widgets. + + The term_width parameter represents the current terminal width. If the + parameter is set to an integer then the progress bar will use that, + otherwise it will attempt to determine the terminal width falling back to + 80 columns if the width cannot be determined. + + When implementing a widget's update method you are passed a reference to + the current progress bar. As a result, you have access to the + ProgressBar's methods and attributes. Although there is nothing preventing + you from changing the ProgressBar you should treat it as read only. + + Useful methods and attributes include (Public API): + - currval: current progress (0 <= currval <= maxval) + - maxval: maximum (and final) value + - finished: True if the bar has finished (reached 100%) + - start_time: the time when start() method of ProgressBar was called + - seconds_elapsed: seconds elapsed since start_time and last call to + update + - percentage(): progress in percent [0..100] + """ + + __slots__ = ('currval', 'fd', 'finished', 'last_update_time', + 'left_justify', 'maxval', 'next_update', 'num_intervals', + 'poll', 'seconds_elapsed', 'signal_set', 'start_time', + 'term_width', 'update_interval', 'widgets', '_time_sensitive', + '__iterable') + + _DEFAULT_MAXVAL = 100 + _DEFAULT_TERMSIZE = 80 + _DEFAULT_WIDGETS = [widgets.Percentage(), ' ', widgets.Bar()] + + def __init__(self, maxval=None, widgets=None, term_width=None, poll=1, + left_justify=True, fd=None): + """Initializes a progress bar with sane defaults.""" + + # Don't share a reference with any other progress bars + if widgets is None: + widgets = list(self._DEFAULT_WIDGETS) + + self.maxval = maxval + self.widgets = widgets + self.fd = fd if fd is not None else sys.stderr + self.left_justify = left_justify + + self.signal_set = False + if term_width is not None: + self.term_width = term_width + else: + try: + self._handle_resize() + signal.signal(signal.SIGWINCH, self._handle_resize) + self.signal_set = True + except (SystemExit, KeyboardInterrupt): raise + except: + self.term_width = self._env_size() + + self.__iterable = None + self._update_widgets() + self.currval = 0 + self.finished = False + self.last_update_time = None + self.poll = poll + self.seconds_elapsed = 0 + self.start_time = None + self.update_interval = 1 + self.next_update = 0 + + + def __call__(self, iterable): + """Use a ProgressBar to iterate through an iterable.""" + + try: + self.maxval = len(iterable) + except: + if self.maxval is None: + self.maxval = widgets.UnknownLength + + self.__iterable = iter(iterable) + return self + + + def __iter__(self): + return self + + + def __next__(self): + try: + value = next(self.__iterable) + if self.start_time is None: + self.start() + else: + self.update(self.currval + 1) + return value + except StopIteration: + if self.start_time is None: + self.start() + self.finish() + raise + + + # Create an alias so that Python 2.x won't complain about not being + # an iterator. 
+ next = __next__ + + + def _env_size(self): + """Tries to find the term_width from the environment.""" + + return int(os.environ.get('COLUMNS', self._DEFAULT_TERMSIZE)) - 1 + + + def _handle_resize(self, signum=None, frame=None): + """Tries to catch resize signals sent from the terminal.""" + + h, w = array('h', ioctl(self.fd, termios.TIOCGWINSZ, '\0' * 8))[:2] + self.term_width = w + + + def percentage(self): + """Returns the progress as a percentage.""" + if self.maxval is widgets.UnknownLength: + return float("NaN") + if self.currval >= self.maxval: + return 100.0 + return (self.currval * 100.0 / self.maxval) if self.maxval else 100.00 + + percent = property(percentage) + + + def _format_widgets(self): + result = [] + expanding = [] + width = self.term_width + + for index, widget in enumerate(self.widgets): + if isinstance(widget, widgets.WidgetHFill): + result.append(widget) + expanding.insert(0, index) + else: + widget = widgets.format_updatable(widget, self) + result.append(widget) + width -= len(widget) + + count = len(expanding) + while count: + portion = max(int(math.ceil(width * 1. / count)), 0) + index = expanding.pop() + count -= 1 + + widget = result[index].update(self, portion) + width -= len(widget) + result[index] = widget + + return result + + + def _format_line(self): + """Joins the widgets and justifies the line.""" + + widgets = ''.join(self._format_widgets()) + + if self.left_justify: return widgets.ljust(self.term_width) + else: return widgets.rjust(self.term_width) + + + def _need_update(self): + """Returns whether the ProgressBar should redraw the line.""" + if self.currval >= self.next_update or self.finished: return True + + delta = time.time() - self.last_update_time + return self._time_sensitive and delta > self.poll + + + def _update_widgets(self): + """Checks all widgets for the time sensitive bit.""" + + self._time_sensitive = any(getattr(w, 'TIME_SENSITIVE', False) + for w in self.widgets) + + + def update(self, value=None): + """Updates the ProgressBar to a new value.""" + + if value is not None and value is not widgets.UnknownLength: + if (self.maxval is not widgets.UnknownLength + and not 0 <= value <= self.maxval): + + raise ValueError('Value out of range') + + self.currval = value + + + if not self._need_update(): return + if self.start_time is None: + raise RuntimeError('You must call "start" before calling "update"') + + now = time.time() + self.seconds_elapsed = now - self.start_time + self.next_update = self.currval + self.update_interval + self.fd.write(self._format_line() + '\r') + self.fd.flush() + self.last_update_time = now + + + def start(self): + """Starts measuring time, and prints the bar at 0%. + + It returns self so you can use it like this: + >>> pbar = ProgressBar().start() + >>> for i in range(100): + ... # do something + ... pbar.update(i+1) + ... 
+ >>> pbar.finish() + """ + + if self.maxval is None: + self.maxval = self._DEFAULT_MAXVAL + + self.num_intervals = max(100, self.term_width) + self.next_update = 0 + + if self.maxval is not widgets.UnknownLength: + if self.maxval < 0: raise ValueError('Value out of range') + self.update_interval = self.maxval / self.num_intervals + + + self.start_time = self.last_update_time = time.time() + self.update(0) + + return self + + + def finish(self): + """Puts the ProgressBar bar in the finished state.""" + + if self.finished: + return + self.finished = True + self.update(self.maxval) + self.fd.write('\n') + if self.signal_set: + signal.signal(signal.SIGWINCH, signal.SIG_DFL) diff --git a/regionfixer_core/progressbar/widgets.py b/regionfixer_core/progressbar/widgets.py new file mode 100644 index 0000000..dd3c6ef --- /dev/null +++ b/regionfixer_core/progressbar/widgets.py @@ -0,0 +1,359 @@ +# -*- coding: utf-8 -*- +# +# progressbar - Text progress bar library for Python. +# Copyright (c) 2005 Nilton Volpato +# +# This library is free software; you can redistribute it and/or +# modify it under the terms of the GNU Lesser General Public +# License as published by the Free Software Foundation; either +# version 2.1 of the License, or (at your option) any later version. +# +# This library is distributed in the hope that it will be useful, +# but WITHOUT ANY WARRANTY; without even the implied warranty of +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU +# Lesser General Public License for more details. +# +# You should have received a copy of the GNU Lesser General Public +# License along with this library; if not, write to the Free Software +# Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA + +"""Default ProgressBar widgets.""" + +from __future__ import division + +import datetime +import math + +try: + from abc import ABCMeta, abstractmethod +except ImportError: + AbstractWidget = object + abstractmethod = lambda fn: fn +else: + AbstractWidget = ABCMeta('AbstractWidget', (object,), {}) + +class UnknownLength: + pass + +def format_updatable(updatable, pbar): + if hasattr(updatable, 'update'): return updatable.update(pbar) + else: return updatable + + +class Widget(AbstractWidget): + """The base class for all widgets. + + The ProgressBar will call the widget's update value when the widget should + be updated. The widget's size may change between calls, but the widget may + display incorrectly if the size changes drastically and repeatedly. + + The boolean TIME_SENSITIVE informs the ProgressBar that it should be + updated more often because it is time sensitive. + """ + + TIME_SENSITIVE = False + __slots__ = () + + @abstractmethod + def update(self, pbar): + """Updates the widget. + + pbar - a reference to the calling ProgressBar + """ + + +class WidgetHFill(Widget): + """The base class for all variable width widgets. + + This widget is much like the \\hfill command in TeX, it will expand to + fill the line. You can use more than one in the same line, and they will + all have the same width, and together will fill the line. + """ + + @abstractmethod + def update(self, pbar, width): + """Updates the widget providing the total width the widget must fill. 
+ + pbar - a reference to the calling ProgressBar + width - The total width the widget must fill + """ + + +class Timer(Widget): + """Widget which displays the elapsed seconds.""" + + __slots__ = ('format_string',) + TIME_SENSITIVE = True + + def __init__(self, format='Elapsed Time: %s'): + self.format_string = format + + @staticmethod + def format_time(seconds): + """Formats time as the string "HH:MM:SS".""" + + return str(datetime.timedelta(seconds=int(seconds))) + + + def update(self, pbar): + """Updates the widget to show the elapsed time.""" + + return self.format_string % self.format_time(pbar.seconds_elapsed) + + +class ETA(Timer): + """Widget which attempts to estimate the time of arrival.""" + + TIME_SENSITIVE = True + + def update(self, pbar): + """Updates the widget to show the ETA or total time when finished.""" + + if pbar.maxval is UnknownLength or pbar.currval == 0: + return 'ETA: --:--:--' + elif pbar.finished: + return 'Time: %s' % self.format_time(pbar.seconds_elapsed) + else: + elapsed = pbar.seconds_elapsed + eta = elapsed * pbar.maxval / pbar.currval - elapsed + return 'ETA: %s' % self.format_time(eta) + + +class AdaptiveETA(Timer): + """Widget which attempts to estimate the time of arrival. + + Uses a weighted average of two estimates: + 1) ETA based on the total progress and time elapsed so far + 2) ETA based on the progress as per the last 10 update reports + + The weight depends on the current progress so that to begin with the + total progress is used and at the end only the most recent progress is + used. + """ + + TIME_SENSITIVE = True + NUM_SAMPLES = 10 + + def _update_samples(self, currval, elapsed): + sample = (currval, elapsed) + if not hasattr(self, 'samples'): + self.samples = [sample] * (self.NUM_SAMPLES + 1) + else: + self.samples.append(sample) + return self.samples.pop(0) + + def _eta(self, maxval, currval, elapsed): + return elapsed * maxval / float(currval) - elapsed + + def update(self, pbar): + """Updates the widget to show the ETA or total time when finished.""" + if pbar.maxval is UnknownLength or pbar.currval == 0: + return 'ETA: --:--:--' + elif pbar.finished: + return 'Time: %s' % self.format_time(pbar.seconds_elapsed) + else: + elapsed = pbar.seconds_elapsed + currval1, elapsed1 = self._update_samples(pbar.currval, elapsed) + eta = self._eta(pbar.maxval, pbar.currval, elapsed) + if pbar.currval > currval1: + etasamp = self._eta(pbar.maxval - currval1, + pbar.currval - currval1, + elapsed - elapsed1) + weight = (pbar.currval / float(pbar.maxval)) ** 0.5 + eta = (1 - weight) * eta + weight * etasamp + return 'ETA: %s' % self.format_time(eta) + + +class FileTransferSpeed(Widget): + """Widget for showing the transfer speed (useful for file transfers).""" + + FMT = '%6.2f %s%s/s' + PREFIXES = ' kMGTPEZY' + __slots__ = ('unit',) + + def __init__(self, unit='B'): + self.unit = unit + + def update(self, pbar): + """Updates the widget with the current SI prefixed speed.""" + + if pbar.seconds_elapsed < 2e-6 or pbar.currval < 2e-6: # =~ 0 + scaled = power = 0 + else: + speed = pbar.currval / pbar.seconds_elapsed + power = int(math.log(speed, 1000)) + scaled = speed / 1000.**power + + return self.FMT % (scaled, self.PREFIXES[power], self.unit) + + +class AnimatedMarker(Widget): + """An animated marker for the progress bar which defaults to appear as if + it were rotating. 
+ """ + + __slots__ = ('markers', 'curmark') + + def __init__(self, markers='|/-\\'): + self.markers = markers + self.curmark = -1 + + def update(self, pbar): + """Updates the widget to show the next marker or the first marker when + finished""" + + if pbar.finished: return self.markers[0] + + self.curmark = (self.curmark + 1) % len(self.markers) + return self.markers[self.curmark] + +# Alias for backwards compatibility +RotatingMarker = AnimatedMarker + + +class Counter(Widget): + """Displays the current count.""" + + __slots__ = ('format_string',) + + def __init__(self, format='%d'): + self.format_string = format + + def update(self, pbar): + return self.format_string % pbar.currval + + +class Percentage(Widget): + """Displays the current percentage as a number with a percent sign.""" + + def update(self, pbar): + return '%3.0f%%' % pbar.percentage() + + +class FormatLabel(Timer): + """Displays a formatted label.""" + + mapping = { + 'elapsed': ('seconds_elapsed', Timer.format_time), + 'finished': ('finished', None), + 'last_update': ('last_update_time', None), + 'max': ('maxval', None), + 'seconds': ('seconds_elapsed', None), + 'start': ('start_time', None), + 'value': ('currval', None) + } + + __slots__ = ('format_string',) + def __init__(self, format): + self.format_string = format + + def update(self, pbar): + context = {} + for name, (key, transform) in self.mapping.items(): + try: + value = getattr(pbar, key) + + if transform is None: + context[name] = value + else: + context[name] = transform(value) + except: pass + + return self.format_string % context + + +class SimpleProgress(Widget): + """Returns progress as a count of the total (e.g.: "5 of 47").""" + + __slots__ = ('sep',) + + def __init__(self, sep=' of '): + self.sep = sep + + def update(self, pbar): + if pbar.maxval is UnknownLength: + return '%d%s?' % (pbar.currval, self.sep) + return '%d%s%s' % (pbar.currval, self.sep, pbar.maxval) + + +class Bar(WidgetHFill): + """A progress bar which stretches to fill the line.""" + + __slots__ = ('marker', 'left', 'right', 'fill', 'fill_left') + + def __init__(self, marker='#', left='|', right='|', fill=' ', + fill_left=True): + """Creates a customizable progress bar. + + marker - string or updatable object to use as a marker + left - string or updatable object to use as a left border + right - string or updatable object to use as a right border + fill - character to use for the empty part of the progress bar + fill_left - whether to fill from the left or the right + """ + self.marker = marker + self.left = left + self.right = right + self.fill = fill + self.fill_left = fill_left + + + def update(self, pbar, width): + """Updates the progress bar and its subcomponents.""" + + left, marked, right = (format_updatable(i, pbar) for i in + (self.left, self.marker, self.right)) + + width -= len(left) + len(right) + # Marked must *always* have length of 1 + if pbar.maxval is not UnknownLength and pbar.maxval: + marked *= int(pbar.currval / pbar.maxval * width) + else: + marked = '' + + if self.fill_left: + return '%s%s%s' % (left, marked.ljust(width, self.fill), right) + else: + return '%s%s%s' % (left, marked.rjust(width, self.fill), right) + + +class ReverseBar(Bar): + """A bar which has a marker which bounces from side to side.""" + + def __init__(self, marker='#', left='|', right='|', fill=' ', + fill_left=False): + """Creates a customizable progress bar. 
+ + marker - string or updatable object to use as a marker + left - string or updatable object to use as a left border + right - string or updatable object to use as a right border + fill - character to use for the empty part of the progress bar + fill_left - whether to fill from the left or the right + """ + self.marker = marker + self.left = left + self.right = right + self.fill = fill + self.fill_left = fill_left + + +class BouncingBar(Bar): + def update(self, pbar, width): + """Updates the progress bar and its subcomponents.""" + + left, marker, right = (format_updatable(i, pbar) for i in + (self.left, self.marker, self.right)) + + width -= len(left) + len(right) + + if pbar.finished: return '%s%s%s' % (left, width * marker, right) + + position = int(pbar.currval % (width * 2 - 1)) + if position > width: position = width * 2 - position + lpad = self.fill * (position - 1) + rpad = self.fill * (width - len(marker) - len(lpad)) + + # Swap if we want to bounce the other way + if not self.fill_left: rpad, lpad = lpad, rpad + + return '%s%s%s%s%s' % (left, lpad, marker, rpad, right) diff --git a/regionfixer_core/scan.py b/regionfixer_core/scan.py index 797e394..9b6d5fc 100644 --- a/regionfixer_core/scan.py +++ b/regionfixer_core/scan.py @@ -25,7 +25,7 @@ import sys import logging import multiprocessing -from multiprocessing.queues import SimpleQueue +from multiprocessing import SimpleQueue from os.path import split, abspath from time import sleep, time from copy import copy @@ -36,8 +36,8 @@ from nbt.nbt import MalformedFileError from nbt.region import ChunkDataError, ChunkHeaderError,\ RegionHeaderError, InconceivedChunk -import progressbar -import world +from . import progressbar +from . import world from regionfixer_core.util import entitle @@ -516,9 +516,9 @@ def console_scan_loop(scanners, scan_titles, verbose): print status text to the terminal. """ try: for scanner, title in zip(scanners, scan_titles): - print "\n{0:-^60}".format(title) + print("\n{0:-^60}".format(title)) if not len(scanner): - print "Info: No files to scan." + print("Info: No files to scan.") else: total = len(scanner) if not verbose: @@ -537,7 +537,7 @@ def console_scan_loop(scanners, scan_titles, verbose): else: status = "(" + result.oneliner_status + ")" fn = result.filename - print "Scanned {0: <12} {1:.<43} {2}/{3}".format(fn, status, counter, total) + print("Scanned {0: <12} {1:.<43} {2}/{3}".format(fn, status, counter, total)) if not verbose: pbar.finish() except KeyboardInterrupt as e: @@ -565,25 +565,25 @@ def console_scan_world(world_obj, processes, entity_limit, remove_entities, # because of this. w = world_obj # Scan the world directory - print "World info:" + print("World info:") - print ("There are {0} region files, {1} player files and {2} data" + print(("There are {0} region files, {1} player files and {2} data" " files in the world directory.").format( w.get_number_regions(), len(w.players) + len(w.old_players), - len(w.data_files)) + len(w.data_files))) # check the level.dat - print "\n{0:-^60}".format(' Checking level.dat ') + print("\n{0:-^60}".format(' Checking level.dat ')) if not w.scanned_level.path: - print "[WARNING!] \'level.dat\' doesn't exist!" + print("[WARNING!] 
\'level.dat\' doesn't exist!") else: if w.scanned_level.readable == True: - print "\'level.dat\' is readable" + print("\'level.dat\' is readable") else: - print "[WARNING!]: \'level.dat\' is corrupted with the following error/s:" - print "\t {0}".format(w.scanned_level.status_text) + print("[WARNING!]: \'level.dat\' is corrupted with the following error/s:") + print("\t {0}".format(w.scanned_level.status_text)) ps = AsyncDataScanner(w.players, processes) ops = AsyncDataScanner(w.old_players, processes) @@ -679,7 +679,7 @@ def scan_region_file(scanned_regionfile_obj, entity_limit, delete_entities): r.scan_time = time() r.scanned = True return r - except IOError, e: + except IOError as e: r.status = world.REGION_UNREADABLE r.scan_time = time() r.scanned = True @@ -708,9 +708,9 @@ def scan_region_file(scanned_regionfile_obj, entity_limit, delete_entities): # and once detected is better to fix it at once. if delete_entities: world.delete_entities(region_file, x, z) - print ("Deleted {0} entities in chunk" + print(("Deleted {0} entities in chunk" " ({1},{2}) of the region file: {3}").format( - c[TUPLE_NUM_ENTITIES], x, z, r.filename) + c[TUPLE_NUM_ENTITIES], x, z, r.filename)) # entities removed, change chunk status to OK r.chunks[(x, z)] = (0, world.CHUNK_OK) @@ -754,7 +754,7 @@ def scan_region_file(scanned_regionfile_obj, entity_limit, delete_entities): return r except KeyboardInterrupt: - print "\nInterrupted by user\n" + print("\nInterrupted by user\n") # TODO this should't exit. It should return to interactive # mode if we are in it. sys.exit(1) diff --git a/regionfixer_core/util.py b/regionfixer_core/util.py index ce48e8b..5100b22 100644 --- a/regionfixer_core/util.py +++ b/regionfixer_core/util.py @@ -24,7 +24,7 @@ import platform from os.path import join, split, exists, isfile import sys -import world +from . import world import traceback @@ -61,7 +61,7 @@ def query_yes_no(question, default="yes"): while True: sys.stdout.write(question + prompt) - choice = raw_input().lower() + choice = input().lower() if default is not None and choice == '': return valid[default] elif choice in valid: @@ -164,14 +164,14 @@ def parse_chunk_list(chunk_list, world_obj): try: chunk = eval(line) except: - print "The chunk {0} is not valid.".format(line) + print("The chunk {0} is not valid.".format(line)) continue region_name = world.get_chunk_region(chunk[0], chunk[1]) fullpath = join(world_obj.world_path, "region", region_name) if fullpath in world_obj.all_mca_files: parsed_list.append((fullpath, chunk[0], chunk[1])) else: - print "The chunk {0} should be in the region file {1} and this region files doesn't extist!".format(chunk, fullpath) + print("The chunk {0} should be in the region file {1} and this region files doesn't extist!".format(chunk, fullpath)) return parsed_list @@ -189,7 +189,7 @@ def parse_paths(args): region_list.append(arg) elif arg[-4:] == ".mcr": # ignore pre-anvil region files if not warning: - print "Warning: Region-Fixer only works with anvil format region files. Ignoring *.mcr files" + print("Warning: Region-Fixer only works with anvil format region files. Ignoring *.mcr files") warning = True else: world_list.append(arg) @@ -201,9 +201,9 @@ def parse_paths(args): if isfile(f): region_list_tmp.append(f) else: - print "Warning: \"{0}\" is not a file. Skipping it and scanning the rest.".format(f) + print("Warning: \"{0}\" is not a file. Skipping it and scanning the rest.".format(f)) else: - print "Warning: The region file {0} doesn't exists. 
Skipping it and scanning the rest.".format(f) + print("Warning: The region file {0} doesn't exists. Skipping it and scanning the rest.".format(f)) region_list = region_list_tmp # init the world objects @@ -223,9 +223,9 @@ def parse_world_list(world_path_list): if w.isworld: tmp.append(w) else: - print "Warning: The folder {0} doesn't look like a minecraft world. I'll skip it.".format(d) + print("Warning: The folder {0} doesn't look like a minecraft world. I'll skip it.".format(d)) else: - print "Warning: The folder {0} doesn't exist. I'll skip it.".format(d) + print("Warning: The folder {0} doesn't exist. I'll skip it.".format(d)) return tmp diff --git a/regionfixer_core/world.py b/regionfixer_core/world.py index d4fab3a..07ed2a3 100644 --- a/regionfixer_core/world.py +++ b/regionfixer_core/world.py @@ -23,7 +23,7 @@ import nbt.region as region import nbt.nbt as nbt -from util import table +from .util import table from glob import glob from os.path import join, split, exists @@ -239,7 +239,7 @@ def __setitem__(self, key, value): self.chunks[key] = value def keys(self): - return self.chunks.keys() + return list(self.chunks.keys()) def get_counters(self): """ Returns integers with all the problem counters in this @@ -257,7 +257,7 @@ def count_chunks(self, problem=None): If problem is omited or None, counts all the chunks. Returns an integer with the counter. """ counter = 0 - for coords in self.keys(): + for coords in list(self.keys()): if self[coords] and (self[coords][TUPLE_STATUS] == problem or problem == None): counter += 1 @@ -298,7 +298,7 @@ def list_chunks(self, status=None): returns all the existent chunks in the region file """ l = [] - for c in self.keys(): + for c in list(self.keys()): t = self[c] if status == t[TUPLE_STATUS]: l.append((self.get_global_chunk_coords(*c), t)) @@ -314,7 +314,7 @@ def summary(self): if self.status == REGION_TOO_SMALL: text += " |- This region file is too small in size to actually be a region file.\n" else: - for c in self.keys(): + for c in list(self.keys()): if self[c][TUPLE_STATUS] == CHUNK_OK or self[c][TUPLE_STATUS] == CHUNK_NOT_CREATED: continue status = self[c][TUPLE_STATUS] h_coords = c @@ -379,7 +379,7 @@ def remove_chunk_entities(self, x, z): def rescan_entities(self, options): """ Updates the status of all the chunks in the region file when the the option entity limit is changed. """ - for c in self.keys(): + for c in list(self.keys()): # for safety reasons use a temporary list to generate the # new tuple t = [0,0] @@ -441,7 +441,7 @@ def __init__(self, path, title, *args, **kwargs): d[path] = ScannedDataFile(path) def _get_list(self): - return self.data_files.values() + return list(self.data_files.values()) def _replace_in_data_structure(self, data): self.data_files[data.path] = data @@ -452,7 +452,7 @@ def __len__(self): def summary(self): """ Return a summary of problems found in this set. """ text = "" - bad_data_files = [i for i in self.data_files.values() if not i.readable] + bad_data_files = [i for i in list(self.data_files.values()) if not i.readable] for f in bad_data_files: text += "\t" + f.oneliner_status text += "\n" @@ -483,7 +483,7 @@ def __init__(self, regionset_path=None, region_list=[]): self.scanned = False except InvalidFileName as e: - print "Warning: The file {0} is not a valid name for a region. I'll skip it.".format(path) + print("Warning: The file {0} is not a valid name for a region. 
I'll skip it.".format(path)) def get_name(self): @@ -533,13 +533,13 @@ def __len__(self): return len(self.regions) def _get_list(self): - return self.regions.values() + return list(self.regions.values()) def _replace_in_data_structure(self, data): self.regions[data.get_coords()] = data def keys(self): - return self.regions.keys() + return list(self.regions.keys()) def list_regions(self, status=None): """ Returns a list of all the ScannedRegionFile objects stored @@ -552,9 +552,9 @@ def list_regions(self, status=None): #~ print self.regions.values() #~ print "El diccionario es si es:" #~ print self.regions - return self.regions.values() + return list(self.regions.values()) t = [] - for coords in self.regions.keys(): + for coords in list(self.regions.keys()): r = self.regions[coords] if r.status == status: t.append(r) @@ -566,7 +566,7 @@ def count_regions(self, status=None): Possible status are: empty, too_small """ counter = 0 - for r in self.keys(): + for r in list(self.keys()): if status == self[r].status: counter += 1 elif status == None: @@ -577,7 +577,7 @@ def count_chunks(self, problem=None): """ Returns the number of chunks with the given problem. If problem is None returns the number of chunks. """ counter = 0 - for r in self.keys(): + for r in list(self.keys()): counter += self[r].count_chunks(problem) return counter @@ -586,7 +586,7 @@ def list_chunks(self, status=None): with the given status. If status = None returns all the chunks. """ l = [] - for r in self.keys(): + for r in list(self.keys()): l.extend(self[r].list_chunks(status)) return l @@ -595,7 +595,7 @@ def summary(self): regionset. The summary is a string with global coords, local coords, data coords and status. """ text = "" - for r in self.keys(): + for r in list(self.keys()): if not (self[r].count_chunks(CHUNK_CORRUPTED) or \ self[r].count_chunks(CHUNK_TOO_MANY_ENTITIES) or \ self[r].count_chunks(CHUNK_WRONG_LOCATED) or \ @@ -632,10 +632,10 @@ def remove_problematic_chunks(self, problem): counter = 0 if self.count_chunks(): - print ' Deleting chunks in region set \"{0}\":'.format(self._get_dimension_directory()) - for r in self.regions.keys(): + print(' Deleting chunks in region set \"{0}\":'.format(self._get_dimension_directory())) + for r in list(self.regions.keys()): counter += self.regions[r].remove_problematic_chunks(problem) - print "Removed {0} chunks in this regionset.\n".format(counter) + print("Removed {0} chunks in this regionset.\n".format(counter)) return counter @@ -643,14 +643,14 @@ def remove_entities(self): """ Removes entities in chunks with the status TOO_MANY_ENTITIES. """ counter = 0 - for r in self.regions.keys(): + for r in list(self.regions.keys()): counter += self.regions[r].remove_entities() return counter def rescan_entities(self, options): """ Updates the status of all the chunks in the regionset when the option entity limit is changed. 
""" - for r in self.keys(): + for r in list(self.keys()): self[r].rescan_entities(options) def generate_report(self, standalone): @@ -738,7 +738,7 @@ def __init__(self, world_path): self.scanned_level = ScannedDataFile(level_dat_path, readable=True, status_text="OK") - except Exception, e: + except Exception as e: self.name = None self.scanned_level = ScannedDataFile(level_dat_path, readable=False, @@ -885,19 +885,19 @@ def replace_problematic_chunks(self, backup_worlds, problem, entity_limit, delet bad_chunks = regionset.list_chunks(problem) if bad_chunks and b_regionset._get_dimension_directory() != regionset._get_dimension_directory(): - print "The regionset \'{0}\' doesn't exist in the backup directory. Skipping this backup directory.".format(regionset._get_dimension_directory()) + print("The regionset \'{0}\' doesn't exist in the backup directory. Skipping this backup directory.".format(regionset._get_dimension_directory())) else: for c in bad_chunks: global_coords = c[0] status_tuple = c[1] local_coords = _get_local_chunk_coords(*global_coords) - print "\n{0:-^60}".format(' New chunk to replace. Coords: x = {0}; z = {1} '.format(*global_coords)) + print("\n{0:-^60}".format(' New chunk to replace. Coords: x = {0}; z = {1} '.format(*global_coords))) # search for the region file backup_region_path, local_coords = b_regionset.locate_chunk(global_coords) tofix_region_path, _ = regionset.locate_chunk(global_coords) if exists(backup_region_path): - print "Backup region file found in:\n {0}".format(backup_region_path) + print("Backup region file found in:\n {0}".format(backup_region_path)) # Scan the whole region file, pretty slow, but # absolutely needed to detect sharing offset chunks # The backups world doesn't change, check if the @@ -906,7 +906,7 @@ def replace_problematic_chunks(self, backup_worlds, problem, entity_limit, delet coords = get_region_coords(split(backup_region_path)[1]) r = scanned_regions[coords] except KeyError: - from scan import scan_region_file + from .scan import scan_region_file r = scan_region_file(ScannedRegionFile(backup_region_path), entity_limit, delete_entities) scanned_regions[r.coords] = r try: @@ -924,7 +924,7 @@ def replace_problematic_chunks(self, backup_worlds, problem, entity_limit, delet backup_region_file = region.RegionFile(backup_region_path) working_chunk = backup_region_file.get_chunk(local_coords[0],local_coords[1]) - print "Replacing..." + print("Replacing...") # the chunk exists and is healthy, fix it! tofix_region_file = region.RegionFile(tofix_region_path) # first unlink the chunk, second write the chunk. 
@@ -933,14 +933,14 @@ def replace_problematic_chunks(self, backup_worlds, problem, entity_limit, delet tofix_region_file.unlink_chunk(*local_coords) tofix_region_file.write_chunk(local_coords[0], local_coords[1],working_chunk) counter += 1 - print "Chunk replaced using backup dir: {0}".format(backup.path) + print("Chunk replaced using backup dir: {0}".format(backup.path)) else: - print "Can't use this backup directory, the chunk has the status: {0}".format(CHUNK_STATUS_TEXT[status]) + print("Can't use this backup directory, the chunk has the status: {0}".format(CHUNK_STATUS_TEXT[status])) continue else: - print "The region file doesn't exist in the backup directory: {0}".format(backup_region_path) + print("The region file doesn't exist in the backup directory: {0}".format(backup_region_path)) return counter @@ -967,10 +967,10 @@ def replace_problematic_regions(self, backup_worlds, problem, entity_limit, dele bad_regions = regionset.list_regions(problem) if bad_regions and b_regionset._get_dimension_directory() != regionset._get_dimension_directory(): - print "The regionset \'{0}\' doesn't exist in the backup directory. Skipping this backup directory.".format(regionset._get_dimension_directory()) + print("The regionset \'{0}\' doesn't exist in the backup directory. Skipping this backup directory.".format(regionset._get_dimension_directory())) else: for r in bad_regions: - print "\n{0:-^60}".format(' New region file to replace! Coords {0} '.format(r.get_coords())) + print("\n{0:-^60}".format(' New region file to replace! Coords {0} '.format(r.get_coords()))) # search for the region file @@ -981,21 +981,21 @@ def replace_problematic_regions(self, backup_worlds, problem, entity_limit, dele tofix_region_path = r.get_path() if backup_region_path != None and exists(backup_region_path): - print "Backup region file found in:\n {0}".format(backup_region_path) + print("Backup region file found in:\n {0}".format(backup_region_path)) # check the region file, just open it. try: backup_region_file = region.RegionFile(backup_region_path) except region.NoRegionHeader as e: - print "Can't use this backup directory, the error while opening the region file: {0}".format(e) + print("Can't use this backup directory, the error while opening the region file: {0}".format(e)) continue except Exception as e: - print "Can't use this backup directory, unknown error: {0}".format(e) + print("Can't use this backup directory, unknown error: {0}".format(e)) continue copy(backup_region_path, tofix_region_path) - print "Region file replaced!" + print("Region file replaced!") counter += 1 else: - print "The region file doesn't exist in the backup directory: {0}".format(backup_region_path) + print("The region file doesn't exist in the backup directory: {0}".format(backup_region_path)) return counter From ecf1b4bf13f60df8c091a909d12914997f948013 Mon Sep 17 00:00:00 2001 From: Alejandro Aguilera Date: Thu, 21 Feb 2019 00:17:29 +0100 Subject: [PATCH 059/151] Update progressbar module. 
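The custom FractionWidget and the shared widget list are dropped in favour of the widgets shipped with the relocated progressbar package. A minimal sketch of how such a bar is driven, assuming the package exposes the same ProgressBar, SimpleProgress, Bar and AdaptiveETA names that scan.py imports below (the fake work loop is only for illustration):

    from time import sleep
    from progressbar import ProgressBar, Bar, AdaptiveETA, SimpleProgress

    def scan_fake_files(total=50):
        # SimpleProgress counts finished units, taking over the job of the
        # removed FractionWidget; AdaptiveETA estimates the remaining time.
        pbar = ProgressBar(widgets=[SimpleProgress(), Bar(), AdaptiveETA()],
                           maxval=total).start()
        for done in range(1, total + 1):
            sleep(0.01)        # stand-in for scanning one region file
            pbar.update(done)  # report how many units are finished so far
        pbar.finish()

    if __name__ == '__main__':
        scan_fake_files()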
--- .../progressbar => progressbar}/__init__.py | 0 .../progressbar => progressbar}/compat.py | 0 .../progressbar.py | 0 .../progressbar => progressbar}/widgets.py | 0 regionfixer.py | 20 --------------- regionfixer_core/scan.py | 25 ++----------------- 6 files changed, 2 insertions(+), 43 deletions(-) rename {regionfixer_core/progressbar => progressbar}/__init__.py (100%) rename {regionfixer_core/progressbar => progressbar}/compat.py (100%) rename {regionfixer_core/progressbar => progressbar}/progressbar.py (100%) rename {regionfixer_core/progressbar => progressbar}/widgets.py (100%) diff --git a/regionfixer_core/progressbar/__init__.py b/progressbar/__init__.py similarity index 100% rename from regionfixer_core/progressbar/__init__.py rename to progressbar/__init__.py diff --git a/regionfixer_core/progressbar/compat.py b/progressbar/compat.py similarity index 100% rename from regionfixer_core/progressbar/compat.py rename to progressbar/compat.py diff --git a/regionfixer_core/progressbar/progressbar.py b/progressbar/progressbar.py similarity index 100% rename from regionfixer_core/progressbar/progressbar.py rename to progressbar/progressbar.py diff --git a/regionfixer_core/progressbar/widgets.py b/progressbar/widgets.py similarity index 100% rename from regionfixer_core/progressbar/widgets.py rename to progressbar/widgets.py diff --git a/regionfixer.py b/regionfixer.py index 40d9e81..8bddbc0 100644 --- a/regionfixer.py +++ b/regionfixer.py @@ -32,21 +32,10 @@ from regionfixer_core.interactive import InteractiveLoop from regionfixer_core.util import entitle, is_bare_console, parse_paths,\ parse_backup_list -from regionfixer_core import progressbar from regionfixer_core.version import version_string from regionfixer_core.bug_reporter import BugReporter - -class FractionWidget(progressbar.ProgressBarWidget): - """ Convenience class to use the progressbar.py """ - def __init__(self, sep=' / '): - self.sep = sep - - def update(self, pbar): - return '%2d%s%2d' % (pbar.currval, self.sep, pbar.maxval) - - def delete_bad_chunks(options, scanned_obj): """ Takes a scanned object (world object or regionset object) and the options given to region-fixer, it deletes all the chunks with @@ -316,15 +305,6 @@ def main(): error = parser.error - # All scanners will use this progress bar - widgets = ['Scanning: ', - FractionWidget(), - ' ', - progressbar.Percentage(), - ' ', - progressbar.Bar(left='[', right=']'), - ' ', - progressbar.ETA()] if o.interactive or o.summary: if any_chunk_replace_option or any_region_replace_option: diff --git a/regionfixer_core/scan.py b/regionfixer_core/scan.py index 9b6d5fc..dc81080 100644 --- a/regionfixer_core/scan.py +++ b/regionfixer_core/scan.py @@ -36,7 +36,7 @@ from nbt.nbt import MalformedFileError from nbt.region import ChunkDataError, ChunkHeaderError,\ RegionHeaderError, InconceivedChunk -from . import progressbar +from progressbar import ProgressBar, Bar, AdaptiveETA, SimpleProgress from . 
import world from regionfixer_core.util import entitle @@ -99,15 +99,6 @@ def save_error_log(self, filename='error.log'): return error_log_path -class FractionWidget(progressbar.ProgressBarWidget): - """ Convenience class to use the progressbar.py """ - def __init__(self, sep=' / '): - self.sep = sep - - def update(self, pbar): - return '%2d%s%2d' % (pbar.currval, self.sep, pbar.maxval) - - def multiprocess_scan_data(data): """ Does the multithread stuff for scan_data """ # Protect everything so an exception will be returned from the worker @@ -500,17 +491,6 @@ def __len__(self): return l -# All scanners will use this progress bar -widgets = ['Scanning: ', - FractionWidget(), - ' ', - progressbar.Percentage(), - ' ', - progressbar.Bar(left='[', right=']'), - ' ', - progressbar.ETA()] - - def console_scan_loop(scanners, scan_titles, verbose): """ Uses all the AsyncScanner passed to scan the files and print status text to the terminal. """ @@ -522,8 +502,7 @@ def console_scan_loop(scanners, scan_titles, verbose): else: total = len(scanner) if not verbose: - pbar = progressbar.ProgressBar(widgets=widgets, - maxval=total) + pbar = ProgressBar(widgets=[SimpleProgress(), Bar(), AdaptiveETA()], maxval=total).start() try: scanner.scan() counter = 0 From 51163483660ad2f97880c198ddfae9c6612aed84 Mon Sep 17 00:00:00 2001 From: Alejandro Aguilera Date: Fri, 22 Feb 2019 00:49:46 +0100 Subject: [PATCH 060/151] Add missing entity tag as new problem to detect. --- regionfixer.py | 6 +++--- regionfixer_core/scan.py | 10 ++++++++++ regionfixer_core/world.py | 37 ++++++++++++++++++++++++++++++------- 3 files changed, 43 insertions(+), 10 deletions(-) diff --git a/regionfixer.py b/regionfixer.py index 8bddbc0..f65c04b 100644 --- a/regionfixer.py +++ b/regionfixer.py @@ -321,13 +321,13 @@ def main(): else: if (len(regionset.regions) > 0): error('You can\'t use the replace options while scanning ' - 'sparate region files. The input should be only one ' - 'world and you intruduced {0} individual region ' + 'separate region files. The input should be only one ' + 'world and you introduced {0} individual region ' 'files.'.format(len(regionset.regions))) elif (len(world_list) > 1): error('You can\'t use the replace options while scanning ' 'multiple worlds. 
The input should be only one ' - 'world and you intruduced {0} ' + 'world and you introduced {0} ' 'worlds.'.format(len(world_list))) if not o.backups and any_chunk_replace_option: diff --git a/regionfixer_core/scan.py b/regionfixer_core/scan.py index dc81080..e484bf4 100644 --- a/regionfixer_core/scan.py +++ b/regionfixer_core/scan.py @@ -808,6 +808,16 @@ def scan_chunk(region_file, coords, global_coords, entity_limit): data_coords = None global_coords = world.get_global_chunk_coords(split(region_file.filename)[1], coords[0], coords[1]) num_entities = None + + except KeyError as e: + error = "Missing Entities TAG" + status = world.CHUNK_MISSING_TAG + status_text = error + scan_time = time() + chunk = None + data_coords = None + global_coords = world.get_global_chunk_coords(split(region_file.filename)[1], coords[0], coords[1]) + num_entities = None return chunk, (num_entities, status) if status != world.CHUNK_NOT_CREATED else None diff --git a/regionfixer_core/world.py b/regionfixer_core/world.py index 07ed2a3..7a127e9 100644 --- a/regionfixer_core/world.py +++ b/regionfixer_core/world.py @@ -43,22 +43,26 @@ CHUNK_WRONG_LOCATED = 2 CHUNK_TOO_MANY_ENTITIES = 3 CHUNK_SHARED_OFFSET = 4 +CHUNK_MISSING_TAG = 5 CHUNK_STATUS_TEXT = {CHUNK_NOT_CREATED: "Not created", CHUNK_OK: "OK", CHUNK_CORRUPTED: "Corrupted", CHUNK_WRONG_LOCATED: "Wrong located", CHUNK_TOO_MANY_ENTITIES: "Too many entities", - CHUNK_SHARED_OFFSET: "Sharing offset"} + CHUNK_SHARED_OFFSET: "Sharing offset", + CHUNK_MISSING_TAG: "Missing Entities tag"} CHUNK_PROBLEMS = [CHUNK_CORRUPTED, CHUNK_WRONG_LOCATED, CHUNK_TOO_MANY_ENTITIES, - CHUNK_SHARED_OFFSET] + CHUNK_SHARED_OFFSET, + CHUNK_MISSING_TAG] CHUNK_PROBLEMS_ARGS = {CHUNK_CORRUPTED: 'corrupted', CHUNK_WRONG_LOCATED: 'wrong', CHUNK_TOO_MANY_ENTITIES: 'entities', - CHUNK_SHARED_OFFSET: 'sharing'} + CHUNK_SHARED_OFFSET: 'sharing', + CHUNK_MISSING_TAG: 'miss-tag'} # list with problem, status-text, problem arg tuples CHUNK_PROBLEMS_ITERATOR = [] for problem in CHUNK_PROBLEMS: @@ -663,6 +667,7 @@ def generate_report(self, standalone): wrong_located = self.count_chunks(CHUNK_WRONG_LOCATED) entities_prob = self.count_chunks(CHUNK_TOO_MANY_ENTITIES) shared_prob = self.count_chunks(CHUNK_SHARED_OFFSET) + miss_tag_prob = self.count_chunks(CHUNK_MISSING_TAG) total_chunks = self.count_chunks() too_small_region = self.count_regions(REGION_TOO_SMALL) @@ -674,13 +679,25 @@ def generate_report(self, standalone): # Print all this info in a table format # chunks - chunk_errors = ("Problem","Corrupted","Wrong l.","Etities","Shared o.", "Total chunks") - chunk_counters = ("Counts",corrupted, wrong_located, entities_prob, shared_prob, total_chunks) + chunk_errors = ("Problem", + "Corrupted", + "Wrong l.", + "Entities", + "Shared o.", + "Missing tag", + "Total chunks") + chunk_counters = ("Counts", + corrupted, + wrong_located, + entities_prob, + shared_prob, + miss_tag_prob, + total_chunks) table_data = [] for i, j in zip(chunk_errors, chunk_counters): table_data.append([i,j]) text += "\nChunk problems:\n" - if corrupted or wrong_located or entities_prob or shared_prob: + if corrupted or wrong_located or entities_prob or shared_prob or miss_tag_prob: text += table(table_data) else: text += "No problems found.\n" @@ -1023,12 +1040,16 @@ def rescan_entities(self, options): regionset.rescan_entities(options) def generate_report(self, standalone): + """ Generates a report with the results of the scan. 
""" # collect data corrupted = self.count_chunks(CHUNK_CORRUPTED) wrong_located = self.count_chunks(CHUNK_WRONG_LOCATED) entities_prob = self.count_chunks(CHUNK_TOO_MANY_ENTITIES) shared_prob = self.count_chunks(CHUNK_SHARED_OFFSET) + miss_tag_prob = self.count_chunks(CHUNK_MISSING_TAG) + print("miss tag prob {0}".format(miss_tag_prob)) + print("WHAT THE ACTUAL FFFF") total_chunks = self.count_chunks() too_small_region = self.count_regions(REGION_TOO_SMALL) @@ -1065,18 +1086,20 @@ def generate_report(self, standalone): "Wrong l.", "Entities", "Shared o.", + "Missing tag", "Total chunks") chunk_counters = ("Counts", corrupted, wrong_located, entities_prob, shared_prob, + miss_tag_prob, total_chunks) table_data = [] for i, j in zip(chunk_errors, chunk_counters): table_data.append([i,j]) text += "\nChunk problems:\n" - if corrupted or wrong_located or entities_prob or shared_prob: + if corrupted or wrong_located or entities_prob or shared_prob or miss_tag_prob: text += table(table_data) else: text += "No problems found.\n" From d6ea6306b7385bb5c8c6ad1c9b166799eda94399 Mon Sep 17 00:00:00 2001 From: Alejandro Aguilera Date: Fri, 1 Mar 2019 11:31:44 +0100 Subject: [PATCH 061/151] Remove bug report ftp sender. --- regionfixer.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/regionfixer.py b/regionfixer.py index f65c04b..1242bf6 100644 --- a/regionfixer.py +++ b/regionfixer.py @@ -490,7 +490,7 @@ def main(): had_exception = True print(ERROR_MSG) bug_sender = BugReporter(e.printable_traceback) - auto_reported = bug_sender.ask_and_send(QUESTION_TEXT) + #auto_reported = bug_sender.ask_and_send(QUESTION_TEXT) bug_report = bug_sender.error_str except Exception as e: @@ -498,7 +498,7 @@ def main(): print(ERROR_MSG) # Traceback will be taken in init bug_sender = BugReporter() - auto_reported = bug_sender.ask_and_send(QUESTION_TEXT) + #auto_reported = bug_sender.ask_and_send(QUESTION_TEXT) bug_report = bug_sender.error_str finally: From d6afd55dcd06324f452ace5b0aad9a4b8232bd6a Mon Sep 17 00:00:00 2001 From: Alejandro Aguilera Date: Mon, 4 Mar 2019 23:28:21 +0100 Subject: [PATCH 062/151] Change generate_report in World and RegionSet to a more dynamic code, now its easier to add new chunk problems. 
--- regionfixer_core/scan.py | 2 +- regionfixer_core/world.py | 224 +++++++++++++++++++++----------------- 2 files changed, 127 insertions(+), 99 deletions(-) diff --git a/regionfixer_core/scan.py b/regionfixer_core/scan.py index e484bf4..7ca4430 100644 --- a/regionfixer_core/scan.py +++ b/regionfixer_core/scan.py @@ -811,7 +811,7 @@ def scan_chunk(region_file, coords, global_coords, entity_limit): except KeyError as e: error = "Missing Entities TAG" - status = world.CHUNK_MISSING_TAG + status = world.CHUNK_MISSING_ENTITIES_TAG status_text = error scan_time = time() chunk = None diff --git a/regionfixer_core/world.py b/regionfixer_core/world.py index 7a127e9..ef3ea2c 100644 --- a/regionfixer_core/world.py +++ b/regionfixer_core/world.py @@ -34,35 +34,55 @@ # Constants: +# + # Chunk related: # -------------- -# Used to mark the status of a chunks: +# Used to mark the status of chunks: CHUNK_NOT_CREATED = -1 CHUNK_OK = 0 CHUNK_CORRUPTED = 1 CHUNK_WRONG_LOCATED = 2 CHUNK_TOO_MANY_ENTITIES = 3 CHUNK_SHARED_OFFSET = 4 -CHUNK_MISSING_TAG = 5 +CHUNK_MISSING_ENTITIES_TAG = 5 + +# Text describing each chunk status CHUNK_STATUS_TEXT = {CHUNK_NOT_CREATED: "Not created", CHUNK_OK: "OK", CHUNK_CORRUPTED: "Corrupted", CHUNK_WRONG_LOCATED: "Wrong located", CHUNK_TOO_MANY_ENTITIES: "Too many entities", CHUNK_SHARED_OFFSET: "Sharing offset", - CHUNK_MISSING_TAG: "Missing Entities tag"} + CHUNK_MISSING_ENTITIES_TAG: "Missing Entities tag"} +# Status that are considered problems CHUNK_PROBLEMS = [CHUNK_CORRUPTED, CHUNK_WRONG_LOCATED, CHUNK_TOO_MANY_ENTITIES, CHUNK_SHARED_OFFSET, - CHUNK_MISSING_TAG] + CHUNK_MISSING_ENTITIES_TAG] +# arguments used in the options CHUNK_PROBLEMS_ARGS = {CHUNK_CORRUPTED: 'corrupted', CHUNK_WRONG_LOCATED: 'wrong', CHUNK_TOO_MANY_ENTITIES: 'entities', CHUNK_SHARED_OFFSET: 'sharing', - CHUNK_MISSING_TAG: 'miss-tag'} + CHUNK_MISSING_ENTITIES_TAG: 'miss_tag'} + +# Dictionary with possible solutions for the chunks problems, +# used to create options dynamically +# The possible solutions right now are: +CHUNK_SOLUTION_REMOVE = 101 +CHUNK_SOLUTION_REPLACE = 102 +CHUNK_SOLUTION_REMOVE_ENTITIES = 103 + +CHUNK_PROBLEMS_SOLUTIONS = {CHUNK_CORRUPTED: [CHUNK_SOLUTION_REMOVE, CHUNK_SOLUTION_REPLACE], + CHUNK_WRONG_LOCATED: [CHUNK_SOLUTION_REMOVE, CHUNK_SOLUTION_REPLACE], + CHUNK_TOO_MANY_ENTITIES: [CHUNK_SOLUTION_REMOVE_ENTITIES], + CHUNK_SHARED_OFFSET: [CHUNK_SOLUTION_REMOVE, CHUNK_SOLUTION_REPLACE], + CHUNK_MISSING_ENTITIES_TAG: [CHUNK_SOLUTION_REMOVE, CHUNK_SOLUTION_REPLACE]} + # list with problem, status-text, problem arg tuples CHUNK_PROBLEMS_ITERATOR = [] for problem in CHUNK_PROBLEMS: @@ -657,68 +677,71 @@ def rescan_entities(self, options): for r in list(self.keys()): self[r].rescan_entities(options) + def generate_report(self, standalone): - """ Generates a report of the last scan. If standalone is True - it will generate a report to print in a terminal. If it's False - it will returns the counters of every problem. """ - - # collect data - corrupted = self.count_chunks(CHUNK_CORRUPTED) - wrong_located = self.count_chunks(CHUNK_WRONG_LOCATED) - entities_prob = self.count_chunks(CHUNK_TOO_MANY_ENTITIES) - shared_prob = self.count_chunks(CHUNK_SHARED_OFFSET) - miss_tag_prob = self.count_chunks(CHUNK_MISSING_TAG) - total_chunks = self.count_chunks() - - too_small_region = self.count_regions(REGION_TOO_SMALL) - unreadable_region = self.count_regions(REGION_UNREADABLE) - total_regions = self.count_regions() + """ Generates a report with the results of the scan. 
The report + will include information about chunks and regions. + + If standalone is true it will return a string of text with the + results of the scan. + + If standalone is false it will return a dictionary with all the counts of chunks + and regions, to use the dictionary use the variables defined in the start of this + file. The variables are named CHUNK_* + """ + # collect chunk data + chunk_counts = {} + has_chunk_problems = False + for p in CHUNK_PROBLEMS: + chunk_counts[p] = self.count_chunks(p) + if chunk_counts[p] != 0: + has_chunk_problems = True + chunk_counts['TOTAL'] = self.count_chunks() + + # collect region data + region_counts = {} + has_region_problems = False + for p in REGION_PROBLEMS: + region_counts[p] = self.count_regions(p) + if region_counts[p] != 0: + has_region_problems = True + region_counts['TOTAL'] = self.count_regions() + + # create a text string with a report of all found if standalone: text = "" - # Print all this info in a table format - # chunks - chunk_errors = ("Problem", - "Corrupted", - "Wrong l.", - "Entities", - "Shared o.", - "Missing tag", - "Total chunks") - chunk_counters = ("Counts", - corrupted, - wrong_located, - entities_prob, - shared_prob, - miss_tag_prob, - total_chunks) - table_data = [] - for i, j in zip(chunk_errors, chunk_counters): - table_data.append([i,j]) + # add all chunk info in a table format text += "\nChunk problems:\n" - if corrupted or wrong_located or entities_prob or shared_prob or miss_tag_prob: + if has_chunk_problems: + table_data = [] + table_data.append(['Problem','Count']) + for p in CHUNK_PROBLEMS: + if chunk_counts[p] is not 0: + table_data.append([CHUNK_STATUS_TEXT[p],chunk_counts[p]]) + table_data.append(['Total', chunk_counts['TOTAL']]) text += table(table_data) else: text += "No problems found.\n" - # regions - text += "\n\nRegion files problems:\n" - region_errors = ("Problem","Too small","Unreadable","Total regions") - region_counters = ("Counts", too_small_region,unreadable_region, total_regions) - table_data = [] - # compose the columns for the table - for i, j in zip(region_errors, region_counters): - table_data.append([i,j]) - if too_small_region: + # add all region information + text += "\n\nRegion problems:\n" + if has_region_problems: + table_data = [] + table_data.append(['Problem','Count']) + for p in REGION_PROBLEMS: + if region_counts[p] is not 0: + table_data.append([REGION_STATUS_TEXT[p],region_counts[p]]) + table_data.append(['Total', region_counts['TOTAL']]) text += table(table_data) + else: text += "No problems found." - + return text else: - return corrupted, wrong_located, entities_prob, shared_prob, total_chunks, too_small_region, unreadable_region, total_regions - + return chunk_counts, region_counts def remove_problematic_regions(self, problem): """ Removes all the regions files with the given problem. This is NOT the same as removing chunks, this WILL DELETE @@ -1040,26 +1063,42 @@ def rescan_entities(self, options): regionset.rescan_entities(options) def generate_report(self, standalone): - """ Generates a report with the results of the scan. 
""" - - # collect data - corrupted = self.count_chunks(CHUNK_CORRUPTED) - wrong_located = self.count_chunks(CHUNK_WRONG_LOCATED) - entities_prob = self.count_chunks(CHUNK_TOO_MANY_ENTITIES) - shared_prob = self.count_chunks(CHUNK_SHARED_OFFSET) - miss_tag_prob = self.count_chunks(CHUNK_MISSING_TAG) - print("miss tag prob {0}".format(miss_tag_prob)) - print("WHAT THE ACTUAL FFFF") - total_chunks = self.count_chunks() - - too_small_region = self.count_regions(REGION_TOO_SMALL) - unreadable_region = self.count_regions(REGION_UNREADABLE) - total_regions = self.count_regions() + """ Generates a report with the results of the scan. The report + will include information about data structures (.dat files), + player files, chunks and regions. + + If standalone is true it will return a string of text with the + results of the scan. + + If standalone is false it will return a dictionary with all the counts, + to use the dictionary use the variables defined in the start of this + file. The variables are named CHUNK_*. Note that right now doesn't return + information about the data files. + """ + # collect chunk data + chunk_counts = {} + has_chunk_problems = False + for p in CHUNK_PROBLEMS: + chunk_counts[p] = self.count_chunks(p) + if chunk_counts[p] != 0: + has_chunk_problems = True + chunk_counts['TOTAL'] = self.count_chunks() + + # collect region data + region_counts = {} + has_region_problems = False + for p in REGION_PROBLEMS: + region_counts[p] = self.count_regions(p) + if region_counts[p] != 0: + has_region_problems = True + region_counts['TOTAL'] = self.count_regions() + + # create a text string with a report of all found if standalone: text = "" - # Print all the player files with problems + # add all the player files with problems text += "\nUnreadable player files:\n" broken_players = [p for p in self.players._get_list() if not p.readable] broken_players.extend([p for p in self.old_players._get_list() if not p.readable]) @@ -1080,47 +1119,36 @@ def generate_report(self, standalone): else: text += "No problems found.\n" - # Print all chunk info in a table format - chunk_errors = ("Problem", - "Corrupted", - "Wrong l.", - "Entities", - "Shared o.", - "Missing tag", - "Total chunks") - chunk_counters = ("Counts", - corrupted, - wrong_located, - entities_prob, - shared_prob, - miss_tag_prob, - total_chunks) - table_data = [] - for i, j in zip(chunk_errors, chunk_counters): - table_data.append([i,j]) + # add all chunk info in a table format text += "\nChunk problems:\n" - if corrupted or wrong_located or entities_prob or shared_prob or miss_tag_prob: + if has_chunk_problems: + table_data = [] + table_data.append(['Problem','Count']) + for p in CHUNK_PROBLEMS: + if chunk_counts[p] is not 0: + table_data.append([CHUNK_STATUS_TEXT[p],chunk_counts[p]]) + table_data.append(['Total', chunk_counts['TOTAL']]) text += table(table_data) else: text += "No problems found.\n" + # add all region information text += "\n\nRegion problems:\n" - region_errors = ("Problem","Too small","Unreadable","Total regions") - region_counters = ("Counts", too_small_region,unreadable_region, total_regions) - table_data = [] - # compose the columns for the table - for i, j in zip(region_errors, region_counters): - table_data.append([i,j]) - if too_small_region: + if has_region_problems: + table_data = [] + table_data.append(['Problem','Count']) + for p in REGION_PROBLEMS: + if region_counts[p] is not 0: + table_data.append([REGION_STATUS_TEXT[p],region_counts[p]]) + table_data.append(['Total', region_counts['TOTAL']]) text += 
table(table_data) + else: text += "No problems found." - + return text else: - return corrupted, wrong_located, entities_prob, shared_prob,\ - total_chunks, too_small_region, unreadable_region,\ - total_regions + return chunk_counts, region_counts From 6038c59c7acbb90cefedf622293b5b38ec502440 Mon Sep 17 00:00:00 2001 From: Alejandro Aguilera Date: Tue, 5 Mar 2019 00:12:18 +0100 Subject: [PATCH 063/151] Do a similar thing with region problems. Fix some typos. --- regionfixer_core/scan.py | 7 +++-- regionfixer_core/world.py | 62 +++++++++++++++++++++++++++++---------- 2 files changed, 52 insertions(+), 17 deletions(-) diff --git a/regionfixer_core/scan.py b/regionfixer_core/scan.py index 7ca4430..d1d7a6b 100644 --- a/regionfixer_core/scan.py +++ b/regionfixer_core/scan.py @@ -636,7 +636,7 @@ def scan_region_file(scanned_regionfile_obj, entity_limit, delete_entities): If delete_entities is True it will delete entities while scanning - entiti_limit is the threshold tof entities to conisder a chunk + entiti_limit is the threshold of entities to consider a chunk with too much entities problems. """ try: @@ -713,6 +713,9 @@ def scan_region_file(scanned_regionfile_obj, entity_limit, delete_entities): # good one). Only wrong located chunk with a overlapping # flag are really BAD chunks! Use this criterion to # discriminate + # + # TODO: Why? I don't remember why + metadata = region_file.metadata sharing = [k for k in metadata if ( metadata[k].status == region.STATUS_CHUNK_OVERLAPPING and @@ -751,7 +754,7 @@ def scan_region_file(scanned_regionfile_obj, entity_limit, delete_entities): def scan_chunk(region_file, coords, global_coords, entity_limit): - """ Takes a RegionFile obj and the local coordinatesof the chunk as + """ Takes a nbt.RegionFile object and the local coordinates of the chunk as inputs, then scans the chunk and returns all the data.""" el = entity_limit try: diff --git a/regionfixer_core/world.py b/regionfixer_core/world.py index ef3ea2c..e8a713d 100644 --- a/regionfixer_core/world.py +++ b/regionfixer_core/world.py @@ -47,6 +47,13 @@ CHUNK_SHARED_OFFSET = 4 CHUNK_MISSING_ENTITIES_TAG = 5 +# Status that are considered problems +CHUNK_PROBLEMS = [CHUNK_CORRUPTED, + CHUNK_WRONG_LOCATED, + CHUNK_TOO_MANY_ENTITIES, + CHUNK_SHARED_OFFSET, + CHUNK_MISSING_ENTITIES_TAG] + # Text describing each chunk status CHUNK_STATUS_TEXT = {CHUNK_NOT_CREATED: "Not created", CHUNK_OK: "OK", @@ -56,13 +63,6 @@ CHUNK_SHARED_OFFSET: "Sharing offset", CHUNK_MISSING_ENTITIES_TAG: "Missing Entities tag"} -# Status that are considered problems -CHUNK_PROBLEMS = [CHUNK_CORRUPTED, - CHUNK_WRONG_LOCATED, - CHUNK_TOO_MANY_ENTITIES, - CHUNK_SHARED_OFFSET, - CHUNK_MISSING_ENTITIES_TAG] - # arguments used in the options CHUNK_PROBLEMS_ARGS = {CHUNK_CORRUPTED: 'corrupted', CHUNK_WRONG_LOCATED: 'wrong', @@ -70,12 +70,19 @@ CHUNK_SHARED_OFFSET: 'sharing', CHUNK_MISSING_ENTITIES_TAG: 'miss_tag'} +# used in some places where there is less space +CHUNK_PROBLEMS_ABBR = {CHUNK_CORRUPTED: 'c', + CHUNK_WRONG_LOCATED: 'w', + CHUNK_TOO_MANY_ENTITIES: 'tme', + CHUNK_SHARED_OFFSET: 'so', + CHUNK_MISSING_ENTITIES_TAG: 'mt'} + # Dictionary with possible solutions for the chunks problems, # used to create options dynamically # The possible solutions right now are: -CHUNK_SOLUTION_REMOVE = 101 -CHUNK_SOLUTION_REPLACE = 102 -CHUNK_SOLUTION_REMOVE_ENTITIES = 103 +CHUNK_SOLUTION_REMOVE = 51 +CHUNK_SOLUTION_REPLACE = 52 +CHUNK_SOLUTION_REMOVE_ENTITIES = 53 CHUNK_PROBLEMS_SOLUTIONS = {CHUNK_CORRUPTED: [CHUNK_SOLUTION_REMOVE, 
CHUNK_SOLUTION_REPLACE], CHUNK_WRONG_LOCATED: [CHUNK_SOLUTION_REMOVE, CHUNK_SOLUTION_REPLACE], @@ -93,15 +100,36 @@ # Region related: # --------------- # Used to mark the status of region files: -REGION_OK = 10 -REGION_TOO_SMALL = 11 -REGION_UNREADABLE = 12 +REGION_OK = 100 +REGION_TOO_SMALL = 101 +REGION_UNREADABLE = 102 + +# Text describing each chunk status REGION_STATUS_TEXT = {REGION_OK: "Ok", REGION_TOO_SMALL: "Too small", REGION_UNREADABLE: "Unreadable"} -REGION_PROBLEMS = [REGION_TOO_SMALL] -REGION_PROBLEMS_ARGS = {REGION_TOO_SMALL: 'too small'} +# Status that are considered problems +REGION_PROBLEMS = [REGION_TOO_SMALL, + REGION_UNREADABLE] + +# arguments used in the options +REGION_PROBLEMS_ARGS = {REGION_TOO_SMALL: 'too_small', + REGION_UNREADABLE: 'unreadable'} + +# used in some places where there is less space +REGION_PROBLEMS_ABBR = {REGION_TOO_SMALL: 'ts', + REGION_UNREADABLE: 'ur'} + +# Dictionary with possible solutions for the chunks problems, +# used to create options dynamically +# The possible solutions right now are: +REGION_SOLUTION_REMOVE = 501 +REGION_SOLUTION_REPLACE = 502 + +REGION_PROBLEMS_SOLUTIONS = {REGION_TOO_SMALL: [REGION_SOLUTION_REMOVE, REGION_SOLUTION_REPLACE], + REGION_UNREADABLE: [REGION_SOLUTION_REMOVE, REGION_SOLUTION_REPLACE]} + # list with problem, status-text, problem arg tuples REGION_PROBLEMS_ITERATOR = [] @@ -122,6 +150,10 @@ TUPLE_NUM_ENTITIES = 0 TUPLE_STATUS = 1 +# Data files relate: +#------------------- +# TODO TODO TODO + # Dimension names: DIMENSION_NAMES = {"region": "Overworld", "DIM1": "The End", From c13a12a4669e4b50ce1718c99bf0a655db07b5af Mon Sep 17 00:00:00 2001 From: Alejandro Aguilera Date: Tue, 5 Mar 2019 00:19:56 +0100 Subject: [PATCH 064/151] Fix more typos. --- regionfixer.py | 12 ++++++------ 1 file changed, 6 insertions(+), 6 deletions(-) diff --git a/regionfixer.py b/regionfixer.py index 1242bf6..3094455 100644 --- a/regionfixer.py +++ b/regionfixer.py @@ -188,7 +188,7 @@ def main(): 'the entities in chunks with more entities than ' '--entity-limit (300 by default). In a Minecraft ' 'entities are mostly mobs and items dropped in the ' - 'grond, items in chests and other stuff won\'t be ' + 'ground, items in chests and other stuff won\'t be ' 'touched. Read the README for more info. Region-Fixer ' 'will delete the entities while scanning so you can ' 'stop and resume the process', @@ -225,7 +225,7 @@ def main(): add_option('--processes', '-p', - help='Set the number of workers to use for scanning. (defaulta ' + help='Set the number of workers to use for scanning. (default ' '= 1, not use multiprocessing at all)', action='store', type=int, @@ -236,7 +236,7 @@ def main(): help='Don\'t use a progress bar, instead print a line per ' 'scanned region file with results information. The ' 'letters mean c: corrupted; w: wrong located; t: total of ' - 'chunksm; tme: too many entities problem', + 'chunks; tme: too many entities problem', action='store_true', default=False) @@ -250,7 +250,7 @@ def main(): add_option('--log', '-l', - help='Saves a log of all the problems found in the spicifyed ' + help='Saves a log of all the problems found in the specified ' 'file. The log file contains all the problems found with ' 'this information: region file, chunk coordinates and ' 'problem. 
Use \'-\' as name to show the log at the end ' @@ -271,11 +271,11 @@ def main(): if is_bare_console(): print("") - print("Minecraft Region Fixer hast a command line aplication and a GUI\n" + print("Minecraft Region Fixer has a command line application and a GUI\n" "(Graphic User Interface) and you have just double clicked the\n" "command line interface. If you really want to run the command line\n" "interface you have to use a command prompt (cmd.exe)\n\n" - "You can also run the gui, double click regionfixer_gui.py instead!") + "You can also run the GUI, double click regionfixer_gui.py instead!") print("") getpass("Press enter to continue:") return 1 From bdb6cab1c4d730793ea07a7479f8f46283836517 Mon Sep 17 00:00:00 2001 From: Alejandro Aguilera Date: Thu, 11 Apr 2019 23:06:28 +0200 Subject: [PATCH 065/151] Comment some code. Add lists with all the statuses for chunks and regions. --- regionfixer_core/scan.py | 18 ++++++++++++++++-- regionfixer_core/util.py | 8 ++++---- regionfixer_core/world.py | 25 ++++++++++++++++++++++--- 3 files changed, 42 insertions(+), 9 deletions(-) diff --git a/regionfixer_core/scan.py b/regionfixer_core/scan.py index d1d7a6b..0e6034e 100644 --- a/regionfixer_core/scan.py +++ b/regionfixer_core/scan.py @@ -754,8 +754,22 @@ def scan_region_file(scanned_regionfile_obj, entity_limit, delete_entities): def scan_chunk(region_file, coords, global_coords, entity_limit): - """ Takes a nbt.RegionFile object and the local coordinates of the chunk as - inputs, then scans the chunk and returns all the data.""" + """ Scans a chunk returning its status and number of entities. + + Keywords arguments: + region_file -- nbt.RegionFile object + coords -- tuple containing the local (region) coordinates of the chunk + global_coords -- tuple containing the global (world) coordinates of the chunk + entity_limit -- the number of entities that is considered to be too many + + Return: + chunk -- as a nbt file + (num_entities, status) -- tuple with the number of entities of the chunk and + the status described by the CHUNK_* variables in + world.py + + If the chunk does not exist (is not yet created it returns None + """ el = entity_limit try: chunk = region_file.get_chunk(*coords) diff --git a/regionfixer_core/util.py b/regionfixer_core/util.py index 5100b22..f77276c 100644 --- a/regionfixer_core/util.py +++ b/regionfixer_core/util.py @@ -107,12 +107,12 @@ def table(columns): returns a text string with a table. 
""" def get_max_len(l): - """ Takes a list and returns the length of the biggest - element """ + """ Takes a list of strings and returns the length of the biggest + string """ m = 0 for e in l: if len(str(e)) > m: - m = len(e) + m = len(str(e)) return m text = "" @@ -132,7 +132,7 @@ def get_max_len(l): ml_total += 1 + 2# +1 for the separator | and +2 for the borders text += "-"*ml_total + "\n" # all the columns have the same number of rows - row = get_max_len(columns) + row = len(columns[0]) for r in range(row): line = "|" # put all the elements in this row together with spaces diff --git a/regionfixer_core/world.py b/regionfixer_core/world.py index e8a713d..dad290e 100644 --- a/regionfixer_core/world.py +++ b/regionfixer_core/world.py @@ -47,6 +47,16 @@ CHUNK_SHARED_OFFSET = 4 CHUNK_MISSING_ENTITIES_TAG = 5 +# Chunk statuses +CHUNK_STATUSES = [CHUNK_NOT_CREATED, + CHUNK_OK, + CHUNK_CORRUPTED, + CHUNK_WRONG_LOCATED, + CHUNK_TOO_MANY_ENTITIES, + CHUNK_TOO_MANY_ENTITIES, + CHUNK_SHARED_OFFSET, + CHUNK_MISSING_ENTITIES_TAG] + # Status that are considered problems CHUNK_PROBLEMS = [CHUNK_CORRUPTED, CHUNK_WRONG_LOCATED, @@ -104,6 +114,11 @@ REGION_TOO_SMALL = 101 REGION_UNREADABLE = 102 +# Region statuses +REGION_STATUSES = [REGION_OK, + REGION_TOO_SMALL, + REGION_UNREADABLE] + # Text describing each chunk status REGION_STATUS_TEXT = {REGION_OK: "Ok", REGION_TOO_SMALL: "Too small", @@ -244,13 +259,17 @@ def __init__(self, filename, corrupted=0, wrong=0, entities_prob=0, # in the region file self.chunks = {} - # Counters with the number of chunks - # Filled in scan.scan_region_file + # Dictionary containing counters to for all the statuses + # self.corrupted_chunks = corrupted self.wrong_located_chunks = wrong self.entities_prob = entities_prob self.shared_offset = shared_offset self.chunk_count = chunks + + self.counts = {} + for s in CHUNK_STATUSES: + self.counts[s] = 0 # time when the scan for this file finished self.scan_time = time @@ -350,7 +369,7 @@ def get_coords(self): # without a problem will remove all the chunks in the region file!! def list_chunks(self, status=None): """ Returns a list of all the ScannedChunk objects of the chunks - with the given status, if no status is omited or None, + with the given status, if no status is omitted or None, returns all the existent chunks in the region file """ l = [] From f958c06cb954ce11ac17eccc3d845c57ebab46c3 Mon Sep 17 00:00:00 2001 From: Alejandro Aguilera Date: Thu, 11 Apr 2019 23:48:21 +0200 Subject: [PATCH 066/151] Provisionally add exceptions TypeError and UnicodeDecodeError to the scan_data function. --- regionfixer_core/scan.py | 9 ++++++++- 1 file changed, 8 insertions(+), 1 deletion(-) diff --git a/regionfixer_core/scan.py b/regionfixer_core/scan.py index 0e6034e..bb94fdd 100644 --- a/regionfixer_core/scan.py +++ b/regionfixer_core/scan.py @@ -595,7 +595,7 @@ def console_scan_regionset(regionset, processes, entity_limit, def scan_data(scanned_dat_file): - """ Try to parse the nbd data file, and fill the scanned object. + """ Try to parse the nbt data file, and fill the scanned object. If something is wrong it will return a tuple with useful info to debug the problem. 
@@ -622,6 +622,13 @@ def scan_data(scanned_dat_file): except IOError as e: s.readable = False s.status_text = str(e) + except UnicodeDecodeError as e: + s.readable = False + s.status_text = str(e) + except TypeError as e: + s.readable = False + s.status_text = str(e) + except: s.readable = False except_type, except_class, tb = sys.exc_info() From 9ab890bba45b0ae9e6b30be1cb702d4e1a357940 Mon Sep 17 00:00:00 2001 From: Alejandro Aguilera Date: Thu, 11 Apr 2019 23:59:31 +0200 Subject: [PATCH 067/151] Comment out some unused in scan_chunk --- regionfixer_core/scan.py | 56 ++++++++++++++++++++++++++-------------- 1 file changed, 36 insertions(+), 20 deletions(-) diff --git a/regionfixer_core/scan.py b/regionfixer_core/scan.py index bb94fdd..d9781c1 100644 --- a/regionfixer_core/scan.py +++ b/regionfixer_core/scan.py @@ -784,60 +784,76 @@ def scan_chunk(region_file, coords, global_coords, entity_limit): num_entities = len(chunk["Level"]["Entities"]) if data_coords != global_coords: status = world.CHUNK_WRONG_LOCATED - status_text = "Mismatched coordinates (wrong located chunk)." - scan_time = time() + #=================================================================== + # status_text = "Mismatched coordinates (wrong located chunk)." + # scan_time = time() + #=================================================================== elif num_entities > el: status = world.CHUNK_TOO_MANY_ENTITIES - status_text = "The chunks has too many entities (it has {0}, and it's more than the limit {1})".format(num_entities, entity_limit) - scan_time = time() + #=================================================================== + # status_text = "The chunks has too many entities (it has {0}, and it's more than the limit {1})".format(num_entities, entity_limit) + # scan_time = time() + #=================================================================== else: status = world.CHUNK_OK - status_text = "OK" - scan_time = time() + #=================================================================== + # status_text = "OK" + # scan_time = time() + #=================================================================== except InconceivedChunk as e: chunk = None data_coords = None num_entities = None status = world.CHUNK_NOT_CREATED - status_text = "The chunk doesn't exist" - scan_time = time() + #======================================================================= + # status_text = "The chunk doesn't exist" + # scan_time = time() + #======================================================================= except RegionHeaderError as e: - error = "Region header error: " + e.msg status = world.CHUNK_CORRUPTED - status_text = error - scan_time = time() + #======================================================================= + # error = "Region header error: " + e.msg + # status_text = error + # scan_time = time() + #======================================================================= chunk = None data_coords = None global_coords = world.get_global_chunk_coords(split(region_file.filename)[1], coords[0], coords[1]) num_entities = None except ChunkDataError as e: - error = "Chunk data error: " + e.msg status = world.CHUNK_CORRUPTED - status_text = error - scan_time = time() + #======================================================================= + # error = "Chunk data error: " + e.msg + # status_text = error + # scan_time = time() + #======================================================================= chunk = None data_coords = None global_coords = world.get_global_chunk_coords(split(region_file.filename)[1], coords[0], 
coords[1]) num_entities = None except ChunkHeaderError as e: - error = "Chunk herader error: " + e.msg status = world.CHUNK_CORRUPTED - status_text = error - scan_time = time() + #======================================================================= + # error = "Chunk herader error: " + e.msg + # status_text = error + # scan_time = time() + #======================================================================= chunk = None data_coords = None global_coords = world.get_global_chunk_coords(split(region_file.filename)[1], coords[0], coords[1]) num_entities = None except KeyError as e: - error = "Missing Entities TAG" status = world.CHUNK_MISSING_ENTITIES_TAG - status_text = error - scan_time = time() + #======================================================================= + # error = "Missing Entities TAG" + # status_text = error + # scan_time = time() + #======================================================================= chunk = None data_coords = None global_coords = world.get_global_chunk_coords(split(region_file.filename)[1], coords[0], coords[1]) From 8cf2c5fae45ff046ac3ce40f86a0d8806ff89ea4 Mon Sep 17 00:00:00 2001 From: Alejandro Aguilera Date: Mon, 15 Apr 2019 12:20:25 +0200 Subject: [PATCH 068/151] Added option to fix missing tags, but it does not work right now. --- regionfixer.py | 49 ++++++++++++++++++++++++++++++++++++--- regionfixer_core/world.py | 40 ++++++++++++++++++++++++++++++++ 2 files changed, 86 insertions(+), 3 deletions(-) diff --git a/regionfixer.py b/regionfixer.py index 3094455..c6c467b 100644 --- a/regionfixer.py +++ b/regionfixer.py @@ -34,12 +34,42 @@ parse_backup_list from regionfixer_core.version import version_string from regionfixer_core.bug_reporter import BugReporter +from regionfixer_core.world import CHUNK_MISSING_ENTITIES_TAG + + +def fix_bad_chunks(options, scanned_obj): + """ Fixes chunks that can be repaired. + + Doesn't work right now. + """ + print("") + total = scanned_obj.count_chunks(CHUNK_MISSING_ENTITIES_TAG) + problem = CHUNK_MISSING_ENTITIES_TAG + status = world.CHUNK_STATUS_TEXT[CHUNK_MISSING_ENTITIES_TAG] + if options.fix_missing_tag: + if total: + + text = ' Repairing chunks with status: {0} '.format(status) + print(("\n{0:#^60}".format(text))) + counter = scanned_obj.fix_problematic_chunks(problem) + print(("\nRepaired {0} chunks with status: {1}".format(counter, + status))) + else: + print(("No chunks to fix with status: {0}".format(status))) def delete_bad_chunks(options, scanned_obj): - """ Takes a scanned object (world object or regionset object) and - the options given to region-fixer, it deletes all the chunks with - problems iterating through all the possible problems. """ + """ Takes a scanned object and deletes all the bad chunks. + + Keywords arguments + options -- options as returned by the module optparse + scanned_obj -- a regionfixer world or regionset + + Returns nothing. + + This function will deletes all the chunks with problems + iterating through all the possible problems and using the + options given. 
""" print("") # In the same order as in CHUNK_PROBLEMS options_delete = [options.delete_corrupted, @@ -214,6 +244,13 @@ def main(): default=False, action='store_true') + add_option('--fix-missing-tag', + '--fm', + help='[DON\'T WORK] Fixes chunks that have the Entities tag missing.', + dest='fix_missing_tag', + default=False, + action='store_true') + add_option('--entity-limit', '--el', help='Specify the limit for the --delete-entities option ' @@ -367,6 +404,9 @@ def main(): # Delete region files delete_bad_regions(options, regionset) + # fix chunks + fix_bad_chunks(options, regionset) + # Verbose log if options.summary: summary_text += "\n" @@ -453,6 +493,9 @@ def main(): # delete region files delete_bad_regions(options, w) + # fix chunks + fix_bad_chunks(options, scanned_obj) + # print a summary for this world if options.summary: summary_text += w.summary() diff --git a/regionfixer_core/world.py b/regionfixer_core/world.py index dad290e..60a0c0d 100644 --- a/regionfixer_core/world.py +++ b/regionfixer_core/world.py @@ -31,6 +31,7 @@ from shutil import copy import time +from nbt.nbt import TAG_List # Constants: @@ -421,6 +422,25 @@ def remove_problematic_chunks(self, problem): return counter + def fix_problematic_chunks(self, problem): + + counter = 0 + bad_chunks = self.list_chunks(problem) + for c in bad_chunks: + global_coords = c[0] + local_coords = _get_local_chunk_coords(*global_coords) + region_file = region.RegionFile(self.path) + chunk = region_file.get_chunk(*local_coords) + chunk['Level']['Entities'] = TAG_List() + region_file.write_chunk(local_coords[0],local_coords[1], chunk) + counter += 1 + # create the new status tuple + # (num_entities, chunk status) + self[local_coords] = (0 , CHUNK_NOT_CREATED) + + return counter + + def remove_entities(self): """ Removes all the entities in chunks with the problematic status CHUNK_TOO_MANY_ENTITIES that are in this region file. @@ -714,6 +734,19 @@ def remove_problematic_chunks(self, problem): return counter + def fix_problematic_chunks(self, problem): + """ Removes all the chunks with the given problem, returns a + counter with the number of deleted chunks. """ + + counter = 0 + if self.count_chunks(): + print(' Repairing chunks in region set \"{0}\":'.format(self._get_dimension_directory())) + for r in list(self.regions.keys()): + counter += self.regions[r].fix_problematic_chunks(problem) + print("Repaired {0} chunks in this regionset.\n".format(counter)) + + return counter + def remove_entities(self): """ Removes entities in chunks with the status TOO_MANY_ENTITIES. """ @@ -1043,6 +1076,13 @@ def remove_problematic_chunks(self, problem): counter += regionset.remove_problematic_chunks(problem) return counter + def fix_problematic_chunks(self, problem): + """ Removes all the chunks with the given problem. """ + counter = 0 + for regionset in self.regionsets: + counter += regionset.fix_problematic_chunks(problem) + return counter + def replace_problematic_regions(self, backup_worlds, problem, entity_limit, delete_entities): """ Replaces region files with the given problem using a backup directory. """ From f52da44d0f06a0cb3688602aa4a389c432633a1b Mon Sep 17 00:00:00 2001 From: Alejandro Aguilera Date: Mon, 15 Apr 2019 12:34:17 +0200 Subject: [PATCH 069/151] Added UnicodeDecodeError and TypeError exceptions to scan_chunk as corrupted chunk status provisionally. 
--- regionfixer_core/scan.py | 26 ++++++++++++++++++++++++++ 1 file changed, 26 insertions(+) diff --git a/regionfixer_core/scan.py b/regionfixer_core/scan.py index d9781c1..1c9c565 100644 --- a/regionfixer_core/scan.py +++ b/regionfixer_core/scan.py @@ -859,6 +859,32 @@ def scan_chunk(region_file, coords, global_coords, entity_limit): global_coords = world.get_global_chunk_coords(split(region_file.filename)[1], coords[0], coords[1]) num_entities = None + except UnicodeDecodeError as e: + # TODO: This should another kind of error, it's now being handled as corrupted chunk + status = world.CHUNK_CORRUPTED + #======================================================================= + # error = "Chunk data error: " + e.msg + # status_text = error + # scan_time = time() + #======================================================================= + chunk = None + data_coords = None + global_coords = world.get_global_chunk_coords(split(region_file.filename)[1], coords[0], coords[1]) + num_entities = None + + except TypeError as e: + # TODO: This should another kind of error, it's now being handled as corrupted chunk + status = world.CHUNK_CORRUPTED + #======================================================================= + # error = "Chunk data error: " + e.msg + # status_text = error + # scan_time = time() + #======================================================================= + chunk = None + data_coords = None + global_coords = world.get_global_chunk_coords(split(region_file.filename)[1], coords[0], coords[1]) + num_entities = None + return chunk, (num_entities, status) if status != world.CHUNK_NOT_CREATED else None From 7cb346aea3e777441962c063a521b4f19d60aed4 Mon Sep 17 00:00:00 2001 From: Alejandro Aguilera Date: Mon, 15 Apr 2019 14:54:03 +0200 Subject: [PATCH 070/151] Fix wrong variable name. --- regionfixer.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/regionfixer.py b/regionfixer.py index c6c467b..7b5c9e8 100644 --- a/regionfixer.py +++ b/regionfixer.py @@ -494,7 +494,7 @@ def main(): delete_bad_regions(options, w) # fix chunks - fix_bad_chunks(options, scanned_obj) + fix_bad_chunks(options, w) # print a summary for this world if options.summary: From 6ead1169b653192dedb2320924992ebf87c00cbb Mon Sep 17 00:00:00 2001 From: Alejandro Aguilera Date: Tue, 16 Apr 2019 10:29:26 +0200 Subject: [PATCH 071/151] Region files are always opened as r+b mode, add new status for region files when this is not possible. 
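Because PermissionError is a subclass of OSError (IOError) in Python 3, its handler has to come before the generic IOError one or it would never run; that ordering is what gives read-protected region files their own REGION_UNREADABLE_PERMISSION_ERROR status. A stripped-down sketch of the probe, reusing the constants added to world.py (the helper name and the sample path are made up):

    REGION_UNREADABLE = 102
    REGION_UNREADABLE_PERMISSION_ERROR = 103

    def probe_region_file(path):
        """ Open a region file the way the scanner does (read/write, binary). """
        try:
            with open(path, 'r+b'):
                return "readable"
        except PermissionError:
            # Must be listed first: PermissionError is also an IOError.
            return REGION_UNREADABLE_PERMISSION_ERROR
        except IOError:
            return REGION_UNREADABLE

    print(probe_region_file('/no/such/region/r.0.0.mca'))  # generic IOError path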
--- regionfixer_core/scan.py | 7 +++++++ regionfixer_core/world.py | 25 ++++++++++++++++--------- 2 files changed, 23 insertions(+), 9 deletions(-) diff --git a/regionfixer_core/scan.py b/regionfixer_core/scan.py index 1c9c565..dcae02a 100644 --- a/regionfixer_core/scan.py +++ b/regionfixer_core/scan.py @@ -665,6 +665,13 @@ def scan_region_file(scanned_regionfile_obj, entity_limit, delete_entities): r.scan_time = time() r.scanned = True return r + + except PermissionError as e: + r.status = world.REGION_UNREADABLE_PERMISSION_ERROR + r.scan_time = time() + r.scanned = True + return r + except IOError as e: r.status = world.REGION_UNREADABLE r.scan_time = time() diff --git a/regionfixer_core/world.py b/regionfixer_core/world.py index 60a0c0d..80fd46f 100644 --- a/regionfixer_core/world.py +++ b/regionfixer_core/world.py @@ -36,7 +36,7 @@ # Constants: # - +# -------------- # Chunk related: # -------------- # Used to mark the status of chunks: @@ -108,36 +108,43 @@ CHUNK_STATUS_TEXT[problem], CHUNK_PROBLEMS_ARGS[problem])) +# --------------- # Region related: # --------------- # Used to mark the status of region files: REGION_OK = 100 REGION_TOO_SMALL = 101 REGION_UNREADABLE = 102 +REGION_UNREADABLE_PERMISSION_ERROR = 103 # Region statuses REGION_STATUSES = [REGION_OK, REGION_TOO_SMALL, - REGION_UNREADABLE] + REGION_UNREADABLE, + REGION_UNREADABLE_PERMISSION_ERROR] -# Text describing each chunk status +# Text describing each region status used to list all the problem at the end of the scan REGION_STATUS_TEXT = {REGION_OK: "Ok", REGION_TOO_SMALL: "Too small", - REGION_UNREADABLE: "Unreadable"} + REGION_UNREADABLE: "Unreadable IOError", + REGION_UNREADABLE_PERMISSION_ERROR: "Permission error"} # Status that are considered problems REGION_PROBLEMS = [REGION_TOO_SMALL, - REGION_UNREADABLE] + REGION_UNREADABLE, + REGION_UNREADABLE_PERMISSION_ERROR] # arguments used in the options REGION_PROBLEMS_ARGS = {REGION_TOO_SMALL: 'too_small', - REGION_UNREADABLE: 'unreadable'} + REGION_UNREADABLE: 'unreadable', + REGION_UNREADABLE_PERMISSION_ERROR: 'permission_error'} # used in some places where there is less space REGION_PROBLEMS_ABBR = {REGION_TOO_SMALL: 'ts', - REGION_UNREADABLE: 'ur'} + REGION_UNREADABLE: 'ur', + REGION_UNREADABLE_PERMISSION_ERROR: 'pe'} -# Dictionary with possible solutions for the chunks problems, +# Dictionary with possible solutions for the region problems, # used to create options dynamically # The possible solutions right now are: REGION_SOLUTION_REMOVE = 501 @@ -991,7 +998,7 @@ def replace_problematic_chunks(self, backup_worlds, problem, entity_limit, delet """ Takes a list of world objects and a problem value and try to replace every chunk with that problem using a working chunk from the list of world objects. It uses the world - objects in left to riht order. """ + objects in left to right order. """ counter = 0 scanned_regions = {} From 64bb9828bbebb14871c005833a39f2917ab501ed Mon Sep 17 00:00:00 2001 From: Alejandro Aguilera Date: Tue, 16 Apr 2019 13:10:43 +0200 Subject: [PATCH 072/151] Add option to remove missing tag chunks. 
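In delete_bad_chunks() the flag list is paired positionally with world.CHUNK_PROBLEMS through zip(), so the new --delete-missing-tag flag has to sit in the same slot as the CHUNK_MISSING_ENTITIES_TAG constant. A toy illustration of that pairing (the flag values are hypothetical, and strings stand in for the integer constants):

    # Order matters: flag i applies to the problem constant in position i.
    CHUNK_PROBLEMS = ['corrupted', 'wrong', 'entities', 'sharing', 'miss_tag']
    options_delete = [True, False, False, False, True]   # parsed CLI flags

    for delete, problem in zip(options_delete, CHUNK_PROBLEMS):
        if delete:
            print("would delete chunks with problem:", problem)

Keeping the two sequences in the same order is what makes the pairing hold as new problems are added.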
--- regionfixer.py | 25 +++++++++++++++++-------- regionfixer_core/world.py | 2 ++ 2 files changed, 19 insertions(+), 8 deletions(-) diff --git a/regionfixer.py b/regionfixer.py index 7b5c9e8..6c59f0c 100644 --- a/regionfixer.py +++ b/regionfixer.py @@ -75,7 +75,8 @@ def delete_bad_chunks(options, scanned_obj): options_delete = [options.delete_corrupted, options.delete_wrong_located, options.delete_entities, - options.delete_shared_offset] + options.delete_shared_offset, + options.delete_missing_tag] deleting = list(zip(options_delete, world.CHUNK_PROBLEMS)) for delete, problem in deleting: status = world.CHUNK_STATUS_TEXT[problem] @@ -236,11 +237,11 @@ def main(): default=False, dest='delete_shared_offset') - add_option('--delete-too-small', - '--dt', - help='[WARNING!] This option deletes! Removes any region files ' - 'found to be too small to actually be a region file.', - dest='delete_too_small', + add_option('--delete-missing-tag', + '--dmt', + help='[WARNING!] This option deletes! Removes any chunks ' + 'with the mandatory entities tag missing.', + dest='delete_missing_tag', default=False, action='store_true') @@ -251,6 +252,14 @@ def main(): default=False, action='store_true') + add_option('--delete-too-small', + '--dt', + help='[WARNING!] This option deletes! Removes any region files ' + 'found to be too small to actually be a region file.', + dest='delete_too_small', + default=False, + action='store_true') + add_option('--entity-limit', '--el', help='Specify the limit for the --delete-entities option ' @@ -299,9 +308,9 @@ def main(): (options, args) = parser.parse_args() o = options - if sys.version_info[0] > 5: + if sys.version_info[0] != 3: print("") - print("Minecraft Region Fixer only works with python 2.x") + print("Minecraft Region Fixer only works with python 3.x") print(("(And you just tried to run it in python {0})".format(sys.version))) print("") return 1 diff --git a/regionfixer_core/world.py b/regionfixer_core/world.py index 80fd46f..92b262a 100644 --- a/regionfixer_core/world.py +++ b/regionfixer_core/world.py @@ -127,6 +127,8 @@ REGION_STATUS_TEXT = {REGION_OK: "Ok", REGION_TOO_SMALL: "Too small", REGION_UNREADABLE: "Unreadable IOError", + # This status differentiates IOError from a file that you don't have permission to access + # TODO: It would be better to open region files only in write mode when needed REGION_UNREADABLE_PERMISSION_ERROR: "Permission error"} # Status that are considered problems From f87f39d8a8a86acadd738397ef7b935f4e896a55 Mon Sep 17 00:00:00 2001 From: Alejandro Aguilera Date: Tue, 16 Apr 2019 15:48:24 +0200 Subject: [PATCH 073/151] Finish implementing the option --fix-missing-tag --- regionfixer.py | 2 +- regionfixer_core/world.py | 4 +++- 2 files changed, 4 insertions(+), 2 deletions(-) diff --git a/regionfixer.py b/regionfixer.py index 6c59f0c..fa9332c 100644 --- a/regionfixer.py +++ b/regionfixer.py @@ -247,7 +247,7 @@ def main(): add_option('--fix-missing-tag', '--fm', - help='[DON\'T WORK] Fixes chunks that have the Entities tag missing.', + help='Fixes chunks that have the Entities tag missing. 
This will add the missing tag.', dest='fix_missing_tag', default=False, action='store_true') diff --git a/regionfixer_core/world.py b/regionfixer_core/world.py index 92b262a..f41aced 100644 --- a/regionfixer_core/world.py +++ b/regionfixer_core/world.py @@ -440,7 +440,9 @@ def fix_problematic_chunks(self, problem): local_coords = _get_local_chunk_coords(*global_coords) region_file = region.RegionFile(self.path) chunk = region_file.get_chunk(*local_coords) - chunk['Level']['Entities'] = TAG_List() + # The arguments to create the empty TAG_List have been somehow extracted by comparing + # the tag list from a healthy chunk with the one created by nbt + chunk['Level']['Entities'] = TAG_List(name='Entities', type=nbt._TAG_End) region_file.write_chunk(local_coords[0],local_coords[1], chunk) counter += 1 # create the new status tuple From ce939a2afc5f990e93dbcc5f011d34ccb161507c Mon Sep 17 00:00:00 2001 From: Alejandro Aguilera Date: Tue, 16 Apr 2019 15:57:14 +0200 Subject: [PATCH 074/151] Update Readme.rst --- README.rst | 10 +++++++--- 1 file changed, 7 insertions(+), 3 deletions(-) diff --git a/README.rst b/README.rst index 0ca0f27..d51f4eb 100644 --- a/README.rst +++ b/README.rst @@ -3,7 +3,6 @@ Minecraft Region Fixer ====================== By Alejandro Aguilera (Fenixin) -Sponsored by NITRADO servers (http://nitrado.net) Locates problems and tries to fix Minecraft worlds (or region files). @@ -24,8 +23,8 @@ http://www.minecraftforum.net/topic/275730-minecraft-region-fixer/ Supported platforms =================== -This program seems to work with Python 2.7.x, and DOESN'T work with -python 3.x. There is also a windows executable for ease of use, if you +This program only works with Python 3.x, and DOESN'T work with +python 2.x. There is also a windows executable for ease of use, if you use the windows executable you don't need to install Python. @@ -79,6 +78,11 @@ http://www.minecraftforum.net/topic/275730-tool-minecraft-region-fixer/ http://www.minecraftforum.net/topic/302380-tool-minecraft-region-fixer/ +Donations and sponsors +====================== +Region-Fixer was created thanks to sponsors and donations. You can find +information about that in DONORS.txt + Contributors ============ See CONTRIBUTORS.txt From dae6070eb9724a77194546fe7cc23bc2025b0c0d Mon Sep 17 00:00:00 2001 From: Alejandro Aguilera Date: Tue, 16 Apr 2019 21:26:50 +0200 Subject: [PATCH 075/151] Bump again the version number. --- regionfixer_core/version.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/regionfixer_core/version.py b/regionfixer_core/version.py index e9e6338..9478b2d 100644 --- a/regionfixer_core/version.py +++ b/regionfixer_core/version.py @@ -4,5 +4,5 @@ @author: Alejandro ''' -version_string = "0.2.2" +version_string = "0.2.3" version_numbers = version_string.split('.') From 8d652a9c98ca8d655243c252c8aab3c702099f0f Mon Sep 17 00:00:00 2001 From: Alejandro Aguilera Date: Sun, 21 Apr 2019 12:19:31 +0200 Subject: [PATCH 076/151] Big changes in how corrupted chunks, regions and data files are counted. Fixed the return value of the program being wrong always. 
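With this change the exit code finally carries information, so the tool can be driven from scripts. A hypothetical wrapper, using the RV_* values defined in this patch (the world path is a placeholder):

    # Hypothetical wrapper around regionfixer.py, reacting to the new return values.
    import subprocess
    import sys

    RV_OK, RV_CRASH, RV_NOTHING_TO_SCAN, RV_BAD_WORLD = 0, 1, 2, 3

    rv = subprocess.call([sys.executable, "regionfixer.py", "/path/to/world"])
    if rv == RV_OK:
        print("Scan finished and no problems were found")
    elif rv == RV_BAD_WORLD:
        print("Scan finished, but the world has problems")
    elif rv == RV_NOTHING_TO_SCAN:
        print("No worlds or region files to scan")
    else:  # RV_CRASH, or an unexpected failure
        print("Region Fixer crashed or could not run")
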
--- regionfixer.py | 51 ++++++--- regionfixer_core/scan.py | 6 +- regionfixer_core/world.py | 211 ++++++++++++++++++++++++++++---------- 3 files changed, 198 insertions(+), 70 deletions(-) diff --git a/regionfixer.py b/regionfixer.py index fa9332c..439ee0b 100644 --- a/regionfixer.py +++ b/regionfixer.py @@ -37,6 +37,15 @@ from regionfixer_core.world import CHUNK_MISSING_ENTITIES_TAG +################ +# Return values +################ + +RV_OK = 0 # world scanned and no problems found +RV_CRASH = 1 # crash or end unexpectedly +RV_NOTHING_TO_SCAN = 2 # no files/worlds to scan +RV_BAD_WORLD = 3 # scan completed successfully but problems have been found in the scan + def fix_bad_chunks(options, scanned_obj): """ Fixes chunks that can be repaired. @@ -313,7 +322,7 @@ def main(): print("Minecraft Region Fixer only works with python 3.x") print(("(And you just tried to run it in python {0})".format(sys.version))) print("") - return 1 + return RV_CRASH if is_bare_console(): print("") @@ -324,7 +333,7 @@ def main(): "You can also run the GUI, double click regionfixer_gui.py instead!") print("") getpass("Press enter to continue:") - return 1 + return RV_CRASH # Args are world_paths and region files if not args: @@ -335,7 +344,7 @@ def main(): if not (world_list or regionset): print ("Error: No worlds or region files to scan!") - return 1 + return RV_NOTHING_TO_SCAN # Check basic options compatibilities any_chunk_replace_option = o.replace_corrupted or \ @@ -395,14 +404,19 @@ def main(): else: backup_worlds = [] - # The program starts + # The scanning process starts + found_problems_in_regionsets = False + found_problems_in_worlds = False if o.interactive: c = InteractiveLoop(world_list, regionset, o, backup_worlds) c.cmdloop() + return RV_OK else: summary_text = "" # Scan the separate region files + if len(regionset.regions) > 0: + console_scan_regionset(regionset, o.processes, o.entity_limit, o.delete_entities, o.verbose) print((regionset.generate_report(True))) @@ -426,8 +440,13 @@ def main(): summary_text += t else: summary_text += "No problems found.\n\n" + + # Check if problems have been found + if regionset.has_problems: + found_problems_in_regionsets = True # scan all the world folders + for w in world_list: w_name = w.get_name() print((entitle(' Scanning world: {0} '.format(w_name), 0))) @@ -438,12 +457,8 @@ def main(): print("") print((entitle('Scan results for: {0}'.format(w_name), 0))) print((w.generate_report(True))) - -# corrupted, wrong_located, entities_prob, shared_prob,\ -# total_chunks, too_small_region, unreadable_region, total_regions\ -# = w.generate_report(standalone = False) - print("") + # Replace chunks if backup_worlds and not len(world_list) > 1: del_ent = options.delete_entities @@ -509,6 +524,10 @@ def main(): if options.summary: summary_text += w.summary() + # check if problems have been found + if w.has_problems: + found_problems_in_worlds = True + # verbose log text if options.summary == '-': print("\nPrinting log:\n") @@ -522,8 +541,11 @@ def main(): print(("Log file saved in \'{0}\'.".format(options.summary))) except: print("Something went wrong while saving the log file!") - - return 0 + + if found_problems_in_regionsets or found_problems_in_worlds: + return RV_BAD_WORLD + + return RV_OK if __name__ == '__main__': @@ -536,7 +558,9 @@ def main(): try: freeze_support() value = main() - sys.exit(value) + #======================================================================= + # sys.exit(value) + 
#======================================================================= except ChildProcessException as e: had_exception = True @@ -544,6 +568,7 @@ def main(): bug_sender = BugReporter(e.printable_traceback) #auto_reported = bug_sender.ask_and_send(QUESTION_TEXT) bug_report = bug_sender.error_str + value = RV_CRASH except Exception as e: had_exception = True @@ -552,6 +577,7 @@ def main(): bug_sender = BugReporter() #auto_reported = bug_sender.ask_and_send(QUESTION_TEXT) bug_report = bug_sender.error_str + value = RV_CRASH finally: if had_exception and not auto_reported: @@ -561,3 +587,4 @@ def main(): print(bug_report) elif had_exception and auto_reported: print("Bug report uploaded successfully") + sys.exit(value) diff --git a/regionfixer_core/scan.py b/regionfixer_core/scan.py index dcae02a..6bba072 100644 --- a/regionfixer_core/scan.py +++ b/regionfixer_core/scan.py @@ -237,6 +237,7 @@ def get_last_result(self): self.raise_child_exception(d) # Copy it to the father process ds._replace_in_data_structure(d) + ds._update_counts(d) self.update_str_last_scanned(d) # Got result! Reset it! self.queries_without_results = 0 @@ -510,6 +511,7 @@ def console_scan_loop(scanners, scan_titles, verbose): scanner.sleep() result = scanner.get_last_result() if result: + logging.debug("\nNew result: {0}\n\nOneliner: {1}\n".format(result,result.oneliner_status)) counter += 1 if not verbose: pbar.update(counter) @@ -687,7 +689,7 @@ def scan_region_file(scanned_regionfile_obj, entity_limit, delete_entities): g_coords, entity_limit) if c: - r.chunks[(x, z)] = c + r[(x, z)] = c chunk_count += 1 else: # chunk not created @@ -705,7 +707,7 @@ def scan_region_file(scanned_regionfile_obj, entity_limit, delete_entities): " ({1},{2}) of the region file: {3}").format( c[TUPLE_NUM_ENTITIES], x, z, r.filename)) # entities removed, change chunk status to OK - r.chunks[(x, z)] = (0, world.CHUNK_OK) + r[(x, z)] = (0, world.CHUNK_OK) else: entities_prob += 1 diff --git a/regionfixer_core/world.py b/regionfixer_core/world.py index f41aced..76edb4e 100644 --- a/regionfixer_core/world.py +++ b/regionfixer_core/world.py @@ -54,7 +54,6 @@ CHUNK_CORRUPTED, CHUNK_WRONG_LOCATED, CHUNK_TOO_MANY_ENTITIES, - CHUNK_TOO_MANY_ENTITIES, CHUNK_SHARED_OFFSET, CHUNK_MISSING_ENTITIES_TAG] @@ -267,19 +266,13 @@ def __init__(self, filename, corrupted=0, wrong=0, entities_prob=0, # dictionary storing all the state tuples of all the chunks # in the region file - self.chunks = {} + self._chunks = {} # Dictionary containing counters to for all the statuses # - self.corrupted_chunks = corrupted - self.wrong_located_chunks = wrong - self.entities_prob = entities_prob - self.shared_offset = shared_offset - self.chunk_count = chunks - - self.counts = {} + self._counts = {} for s in CHUNK_STATUSES: - self.counts[s] = 0 + self._counts[s] = 0 # time when the scan for this file finished self.scan_time = time @@ -293,12 +286,15 @@ def __init__(self, filename, corrupted=0, wrong=0, entities_prob=0, @property def oneliner_status(self): + # TODO: update oneliner and automate it if self.scanned: status = self.status if status == REGION_OK: - stats = "c: {0}, w: {1}, tme: {2}, so: {3}, t: {4}".format(\ - self.corrupted_chunks, self.wrong_located_chunks,\ - self.entities_prob, self.shared_offset, self.chunk_count) + stats = "" + for s in CHUNK_PROBLEMS: + stats += "{0}:{1}, ".format(CHUNK_PROBLEMS_ABBR[s], self.count_chunks(s)) + stats += "t:{0}".format(self.count_chunks()) + elif status == REGION_TOO_SMALL: stats = "No header in the region file" elif status == 
REGION_UNREADABLE: @@ -318,20 +314,23 @@ def __str__(self): return text def __getitem__(self, key): - return self.chunks[key] + return self._chunks[key] def __setitem__(self, key, value): - self.chunks[key] = value + self._chunks[key] = value + self._counts[value[TUPLE_STATUS]] += 1 def keys(self): - return list(self.chunks.keys()) + return list(self._chunks.keys()) - def get_counters(self): - """ Returns integers with all the problem counters in this - region file. The order is corrupted, wrong located, entities - shared header, total chunks """ - return self.corrupted_chunks, self.wrong_located_chunks,\ - self.entities_prob, self.shared_offset, self.count_chunks() + #=========================================================================== + # def get_counters(self): + # """ Returns integers with all the problem counters in this + # region file. The order is corrupted, wrong located, entities + # shared header, total chunks """ + # return self.corrupted_chunks, self.wrong_located_chunks,\ + # self.entities_prob, self.shared_offset, self.count_chunks() + #=========================================================================== def get_path(self): """ Returns the path of the region file. """ @@ -341,12 +340,20 @@ def count_chunks(self, problem=None): """ Counts chunks in the region file with the given problem. If problem is omited or None, counts all the chunks. Returns an integer with the counter. """ - counter = 0 - for coords in list(self.keys()): - if self[coords] and (self[coords][TUPLE_STATUS] == problem or problem == None): - counter += 1 + #======================================================================= + # counter = 0 + # for coords in list(self.keys()): + # if self[coords] and (self[coords][TUPLE_STATUS] == problem or problem == None): + # counter += 1 + #======================================================================= + + if problem == None: + c = 0 + for s in CHUNK_STATUSES: c += self._counts[s] + else: + c = self._counts[problem] - return counter + return c def get_global_chunk_coords(self, chunkX, chunkZ): """ Takes the region filename and the chunk local @@ -505,7 +512,19 @@ def rescan_entities(self, options): class DataSet(object): - """ Stores data items to be scanned by AsyncScanner in scan.py. """ + """ Stores data items to be scanned by AsyncScanner in scan.py. + + The data should be in a dictionary and should be accessible through the + methods __getitem__, __setitem__. The methods, _get_list, __len__ are also used. + + _replace_in_data_structure should be created because during the scan the + different processes create copies of the original data, so replacing it in + the original data set is mandatory. + + _update_counts makes sure that the DataSet stores all the counts and that + it is not needed to loop through all of them to know the real count. + + """ def summary(self): """ Return a summary of problems found in this set. """ @@ -517,10 +536,11 @@ def _replace_in_data_structure(self, data): def _get_list(self): raise NotImplemented + def _update_counts(self, s): + raise NotImplemented + def __getitem__(self, key): - """ This and __setitem__ should use the path of the file as keys - not the filename. (I think) - TODO: Es realmente esto necesario? 
+ """ """ raise NotImplemented @@ -546,6 +566,12 @@ def __init__(self, path, title, *args, **kwargs): for path in data_files_path: d[path] = ScannedDataFile(path) + @property + def has_problems(self): + for d in self.data_files.values(): + if not d.readable: return True + return False + def _get_list(self): return list(self.data_files.values()) @@ -555,6 +581,9 @@ def _replace_in_data_structure(self, data): def __len__(self): return len(self.data_files) + def _update_counts(self, s): + pass + def summary(self): """ Return a summary of problems found in this set. """ text = "" @@ -581,20 +610,28 @@ def __init__(self, regionset_path=None, region_list=[]): try: r = ScannedRegionFile(path) self.regions[r.get_coords()] = r - self.corrupted_chunks = 0 - self.wrong_located_chunks = 0 - self.entities_problems = 0 - self.shared_header = 0 - self.bad_list = [] - self.scanned = False except InvalidFileName as e: print("Warning: The file {0} is not a valid name for a region. I'll skip it.".format(path)) - + + # region and chunk counters with all the data from the scan + self._region_counters = {} + for status in REGION_STATUSES: + self._region_counters[status] = 0 + + self._chunk_counters = {} + for status in CHUNK_STATUSES: + self._chunk_counters[status] = 0 + + # has this regionset been scanned? + self.scanned = False def get_name(self): - """ Return a string with the name of the dimension, the - directory if there is no name or "" if there's nothing """ + """ Return a string with a representative name for the regionset + + If the regionset is a dimension its name is returned, if not the directory and + if there is no name or "" if there is nothing to fall back + """ dim_directory = self._get_dimension_directory() if dim_directory: @@ -619,18 +656,38 @@ def _get_dimension_directory(self): return None def __str__(self): - text = "Region-set information:\n" + text = "RegionSet: {0}\n".format(self.get_name()) if self.path: text += " Regionset path: {0}\n".format(self.path) text += " Region files: {0}\n".format(len(self.regions)) text += " Scanned: {0}".format(str(self.scanned)) return text + @property + def has_problems(self): + """ Returns True if the regionset has chunk or region problems and false otherwise. """ + + for s in REGION_PROBLEMS: + if self.count_regions(s): + return True + + for s in CHUNK_PROBLEMS: + if self.count_chunks(s): + return True + + return False + def __getitem__(self, key): return self.regions[key] def __setitem__(self, key, value): self.regions[key] = value + assert(type(value) == ScannedRegionFile) + + self._region_counters[value.status] += 1 + + for status in CHUNK_STATUSES: + self._chunk_counters[status] += value.count_chunks(status) def __delitem__(self, key): del self.regions[key] @@ -653,11 +710,6 @@ def list_regions(self, status=None): all the objects.""" if status == None: - #~ print "Estamos tras pasar el if para status None" - #~ print "Los valores de el dict son:" - #~ print self.regions.values() - #~ print "El diccionario es si es:" - #~ print self.regions return list(self.regions.values()) t = [] for coords in list(self.regions.keys()): @@ -666,26 +718,50 @@ def list_regions(self, status=None): t.append(r) return t + def _update_counts(self, scanned_regionfile): + """ Updates the counters of the regionset with the new regionfile. 
""" + assert(type(scanned_regionfile) == ScannedRegionFile) + + self._region_counters[scanned_regionfile.status] += 1 + + for status in CHUNK_STATUSES: + self._chunk_counters[status] += scanned_regionfile.count_chunks(status) + def count_regions(self, status=None): """ Return the number of region files with status. If none returns the number of region files in this regionset. Possible status are: empty, too_small """ - + + #======================================================================= + # counter = 0 + # for r in list(self.keys()): + # if status == self[r].status: + # counter += 1 + # elif status == None: + # counter += 1 + #======================================================================= counter = 0 - for r in list(self.keys()): - if status == self[r].status: - counter += 1 - elif status == None: - counter += 1 + if status == None: + for s in REGION_STATUSES: + counter += self._region_counters[s] + else: + counter = self._region_counters[status] + + return counter def count_chunks(self, problem=None): """ Returns the number of chunks with the given problem. If problem is None returns the number of chunks. """ - counter = 0 - for r in list(self.keys()): - counter += self[r].count_chunks(problem) - return counter + + c = 0 + if problem == None: + for s in CHUNK_STATUSES: + c += self._chunk_counters[s] + else: + c = self._chunk_counters[problem] + + return c def list_chunks(self, status=None): """ Returns a list of the ScannedChunk objects of the chunks @@ -863,6 +939,8 @@ def __init__(self, world_path): for directory in glob(join(self.path, "DIM*/region")): self.regionsets.append(RegionSet(join(self.path, directory))) + print(self.regionsets) + # level.dat # Let's scan level.dat here so we can extract the world name level_dat_path = join(self.path, "level.dat") @@ -886,16 +964,20 @@ def __init__(self, world_path): "The file doesn't exist") # Player files + self.datafilesets = [] PLAYERS_DIRECTORY = 'playerdata' OLD_PLAYERS_DIRECTORY = ' players' STRUCTURES_DIRECTORY = 'data' self.players = DataFileSet(join(self.path, PLAYERS_DIRECTORY), "\nPlayer UUID files:\n") + self.datafilesets.append(self.players) self.old_players = DataFileSet(join(self.path, OLD_PLAYERS_DIRECTORY), "\nOld format player files:\n") + self.datafilesets.append(self.old_players) self.data_files = DataFileSet(join(self.path, STRUCTURES_DIRECTORY), "\nStructures and map data files:\n") + self.datafilesets.append(self.data_files) # Does it look like a world folder? region_files = False @@ -919,6 +1001,23 @@ def __str__(self): text += " Scanned: {0}".format(str(self.scanned)) return text + @property + def has_problems(self): + """ Returns True if the regionset has chunk or region problems and false otherwise. """ + + if not self.scanned_level.readable: + return True + + for d in self.datafilesets: + if d.has_problems: + return True + + for r in self.regionsets: + if r.has_problems: + return True + + return False + def get_number_regions(self): """ Returns a integer with the number of regions in this world""" counter = 0 From 0065dd8ba865d52616a6252ff069172cc6139656 Mon Sep 17 00:00:00 2001 From: Alejandro Aguilera Date: Mon, 22 Apr 2019 08:06:41 +0200 Subject: [PATCH 077/151] Start moving data files status marking to status constants, as it is done with chunks and region files. Clean up the ScannedRegionFile and scan_region code and remove old stuff. 
--- regionfixer.py | 6 +- regionfixer_core/scan.py | 27 ++--- regionfixer_core/world.py | 217 +++++++++++++++++++++----------------- 3 files changed, 133 insertions(+), 117 deletions(-) diff --git a/regionfixer.py b/regionfixer.py index 439ee0b..cdfda5f 100644 --- a/regionfixer.py +++ b/regionfixer.py @@ -374,11 +374,11 @@ def main(): error('The option --backups needs at least one of the ' '--replace-* options') else: - if (len(regionset.regions) > 0): + if (len(regionset) > 0): error('You can\'t use the replace options while scanning ' 'separate region files. The input should be only one ' 'world and you introduced {0} individual region ' - 'files.'.format(len(regionset.regions))) + 'files.'.format(len(regionset))) elif (len(world_list) > 1): error('You can\'t use the replace options while scanning ' 'multiple worlds. The input should be only one ' @@ -415,7 +415,7 @@ def main(): summary_text = "" # Scan the separate region files - if len(regionset.regions) > 0: + if len(regionset) > 0: console_scan_regionset(regionset, o.processes, o.entity_limit, o.delete_entities, o.verbose) diff --git a/regionfixer_core/scan.py b/regionfixer_core/scan.py index 6bba072..e746f09 100644 --- a/regionfixer_core/scan.py +++ b/regionfixer_core/scan.py @@ -650,15 +650,7 @@ def scan_region_file(scanned_regionfile_obj, entity_limit, delete_entities): """ try: r = scanned_regionfile_obj - # counters of problems - chunk_count = 0 - corrupted = 0 - wrong = 0 - entities_prob = 0 - shared = 0 - # used to detect chunks sharing headers - offsets = {} - filename = r.filename + # try to open the file and see if we can parse the header try: region_file = region.RegionFile(r.path) @@ -690,7 +682,6 @@ def scan_region_file(scanned_regionfile_obj, entity_limit, delete_entities): entity_limit) if c: r[(x, z)] = c - chunk_count += 1 else: # chunk not created continue @@ -700,7 +691,8 @@ def scan_region_file(scanned_regionfile_obj, entity_limit, delete_entities): elif c[TUPLE_STATUS] == world.CHUNK_TOO_MANY_ENTITIES: # Deleting entities is in here because parsing a chunk # with thousands of wrong entities takes a long time, - # and once detected is better to fix it at once. + # and sometimes GiB of RAM, and once detected is better + # to fix it at once. if delete_entities: world.delete_entities(region_file, x, z) print(("Deleted {0} entities in chunk" @@ -710,18 +702,17 @@ def scan_region_file(scanned_regionfile_obj, entity_limit, delete_entities): r[(x, z)] = (0, world.CHUNK_OK) else: - entities_prob += 1 # This stores all the entities in a file, # comes handy sometimes. #~ pretty_tree = chunk['Level']['Entities'].pretty_tree() #~ name = "{2}.chunk.{0}.{1}.txt".format(x,z,split(region_file.filename)[1]) #~ archivo = open(name,'w') #~ archivo.write(pretty_tree) - + pass elif c[TUPLE_STATUS] == world.CHUNK_CORRUPTED: - corrupted += 1 + pass elif c[TUPLE_STATUS] == world.CHUNK_WRONG_LOCATED: - wrong += 1 + pass # Now check for chunks sharing offsets: # Please note! region.py will mark both overlapping chunks @@ -731,6 +722,7 @@ def scan_region_file(scanned_regionfile_obj, entity_limit, delete_entities): # discriminate # # TODO: Why? 
I don't remember why + # TODO: Leave this to nbt, which code is much better than this metadata = region_file.metadata sharing = [k for k in metadata if ( @@ -741,11 +733,6 @@ def scan_region_file(scanned_regionfile_obj, entity_limit, delete_entities): r[k] = (r[k][TUPLE_NUM_ENTITIES], world.CHUNK_SHARED_OFFSET) shared_counter += 1 - r.chunk_count = chunk_count - r.corrupted_chunks = corrupted - r.wrong_located_chunks = wrong - r.entities_prob = entities_prob - r.shared_offset = shared_counter r.scan_time = time() r.status = world.REGION_OK r.scanned = True diff --git a/regionfixer_core/world.py b/regionfixer_core/world.py index 76edb4e..1fc33fb 100644 --- a/regionfixer_core/world.py +++ b/regionfixer_core/world.py @@ -107,6 +107,11 @@ CHUNK_STATUS_TEXT[problem], CHUNK_PROBLEMS_ARGS[problem])) +# Used to know where to look in a chunk status tuple +TUPLE_NUM_ENTITIES = 0 +TUPLE_STATUS = 1 + + # --------------- # Region related: # --------------- @@ -123,7 +128,7 @@ REGION_UNREADABLE_PERMISSION_ERROR] # Text describing each region status used to list all the problem at the end of the scan -REGION_STATUS_TEXT = {REGION_OK: "Ok", +REGION_STATUS_TEXT = {REGION_OK: "OK", REGION_TOO_SMALL: "Too small", REGION_UNREADABLE: "Unreadable IOError", # This status differentiates IOError from a file that you don't have permission to access @@ -148,8 +153,8 @@ # Dictionary with possible solutions for the region problems, # used to create options dynamically # The possible solutions right now are: -REGION_SOLUTION_REMOVE = 501 -REGION_SOLUTION_REPLACE = 502 +REGION_SOLUTION_REMOVE = 151 +REGION_SOLUTION_REPLACE = 152 REGION_PROBLEMS_SOLUTIONS = {REGION_TOO_SMALL: [REGION_SOLUTION_REMOVE, REGION_SOLUTION_REPLACE], REGION_UNREADABLE: [REGION_SOLUTION_REMOVE, REGION_SOLUTION_REPLACE]} @@ -167,16 +172,54 @@ REGION_PROBLEMS_ARGS = {REGION_TOO_SMALL: 'too-small'} -# Used to know where to look in a chunk status tuple -#~ TUPLE_COORDS = 0 -#~ TUPLE_DATA_COORDS = 0 -#~ TUPLE_GLOBAL_COORDS = 2 -TUPLE_NUM_ENTITIES = 0 -TUPLE_STATUS = 1 +# ------------------ +# Data file related: +# ------------------ +# Used to mark the status of data files: +DATAFILE_OK = 200 +DATAFILE_UNREADABLE = 201 + + +# Data files statuses +DATAFILE_STATUSES = [DATAFILE_OK, + DATAFILE_UNREADABLE] + +# Status that are considered problems +DATAFILE_PROBLEMS = [DATAFILE_UNREADABLE] + +# Text describing each chunk status +DATAFILE_STATUS_TEXT = {DATAFILE_OK: "OK", + DATAFILE_UNREADABLE: "The data file cannot be read"} + +# arguments used in the options +DATAFILE_PROBLEMS_ARGS = {DATAFILE_OK: 'OK', + DATAFILE_UNREADABLE: 'unreadable'} + +# used in some places where there is less space +DATAFILE_PROBLEM_ABBR = {DATAFILE_OK: 'ok', + DATAFILE_UNREADABLE: 'ur'} + +# Dictionary with possible solutions for the chunks problems, +# used to create options dynamically +# The possible solutions right now are: +DATAFILE_SOLUTION_REMOVE = 251 + +DATAFILE_PROBLEMS_SOLUTIONS = {DATAFILE_UNREADABLE:[DATAFILE_SOLUTION_REMOVE]} + +# list with problem, status-text, problem arg tuples +DATAFILE_PROBLEMS_ITERATOR = [] +for problem in DATAFILE_PROBLEMS: + DATAFILE_PROBLEMS_ITERATOR.append((problem, + DATAFILE_STATUS_TEXT[problem], + DATAFILE_PROBLEMS_ARGS[problem])) + +CHUNK_PROBLEMS_ITERATOR = [] +for problem in CHUNK_PROBLEMS: + CHUNK_PROBLEMS_ITERATOR.append((problem, + CHUNK_STATUS_TEXT[problem], + CHUNK_PROBLEMS_ARGS[problem])) + -# Data files relate: -#------------------- -# TODO TODO TODO # Dimension names: DIMENSION_NAMES = {"region": "Overworld", @@ -187,7 +230,6 
@@ class InvalidFileName(IOError): pass - class ScannedDataFile(object): def __init__(self, path=None, readable=None, status_text=None): self.path = path @@ -211,10 +253,11 @@ def oneliner_status(self): class ScannedChunk(object): """ Stores all the results of the scan. Not used at the moment, it prette nice but takes an huge amount of memory. """ - # WARNING: not used at the moment, it probably has bugs ans is - # outdated - # The problem with it was it took too much memory. It has been - # remplaced with a tuple + # WARNING: not used at the moment, it probably has bugs and it's outdated. + # The problem was it took too much memory, every full region file + # has a thousand chunks and a world with a thousand region files is not strange, + # that reach a million of this structures pretty fast. + # It has been replaced with a tuple def __init__(self, header_coords, global_coords=None, data_coords=None, status=None, num_entities=None, scan_time=None, region_path=None): @@ -254,8 +297,7 @@ def rescan_entities(self, options): class ScannedRegionFile(object): """ Stores all the scan information for a region file """ - def __init__(self, filename, corrupted=0, wrong=0, entities_prob=0, - shared_offset=0, chunks=0, status=0, time=None): + def __init__(self, filename, time=None): # general region file info self.path = filename self.filename = split(filename)[1] @@ -268,8 +310,7 @@ def __init__(self, filename, corrupted=0, wrong=0, entities_prob=0, # in the region file self._chunks = {} - # Dictionary containing counters to for all the statuses - # + # Dictionary containing counters to for all the chunks self._counts = {} for s in CHUNK_STATUSES: self._counts[s] = 0 @@ -280,21 +321,20 @@ def __init__(self, filename, corrupted=0, wrong=0, entities_prob=0, # The status of the region file. At the moment can be OK, # TOO SMALL or UNREADABLE see the constants at the start # of the file. - self.status = status + self.status = None self.scanned = False @property def oneliner_status(self): - # TODO: update oneliner and automate it + """ On line description of the status of the region file. """ if self.scanned: status = self.status - if status == REGION_OK: + if status == REGION_OK: # summary with all found in scan stats = "" for s in CHUNK_PROBLEMS: stats += "{0}:{1}, ".format(CHUNK_PROBLEMS_ABBR[s], self.count_chunks(s)) stats += "t:{0}".format(self.count_chunks()) - elif status == REGION_TOO_SMALL: stats = "No header in the region file" elif status == REGION_UNREADABLE: @@ -323,30 +363,16 @@ def __setitem__(self, key, value): def keys(self): return list(self._chunks.keys()) - #=========================================================================== - # def get_counters(self): - # """ Returns integers with all the problem counters in this - # region file. The order is corrupted, wrong located, entities - # shared header, total chunks """ - # return self.corrupted_chunks, self.wrong_located_chunks,\ - # self.entities_prob, self.shared_offset, self.count_chunks() - #=========================================================================== - def get_path(self): """ Returns the path of the region file. """ return self.path def count_chunks(self, problem=None): """ Counts chunks in the region file with the given problem. - If problem is omited or None, counts all the chunks. Returns - an integer with the counter. 
""" - #======================================================================= - # counter = 0 - # for coords in list(self.keys()): - # if self[coords] and (self[coords][TUPLE_STATUS] == problem or problem == None): - # counter += 1 - #======================================================================= + If problem is omitted or None, counts all the chunks. Returns + an integer with the counter. """ + if problem == None: c = 0 for s in CHUNK_STATUSES: c += self._counts[s] @@ -439,7 +465,12 @@ def remove_problematic_chunks(self, problem): return counter def fix_problematic_chunks(self, problem): - + """ This fixes problems in chunks that can be somehow easy to fix. + + Right now it only fixes chunks missing the TAG_List Entities. + """ + # TODO: it seems having the Entities TAG missing is just a little part. Some of the + # chunks have like 3 or 4 tag missing from the NBT structure. counter = 0 bad_chunks = self.list_chunks(problem) for c in bad_chunks: @@ -461,7 +492,7 @@ def fix_problematic_chunks(self, problem): def remove_entities(self): """ Removes all the entities in chunks with the problematic - status CHUNK_TOO_MANY_ENTITIES that are in this region file. + CHUNK_TOO_MANY_ENTITIES that are in this region file. Returns a counter of all the removed entities. """ problem = CHUNK_TOO_MANY_ENTITIES counter = 0 @@ -524,30 +555,42 @@ class DataSet(object): _update_counts makes sure that the DataSet stores all the counts and that it is not needed to loop through all of them to know the real count. - """ + has_problems should return True only if any element of the set has problems - def summary(self): - """ Return a summary of problems found in this set. """ - raise NotImplemented + """ - def _replace_in_data_structure(self, data): - raise NotImplemented + def __init__(self, *args, **kwargs): + object.__init__(self, *args, **kwargs) + self._set = {} def _get_list(self): - raise NotImplemented + return list(self._set.values()) - def _update_counts(self, s): + def __getitem__(self, key): + return self._set[key] + + def __delitem__(self, key): + del self._set[key] + + def __setitem__(self, key, value): + self._set[key] = value + + def __len__(self): + return len(self._set) + + # mandatory implementation methods + def summary(self): + """ Return a summary of problems found in this set. """ raise NotImplemented - def __getitem__(self, key): - """ - """ + @property + def has_problems(self): raise NotImplemented - def __setitem__(self, key, value): + def _replace_in_data_structure(self, data, key): raise NotImplemented - def __len__(self): + def _update_counts(self, s): raise NotImplemented @@ -558,36 +601,34 @@ class DataFileSet(DataSet): """ def __init__(self, path, title, *args, **kwargs): DataSet.__init__(self, *args, **kwargs) + d = self._set self.title = title self.path = path data_files_path = glob(join(path, "*.dat")) - self.data_files = d = {} + for path in data_files_path: d[path] = ScannedDataFile(path) @property def has_problems(self): - for d in self.data_files.values(): + for d in self._set.values(): if not d.readable: return True return False - def _get_list(self): - return list(self.data_files.values()) - def _replace_in_data_structure(self, data): - self.data_files[data.path] = data - - def __len__(self): - return len(self.data_files) + self._set[data.path] = data def _update_counts(self, s): pass + def count_datafiles(self, status): + pass + def summary(self): """ Return a summary of problems found in this set. 
""" text = "" - bad_data_files = [i for i in list(self.data_files.values()) if not i.readable] + bad_data_files = [i for i in list(self._set.values()) if not i.readable] for f in bad_data_files: text += "\t" + f.oneliner_status text += "\n" @@ -605,11 +646,11 @@ def __init__(self, regionset_path=None, region_list=[]): else: self.path = None self.region_list = region_list - self.regions = {} + self._set = {} for path in self.region_list: try: r = ScannedRegionFile(path) - self.regions[r.get_coords()] = r + self._set[r.get_coords()] = r except InvalidFileName as e: print("Warning: The file {0} is not a valid name for a region. I'll skip it.".format(path)) @@ -659,7 +700,7 @@ def __str__(self): text = "RegionSet: {0}\n".format(self.get_name()) if self.path: text += " Regionset path: {0}\n".format(self.path) - text += " Region files: {0}\n".format(len(self.regions)) + text += " Region files: {0}\n".format(len(self._set)) text += " Scanned: {0}".format(str(self.scanned)) return text @@ -677,11 +718,8 @@ def has_problems(self): return False - def __getitem__(self, key): - return self.regions[key] - def __setitem__(self, key, value): - self.regions[key] = value + self._set[key] = value assert(type(value) == ScannedRegionFile) self._region_counters[value.status] += 1 @@ -689,20 +727,11 @@ def __setitem__(self, key, value): for status in CHUNK_STATUSES: self._chunk_counters[status] += value.count_chunks(status) - def __delitem__(self, key): - del self.regions[key] - - def __len__(self): - return len(self.regions) - - def _get_list(self): - return list(self.regions.values()) - def _replace_in_data_structure(self, data): - self.regions[data.get_coords()] = data + self._set[data.get_coords()] = data def keys(self): - return list(self.regions.keys()) + return list(self._set.keys()) def list_regions(self, status=None): """ Returns a list of all the ScannedRegionFile objects stored @@ -710,10 +739,10 @@ def list_regions(self, status=None): all the objects.""" if status == None: - return list(self.regions.values()) + return list(self._set.values()) t = [] - for coords in list(self.regions.keys()): - r = self.regions[coords] + for coords in list(self._set.keys()): + r = self._set[coords] if r.status == status: t.append(r) return t @@ -815,8 +844,8 @@ def remove_problematic_chunks(self, problem): counter = 0 if self.count_chunks(): print(' Deleting chunks in region set \"{0}\":'.format(self._get_dimension_directory())) - for r in list(self.regions.keys()): - counter += self.regions[r].remove_problematic_chunks(problem) + for r in list(self._set.keys()): + counter += self._set[r].remove_problematic_chunks(problem) print("Removed {0} chunks in this regionset.\n".format(counter)) return counter @@ -828,8 +857,8 @@ def fix_problematic_chunks(self, problem): counter = 0 if self.count_chunks(): print(' Repairing chunks in region set \"{0}\":'.format(self._get_dimension_directory())) - for r in list(self.regions.keys()): - counter += self.regions[r].fix_problematic_chunks(problem) + for r in list(self._set.keys()): + counter += self._set[r].fix_problematic_chunks(problem) print("Repaired {0} chunks in this regionset.\n".format(counter)) return counter @@ -838,8 +867,8 @@ def remove_entities(self): """ Removes entities in chunks with the status TOO_MANY_ENTITIES. 
""" counter = 0 - for r in list(self.regions.keys()): - counter += self.regions[r].remove_entities() + for r in list(self._set.keys()): + counter += self._set[r].remove_entities() return counter def rescan_entities(self, options): From d847f82192ed54f11757f9ff49e2e71ca07deb66 Mon Sep 17 00:00:00 2001 From: Alejandro Aguilera Date: Mon, 22 Apr 2019 08:33:24 +0200 Subject: [PATCH 078/151] Remove some old code in scan_chunk. --- regionfixer_core/scan.py | 58 +++++++--------------------------------- 1 file changed, 10 insertions(+), 48 deletions(-) diff --git a/regionfixer_core/scan.py b/regionfixer_core/scan.py index e746f09..283fef2 100644 --- a/regionfixer_core/scan.py +++ b/regionfixer_core/scan.py @@ -779,77 +779,49 @@ def scan_chunk(region_file, coords, global_coords, entity_limit): data_coords = world.get_chunk_data_coords(chunk) num_entities = len(chunk["Level"]["Entities"]) if data_coords != global_coords: + # wrong located chunk status = world.CHUNK_WRONG_LOCATED - #=================================================================== - # status_text = "Mismatched coordinates (wrong located chunk)." - # scan_time = time() - #=================================================================== elif num_entities > el: + # too many entities in the chunk status = world.CHUNK_TOO_MANY_ENTITIES - #=================================================================== - # status_text = "The chunks has too many entities (it has {0}, and it's more than the limit {1})".format(num_entities, entity_limit) - # scan_time = time() - #=================================================================== else: + # chunk ok status = world.CHUNK_OK - #=================================================================== - # status_text = "OK" - # scan_time = time() - #=================================================================== - + except InconceivedChunk as e: + # chunk not created chunk = None data_coords = None num_entities = None status = world.CHUNK_NOT_CREATED - #======================================================================= - # status_text = "The chunk doesn't exist" - # scan_time = time() - #======================================================================= except RegionHeaderError as e: + # corrupted chunk, because of region header status = world.CHUNK_CORRUPTED - #======================================================================= - # error = "Region header error: " + e.msg - # status_text = error - # scan_time = time() - #======================================================================= chunk = None data_coords = None global_coords = world.get_global_chunk_coords(split(region_file.filename)[1], coords[0], coords[1]) num_entities = None except ChunkDataError as e: + # corrupted chunk, usually because of bad CRC in compression status = world.CHUNK_CORRUPTED - #======================================================================= - # error = "Chunk data error: " + e.msg - # status_text = error - # scan_time = time() - #======================================================================= chunk = None data_coords = None global_coords = world.get_global_chunk_coords(split(region_file.filename)[1], coords[0], coords[1]) num_entities = None except ChunkHeaderError as e: + # corrupted chunk, error in the header of the chunk status = world.CHUNK_CORRUPTED - #======================================================================= - # error = "Chunk herader error: " + e.msg - # status_text = error - # scan_time = time() - 
#======================================================================= chunk = None data_coords = None global_coords = world.get_global_chunk_coords(split(region_file.filename)[1], coords[0], coords[1]) num_entities = None - + except KeyError as e: + # chunk with the mandatory tag Entities missing status = world.CHUNK_MISSING_ENTITIES_TAG - #======================================================================= - # error = "Missing Entities TAG" - # status_text = error - # scan_time = time() - #======================================================================= chunk = None data_coords = None global_coords = world.get_global_chunk_coords(split(region_file.filename)[1], coords[0], coords[1]) @@ -858,11 +830,6 @@ def scan_chunk(region_file, coords, global_coords, entity_limit): except UnicodeDecodeError as e: # TODO: This should another kind of error, it's now being handled as corrupted chunk status = world.CHUNK_CORRUPTED - #======================================================================= - # error = "Chunk data error: " + e.msg - # status_text = error - # scan_time = time() - #======================================================================= chunk = None data_coords = None global_coords = world.get_global_chunk_coords(split(region_file.filename)[1], coords[0], coords[1]) @@ -871,11 +838,6 @@ def scan_chunk(region_file, coords, global_coords, entity_limit): except TypeError as e: # TODO: This should another kind of error, it's now being handled as corrupted chunk status = world.CHUNK_CORRUPTED - #======================================================================= - # error = "Chunk data error: " + e.msg - # status_text = error - # scan_time = time() - #======================================================================= chunk = None data_coords = None global_coords = world.get_global_chunk_coords(split(region_file.filename)[1], coords[0], coords[1]) From 00f2d317efcd961f5cceba0f92d9947f39d073dd Mon Sep 17 00:00:00 2001 From: Alejandro Aguilera Date: Mon, 22 Apr 2019 09:02:06 +0200 Subject: [PATCH 079/151] Remove more old unused code. Move all de data scanning to use status constants. --- regionfixer_core/scan.py | 23 +++++------- regionfixer_core/world.py | 73 ++++++++++----------------------------- 2 files changed, 27 insertions(+), 69 deletions(-) diff --git a/regionfixer_core/scan.py b/regionfixer_core/scan.py index 283fef2..76aad3c 100644 --- a/regionfixer_core/scan.py +++ b/regionfixer_core/scan.py @@ -40,6 +40,7 @@ from . import world from regionfixer_core.util import entitle +from regionfixer_core.world import DATAFILE_OK #~ TUPLE_COORDS = 0 @@ -189,7 +190,6 @@ def __init__(self, data_structure, processes, scan_function, init_args, initializer=_mp_init_function, initargs=(init_args,)) - # TODO: make this automatic amount # Recommended time to sleep between polls for results self.SCAN_START_SLEEP_TIME = 0.001 self.SCAN_MIN_SLEEP_TIME = 1e-6 @@ -211,7 +211,6 @@ def scan(self): logging.debug("Starting scan in: " + str(self)) logging.debug("########################################################") logging.debug("########################################################") - total_files = len(self.data_structure) # Tests indicate that smaller amount of jobs per worker make all type # of scans faster jobs_per_worker = 5 @@ -560,11 +559,11 @@ def console_scan_world(world_obj, processes, entity_limit, remove_entities, if not w.scanned_level.path: print("[WARNING!] 
\'level.dat\' doesn't exist!") else: - if w.scanned_level.readable == True: + if w.scanned_level.status not in world.DATAFILE_PROBLEMS: print("\'level.dat\' is readable") else: print("[WARNING!]: \'level.dat\' is corrupted with the following error/s:") - print("\t {0}".format(w.scanned_level.status_text)) + print("\t {0}".format(world.DATAFILE_STATUS_TEXT[w.scanned_level.status])) ps = AsyncDataScanner(w.players, processes) ops = AsyncDataScanner(w.old_players, processes) @@ -617,22 +616,18 @@ def scan_data(scanned_dat_file): _ = nbt.NBTFile(buffer=f) else: _ = nbt.NBTFile(filename=s.path) - s.readable = True + s.status = world.DATAFILE_OK except MalformedFileError as e: - s.readable = False - s.status_text = str(e) + s.status = world.DATAFILE_UNREADABLE except IOError as e: - s.readable = False - s.status_text = str(e) + s.status = world.DATAFILE_UNREADABLE except UnicodeDecodeError as e: - s.readable = False - s.status_text = str(e) + s.status = world.DATAFILE_UNREADABLE except TypeError as e: - s.readable = False - s.status_text = str(e) + s.status = world.DATAFILE_UNREADABLE except: - s.readable = False + s.status = world.DATAFILE_UNREADABLE except_type, except_class, tb = sys.exc_info() s = (s, (except_type, except_class, extract_tb(tb))) diff --git a/regionfixer_core/world.py b/regionfixer_core/world.py index 1fc33fb..97c586a 100644 --- a/regionfixer_core/world.py +++ b/regionfixer_core/world.py @@ -231,68 +231,31 @@ class InvalidFileName(IOError): pass class ScannedDataFile(object): + """ Stores all the information of a scanned data file. """ def __init__(self, path=None, readable=None, status_text=None): self.path = path if self.path and exists(self.path): self.filename = split(path)[1] else: self.filename = None - self.readable = readable - self.status_text = status_text + # The status of the region file. + self.status = None def __str__(self): text = "NBT file:" + str(self.filename) + "\n" - text += "\tReadable:" + str(self.readable) + "\n" + text += "\tStatus:" + DATAFILE_STATUS_TEXT[self.status] + "\n" return text @property def oneliner_status(self): - return "File: \"" + self.filename + "\"; status: " + ("Readable" if self.readable else "Unreadable") + return "File: \"" + self.filename + "\"; status: " + DATAFILE_STATUS_TEXT[self.status] class ScannedChunk(object): """ Stores all the results of the scan. Not used at the moment, it prette nice but takes an huge amount of memory. """ - # WARNING: not used at the moment, it probably has bugs and it's outdated. - # The problem was it took too much memory, every full region file - # has a thousand chunks and a world with a thousand region files is not strange, - # that reach a million of this structures pretty fast. - # It has been replaced with a tuple - def __init__(self, header_coords, global_coords=None, data_coords=None, - status=None, num_entities=None, scan_time=None, - region_path=None): - """ Inits the object with all the scan information. 
""" - self.h_coords = header_coords - self.g_coords = global_coords - self.d_coords = data_coords - self.status = status - self.status_text = None - self.num_entities = num_entities - self.scan_time = scan_time - self.region_path = region_path - - def __str__(self): - text = "Chunk with header coordinates:" + str(self.h_coords) + "\n" - text += "\tData coordinates:" + str(self.d_coords) + "\n" - text += "\tGlobal coordinates:" + str(self.g_coords) + "\n" - text += "\tStatus:" + str(self.status_text) + "\n" - text += "\tNumber of entities:" + str(self.num_entities) + "\n" - text += "\tScan time:" + time.ctime(self.scan_time) + "\n" - return text - - def get_path(self): - """ Returns the path of the region file. """ - return self.region_path - - def rescan_entities(self, options): - """ Updates the status of the chunk when the the option - entity limit is changed. """ - if self.num_entities >= options.entity_limit: - self.status = CHUNK_TOO_MANY_ENTITIES - self.status_text = CHUNK_STATUS_TEXT[CHUNK_TOO_MANY_ENTITIES] - else: - self.status = CHUNK_OK - self.status_text = CHUNK_STATUS_TEXT[CHUNK_OK] + # WARNING: This is here so I remember to not use objects as ScannedChunk + # They take too much memory. class ScannedRegionFile(object): @@ -318,11 +281,10 @@ def __init__(self, filename, time=None): # time when the scan for this file finished self.scan_time = time - # The status of the region file. At the moment can be OK, - # TOO SMALL or UNREADABLE see the constants at the start - # of the file. + # The status of the region file. self.status = None + # has the file been scanned yet? self.scanned = False @property @@ -613,7 +575,8 @@ def __init__(self, path, title, *args, **kwargs): @property def has_problems(self): for d in self._set.values(): - if not d.readable: return True + if d.status not in DATAFILE_PROBLEMS: + return True return False def _replace_in_data_structure(self, data): @@ -628,7 +591,7 @@ def count_datafiles(self, status): def summary(self): """ Return a summary of problems found in this set. """ text = "" - bad_data_files = [i for i in list(self._set.values()) if not i.readable] + bad_data_files = [i for i in list(self._set.values()) if i.status in DATAFILE_PROBLEMS] for f in bad_data_files: text += "\t" + f.oneliner_status text += "\n" @@ -1034,7 +997,7 @@ def __str__(self): def has_problems(self): """ Returns True if the regionset has chunk or region problems and false otherwise. 
""" - if not self.scanned_level.readable: + if not self.scanned_level.status in DATAFILE_PROBLEMS: return True for d in self.datafilesets: @@ -1067,10 +1030,10 @@ def summary(self): # leve.dat and data files final += "\nlevel.dat:\n" - if self.scanned_level.readable: + if self.scanned_level.status in DATAFILE_PROBLEMS: final += "\t\'level.dat\' is readable\n" else: - final += "\t[WARNING]: \'level.dat\' isn't readable, error: {0}\n".format(self.scanned_level.status_text) + final += "\t[WARNING]: \'level.dat\' isn't readable, error: {0}\n".format(DATAFILE_STATUS_TEXT[self.scanned_level.status]) sets = [self.players, self.old_players, @@ -1330,8 +1293,8 @@ def generate_report(self, standalone): # add all the player files with problems text += "\nUnreadable player files:\n" - broken_players = [p for p in self.players._get_list() if not p.readable] - broken_players.extend([p for p in self.old_players._get_list() if not p.readable]) + broken_players = [p for p in self.players._get_list() if p.status in DATAFILE_PROBLEMS] + broken_players.extend([p for p in self.old_players._get_list() if p.status in DATAFILE_PROBLEMS]) if broken_players: broken_player_files = [p.filename for p in broken_players] text += "\n".join(broken_player_files) @@ -1341,7 +1304,7 @@ def generate_report(self, standalone): # Now all the data files text += "\nUnreadable data files:\n" - broken_data_files = [d for d in self.data_files._get_list() if not d.readable] + broken_data_files = [d for d in self.data_files._get_list() if d.status in DATAFILE_PROBLEMS] if broken_data_files: broken_data_filenames = [p.filename for p in broken_data_files] text += "\n".join(broken_data_filenames) From ccee7a68f9af4b0ebb33d3539c26bfcfd7b24658 Mon Sep 17 00:00:00 2001 From: Alejandro Aguilera Date: Mon, 22 Apr 2019 09:12:26 +0200 Subject: [PATCH 080/151] Remove text about automatic bug report, feature which is going to be disabled for a long time. --- regionfixer.py | 5 +---- 1 file changed, 1 insertion(+), 4 deletions(-) diff --git a/regionfixer.py b/regionfixer.py index cdfda5f..a9592e4 100644 --- a/regionfixer.py +++ b/regionfixer.py @@ -549,7 +549,7 @@ def main(): if __name__ == '__main__': - ERROR_MSG = "\n\nOps! Something went really wrong and regionfixer crashed. I can try to send an automatic bug rerpot if you wish.\n\n" + ERROR_MSG = "\n\nOps! Something went really wrong and regionfixer crashed.\n" QUESTION_TEXT = ('Do you want to send an anonymous bug report to the region fixer ftp?\n' '(Answering no will print the bug report)') had_exception = False @@ -558,9 +558,6 @@ def main(): try: freeze_support() value = main() - #======================================================================= - # sys.exit(value) - #======================================================================= except ChildProcessException as e: had_exception = True From eb7eb2ac96cbc78b3102465435e5312e073974b4 Mon Sep 17 00:00:00 2001 From: Alejandro Aguilera Date: Mon, 22 Apr 2019 10:42:48 +0200 Subject: [PATCH 081/151] Commenting and some more coding around the DataSet structure, so it's more coherent. --- regionfixer_core/world.py | 85 ++++++++++++++++++++++----------------- 1 file changed, 49 insertions(+), 36 deletions(-) diff --git a/regionfixer_core/world.py b/regionfixer_core/world.py index 97c586a..45e2c4e 100644 --- a/regionfixer_core/world.py +++ b/regionfixer_core/world.py @@ -230,9 +230,11 @@ class InvalidFileName(IOError): pass + class ScannedDataFile(object): """ Stores all the information of a scanned data file. 
""" - def __init__(self, path=None, readable=None, status_text=None): + def __init__(self, path=None): + super().__init__() self.path = path if self.path and exists(self.path): self.filename = split(path)[1] @@ -248,6 +250,7 @@ def __str__(self): @property def oneliner_status(self): + """ One line describing the status of the file. """ return "File: \"" + self.filename + "\"; status: " + DATAFILE_STATUS_TEXT[self.status] @@ -260,11 +263,11 @@ class ScannedChunk(object): class ScannedRegionFile(object): """ Stores all the scan information for a region file """ - def __init__(self, filename, time=None): + def __init__(self, path, time=None): # general region file info - self.path = filename - self.filename = split(filename)[1] - self.folder = split(filename)[0] + self.path = path + self.filename = split(path)[1] + self.folder = split(path)[0] self.x = self.z = None self.x, self.z = self.get_coords() self.coords = (self.x, self.z) @@ -507,6 +510,9 @@ def rescan_entities(self, options): class DataSet(object): """ Stores data items to be scanned by AsyncScanner in scan.py. + typevalue is the type of the class to store in the set. When setting it will be + asserted if it is of that type + The data should be in a dictionary and should be accessible through the methods __getitem__, __setitem__. The methods, _get_list, __len__ are also used. @@ -521,11 +527,12 @@ class DataSet(object): """ - def __init__(self, *args, **kwargs): - object.__init__(self, *args, **kwargs) + def __init__(self, typevalue, *args, **kwargs): self._set = {} + self._typevalue = typevalue def _get_list(self): + """ Returns a list with all the values in the set. """ return list(self._set.values()) def __getitem__(self, key): @@ -535,7 +542,9 @@ def __delitem__(self, key): del self._set[key] def __setitem__(self, key, value): + assert(self._typevalue == type(value)) self._set[key] = value + self._update_counts(value) def __len__(self): return len(self._set) @@ -547,12 +556,20 @@ def summary(self): @property def has_problems(self): + """ Returns True if the scanned set has problems. """ raise NotImplemented def _replace_in_data_structure(self, data, key): + """ For multiprocessing. Replaces the data in the set with the new data. + + Child scanning processes make copies of the ScannedRegion/DataFile when they scan them. + The AsyncScanner will call this function so the ScannedRegion/DataFile is stored + in the set properly. + """ raise NotImplemented def _update_counts(self, s): + """ This functions is used by __set__ to update the counters. """ raise NotImplemented @@ -562,7 +579,7 @@ class DataFileSet(DataSet): DataSets are scanned using scan.AsyncScanner """ def __init__(self, path, title, *args, **kwargs): - DataSet.__init__(self, *args, **kwargs) + DataSet.__init__(self, ScannedDataFile, *args, **kwargs) d = self._set self.title = title @@ -571,9 +588,15 @@ def __init__(self, path, title, *args, **kwargs): for path in data_files_path: d[path] = ScannedDataFile(path) + + # stores the counts of files + self._counts = {} + for s in DATAFILE_STATUSES: + self._counts[s] = 0 @property def has_problems(self): + """ One line describing the status of the data file. 
""" for d in self._set.values(): if d.status not in DATAFILE_PROBLEMS: return True @@ -583,7 +606,8 @@ def _replace_in_data_structure(self, data): self._set[data.path] = data def _update_counts(self, s): - pass + assert(type(s) == self._typevalue) + self._counts[s.status] += 1 def count_datafiles(self, status): pass @@ -603,6 +627,7 @@ class RegionSet(DataSet): Inits with a list of region files. The regions dict is filled while scanning with ScannedRegionFiles and ScannedChunks.""" def __init__(self, regionset_path=None, region_list=[]): + DataSet.__init__(self, ScannedRegionFile) if regionset_path: self.path = regionset_path self.region_list = glob(join(self.path, "r.*.*.mca")) @@ -646,6 +671,16 @@ def get_name(self): else: return "" + def _update_counts(self, scanned_regionfile): + """ Updates the counters of the regionset with the new regionfile. """ + + assert(type(scanned_regionfile) == ScannedRegionFile) + + self._region_counters[scanned_regionfile.status] += 1 + + for status in CHUNK_STATUSES: + self._chunk_counters[status] += scanned_regionfile.count_chunks(status) + def _get_dimension_directory(self): """ Returns a string with the directory of the dimension, None if there is no such a directory and the regionset is composed @@ -659,6 +694,9 @@ def _get_dimension_directory(self): else: return None + def _replace_in_data_structure(self, data): + self._set[data.get_coords()] = data + def __str__(self): text = "RegionSet: {0}\n".format(self.get_name()) if self.path: @@ -681,18 +719,6 @@ def has_problems(self): return False - def __setitem__(self, key, value): - self._set[key] = value - assert(type(value) == ScannedRegionFile) - - self._region_counters[value.status] += 1 - - for status in CHUNK_STATUSES: - self._chunk_counters[status] += value.count_chunks(status) - - def _replace_in_data_structure(self, data): - self._set[data.get_coords()] = data - def keys(self): return list(self._set.keys()) @@ -710,15 +736,6 @@ def list_regions(self, status=None): t.append(r) return t - def _update_counts(self, scanned_regionfile): - """ Updates the counters of the regionset with the new regionfile. """ - assert(type(scanned_regionfile) == ScannedRegionFile) - - self._region_counters[scanned_regionfile.status] += 1 - - for status in CHUNK_STATUSES: - self._chunk_counters[status] += scanned_regionfile.count_chunks(status) - def count_regions(self, status=None): """ Return the number of region files with status. If none returns the number of region files in this regionset. @@ -940,14 +957,10 @@ def __init__(self, world_path): try: self.level_data = nbt.NBTFile(level_dat_path)["Data"] self.name = self.level_data["LevelName"].value - self.scanned_level = ScannedDataFile(level_dat_path, - readable=True, - status_text="OK") + self.scanned_level = ScannedDataFile(level_dat_path) except Exception as e: self.name = None - self.scanned_level = ScannedDataFile(level_dat_path, - readable=False, - status_text=e) + self.scanned_level = ScannedDataFile(level_dat_path) else: self.level_file = None self.level_data = None From d53e019e05d5bda781046c1ae8519e51184f9b96 Mon Sep 17 00:00:00 2001 From: Alejandro Aguilera Date: Mon, 22 Apr 2019 11:35:28 +0200 Subject: [PATCH 082/151] Fix some more bugs in --log option. Add has_problems to ScannedRegionFile. 
--- regionfixer_core/world.py | 35 +++++++++++++++++++++-------------- 1 file changed, 21 insertions(+), 14 deletions(-) diff --git a/regionfixer_core/world.py b/regionfixer_core/world.py index 45e2c4e..110ad22 100644 --- a/regionfixer_core/world.py +++ b/regionfixer_core/world.py @@ -328,6 +328,16 @@ def __setitem__(self, key, value): def keys(self): return list(self._chunks.keys()) + @property + def has_problems(self): + """ Return True if the region file has problem in itself or in its chunks. """ + if self.status in REGION_PROBLEMS: + return True + for s in CHUNK_PROBLEMS: + if self.count_chunks(s): + return True + return False + def get_path(self): """ Returns the path of the region file. """ return self.path @@ -394,18 +404,19 @@ def summary(self): is a string with region file, global coords, local coords, and status of every problematic chunk. """ text = "" - if self.status == REGION_TOO_SMALL: - text += " |- This region file is too small in size to actually be a region file.\n" + if self.status in REGION_PROBLEMS: + text += " |- This region has status: {0}.\n".format(REGION_STATUS_TEXT[self.status]) else: for c in list(self.keys()): - if self[c][TUPLE_STATUS] == CHUNK_OK or self[c][TUPLE_STATUS] == CHUNK_NOT_CREATED: continue + if self[c][TUPLE_STATUS] not in CHUNK_PROBLEMS: + continue status = self[c][TUPLE_STATUS] h_coords = c g_coords = self.get_global_chunk_coords(*h_coords) text += " |-+-Chunk coords: header {0}, global {1}.\n".format(h_coords, g_coords) text += " | +-Status: {0}\n".format(CHUNK_STATUS_TEXT[status]) if self[c][TUPLE_STATUS] == CHUNK_TOO_MANY_ENTITIES: - text += " | +-Nº entities: {0}\n".format(self[c][TUPLE_NUM_ENTITIES]) + text += " | +-No. entities: {0}\n".format(self[c][TUPLE_NUM_ENTITIES]) text += " |\n" return text @@ -787,11 +798,7 @@ def summary(self): local coords, data coords and status. 
""" text = "" for r in list(self.keys()): - if not (self[r].count_chunks(CHUNK_CORRUPTED) or \ - self[r].count_chunks(CHUNK_TOO_MANY_ENTITIES) or \ - self[r].count_chunks(CHUNK_WRONG_LOCATED) or \ - self[r].count_chunks(CHUNK_SHARED_OFFSET) or \ - self[r].status == REGION_TOO_SMALL): + if not self[r].has_problems: continue text += "Region file: {0}\n".format(self[r].filename) text += self[r].summary() @@ -948,8 +955,6 @@ def __init__(self, world_path): for directory in glob(join(self.path, "DIM*/region")): self.regionsets.append(RegionSet(join(self.path, directory))) - print(self.regionsets) - # level.dat # Let's scan level.dat here so we can extract the world name level_dat_path = join(self.path, "level.dat") @@ -958,15 +963,17 @@ def __init__(self, world_path): self.level_data = nbt.NBTFile(level_dat_path)["Data"] self.name = self.level_data["LevelName"].value self.scanned_level = ScannedDataFile(level_dat_path) + self.scanned_level.status = DATAFILE_OK except Exception as e: self.name = None self.scanned_level = ScannedDataFile(level_dat_path) + self.scanned_level.status = DATAFILE_UNREADABLE else: self.level_file = None self.level_data = None self.name = None - self.scanned_level = ScannedDataFile(None, False, - "The file doesn't exist") + self.scanned_level = ScannedDataFile(None, level_dat_path) + self.scanned_level.status = DATAFILE_UNREADABLE # Player files self.datafilesets = [] @@ -1043,7 +1050,7 @@ def summary(self): # leve.dat and data files final += "\nlevel.dat:\n" - if self.scanned_level.status in DATAFILE_PROBLEMS: + if self.scanned_level.status not in DATAFILE_PROBLEMS: final += "\t\'level.dat\' is readable\n" else: final += "\t[WARNING]: \'level.dat\' isn't readable, error: {0}\n".format(DATAFILE_STATUS_TEXT[self.scanned_level.status]) From bb87cddba514a14f2c5625ddd0e6e3efb9f20fd8 Mon Sep 17 00:00:00 2001 From: Alejandro Aguilera Date: Mon, 22 Apr 2019 13:55:00 +0200 Subject: [PATCH 083/151] Remove the gui from py2exe, it seems that py2exe does not work with python 3.7, so no exe right now. --- setup.py | 36 ++++++++++++++++++++---------------- 1 file changed, 20 insertions(+), 16 deletions(-) diff --git a/setup.py b/setup.py index e06fdf0..6f648ee 100644 --- a/setup.py +++ b/setup.py @@ -15,7 +15,9 @@ import shutil from regionfixer_core import version as cli_version -from gui import version as gui_version +#=============================================================================== +# from gui import version as gui_version +#=============================================================================== # Remove the build folder @@ -128,18 +130,20 @@ def __init__(self, **kw): # Ok, now we are going to build our target class. 
# I chose this building strategy as it works perfectly for me :-D -GUI_Target = Target( - # what to build - script = "regionfixer_gui.py", - icon_resources = icon_resources, - bitmap_resources = bitmap_resources, - other_resources = other_resources, - dest_base = "regionfixer_gui", - version = gui_version.version_string, - company_name = "No Company", - copyright = "Copyright (C) 2011 Alejandro Aguilera", - name = "Region Fixer GUI" - ) +#=============================================================================== +# GUI_Target = Target( +# # what to build +# script = "regionfixer_gui.py", +# icon_resources = icon_resources, +# bitmap_resources = bitmap_resources, +# other_resources = other_resources, +# dest_base = "regionfixer_gui", +# version = gui_version.version_string, +# company_name = "No Company", +# copyright = "Copyright (C) 2011 Alejandro Aguilera", +# name = "Region Fixer GUI" +# ) +#=============================================================================== CLI_Target = Target( # what to build @@ -150,7 +154,7 @@ def __init__(self, **kw): dest_base = "regionfixer", version = cli_version.version_string, company_name = "No Company", - copyright = "Copyright (C) 2011 Alejandro Aguilera", + copyright = "Copyright (C) 2019 Alejandro Aguilera", name = "Region Fixer" ) @@ -179,8 +183,8 @@ def __init__(self, **kw): }, zipfile = "lib\library.zip", - console = [CLI_Target], - windows = [GUI_Target] + console = [CLI_Target] + #windows = [GUI_Target] ) # This is a place where any post-compile code may go. From ee219bbeb9a917fe88a9b4a1b9e19180dbbc78b4 Mon Sep 17 00:00:00 2001 From: Alejandro Aguilera Date: Mon, 22 Apr 2019 14:01:53 +0200 Subject: [PATCH 084/151] Fix typo in GUI. --- gui/main.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/gui/main.py b/gui/main.py index 80dab91..1a1bcb3 100644 --- a/gui/main.py +++ b/gui/main.py @@ -165,7 +165,7 @@ def OnHelp(self, e): def OnOpen(self, e): """ Called when the open world button is pressed. """ - dlg = wx.DirDialog(self, "Choose a Minecraf world folder") + dlg = wx.DirDialog(self, "Choose a Minecraft world folder") # Set the last path used dlg.SetPath(self.last_path) if dlg.ShowModal() == wx.ID_OK: From 81b018eef72c893b2cdac6a6c6fbadf41908adda Mon Sep 17 00:00:00 2001 From: Alejandro Aguilera Date: Mon, 22 Apr 2019 14:04:41 +0200 Subject: [PATCH 085/151] Bump the second version number. 
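
The version bump below only edits version_string; since version_numbers comes from str.split('.'), its components stay strings, so any comparison between releases should convert them to integers first. A short illustrative sketch follows; the as_tuple helper is hypothetical and not part of Region Fixer.

    # Illustrative only: version.py keeps the version as a string plus its
    # split components. Comparing releases numerically needs an int
    # conversion, otherwise "0.10.0" would sort before "0.9.0".
    version_string = "0.3.0"
    version_numbers = version_string.split('.')      # ['0', '3', '0'] (strings)


    def as_tuple(vs):
        # Hypothetical helper: "0.3.0" -> (0, 3, 0)
        return tuple(int(part) for part in vs.split('.'))


    print(as_tuple("0.3.0") > as_tuple("0.2.3"))     # True
    print(as_tuple("0.10.0") > as_tuple("0.9.0"))    # True; comparing the split strings would say False
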
--- regionfixer_core/version.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/regionfixer_core/version.py b/regionfixer_core/version.py index 9478b2d..4dbdbcd 100644 --- a/regionfixer_core/version.py +++ b/regionfixer_core/version.py @@ -4,5 +4,5 @@ @author: Alejandro ''' -version_string = "0.2.3" +version_string = "0.3.0" version_numbers = version_string.split('.') From 2273a1d4bc2a4c079917e454132eb278f2ca506f Mon Sep 17 00:00:00 2001 From: Alejandro Aguilera Date: Sat, 11 May 2019 18:04:40 +0200 Subject: [PATCH 086/151] Update forgotten function --- regionfixer_core/world.py | 6 ++---- 1 file changed, 2 insertions(+), 4 deletions(-) diff --git a/regionfixer_core/world.py b/regionfixer_core/world.py index 110ad22..c28058c 100644 --- a/regionfixer_core/world.py +++ b/regionfixer_core/world.py @@ -300,10 +300,8 @@ def oneliner_status(self): for s in CHUNK_PROBLEMS: stats += "{0}:{1}, ".format(CHUNK_PROBLEMS_ABBR[s], self.count_chunks(s)) stats += "t:{0}".format(self.count_chunks()) - elif status == REGION_TOO_SMALL: - stats = "No header in the region file" - elif status == REGION_UNREADABLE: - stats = "Unreadable region file)" + else: + stats = REGION_STATUS_TEXT[status] else: stats = "Not scanned" From b4f58419d787bef8c810ec244a94acf833d73c39 Mon Sep 17 00:00:00 2001 From: Alejandro Aguilera Date: Sat, 11 May 2019 18:22:52 +0200 Subject: [PATCH 087/151] Fix crash when trying to identify world. --- regionfixer_core/world.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/regionfixer_core/world.py b/regionfixer_core/world.py index c28058c..43cb648 100644 --- a/regionfixer_core/world.py +++ b/regionfixer_core/world.py @@ -970,7 +970,7 @@ def __init__(self, world_path): self.level_file = None self.level_data = None self.name = None - self.scanned_level = ScannedDataFile(None, level_dat_path) + self.scanned_level = ScannedDataFile(level_dat_path) self.scanned_level.status = DATAFILE_UNREADABLE # Player files From 40a69a2410f2b7e6f844d215b4eb0fa5b1e0d2b1 Mon Sep 17 00:00:00 2001 From: Alejandro Aguilera Date: Mon, 13 May 2019 00:47:42 +0200 Subject: [PATCH 088/151] Update README.rst --- README.rst | 38 ++++++++++++-------------------------- 1 file changed, 12 insertions(+), 26 deletions(-) diff --git a/README.rst b/README.rst index d51f4eb..b2212b3 100644 --- a/README.rst +++ b/README.rst @@ -18,38 +18,23 @@ Web page: https://github.com/Fenixin/Minecraft-Region-Fixer Mincraft forums posts: -http://www.minecraftforum.net/topic/302380-minecraft-region-fixer/ -http://www.minecraftforum.net/topic/275730-minecraft-region-fixer/ +https://www.minecraftforum.net/forums/support/server-support-and/1903200-minecraft-region-fixer +https://www.minecraftforum.net/forums/mapping-and-modding-java-edition/minecraft-tools/1261480-minecraft-region-fixer Supported platforms =================== This program only works with Python 3.x, and DOESN'T work with -python 2.x. There is also a windows executable for ease of use, if you -use the windows executable you don't need to install Python. - - -Windows .exe downloads -====================== -The window executable is generated using py2exe and is the choice if -you don't want to install python in your system. - -These downloads were usually in the downloads section of github, but -github has deprecated this feature. So, from Region Fixer v0.1.0 -downloads are stored in mediafire: - -http://www.mediafire.com/?1exub0d8ys83y -or -http://adf.ly/HVHGu (if you want to contribute a little) - +python 2.x. 
There was a windows exe in older versions, but right +now You need to install the python interpreter to run this +program. Notes ===== -Older versions of Minecraft had big problems when loading corrupted -chunks. But in the latest versions of Minecraft (tested in 1.4.7) the -server itself removes corrupted chunks (when loading them) and -regenerate those chunks. +Older versions of Minecraft had big problems when loading broken +worlds. Newer versions of Minecraft are doing improving the way +they deal with corruption and other things. -Region-Fixer still is useful for replacing those chunks with a +Region-Fixer still is useful for replacing chunks/regions with a backup, removing entities, or trying to see what's going wrong with your world. @@ -72,10 +57,10 @@ https://github.com/Fenixin/Minecraft-Region-Fixer Feedback and questions should go preferably to the forums posts: (server administration) -http://www.minecraftforum.net/topic/275730-tool-minecraft-region-fixer/ +https://www.minecraftforum.net/forums/support/server-support-and/1903200-minecraft-region-fixer (mapping and modding) -http://www.minecraftforum.net/topic/302380-tool-minecraft-region-fixer/ +https://www.minecraftforum.net/forums/mapping-and-modding-java-edition/minecraft-tools/1261480-minecraft-region-fixer Donations and sponsors @@ -83,6 +68,7 @@ Donations and sponsors Region-Fixer was created thanks to sponsors and donations. You can find information about that in DONORS.txt + Contributors ============ See CONTRIBUTORS.txt From 36f9ab004cb74948731866f343c87213c11a8876 Mon Sep 17 00:00:00 2001 From: Alejandro Aguilera Date: Mon, 27 May 2019 00:13:19 +0200 Subject: [PATCH 089/151] Fix typo. --- README.rst | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/README.rst b/README.rst index b2212b3..fd71abb 100644 --- a/README.rst +++ b/README.rst @@ -25,7 +25,7 @@ Supported platforms =================== This program only works with Python 3.x, and DOESN'T work with python 2.x. There was a windows exe in older versions, but right -now You need to install the python interpreter to run this +now you need to install the python interpreter to run this program. Notes From b9ddb0935e8b36a8083e21df68810df6db3738aa Mon Sep 17 00:00:00 2001 From: Alejandro Aguilera Date: Mon, 27 May 2019 00:23:34 +0200 Subject: [PATCH 090/151] Bump version number. --- regionfixer_core/version.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/regionfixer_core/version.py b/regionfixer_core/version.py index 4dbdbcd..478bca5 100644 --- a/regionfixer_core/version.py +++ b/regionfixer_core/version.py @@ -4,5 +4,5 @@ @author: Alejandro ''' -version_string = "0.3.0" +version_string = "0.3.1" version_numbers = version_string.split('.') From cf03afa3f4e4ed5c240b56064327a9906cd2d9c0 Mon Sep 17 00:00:00 2001 From: charly Date: Tue, 30 Jul 2019 10:46:08 +0200 Subject: [PATCH 091/151] fix rc --- regionfixer_core/world.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/regionfixer_core/world.py b/regionfixer_core/world.py index 43cb648..a884fcd 100644 --- a/regionfixer_core/world.py +++ b/regionfixer_core/world.py @@ -607,7 +607,7 @@ def __init__(self, path, title, *args, **kwargs): def has_problems(self): """ One line describing the status of the data file. 
""" for d in self._set.values(): - if d.status not in DATAFILE_PROBLEMS: + if d.status in DATAFILE_PROBLEMS: return True return False @@ -1015,7 +1015,7 @@ def __str__(self): def has_problems(self): """ Returns True if the regionset has chunk or region problems and false otherwise. """ - if not self.scanned_level.status in DATAFILE_PROBLEMS: + if self.scanned_level.status in DATAFILE_PROBLEMS: return True for d in self.datafilesets: From 8040252b3d4aea3dc97bc5804581937377d7bee2 Mon Sep 17 00:00:00 2001 From: Alejandro Aguilera Date: Wed, 18 Sep 2019 20:32:08 +0200 Subject: [PATCH 092/151] Fix comment on property has_problems. --- regionfixer_core/world.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/regionfixer_core/world.py b/regionfixer_core/world.py index a884fcd..1bd0813 100644 --- a/regionfixer_core/world.py +++ b/regionfixer_core/world.py @@ -605,7 +605,7 @@ def __init__(self, path, title, *args, **kwargs): @property def has_problems(self): - """ One line describing the status of the data file. """ + """ Returns True if the dataset has problems and false otherwise. """ for d in self._set.values(): if d.status in DATAFILE_PROBLEMS: return True From b26374649708def04322233757cf6cad69c0393f Mon Sep 17 00:00:00 2001 From: Alejandro Aguilera Date: Wed, 18 Sep 2019 20:57:21 +0200 Subject: [PATCH 093/151] Fix issue #89 get_chunk_region was doing a floating point operation istead of integer. --- regionfixer_core/world.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/regionfixer_core/world.py b/regionfixer_core/world.py index 1bd0813..cf1170a 100644 --- a/regionfixer_core/world.py +++ b/regionfixer_core/world.py @@ -1384,8 +1384,8 @@ def get_chunk_region(chunkX, chunkZ): """ Returns the name of the region file given global chunk coords """ - regionX = chunkX / 32 - regionZ = chunkZ / 32 + regionX = chunkX // 32 + regionZ = chunkZ // 32 region_name = 'r.' + str(regionX) + '.' + str(regionZ) + '.mca' From 75f51f7a41cfc9816b036b834f83e78e76498e85 Mon Sep 17 00:00:00 2001 From: Alejandro Aguilera Date: Wed, 18 Sep 2019 21:35:59 +0200 Subject: [PATCH 094/151] Fix using parse.error() and then trying to get correct return of value of main(). --- regionfixer.py | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/regionfixer.py b/regionfixer.py index a9592e4..0f83bf7 100644 --- a/regionfixer.py +++ b/regionfixer.py @@ -337,8 +337,9 @@ def main(): # Args are world_paths and region files if not args: - parser.error('No world paths or region files specified! Use ' + print('Error: No world paths or region files specified! 
Use ' '--help for a complete list of options.') + return RV_NOTHING_TO_SCAN world_list, regionset = parse_paths(args) From 66ed5aac18439a95260ac75e0c360b11169d2d77 Mon Sep 17 00:00:00 2001 From: Alejandro Aguilera Date: Thu, 19 Sep 2019 19:33:30 +0200 Subject: [PATCH 095/151] Properly implement exception in the option parser to fix error msgs --- regionfixer.py | 47 +++++++++++++++++++++++++++++++++++------------ 1 file changed, 35 insertions(+), 12 deletions(-) diff --git a/regionfixer.py b/regionfixer.py index 0f83bf7..7242fbf 100644 --- a/regionfixer.py +++ b/regionfixer.py @@ -22,7 +22,7 @@ # from multiprocessing import freeze_support -from optparse import OptionParser +from optparse import OptionParser, BadOptionError from getpass import getpass import sys @@ -44,6 +44,7 @@ RV_OK = 0 # world scanned and no problems found RV_CRASH = 1 # crash or end unexpectedly RV_NOTHING_TO_SCAN = 2 # no files/worlds to scan +RV_WRONG_COMMAND = 20 # the command line used is wrong and region fixer didn't execute RV_BAD_WORLD = 3 # scan completed successfully but problems have been found in the scan def fix_bad_chunks(options, scanned_obj): @@ -121,6 +122,26 @@ def delete_bad_regions(options, scanned_obj): else: print(("No regions to delete with status: {0}".format(status))) +class WrongOption(BadOptionError): + def __init__(self, *args, **kwargs): + BadOptionError.__init__(self, *args, **kwargs) + +class NewOptionParser(OptionParser): + def __init__(self, *args, **kwargs): + OptionParser.__init__(self, *args, **kwargs) + def error(self, msg): + """error(msg : string) + + Print a usage message incorporating 'msg' to stderr and exit. + If you override this in a subclass, it should not return -- it + should either exit or raise an exception. + """ + self.print_usage(sys.stderr) + #self.exit(2, "%s: error: %s\n" % (self.get_prog_name(), msg)) + #raise self.BadOptionError(msg) + raise WrongOption(msg) + + def main(): @@ -133,7 +154,7 @@ def main(): 'are welcome to redistribute it under certain conditions; ' 'see COPYING.txt for details.') - parser = OptionParser(description=('Program to check the integrity of ' + parser = NewOptionParser(description=('Program to check the integrity of ' 'Minecraft worlds and fix them when ' 'possible. It uses NBT by twoolie. ' 'Author: Alejandro Aguilera (Fenixin)'), @@ -337,9 +358,8 @@ def main(): # Args are world_paths and region files if not args: - print('Error: No world paths or region files specified! Use ' + parser.error('Error: No world paths or region files specified! Use ' '--help for a complete list of options.') - return RV_NOTHING_TO_SCAN world_list, regionset = parse_paths(args) @@ -359,12 +379,10 @@ def main(): any_region_replace_option = o.replace_too_small any_region_delete_option = o.delete_too_small - error = parser.error - if o.interactive or o.summary: if any_chunk_replace_option or any_region_replace_option: - error('Can\'t use the options --replace-* , --delete-* and ' + parser.error('Error: Can\'t use the options --replace-* , --delete-* and ' '--log with --interactive. 
You can choose all this ' 'while in the interactive mode.') @@ -372,25 +390,25 @@ def main(): # Not options.interactive if o.backups: if not any_chunk_replace_option and not any_region_replace_option: - error('The option --backups needs at least one of the ' + parser.error('Error: The option --backups needs at least one of the ' '--replace-* options') else: if (len(regionset) > 0): - error('You can\'t use the replace options while scanning ' + parser.error('Error: You can\'t use the replace options while scanning ' 'separate region files. The input should be only one ' 'world and you introduced {0} individual region ' 'files.'.format(len(regionset))) elif (len(world_list) > 1): - error('You can\'t use the replace options while scanning ' + parser.error('Error: You can\'t use the replace options while scanning ' 'multiple worlds. The input should be only one ' 'world and you introduced {0} ' 'worlds.'.format(len(world_list))) if not o.backups and any_chunk_replace_option: - error("The options --replace-* need the --backups option") + parser.error("Error: The options --replace-* need the --backups option") if o.entity_limit < 0: - error("The entity limit must be at least 0!") + parser.error("Error: The entity limit must be at least 0!") print("\nWelcome to Region Fixer!") print(("(version: {0})".format(parser.version))) @@ -559,6 +577,11 @@ def main(): try: freeze_support() value = main() + + except WrongOption as e: + had_exception = False + value = RV_WRONG_COMMAND + print(str(e)) except ChildProcessException as e: had_exception = True From 4fb602ddd64d35eef7785ae9e24e92d6fa8e7e70 Mon Sep 17 00:00:00 2001 From: Alejandro Aguilera Date: Thu, 19 Sep 2019 20:14:50 +0200 Subject: [PATCH 096/151] Add default value to return --- regionfixer.py | 8 ++------ 1 file changed, 2 insertions(+), 6 deletions(-) diff --git a/regionfixer.py b/regionfixer.py index 7242fbf..88af6ad 100644 --- a/regionfixer.py +++ b/regionfixer.py @@ -132,17 +132,12 @@ def __init__(self, *args, **kwargs): def error(self, msg): """error(msg : string) - Print a usage message incorporating 'msg' to stderr and exit. - If you override this in a subclass, it should not return -- it - should either exit or raise an exception. + Overrides the original method. Raises WrongOption and stores the msm. 
""" self.print_usage(sys.stderr) - #self.exit(2, "%s: error: %s\n" % (self.get_prog_name(), msg)) - #raise self.BadOptionError(msg) raise WrongOption(msg) - def main(): usage = ('usage: \n%prog [options] ' @@ -573,6 +568,7 @@ def main(): '(Answering no will print the bug report)') had_exception = False auto_reported = False + value = 0 try: freeze_support() From da51bb90ec31e4c8516297ce871e62049de8868a Mon Sep 17 00:00:00 2001 From: Alejandro Aguilera Date: Thu, 19 Sep 2019 23:02:42 +0200 Subject: [PATCH 097/151] Bump version number to 0.3.2 --- regionfixer_core/version.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/regionfixer_core/version.py b/regionfixer_core/version.py index 478bca5..f6e7a07 100644 --- a/regionfixer_core/version.py +++ b/regionfixer_core/version.py @@ -4,5 +4,5 @@ @author: Alejandro ''' -version_string = "0.3.1" +version_string = "0.3.2" version_numbers = version_string.split('.') From 64dadf981733d5ddd0240f23754a0794fa7b4b45 Mon Sep 17 00:00:00 2001 From: Alejandro Aguilera Date: Sun, 3 May 2020 23:07:42 +0200 Subject: [PATCH 098/151] Update issue templates --- .github/ISSUE_TEMPLATE/bug_report.md | 31 +++++++++++++++++++++++ .github/ISSUE_TEMPLATE/feature_request.md | 23 +++++++++++++++++ 2 files changed, 54 insertions(+) create mode 100644 .github/ISSUE_TEMPLATE/bug_report.md create mode 100644 .github/ISSUE_TEMPLATE/feature_request.md diff --git a/.github/ISSUE_TEMPLATE/bug_report.md b/.github/ISSUE_TEMPLATE/bug_report.md new file mode 100644 index 0000000..dfe7056 --- /dev/null +++ b/.github/ISSUE_TEMPLATE/bug_report.md @@ -0,0 +1,31 @@ +--- +name: Bug report +about: Create a report to help us improve +title: '' +labels: Bug +assignees: '' + +--- + +**Describe the bug** +A clear and concise description of what the bug is. + +**Full copied text from the MS-DOS view** +Please, include the command used to run regionfixer. + +**Expected behavior** +A clear and concise description of what you expected to happen. + +**Screenshots** +If applicable, add screenshots to help explain your problem. + +**Files that would help solving the issue** +If possible, the world/files that triggers the error. + +**Desktop (please complete the following information):** + - OS: [e.g. iOS] + - Python version: [e.g. 2.7] + - Region Fixer Version [e.g. 2.0.1] + +**Additional context** +Add any other context about the problem here. diff --git a/.github/ISSUE_TEMPLATE/feature_request.md b/.github/ISSUE_TEMPLATE/feature_request.md new file mode 100644 index 0000000..ae32cb5 --- /dev/null +++ b/.github/ISSUE_TEMPLATE/feature_request.md @@ -0,0 +1,23 @@ +--- +name: Feature request +about: Suggest an idea for this project +title: '' +labels: Feature request +assignees: '' + +--- + +**Is your feature request related to a problem? Please describe.** +A clear and concise description of what the problem is. Ex. I'm always frustrated when [...] + +**Describe the solution you'd like** +A clear and concise description of what you want to happen. + +**Describe how the solution would be implemented** +If possible, describe how the solution would be implemented. + +**Describe alternatives you've considered** +A clear and concise description of any alternative solutions or features you've considered. + +**Additional context** +Add any other context or screenshots about the feature request here. 
From 39ad4fed19c61b7b40e260cee9166255d8e2077a Mon Sep 17 00:00:00 2001 From: Tobias Lindenberg Date: Sat, 23 May 2020 18:28:39 +0200 Subject: [PATCH 099/151] optparser ported to argparse; general formatting and typo; more adaptations to python3 --- regionfixer.py | 568 +++++++++++++++----------------- regionfixer_core/interactive.py | 83 +++-- regionfixer_core/scan.py | 126 +++---- regionfixer_core/util.py | 39 ++- regionfixer_core/world.py | 257 ++++++++------- 5 files changed, 540 insertions(+), 533 deletions(-) diff --git a/regionfixer.py b/regionfixer.py index 88af6ad..a1eeac3 100644 --- a/regionfixer.py +++ b/regionfixer.py @@ -21,35 +21,38 @@ # along with this program. If not, see . # -from multiprocessing import freeze_support -from optparse import OptionParser, BadOptionError +import argparse from getpass import getpass +from multiprocessing import freeze_support import sys from regionfixer_core import world -from regionfixer_core.scan import console_scan_world, console_scan_regionset,\ - ChildProcessException +from regionfixer_core.scan import (console_scan_world, + console_scan_regionset, + ChildProcessException) from regionfixer_core.interactive import InteractiveLoop -from regionfixer_core.util import entitle, is_bare_console, parse_paths,\ - parse_backup_list +from regionfixer_core.util import (entitle, + is_bare_console, + parse_paths, + parse_backup_list) from regionfixer_core.version import version_string from regionfixer_core.bug_reporter import BugReporter from regionfixer_core.world import CHUNK_MISSING_ENTITIES_TAG - ################ # Return values ################ -RV_OK = 0 # world scanned and no problems found -RV_CRASH = 1 # crash or end unexpectedly -RV_NOTHING_TO_SCAN = 2 # no files/worlds to scan -RV_WRONG_COMMAND = 20 # the command line used is wrong and region fixer didn't execute -RV_BAD_WORLD = 3 # scan completed successfully but problems have been found in the scan +RV_OK = 0 # world scanned and no problems found +RV_CRASH = 1 # crash or end unexpectedly +RV_NOTHING_TO_SCAN = 2 # no files/worlds to scan +RV_WRONG_COMMAND = 20 # the command line used is wrong and region fixer didn't execute +RV_BAD_WORLD = 3 # scan completed successfully but problems have been found in the scan + def fix_bad_chunks(options, scanned_obj): """ Fixes chunks that can be repaired. - + Doesn't work right now. """ print("") @@ -58,25 +61,25 @@ def fix_bad_chunks(options, scanned_obj): status = world.CHUNK_STATUS_TEXT[CHUNK_MISSING_ENTITIES_TAG] if options.fix_missing_tag: if total: - + text = ' Repairing chunks with status: {0} '.format(status) print(("\n{0:#^60}".format(text))) counter = scanned_obj.fix_problematic_chunks(problem) print(("\nRepaired {0} chunks with status: {1}".format(counter, - status))) + status))) else: print(("No chunks to fix with status: {0}".format(status))) def delete_bad_chunks(options, scanned_obj): """ Takes a scanned object and deletes all the bad chunks. - + Keywords arguments options -- options as returned by the module optparse scanned_obj -- a regionfixer world or regionset - + Returns nothing. - + This function will deletes all the chunks with problems iterating through all the possible problems and using the options given. 
""" @@ -97,7 +100,7 @@ def delete_bad_chunks(options, scanned_obj): print(("\n{0:#^60}".format(text))) counter = scanned_obj.remove_problematic_chunks(problem) print(("\nDeleted {0} chunks with status: {1}".format(counter, - status))) + status))) else: print(("No chunks to delete with status: {0}".format(status))) @@ -118,29 +121,13 @@ def delete_bad_regions(options, scanned_obj): print(("{0:#^60}".format(text))) counter = scanned_obj.remove_problematic_regions(problem) print(("Deleted {0} regions with status: {1}".format(counter, - status))) + status))) else: print(("No regions to delete with status: {0}".format(status))) -class WrongOption(BadOptionError): - def __init__(self, *args, **kwargs): - BadOptionError.__init__(self, *args, **kwargs) - -class NewOptionParser(OptionParser): - def __init__(self, *args, **kwargs): - OptionParser.__init__(self, *args, **kwargs) - def error(self, msg): - """error(msg : string) - - Overrides the original method. Raises WrongOption and stores the msm. - """ - self.print_usage(sys.stderr) - raise WrongOption(msg) - def main(): - - usage = ('usage: \n%prog [options] ' + usage = ('%(prog)s [options] ' ' ... ...') epilog = ('Copyright (C) 2011 Alejandro Aguilera (Fenixin)\n' 'https://github.com/Fenixin/Minecraft-Region-Fixer\n' @@ -149,189 +136,189 @@ def main(): 'are welcome to redistribute it under certain conditions; ' 'see COPYING.txt for details.') - parser = NewOptionParser(description=('Program to check the integrity of ' - 'Minecraft worlds and fix them when ' - 'possible. It uses NBT by twoolie. ' - 'Author: Alejandro Aguilera (Fenixin)'), - prog='region_fixer', - version=version_string, - usage=usage, - epilog=epilog) - - add_option = parser.add_option - - add_option('--backups', - '-b', - help=('List of backup directories of the Minecraft world ' - 'to use to fix corrupted chunks and/or wrong located ' - 'chunks. Warning! Region-Fixer is not going to check if' - 'it\'s the same world, be careful! This argument can be a' - ' comma separated list (but never with spaces between ' - 'elements!). This option can be only used scanning one ' - 'world.'), - metavar='', - type=str, - dest='backups', - default=None) - - add_option('--replace-corrupted', - '--rc', - help='Tries to replace the corrupted chunks using the backup' - ' directories. This option can be only used scanning one' - ' world.', - default=False, - dest='replace_corrupted', - action='store_true') - - add_option('--replace-wrong-located', - '--rw', - help='Tries to replace the wrong located chunks using the ' - 'backup directories. This option can be only used scanning' - ' one world.', - default=False, - dest='replace_wrong_located', - action='store_true') - - add_option('--replace-entities', - '--re', - help='Tries to replace the chunks with too many entities using ' - 'the backup directories. This option can be only used ' - 'scanning one world.', - default=False, - dest='replace_entities', - action='store_true') - - add_option('--replace-shared-offset', - '--rs', - help='Tries to replace the chunks with a shared offset using ' - 'the backup directories. This option can be only used' - 'scanning one world.', - default=False, - dest='replace_shared_offset', - action='store_true') - - add_option('--replace-too-small', - '--rt', - help='Tries to replace the region files that are too small to ' - 'be actually be a region file using the backup ' - 'directories. 
This option can be only used scanning one ' - 'world.', - default=False, - dest='replace_too_small', - action='store_true') - - add_option('--delete-corrupted', - '--dc', - help='[WARNING!] This option deletes! This option will delete ' - 'all the corrupted chunks. Used with --replace-corrupted ' - 'or --replace-wrong-located it will delete all the ' - 'non-replaced chunks.', - action='store_true', - default=False) - - add_option('--delete-wrong-located', - '--dw', - help=('[WARNING!] This option deletes!' - 'The same as --delete-corrupted but for wrong ' - 'located chunks'), - action='store_true', - default=False, - dest='delete_wrong_located') - - add_option('--delete-entities', - '--de', - help='[WARNING!] This option deletes! This option deletes ALL ' - 'the entities in chunks with more entities than ' - '--entity-limit (300 by default). In a Minecraft ' - 'entities are mostly mobs and items dropped in the ' - 'ground, items in chests and other stuff won\'t be ' - 'touched. Read the README for more info. Region-Fixer ' - 'will delete the entities while scanning so you can ' - 'stop and resume the process', - action='store_true', - default=False, - dest='delete_entities') - - add_option('--delete-shared-offset', - '--ds', - help='[WARNING!] This option deletes! This option will delete ' - 'all the chunk with status shared offset. It will remove ' - 'the region header for the false chunk, note that you ' - 'don\'t loos any chunk doing this.', - action='store_true', - default=False, - dest='delete_shared_offset') - - add_option('--delete-missing-tag', - '--dmt', - help='[WARNING!] This option deletes! Removes any chunks ' - 'with the mandatory entities tag missing.', - dest='delete_missing_tag', - default=False, - action='store_true') - - add_option('--fix-missing-tag', - '--fm', - help='Fixes chunks that have the Entities tag missing. This will add the missing tag.', - dest='fix_missing_tag', - default=False, - action='store_true') - - add_option('--delete-too-small', - '--dt', - help='[WARNING!] This option deletes! Removes any region files ' - 'found to be too small to actually be a region file.', - dest='delete_too_small', - default=False, - action='store_true') - - add_option('--entity-limit', - '--el', - help='Specify the limit for the --delete-entities option ' - '(default = 300).', - dest='entity_limit', - default=300, - action='store', - type=int) - - add_option('--processes', - '-p', - help='Set the number of workers to use for scanning. (default ' - '= 1, not use multiprocessing at all)', - action='store', - type=int, - default=1) - - add_option('--verbose', - '-v', - help='Don\'t use a progress bar, instead print a line per ' - 'scanned region file with results information. The ' - 'letters mean c: corrupted; w: wrong located; t: total of ' - 'chunks; tme: too many entities problem', - action='store_true', - default=False) - - add_option('--interactive', - '-i', - help='Enter in interactive mode, where you can scan, see the ' - 'problems, and fix them in a terminal like mode', - dest='interactive', - default=False, - action='store_true',) - - add_option('--log', - '-l', - help='Saves a log of all the problems found in the specified ' - 'file. The log file contains all the problems found with ' - 'this information: region file, chunk coordinates and ' - 'problem. 
Use \'-\' as name to show the log at the end ' - 'of the scan.', - type=str, - default=None, - dest='summary') - - (options, args) = parser.parse_args() - o = options + parser = argparse.ArgumentParser(description=('Program to check the integrity of ' + 'Minecraft worlds and fix them when ' + 'possible. It uses NBT by twoolie. ' + 'Author: Alejandro Aguilera (Fenixin)'), + prog='region_fixer', + usage=usage, + epilog=epilog) + + parser.add_argument('--backups', + '-b', + help=('List of backup directories of the Minecraft world ' + 'to use to fix corrupted chunks and/or wrong located ' + 'chunks. Warning! Region-Fixer is not going to check if' + 'it\'s the same world, be careful! This argument can be a' + ' comma separated list (but never with spaces between ' + 'elements!). This option can be only used scanning one ' + 'world.'), + metavar='', + type=str, + dest='backups', + default=None) + + parser.add_argument('--replace-corrupted', + '--rc', + help='Tries to replace the corrupted chunks using the backup' + ' directories. This option can be only used scanning one' + ' world.', + default=False, + dest='replace_corrupted', + action='store_true') + + parser.add_argument('--replace-wrong-located', + '--rw', + help='Tries to replace the wrong located chunks using the ' + 'backup directories. This option can be only used scanning' + ' one world.', + default=False, + dest='replace_wrong_located', + action='store_true') + + parser.add_argument('--replace-entities', + '--re', + help='Tries to replace the chunks with too many entities using ' + 'the backup directories. This option can be only used ' + 'scanning one world.', + default=False, + dest='replace_entities', + action='store_true') + + parser.add_argument('--replace-shared-offset', + '--rs', + help='Tries to replace the chunks with a shared offset using ' + 'the backup directories. This option can be only used' + 'scanning one world.', + default=False, + dest='replace_shared_offset', + action='store_true') + + parser.add_argument('--replace-too-small', + '--rt', + help='Tries to replace the region files that are too small to ' + 'be actually be a region file using the backup ' + 'directories. This option can be only used scanning one ' + 'world.', + default=False, + dest='replace_too_small', + action='store_true') + + parser.add_argument('--delete-corrupted', + '--dc', + help='[WARNING!] This option deletes! This option will delete ' + 'all the corrupted chunks. Used with --replace-corrupted ' + 'or --replace-wrong-located it will delete all the ' + 'non-replaced chunks.', + action='store_true', + default=False) + + parser.add_argument('--delete-wrong-located', + '--dw', + help=('[WARNING!] This option deletes!' + 'The same as --delete-corrupted but for wrong ' + 'located chunks'), + action='store_true', + default=False, + dest='delete_wrong_located') + + parser.add_argument('--delete-entities', + '--de', + help='[WARNING!] This option deletes! This option deletes ALL ' + 'the entities in chunks with more entities than ' + '--entity-limit (300 by default). In a Minecraft ' + 'entities are mostly mobs and items dropped in the ' + 'ground, items in chests and other stuff won\'t be ' + 'touched. Read the README for more info. Region-Fixer ' + 'will delete the entities while scanning so you can ' + 'stop and resume the process', + action='store_true', + default=False, + dest='delete_entities') + + parser.add_argument('--delete-shared-offset', + '--ds', + help='[WARNING!] This option deletes! 
This option will delete ' + 'all the chunk with status shared offset. It will remove ' + 'the region header for the false chunk, note that you ' + 'don\'t loos any chunk doing this.', + action='store_true', + default=False, + dest='delete_shared_offset') + + parser.add_argument('--delete-missing-tag', + '--dmt', + help='[WARNING!] This option deletes! Removes any chunks ' + 'with the mandatory entities tag missing.', + dest='delete_missing_tag', + default=False, + action='store_true') + + parser.add_argument('--fix-missing-tag', + '--fm', + help='Fixes chunks that have the Entities tag missing. This will add the missing tag.', + dest='fix_missing_tag', + default=False, + action='store_true') + + parser.add_argument('--delete-too-small', + '--dt', + help='[WARNING!] This option deletes! Removes any region files ' + 'found to be too small to actually be a region file.', + dest='delete_too_small', + default=False, + action='store_true') + + parser.add_argument('--entity-limit', + '--el', + help='Specify the limit for the --delete-entities option ' + '(default = 300).', + dest='entity_limit', + default=300, + action='store', + type=int) + + parser.add_argument('--processes', + '-p', + help='Set the number of workers to use for scanning. (default ' + '= 1, not use multiprocessing at all)', + action='store', + type=int, + default=1) + + parser.add_argument('--verbose', + '-v', + help='Don\'t use a progress bar, instead print a line per ' + 'scanned region file with results information. The ' + 'letters mean c: corrupted; w: wrong located; t: total of ' + 'chunks; tme: too many entities problem', + action='store_true', + default=False) + + parser.add_argument('--interactive', + '-i', + help='Enter in interactive mode, where you can scan, see the ' + 'problems, and fix them in a terminal like mode', + dest='interactive', + default=False, + action='store_true', ) + + parser.add_argument('--log', + '-l', + help='Saves a log of all the problems found in the specified ' + 'file. The log file contains all the problems found with ' + 'this information: region file, chunk coordinates and ' + 'problem. Use \'-\' as name to show the log at the end ' + 'of the scan.', + type=str, + default=None, + dest='summary') + + parser.add_argument('paths', + help='List with world or region paths', + nargs='*') + + args = parser.parse_args() if sys.version_info[0] != 3: print("") @@ -356,96 +343,90 @@ def main(): parser.error('Error: No world paths or region files specified! 
Use ' '--help for a complete list of options.') - world_list, regionset = parse_paths(args) + world_list, regionset = parse_paths(args.paths) if not (world_list or regionset): - print ("Error: No worlds or region files to scan!") + print("Error: No worlds or region files to scan!") return RV_NOTHING_TO_SCAN # Check basic options compatibilities - any_chunk_replace_option = o.replace_corrupted or \ - o.replace_wrong_located or \ - o.replace_entities or \ - o.replace_shared_offset - any_chunk_delete_option = o.delete_corrupted or \ - o.delete_wrong_located or \ - o.delete_entities or \ - o.delete_shared_offset - any_region_replace_option = o.replace_too_small - any_region_delete_option = o.delete_too_small - - - if o.interactive or o.summary: + any_chunk_replace_option = args.replace_corrupted or \ + args.replace_wrong_located or \ + args.replace_entities or \ + args.replace_shared_offset + any_region_replace_option = args.replace_too_small + + if args.interactive or args.summary: if any_chunk_replace_option or any_region_replace_option: parser.error('Error: Can\'t use the options --replace-* , --delete-* and ' - '--log with --interactive. You can choose all this ' - 'while in the interactive mode.') + '--log with --interactive. You can choose all this ' + 'while in the interactive mode.') else: # Not options.interactive - if o.backups: + if args.backups: if not any_chunk_replace_option and not any_region_replace_option: parser.error('Error: The option --backups needs at least one of the ' - '--replace-* options') + '--replace-* options') else: - if (len(regionset) > 0): + if len(regionset) > 0: parser.error('Error: You can\'t use the replace options while scanning ' - 'separate region files. The input should be only one ' - 'world and you introduced {0} individual region ' - 'files.'.format(len(regionset))) - elif (len(world_list) > 1): + 'separate region files. The input should be only one ' + 'world and you introduced {0} individual region ' + 'files.'.format(len(regionset))) + elif len(world_list) > 1: parser.error('Error: You can\'t use the replace options while scanning ' - 'multiple worlds. The input should be only one ' - 'world and you introduced {0} ' - 'worlds.'.format(len(world_list))) + 'multiple worlds. 
The input should be only one ' + 'world and you introduced {0} ' + 'worlds.'.format(len(world_list))) - if not o.backups and any_chunk_replace_option: + if not args.backups and any_chunk_replace_option: parser.error("Error: The options --replace-* need the --backups option") - if o.entity_limit < 0: + if args.entity_limit < 0: parser.error("Error: The entity limit must be at least 0!") print("\nWelcome to Region Fixer!") - print(("(version: {0})".format(parser.version))) + print(("(version: {0})".format(version_string))) # Do things with the option options args # Create a list of worlds containing the backups of the region files - if o.backups: - backup_worlds = parse_backup_list(o.backups) + if args.backups: + backup_worlds = parse_backup_list(args.backups) if not backup_worlds: - print ('[WARNING] No valid backup directories found, won\'t fix ' - 'any chunk.') + print('[WARNING] No valid backup directories found, won\'t fix ' + 'any chunk.') else: backup_worlds = [] # The scanning process starts found_problems_in_regionsets = False found_problems_in_worlds = False - if o.interactive: - c = InteractiveLoop(world_list, regionset, o, backup_worlds) + if args.interactive: + c = InteractiveLoop(world_list, regionset, args, backup_worlds) c.cmdloop() return RV_OK else: summary_text = "" # Scan the separate region files - + if len(regionset) > 0: - - console_scan_regionset(regionset, o.processes, o.entity_limit, - o.delete_entities, o.verbose) + + console_scan_regionset(regionset, args.processes, args.entity_limit, + args.delete_entities, args.verbose) print((regionset.generate_report(True))) # Delete chunks - delete_bad_chunks(options, regionset) + delete_bad_chunks(args, regionset) # Delete region files - delete_bad_regions(options, regionset) + delete_bad_regions(args, regionset) # fix chunks - fix_bad_chunks(options, regionset) + fix_bad_chunks(args, regionset) # Verbose log - if options.summary: + if args.summary: summary_text += "\n" summary_text += entitle("Separate region files") summary_text += "\n" @@ -454,19 +435,19 @@ def main(): summary_text += t else: summary_text += "No problems found.\n\n" - + # Check if problems have been found if regionset.has_problems: found_problems_in_regionsets = True # scan all the world folders - + for w in world_list: w_name = w.get_name() print((entitle(' Scanning world: {0} '.format(w_name), 0))) - console_scan_world(w, o.processes, o.entity_limit, - o.delete_entities, o.verbose) + console_scan_world(w, args.processes, args.entity_limit, + args.delete_entities, args.verbose) print("") print((entitle('Scan results for: {0}'.format(w_name), 0))) @@ -474,13 +455,13 @@ def main(): print("") # Replace chunks - if backup_worlds and not len(world_list) > 1: - del_ent = options.delete_entities - ent_lim = options.entity_limit - options_replace = [o.replace_corrupted, - o.replace_wrong_located, - o.replace_entities, - o.replace_shared_offset] + if backup_worlds and len(world_list) <= 1: + del_ent = args.delete_entities + ent_lim = args.entity_limit + options_replace = [args.replace_corrupted, + args.replace_wrong_located, + args.replace_entities, + args.replace_shared_offset] replacing = list(zip(options_replace, world.CHUNK_PROBLEMS_ITERATOR)) for replace, (problem, status, arg) in replacing: if replace: @@ -501,10 +482,10 @@ def main(): print("Can't use the replace options while scanning more than one world!") # replace region files - if backup_worlds and not len(world_list) > 1: - del_ent = options.delete_entities - ent_lim = options.entity_limit - 
options_replace = [o.replace_too_small] + if backup_worlds and len(world_list) <= 1: + del_ent = args.delete_entities + ent_lim = args.entity_limit + options_replace = [args.replace_too_small] replacing = list(zip(options_replace, world.REGION_PROBLEMS_ITERATOR)) for replace, (problem, status, arg) in replacing: if replace: @@ -526,16 +507,16 @@ def main(): print("Can't use the replace options while scanning more than one world!") # delete chunks - delete_bad_chunks(options, w) + delete_bad_chunks(args, w) # delete region files - delete_bad_regions(options, w) + delete_bad_regions(args, w) # fix chunks - fix_bad_chunks(options, w) + fix_bad_chunks(args, w) # print a summary for this world - if options.summary: + if args.summary: summary_text += w.summary() # check if problems have been found @@ -543,22 +524,22 @@ def main(): found_problems_in_worlds = True # verbose log text - if options.summary == '-': + if args.summary == '-': print("\nPrinting log:\n") print(summary_text) - elif options.summary != None: + elif args.summary is not None: try: - f = open(options.summary, 'w') + f = open(args.summary, 'w') f.write(summary_text) f.write('\n') f.close() - print(("Log file saved in \'{0}\'.".format(options.summary))) + print(("Log file saved in \'{0}\'.".format(args.summary))) except: print("Something went wrong while saving the log file!") - + if found_problems_in_regionsets or found_problems_in_worlds: return RV_BAD_WORLD - + return RV_OK @@ -573,17 +554,12 @@ def main(): try: freeze_support() value = main() - - except WrongOption as e: - had_exception = False - value = RV_WRONG_COMMAND - print(str(e)) except ChildProcessException as e: had_exception = True print(ERROR_MSG) bug_sender = BugReporter(e.printable_traceback) - #auto_reported = bug_sender.ask_and_send(QUESTION_TEXT) + # auto_reported = bug_sender.ask_and_send(QUESTION_TEXT) bug_report = bug_sender.error_str value = RV_CRASH @@ -592,7 +568,7 @@ def main(): print(ERROR_MSG) # Traceback will be taken in init bug_sender = BugReporter() - #auto_reported = bug_sender.ask_and_send(QUESTION_TEXT) + # auto_reported = bug_sender.ask_and_send(QUESTION_TEXT) bug_report = bug_sender.error_str value = RV_CRASH diff --git a/regionfixer_core/interactive.py b/regionfixer_core/interactive.py index ee8a0af..9e5e610 100644 --- a/regionfixer_core/interactive.py +++ b/regionfixer_core/interactive.py @@ -22,9 +22,9 @@ # -from . import world - from cmd import Cmd + +from . 
import world from .scan import console_scan_world, console_scan_regionset @@ -33,7 +33,7 @@ def __init__(self, world_list, regionset, options, backup_worlds): Cmd.__init__(self) self.world_list = world_list self.regionset = regionset - self.world_names = [str(i.name) for i in self.world_list] + self.world_names = [str(i.name) for i in self.world_list] # if there's only one world use it if len(self.world_list) == 1 and len(self.regionset) == 0: self.current = world_list[0] @@ -70,7 +70,7 @@ def __init__(self, world_list, regionset, options, backup_worlds): ################################################# # Do methods ################################################# - def do_set(self,arg): + def do_set(self, arg): """ Command to change some options and variables in interactive mode """ args = arg.split() @@ -105,15 +105,17 @@ def do_set(self,arg): print(" ### world{0} ###".format(number)) number += 1 # add a tab and print - for i in w.__str__().split("\n"): print("\t" + i) - print() + for i in w.__str__().split("\n"): + print("\t" + i) + print() print(" ### regionset ###") - for i in self.regionset.__str__().split("\n"): print("\t" + i) + for i in self.regionset.__str__().split("\n"): + print("\t" + i) print("\n(Use \"set workload world1\" or name_of_the_world or regionset to choose one)") else: a = args[1] - if len(a) == 6 and a[:5] == "world" and int(a[-1]) >= 1 : + if len(a) == 6 and a[:5] == "world" and int(a[-1]) >= 1: # get the number and choos the correct world from the list number = int(args[1][-1]) - 1 try: @@ -187,8 +189,10 @@ def do_summary(self, arg): def do_current_workload(self, arg): """ Prints the info of the current workload """ if len(arg) == 0: - if self.current: print(self.current) - else: print("No world/region-set is set! Use \'set workload\' to set a world/regionset to work with.") + if self.current: + print(self.current) + else: + print("No world/region-set is set! Use \'set workload\' to set a world/regionset to work with.") else: print("This command doesn't use any arguments.") @@ -285,7 +289,8 @@ def do_remove_entities(self, arg): self.current.rescan_entities(self.options) elif answer == 'no': print("Ok!") - else: print("Invalid answer, use \'yes\' or \'no\' the next time!.") + else: + print("Invalid answer, use \'yes\' or \'no\' the next time!.") else: print("The world hasn't be scanned (or it needs a rescan). Use \'scan\' to scan it.") @@ -349,7 +354,7 @@ def do_replace_regions(self, arg): print("Unknown argument.") else: print("The world hasn't be scanned (or it needs a rescan). Use \'scan\' to scan it.") - + def do_remove_regions(self, arg): if self.current and self.current.scanned: if len(arg.split()) == 0: @@ -368,7 +373,6 @@ def do_remove_regions(self, arg): print("Unknown argument.") else: print("The world hasn't be scanned (or it needs a rescan). Use \'scan\' to scan it.") - pass def do_quit(self, arg): print("Quitting.") @@ -395,11 +399,11 @@ def complete_arg(self, text, possible_args): def complete_set(self, text, line, begidx, endidx): if "workload " in line: # return the list of world names plus 'regionset' plus a list of world1, world2... 
- possible_args = tuple(self.world_names) + ('regionset',) + tuple([ 'world' + str(i+1) for i in range(len(self.world_names))]) + possible_args = tuple(self.world_names) + ('regionset',) + tuple(['world' + str(i + 1) for i in range(len(self.world_names))]) elif 'verbose ' in line: - possible_args = ('True','False') + possible_args = ('True', 'False') else: - possible_args = ('entity-limit','verbose','processes','workload') + possible_args = ('entity-limit', 'verbose', 'processes', 'workload') return self.complete_arg(text, possible_args) def complete_count_chunks(self, text, line, begidx, endidx): @@ -432,38 +436,42 @@ def complete_replace_regions(self, text, line, begidx, endidx): # TODO sería una buena idea poner un artículo de ayuda de como usar el programa en un caso típico. # TODO: the help texts need a normalize def help_set(self): - print ("\nSets some variables used for the scan in interactive mode. " - "If you run this command without an argument for a variable " - "you can see the current state of the variable. You can set:\n" - " verbose\n" - "If True prints a line per scanned region file instead of " - "showing a progress bar.\n" - " entity-limit\n" - "If a chunk has more than this number of entities it will be " - "added to the list of chunks with too many entities problem.\n" - " processes" - "Number of cores used while scanning the world.\n" - " workload\n" - "If you input a few worlds you can choose wich one will be " - "scanned using this command.\n") + print("\nSets some variables used for the scan in interactive mode. " + "If you run this command without an argument for a variable " + "you can see the current state of the variable. You can set:\n" + " verbose\n" + "If True prints a line per scanned region file instead of " + "showing a progress bar.\n" + " entity-limit\n" + "If a chunk has more than this number of entities it will be " + "added to the list of chunks with too many entities problem.\n" + " processes" + "Number of cores used while scanning the world.\n" + " workload\n" + "If you input a few worlds you can choose wich one will be " + "scanned using this command.\n") + def help_current_workload(self): print("\nPrints information of the current region-set/world. This will be the region-set/world to scan and fix.\n") + def help_scan(self): print("\nScans the current world set or the region set.\n") def help_count_chunks(self): print("\n Prints out the number of chunks with the given status. 
For example") print("\'count corrupted\' prints the number of corrupted chunks in the world.") - print() + print() print("Possible status are: {0}\n".format(self.possible_chunk_args_text)) + def help_remove_entities(self): print("\nRemove all the entities in chunks that have more than entity-limit entities.") - print() + print() print("This chunks are the ones with status \'too many entities\'.\n") + def help_remove_chunks(self): print("\nRemoves bad chunks with the given problem.") print() - print("Please, be careful, when used with the status too-many-entities this will") + print("Please, be careful, when used with the status too-many-entities this will") print("REMOVE THE CHUNKS with too many entities problems, not the entities.") print("To remove only the entities see the command remove_entities.") print() @@ -471,6 +479,7 @@ def help_remove_chunks(self): print() print("Possible status are: {0}\n".format(self.possible_chunk_args_text)) print() + def help_replace_chunks(self): print("\nReplaces bad chunks with the given status using the backups directories.") print() @@ -485,8 +494,9 @@ def help_replace_chunks(self): def help_count_regions(self): print("\n Prints out the number of regions with the given status. For example ") print("\'count_regions too-small\' prints the number of region with \'too-small\' status.") - print() + print() print("Possible status are: {0}\n".format(self.possible_region_args_text)) + def help_remove_regions(self): print("\nRemoves regions with the given status.") print() @@ -497,6 +507,7 @@ def help_remove_regions(self): print("Possible status are: {0}".format(self.possible_region_args_text)) print() print("Note: after removing any regions you have to rescan the world.\n") + def help_replace_regions(self): print("\nReplaces regions with the given status.") print() @@ -511,11 +522,15 @@ def help_replace_regions(self): def help_summary(self): print("\nPrints a summary of all the problems found in the current workload.\n") + def help_quit(self): print("\nQuits interactive mode, exits region-fixer. Same as \'EOF\' and \'exit\' commands.\n") + def help_EOF(self): print("\nQuits interactive mode, exits region-fixer. Same as \'quit\' and \'exit\' commands\n") + def help_exit(self): print("\nQuits interactive mode, exits region-fixer. Same as \'quit\' and \'EOF\' commands\n") + def help_help(self): print("Prints help help.") diff --git a/regionfixer_core/scan.py b/regionfixer_core/scan.py index 76aad3c..e90f3b1 100644 --- a/regionfixer_core/scan.py +++ b/regionfixer_core/scan.py @@ -34,22 +34,22 @@ import nbt.region as region import nbt.nbt as nbt from nbt.nbt import MalformedFileError -from nbt.region import ChunkDataError, ChunkHeaderError,\ - RegionHeaderError, InconceivedChunk +from nbt.region import (ChunkDataError, + ChunkHeaderError, + RegionHeaderError, + InconceivedChunk) from progressbar import ProgressBar, Bar, AdaptiveETA, SimpleProgress from . import world from regionfixer_core.util import entitle from regionfixer_core.world import DATAFILE_OK - -#~ TUPLE_COORDS = 0 -#~ TUPLE_DATA_COORDS = 0 -#~ TUPLE_GLOBAL_COORDS = 2 +# ~ TUPLE_COORDS = 0 +# ~ TUPLE_DATA_COORDS = 0 +# ~ TUPLE_GLOBAL_COORDS = 2 TUPLE_NUM_ENTITIES = 0 TUPLE_STATUS = 1 - logging.basicConfig(filename=None, level=logging.CRITICAL) @@ -59,6 +59,7 @@ class ChildProcessException(Exception): Stores all the info given by sys.exc_info() and the scanned file object which is probably partially filled. 
""" + def __init__(self, partial_scanned_file, exc_type, exc_class, tb_text): self.scanned_file = partial_scanned_file self.exc_type = exc_type @@ -138,26 +139,26 @@ def _mp_data_pool_init(d): Requiere to pass the multiprocessing queue as argument. """ - assert(type(d) == dict) - assert('queue' in d) + assert isinstance(d, dict) + assert 'queue' in d multiprocess_scan_data.q = d['queue'] def _mp_regionset_pool_init(d): """ Function to initialize the multiprocessing in scan_regionset. Is used to pass values to the child process. """ - assert(type(d) == dict) - assert('regionset' in d) - assert('queue' in d) - assert('entity_limit' in d) - assert('remove_entities' in d) + assert isinstance(d, dict) + assert 'regionset' in d + assert 'queue' in d + assert 'entity_limit' in d + assert 'remove_entities' in d multiprocess_scan_regionfile.regionset = d['regionset'] multiprocess_scan_regionfile.q = d['queue'] multiprocess_scan_regionfile.entity_limit = d['entity_limit'] multiprocess_scan_regionfile.remove_entities = d['remove_entities'] -class AsyncScanner(object): +class AsyncScanner: """ Class to derive all the scanner classes from. To implement a scanner you have to override: @@ -165,6 +166,7 @@ class AsyncScanner(object): Use try-finally to call terminate, if not processes will be hanging in the background """ + def __init__(self, data_structure, processes, scan_function, init_args, _mp_init_function): """ Init the scanner. @@ -175,7 +177,7 @@ def __init__(self, data_structure, processes, scan_function, init_args, init_args are the arguments passed to the init function _mp_init_function is the function used to init the child processes """ - assert(isinstance(data_structure, world.DataSet)) + assert isinstance(data_structure, world.DataSet) self.data_structure = data_structure self.list_files_to_scan = data_structure._get_list() self.processes = processes @@ -187,8 +189,8 @@ def __init__(self, data_structure, processes, scan_function, init_args, # NOTE TO SELF: initargs doesn't handle kwargs, only args! # Pass a dict with all the args self.pool = multiprocessing.Pool(processes=processes, - initializer=_mp_init_function, - initargs=(init_args,)) + initializer=_mp_init_function, + initargs=(init_args,)) # Recommended time to sleep between polls for results self.SCAN_START_SLEEP_TIME = 0.001 @@ -205,20 +207,20 @@ def __init__(self, data_structure, processes, scan_function, init_args, def scan(self): """ Launch the child processes and scan all the files. """ - + logging.debug("########################################################") logging.debug("########################################################") - logging.debug("Starting scan in: " + str(self)) + logging.debug("Starting scan in: %s", str(self)) logging.debug("########################################################") logging.debug("########################################################") # Tests indicate that smaller amount of jobs per worker make all type # of scans faster jobs_per_worker = 5 - #jobs_per_worker = max(1, total_files // self.processes + # jobs_per_worker = max(1, total_files // self.processes self._results = self.pool.map_async(self.scan_function, self.list_files_to_scan, jobs_per_worker) - + # No more tasks to the pool, exit the processes once the tasks are done self.pool.close() @@ -259,7 +261,7 @@ def raise_child_exception(self, exception_tuple): def update_str_last_scanned(self): """ Updates the string that represents the last file scanned. 
""" - raise NotImplemented + raise NotImplementedError def sleep(self): """ Sleep waiting for results. @@ -271,9 +273,9 @@ def sleep(self): if not ((self.queries_without_results < self.MAX_QUERY_NUM) & (self.queries_without_results > self.MIN_QUERY_NUM)): # ... increase or decrease it to optimize queries - if (self.queries_without_results < self.MIN_QUERY_NUM): + if self.queries_without_results < self.MIN_QUERY_NUM: self.scan_sleep_time *= 0.5 - elif (self.queries_without_results > self.MAX_QUERY_NUM): + elif self.queries_without_results > self.MAX_QUERY_NUM: self.scan_sleep_time *= 2.0 # and don't go farther than max/min if self.scan_sleep_time > self.SCAN_MAX_SLEEP_TIME: @@ -285,9 +287,9 @@ def sleep(self): # Log how it's going logging.debug("") - logging.debug("Nº of queries without result: " + str(self.queries_without_results)) - logging.debug("Current sleep time: " + str(self.scan_sleep_time)) - logging.debug("Time between calls to sleep(): " + str(time() - self.last_time)) + logging.debug("Nº of queries without result: %s", str(self.queries_without_results)) + logging.debug("Current sleep time: %s", str(self.scan_sleep_time)) + logging.debug("Time between calls to sleep(): %s", str(time() - self.last_time)) self.last_time = time() # Sleep, let the other processes do their job @@ -335,6 +337,7 @@ def __len__(self): class AsyncDataScanner(AsyncScanner): """ Scan a DataFileSet and fill the data structure. """ + def __init__(self, data_structure, processes): scan_function = multiprocess_scan_data init_args = {} @@ -352,10 +355,10 @@ def update_str_last_scanned(self, data): class AsyncRegionsetScanner(AsyncScanner): """ Scan a RegionSet and fill the data structure. """ + def __init__(self, regionset, processes, entity_limit, remove_entities=False): - - assert(isinstance(regionset, world.DataSet)) + assert isinstance(regionset, world.DataSet) scan_function = multiprocess_scan_regionfile _mp_init_function = _mp_regionset_pool_init @@ -376,9 +379,10 @@ def update_str_last_scanned(self, r): self._str_last_scanned = self.data_structure.get_name() + ": " + r.filename -class AsyncWorldRegionScanner(object): +class AsyncWorldRegionScanner: """ Wrapper around the calls of AsyncScanner to scan all the regionsets of a world. """ + def __init__(self, world_obj, processes, entity_limit, remove_entities=False): @@ -426,7 +430,7 @@ def get_last_result(self): process. """ cr = self._current_regionset - + if cr is not None: if not cr.finished: r = cr.get_last_result() @@ -510,7 +514,7 @@ def console_scan_loop(scanners, scan_titles, verbose): scanner.sleep() result = scanner.get_last_result() if result: - logging.debug("\nNew result: {0}\n\nOneliner: {1}\n".format(result,result.oneliner_status)) + logging.debug("\nNew result: {0}\n\nOneliner: {1}\n".format(result, result.oneliner_status)) counter += 1 if not verbose: pbar.update(counter) @@ -525,11 +529,11 @@ def console_scan_loop(scanners, scan_titles, verbose): scanner.terminate() raise e except ChildProcessException as e: -# print "\n\nSomething went really wrong scanning a file." -# print ("This is probably a bug! If you have the time, please report " -# "it to the region-fixer github or in the region fixer post " -# "in minecraft forums") -# print e.printable_traceback + # print "\n\nSomething went really wrong scanning a file." + # print ("This is probably a bug! 
If you have the time, please report " + # "it to the region-fixer github or in the region fixer post " + # "in minecraft forums") + # print e.printable_traceback raise e @@ -549,9 +553,9 @@ def console_scan_world(world_obj, processes, entity_limit, remove_entities, print(("There are {0} region files, {1} player files and {2} data" " files in the world directory.").format( - w.get_number_regions(), - len(w.players) + len(w.old_players), - len(w.data_files))) + w.get_number_regions(), + len(w.players) + len(w.old_players), + len(w.data_files))) # check the level.dat print("\n{0:-^60}".format(' Checking level.dat ')) @@ -617,15 +621,15 @@ def scan_data(scanned_dat_file): else: _ = nbt.NBTFile(filename=s.path) s.status = world.DATAFILE_OK - except MalformedFileError as e: + except MalformedFileError: s.status = world.DATAFILE_UNREADABLE - except IOError as e: + except IOError: s.status = world.DATAFILE_UNREADABLE - except UnicodeDecodeError as e: + except UnicodeDecodeError: s.status = world.DATAFILE_UNREADABLE - except TypeError as e: + except TypeError: s.status = world.DATAFILE_UNREADABLE - + except: s.status = world.DATAFILE_UNREADABLE except_type, except_class, tb = sys.exc_info() @@ -691,18 +695,17 @@ def scan_region_file(scanned_regionfile_obj, entity_limit, delete_entities): if delete_entities: world.delete_entities(region_file, x, z) print(("Deleted {0} entities in chunk" - " ({1},{2}) of the region file: {3}").format( - c[TUPLE_NUM_ENTITIES], x, z, r.filename)) + " ({1},{2}) of the region file: {3}").format(c[TUPLE_NUM_ENTITIES], x, z, r.filename)) # entities removed, change chunk status to OK r[(x, z)] = (0, world.CHUNK_OK) else: # This stores all the entities in a file, # comes handy sometimes. - #~ pretty_tree = chunk['Level']['Entities'].pretty_tree() - #~ name = "{2}.chunk.{0}.{1}.txt".format(x,z,split(region_file.filename)[1]) - #~ archivo = open(name,'w') - #~ archivo.write(pretty_tree) + # ~ pretty_tree = chunk['Level']['Entities'].pretty_tree() + # ~ name = "{2}.chunk.{0}.{1}.txt".format(x,z,split(region_file.filename)[1]) + # ~ archivo = open(name,'w') + # ~ archivo.write(pretty_tree) pass elif c[TUPLE_STATUS] == world.CHUNK_CORRUPTED: pass @@ -718,11 +721,10 @@ def scan_region_file(scanned_regionfile_obj, entity_limit, delete_entities): # # TODO: Why? I don't remember why # TODO: Leave this to nbt, which code is much better than this - + metadata = region_file.metadata - sharing = [k for k in metadata if ( - metadata[k].status == region.STATUS_CHUNK_OVERLAPPING and - r[k][TUPLE_STATUS] == world.CHUNK_WRONG_LOCATED)] + sharing = [k for k in metadata if (metadata[k].status == region.STATUS_CHUNK_OVERLAPPING and + r[k][TUPLE_STATUS] == world.CHUNK_WRONG_LOCATED)] shared_counter = 0 for k in sharing: r[k] = (r[k][TUPLE_NUM_ENTITIES], world.CHUNK_SHARED_OFFSET) @@ -753,19 +755,19 @@ def scan_region_file(scanned_regionfile_obj, entity_limit, delete_entities): def scan_chunk(region_file, coords, global_coords, entity_limit): """ Scans a chunk returning its status and number of entities. 
- + Keywords arguments: region_file -- nbt.RegionFile object coords -- tuple containing the local (region) coordinates of the chunk global_coords -- tuple containing the global (world) coordinates of the chunk entity_limit -- the number of entities that is considered to be too many - + Return: chunk -- as a nbt file - (num_entities, status) -- tuple with the number of entities of the chunk and - the status described by the CHUNK_* variables in + (num_entities, status) -- tuple with the number of entities of the chunk and + the status described by the CHUNK_* variables in world.py - + If the chunk does not exist (is not yet created it returns None """ el = entity_limit @@ -782,7 +784,7 @@ def scan_chunk(region_file, coords, global_coords, entity_limit): else: # chunk ok status = world.CHUNK_OK - + except InconceivedChunk as e: # chunk not created chunk = None diff --git a/regionfixer_core/util.py b/regionfixer_core/util.py index f77276c..9e18ff5 100644 --- a/regionfixer_core/util.py +++ b/regionfixer_core/util.py @@ -22,11 +22,12 @@ # import platform -from os.path import join, split, exists, isfile +from os.path import join, exists, isfile import sys -from . import world import traceback +from . import world + def get_str_from_traceback(ty, value, tb): """ Return a string from a traceback + exception. """ @@ -36,8 +37,11 @@ def get_str_from_traceback(ty, value, tb): s += i return s + # Stolen from: # http://stackoverflow.com/questions/3041986/python-command-line-yes-no-input + + def query_yes_no(question, default="yes"): """Ask a yes/no question via raw_input() and return their answer. @@ -49,7 +53,8 @@ def query_yes_no(question, default="yes"): The "answer" return value is one of "yes" or "no". """ valid = {"yes": True, "y": True, "ye": True, - "no": False, "n": False} + "no": False, "n": False + } if default is None: prompt = " [y/n] " elif default == "yes": @@ -71,7 +76,7 @@ def query_yes_no(question, default="yes"): "(or 'y' or 'n').\n") -# stolen from minecraft overviewer +# stolen from minecraft overviewer # https://github.com/overviewer/Minecraft-Overviewer/ def is_bare_console(): """Returns true if Overviewer is running in a bare console in @@ -83,7 +88,7 @@ def is_bare_console(): import ctypes GetConsoleProcessList = ctypes.windll.kernel32.GetConsoleProcessList num = GetConsoleProcessList(ctypes.byref(ctypes.c_int(0)), ctypes.c_int(1)) - if (num == 1): + if num == 1: return True except Exception: @@ -91,7 +96,7 @@ def is_bare_console(): return False -def entitle(text, level = 0): +def entitle(text, level=0): """ Put the text in a title with lot's of hashes everywhere. 
""" t = '' if level == 0: @@ -128,24 +133,24 @@ def get_max_len(l): # get the total width of the table: ml_total = 0 for i in range(len(ml)): - ml_total += ml[i] + 2 # size of each word + 2 spaces - ml_total += 1 + 2# +1 for the separator | and +2 for the borders - text += "-"*ml_total + "\n" + ml_total += ml[i] + 2 # size of each word + 2 spaces + ml_total += 1 + 2 # +1 for the separator | and +2 for the borders + text += "-" * ml_total + "\n" # all the columns have the same number of rows row = len(columns[0]) for r in range(row): line = "|" # put all the elements in this row together with spaces for i in range(len(columns)): - line += "{0: ^{width}}".format(columns[i][r],width = ml[i] + 2) + line += "{0: ^{width}}".format(columns[i][r], width=ml[i] + 2) # add a separator for the first column if i == 0: line += "|" text += line + "|" + "\n" if r == 0: - text += "-"*ml_total + "\n" - text += "-"*ml_total + text += "-" * ml_total + "\n" + text += "-" * ml_total return text @@ -177,8 +182,8 @@ def parse_chunk_list(chunk_list, world_obj): def parse_paths(args): - """ Parse the list of args passed to region-fixer.py and returns a - RegionSet object with the list of regions and a list of World + """ Parse the list of args passed to region-fixer.py and returns a + RegionSet object with the list of regions and a list of World objects. """ # parese the list of region files and worlds paths world_list = [] @@ -187,7 +192,7 @@ def parse_paths(args): for arg in args: if arg[-4:] == ".mca": region_list.append(arg) - elif arg[-4:] == ".mcr": # ignore pre-anvil region files + elif arg[-4:] == ".mcr": # ignore pre-anvil region files if not warning: print("Warning: Region-Fixer only works with anvil format region files. Ignoring *.mcr files") warning = True @@ -209,13 +214,13 @@ def parse_paths(args): # init the world objects world_list = parse_world_list(world_list) - return world_list, world.RegionSet(region_list = region_list) + return world_list, world.RegionSet(region_list=region_list) def parse_world_list(world_path_list): """ Parses a world list checking if they exists and are a minecraft world folders. Returns a list of World objects. 
""" - + tmp = [] for d in world_path_list: if exists(d): diff --git a/regionfixer_core/world.py b/regionfixer_core/world.py index cf1170a..cad6063 100644 --- a/regionfixer_core/world.py +++ b/regionfixer_core/world.py @@ -35,7 +35,7 @@ # Constants: -# +# # -------------- # Chunk related: # -------------- @@ -48,7 +48,7 @@ CHUNK_SHARED_OFFSET = 4 CHUNK_MISSING_ENTITIES_TAG = 5 -# Chunk statuses +# Chunk statuses CHUNK_STATUSES = [CHUNK_NOT_CREATED, CHUNK_OK, CHUNK_CORRUPTED, @@ -71,21 +71,24 @@ CHUNK_WRONG_LOCATED: "Wrong located", CHUNK_TOO_MANY_ENTITIES: "Too many entities", CHUNK_SHARED_OFFSET: "Sharing offset", - CHUNK_MISSING_ENTITIES_TAG: "Missing Entities tag"} + CHUNK_MISSING_ENTITIES_TAG: "Missing Entities tag" + } # arguments used in the options CHUNK_PROBLEMS_ARGS = {CHUNK_CORRUPTED: 'corrupted', CHUNK_WRONG_LOCATED: 'wrong', CHUNK_TOO_MANY_ENTITIES: 'entities', CHUNK_SHARED_OFFSET: 'sharing', - CHUNK_MISSING_ENTITIES_TAG: 'miss_tag'} + CHUNK_MISSING_ENTITIES_TAG: 'miss_tag' + } # used in some places where there is less space CHUNK_PROBLEMS_ABBR = {CHUNK_CORRUPTED: 'c', CHUNK_WRONG_LOCATED: 'w', CHUNK_TOO_MANY_ENTITIES: 'tme', CHUNK_SHARED_OFFSET: 'so', - CHUNK_MISSING_ENTITIES_TAG: 'mt'} + CHUNK_MISSING_ENTITIES_TAG: 'mt' + } # Dictionary with possible solutions for the chunks problems, # used to create options dynamically @@ -95,10 +98,11 @@ CHUNK_SOLUTION_REMOVE_ENTITIES = 53 CHUNK_PROBLEMS_SOLUTIONS = {CHUNK_CORRUPTED: [CHUNK_SOLUTION_REMOVE, CHUNK_SOLUTION_REPLACE], - CHUNK_WRONG_LOCATED: [CHUNK_SOLUTION_REMOVE, CHUNK_SOLUTION_REPLACE], - CHUNK_TOO_MANY_ENTITIES: [CHUNK_SOLUTION_REMOVE_ENTITIES], - CHUNK_SHARED_OFFSET: [CHUNK_SOLUTION_REMOVE, CHUNK_SOLUTION_REPLACE], - CHUNK_MISSING_ENTITIES_TAG: [CHUNK_SOLUTION_REMOVE, CHUNK_SOLUTION_REPLACE]} + CHUNK_WRONG_LOCATED: [CHUNK_SOLUTION_REMOVE, CHUNK_SOLUTION_REPLACE], + CHUNK_TOO_MANY_ENTITIES: [CHUNK_SOLUTION_REMOVE_ENTITIES], + CHUNK_SHARED_OFFSET: [CHUNK_SOLUTION_REMOVE, CHUNK_SOLUTION_REPLACE], + CHUNK_MISSING_ENTITIES_TAG: [CHUNK_SOLUTION_REMOVE, CHUNK_SOLUTION_REPLACE] + } # list with problem, status-text, problem arg tuples CHUNK_PROBLEMS_ITERATOR = [] @@ -111,7 +115,6 @@ TUPLE_NUM_ENTITIES = 0 TUPLE_STATUS = 1 - # --------------- # Region related: # --------------- @@ -133,7 +136,8 @@ REGION_UNREADABLE: "Unreadable IOError", # This status differentiates IOError from a file that you don't have permission to access # TODO: It would be better to open region files only in write mode when needed - REGION_UNREADABLE_PERMISSION_ERROR: "Permission error"} + REGION_UNREADABLE_PERMISSION_ERROR: "Permission error" + } # Status that are considered problems REGION_PROBLEMS = [REGION_TOO_SMALL, @@ -143,12 +147,14 @@ # arguments used in the options REGION_PROBLEMS_ARGS = {REGION_TOO_SMALL: 'too_small', REGION_UNREADABLE: 'unreadable', - REGION_UNREADABLE_PERMISSION_ERROR: 'permission_error'} + REGION_UNREADABLE_PERMISSION_ERROR: 'permission_error' + } # used in some places where there is less space REGION_PROBLEMS_ABBR = {REGION_TOO_SMALL: 'ts', REGION_UNREADABLE: 'ur', - REGION_UNREADABLE_PERMISSION_ERROR: 'pe'} + REGION_UNREADABLE_PERMISSION_ERROR: 'pe' + } # Dictionary with possible solutions for the region problems, # used to create options dynamically @@ -157,8 +163,8 @@ REGION_SOLUTION_REPLACE = 152 REGION_PROBLEMS_SOLUTIONS = {REGION_TOO_SMALL: [REGION_SOLUTION_REMOVE, REGION_SOLUTION_REPLACE], - REGION_UNREADABLE: [REGION_SOLUTION_REMOVE, REGION_SOLUTION_REPLACE]} - + REGION_UNREADABLE: [REGION_SOLUTION_REMOVE, 
REGION_SOLUTION_REPLACE] + } # list with problem, status-text, problem arg tuples REGION_PROBLEMS_ITERATOR = [] @@ -179,32 +185,34 @@ DATAFILE_OK = 200 DATAFILE_UNREADABLE = 201 - -# Data files statuses +# Data files statuses DATAFILE_STATUSES = [DATAFILE_OK, - DATAFILE_UNREADABLE] + DATAFILE_UNREADABLE] # Status that are considered problems DATAFILE_PROBLEMS = [DATAFILE_UNREADABLE] # Text describing each chunk status DATAFILE_STATUS_TEXT = {DATAFILE_OK: "OK", - DATAFILE_UNREADABLE: "The data file cannot be read"} + DATAFILE_UNREADABLE: "The data file cannot be read" + } # arguments used in the options DATAFILE_PROBLEMS_ARGS = {DATAFILE_OK: 'OK', - DATAFILE_UNREADABLE: 'unreadable'} + DATAFILE_UNREADABLE: 'unreadable' + } # used in some places where there is less space DATAFILE_PROBLEM_ABBR = {DATAFILE_OK: 'ok', - DATAFILE_UNREADABLE: 'ur'} + DATAFILE_UNREADABLE: 'ur' + } # Dictionary with possible solutions for the chunks problems, # used to create options dynamically # The possible solutions right now are: DATAFILE_SOLUTION_REMOVE = 251 -DATAFILE_PROBLEMS_SOLUTIONS = {DATAFILE_UNREADABLE:[DATAFILE_SOLUTION_REMOVE]} +DATAFILE_PROBLEMS_SOLUTIONS = {DATAFILE_UNREADABLE: [DATAFILE_SOLUTION_REMOVE]} # list with problem, status-text, problem arg tuples DATAFILE_PROBLEMS_ITERATOR = [] @@ -219,20 +227,20 @@ CHUNK_STATUS_TEXT[problem], CHUNK_PROBLEMS_ARGS[problem])) - - # Dimension names: DIMENSION_NAMES = {"region": "Overworld", "DIM1": "The End", - "DIM-1": "Nether"} + "DIM-1": "Nether" + } class InvalidFileName(IOError): pass -class ScannedDataFile(object): +class ScannedDataFile: """ Stores all the information of a scanned data file. """ + def __init__(self, path=None): super().__init__() self.path = path @@ -251,18 +259,19 @@ def __str__(self): @property def oneliner_status(self): """ One line describing the status of the file. """ - return "File: \"" + self.filename + "\"; status: " + DATAFILE_STATUS_TEXT[self.status] + return "File: \"" + self.filename + "\"; status: " + DATAFILE_STATUS_TEXT[self.status] -class ScannedChunk(object): +class ScannedChunk: """ Stores all the results of the scan. Not used at the moment, it prette nice but takes an huge amount of memory. """ - # WARNING: This is here so I remember to not use objects as ScannedChunk - # They take too much memory. + # WARNING: This is here so I remember to not use objects as ScannedChunk + # They take too much memory. -class ScannedRegionFile(object): +class ScannedRegionFile: """ Stores all the scan information for a region file """ + def __init__(self, path, time=None): # general region file info self.path = path @@ -286,7 +295,7 @@ def __init__(self, path, time=None): # The status of the region file. self.status = None - + # has the file been scanned yet? self.scanned = False @@ -295,7 +304,7 @@ def oneliner_status(self): """ On line description of the status of the region file. """ if self.scanned: status = self.status - if status == REGION_OK: # summary with all found in scan + if status == REGION_OK: # summary with all found in scan stats = "" for s in CHUNK_PROBLEMS: stats += "{0}:{1}, ".format(CHUNK_PROBLEMS_ABBR[s], self.count_chunks(s)) @@ -342,13 +351,14 @@ def get_path(self): def count_chunks(self, problem=None): """ Counts chunks in the region file with the given problem. - + If problem is omitted or None, counts all the chunks. Returns an integer with the counter. 
""" if problem == None: c = 0 - for s in CHUNK_STATUSES: c += self._counts[s] + for s in CHUNK_STATUSES: + c += self._counts[s] else: c = self._counts[problem] @@ -381,8 +391,8 @@ def get_coords(self): return coordX, coordZ -# TODO TODO TODO: This is dangerous! Running the method remove_problematic_chunks -# without a problem will remove all the chunks in the region file!! + # TODO TODO TODO: This is dangerous! Running the method remove_problematic_chunks + # without a problem will remove all the chunks in the region file!! def list_chunks(self, status=None): """ Returns a list of all the ScannedChunk objects of the chunks with the given status, if no status is omitted or None, @@ -406,7 +416,7 @@ def summary(self): text += " |- This region has status: {0}.\n".format(REGION_STATUS_TEXT[self.status]) else: for c in list(self.keys()): - if self[c][TUPLE_STATUS] not in CHUNK_PROBLEMS: + if self[c][TUPLE_STATUS] not in CHUNK_PROBLEMS: continue status = self[c][TUPLE_STATUS] h_coords = c @@ -434,17 +444,17 @@ def remove_problematic_chunks(self, problem): counter += 1 # create the new status tuple # (num_entities, chunk status) - self[local_coords] = (0 , CHUNK_NOT_CREATED) + self[local_coords] = (0, CHUNK_NOT_CREATED) return counter def fix_problematic_chunks(self, problem): """ This fixes problems in chunks that can be somehow easy to fix. - + Right now it only fixes chunks missing the TAG_List Entities. """ # TODO: it seems having the Entities TAG missing is just a little part. Some of the - # chunks have like 3 or 4 tag missing from the NBT structure. + # chunks have like 3 or 4 tag missing from the NBT structure. counter = 0 bad_chunks = self.list_chunks(problem) for c in bad_chunks: @@ -455,15 +465,14 @@ def fix_problematic_chunks(self, problem): # The arguments to create the empty TAG_List have been somehow extracted by comparing # the tag list from a healthy chunk with the one created by nbt chunk['Level']['Entities'] = TAG_List(name='Entities', type=nbt._TAG_End) - region_file.write_chunk(local_coords[0],local_coords[1], chunk) + region_file.write_chunk(local_coords[0], local_coords[1], chunk) counter += 1 # create the new status tuple # (num_entities, chunk status) - self[local_coords] = (0 , CHUNK_NOT_CREATED) + self[local_coords] = (0, CHUNK_NOT_CREATED) return counter - def remove_entities(self): """ Removes all the entities in chunks with the problematic CHUNK_TOO_MANY_ENTITIES that are in this region file. @@ -478,7 +487,7 @@ def remove_entities(self): counter += self.remove_chunk_entities(*local_coords) # create new status tuple: # (num_entities, chunk status) - self[local_coords] = (0 , CHUNK_OK) + self[local_coords] = (0, CHUNK_OK) return counter def remove_chunk_entities(self, x, z): @@ -486,9 +495,9 @@ def remove_chunk_entities(self, x, z): the entities in it. 
Return an integer with the number of entities removed""" region_file = region.RegionFile(self.path) - chunk = region_file.get_chunk(x,z) + chunk = region_file.get_chunk(x, z) counter = len(chunk['Level']['Entities']) - empty_tag_list = nbt.TAG_List(nbt.TAG_Byte,'','Entities') + empty_tag_list = nbt.TAG_List(nbt.TAG_Byte, '', 'Entities') chunk['Level']['Entities'] = empty_tag_list region_file.write_chunk(x, z, chunk) @@ -500,7 +509,7 @@ def rescan_entities(self, options): for c in list(self.keys()): # for safety reasons use a temporary list to generate the # new tuple - t = [0,0] + t = [0, 0] if self[c][TUPLE_STATUS] in (CHUNK_TOO_MANY_ENTITIES, CHUNK_OK): # only touch the ok chunks and the too many entities chunk if self[c][TUPLE_NUM_ENTITIES] > options.entity_limit: @@ -516,16 +525,16 @@ def rescan_entities(self, options): self[c] = tuple(t) -class DataSet(object): - """ Stores data items to be scanned by AsyncScanner in scan.py. +class DataSet: + """ Stores data items to be scanned by AsyncScanner in scan.py. typevalue is the type of the class to store in the set. When setting it will be asserted if it is of that type - The data should be in a dictionary and should be accessible through the + The data should be in a dictionary and should be accessible through the methods __getitem__, __setitem__. The methods, _get_list, __len__ are also used. - _replace_in_data_structure should be created because during the scan the + _replace_in_data_structure should be created because during the scan the different processes create copies of the original data, so replacing it in the original data set is mandatory. @@ -551,35 +560,35 @@ def __delitem__(self, key): del self._set[key] def __setitem__(self, key, value): - assert(self._typevalue == type(value)) + assert self._typevalue == type(value) self._set[key] = value self._update_counts(value) def __len__(self): return len(self._set) - + # mandatory implementation methods def summary(self): """ Return a summary of problems found in this set. """ - raise NotImplemented + raise NotImplementedError @property def has_problems(self): """ Returns True if the scanned set has problems. """ - raise NotImplemented + raise NotImplementedError def _replace_in_data_structure(self, data, key): """ For multiprocessing. Replaces the data in the set with the new data. - + Child scanning processes make copies of the ScannedRegion/DataFile when they scan them. The AsyncScanner will call this function so the ScannedRegion/DataFile is stored in the set properly. """ - raise NotImplemented + raise NotImplementedError def _update_counts(self, s): """ This functions is used by __set__ to update the counters. 
""" - raise NotImplemented + raise NotImplementedError class DataFileSet(DataSet): @@ -587,6 +596,7 @@ class DataFileSet(DataSet): DataSets are scanned using scan.AsyncScanner """ + def __init__(self, path, title, *args, **kwargs): DataSet.__init__(self, ScannedDataFile, *args, **kwargs) d = self._set @@ -597,7 +607,7 @@ def __init__(self, path, title, *args, **kwargs): for path in data_files_path: d[path] = ScannedDataFile(path) - + # stores the counts of files self._counts = {} for s in DATAFILE_STATUSES: @@ -615,7 +625,7 @@ def _replace_in_data_structure(self, data): self._set[data.path] = data def _update_counts(self, s): - assert(type(s) == self._typevalue) + assert isinstance(s) == self._typevalue self._counts[s.status] += 1 def count_datafiles(self, status): @@ -635,6 +645,7 @@ class RegionSet(DataSet): """Stores an arbitrary number of region files and the scan results. Inits with a list of region files. The regions dict is filled while scanning with ScannedRegionFiles and ScannedChunks.""" + def __init__(self, regionset_path=None, region_list=[]): DataSet.__init__(self, ScannedRegionFile) if regionset_path: @@ -651,22 +662,22 @@ def __init__(self, regionset_path=None, region_list=[]): except InvalidFileName as e: print("Warning: The file {0} is not a valid name for a region. I'll skip it.".format(path)) - + # region and chunk counters with all the data from the scan self._region_counters = {} for status in REGION_STATUSES: self._region_counters[status] = 0 - + self._chunk_counters = {} for status in CHUNK_STATUSES: self._chunk_counters[status] = 0 - + # has this regionset been scanned? self.scanned = False def get_name(self): """ Return a string with a representative name for the regionset - + If the regionset is a dimension its name is returned, if not the directory and if there is no name or "" if there is nothing to fall back """ @@ -683,8 +694,8 @@ def get_name(self): def _update_counts(self, scanned_regionfile): """ Updates the counters of the regionset with the new regionfile. """ - assert(type(scanned_regionfile) == ScannedRegionFile) - + assert isinstance(scanned_regionfile) == ScannedRegionFile + self._region_counters[scanned_regionfile.status] += 1 for status in CHUNK_STATUSES: @@ -721,11 +732,11 @@ def has_problems(self): for s in REGION_PROBLEMS: if self.count_regions(s): return True - + for s in CHUNK_PROBLEMS: if self.count_chunks(s): return True - + return False def keys(self): @@ -736,7 +747,7 @@ def list_regions(self, status=None): in the RegionSet with status. If status = None it returns all the objects.""" - if status == None: + if status is None: return list(self._set.values()) t = [] for coords in list(self._set.keys()): @@ -749,23 +760,22 @@ def count_regions(self, status=None): """ Return the number of region files with status. If none returns the number of region files in this regionset. 
Possible status are: empty, too_small """ - - #======================================================================= + + # ======================================================================= # counter = 0 # for r in list(self.keys()): # if status == self[r].status: # counter += 1 # elif status == None: # counter += 1 - #======================================================================= + # ======================================================================= counter = 0 - if status == None: + if status is None: for s in REGION_STATUSES: counter += self._region_counters[s] - else: + else: counter = self._region_counters[status] - return counter def count_chunks(self, problem=None): @@ -773,12 +783,12 @@ def count_chunks(self, problem=None): problem is None returns the number of chunks. """ c = 0 - if problem == None: + if problem is None: for s in CHUNK_STATUSES: c += self._chunk_counters[s] else: c = self._chunk_counters[problem] - + return c def list_chunks(self, status=None): @@ -862,14 +872,13 @@ def rescan_entities(self, options): for r in list(self.keys()): self[r].rescan_entities(options) - def generate_report(self, standalone): """ Generates a report with the results of the scan. The report - will include information about chunks and regions. - - If standalone is true it will return a string of text with the + will include information about chunks and regions. + + If standalone is true it will return a string of text with the results of the scan. - + If standalone is false it will return a dictionary with all the counts of chunks and regions, to use the dictionary use the variables defined in the start of this file. The variables are named CHUNK_* @@ -883,7 +892,7 @@ def generate_report(self, standalone): if chunk_counts[p] != 0: has_chunk_problems = True chunk_counts['TOTAL'] = self.count_chunks() - + # collect region data region_counts = {} has_region_problems = False @@ -892,7 +901,7 @@ def generate_report(self, standalone): if region_counts[p] != 0: has_region_problems = True region_counts['TOTAL'] = self.count_regions() - + # create a text string with a report of all found if standalone: text = "" @@ -901,10 +910,10 @@ def generate_report(self, standalone): text += "\nChunk problems:\n" if has_chunk_problems: table_data = [] - table_data.append(['Problem','Count']) + table_data.append(['Problem', 'Count']) for p in CHUNK_PROBLEMS: if chunk_counts[p] is not 0: - table_data.append([CHUNK_STATUS_TEXT[p],chunk_counts[p]]) + table_data.append([CHUNK_STATUS_TEXT[p], chunk_counts[p]]) table_data.append(['Total', chunk_counts['TOTAL']]) text += table(table_data) else: @@ -914,19 +923,20 @@ def generate_report(self, standalone): text += "\n\nRegion problems:\n" if has_region_problems: table_data = [] - table_data.append(['Problem','Count']) + table_data.append(['Problem', 'Count']) for p in REGION_PROBLEMS: if region_counts[p] is not 0: - table_data.append([REGION_STATUS_TEXT[p],region_counts[p]]) + table_data.append([REGION_STATUS_TEXT[p], region_counts[p]]) table_data.append(['Total', region_counts['TOTAL']]) text += table(table_data) - + else: text += "No problems found." return text else: return chunk_counts, region_counts + def remove_problematic_regions(self, problem): """ Removes all the regions files with the given problem. 
This is NOT the same as removing chunks, this WILL DELETE @@ -938,7 +948,7 @@ def remove_problematic_regions(self, problem): return counter -class World(object): +class World: """ This class stores all the info needed of a world, and once scanned, stores all the problems found. It also has all the tools needed to modify the world.""" @@ -1017,15 +1027,15 @@ def has_problems(self): if self.scanned_level.status in DATAFILE_PROBLEMS: return True - + for d in self.datafilesets: if d.has_problems: return True - + for r in self.regionsets: if r.has_problems: return True - + return False def get_number_regions(self): @@ -1067,7 +1077,6 @@ def summary(self): # chunk info chunk_info = "" for regionset in self.regionsets: - title = regionset.get_name() final += "\n" + title + ":\n" @@ -1075,8 +1084,6 @@ def summary(self): text = regionset.summary() chunk_info += text if text else "" final += chunk_info if chunk_info else "All the chunks are ok." - - return final @@ -1086,12 +1093,12 @@ def get_name(self): if self.name: return self.name else: - n = split(self.path) + n = split(self.path) if n[1] == '': n = split(n[0])[1] return n - def count_regions(self, status = None): + def count_regions(self, status=None): """ Returns a number with the count of region files with status. """ counter = 0 @@ -1099,7 +1106,7 @@ def count_regions(self, status = None): counter += r.count_regions(status) return counter - def count_chunks(self, status = None): + def count_chunks(self, status=None): """ Counts problems """ counter = 0 for r in self.regionsets: @@ -1147,7 +1154,7 @@ def replace_problematic_chunks(self, backup_worlds, problem, entity_limit, delet # The backups world doesn't change, check if the # region_file is already scanned: try: - coords = get_region_coords(split(backup_region_path)[1]) + coords = get_region_coords(split(backup_region_path)[1]) r = scanned_regions[coords] except KeyError: from .scan import scan_region_file @@ -1166,16 +1173,16 @@ def replace_problematic_chunks(self, backup_worlds, problem, entity_limit, delet if status == CHUNK_OK: backup_region_file = region.RegionFile(backup_region_path) - working_chunk = backup_region_file.get_chunk(local_coords[0],local_coords[1]) + working_chunk = backup_region_file.get_chunk(local_coords[0], local_coords[1]) print("Replacing...") # the chunk exists and is healthy, fix it! tofix_region_file = region.RegionFile(tofix_region_path) # first unlink the chunk, second write the chunk. - # unlinking the chunk is more secure and the only way to replace chunks with + # unlinking the chunk is more secure and the only way to replace chunks with # a shared offset withou overwriting the good chunk tofix_region_file.unlink_chunk(*local_coords) - tofix_region_file.write_chunk(local_coords[0], local_coords[1],working_chunk) + tofix_region_file.write_chunk(local_coords[0], local_coords[1], working_chunk) counter += 1 print("Chunk replaced using backup dir: {0}".format(backup.path)) @@ -1188,7 +1195,6 @@ def replace_problematic_chunks(self, backup_worlds, problem, entity_limit, delet return counter - def remove_problematic_chunks(self, problem): """ Removes all the chunks with the given problem. """ counter = 0 @@ -1275,12 +1281,12 @@ def rescan_entities(self, options): def generate_report(self, standalone): """ Generates a report with the results of the scan. The report - will include information about data structures (.dat files), - player files, chunks and regions. 
- - If standalone is true it will return a string of text with the + will include information about data structures (.dat files), + player files, chunks and regions. + + If standalone is true it will return a string of text with the results of the scan. - + If standalone is false it will return a dictionary with all the counts, to use the dictionary use the variables defined in the start of this file. The variables are named CHUNK_*. Note that right now doesn't return @@ -1295,7 +1301,7 @@ def generate_report(self, standalone): if chunk_counts[p] != 0: has_chunk_problems = True chunk_counts['TOTAL'] = self.count_chunks() - + # collect region data region_counts = {} has_region_problems = False @@ -1304,7 +1310,7 @@ def generate_report(self, standalone): if region_counts[p] != 0: has_region_problems = True region_counts['TOTAL'] = self.count_regions() - + # create a text string with a report of all found if standalone: text = "" @@ -1334,10 +1340,10 @@ def generate_report(self, standalone): text += "\nChunk problems:\n" if has_chunk_problems: table_data = [] - table_data.append(['Problem','Count']) + table_data.append(['Problem', 'Count']) for p in CHUNK_PROBLEMS: if chunk_counts[p] is not 0: - table_data.append([CHUNK_STATUS_TEXT[p],chunk_counts[p]]) + table_data.append([CHUNK_STATUS_TEXT[p], chunk_counts[p]]) table_data.append(['Total', chunk_counts['TOTAL']]) text += table(table_data) else: @@ -1347,13 +1353,13 @@ def generate_report(self, standalone): text += "\n\nRegion problems:\n" if has_region_problems: table_data = [] - table_data.append(['Problem','Count']) + table_data.append(['Problem', 'Count']) for p in REGION_PROBLEMS: if region_counts[p] is not 0: - table_data.append([REGION_STATUS_TEXT[p],region_counts[p]]) + table_data.append([REGION_STATUS_TEXT[p], region_counts[p]]) table_data.append(['Total', region_counts['TOTAL']]) text += table(table_data) - + else: text += "No problems found." @@ -1362,14 +1368,13 @@ def generate_report(self, standalone): return chunk_counts, region_counts - def delete_entities(region_file, x, z): """ This function is used while scanning the world in scan.py! Takes a region file obj and a local chunks coords and deletes all the entities in that chunk. """ - chunk = region_file.get_chunk(x,z) + chunk = region_file.get_chunk(x, z) counter = len(chunk['Level']['Entities']) - empty_tag_list = nbt.TAG_List(nbt.TAG_Byte,'','Entities') + empty_tag_list = nbt.TAG_List(nbt.TAG_Byte, '', 'Entities') chunk['Level']['Entities'] = empty_tag_list region_file.write_chunk(x, z, chunk) @@ -1380,6 +1385,7 @@ def _get_local_chunk_coords(chunkx, chunkz): """ Takes the chunk global coords and returns the local coords """ return chunkx % 32, chunkz % 32 + def get_chunk_region(chunkX, chunkZ): """ Returns the name of the region file given global chunk coords """ @@ -1391,6 +1397,7 @@ def get_chunk_region(chunkX, chunkZ): return region_name + def get_chunk_data_coords(nbt_file): """ Gets the coords stored in the NBT structure of the chunk. @@ -1404,6 +1411,7 @@ def get_chunk_data_coords(nbt_file): return coordX, coordZ + def get_region_coords(filename): """ Splits the region filename (full pathname or just filename) and returns his region X and Z coordinates as integers. """ @@ -1414,14 +1422,15 @@ def get_region_coords(filename): return coordX, coordZ + def get_global_chunk_coords(region_name, chunkX, chunkZ): """ Takes the region filename and the chunk local - coords and returns the global chunkcoords as integerss. 
This - version does exactly the same as the method in + coords and returns the global chunkcoords as integerss. This + version does exactly the same as the method in ScannedRegionFile. """ regionX, regionZ = get_region_coords(region_name) - chunkX += regionX*32 - chunkZ += regionZ*32 + chunkX += regionX * 32 + chunkZ += regionZ * 32 return chunkX, chunkZ From 833d6d714bacbb410924959f7da36679debc12f2 Mon Sep 17 00:00:00 2001 From: Tobias Lindenberg Date: Sat, 23 May 2020 20:35:58 +0200 Subject: [PATCH 100/151] type to isinstance --- regionfixer_core/world.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/regionfixer_core/world.py b/regionfixer_core/world.py index cad6063..6f6ec80 100644 --- a/regionfixer_core/world.py +++ b/regionfixer_core/world.py @@ -625,7 +625,7 @@ def _replace_in_data_structure(self, data): self._set[data.path] = data def _update_counts(self, s): - assert isinstance(s) == self._typevalue + assert isinstance(s, self._typevalue) self._counts[s.status] += 1 def count_datafiles(self, status): @@ -694,7 +694,7 @@ def get_name(self): def _update_counts(self, scanned_regionfile): """ Updates the counters of the regionset with the new regionfile. """ - assert isinstance(scanned_regionfile) == ScannedRegionFile + assert isinstance(scanned_regionfile, ScannedRegionFile) self._region_counters[scanned_regionfile.status] += 1 From edd8a73f0cd48b70ce023eb469cdad5407cc5f89 Mon Sep 17 00:00:00 2001 From: Alejandro Aguilera Date: Fri, 10 Jul 2020 23:24:17 +0200 Subject: [PATCH 101/151] Fix some error msgs and update return codes to use argparse error return value of 2 when sys.exit --- regionfixer.py | 18 +++++++++++------- 1 file changed, 11 insertions(+), 7 deletions(-) diff --git a/regionfixer.py b/regionfixer.py index a1eeac3..002b513 100644 --- a/regionfixer.py +++ b/regionfixer.py @@ -45,8 +45,8 @@ RV_OK = 0 # world scanned and no problems found RV_CRASH = 1 # crash or end unexpectedly -RV_NOTHING_TO_SCAN = 2 # no files/worlds to scan -RV_WRONG_COMMAND = 20 # the command line used is wrong and region fixer didn't execute +RV_NOTHING_TO_SCAN = 20 # no files/worlds to scan +# RV_WRONG_COMMAND = 2 # the command line used is wrong and region fixer didn't execute. argparse uses this value by default RV_BAD_WORLD = 3 # scan completed successfully but problems have been found in the scan @@ -144,6 +144,7 @@ def main(): usage=usage, epilog=epilog) + parser.add_argument('--backups', '-b', help=('List of backup directories of the Minecraft world ' @@ -338,15 +339,13 @@ def main(): getpass("Press enter to continue:") return RV_CRASH - # Args are world_paths and region files - if not args: - parser.error('Error: No world paths or region files specified! Use ' - '--help for a complete list of options.') world_list, regionset = parse_paths(args.paths) + # Check if there are valid worlds to scan if not (world_list or regionset): - print("Error: No worlds or region files to scan!") + print('Error: No worlds or region files to scan! 
Use ' + '--help for a complete list of options.') return RV_NOTHING_TO_SCAN # Check basic options compatibilities @@ -555,6 +554,11 @@ def main(): freeze_support() value = main() + except SystemExit as e: + # sys.exit() was called within the program + had_exception = False + value = e.code + except ChildProcessException as e: had_exception = True print(ERROR_MSG) From 3d62319356ba9a27c7af8aec4d68849838bddcc2 Mon Sep 17 00:00:00 2001 From: Alejandro Aguilera Date: Mon, 6 Jul 2020 23:54:12 +0200 Subject: [PATCH 102/151] Fix circular dependent imports that gives problems in only some systems. --- regionfixer.py | 6 +-- regionfixer_core/util.py | 89 -------------------------------------- regionfixer_core/world.py | 91 ++++++++++++++++++++++++++++++++++++++- 3 files changed, 92 insertions(+), 94 deletions(-) diff --git a/regionfixer.py b/regionfixer.py index 002b513..5be0bd2 100644 --- a/regionfixer.py +++ b/regionfixer.py @@ -31,10 +31,8 @@ console_scan_regionset, ChildProcessException) from regionfixer_core.interactive import InteractiveLoop -from regionfixer_core.util import (entitle, - is_bare_console, - parse_paths, - parse_backup_list) +from regionfixer_core.util import entitle, is_bare_console +from regionfixer_core.world import parse_paths, parse_backup_list from regionfixer_core.version import version_string from regionfixer_core.bug_reporter import BugReporter from regionfixer_core.world import CHUNK_MISSING_ENTITIES_TAG diff --git a/regionfixer_core/util.py b/regionfixer_core/util.py index 9e18ff5..6e72b90 100644 --- a/regionfixer_core/util.py +++ b/regionfixer_core/util.py @@ -22,7 +22,6 @@ # import platform -from os.path import join, exists, isfile import sys import traceback @@ -153,91 +152,3 @@ def get_max_len(l): text += "-" * ml_total return text - -def parse_chunk_list(chunk_list, world_obj): - """ Generate a list of chunks to use with world.delete_chunk_list. - - It takes a list of global chunk coordinates and generates a list of - tuples containing: - - (region fullpath, chunk X, chunk Z) - - """ - # this is not used right now - parsed_list = [] - for line in chunk_list: - try: - chunk = eval(line) - except: - print("The chunk {0} is not valid.".format(line)) - continue - region_name = world.get_chunk_region(chunk[0], chunk[1]) - fullpath = join(world_obj.world_path, "region", region_name) - if fullpath in world_obj.all_mca_files: - parsed_list.append((fullpath, chunk[0], chunk[1])) - else: - print("The chunk {0} should be in the region file {1} and this region files doesn't extist!".format(chunk, fullpath)) - - return parsed_list - - -def parse_paths(args): - """ Parse the list of args passed to region-fixer.py and returns a - RegionSet object with the list of regions and a list of World - objects. """ - # parese the list of region files and worlds paths - world_list = [] - region_list = [] - warning = False - for arg in args: - if arg[-4:] == ".mca": - region_list.append(arg) - elif arg[-4:] == ".mcr": # ignore pre-anvil region files - if not warning: - print("Warning: Region-Fixer only works with anvil format region files. Ignoring *.mcr files") - warning = True - else: - world_list.append(arg) - - # check if they exist - region_list_tmp = [] - for f in region_list: - if exists(f): - if isfile(f): - region_list_tmp.append(f) - else: - print("Warning: \"{0}\" is not a file. Skipping it and scanning the rest.".format(f)) - else: - print("Warning: The region file {0} doesn't exists. 
Skipping it and scanning the rest.".format(f)) - region_list = region_list_tmp - - # init the world objects - world_list = parse_world_list(world_list) - - return world_list, world.RegionSet(region_list=region_list) - - -def parse_world_list(world_path_list): - """ Parses a world list checking if they exists and are a minecraft - world folders. Returns a list of World objects. """ - - tmp = [] - for d in world_path_list: - if exists(d): - w = world.World(d) - if w.isworld: - tmp.append(w) - else: - print("Warning: The folder {0} doesn't look like a minecraft world. I'll skip it.".format(d)) - else: - print("Warning: The folder {0} doesn't exist. I'll skip it.".format(d)) - return tmp - - -def parse_backup_list(world_backup_dirs): - """ Generates a list with the input of backup dirs containing the - world objects of valid world directories.""" - - directories = world_backup_dirs.split(',') - backup_worlds = parse_world_list(directories) - return backup_worlds diff --git a/regionfixer_core/world.py b/regionfixer_core/world.py index 6f6ec80..374f050 100644 --- a/regionfixer_core/world.py +++ b/regionfixer_core/world.py @@ -26,7 +26,7 @@ from .util import table from glob import glob -from os.path import join, split, exists +from os.path import join, split, exists, isfile from os import remove from shutil import copy @@ -234,6 +234,95 @@ } +def parse_chunk_list(chunk_list, world_obj): + """ Generate a list of chunks to use with world.delete_chunk_list. + + It takes a list of global chunk coordinates and generates a list of + tuples containing: + + (region fullpath, chunk X, chunk Z) + + """ + # this is not used right now + parsed_list = [] + for line in chunk_list: + try: + chunk = eval(line) + except: + print("The chunk {0} is not valid.".format(line)) + continue + region_name = get_chunk_region(chunk[0], chunk[1]) + fullpath = join(world_obj.world_path, "region", region_name) + if fullpath in world_obj.all_mca_files: + parsed_list.append((fullpath, chunk[0], chunk[1])) + else: + print("The chunk {0} should be in the region file {1} and this region files doesn't extist!".format(chunk, fullpath)) + + return parsed_list + + +def parse_paths(args): + """ Parse the list of args passed to region-fixer.py and returns a + RegionSet object with the list of regions and a list of World + objects. """ + # parese the list of region files and worlds paths + world_list = [] + region_list = [] + warning = False + for arg in args: + if arg[-4:] == ".mca": + region_list.append(arg) + elif arg[-4:] == ".mcr": # ignore pre-anvil region files + if not warning: + print("Warning: Region-Fixer only works with anvil format region files. Ignoring *.mcr files") + warning = True + else: + world_list.append(arg) + + # check if they exist + region_list_tmp = [] + for f in region_list: + if exists(f): + if isfile(f): + region_list_tmp.append(f) + else: + print("Warning: \"{0}\" is not a file. Skipping it and scanning the rest.".format(f)) + else: + print("Warning: The region file {0} doesn't exists. Skipping it and scanning the rest.".format(f)) + region_list = region_list_tmp + + # init the world objects + world_list = parse_world_list(world_list) + + return world_list, RegionSet(region_list = region_list) + + +def parse_world_list(world_path_list): + """ Parses a world list checking if they exists and are a minecraft + world folders. Returns a list of World objects. 
""" + + tmp = [] + for d in world_path_list: + if exists(d): + w = World(d) + if w.isworld: + tmp.append(w) + else: + print("Warning: The folder {0} doesn't look like a minecraft world. I'll skip it.".format(d)) + else: + print("Warning: The folder {0} doesn't exist. I'll skip it.".format(d)) + return tmp + + +def parse_backup_list(world_backup_dirs): + """ Generates a list with the input of backup dirs containing the + world objects of valid world directories.""" + + directories = world_backup_dirs.split(',') + backup_worlds = parse_world_list(directories) + return backup_worlds + + class InvalidFileName(IOError): pass From e7cbfa713bdbf2a66010ea9a11e26fa74c199ac4 Mon Sep 17 00:00:00 2001 From: Alejandro Aguilera Date: Wed, 15 Jul 2020 04:11:03 +0200 Subject: [PATCH 103/151] Add option to fix wrong located chunks. --- regionfixer.py | 34 ++++++++++++----- regionfixer_core/world.py | 80 +++++++++++++++++++++++++++------------ 2 files changed, 80 insertions(+), 34 deletions(-) diff --git a/regionfixer.py b/regionfixer.py index 5be0bd2..afbf284 100644 --- a/regionfixer.py +++ b/regionfixer.py @@ -57,16 +57,22 @@ def fix_bad_chunks(options, scanned_obj): total = scanned_obj.count_chunks(CHUNK_MISSING_ENTITIES_TAG) problem = CHUNK_MISSING_ENTITIES_TAG status = world.CHUNK_STATUS_TEXT[CHUNK_MISSING_ENTITIES_TAG] - if options.fix_missing_tag: - if total: - - text = ' Repairing chunks with status: {0} '.format(status) - print(("\n{0:#^60}".format(text))) - counter = scanned_obj.fix_problematic_chunks(problem) - print(("\nRepaired {0} chunks with status: {1}".format(counter, - status))) - else: - print(("No chunks to fix with status: {0}".format(status))) + # In the same order as in FIXABLE_CHUNK_PROBLEMS + options_fix = [options.fix_missing_tag, + options.fix_wrong_located] + fixing = list(zip(options_fix, world.FIXABLE_CHUNK_PROBLEMS)) + for fix, problem in fixing: + status = world.CHUNK_STATUS_TEXT[problem] + total = scanned_obj.count_chunks(problem) + if fix: + if total: + text = ' Repairing chunks with status: {0} '.format(status) + print(("\n{0:#^60}".format(text))) + counter = scanned_obj.fix_problematic_chunks(problem) + print(("\nRepaired {0} chunks with status: {1}".format(counter, + status))) + else: + print(("No chunks to fix with status: {0}".format(status))) def delete_bad_chunks(options, scanned_obj): @@ -260,6 +266,14 @@ def main(): default=False, action='store_true') + parser.add_argument('--fix-wrong-located', + '--fw', + help='Fixes chunks that have that are wrong located. This will save them in the coordinates ' + 'stored in their data.', + dest='fix_wrong_located', + default=False, + action='store_true') + parser.add_argument('--delete-too-small', '--dt', help='[WARNING!] This option deletes! 
Removes any region files ' diff --git a/regionfixer_core/world.py b/regionfixer_core/world.py index 374f050..35f800f 100644 --- a/regionfixer_core/world.py +++ b/regionfixer_core/world.py @@ -96,13 +96,16 @@ CHUNK_SOLUTION_REMOVE = 51 CHUNK_SOLUTION_REPLACE = 52 CHUNK_SOLUTION_REMOVE_ENTITIES = 53 +CHUNK_SOLUTION_RELOCATE_USING_DATA = 54 CHUNK_PROBLEMS_SOLUTIONS = {CHUNK_CORRUPTED: [CHUNK_SOLUTION_REMOVE, CHUNK_SOLUTION_REPLACE], - CHUNK_WRONG_LOCATED: [CHUNK_SOLUTION_REMOVE, CHUNK_SOLUTION_REPLACE], - CHUNK_TOO_MANY_ENTITIES: [CHUNK_SOLUTION_REMOVE_ENTITIES], - CHUNK_SHARED_OFFSET: [CHUNK_SOLUTION_REMOVE, CHUNK_SOLUTION_REPLACE], - CHUNK_MISSING_ENTITIES_TAG: [CHUNK_SOLUTION_REMOVE, CHUNK_SOLUTION_REPLACE] - } + CHUNK_WRONG_LOCATED: [CHUNK_SOLUTION_REMOVE, CHUNK_SOLUTION_REPLACE, CHUNK_SOLUTION_RELOCATE_USING_DATA], + CHUNK_TOO_MANY_ENTITIES: [CHUNK_SOLUTION_REMOVE_ENTITIES], + CHUNK_SHARED_OFFSET: [CHUNK_SOLUTION_REMOVE, CHUNK_SOLUTION_REPLACE], + CHUNK_MISSING_ENTITIES_TAG: [CHUNK_SOLUTION_REMOVE, CHUNK_SOLUTION_REPLACE]} + +# chunk problems that can be fixed (so they don't need to be removed or replaced) +FIXABLE_CHUNK_PROBLEMS = [CHUNK_MISSING_ENTITIES_TAG, CHUNK_WRONG_LOCATED] # list with problem, status-text, problem arg tuples CHUNK_PROBLEMS_ITERATOR = [] @@ -371,7 +374,8 @@ def __init__(self, path, time=None): self.coords = (self.x, self.z) # dictionary storing all the state tuples of all the chunks - # in the region file + # in the region file, keys are the local coords of the chunk + # sometimes called header coords self._chunks = {} # Dictionary containing counters to for all the chunks @@ -454,8 +458,7 @@ def count_chunks(self, problem=None): return c def get_global_chunk_coords(self, chunkX, chunkZ): - """ Takes the region filename and the chunk local - coords and returns the global chunkcoords as integerss """ + """ Takes the chunk local coords and returns the global chunkcoords """ regionX, regionZ = self.get_coords() chunkX += regionX * 32 @@ -483,9 +486,9 @@ def get_coords(self): # TODO TODO TODO: This is dangerous! Running the method remove_problematic_chunks # without a problem will remove all the chunks in the region file!! def list_chunks(self, status=None): - """ Returns a list of all the ScannedChunk objects of the chunks - with the given status, if no status is omitted or None, - returns all the existent chunks in the region file """ + """ Returns a list of tuples (global coords, status tuple) for all the chunks with said status. + + If no status is omitted or None, returns all the chunks in the region file """ l = [] for c in list(self.keys()): @@ -494,6 +497,7 @@ def list_chunks(self, status=None): l.append((self.get_global_chunk_coords(*c), t)) elif status == None: l.append((self.get_global_chunk_coords(*c), t)) + return l def summary(self): @@ -540,10 +544,19 @@ def remove_problematic_chunks(self, problem): def fix_problematic_chunks(self, problem): """ This fixes problems in chunks that can be somehow easy to fix. - Right now it only fixes chunks missing the TAG_List Entities. + + Right now it only fixes chunks missing the TAG_List Entities and wrong located chunks. + + -TAG_List is fixed by adding said tag. + + -Wrong located chunks are relocated to the data coordinates stored in the zip stream. + We suppose these coordinates are right because the data has checksum. """ # TODO: it seems having the Entities TAG missing is just a little part. Some of the - # chunks have like 3 or 4 tag missing from the NBT structure. 
+ # chunks have like 3 or 4 tag missing from the NBT structure. + + # TODO: remove the assert? + assert(problem in FIXABLE_CHUNK_PROBLEMS) counter = 0 bad_chunks = self.list_chunks(problem) for c in bad_chunks: @@ -551,14 +564,31 @@ def fix_problematic_chunks(self, problem): local_coords = _get_local_chunk_coords(*global_coords) region_file = region.RegionFile(self.path) chunk = region_file.get_chunk(*local_coords) - # The arguments to create the empty TAG_List have been somehow extracted by comparing - # the tag list from a healthy chunk with the one created by nbt - chunk['Level']['Entities'] = TAG_List(name='Entities', type=nbt._TAG_End) - region_file.write_chunk(local_coords[0], local_coords[1], chunk) - counter += 1 - # create the new status tuple - # (num_entities, chunk status) - self[local_coords] = (0, CHUNK_NOT_CREATED) + if problem == CHUNK_MISSING_ENTITIES_TAG: + # The arguments to create the empty TAG_List have been somehow extracted by comparing + # the tag list from a healthy chunk with the one created by nbt + chunk['Level']['Entities'] = TAG_List(name='Entities', type=nbt._TAG_End) + region_file.write_chunk(local_coords[0],local_coords[1], chunk) + + # create the new status tuple + # (num_entities, chunk status) + self[local_coords] = (0 , CHUNK_NOT_CREATED) + counter += 1 + + elif problem == CHUNK_WRONG_LOCATED: + data_coords = get_chunk_data_coords(chunk) + data_l_coords = _get_local_chunk_coords(*data_coords) + region_file.write_chunk(data_l_coords[0], data_l_coords[1], chunk) + region_file.unlink_chunk(*local_coords) + # what to do with the old chunk in the wrong position? + # remove it or keep it? It's probably the best to remove it. + # create the new status tuple + + # remove the wrong position of the chunk and update the status + # (num_entities, chunk status) + self[local_coords] = (0 , CHUNK_NOT_CREATED) + self[data_l_coords]= (0 , CHUNK_OK) + counter += 1 return counter @@ -927,7 +957,8 @@ def remove_problematic_chunks(self, problem): counter = 0 if self.count_chunks(): - print(' Deleting chunks in region set \"{0}\":'.format(self._get_dimension_directory())) + dim_name = self._get_dimension_directory() + print(' Deleting chunks in region set \"{0}\":'.format(dim_name if dim_name else "selected region files")) for r in list(self._set.keys()): counter += self._set[r].remove_problematic_chunks(problem) print("Removed {0} chunks in this regionset.\n".format(counter)) @@ -935,12 +966,13 @@ def remove_problematic_chunks(self, problem): return counter def fix_problematic_chunks(self, problem): - """ Removes all the chunks with the given problem, returns a + """ Fix all the chunks with the given problem, returns a counter with the number of deleted chunks. 
""" counter = 0 if self.count_chunks(): - print(' Repairing chunks in region set \"{0}\":'.format(self._get_dimension_directory())) + dim_name = self._get_dimension_directory() + print(' Repairing chunks in region set \"{0}\":'.format(dim_name if dim_name else "selected region files")) for r in list(self._set.keys()): counter += self._set[r].fix_problematic_chunks(problem) print("Repaired {0} chunks in this regionset.\n".format(counter)) From 945be202228985289f75da6b6e5c0e5fba03e146 Mon Sep 17 00:00:00 2001 From: Alejandro Aguilera Date: Wed, 15 Jul 2020 04:38:34 +0200 Subject: [PATCH 104/151] Update year in copyright notice --- regionfixer.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/regionfixer.py b/regionfixer.py index afbf284..3713f92 100644 --- a/regionfixer.py +++ b/regionfixer.py @@ -4,7 +4,7 @@ # # Region Fixer. # Fix your region files with a backup copy of your Minecraft world. -# Copyright (C) 2011 Alejandro Aguilera (Fenixin) +# Copyright (C) 2020 Alejandro Aguilera (Fenixin) # https://github.com/Fenixin/Minecraft-Region-Fixer # # This program is free software: you can redistribute it and/or modify @@ -133,7 +133,7 @@ def delete_bad_regions(options, scanned_obj): def main(): usage = ('%(prog)s [options] ' ' ... ...') - epilog = ('Copyright (C) 2011 Alejandro Aguilera (Fenixin)\n' + epilog = ('Copyright (C) 2020 Alejandro Aguilera (Fenixin)\n' 'https://github.com/Fenixin/Minecraft-Region-Fixer\n' 'This program comes with ABSOLUTELY NO WARRANTY; for ' 'details see COPYING.txt. This is free software, and you ' From bc3316a23c38f361a10203556cc5fe240f3ab551 Mon Sep 17 00:00:00 2001 From: Alejandro Aguilera Date: Wed, 15 Jul 2020 12:20:36 +0200 Subject: [PATCH 105/151] Update more copyright notices. --- regionfixer_core/interactive.py | 2 +- regionfixer_core/scan.py | 2 +- regionfixer_core/util.py | 2 +- regionfixer_core/world.py | 2 +- setup.py | 2 +- 5 files changed, 5 insertions(+), 5 deletions(-) diff --git a/regionfixer_core/interactive.py b/regionfixer_core/interactive.py index 9e5e610..20b8f63 100644 --- a/regionfixer_core/interactive.py +++ b/regionfixer_core/interactive.py @@ -4,7 +4,7 @@ # # Region Fixer. # Fix your region files with a backup copy of your Minecraft world. -# Copyright (C) 2011 Alejandro Aguilera (Fenixin) +# Copyright (C) 2020 Alejandro Aguilera (Fenixin) # https://github.com/Fenixin/Minecraft-Region-Fixer # # This program is free software: you can redistribute it and/or modify diff --git a/regionfixer_core/scan.py b/regionfixer_core/scan.py index e90f3b1..bebf9fd 100644 --- a/regionfixer_core/scan.py +++ b/regionfixer_core/scan.py @@ -4,7 +4,7 @@ # # Region Fixer. # Fix your region files with a backup copy of your Minecraft world. -# Copyright (C) 2011 Alejandro Aguilera (Fenixin) +# Copyright (C) 2020 Alejandro Aguilera (Fenixin) # https://github.com/Fenixin/Minecraft-Region-Fixer # # This program is free software: you can redistribute it and/or modify diff --git a/regionfixer_core/util.py b/regionfixer_core/util.py index 6e72b90..3d59229 100644 --- a/regionfixer_core/util.py +++ b/regionfixer_core/util.py @@ -4,7 +4,7 @@ # # Region Fixer. # Fix your region files with a backup copy of your Minecraft world. 
-# Copyright (C) 2011 Alejandro Aguilera (Fenixin) +# Copyright (C) 2020 Alejandro Aguilera (Fenixin) # https://github.com/Fenixin/Minecraft-Region-Fixer # # This program is free software: you can redistribute it and/or modify diff --git a/regionfixer_core/world.py b/regionfixer_core/world.py index 35f800f..31dbffb 100644 --- a/regionfixer_core/world.py +++ b/regionfixer_core/world.py @@ -4,7 +4,7 @@ # # Region Fixer. # Fix your region files with a backup copy of your Minecraft world. -# Copyright (C) 2011 Alejandro Aguilera (Fenixin) +# Copyright (C) 2020 Alejandro Aguilera (Fenixin) # https://github.com/Fenixin/Minecraft-Region-Fixer # # This program is free software: you can redistribute it and/or modify diff --git a/setup.py b/setup.py index 6f648ee..684b3bc 100644 --- a/setup.py +++ b/setup.py @@ -140,7 +140,7 @@ def __init__(self, **kw): # dest_base = "regionfixer_gui", # version = gui_version.version_string, # company_name = "No Company", -# copyright = "Copyright (C) 2011 Alejandro Aguilera", +# copyright = "Copyright (C) 2020 Alejandro Aguilera", # name = "Region Fixer GUI" # ) #=============================================================================== From b661b735137a3ccc06f9f4628453f74a5423ab95 Mon Sep 17 00:00:00 2001 From: Alejandro Aguilera Date: Sat, 18 Jul 2020 12:21:35 +0200 Subject: [PATCH 106/151] Improve comments in world.py. Some commenting in regionfixer.py --- regionfixer.py | 34 +- regionfixer_core/world.py | 890 +++++++++++++++++++++++++++----------- 2 files changed, 655 insertions(+), 269 deletions(-) diff --git a/regionfixer.py b/regionfixer.py index 3713f92..51d5f98 100644 --- a/regionfixer.py +++ b/regionfixer.py @@ -51,8 +51,16 @@ def fix_bad_chunks(options, scanned_obj): """ Fixes chunks that can be repaired. - Doesn't work right now. + Keywords arguments: + options -- argparse arguments, the whole argparse.ArgumentParser() object + scanned_obj -- this can be a RegionSet or World objects from world.py + + Returns nothing. + + It will fix the chunks as requested by options and modify the RegionSet and World objects + with the new fixed chunks. """ + print("") total = scanned_obj.count_chunks(CHUNK_MISSING_ENTITIES_TAG) problem = CHUNK_MISSING_ENTITIES_TAG @@ -79,14 +87,16 @@ def delete_bad_chunks(options, scanned_obj): """ Takes a scanned object and deletes all the bad chunks. Keywords arguments - options -- options as returned by the module optparse - scanned_obj -- a regionfixer world or regionset + options -- argparse arguments, the whole argparse.ArgumentParser() object + scanned_obj -- this can be a RegionSet or World objects from world.py Returns nothing. This function will deletes all the chunks with problems iterating through all the possible problems and using the - options given. """ + options given. + """ + print("") # In the same order as in CHUNK_PROBLEMS options_delete = [options.delete_corrupted, @@ -110,9 +120,19 @@ def delete_bad_chunks(options, scanned_obj): def delete_bad_regions(options, scanned_obj): - """ Takes an scanned object (world object or regionset object) and - the options give to region-fixer, it deletes all the region files - with problems iterating through all the possible problems. """ + """ Takes a scanned object and deletes all bad region files. + + Keywords arguments: + options -- argparse arguments, the whole argparse.ArgumentParser() object + scanned_obj -- this can be a RegionSet or World objects from world.py + + Returns nothing. 
+ + Takes an scanned object (World object or RegionSet object) and + the options given to region-fixer and it deletes all the region files + with problems iterating through all the possible problems. + """ + print("") options_delete = [options.delete_too_small] deleting = list(zip(options_delete, world.REGION_PROBLEMS)) diff --git a/regionfixer_core/world.py b/regionfixer_core/world.py index 31dbffb..48e74b7 100644 --- a/regionfixer_core/world.py +++ b/regionfixer_core/world.py @@ -34,7 +34,6 @@ from nbt.nbt import TAG_List # Constants: - # # -------------- # Chunk related: @@ -237,101 +236,16 @@ } -def parse_chunk_list(chunk_list, world_obj): - """ Generate a list of chunks to use with world.delete_chunk_list. - - It takes a list of global chunk coordinates and generates a list of - tuples containing: - - (region fullpath, chunk X, chunk Z) - - """ - # this is not used right now - parsed_list = [] - for line in chunk_list: - try: - chunk = eval(line) - except: - print("The chunk {0} is not valid.".format(line)) - continue - region_name = get_chunk_region(chunk[0], chunk[1]) - fullpath = join(world_obj.world_path, "region", region_name) - if fullpath in world_obj.all_mca_files: - parsed_list.append((fullpath, chunk[0], chunk[1])) - else: - print("The chunk {0} should be in the region file {1} and this region files doesn't extist!".format(chunk, fullpath)) - - return parsed_list - - -def parse_paths(args): - """ Parse the list of args passed to region-fixer.py and returns a - RegionSet object with the list of regions and a list of World - objects. """ - # parese the list of region files and worlds paths - world_list = [] - region_list = [] - warning = False - for arg in args: - if arg[-4:] == ".mca": - region_list.append(arg) - elif arg[-4:] == ".mcr": # ignore pre-anvil region files - if not warning: - print("Warning: Region-Fixer only works with anvil format region files. Ignoring *.mcr files") - warning = True - else: - world_list.append(arg) - - # check if they exist - region_list_tmp = [] - for f in region_list: - if exists(f): - if isfile(f): - region_list_tmp.append(f) - else: - print("Warning: \"{0}\" is not a file. Skipping it and scanning the rest.".format(f)) - else: - print("Warning: The region file {0} doesn't exists. Skipping it and scanning the rest.".format(f)) - region_list = region_list_tmp - - # init the world objects - world_list = parse_world_list(world_list) - - return world_list, RegionSet(region_list = region_list) - - -def parse_world_list(world_path_list): - """ Parses a world list checking if they exists and are a minecraft - world folders. Returns a list of World objects. """ - - tmp = [] - for d in world_path_list: - if exists(d): - w = World(d) - if w.isworld: - tmp.append(w) - else: - print("Warning: The folder {0} doesn't look like a minecraft world. I'll skip it.".format(d)) - else: - print("Warning: The folder {0} doesn't exist. I'll skip it.".format(d)) - return tmp - - -def parse_backup_list(world_backup_dirs): - """ Generates a list with the input of backup dirs containing the - world objects of valid world directories.""" - - directories = world_backup_dirs.split(',') - backup_worlds = parse_world_list(directories) - return backup_worlds - - class InvalidFileName(IOError): + """ Exception raised when a filename is wrong. """ pass class ScannedDataFile: - """ Stores all the information of a scanned data file. """ + """ Stores all the information of a scanned data file. + + Only needs the path of the data file to be initialized. 
+ """ def __init__(self, path=None): super().__init__() @@ -355,14 +269,22 @@ def oneliner_status(self): class ScannedChunk: - """ Stores all the results of the scan. Not used at the moment, it - prette nice but takes an huge amount of memory. """ + """ Stores all the information of a scanned chunk. + + Not used at the moment, it's nice but takes an huge amount of memory when + is not strange for chunks to be in the order of millions.""" # WARNING: This is here so I remember to not use objects as ScannedChunk # They take too much memory. class ScannedRegionFile: - """ Stores all the scan information for a region file """ + """ Stores all the scan information for a region file. + + Keywords arguments: + - path -- A string with the path of the region file + - time -- The time at which the region file has been scanned. + None by default. + """ def __init__(self, path, time=None): # general region file info @@ -425,12 +347,50 @@ def __setitem__(self, key, value): self._chunks[key] = value self._counts[value[TUPLE_STATUS]] += 1 + def get_coords(self): + """ Returns the region file coordinates as two integers. + + Return: + - coordX, coordZ -- Integers with the x and z coordinates of the + region file. + + Either parse the region file name or uses the stored ones. + + """ + + if self.x != None and self.z != None: + return self.x, self.z + else: + splited = split(self.filename) + filename = splited[1] + l = filename.split('.') + try: + coordX = int(l[1]) + coordZ = int(l[2]) + except ValueError: + raise InvalidFileName() + + return coordX, coordZ + def keys(self): + """Returns a list with all the local coordinates (header coordinates). + + Return: + - list -- A list with all the chunk coordinates extracted form the + region file header + """ + return list(self._chunks.keys()) @property def has_problems(self): - """ Return True if the region file has problem in itself or in its chunks. """ + """ Return True if the region file has problem in itself or in its chunks. + + Return: + - boolean -- True f the region has problems or False otherwise. + + """ + if self.status in REGION_PROBLEMS: return True for s in CHUNK_PROBLEMS: @@ -439,14 +399,25 @@ def has_problems(self): return False def get_path(self): - """ Returns the path of the region file. """ + """ Returns the path of the region file. + + Return: + - path -- A string with the path of the region file. + + """ + return self.path def count_chunks(self, problem=None): """ Counts chunks in the region file with the given problem. + + Keyword arguments: + - problem -- This is the status of the chunk to count for. See CHUNK_PROBLEMS + + If problem is omitted or None, counts all the chunks. Returns + an integer with the counter. - If problem is omitted or None, counts all the chunks. Returns - an integer with the counter. """ + """ if problem == None: c = 0 @@ -458,37 +429,29 @@ def count_chunks(self, problem=None): return c def get_global_chunk_coords(self, chunkX, chunkZ): - """ Takes the chunk local coords and returns the global chunkcoords """ + """ Takes the chunk local coordinates and returns its global coordinates. + + Keyword arguments: + - chunkX -- Local X chunk coordinate. + - chunkZ -- Local Z chunk coordinate. - regionX, regionZ = self.get_coords() + """ + + regionX, regionZ = self.get_coords() chunkX += regionX * 32 chunkZ += regionZ * 32 return chunkX, chunkZ - def get_coords(self): - """ Splits the region filename (full pathname or just filename) - and returns his region X and Z coordinates as integers. 
""" - if self.x != None and self.z != None: - return self.x, self.z - else: - splited = split(self.filename) - filename = splited[1] - l = filename.split('.') - try: - coordX = int(l[1]) - coordZ = int(l[2]) - except ValueError: - raise InvalidFileName() - - return coordX, coordZ - - # TODO TODO TODO: This is dangerous! Running the method remove_problematic_chunks - # without a problem will remove all the chunks in the region file!! def list_chunks(self, status=None): - """ Returns a list of tuples (global coords, status tuple) for all the chunks with said status. + """ Returns a list of tuples (global coords, status tuple) for all the chunks with 'status'. + + Keyword arguments: + - status -- Defaults to None. Status of the chunk to list, see CHUNK_STATUSES - If no status is omitted or None, returns all the chunks in the region file """ + If status is omitted or None, returns all the chunks in the region file + + """ l = [] for c in list(self.keys()): @@ -501,9 +464,13 @@ def list_chunks(self, status=None): return l def summary(self): - """ Returns a summary of the problematic chunks. The summary - is a string with region file, global coords, local coords, - and status of every problematic chunk. """ + """ Returns a summary of all the problematic chunks. + + The summary is a string with region file, global coordinates, local coordinates, + and status of every problematic chunk, in a subtree like format. + + """ + text = "" if self.status in REGION_PROBLEMS: text += " |- This region has status: {0}.\n".format(REGION_STATUS_TEXT[self.status]) @@ -523,14 +490,20 @@ def summary(self): return text def remove_problematic_chunks(self, problem): - """ Removes all the chunks with the given problem, returns a - counter with the number of deleted chunks. """ + """ Removes all the chunks with the given problem + + Keyword arguments: + - problem -- Status of the chunks to remove. See CHUNK_STATUSES. + + Return: + - counter -- An integer with the amount of removed chunks. + + """ counter = 0 bad_chunks = self.list_chunks(problem) for c in bad_chunks: global_coords = c[0] - status_tuple = c[1] local_coords = _get_local_chunk_coords(*global_coords) region_file = region.RegionFile(self.path) region_file.unlink_chunk(*local_coords) @@ -542,20 +515,27 @@ def remove_problematic_chunks(self, problem): return counter def fix_problematic_chunks(self, problem): - """ This fixes problems in chunks that can be somehow easy to fix. - + """ This fixes problems in chunks that can be somehow fixed. + + Keyword arguments: + - problem -- Status of the chunks to fix. See FIXABLE_CHUNK_PROBLEMS + + Return: + - counter -- An integer with the amount of fixed chunks. Right now it only fixes chunks missing the TAG_List Entities and wrong located chunks. -TAG_List is fixed by adding said tag. -Wrong located chunks are relocated to the data coordinates stored in the zip stream. - We suppose these coordinates are right because the data has checksum. + We suppose these coordinates are right because the data has checksum. + """ + # TODO: it seems having the Entities TAG missing is just a little part. Some of the - # chunks have like 3 or 4 tag missing from the NBT structure. + # chunks have like 3 or 4 tag missing from the NBT structure. I don't really know which + # of them are mandatory. - # TODO: remove the assert? 
assert(problem in FIXABLE_CHUNK_PROBLEMS) counter = 0 bad_chunks = self.list_chunks(problem) @@ -593,15 +573,18 @@ def fix_problematic_chunks(self, problem): return counter def remove_entities(self): - """ Removes all the entities in chunks with the problematic - CHUNK_TOO_MANY_ENTITIES that are in this region file. - Returns a counter of all the removed entities. """ + """ Removes all the entities in chunks with status CHUNK_TOO_MANY_ENTITIES. + + Return: + - counter -- Integer with the number of removed entities. + + """ + problem = CHUNK_TOO_MANY_ENTITIES counter = 0 bad_chunks = self.list_chunks(problem) for c in bad_chunks: global_coords = c[0] - status_tuple = c[1] local_coords = _get_local_chunk_coords(*global_coords) counter += self.remove_chunk_entities(*local_coords) # create new status tuple: @@ -610,9 +593,20 @@ def remove_entities(self): return counter def remove_chunk_entities(self, x, z): - """ Takes a chunk coordinates, opens the chunk and removes all - the entities in it. Return an integer with the number of - entities removed""" + """ Takes a chunk local coordinates and remove its entities. + + Keyword arguments: + - x -- X local coordinate of the chunk + - z -- Z local coordinate of the chunk + + Return: + - counter -- An integer with the number of entities removed. + + This will remove all the entities in the chunk, it will not perform any + kind of check. + + """ + region_file = region.RegionFile(self.path) chunk = region_file.get_chunk(x, z) counter = len(chunk['Level']['Entities']) @@ -623,8 +617,14 @@ def remove_chunk_entities(self, x, z): return counter def rescan_entities(self, options): - """ Updates the status of all the chunks in the region file when - the the option entity limit is changed. """ + """ Updates the status of all the chunks after changing entity_limit. + + Keyword arguments: + - options -- argparse arguments, the whole argparse.ArgumentParser() object as used + by regionfixer.py + + """ + for c in list(self.keys()): # for safety reasons use a temporary list to generate the # new tuple @@ -650,17 +650,19 @@ class DataSet: typevalue is the type of the class to store in the set. When setting it will be asserted if it is of that type - The data should be in a dictionary and should be accessible through the - methods __getitem__, __setitem__. The methods, _get_list, __len__ are also used. + The data will be stored in the self._set dictionary. + + Implemented private methods are: __getitem__, __setitem__, _get_list, __len__. - _replace_in_data_structure should be created because during the scan the - different processes create copies of the original data, so replacing it in - the original data set is mandatory. + Three methods should be implemented to work with a DataSet, two of the mandatory: + - _replace_in_data_structure -- (mandatory) Should be created because during the scan the + different processes create copies of the original data, so replacing it in + the original data set is mandatory in order to keep everything working. - _update_counts makes sure that the DataSet stores all the counts and that - it is not needed to loop through all of them to know the real count. + - _update_counts -- (mandatory) Makes sure that the DataSet stores all the counts and + that it is not needed to loop through all of them to know the real count. 
- has_problems should return True only if any element of the set has problems + - has_problems -- Should return True only if any element of the set has problems """ @@ -670,6 +672,7 @@ def __init__(self, typevalue, *args, **kwargs): def _get_list(self): """ Returns a list with all the values in the set. """ + return list(self._set.values()) def __getitem__(self, key): @@ -689,31 +692,41 @@ def __len__(self): # mandatory implementation methods def summary(self): """ Return a summary of problems found in this set. """ + raise NotImplementedError @property def has_problems(self): """ Returns True if the scanned set has problems. """ + raise NotImplementedError def _replace_in_data_structure(self, data, key): """ For multiprocessing. Replaces the data in the set with the new data. + Keyword arguments: + - data -- Value of the data to be stored + - key -- Key in which to store the data + Child scanning processes make copies of the ScannedRegion/DataFile when they scan them. The AsyncScanner will call this function so the ScannedRegion/DataFile is stored in the set properly. """ + raise NotImplementedError def _update_counts(self, s): """ This functions is used by __set__ to update the counters. """ + raise NotImplementedError class DataFileSet(DataSet): - """ Any scanneable set should derive from this. - - DataSets are scanned using scan.AsyncScanner + """ DataSet for Minecraft data files (.dat). + + Keyword arguments: + - path -- Path to the folder containing data files + - title -- Some user readable string to represent the DataSet """ def __init__(self, path, title, *args, **kwargs): @@ -735,6 +748,7 @@ def __init__(self, path, title, *args, **kwargs): @property def has_problems(self): """ Returns True if the dataset has problems and false otherwise. """ + for d in self._set.values(): if d.status in DATAFILE_PROBLEMS: return True @@ -752,6 +766,7 @@ def count_datafiles(self, status): def summary(self): """ Return a summary of problems found in this set. """ + text = "" bad_data_files = [i for i in list(self._set.values()) if i.status in DATAFILE_PROBLEMS] for f in bad_data_files: @@ -761,9 +776,12 @@ def summary(self): class RegionSet(DataSet): - """Stores an arbitrary number of region files and the scan results. - Inits with a list of region files. The regions dict is filled - while scanning with ScannedRegionFiles and ScannedChunks.""" + """Stores an arbitrary number of region files and their scan results. 
+ + Keyword arguments: + - regionset_path -- Path to the folder containing region files + - region_list -- List of paths to all the region files + """ def __init__(self, regionset_path=None, region_list=[]): DataSet.__init__(self, ScannedRegionFile) @@ -797,8 +815,11 @@ def __init__(self, regionset_path=None, region_list=[]): def get_name(self): """ Return a string with a representative name for the regionset - If the regionset is a dimension its name is returned, if not the directory and - if there is no name or "" if there is nothing to fall back + The order for getting the name is: + 1 - The name derived by the dimension path + 2 - The name of the last directory in the path as returned by _get_dimension_directory + 3 - Empty string "" + """ dim_directory = self._get_dimension_directory() @@ -821,9 +842,13 @@ def _update_counts(self, scanned_regionfile): self._chunk_counters[status] += scanned_regionfile.count_chunks(status) def _get_dimension_directory(self): - """ Returns a string with the directory of the dimension, None - if there is no such a directory and the regionset is composed - of sparse region files. """ + """ Returns a string with the directory containing the RegionSet. + + If there is no such a directory returns None. If it's composed + of sparse region files returns 'regionset'. + + """ + if self.path: rest, region = split(self.path) rest, dim_path = split(rest) @@ -862,9 +887,14 @@ def keys(self): return list(self._set.keys()) def list_regions(self, status=None): - """ Returns a list of all the ScannedRegionFile objects stored - in the RegionSet with status. If status = None it returns - all the objects.""" + """ Returns a list of all the ScannedRegionFile objects with 'status'. + + Keyword arguments: + - status -- The region file status. See REGION_STATUSES + + If status = None it returns all the objects. + + """ if status is None: return list(self._set.values()) @@ -876,18 +906,14 @@ def list_regions(self, status=None): return t def count_regions(self, status=None): - """ Return the number of region files with status. If none - returns the number of region files in this regionset. - Possible status are: empty, too_small """ - - # ======================================================================= - # counter = 0 - # for r in list(self.keys()): - # if status == self[r].status: - # counter += 1 - # elif status == None: - # counter += 1 - # ======================================================================= + """ Return the number of region files with status. + + - status -- The region file status. See REGION_STATUSES + + If none returns the total number of region files in this regionset. + + """ + counter = 0 if status is None: for s in REGION_STATUSES: @@ -898,8 +924,13 @@ def count_regions(self, status=None): return counter def count_chunks(self, problem=None): - """ Returns the number of chunks with the given problem. If - problem is None returns the number of chunks. """ + """ Returns the number of chunks with the given problem. + + - status -- The chunk status to count. See CHUNK_STATUSES + + If problem is None returns the number of chunks in this region file. + + """ c = 0 if problem is None: @@ -911,18 +942,27 @@ def count_chunks(self, problem=None): return c def list_chunks(self, status=None): - """ Returns a list of the ScannedChunk objects of the chunks - with the given status. If status = None returns all the - chunks. """ + """ Returns a list of all the chunk tuples with 'status'. + + Keyword arguments: + - status -- The chunk status to list. 
See CHUNK_STATUSES + + If status = None it returns all the chunk tuples. + + """ + l = [] for r in list(self.keys()): l.extend(self[r].list_chunks(status)) return l def summary(self): - """ Returns a summary of the problematic chunks in this - regionset. The summary is a string with global coords, - local coords, data coords and status. """ + """ Returns a string with a summary of the problematic chunks. + + The summary contains global coords, local coords, data coords and status. + + """ + text = "" for r in list(self.keys()): if not self[r].has_problems: @@ -933,9 +973,16 @@ def summary(self): return text def locate_chunk(self, global_coords): - """ Takes the global coordinates of a chunk and returns the - region filename and the local coordinates of the chunk or - None if it doesn't exits in this RegionSet """ + """ Takes the global coordinates of a chunk and returns its location. + + Keyword arguments: + - global_coords -- Global chunk coordinates of the chunk to locate. + + Return: + - filename -- Filename where the chunk is stored + - local_coords -- Local coordinates of the chunk in the region file + + """ filename = self.path + get_chunk_region(*global_coords) local_coords = _get_local_chunk_coords(*global_coords) @@ -943,8 +990,16 @@ def locate_chunk(self, global_coords): return filename, local_coords def locate_region(self, coords): - """ Returns a string with the path of the region file with - the given coords in this regionset or None if not found. """ + """ Returns a string with the path of the region file. + + Keyword arguments: + - coords -- Global region coordinates of the region file to locate in + this RegionSet. + + Return: + - region_name -- String containing the path of the region file or None if it + doesn't exist + """ x, z = coords region_name = 'r.' + str(x) + '.' + str(z) + '.mca' @@ -952,8 +1007,14 @@ def locate_region(self, coords): return region_name def remove_problematic_chunks(self, problem): - """ Removes all the chunks with the given problem, returns a - counter with the number of deleted chunks. """ + """ Removes all the chunks with the given problem. + + Keyword arguments: + - problem -- The chunk status to remove. See CHUNK_STATUSES for a list of possible statuses. + + Return: + - counter -- Integer with the number of chunks removed + """ counter = 0 if self.count_chunks(): @@ -966,8 +1027,14 @@ def remove_problematic_chunks(self, problem): return counter def fix_problematic_chunks(self, problem): - """ Fix all the chunks with the given problem, returns a - counter with the number of deleted chunks. """ + """ Try to fix all the chunks with the given problem. + + Keyword arguments: + - problem -- The chunk status to fix. See CHUNK_STATUSES for a list of possible statuses. + + Return: + - counter -- Integer with the number of chunks fixed. + """ counter = 0 if self.count_chunks(): @@ -980,29 +1047,42 @@ def fix_problematic_chunks(self, problem): return counter def remove_entities(self): - """ Removes entities in chunks with the status - TOO_MANY_ENTITIES. """ + """ Removes entities in chunks with the status TOO_MANY_ENTITIES. + + Return: + - counter -- Integer with the number of removed entities. + """ + counter = 0 for r in list(self._set.keys()): counter += self._set[r].remove_entities() return counter def rescan_entities(self, options): - """ Updates the status of all the chunks in the regionset when - the option entity limit is changed. """ + """ Updates the CHUNK_TOO_MANY_ENTITIES status of all the chunks in the RegionSet. 
+ + This should be ran when the option entity limit is changed. + """ + for r in list(self.keys()): self[r].rescan_entities(options) def generate_report(self, standalone): - """ Generates a report with the results of the scan. The report - will include information about chunks and regions. - - If standalone is true it will return a string of text with the - results of the scan. + """ Generates a report with the results of the scan. + + Keyword arguments: + - standalone -- If true the report will be a human readable String. If false the + report will be a dictionary with all the counts of chunks and regions. + + Return if standalone = True: + - text -- A human readable string of text with the results of the scan. + + Return if standlone = False: + - chunk_counts -- Dictionary with all the counts of chunks for all the statuses. To read + it use the CHUNK_* constants. + - region_counts -- Dictionary with all the counts of region files for all the statuses. To read + it use the REGION_* constants. - If standalone is false it will return a dictionary with all the counts of chunks - and regions, to use the dictionary use the variables defined in the start of this - file. The variables are named CHUNK_* """ # collect chunk data @@ -1059,20 +1139,33 @@ def generate_report(self, standalone): return chunk_counts, region_counts def remove_problematic_regions(self, problem): - """ Removes all the regions files with the given problem. - This is NOT the same as removing chunks, this WILL DELETE - the region files from the hard drive. """ + """ Removes all the regions files with the given problem. See the warning! + + Keyword arguments: + - problem -- Status of the region files to remove. See REGION_STATUSES for a list. + + Return: + - counter -- An integer with the amount of removed region files. + + Warning! This is NOT the same as removing chunks, this WILL DELETE the region files + from the hard drive. + """ + counter = 0 for r in self.list_regions(problem): remove(r.get_path()) counter += 1 return counter - class World: - """ This class stores all the info needed of a world, and once - scanned, stores all the problems found. It also has all the tools - needed to modify the world.""" + """ This class stores information and scan results for a Minecraft world. + + Keyword arguments: + - world_path -- String with the path of the world. + + Once scanned, stores all the problems found in it. It also has all the tools + needed to modify the world. + """ def __init__(self, world_path): self.path = world_path @@ -1144,7 +1237,12 @@ def __str__(self): @property def has_problems(self): - """ Returns True if the regionset has chunk or region problems and false otherwise. """ + """ Returns True if the regionset has chunk or region problems and false otherwise. + + Return: + - boolean -- A boolean, True if the world has any problems, false otherwise + + """ if self.scanned_level.status in DATAFILE_PROBLEMS: return True @@ -1160,7 +1258,13 @@ def has_problems(self): return False def get_number_regions(self): - """ Returns a integer with the number of regions in this world""" + """ Returns a integer with the number of regions files in this world + + Return: + - counter -- An integer with the amount of region files. 
+ + """ + counter = 0 for dim in self.regionsets: counter += len(dim) @@ -1168,8 +1272,16 @@ def get_number_regions(self): return counter def summary(self): - """ Returns a text string with a summary of all the problems - found in the world object.""" + """ Returns a string with a summary of the problems in this world. + + Return: + - text -- A String with a human readable summary of all the problems in this world. + + This method calls the other summary() methods in RegionSet and DataSet. See these + methods for more details. + + """ + final = "" # intro with the world name @@ -1209,8 +1321,14 @@ def summary(self): return final def get_name(self): - """ Returns a string with the name as found in level.dat or - with the world folder's name. """ + """ Returns a string with the name of the world. + + Return: + - name -- Either the world name as found in level.dat or the last + directory in the world path. + + """ + if self.name: return self.name else: @@ -1220,15 +1338,33 @@ def get_name(self): return n def count_regions(self, status=None): - """ Returns a number with the count of region files with - status. """ + """ Returns an integer with the count of region files with status. + + Keyword arguments: + - status -- An integer from REGION_STATUSES to region files with that status. + For a list os status see REGION_STATUSES. + + Return: + - counter -- An integer with the number of region files with the given status. + + """ + counter = 0 for r in self.regionsets: counter += r.count_regions(status) return counter def count_chunks(self, status=None): - """ Counts problems """ + """ Returns an integer with the count of chunks with 'status'. + + Keyword arguments: + - status -- An integer from CHUNK_STATUSES to count chunks with that status. + For a list of status see CHUNK_STATUSES. + + Return: + - counter -- An integer with the number of chunks with the given status. + + """ counter = 0 for r in self.regionsets: count = r.count_chunks(status) @@ -1236,10 +1372,20 @@ def count_chunks(self, status=None): return counter def replace_problematic_chunks(self, backup_worlds, problem, entity_limit, delete_entities): - """ Takes a list of world objects and a problem value and try - to replace every chunk with that problem using a working - chunk from the list of world objects. It uses the world - objects in left to right order. """ + """ Replaces problematic chunks using backups. + + Keyword arguments: + - backup_worlds -- A list of World objects to use as backups. Backup worlds will be used + in a ordered way. + - problem -- An integer indicating the status of chunks to be replaced. + See CHUNK_STATUSES for a complete list. + - entity_limit -- The threshold to consider a chunk with the status TOO_MANY_ENTITIES. + - delete_entities -- Boolean indicating if the chunks with too_many_entities should have + their entities removed. + Return: + - counter -- An integer with the number of chunks replaced. + + """ counter = 0 scanned_regions = {} @@ -1301,7 +1447,7 @@ def replace_problematic_chunks(self, backup_worlds, problem, entity_limit, delet tofix_region_file = region.RegionFile(tofix_region_path) # first unlink the chunk, second write the chunk. 
# unlinking the chunk is more secure and the only way to replace chunks with - # a shared offset withou overwriting the good chunk + # a shared offset without overwriting the good chunk tofix_region_file.unlink_chunk(*local_coords) tofix_region_file.write_chunk(local_coords[0], local_coords[1], working_chunk) counter += 1 @@ -1317,22 +1463,60 @@ def replace_problematic_chunks(self, backup_worlds, problem, entity_limit, delet return counter def remove_problematic_chunks(self, problem): - """ Removes all the chunks with the given problem. """ + """ Removes all the chunks with the given problem. + + Keyword arguments: + - problem -- The chunk status to remove. See CHUNK_STATUSES for a list of possible statuses. + + Return: + - counter -- Integer with the number of chunks removed + + This method calls remove_problematic_chunks() in the RegionSets. + + """ + counter = 0 for regionset in self.regionsets: counter += regionset.remove_problematic_chunks(problem) return counter def fix_problematic_chunks(self, problem): - """ Removes all the chunks with the given problem. """ + """ Try to fix all the chunks with the given problem. + + Keyword arguments: + - problem -- The chunk status to fix. See CHUNK_STATUSES for a list of possible statuses. + + Return: + - counter -- Integer with the number of chunks fixed. + + This method calls remove_problematic_chunks() in the RegionSets. + + """ + counter = 0 for regionset in self.regionsets: counter += regionset.fix_problematic_chunks(problem) return counter def replace_problematic_regions(self, backup_worlds, problem, entity_limit, delete_entities): - """ Replaces region files with the given problem using a backup - directory. """ + """ Replaces problematic region files using backups. + + Keyword arguments: + - backup_worlds -- A list of World objects to use as backups. Backup worlds will be used + in a ordered way. + - problem -- An integer indicating the status of region files to be replaced. + See REGION_STATUSES for a complete list. + - entity_limit -- The threshold to consider a chunk with the status TOO_MANY_ENTITIES. + - delete_entities -- Boolean indicating if the chunks with too_many_entities should have + their entities removed. + Return: + - counter -- An integer with the number of chunks replaced. + + Note: entity_limit and delete_entities are not really used here. They are just there to make all + the methods homogeneus. + + """ + counter = 0 for regionset in self.regionsets: for backup in backup_worlds: @@ -1378,40 +1562,63 @@ def replace_problematic_regions(self, backup_worlds, problem, entity_limit, dele return counter def remove_problematic_regions(self, problem): - """ Removes all the regions files with the given problem. - This is NOT the same as removing chunks, this WILL DELETE - the region files from the hard drive. """ + """ Removes all the regions files with the given problem. See the warning! + + Keyword arguments: + - problem -- Status of the region files to remove. See REGION_STATUSES for a list. + + Return: + - counter -- An integer with the amount of removed region files. + + Warning! This is NOT the same as removing chunks, this WILL DELETE the region files + from the hard drive. + + """ + counter = 0 for regionset in self.regionsets: counter += regionset.remove_problematic_regions(problem) return counter def remove_entities(self): - """ Delete all the entities in the chunks that have more than - entity-limit entities. """ + """ Removes entities in chunks with the status TOO_MANY_ENTITIES. 
+ + Return: + - counter -- Integer with the number of removed entities. + + """ + counter = 0 for regionset in self.regionsets: counter += regionset.remove_entities() return counter def rescan_entities(self, options): - """ Updates the status of all the chunks in the world when the - option entity limit is changed. """ + """ Updates the CHUNK_TOO_MANY_ENTITIES status of all the chunks in the RegionSet. + + This should be ran when the option entity limit is changed. + + """ + for regionset in self.regionsets: regionset.rescan_entities(options) - def generate_report(self, standalone): - """ Generates a report with the results of the scan. The report - will include information about data structures (.dat files), - player files, chunks and regions. - - If standalone is true it will return a string of text with the - results of the scan. + def generate_report(self, standalone): + """ Generates a report with the results of the scan. + + Keyword arguments: + - standalone -- If true the report will be a human readable String. If false the + report will be a dictionary with all the counts of chunks and regions. + + Return if standalone = True: + - text -- A human readable string of text with the results of the scan. + + Return if standlone = False: + - chunk_counts -- Dictionary with all the counts of chunks for all the statuses. To read + it use the CHUNK_* constants. + - region_counts -- Dictionary with all the counts of region files for all the statuses. To read + it use the REGION_* constants. - If standalone is false it will return a dictionary with all the counts, - to use the dictionary use the variables defined in the start of this - file. The variables are named CHUNK_*. Note that right now doesn't return - information about the data files. """ # collect chunk data @@ -1489,10 +1696,127 @@ def generate_report(self, standalone): return chunk_counts, region_counts + +def parse_chunk_list(chunk_list, world_obj): + """ Generate a list of chunks to use with world.delete_chunk_list. + + It takes a list of global chunk coordinates and generates a list of + tuples containing: + + (region fullpath, chunk X, chunk Z) + + """ + # this is not used right now + parsed_list = [] + for line in chunk_list: + try: + chunk = eval(line) + except: + print("The chunk {0} is not valid.".format(line)) + continue + region_name = get_chunk_region(chunk[0], chunk[1]) + fullpath = join(world_obj.world_path, "region", region_name) + if fullpath in world_obj.all_mca_files: + parsed_list.append((fullpath, chunk[0], chunk[1])) + else: + print("The chunk {0} should be in the region file {1} and this region files doesn't extist!".format(chunk, fullpath)) + + return parsed_list + + +def parse_paths(args): + """ Parse a list of paths to and returns World and a RegionSet objects. + + Keywords arguments: + args -- arguments as argparse got them + + Return: + world_list -- A list of World objects + RegionSet -- A RegionSet object with all the regionfiles found in args + """ + + # parese the list of region files and worlds paths + world_list = [] + region_list = [] + warning = False + for arg in args: + if arg[-4:] == ".mca": + region_list.append(arg) + elif arg[-4:] == ".mcr": # ignore pre-anvil region files + if not warning: + print("Warning: Region-Fixer only works with anvil format region files. 
Ignoring *.mcr files") + warning = True + else: + world_list.append(arg) + + # check if they exist + region_list_tmp = [] + for f in region_list: + if exists(f): + if isfile(f): + region_list_tmp.append(f) + else: + print("Warning: \"{0}\" is not a file. Skipping it and scanning the rest.".format(f)) + else: + print("Warning: The region file {0} doesn't exists. Skipping it and scanning the rest.".format(f)) + region_list = region_list_tmp + + # init the world objects + world_list = parse_world_list(world_list) + + return world_list, RegionSet(region_list = region_list) + + +def parse_world_list(world_path_list): + """ Parses a world path list. Returns a list of World objects. + + Keywords arguments: + world_path_list -- A list of paths where minecraft worlds are supposed to be + + Return: + world_list -- A list of World objects using the paths from the input + + Parses a world path list checking if they exists and are a minecraft + world folders. Returns a list of World objects. + """ + + world_list = [] + for d in world_path_list: + if exists(d): + w = World(d) + if w.isworld: + world_list.append(w) + else: + print("Warning: The folder {0} doesn't look like a minecraft world. I'll skip it.".format(d)) + else: + print("Warning: The folder {0} doesn't exist. I'll skip it.".format(d)) + return world_list + + +def parse_backup_list(world_backup_dirs): + """ Generates a list with the input of backup dirs containing the + world objects of valid world directories.""" + + directories = world_backup_dirs.split(',') + backup_worlds = parse_world_list(directories) + return backup_worlds + + def delete_entities(region_file, x, z): - """ This function is used while scanning the world in scan.py! Takes - a region file obj and a local chunks coords and deletes all the - entities in that chunk. """ + """ Removes entities in chunks with the status TOO_MANY_ENTITIES. + + Keyword entities: + - x -- X local coordinate of the chunk in the region files + - z -- Z local coordinate of the chunk in the region files + - region_file -- RegionFile object where the chunk is stored + + Return: + - counter -- Integer with the number of removed entities. + + This function is used in scan.py. + + """ + chunk = region_file.get_chunk(x, z) counter = len(chunk['Level']['Entities']) empty_tag_list = nbt.TAG_List(nbt.TAG_Byte, '', 'Entities') @@ -1503,13 +1827,32 @@ def delete_entities(region_file, x, z): def _get_local_chunk_coords(chunkx, chunkz): - """ Takes the chunk global coords and returns the local coords """ + """ Gives the chunk local coordinates from the global coordinates. + + Keyword arguments: + - chunkx -- X chunk global coordinate in the world. + - chunkz -- Z chunk global coordinate in the world. + + Return: + - x, z -- X and Z local coordinates of the chunk in the region file. + + """ + return chunkx % 32, chunkz % 32 def get_chunk_region(chunkX, chunkZ): - """ Returns the name of the region file given global chunk - coords """ + """ Returns the name of the region file given global chunk coordinates. + + Keyword arguments: + - chunkx -- X chunk global coordinate in the world. + - chunkz -- Z chunk global coordinate in the world. + + Return: + - region_name -- A string with the name of the region file where the chunk + should be. + + """ regionX = chunkX // 32 regionZ = chunkZ // 32 @@ -1520,10 +1863,18 @@ def get_chunk_region(chunkX, chunkZ): def get_chunk_data_coords(nbt_file): - """ Gets the coords stored in the NBT structure of the chunk. 
+ """ Gets and returns the coordinates stored in the NBT structure of the chunk. + + Keyword arguments: + - nbt_file -- An NBT file. + + Return: + - coordX, coordZ -- Integers with the X and Z global coordinates of the chunk. - Takes an nbt obj and returns the coords as integers. - Don't confuse with get_global_chunk_coords! """ + Do not confuse with the coordinates returned by get_global_coords, which could be different, + marking this chunk as wrong_located. + + """ level = nbt_file.__getitem__('Level') @@ -1534,8 +1885,15 @@ def get_chunk_data_coords(nbt_file): def get_region_coords(filename): - """ Splits the region filename (full pathname or just filename) - and returns his region X and Z coordinates as integers. """ + """ Get and return a region file coordinates from path. + + Keyword arguments: + - filename -- Filename or path of the region file. + + Return: + - coordX, coordZ -- X and z coordinates of the region file. + + """ l = filename.split('.') coordX = int(l[1]) @@ -1545,10 +1903,18 @@ def get_region_coords(filename): def get_global_chunk_coords(region_name, chunkX, chunkZ): - """ Takes the region filename and the chunk local - coords and returns the global chunkcoords as integerss. This - version does exactly the same as the method in - ScannedRegionFile. """ + """ Get and return a region file coordinates from path. + + Keyword arguments: + - region_name -- Filename or path of the region file. + - chunkX -- X local coordinate of the chunk + - chunkZ -- Z local coordinate of the chunk + + Return: + - coordX, coordZ -- X and z global coordinates of the + chunk in that region file. + + """ regionX, regionZ = get_region_coords(region_name) chunkX += regionX * 32 From c5dadf8979965424b0ce5558030e3349e18ab65c Mon Sep 17 00:00:00 2001 From: Alejandro Aguilera Date: Sat, 18 Jul 2020 12:37:04 +0200 Subject: [PATCH 107/151] Update the bare_console help. --- regionfixer.py | 9 ++++----- 1 file changed, 4 insertions(+), 5 deletions(-) diff --git a/regionfixer.py b/regionfixer.py index 51d5f98..69f7a4b 100644 --- a/regionfixer.py +++ b/regionfixer.py @@ -362,11 +362,10 @@ def main(): if is_bare_console(): print("") - print("Minecraft Region Fixer has a command line application and a GUI\n" - "(Graphic User Interface) and you have just double clicked the\n" - "command line interface. If you really want to run the command line\n" - "interface you have to use a command prompt (cmd.exe)\n\n" - "You can also run the GUI, double click regionfixer_gui.py instead!") + print("Minecraft Region Fixer is a command line application and \n" + "you have just double clicked it. If you really want to run \n" + "the command line interface you have to use a command prompt.\n" + "Run cmd.exe in the run window.\n\n") print("") getpass("Press enter to continue:") return RV_CRASH From fb22f95c0103c1e7f150e28692080a1c19f62ab2 Mon Sep 17 00:00:00 2001 From: Alejandro Aguilera Date: Sun, 19 Jul 2020 01:10:58 +0200 Subject: [PATCH 108/151] Implement a way to recover some corrupted chunks. Tidy up some code. 
--- regionfixer.py | 28 ++++++++++++++--------- regionfixer_core/world.py | 47 +++++++++++++++++++++++++++++++-------- 2 files changed, 56 insertions(+), 19 deletions(-) diff --git a/regionfixer.py b/regionfixer.py index 69f7a4b..ccdd779 100644 --- a/regionfixer.py +++ b/regionfixer.py @@ -26,16 +26,15 @@ from multiprocessing import freeze_support import sys -from regionfixer_core import world + from regionfixer_core.scan import (console_scan_world, console_scan_regionset, ChildProcessException) from regionfixer_core.interactive import InteractiveLoop +from regionfixer_core.bug_reporter import BugReporter from regionfixer_core.util import entitle, is_bare_console -from regionfixer_core.world import parse_paths, parse_backup_list from regionfixer_core.version import version_string -from regionfixer_core.bug_reporter import BugReporter -from regionfixer_core.world import CHUNK_MISSING_ENTITIES_TAG +from regionfixer_core import world ################ # Return values @@ -62,11 +61,12 @@ def fix_bad_chunks(options, scanned_obj): """ print("") - total = scanned_obj.count_chunks(CHUNK_MISSING_ENTITIES_TAG) - problem = CHUNK_MISSING_ENTITIES_TAG - status = world.CHUNK_STATUS_TEXT[CHUNK_MISSING_ENTITIES_TAG] + total = scanned_obj.count_chunks(world.CHUNK_MISSING_ENTITIES_TAG) + problem = world.CHUNK_MISSING_ENTITIES_TAG + status = world.CHUNK_STATUS_TEXT[world.CHUNK_MISSING_ENTITIES_TAG] # In the same order as in FIXABLE_CHUNK_PROBLEMS - options_fix = [options.fix_missing_tag, + options_fix = [options.fix_corrupted, + options.fix_missing_tag, options.fix_wrong_located] fixing = list(zip(options_fix, world.FIXABLE_CHUNK_PROBLEMS)) for fix, problem in fixing: @@ -279,6 +279,14 @@ def main(): default=False, action='store_true') + parser.add_argument('--fix-corrupted', + '--fc', + help='Tries to fix chunks that are corrupted. This will try to decompress as much as possible from' + 'the data stream and see if the size is reasonable.', + dest='fix_corrupted', + default=False, + action='store_true') + parser.add_argument('--fix-missing-tag', '--fm', help='Fixes chunks that have the Entities tag missing. This will add the missing tag.', @@ -371,7 +379,7 @@ def main(): return RV_CRASH - world_list, regionset = parse_paths(args.paths) + world_list, regionset = world.parse_paths(args.paths) # Check if there are valid worlds to scan if not (world_list or regionset): @@ -422,7 +430,7 @@ def main(): # Do things with the option options args # Create a list of worlds containing the backups of the region files if args.backups: - backup_worlds = parse_backup_list(args.backups) + backup_worlds = world.parse_backup_list(args.backups) if not backup_worlds: print('[WARNING] No valid backup directories found, won\'t fix ' 'any chunk.') diff --git a/regionfixer_core/world.py b/regionfixer_core/world.py index 48e74b7..0035f09 100644 --- a/regionfixer_core/world.py +++ b/regionfixer_core/world.py @@ -21,18 +21,19 @@ # along with this program. If not, see . 
# -import nbt.region as region -import nbt.nbt as nbt -from .util import table - from glob import glob from os.path import join, split, exists, isfile from os import remove from shutil import copy - import time +import zlib + +import nbt.region as region +import nbt.nbt as nbt +from .util import table from nbt.nbt import TAG_List + # Constants: # # -------------- @@ -104,7 +105,7 @@ CHUNK_MISSING_ENTITIES_TAG: [CHUNK_SOLUTION_REMOVE, CHUNK_SOLUTION_REPLACE]} # chunk problems that can be fixed (so they don't need to be removed or replaced) -FIXABLE_CHUNK_PROBLEMS = [CHUNK_MISSING_ENTITIES_TAG, CHUNK_WRONG_LOCATED] +FIXABLE_CHUNK_PROBLEMS = [CHUNK_CORRUPTED, CHUNK_MISSING_ENTITIES_TAG, CHUNK_WRONG_LOCATED] # list with problem, status-text, problem arg tuples CHUNK_PROBLEMS_ITERATOR = [] @@ -543,7 +544,35 @@ def fix_problematic_chunks(self, problem): global_coords = c[0] local_coords = _get_local_chunk_coords(*global_coords) region_file = region.RegionFile(self.path) - chunk = region_file.get_chunk(*local_coords) + # catch the exception of corrupted chunks + try: + chunk = region_file.get_chunk(*local_coords) + except region.ChunkDataError: + # if we are here the chunk is corrupted, but still + if problem == CHUNK_CORRUPTED: + # read the data raw + m = region_file.metadata[local_coords[0], local_coords[1]] + region_file.file.seek(m.blockstart * region.SECTOR_LENGTH + 5) + raw_chunk = region_file.file.read(m.length - 1) + # decompress byte by byte so we can get as much as we can before the error happens + try: + dc = zlib.decompressobj() + out = "" + for c in raw_chunk: + out += dc.decompress(c) + except: + pass + # compare the sizes of the new compressed strem and the old one to see if we've got something good + cdata = zlib.compress(out.encode()) + if len(cdata) == len(raw_chunk): + # the chunk is probably good, write it in the region file + region_file.write_blockdata(local_coords[0], local_coords[1], out) + print("The chunk {0},{1} in region file {2} was fixed successfully.".format(local_coords[0], local_coords[1], self.filename)) + else: + print("The chunk {0},{1} in region file {2} couldn't be fixed.".format(local_coords[0], local_coords[1], self.filename)) + #print("Extracted: " + str(len(out))) + #print("Size of the compressed stream: " + str(len(raw_chunk))) + if problem == CHUNK_MISSING_ENTITIES_TAG: # The arguments to create the empty TAG_List have been somehow extracted by comparing # the tag list from a healthy chunk with the one created by nbt @@ -1039,10 +1068,10 @@ def fix_problematic_chunks(self, problem): counter = 0 if self.count_chunks(): dim_name = self._get_dimension_directory() - print(' Repairing chunks in region set \"{0}\":'.format(dim_name if dim_name else "selected region files")) + print('Repairing chunks in region set \"{0}\":'.format(dim_name if dim_name else "selected region files")) for r in list(self._set.keys()): counter += self._set[r].fix_problematic_chunks(problem) - print("Repaired {0} chunks in this regionset.\n".format(counter)) + print(" Repaired {0} chunks in this regionset.\n".format(counter)) return counter From 3f5878bfa412410683f1c3f68fac7ce4d8291ea1 Mon Sep 17 00:00:00 2001 From: Alejandro Aguilera Date: Sun, 19 Jul 2020 01:20:49 +0200 Subject: [PATCH 109/151] Fix not showing the proper name of regionsets. Move code arround. Typos. 
--- regionfixer_core/world.py | 28 ++++++++++++++-------------- 1 file changed, 14 insertions(+), 14 deletions(-) diff --git a/regionfixer_core/world.py b/regionfixer_core/world.py index 0035f09..9dbb545 100644 --- a/regionfixer_core/world.py +++ b/regionfixer_core/world.py @@ -860,16 +860,6 @@ def get_name(self): else: return "" - def _update_counts(self, scanned_regionfile): - """ Updates the counters of the regionset with the new regionfile. """ - - assert isinstance(scanned_regionfile, ScannedRegionFile) - - self._region_counters[scanned_regionfile.status] += 1 - - for status in CHUNK_STATUSES: - self._chunk_counters[status] += scanned_regionfile.count_chunks(status) - def _get_dimension_directory(self): """ Returns a string with the directory containing the RegionSet. @@ -887,6 +877,16 @@ def _get_dimension_directory(self): else: return None + def _update_counts(self, scanned_regionfile): + """ Updates the counters of the regionset with the new regionfile. """ + + assert isinstance(scanned_regionfile, ScannedRegionFile) + + self._region_counters[scanned_regionfile.status] += 1 + + for status in CHUNK_STATUSES: + self._chunk_counters[status] += scanned_regionfile.count_chunks(status) + def _replace_in_data_structure(self, data): self._set[data.get_coords()] = data @@ -1047,8 +1047,8 @@ def remove_problematic_chunks(self, problem): counter = 0 if self.count_chunks(): - dim_name = self._get_dimension_directory() - print(' Deleting chunks in region set \"{0}\":'.format(dim_name if dim_name else "selected region files")) + dim_name = self.get_name() + print(' Deleting chunks in regionset \"{0}\":'.format(dim_name if dim_name else "selected region files")) for r in list(self._set.keys()): counter += self._set[r].remove_problematic_chunks(problem) print("Removed {0} chunks in this regionset.\n".format(counter)) @@ -1067,8 +1067,8 @@ def fix_problematic_chunks(self, problem): counter = 0 if self.count_chunks(): - dim_name = self._get_dimension_directory() - print('Repairing chunks in region set \"{0}\":'.format(dim_name if dim_name else "selected region files")) + dim_name = self.get_name() + print('Repairing chunks in regionset \"{0}\":'.format(dim_name if dim_name else "selected region files")) for r in list(self._set.keys()): counter += self._set[r].fix_problematic_chunks(problem) print(" Repaired {0} chunks in this regionset.\n".format(counter)) From 84266c434e9f913872ae5f5d224e3e5da3e7f579 Mon Sep 17 00:00:00 2001 From: Alejandro Aguilera Date: Sun, 19 Jul 2020 01:22:00 +0200 Subject: [PATCH 110/151] Remove unused __init__.py file --- regionfixer_core/__init__.py | 2 -- 1 file changed, 2 deletions(-) delete mode 100644 regionfixer_core/__init__.py diff --git a/regionfixer_core/__init__.py b/regionfixer_core/__init__.py deleted file mode 100644 index a5682fb..0000000 --- a/regionfixer_core/__init__.py +++ /dev/null @@ -1,2 +0,0 @@ -#!/usr/bin/env python -# -*- coding: utf-8 -*- \ No newline at end of file From 5b1f1e121507f5844570876066f255479bd49739 Mon Sep 17 00:00:00 2001 From: Alejandro Aguilera Date: Sun, 19 Jul 2020 01:22:26 +0200 Subject: [PATCH 111/151] Bump version number. 
--- regionfixer_core/version.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/regionfixer_core/version.py b/regionfixer_core/version.py index f6e7a07..352d151 100644 --- a/regionfixer_core/version.py +++ b/regionfixer_core/version.py @@ -4,5 +4,5 @@ @author: Alejandro ''' -version_string = "0.3.2" +version_string = "0.3.3" version_numbers = version_string.split('.') From 2cd6011da35dffdd854baa4b47d8e66223505f6a Mon Sep 17 00:00:00 2001 From: Alejandro Aguilera Date: Sun, 19 Jul 2020 01:30:49 +0200 Subject: [PATCH 112/151] Tidy up imports in scan.py. --- regionfixer_core/scan.py | 10 +++------- 1 file changed, 3 insertions(+), 7 deletions(-) diff --git a/regionfixer_core/scan.py b/regionfixer_core/scan.py index bebf9fd..1dc0c1d 100644 --- a/regionfixer_core/scan.py +++ b/regionfixer_core/scan.py @@ -25,7 +25,6 @@ import sys import logging import multiprocessing -from multiprocessing import SimpleQueue from os.path import split, abspath from time import sleep, time from copy import copy @@ -38,15 +37,12 @@ ChunkHeaderError, RegionHeaderError, InconceivedChunk) + from progressbar import ProgressBar, Bar, AdaptiveETA, SimpleProgress -from . import world +from regionfixer_core import world from regionfixer_core.util import entitle -from regionfixer_core.world import DATAFILE_OK -# ~ TUPLE_COORDS = 0 -# ~ TUPLE_DATA_COORDS = 0 -# ~ TUPLE_GLOBAL_COORDS = 2 TUPLE_NUM_ENTITIES = 0 TUPLE_STATUS = 1 @@ -184,7 +180,7 @@ def __init__(self, data_structure, processes, scan_function, init_args, self.scan_function = scan_function # Queue used by processes to pass results - self.queue = SimpleQueue() + self.queue = multiprocessing.SimpleQueue() init_args.update({'queue': self.queue}) # NOTE TO SELF: initargs doesn't handle kwargs, only args! # Pass a dict with all the args From b30729e802e4584532d3d95e5c6d02566ef6b033 Mon Sep 17 00:00:00 2001 From: Alejandro Aguilera Date: Sun, 19 Jul 2020 12:18:21 +0200 Subject: [PATCH 113/151] Change variable names from problem to status. --- regionfixer_core/world.py | 102 +++++++++++++++++++------------------- 1 file changed, 51 insertions(+), 51 deletions(-) diff --git a/regionfixer_core/world.py b/regionfixer_core/world.py index 9dbb545..587c108 100644 --- a/regionfixer_core/world.py +++ b/regionfixer_core/world.py @@ -409,23 +409,23 @@ def get_path(self): return self.path - def count_chunks(self, problem=None): + def count_chunks(self, status=None): """ Counts chunks in the region file with the given problem. Keyword arguments: - - problem -- This is the status of the chunk to count for. See CHUNK_PROBLEMS + - status -- This is the status of the chunk to count for. See CHUNK_PROBLEMS If problem is omitted or None, counts all the chunks. Returns an integer with the counter. """ - if problem == None: + if status == None: c = 0 for s in CHUNK_STATUSES: c += self._counts[s] else: - c = self._counts[problem] + c = self._counts[status] return c @@ -490,11 +490,11 @@ def summary(self): return text - def remove_problematic_chunks(self, problem): - """ Removes all the chunks with the given problem + def remove_problematic_chunks(self, status): + """ Removes all the chunks with the given status Keyword arguments: - - problem -- Status of the chunks to remove. See CHUNK_STATUSES. + - status -- Status of the chunks to remove. See CHUNK_STATUSES. Return: - counter -- An integer with the amount of removed chunks. 
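PATCH 112 above builds the results queue with multiprocessing.SimpleQueue() inside AsyncScanner and hands it to the pool workers through init_args; the "NOTE TO SELF" in that hunk is about the same constraint, namely that Pool initargs are positional-only. The stripped-down sketch below (function names are hypothetical) shows the pattern in isolation: the queue is created in the parent, delivered once per worker by the initializer, and then used by the task function to stream results back.

import multiprocessing

def _init_worker(shared):
    # Runs once in every child process: stash the queue on the task function
    # so it does not have to travel with every single work item.
    _scan_one.queue = shared['queue']

def _scan_one(item):
    result = item * item            # stand-in for scanning one region file
    _scan_one.queue.put(result)     # stream the result back to the parent

if __name__ == '__main__':
    queue = multiprocessing.SimpleQueue()
    pool = multiprocessing.Pool(processes=2,
                                initializer=_init_worker,
                                initargs=({'queue': queue},))
    async_result = pool.map_async(_scan_one, range(8))
    results = [queue.get() for _ in range(8)]   # blocks until all eight results arrive
    async_result.wait()                         # make sure every worker task has finished
    pool.close()
    pool.join()
    print(sorted(results))

Passing the queue through the initializer matters: multiprocessing queues are meant to be shared by inheritance at process start-up, so handing one over as an ordinary task argument to pool.map() is rejected at runtime.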
@@ -502,7 +502,7 @@ def remove_problematic_chunks(self, problem): """ counter = 0 - bad_chunks = self.list_chunks(problem) + bad_chunks = self.list_chunks(status) for c in bad_chunks: global_coords = c[0] local_coords = _get_local_chunk_coords(*global_coords) @@ -515,11 +515,11 @@ def remove_problematic_chunks(self, problem): return counter - def fix_problematic_chunks(self, problem): + def fix_problematic_chunks(self, status): """ This fixes problems in chunks that can be somehow fixed. Keyword arguments: - - problem -- Status of the chunks to fix. See FIXABLE_CHUNK_PROBLEMS + - status -- Status of the chunks to fix. See FIXABLE_CHUNK_PROBLEMS Return: - counter -- An integer with the amount of fixed chunks. @@ -537,9 +537,9 @@ def fix_problematic_chunks(self, problem): # chunks have like 3 or 4 tag missing from the NBT structure. I don't really know which # of them are mandatory. - assert(problem in FIXABLE_CHUNK_PROBLEMS) + assert(status in FIXABLE_CHUNK_PROBLEMS) counter = 0 - bad_chunks = self.list_chunks(problem) + bad_chunks = self.list_chunks(status) for c in bad_chunks: global_coords = c[0] local_coords = _get_local_chunk_coords(*global_coords) @@ -549,7 +549,7 @@ def fix_problematic_chunks(self, problem): chunk = region_file.get_chunk(*local_coords) except region.ChunkDataError: # if we are here the chunk is corrupted, but still - if problem == CHUNK_CORRUPTED: + if status == CHUNK_CORRUPTED: # read the data raw m = region_file.metadata[local_coords[0], local_coords[1]] region_file.file.seek(m.blockstart * region.SECTOR_LENGTH + 5) @@ -573,7 +573,7 @@ def fix_problematic_chunks(self, problem): #print("Extracted: " + str(len(out))) #print("Size of the compressed stream: " + str(len(raw_chunk))) - if problem == CHUNK_MISSING_ENTITIES_TAG: + if status == CHUNK_MISSING_ENTITIES_TAG: # The arguments to create the empty TAG_List have been somehow extracted by comparing # the tag list from a healthy chunk with the one created by nbt chunk['Level']['Entities'] = TAG_List(name='Entities', type=nbt._TAG_End) @@ -584,7 +584,7 @@ def fix_problematic_chunks(self, problem): self[local_coords] = (0 , CHUNK_NOT_CREATED) counter += 1 - elif problem == CHUNK_WRONG_LOCATED: + elif status == CHUNK_WRONG_LOCATED: data_coords = get_chunk_data_coords(chunk) data_l_coords = _get_local_chunk_coords(*data_coords) region_file.write_chunk(data_l_coords[0], data_l_coords[1], chunk) @@ -609,9 +609,9 @@ def remove_entities(self): """ - problem = CHUNK_TOO_MANY_ENTITIES + status = CHUNK_TOO_MANY_ENTITIES counter = 0 - bad_chunks = self.list_chunks(problem) + bad_chunks = self.list_chunks(status) for c in bad_chunks: global_coords = c[0] local_coords = _get_local_chunk_coords(*global_coords) @@ -952,21 +952,21 @@ def count_regions(self, status=None): return counter - def count_chunks(self, problem=None): - """ Returns the number of chunks with the given problem. + def count_chunks(self, status=None): + """ Returns the number of chunks with the given status. - status -- The chunk status to count. See CHUNK_STATUSES - If problem is None returns the number of chunks in this region file. + If status is None returns the number of chunks in this region file. 
""" c = 0 - if problem is None: + if status is None: for s in CHUNK_STATUSES: c += self._chunk_counters[s] else: - c = self._chunk_counters[problem] + c = self._chunk_counters[status] return c @@ -1035,11 +1035,11 @@ def locate_region(self, coords): return region_name - def remove_problematic_chunks(self, problem): - """ Removes all the chunks with the given problem. + def remove_problematic_chunks(self, status): + """ Removes all the chunks with the given status. Keyword arguments: - - problem -- The chunk status to remove. See CHUNK_STATUSES for a list of possible statuses. + - status -- The chunk status to remove. See CHUNK_STATUSES for a list of possible statuses. Return: - counter -- Integer with the number of chunks removed @@ -1050,16 +1050,16 @@ def remove_problematic_chunks(self, problem): dim_name = self.get_name() print(' Deleting chunks in regionset \"{0}\":'.format(dim_name if dim_name else "selected region files")) for r in list(self._set.keys()): - counter += self._set[r].remove_problematic_chunks(problem) + counter += self._set[r].remove_problematic_chunks(status) print("Removed {0} chunks in this regionset.\n".format(counter)) return counter - def fix_problematic_chunks(self, problem): + def fix_problematic_chunks(self, status): """ Try to fix all the chunks with the given problem. Keyword arguments: - - problem -- The chunk status to fix. See CHUNK_STATUSES for a list of possible statuses. + - status -- The chunk status to fix. See CHUNK_STATUSES for a list of possible statuses. Return: - counter -- Integer with the number of chunks fixed. @@ -1070,7 +1070,7 @@ def fix_problematic_chunks(self, problem): dim_name = self.get_name() print('Repairing chunks in regionset \"{0}\":'.format(dim_name if dim_name else "selected region files")) for r in list(self._set.keys()): - counter += self._set[r].fix_problematic_chunks(problem) + counter += self._set[r].fix_problematic_chunks(status) print(" Repaired {0} chunks in this regionset.\n".format(counter)) return counter @@ -1167,11 +1167,11 @@ def generate_report(self, standalone): else: return chunk_counts, region_counts - def remove_problematic_regions(self, problem): - """ Removes all the regions files with the given problem. See the warning! + def remove_problematic_regions(self, status): + """ Removes all the regions files with the given status. See the warning! Keyword arguments: - - problem -- Status of the region files to remove. See REGION_STATUSES for a list. + - status -- Status of the region files to remove. See REGION_STATUSES for a list. Return: - counter -- An integer with the amount of removed region files. @@ -1181,7 +1181,7 @@ def remove_problematic_regions(self, problem): """ counter = 0 - for r in self.list_regions(problem): + for r in self.list_regions(status): remove(r.get_path()) counter += 1 return counter @@ -1400,13 +1400,13 @@ def count_chunks(self, status=None): counter += count return counter - def replace_problematic_chunks(self, backup_worlds, problem, entity_limit, delete_entities): + def replace_problematic_chunks(self, backup_worlds, status, entity_limit, delete_entities): """ Replaces problematic chunks using backups. Keyword arguments: - backup_worlds -- A list of World objects to use as backups. Backup worlds will be used in a ordered way. - - problem -- An integer indicating the status of chunks to be replaced. + - status -- An integer indicating the status of chunks to be replaced. See CHUNK_STATUSES for a complete list. 
- entity_limit -- The threshold to consider a chunk with the status TOO_MANY_ENTITIES. - delete_entities -- Boolean indicating if the chunks with too_many_entities should have @@ -1429,7 +1429,7 @@ def replace_problematic_chunks(self, backup_worlds, problem, entity_limit, delet # this don't need to be aware of region status, it just # iterates the list returned by list_chunks() - bad_chunks = regionset.list_chunks(problem) + bad_chunks = regionset.list_chunks(status) if bad_chunks and b_regionset._get_dimension_directory() != regionset._get_dimension_directory(): print("The regionset \'{0}\' doesn't exist in the backup directory. Skipping this backup directory.".format(regionset._get_dimension_directory())) @@ -1491,11 +1491,11 @@ def replace_problematic_chunks(self, backup_worlds, problem, entity_limit, delet return counter - def remove_problematic_chunks(self, problem): - """ Removes all the chunks with the given problem. + def remove_problematic_chunks(self, status): + """ Removes all the chunks with the given status. Keyword arguments: - - problem -- The chunk status to remove. See CHUNK_STATUSES for a list of possible statuses. + - status -- The chunk status to remove. See CHUNK_STATUSES for a list of possible statuses. Return: - counter -- Integer with the number of chunks removed @@ -1506,14 +1506,14 @@ def remove_problematic_chunks(self, problem): counter = 0 for regionset in self.regionsets: - counter += regionset.remove_problematic_chunks(problem) + counter += regionset.remove_problematic_chunks(status) return counter - def fix_problematic_chunks(self, problem): - """ Try to fix all the chunks with the given problem. + def fix_problematic_chunks(self, status): + """ Try to fix all the chunks with the given status. Keyword arguments: - - problem -- The chunk status to fix. See CHUNK_STATUSES for a list of possible statuses. + - status -- The chunk status to fix. See CHUNK_STATUSES for a list of possible statuses. Return: - counter -- Integer with the number of chunks fixed. @@ -1524,16 +1524,16 @@ def fix_problematic_chunks(self, problem): counter = 0 for regionset in self.regionsets: - counter += regionset.fix_problematic_chunks(problem) + counter += regionset.fix_problematic_chunks(status) return counter - def replace_problematic_regions(self, backup_worlds, problem, entity_limit, delete_entities): + def replace_problematic_regions(self, backup_worlds, status, entity_limit, delete_entities): """ Replaces problematic region files using backups. Keyword arguments: - backup_worlds -- A list of World objects to use as backups. Backup worlds will be used in a ordered way. - - problem -- An integer indicating the status of region files to be replaced. + - status -- An integer indicating the status of region files to be replaced. See REGION_STATUSES for a complete list. - entity_limit -- The threshold to consider a chunk with the status TOO_MANY_ENTITIES. - delete_entities -- Boolean indicating if the chunks with too_many_entities should have @@ -1556,7 +1556,7 @@ def replace_problematic_regions(self, backup_worlds, problem, entity_limit, dele b_regionset = temp_regionset break - bad_regions = regionset.list_regions(problem) + bad_regions = regionset.list_regions(status) if bad_regions and b_regionset._get_dimension_directory() != regionset._get_dimension_directory(): print("The regionset \'{0}\' doesn't exist in the backup directory. 
Skipping this backup directory.".format(regionset._get_dimension_directory())) else: @@ -1590,11 +1590,11 @@ def replace_problematic_regions(self, backup_worlds, problem, entity_limit, dele return counter - def remove_problematic_regions(self, problem): - """ Removes all the regions files with the given problem. See the warning! + def remove_problematic_regions(self, status): + """ Removes all the regions files with the given status. See the warning! Keyword arguments: - - problem -- Status of the region files to remove. See REGION_STATUSES for a list. + - status -- Status of the region files to remove. See REGION_STATUSES for a list. Return: - counter -- An integer with the amount of removed region files. @@ -1606,7 +1606,7 @@ def remove_problematic_regions(self, problem): counter = 0 for regionset in self.regionsets: - counter += regionset.remove_problematic_regions(problem) + counter += regionset.remove_problematic_regions(status) return counter def remove_entities(self): From 930b2aa9f688fdba8367fe5a00a27aec502dd381 Mon Sep 17 00:00:00 2001 From: Alejandro Aguilera Date: Mon, 20 Jul 2020 00:22:25 +0200 Subject: [PATCH 114/151] Improve comments in scan.py. Remove constants and get them from world.py --- regionfixer_core/scan.py | 261 +++++++++++++++++++++++++++++---------- 1 file changed, 195 insertions(+), 66 deletions(-) diff --git a/regionfixer_core/scan.py b/regionfixer_core/scan.py index 1dc0c1d..9db5df1 100644 --- a/regionfixer_core/scan.py +++ b/regionfixer_core/scan.py @@ -43,8 +43,6 @@ from regionfixer_core import world from regionfixer_core.util import entitle -TUPLE_NUM_ENTITIES = 0 -TUPLE_STATUS = 1 logging.basicConfig(filename=None, level=logging.CRITICAL) @@ -52,10 +50,18 @@ class ChildProcessException(Exception): """ Raised when a child process has problems. - Stores all the info given by sys.exc_info() and the - scanned file object which is probably partially filled. + Keyword arguments: + - partial_scanned_file -- ScannedObject from world.py partially filled with + the results of the scan + - exc_type -- Type of the exception being handled, extracted from sys.exc_info() + - exc_class -- The exception instance, extracted from sys.exc_info() + - tb_text -- The traceback text, extracted from traceback object from sys.exc_info() + + Stores all the info given by sys.exc_info() and the scanned file object which is + probably partially filled. + """ - + #TODO: not sure about the tb_text argument is that. def __init__(self, partial_scanned_file, exc_type, exc_class, tb_text): self.scanned_file = partial_scanned_file self.exc_type = exc_type @@ -66,9 +72,16 @@ def __init__(self, partial_scanned_file, exc_type, exc_class, tb_text): def printable_traceback(self): """ Returns a nice printable traceback. - It uses a lot of asteriks to ensure it doesn't mix with + This traceback reports: + - The file that was being scanned + - The type and class of exception + - The text of the traceback + + It uses a lot of asteriks as indentation to ensure it doesn't mix with the main process traceback. + """ + text = "" scanned_file = self.scanned_file text += "*" * 10 + "\n" @@ -85,7 +98,16 @@ def printable_traceback(self): return text def save_error_log(self, filename='error.log'): - """ Save the error in filename, return the absolute path of saved file. """ + """ Save the error in filename, return the path. + + Keyword argument: + - filename -- Name of the file to write the error log. + + Return: + - error_log_path -- Path where the error log was saved. 
+ + """ + f = open(filename, 'w') error_log_path = abspath(f.name) filename = self.scanned_file.filename @@ -130,11 +152,16 @@ def multiprocess_scan_regionfile(region_file): def _mp_data_pool_init(d): - """ Function to initialize the multiprocessing in scan_regionset. - Is used to pass values to the child process. + """ Function to initialize the multiprocessing in scan_dataset. + + Keyword arguments: + - d -- Dictionary containing the information to copy to the function of the child process. + + This function adds the queue to each of the child processes objects. This queue + is used to get the results from the child process. - Requiere to pass the multiprocessing queue as argument. """ + assert isinstance(d, dict) assert 'queue' in d multiprocess_scan_data.q = d['queue'] @@ -142,7 +169,15 @@ def _mp_data_pool_init(d): def _mp_regionset_pool_init(d): """ Function to initialize the multiprocessing in scan_regionset. - Is used to pass values to the child process. """ + + Keyword arguments: + - d -- Dictionary containing the information to copy to the function of the child process. + + This function adds the queue to each of the child processes objects. This queue + is used to get the results from the child process. + + """ + assert isinstance(d, dict) assert 'regionset' in d assert 'queue' in d @@ -157,22 +192,24 @@ def _mp_regionset_pool_init(d): class AsyncScanner: """ Class to derive all the scanner classes from. + Keyword arguments: + - data_structure -- Is one of the objects in world: DataSet, RegionSet + - processes -- Integer with the number of child processes to use for the scan + - scan_function -- Function used to scan the data + - init_args -- These are the initialization arguments passed to __init__ + - _mp_init_function -- Function used to initialize the child processes + To implement a scanner you have to override: update_str_last_scanned() - Use try-finally to call terminate, if not processes will be - hanging in the background - """ + + It's imperative to use try-finally to call terminate at the end of the run, + if not processes will be hanging in the background for all eternity. + + """ def __init__(self, data_structure, processes, scan_function, init_args, _mp_init_function): - """ Init the scanner. - - data_structure is a world.DataSet - processes is the number of child processes to use - scan_function is the function to use for scanning - init_args are the arguments passed to the init function - _mp_init_function is the function used to init the child processes - """ + """ Init the scanner """ assert isinstance(data_structure, world.DataSet) self.data_structure = data_structure self.list_files_to_scan = data_structure._get_list() @@ -250,8 +287,14 @@ def terminate(self): self.pool.terminate() def raise_child_exception(self, exception_tuple): - """ Raises a ChildProcessException using the info - contained in the tuple returned by the child process. """ + """ Raises a ChildProcessException. + + Keyword arguments: + - exception_tuple -- Tuple containing all the information about the exception + of the child process. + + """ + e = exception_tuple raise ChildProcessException(e[0], e[1][0], e[1][1], e[1][2]) @@ -262,9 +305,11 @@ def update_str_last_scanned(self): def sleep(self): """ Sleep waiting for results. - This method will sleep less when results arrive faster and - more when they arrive slower. + This method will adjust automatically the sleep time. It will sleep less + when results arrive faster and more when they arrive slower. 
+ """ + # If the query number is outside of our range... if not ((self.queries_without_results < self.MAX_QUERY_NUM) & (self.queries_without_results > self.MIN_QUERY_NUM)): @@ -293,13 +338,18 @@ def sleep(self): @property def str_last_scanned(self): - """ A friendly string with last scanned thing. """ + """ A friendly string with last scanned result. """ return self._str_last_scanned if self._str_last_scanned \ else "Scanning..." @property def finished(self): - """ Finished the operation. The queue could have elements """ + """ Return True if the scan has finished. + + It checks if the queue is empty and if the results are ready. + + """ + return self._results.ready() and self.queue.empty() @property @@ -310,9 +360,11 @@ def results(self): but also the most sloppy. If you want to closely control the scan process (for example cancel the process in the middle, whatever is happening) use get_last_result(). - + + Usage: for result in scanner.results: # do things + """ q = self.queue @@ -332,7 +384,13 @@ def __len__(self): class AsyncDataScanner(AsyncScanner): - """ Scan a DataFileSet and fill the data structure. """ + """ Scan a DataFileSet and fill the data structure. + + Keyword arguments: + - data_structure -- A DataFileSet from world.py containing the files to scan + - processes -- An integer with the number of child processes to use + + """ def __init__(self, data_structure, processes): scan_function = multiprocess_scan_data @@ -350,7 +408,18 @@ def update_str_last_scanned(self, data): class AsyncRegionsetScanner(AsyncScanner): - """ Scan a RegionSet and fill the data structure. """ + """ Scan a RegionSet and fill the data structure. + + Keyword arguments: + - data_structure -- A RegionSet from world.py containing the files to scan + - processes -- An integer with the number of child processes to use + - entity_limit -- An integer, threshold of entities for a chunk to be considered + with too many entities + - remove_entities -- A boolean, defaults to False, to remove the entities whilel + scanning. This is really handy because opening chunks with + too many entities for scanning can take minutes. + + """ def __init__(self, regionset, processes, entity_limit, remove_entities=False): @@ -376,8 +445,22 @@ def update_str_last_scanned(self, r): class AsyncWorldRegionScanner: - """ Wrapper around the calls of AsyncScanner to scan all the - regionsets of a world. """ + """ Wrapper around the calls of AsyncScanner the whole world. + + Keyword arguments: + - world_obj -- A World object from world.py + - processes -- An integer with the number of child processes to use + - entity_limit -- An integer, threshold of entities for a chunk to be considered + with too many entities + - remove_entities -- A boolean, defaults to False, to remove the entities while + scanning. This is really handy because opening chunks with + too many entities for scanning can take minutes. + + This class is just a wrapper around AsyncRegionsetScanner to scan all the region sets + of the world. + + + """ def __init__(self, world_obj, processes, entity_limit, remove_entities=False): @@ -399,12 +482,15 @@ def sleep(self): """ Sleep waiting for results. This method will sleep less when results arrive faster and - more when they arrive slower. + more when they arrive slower. See AsyncScanner.sleep(). + """ + self._current_regionset.sleep() def scan(self): """ Scan and fill the given regionset. 
""" + cr = AsyncRegionsetScanner(self.regionsets.pop(0), self.processes, self.entity_limit, @@ -424,7 +510,9 @@ def get_last_result(self): This method is better if you want to closely control the scan process. + """ + cr = self._current_regionset if cr is not None: @@ -441,6 +529,8 @@ def get_last_result(self): return None def terminate(self): + """ Terminates scan of the current RegionSet. """ + self._current_regionset.terminate() @property @@ -450,11 +540,18 @@ def str_last_scanned(self): @property def current_regionset(self): + """ Returns the current RegionSet being scanned. """ + return self._current_regionset.regionset @property def finished(self): - """ Finished the operation. The queue could have elements """ + """ Return True if the scan has finished. + + It checks if the queue is empty and if the results are ready. + + """ + return not self.regionsets and self._current_regionset.finished @property @@ -470,8 +567,7 @@ def results(self): scan process (for example cancel the process in the middle, whatever is happening) use get_last_result(). - Example using this method: - + Usage: for result in scanner.results: # do things """ @@ -492,8 +588,16 @@ def __len__(self): def console_scan_loop(scanners, scan_titles, verbose): - """ Uses all the AsyncScanner passed to scan the files and - print status text to the terminal. """ + """ Scan all the AsyncScanner object printing status to console. + + Keyword arguments: + - scanners -- List of AsyncScanner objects to scan. + - scan_titles -- List of string with the names of the world/regionsets in the same + order as in scanners. + - verbose -- Boolean, if true it will print a line per scanned region file. + + """ + try: for scanner, title in zip(scanners, scan_titles): print("\n{0:-^60}".format(title)) @@ -537,7 +641,16 @@ def console_scan_world(world_obj, processes, entity_limit, remove_entities, verbose): """ Scans a world folder prints status to console. - It will scan region files and data files (includes players). + Keyword arguments: + - world_obj -- World object from world.py that will be scanned + - processes -- An integer with the number of child processes to use + - entity_limit -- An integer, threshold of entities for a chunk to be considered + with too many entities + - remove_entities -- A boolean, defaults to False, to remove the entities whilel + scanning. This is really handy because opening chunks with + too many entities for scanning can take minutes. + - verbose -- Boolean, if true it will print a line per scanned region file. + """ # Time to wait between asking for results. Note that if the time is too big @@ -580,11 +693,19 @@ def console_scan_world(world_obj, processes, entity_limit, remove_entities, w.scanned = True -def console_scan_regionset(regionset, processes, entity_limit, - remove_entities, verbose): +def console_scan_regionset(regionset, processes, entity_limit, remove_entities, verbose): """ Scan a regionset printing status to console. - Uses AsyncRegionsetScanner. + Keyword arguments: + - regionset -- RegionSet object from world.py that will be scanned + - processes -- An integer with the number of child processes to use + - entity_limit -- An integer, threshold of entities for a chunk to be considered + with too many entities + - remove_entities -- A boolean, defaults to False, to remove the entities whilel + scanning. This is really handy because opening chunks with + too many entities for scanning can take minutes. + - verbose -- Boolean, if true it will print a line per scanned region file. 
+ """ rs = AsyncRegionsetScanner(regionset, processes, entity_limit, @@ -598,6 +719,9 @@ def console_scan_regionset(regionset, processes, entity_limit, def scan_data(scanned_dat_file): """ Try to parse the nbt data file, and fill the scanned object. + Keyword arguments: + - scanned_dat_file -- ScannedDataFile object from world.py. + If something is wrong it will return a tuple with useful info to debug the problem. @@ -605,6 +729,7 @@ def scan_data(scanned_dat_file): is not compressed, we handle the special case here. """ + s = scanned_dat_file try: if s.filename == 'idcounts.dat': @@ -634,15 +759,19 @@ def scan_data(scanned_dat_file): return s -def scan_region_file(scanned_regionfile_obj, entity_limit, delete_entities): - """ Scan a region file filling the ScannedRegionFile +def scan_region_file(scanned_regionfile_obj, entity_limit, remove_entities): + """ Scan a region file filling the ScannedRegionFile object - If delete_entities is True it will delete entities while - scanning + Keyword arguments: + - scanned_regionfile_obj -- ScannedRegionfile object from world.py that will be scanned + - entity_limit -- An integer, threshold of entities for a chunk to be considered + with too many entities + - remove_entities -- A boolean, defaults to False, to remove the entities while + scanning. This is really handy because opening chunks with + too many entities for scanning can take minutes. - entiti_limit is the threshold of entities to consider a chunk - with too much entities problems. """ + try: r = scanned_regionfile_obj @@ -655,13 +784,13 @@ def scan_region_file(scanned_regionfile_obj, entity_limit, delete_entities): r.scanned = True return r - except PermissionError as e: + except PermissionError: r.status = world.REGION_UNREADABLE_PERMISSION_ERROR r.scan_time = time() r.scanned = True return r - except IOError as e: + except IOError: r.status = world.REGION_UNREADABLE r.scan_time = time() r.scanned = True @@ -681,17 +810,17 @@ def scan_region_file(scanned_regionfile_obj, entity_limit, delete_entities): # chunk not created continue - if c[TUPLE_STATUS] == world.CHUNK_OK: + if c[world.TUPLE_STATUS] == world.CHUNK_OK: continue - elif c[TUPLE_STATUS] == world.CHUNK_TOO_MANY_ENTITIES: + elif c[world.TUPLE_STATUS] == world.CHUNK_TOO_MANY_ENTITIES: # Deleting entities is in here because parsing a chunk # with thousands of wrong entities takes a long time, # and sometimes GiB of RAM, and once detected is better # to fix it at once. 
- if delete_entities: + if remove_entities: world.delete_entities(region_file, x, z) print(("Deleted {0} entities in chunk" - " ({1},{2}) of the region file: {3}").format(c[TUPLE_NUM_ENTITIES], x, z, r.filename)) + " ({1},{2}) of the region file: {3}").format(c[world.TUPLE_NUM_ENTITIES], x, z, r.filename)) # entities removed, change chunk status to OK r[(x, z)] = (0, world.CHUNK_OK) @@ -703,9 +832,9 @@ def scan_region_file(scanned_regionfile_obj, entity_limit, delete_entities): # ~ archivo = open(name,'w') # ~ archivo.write(pretty_tree) pass - elif c[TUPLE_STATUS] == world.CHUNK_CORRUPTED: + elif c[world.TUPLE_STATUS] == world.CHUNK_CORRUPTED: pass - elif c[TUPLE_STATUS] == world.CHUNK_WRONG_LOCATED: + elif c[world.TUPLE_STATUS] == world.CHUNK_WRONG_LOCATED: pass # Now check for chunks sharing offsets: @@ -720,10 +849,10 @@ def scan_region_file(scanned_regionfile_obj, entity_limit, delete_entities): metadata = region_file.metadata sharing = [k for k in metadata if (metadata[k].status == region.STATUS_CHUNK_OVERLAPPING and - r[k][TUPLE_STATUS] == world.CHUNK_WRONG_LOCATED)] + r[k][world.TUPLE_STATUS] == world.CHUNK_WRONG_LOCATED)] shared_counter = 0 for k in sharing: - r[k] = (r[k][TUPLE_NUM_ENTITIES], world.CHUNK_SHARED_OFFSET) + r[k] = (r[k][world.TUPLE_NUM_ENTITIES], world.CHUNK_SHARED_OFFSET) shared_counter += 1 r.scan_time = time() @@ -781,14 +910,14 @@ def scan_chunk(region_file, coords, global_coords, entity_limit): # chunk ok status = world.CHUNK_OK - except InconceivedChunk as e: + except InconceivedChunk: # chunk not created chunk = None data_coords = None num_entities = None status = world.CHUNK_NOT_CREATED - except RegionHeaderError as e: + except RegionHeaderError: # corrupted chunk, because of region header status = world.CHUNK_CORRUPTED chunk = None @@ -796,7 +925,7 @@ def scan_chunk(region_file, coords, global_coords, entity_limit): global_coords = world.get_global_chunk_coords(split(region_file.filename)[1], coords[0], coords[1]) num_entities = None - except ChunkDataError as e: + except ChunkDataError: # corrupted chunk, usually because of bad CRC in compression status = world.CHUNK_CORRUPTED chunk = None @@ -804,7 +933,7 @@ def scan_chunk(region_file, coords, global_coords, entity_limit): global_coords = world.get_global_chunk_coords(split(region_file.filename)[1], coords[0], coords[1]) num_entities = None - except ChunkHeaderError as e: + except ChunkHeaderError: # corrupted chunk, error in the header of the chunk status = world.CHUNK_CORRUPTED chunk = None @@ -812,7 +941,7 @@ def scan_chunk(region_file, coords, global_coords, entity_limit): global_coords = world.get_global_chunk_coords(split(region_file.filename)[1], coords[0], coords[1]) num_entities = None - except KeyError as e: + except KeyError: # chunk with the mandatory tag Entities missing status = world.CHUNK_MISSING_ENTITIES_TAG chunk = None @@ -820,7 +949,7 @@ def scan_chunk(region_file, coords, global_coords, entity_limit): global_coords = world.get_global_chunk_coords(split(region_file.filename)[1], coords[0], coords[1]) num_entities = None - except UnicodeDecodeError as e: + except UnicodeDecodeError: # TODO: This should another kind of error, it's now being handled as corrupted chunk status = world.CHUNK_CORRUPTED chunk = None @@ -828,7 +957,7 @@ def scan_chunk(region_file, coords, global_coords, entity_limit): global_coords = world.get_global_chunk_coords(split(region_file.filename)[1], coords[0], coords[1]) num_entities = None - except TypeError as e: + except TypeError: # TODO: This should another kind 
of error, it's now being handled as corrupted chunk status = world.CHUNK_CORRUPTED chunk = None From 3fe9052fe3d9283cd4b9133c1f4763dc14932b9f Mon Sep 17 00:00:00 2001 From: Alejandro Aguilera Date: Mon, 20 Jul 2020 00:30:23 +0200 Subject: [PATCH 115/151] Improve comments in util.py. Remove unused import. --- regionfixer_core/util.py | 36 ++++++++++++++++++++++++------------ 1 file changed, 24 insertions(+), 12 deletions(-) diff --git a/regionfixer_core/util.py b/regionfixer_core/util.py index 3d59229..f4d8e41 100644 --- a/regionfixer_core/util.py +++ b/regionfixer_core/util.py @@ -25,11 +25,17 @@ import sys import traceback -from . import world - def get_str_from_traceback(ty, value, tb): - """ Return a string from a traceback + exception. """ + """ Return a string from a traceback plus exception. + + Keyword arguments: + - ty -- Exception type + - value -- value of the traceback + - tb -- Traceback + + """ + t = traceback.format_exception(ty, value, tb) s = str(ty) + "\n" for i in t: @@ -39,8 +45,6 @@ def get_str_from_traceback(ty, value, tb): # Stolen from: # http://stackoverflow.com/questions/3041986/python-command-line-yes-no-input - - def query_yes_no(question, default="yes"): """Ask a yes/no question via raw_input() and return their answer. @@ -78,10 +82,13 @@ def query_yes_no(question, default="yes"): # stolen from minecraft overviewer # https://github.com/overviewer/Minecraft-Overviewer/ def is_bare_console(): - """Returns true if Overviewer is running in a bare console in - Windows, that is, if overviewer wasn't started in a cmd.exe + """Returns true if the python script is running in a bare console + + In Windows, that is, if the script wasn't started in a cmd.exe session. + """ + if platform.system() == 'Windows': try: import ctypes @@ -96,7 +103,8 @@ def is_bare_console(): def entitle(text, level=0): - """ Put the text in a title with lot's of hashes everywhere. """ + """ Put the text in a title with lot's of hashes around it. """ + t = '' if level == 0: t += "\n" @@ -107,12 +115,16 @@ def entitle(text, level=0): def table(columns): - """ Gets a list with lists in which each list is a column, - returns a text string with a table. """ + """ Generates a text containing a pretty table. + + Keyword argument: + - columns -- A list containing lists in which each one of the is a column + of the table. + + """ def get_max_len(l): - """ Takes a list of strings and returns the length of the biggest - string """ + """ Takes a list of strings and returns the length of the biggest string """ m = 0 for e in l: if len(str(e)) > m: From 1c971a9d42bcdc30738a5b6b26a599037a04cfa9 Mon Sep 17 00:00:00 2001 From: Alejandro Aguilera Date: Mon, 20 Jul 2020 00:31:16 +0200 Subject: [PATCH 116/151] Add the copyright notice to version.py. --- regionfixer_core/version.py | 25 +++++++++++++++++++++---- 1 file changed, 21 insertions(+), 4 deletions(-) diff --git a/regionfixer_core/version.py b/regionfixer_core/version.py index 352d151..cdedb71 100644 --- a/regionfixer_core/version.py +++ b/regionfixer_core/version.py @@ -1,8 +1,25 @@ -''' -Created on 24/06/2014 +#!/usr/bin/env python +# -*- coding: utf-8 -*- -@author: Alejandro -''' +# +# Region Fixer. +# Fix your region files with a backup copy of your Minecraft world. 
+# Copyright (C) 2020 Alejandro Aguilera (Fenixin) +# https://github.com/Fenixin/Minecraft-Region-Fixer +# +# This program is free software: you can redistribute it and/or modify +# it under the terms of the GNU General Public License as published by +# the Free Software Foundation, either version 3 of the License, or +# (at your option) any later version. +# +# This program is distributed in the hope that it will be useful, +# but WITHOUT ANY WARRANTY; without even the implied warranty of +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the +# GNU General Public License for more details. +# +# You should have received a copy of the GNU General Public License +# along with this program. If not, see . +# version_string = "0.3.3" version_numbers = version_string.split('.') From 63b7c338f435e04b2d0cb0762c19139fe1e84892 Mon Sep 17 00:00:00 2001 From: Alejandro Aguilera Date: Mon, 20 Jul 2020 00:35:48 +0200 Subject: [PATCH 117/151] Move the greetings a version printing before the errors in the code. --- regionfixer.py | 7 ++++--- 1 file changed, 4 insertions(+), 3 deletions(-) diff --git a/regionfixer.py b/regionfixer.py index ccdd779..ad78f0f 100644 --- a/regionfixer.py +++ b/regionfixer.py @@ -381,6 +381,10 @@ def main(): world_list, regionset = world.parse_paths(args.paths) + # print greetings an version number + print("\nWelcome to Region Fixer!") + print(("(v {0})".format(version_string))) + # Check if there are valid worlds to scan if not (world_list or regionset): print('Error: No worlds or region files to scan! Use ' @@ -424,9 +428,6 @@ def main(): if args.entity_limit < 0: parser.error("Error: The entity limit must be at least 0!") - print("\nWelcome to Region Fixer!") - print(("(version: {0})".format(version_string))) - # Do things with the option options args # Create a list of worlds containing the backups of the region files if args.backups: From 2d5733f56706b00e41377fe025b61df74168c2ef Mon Sep 17 00:00:00 2001 From: Alejandro Aguilera Date: Tue, 21 Jul 2020 00:47:01 +0200 Subject: [PATCH 118/151] Move constants to new file. --- regionfixer.py | 56 ++-- regionfixer_core/constants.py | 243 ++++++++++++++++ regionfixer_core/interactive.py | 45 +-- regionfixer_core/scan.py | 72 ++--- regionfixer_core/world.py | 472 ++++++++++---------------------- 5 files changed, 469 insertions(+), 419 deletions(-) create mode 100644 regionfixer_core/constants.py diff --git a/regionfixer.py b/regionfixer.py index ad78f0f..9246199 100644 --- a/regionfixer.py +++ b/regionfixer.py @@ -27,24 +27,16 @@ import sys +from regionfixer_core.bug_reporter import BugReporter +import regionfixer_core.constants as c +from regionfixer_core.interactive import InteractiveLoop from regionfixer_core.scan import (console_scan_world, console_scan_regionset, ChildProcessException) -from regionfixer_core.interactive import InteractiveLoop -from regionfixer_core.bug_reporter import BugReporter from regionfixer_core.util import entitle, is_bare_console from regionfixer_core.version import version_string from regionfixer_core import world -################ -# Return values -################ - -RV_OK = 0 # world scanned and no problems found -RV_CRASH = 1 # crash or end unexpectedly -RV_NOTHING_TO_SCAN = 20 # no files/worlds to scan -# RV_WRONG_COMMAND = 2 # the command line used is wrong and region fixer didn't execute. 
argparse uses this value by default -RV_BAD_WORLD = 3 # scan completed successfully but problems have been found in the scan def fix_bad_chunks(options, scanned_obj): @@ -61,16 +53,16 @@ def fix_bad_chunks(options, scanned_obj): """ print("") - total = scanned_obj.count_chunks(world.CHUNK_MISSING_ENTITIES_TAG) - problem = world.CHUNK_MISSING_ENTITIES_TAG - status = world.CHUNK_STATUS_TEXT[world.CHUNK_MISSING_ENTITIES_TAG] + total = scanned_obj.count_chunks(c.CHUNK_MISSING_ENTITIES_TAG) + problem = c.CHUNK_MISSING_ENTITIES_TAG + status = c.CHUNK_STATUS_TEXT[c.CHUNK_MISSING_ENTITIES_TAG] # In the same order as in FIXABLE_CHUNK_PROBLEMS options_fix = [options.fix_corrupted, options.fix_missing_tag, options.fix_wrong_located] - fixing = list(zip(options_fix, world.FIXABLE_CHUNK_PROBLEMS)) + fixing = list(zip(options_fix, c.FIXABLE_CHUNK_PROBLEMS)) for fix, problem in fixing: - status = world.CHUNK_STATUS_TEXT[problem] + status = c.CHUNK_STATUS_TEXT[problem] total = scanned_obj.count_chunks(problem) if fix: if total: @@ -104,9 +96,9 @@ def delete_bad_chunks(options, scanned_obj): options.delete_entities, options.delete_shared_offset, options.delete_missing_tag] - deleting = list(zip(options_delete, world.CHUNK_PROBLEMS)) + deleting = list(zip(options_delete, c.CHUNK_PROBLEMS)) for delete, problem in deleting: - status = world.CHUNK_STATUS_TEXT[problem] + status = c.CHUNK_STATUS_TEXT[problem] total = scanned_obj.count_chunks(problem) if delete: if total: @@ -135,9 +127,9 @@ def delete_bad_regions(options, scanned_obj): print("") options_delete = [options.delete_too_small] - deleting = list(zip(options_delete, world.REGION_PROBLEMS)) + deleting = list(zip(options_delete, c.REGION_PROBLEMS)) for delete, problem in deleting: - status = world.REGION_STATUS_TEXT[problem] + status = c.REGION_STATUS_TEXT[problem] total = scanned_obj.count_regions(problem) if delete: if total: @@ -366,7 +358,7 @@ def main(): print("Minecraft Region Fixer only works with python 3.x") print(("(And you just tried to run it in python {0})".format(sys.version))) print("") - return RV_CRASH + return c.RV_CRASH if is_bare_console(): print("") @@ -376,7 +368,7 @@ def main(): "Run cmd.exe in the run window.\n\n") print("") getpass("Press enter to continue:") - return RV_CRASH + return c.RV_CRASH world_list, regionset = world.parse_paths(args.paths) @@ -389,7 +381,7 @@ def main(): if not (world_list or regionset): print('Error: No worlds or region files to scan! 
Use ' '--help for a complete list of options.') - return RV_NOTHING_TO_SCAN + return c.RV_NOTHING_TO_SCAN # Check basic options compatibilities any_chunk_replace_option = args.replace_corrupted or \ @@ -442,9 +434,9 @@ def main(): found_problems_in_regionsets = False found_problems_in_worlds = False if args.interactive: - c = InteractiveLoop(world_list, regionset, args, backup_worlds) - c.cmdloop() - return RV_OK + ci = InteractiveLoop(world_list, regionset, args, backup_worlds) + ci.cmdloop() + return c.RV_OK else: summary_text = "" # Scan the separate region files @@ -501,7 +493,7 @@ def main(): args.replace_wrong_located, args.replace_entities, args.replace_shared_offset] - replacing = list(zip(options_replace, world.CHUNK_PROBLEMS_ITERATOR)) + replacing = list(zip(options_replace, c.CHUNK_PROBLEMS_ITERATOR)) for replace, (problem, status, arg) in replacing: if replace: total = w.count_chunks(problem) @@ -525,7 +517,7 @@ def main(): del_ent = args.delete_entities ent_lim = args.entity_limit options_replace = [args.replace_too_small] - replacing = list(zip(options_replace, world.REGION_PROBLEMS_ITERATOR)) + replacing = list(zip(options_replace, c.REGION_PROBLEMS_ITERATOR)) for replace, (problem, status, arg) in replacing: if replace: total = w.count_regions(problem) @@ -577,9 +569,9 @@ def main(): print("Something went wrong while saving the log file!") if found_problems_in_regionsets or found_problems_in_worlds: - return RV_BAD_WORLD + return c.RV_BAD_WORLD - return RV_OK + return c.RV_OK if __name__ == '__main__': @@ -605,7 +597,7 @@ def main(): bug_sender = BugReporter(e.printable_traceback) # auto_reported = bug_sender.ask_and_send(QUESTION_TEXT) bug_report = bug_sender.error_str - value = RV_CRASH + value = c.RV_CRASH except Exception as e: had_exception = True @@ -614,7 +606,7 @@ def main(): bug_sender = BugReporter() # auto_reported = bug_sender.ask_and_send(QUESTION_TEXT) bug_report = bug_sender.error_str - value = RV_CRASH + value = c.RV_CRASH finally: if had_exception and not auto_reported: diff --git a/regionfixer_core/constants.py b/regionfixer_core/constants.py new file mode 100644 index 0000000..a438390 --- /dev/null +++ b/regionfixer_core/constants.py @@ -0,0 +1,243 @@ +#!/usr/bin/env python +# -*- coding: utf-8 -*- + +# +# Region Fixer. +# Fix your region files with a backup copy of your Minecraft world. +# Copyright (C) 2020 Alejandro Aguilera (Fenixin) +# https://github.com/Fenixin/Minecraft-Region-Fixer +# +# This program is free software: you can redistribute it and/or modify +# it under the terms of the GNU General Public License as published by +# the Free Software Foundation, either version 3 of the License, or +# (at your option) any later version. +# +# This program is distributed in the hope that it will be useful, +# but WITHOUT ANY WARRANTY; without even the implied warranty of +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the +# GNU General Public License for more details. +# +# You should have received a copy of the GNU General Public License +# along with this program. If not, see . +# + + + +################ +# Return values +################ + +RV_OK = 0 # world scanned and no problems found +RV_CRASH = 1 # crash or end unexpectedly +RV_NOTHING_TO_SCAN = 20 # no files/worlds to scan +# RV_WRONG_COMMAND = 2 # the command line used is wrong and region fixer didn't execute. 
argparse uses this value by default +RV_BAD_WORLD = 3 # scan completed successfully but problems have been found in the scan + + + + +# -------------- +# Chunk related: +# -------------- +# Used to mark the status of chunks: +CHUNK_NOT_CREATED = -1 +CHUNK_OK = 0 +CHUNK_CORRUPTED = 1 +CHUNK_WRONG_LOCATED = 2 +CHUNK_TOO_MANY_ENTITIES = 3 +CHUNK_SHARED_OFFSET = 4 +CHUNK_MISSING_ENTITIES_TAG = 5 + +# Chunk statuses +CHUNK_STATUSES = [CHUNK_NOT_CREATED, + CHUNK_OK, + CHUNK_CORRUPTED, + CHUNK_WRONG_LOCATED, + CHUNK_TOO_MANY_ENTITIES, + CHUNK_SHARED_OFFSET, + CHUNK_MISSING_ENTITIES_TAG] + +# Status that are considered problems +CHUNK_PROBLEMS = [CHUNK_CORRUPTED, + CHUNK_WRONG_LOCATED, + CHUNK_TOO_MANY_ENTITIES, + CHUNK_SHARED_OFFSET, + CHUNK_MISSING_ENTITIES_TAG] + +# Text describing each chunk status +CHUNK_STATUS_TEXT = {CHUNK_NOT_CREATED: "Not created", + CHUNK_OK: "OK", + CHUNK_CORRUPTED: "Corrupted", + CHUNK_WRONG_LOCATED: "Wrong located", + CHUNK_TOO_MANY_ENTITIES: "Too many entities", + CHUNK_SHARED_OFFSET: "Sharing offset", + CHUNK_MISSING_ENTITIES_TAG: "Missing Entities tag" + } + +# arguments used in the options +CHUNK_PROBLEMS_ARGS = {CHUNK_CORRUPTED: 'corrupted', + CHUNK_WRONG_LOCATED: 'wrong', + CHUNK_TOO_MANY_ENTITIES: 'entities', + CHUNK_SHARED_OFFSET: 'sharing', + CHUNK_MISSING_ENTITIES_TAG: 'miss_tag' + } + +# used in some places where there is less space +CHUNK_PROBLEMS_ABBR = {CHUNK_CORRUPTED: 'c', + CHUNK_WRONG_LOCATED: 'w', + CHUNK_TOO_MANY_ENTITIES: 'tme', + CHUNK_SHARED_OFFSET: 'so', + CHUNK_MISSING_ENTITIES_TAG: 'mt' + } + +# Dictionary with possible solutions for the chunks problems, +# used to create options dynamically +# The possible solutions right now are: +CHUNK_SOLUTION_REMOVE = 51 +CHUNK_SOLUTION_REPLACE = 52 +CHUNK_SOLUTION_REMOVE_ENTITIES = 53 +CHUNK_SOLUTION_RELOCATE_USING_DATA = 54 + +CHUNK_PROBLEMS_SOLUTIONS = {CHUNK_CORRUPTED: [CHUNK_SOLUTION_REMOVE, CHUNK_SOLUTION_REPLACE], + CHUNK_WRONG_LOCATED: [CHUNK_SOLUTION_REMOVE, CHUNK_SOLUTION_REPLACE, CHUNK_SOLUTION_RELOCATE_USING_DATA], + CHUNK_TOO_MANY_ENTITIES: [CHUNK_SOLUTION_REMOVE_ENTITIES], + CHUNK_SHARED_OFFSET: [CHUNK_SOLUTION_REMOVE, CHUNK_SOLUTION_REPLACE], + CHUNK_MISSING_ENTITIES_TAG: [CHUNK_SOLUTION_REMOVE, CHUNK_SOLUTION_REPLACE]} + +# chunk problems that can be fixed (so they don't need to be removed or replaced) +FIXABLE_CHUNK_PROBLEMS = [CHUNK_CORRUPTED, CHUNK_MISSING_ENTITIES_TAG, CHUNK_WRONG_LOCATED] + +# list with problem, status-text, problem arg tuples +CHUNK_PROBLEMS_ITERATOR = [] +for problem in CHUNK_PROBLEMS: + CHUNK_PROBLEMS_ITERATOR.append((problem, + CHUNK_STATUS_TEXT[problem], + CHUNK_PROBLEMS_ARGS[problem])) + +# Used to know where to look in a chunk status tuple +TUPLE_NUM_ENTITIES = 0 +TUPLE_STATUS = 1 + + + + +# --------------- +# Region related: +# --------------- +# Used to mark the status of region files: +REGION_OK = 100 +REGION_TOO_SMALL = 101 +REGION_UNREADABLE = 102 +REGION_UNREADABLE_PERMISSION_ERROR = 103 + +# Region statuses +REGION_STATUSES = [REGION_OK, + REGION_TOO_SMALL, + REGION_UNREADABLE, + REGION_UNREADABLE_PERMISSION_ERROR] + +# Text describing each region status used to list all the problem at the end of the scan +REGION_STATUS_TEXT = {REGION_OK: "OK", + REGION_TOO_SMALL: "Too small", + REGION_UNREADABLE: "Unreadable IOError", + # This status differentiates IOError from a file that you don't have permission to access + # TODO: It would be better to open region files only in write mode when needed + REGION_UNREADABLE_PERMISSION_ERROR: "Permission error" + } + +# 
Status that are considered problems +REGION_PROBLEMS = [REGION_TOO_SMALL, + REGION_UNREADABLE, + REGION_UNREADABLE_PERMISSION_ERROR] + +# arguments used in the options +REGION_PROBLEMS_ARGS = {REGION_TOO_SMALL: 'too_small', + REGION_UNREADABLE: 'unreadable', + REGION_UNREADABLE_PERMISSION_ERROR: 'permission_error' + } + +# used in some places where there is less space +REGION_PROBLEMS_ABBR = {REGION_TOO_SMALL: 'ts', + REGION_UNREADABLE: 'ur', + REGION_UNREADABLE_PERMISSION_ERROR: 'pe' + } + +# Dictionary with possible solutions for the region problems, +# used to create options dynamically +# The possible solutions right now are: +REGION_SOLUTION_REMOVE = 151 +REGION_SOLUTION_REPLACE = 152 + +REGION_PROBLEMS_SOLUTIONS = {REGION_TOO_SMALL: [REGION_SOLUTION_REMOVE, REGION_SOLUTION_REPLACE], + REGION_UNREADABLE: [REGION_SOLUTION_REMOVE, REGION_SOLUTION_REPLACE] + } + +# list with problem, status-text, problem arg tuples +REGION_PROBLEMS_ITERATOR = [] +for problem in REGION_PROBLEMS: + try: + REGION_PROBLEMS_ITERATOR.append((problem, + REGION_STATUS_TEXT[problem], + REGION_PROBLEMS_ARGS[problem])) + except KeyError: + pass + +REGION_PROBLEMS_ARGS = {REGION_TOO_SMALL: 'too-small'} + + + + +# ------------------ +# Data file related: +# ------------------ +# Used to mark the status of data files: +DATAFILE_OK = 200 +DATAFILE_UNREADABLE = 201 + +# Data files statuses +DATAFILE_STATUSES = [DATAFILE_OK, + DATAFILE_UNREADABLE] + +# Status that are considered problems +DATAFILE_PROBLEMS = [DATAFILE_UNREADABLE] + +# Text describing each chunk status +DATAFILE_STATUS_TEXT = {DATAFILE_OK: "OK", + DATAFILE_UNREADABLE: "The data file cannot be read" + } + +# arguments used in the options +DATAFILE_PROBLEMS_ARGS = {DATAFILE_OK: 'OK', + DATAFILE_UNREADABLE: 'unreadable' + } + +# used in some places where there is less space +DATAFILE_PROBLEM_ABBR = {DATAFILE_OK: 'ok', + DATAFILE_UNREADABLE: 'ur' + } + +# Dictionary with possible solutions for the chunks problems, +# used to create options dynamically +# The possible solutions right now are: +DATAFILE_SOLUTION_REMOVE = 251 + +DATAFILE_PROBLEMS_SOLUTIONS = {DATAFILE_UNREADABLE: [DATAFILE_SOLUTION_REMOVE]} + +# list with problem, status-text, problem arg tuples +DATAFILE_PROBLEMS_ITERATOR = [] +for problem in DATAFILE_PROBLEMS: + DATAFILE_PROBLEMS_ITERATOR.append((problem, + DATAFILE_STATUS_TEXT[problem], + DATAFILE_PROBLEMS_ARGS[problem])) + +CHUNK_PROBLEMS_ITERATOR = [] +for problem in CHUNK_PROBLEMS: + CHUNK_PROBLEMS_ITERATOR.append((problem, + CHUNK_STATUS_TEXT[problem], + CHUNK_PROBLEMS_ARGS[problem])) + +# Dimension names: +DIMENSION_NAMES = {"region": "Overworld", + "DIM1": "The End", + "DIM-1": "Nether" + } diff --git a/regionfixer_core/interactive.py b/regionfixer_core/interactive.py index 20b8f63..e64789f 100644 --- a/regionfixer_core/interactive.py +++ b/regionfixer_core/interactive.py @@ -24,8 +24,9 @@ from cmd import Cmd -from . 
import world -from .scan import console_scan_world, console_scan_regionset +import regionfixer_core.constants as c +from regionfixer_core import world +from regionfixer_core.scan import console_scan_world, console_scan_regionset class InteractiveLoop(Cmd): @@ -50,7 +51,7 @@ def __init__(self, world_list, regionset, options, backup_worlds): # Possible args for chunks stuff possible_args = "" first = True - for i in list(world.CHUNK_PROBLEMS_ARGS.values()) + ['all']: + for i in list(c.CHUNK_PROBLEMS_ARGS.values()) + ['all']: if not first: possible_args += ", " possible_args += i @@ -60,7 +61,7 @@ def __init__(self, world_list, regionset, options, backup_worlds): # Possible args for region stuff possible_args = "" first = True - for i in list(world.REGION_PROBLEMS_ARGS.values()) + ['all']: + for i in list(c.REGION_PROBLEMS_ARGS.values()) + ['all']: if not first: possible_args += ", " possible_args += i @@ -227,9 +228,9 @@ def do_count_chunks(self, arg): elif len(arg.split()) > 1: print("Error: too many parameters.") else: - if arg in list(world.CHUNK_PROBLEMS_ARGS.values()) or arg == 'all': + if arg in list(c.CHUNK_PROBLEMS_ARGS.values()) or arg == 'all': total = self.current.count_chunks(None) - for problem, status_text, a in world.CHUNK_PROBLEMS_ITERATOR: + for problem, status_text, a in c.CHUNK_PROBLEMS_ITERATOR: if arg == 'all' or arg == a: n = self.current.count_chunks(problem) print("Chunks with status \'{0}\': {1}".format(status_text, n)) @@ -248,9 +249,9 @@ def do_count_regions(self, arg): elif len(arg.split()) > 1: print("Error: too many parameters.") else: - if arg in list(world.REGION_PROBLEMS_ARGS.values()) or arg == 'all': + if arg in list(c.REGION_PROBLEMS_ARGS.values()) or arg == 'all': total = self.current.count_regions(None) - for problem, status_text, a in world.REGION_PROBLEMS_ITERATOR: + for problem, status_text, a in c.REGION_PROBLEMS_ITERATOR: if arg == 'all' or arg == a: n = self.current.count_regions(problem) print("Regions with status \'{0}\': {1}".format(status_text, n)) @@ -301,8 +302,8 @@ def do_remove_chunks(self, arg): elif len(arg.split()) > 1: print("Error: too many parameters.") else: - if arg in list(world.CHUNK_PROBLEMS_ARGS.values()) or arg == 'all': - for problem, status_text, a in world.CHUNK_PROBLEMS_ITERATOR: + if arg in list(c.CHUNK_PROBLEMS_ARGS.values()) or arg == 'all': + for problem, status_text, a in c.CHUNK_PROBLEMS_ITERATOR: if arg == 'all' or arg == a: n = self.current.remove_problematic_chunks(problem) if n: @@ -322,8 +323,8 @@ def do_replace_chunks(self, arg): elif len(arg.split()) > 1: print("Error: too many parameters.") else: - if arg in list(world.CHUNK_PROBLEMS_ARGS.values()) or arg == 'all': - for problem, status_text, a in world.CHUNK_PROBLEMS_ITERATOR: + if arg in list(c.CHUNK_PROBLEMS_ARGS.values()) or arg == 'all': + for problem, status_text, a in c.CHUNK_PROBLEMS_ITERATOR: if arg == 'all' or arg == a: n = self.current.replace_problematic_chunks(self.backup_worlds, problem, el, de) if n: @@ -343,8 +344,8 @@ def do_replace_regions(self, arg): elif len(arg.split()) > 1: print("Error: too many parameters.") else: - if arg in list(world.REGION_PROBLEMS_ARGS.values()) or arg == 'all': - for problem, status_text, a in world.REGION_PROBLEMS_ITERATOR: + if arg in list(c.REGION_PROBLEMS_ARGS.values()) or arg == 'all': + for problem, status_text, a in c.REGION_PROBLEMS_ITERATOR: if arg == 'all' or arg == a: n = self.current.replace_problematic_regions(self.backup_worlds, problem, el, de) if n: @@ -362,8 +363,8 @@ def do_remove_regions(self, 
arg): elif len(arg.split()) > 1: print("Error: too many parameters.") else: - if arg in list(world.REGION_PROBLEMS_ARGS.values()) or arg == 'all': - for problem, status_text, a in world.REGION_PROBLEMS_ITERATOR: + if arg in list(c.REGION_PROBLEMS_ARGS.values()) or arg == 'all': + for problem, status_text, a in c.REGION_PROBLEMS_ITERATOR: if arg == 'all' or arg == a: n = self.current.remove_problematic_regions(problem) if n: @@ -407,27 +408,27 @@ def complete_set(self, text, line, begidx, endidx): return self.complete_arg(text, possible_args) def complete_count_chunks(self, text, line, begidx, endidx): - possible_args = list(world.CHUNK_PROBLEMS_ARGS.values()) + ['all'] + possible_args = list(c.CHUNK_PROBLEMS_ARGS.values()) + ['all'] return self.complete_arg(text, possible_args) def complete_remove_chunks(self, text, line, begidx, endidx): - possible_args = list(world.CHUNK_PROBLEMS_ARGS.values()) + ['all'] + possible_args = list(c.CHUNK_PROBLEMS_ARGS.values()) + ['all'] return self.complete_arg(text, possible_args) def complete_replace_chunks(self, text, line, begidx, endidx): - possible_args = list(world.CHUNK_PROBLEMS_ARGS.values()) + ['all'] + possible_args = list(c.CHUNK_PROBLEMS_ARGS.values()) + ['all'] return self.complete_arg(text, possible_args) def complete_count_regions(self, text, line, begidx, endidx): - possible_args = list(world.REGION_PROBLEMS_ARGS.values()) + ['all'] + possible_args = list(c.REGION_PROBLEMS_ARGS.values()) + ['all'] return self.complete_arg(text, possible_args) def complete_remove_regions(self, text, line, begidx, endidx): - possible_args = list(world.REGION_PROBLEMS_ARGS.values()) + ['all'] + possible_args = list(c.REGION_PROBLEMS_ARGS.values()) + ['all'] return self.complete_arg(text, possible_args) def complete_replace_regions(self, text, line, begidx, endidx): - possible_args = list(world.REGION_PROBLEMS_ARGS.values()) + ['all'] + possible_args = list(c.REGION_PROBLEMS_ARGS.values()) + ['all'] return self.complete_arg(text, possible_args) ################################################# diff --git a/regionfixer_core/scan.py b/regionfixer_core/scan.py index 9db5df1..72d1c22 100644 --- a/regionfixer_core/scan.py +++ b/regionfixer_core/scan.py @@ -40,8 +40,10 @@ from progressbar import ProgressBar, Bar, AdaptiveETA, SimpleProgress -from regionfixer_core import world +import regionfixer_core.constants as c from regionfixer_core.util import entitle +from regionfixer_core import world + logging.basicConfig(filename=None, level=logging.CRITICAL) @@ -672,11 +674,11 @@ def console_scan_world(world_obj, processes, entity_limit, remove_entities, if not w.scanned_level.path: print("[WARNING!] 
\'level.dat\' doesn't exist!") else: - if w.scanned_level.status not in world.DATAFILE_PROBLEMS: + if w.scanned_level.status not in c.DATAFILE_PROBLEMS: print("\'level.dat\' is readable") else: print("[WARNING!]: \'level.dat\' is corrupted with the following error/s:") - print("\t {0}".format(world.DATAFILE_STATUS_TEXT[w.scanned_level.status])) + print("\t {0}".format(c.DATAFILE_STATUS_TEXT[w.scanned_level.status])) ps = AsyncDataScanner(w.players, processes) ops = AsyncDataScanner(w.old_players, processes) @@ -741,18 +743,18 @@ def scan_data(scanned_dat_file): _ = nbt.NBTFile(buffer=f) else: _ = nbt.NBTFile(filename=s.path) - s.status = world.DATAFILE_OK + s.status = c.DATAFILE_OK except MalformedFileError: - s.status = world.DATAFILE_UNREADABLE + s.status = c.DATAFILE_UNREADABLE except IOError: - s.status = world.DATAFILE_UNREADABLE + s.status = c.DATAFILE_UNREADABLE except UnicodeDecodeError: - s.status = world.DATAFILE_UNREADABLE + s.status = c.DATAFILE_UNREADABLE except TypeError: - s.status = world.DATAFILE_UNREADABLE + s.status = c.DATAFILE_UNREADABLE except: - s.status = world.DATAFILE_UNREADABLE + s.status = c.DATAFILE_UNREADABLE except_type, except_class, tb = sys.exc_info() s = (s, (except_type, except_class, extract_tb(tb))) @@ -779,19 +781,19 @@ def scan_region_file(scanned_regionfile_obj, entity_limit, remove_entities): try: region_file = region.RegionFile(r.path) except region.NoRegionHeader: # The region has no header - r.status = world.REGION_TOO_SMALL + r.status = c.REGION_TOO_SMALL r.scan_time = time() r.scanned = True return r except PermissionError: - r.status = world.REGION_UNREADABLE_PERMISSION_ERROR + r.status = c.REGION_UNREADABLE_PERMISSION_ERROR r.scan_time = time() r.scanned = True return r except IOError: - r.status = world.REGION_UNREADABLE + r.status = c.REGION_UNREADABLE r.scan_time = time() r.scanned = True return r @@ -800,19 +802,19 @@ def scan_region_file(scanned_regionfile_obj, entity_limit, remove_entities): for z in range(32): # start the actual chunk scanning g_coords = r.get_global_chunk_coords(x, z) - chunk, c = scan_chunk(region_file, + chunk, tup = scan_chunk(region_file, (x, z), g_coords, entity_limit) - if c: - r[(x, z)] = c + if tup: + r[(x, z)] = tup else: # chunk not created continue - if c[world.TUPLE_STATUS] == world.CHUNK_OK: + if tup[c.TUPLE_STATUS] == c.CHUNK_OK: continue - elif c[world.TUPLE_STATUS] == world.CHUNK_TOO_MANY_ENTITIES: + elif tup[c.TUPLE_STATUS] == c.CHUNK_TOO_MANY_ENTITIES: # Deleting entities is in here because parsing a chunk # with thousands of wrong entities takes a long time, # and sometimes GiB of RAM, and once detected is better @@ -820,9 +822,9 @@ def scan_region_file(scanned_regionfile_obj, entity_limit, remove_entities): if remove_entities: world.delete_entities(region_file, x, z) print(("Deleted {0} entities in chunk" - " ({1},{2}) of the region file: {3}").format(c[world.TUPLE_NUM_ENTITIES], x, z, r.filename)) + " ({1},{2}) of the region file: {3}").format(tup[c.TUPLE_NUM_ENTITIES], x, z, r.filename)) # entities removed, change chunk status to OK - r[(x, z)] = (0, world.CHUNK_OK) + r[(x, z)] = (0, c.CHUNK_OK) else: # This stores all the entities in a file, @@ -832,9 +834,9 @@ def scan_region_file(scanned_regionfile_obj, entity_limit, remove_entities): # ~ archivo = open(name,'w') # ~ archivo.write(pretty_tree) pass - elif c[world.TUPLE_STATUS] == world.CHUNK_CORRUPTED: + elif tup[c.TUPLE_STATUS] == c.CHUNK_CORRUPTED: pass - elif c[world.TUPLE_STATUS] == world.CHUNK_WRONG_LOCATED: + elif tup[c.TUPLE_STATUS] == 
c.CHUNK_WRONG_LOCATED: pass # Now check for chunks sharing offsets: @@ -849,14 +851,14 @@ def scan_region_file(scanned_regionfile_obj, entity_limit, remove_entities): metadata = region_file.metadata sharing = [k for k in metadata if (metadata[k].status == region.STATUS_CHUNK_OVERLAPPING and - r[k][world.TUPLE_STATUS] == world.CHUNK_WRONG_LOCATED)] + r[k][c.TUPLE_STATUS] == c.CHUNK_WRONG_LOCATED)] shared_counter = 0 for k in sharing: - r[k] = (r[k][world.TUPLE_NUM_ENTITIES], world.CHUNK_SHARED_OFFSET) + r[k] = (r[k][c.TUPLE_NUM_ENTITIES], c.CHUNK_SHARED_OFFSET) shared_counter += 1 r.scan_time = time() - r.status = world.REGION_OK + r.status = c.REGION_OK r.scanned = True return r @@ -902,24 +904,24 @@ def scan_chunk(region_file, coords, global_coords, entity_limit): num_entities = len(chunk["Level"]["Entities"]) if data_coords != global_coords: # wrong located chunk - status = world.CHUNK_WRONG_LOCATED + status = c.CHUNK_WRONG_LOCATED elif num_entities > el: # too many entities in the chunk - status = world.CHUNK_TOO_MANY_ENTITIES + status = c.CHUNK_TOO_MANY_ENTITIES else: # chunk ok - status = world.CHUNK_OK + status = c.CHUNK_OK except InconceivedChunk: # chunk not created chunk = None data_coords = None num_entities = None - status = world.CHUNK_NOT_CREATED + status = c.CHUNK_NOT_CREATED except RegionHeaderError: # corrupted chunk, because of region header - status = world.CHUNK_CORRUPTED + status = c.CHUNK_CORRUPTED chunk = None data_coords = None global_coords = world.get_global_chunk_coords(split(region_file.filename)[1], coords[0], coords[1]) @@ -927,7 +929,7 @@ def scan_chunk(region_file, coords, global_coords, entity_limit): except ChunkDataError: # corrupted chunk, usually because of bad CRC in compression - status = world.CHUNK_CORRUPTED + status = c.CHUNK_CORRUPTED chunk = None data_coords = None global_coords = world.get_global_chunk_coords(split(region_file.filename)[1], coords[0], coords[1]) @@ -935,7 +937,7 @@ def scan_chunk(region_file, coords, global_coords, entity_limit): except ChunkHeaderError: # corrupted chunk, error in the header of the chunk - status = world.CHUNK_CORRUPTED + status = c.CHUNK_CORRUPTED chunk = None data_coords = None global_coords = world.get_global_chunk_coords(split(region_file.filename)[1], coords[0], coords[1]) @@ -943,7 +945,7 @@ def scan_chunk(region_file, coords, global_coords, entity_limit): except KeyError: # chunk with the mandatory tag Entities missing - status = world.CHUNK_MISSING_ENTITIES_TAG + status = c.CHUNK_MISSING_ENTITIES_TAG chunk = None data_coords = None global_coords = world.get_global_chunk_coords(split(region_file.filename)[1], coords[0], coords[1]) @@ -951,7 +953,7 @@ def scan_chunk(region_file, coords, global_coords, entity_limit): except UnicodeDecodeError: # TODO: This should another kind of error, it's now being handled as corrupted chunk - status = world.CHUNK_CORRUPTED + status = c.CHUNK_CORRUPTED chunk = None data_coords = None global_coords = world.get_global_chunk_coords(split(region_file.filename)[1], coords[0], coords[1]) @@ -959,13 +961,13 @@ def scan_chunk(region_file, coords, global_coords, entity_limit): except TypeError: # TODO: This should another kind of error, it's now being handled as corrupted chunk - status = world.CHUNK_CORRUPTED + status = c.CHUNK_CORRUPTED chunk = None data_coords = None global_coords = world.get_global_chunk_coords(split(region_file.filename)[1], coords[0], coords[1]) num_entities = None - return chunk, (num_entities, status) if status != world.CHUNK_NOT_CREATED else None + return 
chunk, (num_entities, status) if status != c.CHUNK_NOT_CREATED else None if __name__ == '__main__': diff --git a/regionfixer_core/world.py b/regionfixer_core/world.py index 587c108..d01ea11 100644 --- a/regionfixer_core/world.py +++ b/regionfixer_core/world.py @@ -33,208 +33,8 @@ from .util import table from nbt.nbt import TAG_List +import regionfixer_core.constants as c -# Constants: -# -# -------------- -# Chunk related: -# -------------- -# Used to mark the status of chunks: -CHUNK_NOT_CREATED = -1 -CHUNK_OK = 0 -CHUNK_CORRUPTED = 1 -CHUNK_WRONG_LOCATED = 2 -CHUNK_TOO_MANY_ENTITIES = 3 -CHUNK_SHARED_OFFSET = 4 -CHUNK_MISSING_ENTITIES_TAG = 5 - -# Chunk statuses -CHUNK_STATUSES = [CHUNK_NOT_CREATED, - CHUNK_OK, - CHUNK_CORRUPTED, - CHUNK_WRONG_LOCATED, - CHUNK_TOO_MANY_ENTITIES, - CHUNK_SHARED_OFFSET, - CHUNK_MISSING_ENTITIES_TAG] - -# Status that are considered problems -CHUNK_PROBLEMS = [CHUNK_CORRUPTED, - CHUNK_WRONG_LOCATED, - CHUNK_TOO_MANY_ENTITIES, - CHUNK_SHARED_OFFSET, - CHUNK_MISSING_ENTITIES_TAG] - -# Text describing each chunk status -CHUNK_STATUS_TEXT = {CHUNK_NOT_CREATED: "Not created", - CHUNK_OK: "OK", - CHUNK_CORRUPTED: "Corrupted", - CHUNK_WRONG_LOCATED: "Wrong located", - CHUNK_TOO_MANY_ENTITIES: "Too many entities", - CHUNK_SHARED_OFFSET: "Sharing offset", - CHUNK_MISSING_ENTITIES_TAG: "Missing Entities tag" - } - -# arguments used in the options -CHUNK_PROBLEMS_ARGS = {CHUNK_CORRUPTED: 'corrupted', - CHUNK_WRONG_LOCATED: 'wrong', - CHUNK_TOO_MANY_ENTITIES: 'entities', - CHUNK_SHARED_OFFSET: 'sharing', - CHUNK_MISSING_ENTITIES_TAG: 'miss_tag' - } - -# used in some places where there is less space -CHUNK_PROBLEMS_ABBR = {CHUNK_CORRUPTED: 'c', - CHUNK_WRONG_LOCATED: 'w', - CHUNK_TOO_MANY_ENTITIES: 'tme', - CHUNK_SHARED_OFFSET: 'so', - CHUNK_MISSING_ENTITIES_TAG: 'mt' - } - -# Dictionary with possible solutions for the chunks problems, -# used to create options dynamically -# The possible solutions right now are: -CHUNK_SOLUTION_REMOVE = 51 -CHUNK_SOLUTION_REPLACE = 52 -CHUNK_SOLUTION_REMOVE_ENTITIES = 53 -CHUNK_SOLUTION_RELOCATE_USING_DATA = 54 - -CHUNK_PROBLEMS_SOLUTIONS = {CHUNK_CORRUPTED: [CHUNK_SOLUTION_REMOVE, CHUNK_SOLUTION_REPLACE], - CHUNK_WRONG_LOCATED: [CHUNK_SOLUTION_REMOVE, CHUNK_SOLUTION_REPLACE, CHUNK_SOLUTION_RELOCATE_USING_DATA], - CHUNK_TOO_MANY_ENTITIES: [CHUNK_SOLUTION_REMOVE_ENTITIES], - CHUNK_SHARED_OFFSET: [CHUNK_SOLUTION_REMOVE, CHUNK_SOLUTION_REPLACE], - CHUNK_MISSING_ENTITIES_TAG: [CHUNK_SOLUTION_REMOVE, CHUNK_SOLUTION_REPLACE]} - -# chunk problems that can be fixed (so they don't need to be removed or replaced) -FIXABLE_CHUNK_PROBLEMS = [CHUNK_CORRUPTED, CHUNK_MISSING_ENTITIES_TAG, CHUNK_WRONG_LOCATED] - -# list with problem, status-text, problem arg tuples -CHUNK_PROBLEMS_ITERATOR = [] -for problem in CHUNK_PROBLEMS: - CHUNK_PROBLEMS_ITERATOR.append((problem, - CHUNK_STATUS_TEXT[problem], - CHUNK_PROBLEMS_ARGS[problem])) - -# Used to know where to look in a chunk status tuple -TUPLE_NUM_ENTITIES = 0 -TUPLE_STATUS = 1 - -# --------------- -# Region related: -# --------------- -# Used to mark the status of region files: -REGION_OK = 100 -REGION_TOO_SMALL = 101 -REGION_UNREADABLE = 102 -REGION_UNREADABLE_PERMISSION_ERROR = 103 - -# Region statuses -REGION_STATUSES = [REGION_OK, - REGION_TOO_SMALL, - REGION_UNREADABLE, - REGION_UNREADABLE_PERMISSION_ERROR] - -# Text describing each region status used to list all the problem at the end of the scan -REGION_STATUS_TEXT = {REGION_OK: "OK", - REGION_TOO_SMALL: "Too small", - REGION_UNREADABLE: 
"Unreadable IOError", - # This status differentiates IOError from a file that you don't have permission to access - # TODO: It would be better to open region files only in write mode when needed - REGION_UNREADABLE_PERMISSION_ERROR: "Permission error" - } - -# Status that are considered problems -REGION_PROBLEMS = [REGION_TOO_SMALL, - REGION_UNREADABLE, - REGION_UNREADABLE_PERMISSION_ERROR] - -# arguments used in the options -REGION_PROBLEMS_ARGS = {REGION_TOO_SMALL: 'too_small', - REGION_UNREADABLE: 'unreadable', - REGION_UNREADABLE_PERMISSION_ERROR: 'permission_error' - } - -# used in some places where there is less space -REGION_PROBLEMS_ABBR = {REGION_TOO_SMALL: 'ts', - REGION_UNREADABLE: 'ur', - REGION_UNREADABLE_PERMISSION_ERROR: 'pe' - } - -# Dictionary with possible solutions for the region problems, -# used to create options dynamically -# The possible solutions right now are: -REGION_SOLUTION_REMOVE = 151 -REGION_SOLUTION_REPLACE = 152 - -REGION_PROBLEMS_SOLUTIONS = {REGION_TOO_SMALL: [REGION_SOLUTION_REMOVE, REGION_SOLUTION_REPLACE], - REGION_UNREADABLE: [REGION_SOLUTION_REMOVE, REGION_SOLUTION_REPLACE] - } - -# list with problem, status-text, problem arg tuples -REGION_PROBLEMS_ITERATOR = [] -for problem in REGION_PROBLEMS: - try: - REGION_PROBLEMS_ITERATOR.append((problem, - REGION_STATUS_TEXT[problem], - REGION_PROBLEMS_ARGS[problem])) - except KeyError: - pass - -REGION_PROBLEMS_ARGS = {REGION_TOO_SMALL: 'too-small'} - -# ------------------ -# Data file related: -# ------------------ -# Used to mark the status of data files: -DATAFILE_OK = 200 -DATAFILE_UNREADABLE = 201 - -# Data files statuses -DATAFILE_STATUSES = [DATAFILE_OK, - DATAFILE_UNREADABLE] - -# Status that are considered problems -DATAFILE_PROBLEMS = [DATAFILE_UNREADABLE] - -# Text describing each chunk status -DATAFILE_STATUS_TEXT = {DATAFILE_OK: "OK", - DATAFILE_UNREADABLE: "The data file cannot be read" - } - -# arguments used in the options -DATAFILE_PROBLEMS_ARGS = {DATAFILE_OK: 'OK', - DATAFILE_UNREADABLE: 'unreadable' - } - -# used in some places where there is less space -DATAFILE_PROBLEM_ABBR = {DATAFILE_OK: 'ok', - DATAFILE_UNREADABLE: 'ur' - } - -# Dictionary with possible solutions for the chunks problems, -# used to create options dynamically -# The possible solutions right now are: -DATAFILE_SOLUTION_REMOVE = 251 - -DATAFILE_PROBLEMS_SOLUTIONS = {DATAFILE_UNREADABLE: [DATAFILE_SOLUTION_REMOVE]} - -# list with problem, status-text, problem arg tuples -DATAFILE_PROBLEMS_ITERATOR = [] -for problem in DATAFILE_PROBLEMS: - DATAFILE_PROBLEMS_ITERATOR.append((problem, - DATAFILE_STATUS_TEXT[problem], - DATAFILE_PROBLEMS_ARGS[problem])) - -CHUNK_PROBLEMS_ITERATOR = [] -for problem in CHUNK_PROBLEMS: - CHUNK_PROBLEMS_ITERATOR.append((problem, - CHUNK_STATUS_TEXT[problem], - CHUNK_PROBLEMS_ARGS[problem])) - -# Dimension names: -DIMENSION_NAMES = {"region": "Overworld", - "DIM1": "The End", - "DIM-1": "Nether" - } class InvalidFileName(IOError): @@ -260,13 +60,13 @@ def __init__(self, path=None): def __str__(self): text = "NBT file:" + str(self.filename) + "\n" - text += "\tStatus:" + DATAFILE_STATUS_TEXT[self.status] + "\n" + text += "\tStatus:" + c.DATAFILE_STATUS_TEXT[self.status] + "\n" return text @property def oneliner_status(self): """ One line describing the status of the file. 
""" - return "File: \"" + self.filename + "\"; status: " + DATAFILE_STATUS_TEXT[self.status] + return "File: \"" + self.filename + "\"; status: " + c.DATAFILE_STATUS_TEXT[self.status] class ScannedChunk: @@ -303,7 +103,7 @@ def __init__(self, path, time=None): # Dictionary containing counters to for all the chunks self._counts = {} - for s in CHUNK_STATUSES: + for s in c.CHUNK_STATUSES: self._counts[s] = 0 # time when the scan for this file finished @@ -320,13 +120,13 @@ def oneliner_status(self): """ On line description of the status of the region file. """ if self.scanned: status = self.status - if status == REGION_OK: # summary with all found in scan + if status == c.REGION_OK: # summary with all found in scan stats = "" - for s in CHUNK_PROBLEMS: - stats += "{0}:{1}, ".format(CHUNK_PROBLEMS_ABBR[s], self.count_chunks(s)) + for s in c.CHUNK_PROBLEMS: + stats += "{0}:{1}, ".format(c.CHUNK_PROBLEMS_ABBR[s], self.count_chunks(s)) stats += "t:{0}".format(self.count_chunks()) else: - stats = REGION_STATUS_TEXT[status] + stats = c.REGION_STATUS_TEXT[status] else: stats = "Not scanned" @@ -346,7 +146,7 @@ def __getitem__(self, key): def __setitem__(self, key, value): self._chunks[key] = value - self._counts[value[TUPLE_STATUS]] += 1 + self._counts[value[c.TUPLE_STATUS]] += 1 def get_coords(self): """ Returns the region file coordinates as two integers. @@ -392,9 +192,9 @@ def has_problems(self): """ - if self.status in REGION_PROBLEMS: + if self.status in c.REGION_PROBLEMS: return True - for s in CHUNK_PROBLEMS: + for s in c.CHUNK_PROBLEMS: if self.count_chunks(s): return True return False @@ -413,7 +213,10 @@ def count_chunks(self, status=None): """ Counts chunks in the region file with the given problem. Keyword arguments: - - status -- This is the status of the chunk to count for. See CHUNK_PROBLEMS + - status -- This is the status of the chunk to count for. See c.CHUNK_PROBLEMS + + Return: + - counter -- Integer with the number of chunks with that status If problem is omitted or None, counts all the chunks. Returns an integer with the counter. @@ -421,13 +224,13 @@ def count_chunks(self, status=None): """ if status == None: - c = 0 - for s in CHUNK_STATUSES: - c += self._counts[s] + counter = 0 + for s in c.CHUNK_STATUSES: + counter += self._counts[s] else: - c = self._counts[status] + counter = self._counts[status] - return c + return counter def get_global_chunk_coords(self, chunkX, chunkZ): """ Takes the chunk local coordinates and returns its global coordinates. @@ -445,22 +248,26 @@ def get_global_chunk_coords(self, chunkX, chunkZ): return chunkX, chunkZ def list_chunks(self, status=None): - """ Returns a list of tuples (global coords, status tuple) for all the chunks with 'status'. + """ Returns a list of tuples of chunks for all the chunks with 'status'. Keyword arguments: - - status -- Defaults to None. Status of the chunk to list, see CHUNK_STATUSES + - status -- Defaults to None. 
Status of the chunk to list, see c.CHUNK_STATUSES + + Return: + - list - List with tuples like (global_coordinates, status_tuple) where status + tuple is (number_of_entities, status) If status is omitted or None, returns all the chunks in the region file """ l = [] - for c in list(self.keys()): - t = self[c] - if status == t[TUPLE_STATUS]: - l.append((self.get_global_chunk_coords(*c), t)) + for ck in list(self.keys()): + t = self[ck] + if status == t[c.TUPLE_STATUS]: + l.append((self.get_global_chunk_coords(*ck), t)) elif status == None: - l.append((self.get_global_chunk_coords(*c), t)) + l.append((self.get_global_chunk_coords(*ck), t)) return l @@ -473,19 +280,19 @@ def summary(self): """ text = "" - if self.status in REGION_PROBLEMS: - text += " |- This region has status: {0}.\n".format(REGION_STATUS_TEXT[self.status]) + if self.status in c.REGION_PROBLEMS: + text += " |- This region has status: {0}.\n".format(c.REGION_STATUS_TEXT[self.status]) else: - for c in list(self.keys()): - if self[c][TUPLE_STATUS] not in CHUNK_PROBLEMS: + for ck in list(self.keys()): + if self[ck][c.TUPLE_STATUS] not in c.CHUNK_PROBLEMS: continue - status = self[c][TUPLE_STATUS] - h_coords = c + status = self[ck][c.TUPLE_STATUS] + h_coords = ck g_coords = self.get_global_chunk_coords(*h_coords) text += " |-+-Chunk coords: header {0}, global {1}.\n".format(h_coords, g_coords) - text += " | +-Status: {0}\n".format(CHUNK_STATUS_TEXT[status]) - if self[c][TUPLE_STATUS] == CHUNK_TOO_MANY_ENTITIES: - text += " | +-No. entities: {0}\n".format(self[c][TUPLE_NUM_ENTITIES]) + text += " | +-Status: {0}\n".format(c.CHUNK_STATUS_TEXT[status]) + if self[ck][c.TUPLE_STATUS] == c.CHUNK_TOO_MANY_ENTITIES: + text += " | +-No. entities: {0}\n".format(self[c][c.TUPLE_NUM_ENTITIES]) text += " |\n" return text @@ -494,7 +301,7 @@ def remove_problematic_chunks(self, status): """ Removes all the chunks with the given status Keyword arguments: - - status -- Status of the chunks to remove. See CHUNK_STATUSES. + - status -- Status of the chunks to remove. See c.CHUNK_STATUSES. Return: - counter -- An integer with the amount of removed chunks. @@ -503,7 +310,7 @@ def remove_problematic_chunks(self, status): counter = 0 bad_chunks = self.list_chunks(status) - for c in bad_chunks: + for ck in bad_chunks: global_coords = c[0] local_coords = _get_local_chunk_coords(*global_coords) region_file = region.RegionFile(self.path) @@ -511,7 +318,7 @@ def remove_problematic_chunks(self, status): counter += 1 # create the new status tuple # (num_entities, chunk status) - self[local_coords] = (0, CHUNK_NOT_CREATED) + self[local_coords] = (0, c.CHUNK_NOT_CREATED) return counter @@ -519,7 +326,7 @@ def fix_problematic_chunks(self, status): """ This fixes problems in chunks that can be somehow fixed. Keyword arguments: - - status -- Status of the chunks to fix. See FIXABLE_CHUNK_PROBLEMS + - status -- Status of the chunks to fix. See c.FIXABLE_CHUNK_PROBLEMS Return: - counter -- An integer with the amount of fixed chunks. @@ -537,11 +344,11 @@ def fix_problematic_chunks(self, status): # chunks have like 3 or 4 tag missing from the NBT structure. I don't really know which # of them are mandatory. 
- assert(status in FIXABLE_CHUNK_PROBLEMS) + assert(status in c.FIXABLE_CHUNK_PROBLEMS) counter = 0 bad_chunks = self.list_chunks(status) - for c in bad_chunks: - global_coords = c[0] + for ck in bad_chunks: + global_coords = ck[0] local_coords = _get_local_chunk_coords(*global_coords) region_file = region.RegionFile(self.path) # catch the exception of corrupted chunks @@ -549,7 +356,7 @@ def fix_problematic_chunks(self, status): chunk = region_file.get_chunk(*local_coords) except region.ChunkDataError: # if we are here the chunk is corrupted, but still - if status == CHUNK_CORRUPTED: + if status == c.CHUNK_CORRUPTED: # read the data raw m = region_file.metadata[local_coords[0], local_coords[1]] region_file.file.seek(m.blockstart * region.SECTOR_LENGTH + 5) @@ -558,8 +365,8 @@ def fix_problematic_chunks(self, status): try: dc = zlib.decompressobj() out = "" - for c in raw_chunk: - out += dc.decompress(c) + for i in raw_chunk: + out += dc.decompress(i) except: pass # compare the sizes of the new compressed strem and the old one to see if we've got something good @@ -573,7 +380,7 @@ def fix_problematic_chunks(self, status): #print("Extracted: " + str(len(out))) #print("Size of the compressed stream: " + str(len(raw_chunk))) - if status == CHUNK_MISSING_ENTITIES_TAG: + if status == c.CHUNK_MISSING_ENTITIES_TAG: # The arguments to create the empty TAG_List have been somehow extracted by comparing # the tag list from a healthy chunk with the one created by nbt chunk['Level']['Entities'] = TAG_List(name='Entities', type=nbt._TAG_End) @@ -581,10 +388,10 @@ def fix_problematic_chunks(self, status): # create the new status tuple # (num_entities, chunk status) - self[local_coords] = (0 , CHUNK_NOT_CREATED) + self[local_coords] = (0 , c.CHUNK_NOT_CREATED) counter += 1 - elif status == CHUNK_WRONG_LOCATED: + elif status == c.CHUNK_WRONG_LOCATED: data_coords = get_chunk_data_coords(chunk) data_l_coords = _get_local_chunk_coords(*data_coords) region_file.write_chunk(data_l_coords[0], data_l_coords[1], chunk) @@ -595,30 +402,30 @@ def fix_problematic_chunks(self, status): # remove the wrong position of the chunk and update the status # (num_entities, chunk status) - self[local_coords] = (0 , CHUNK_NOT_CREATED) - self[data_l_coords]= (0 , CHUNK_OK) + self[local_coords] = (0 , c.CHUNK_NOT_CREATED) + self[data_l_coords]= (0 , c.CHUNK_OK) counter += 1 return counter def remove_entities(self): - """ Removes all the entities in chunks with status CHUNK_TOO_MANY_ENTITIES. + """ Removes all the entities in chunks with status c.CHUNK_TOO_MANY_ENTITIES. Return: - counter -- Integer with the number of removed entities. 
""" - status = CHUNK_TOO_MANY_ENTITIES + status = c.CHUNK_TOO_MANY_ENTITIES counter = 0 bad_chunks = self.list_chunks(status) - for c in bad_chunks: - global_coords = c[0] + for ck in bad_chunks: + global_coords = ck[0] local_coords = _get_local_chunk_coords(*global_coords) counter += self.remove_chunk_entities(*local_coords) # create new status tuple: # (num_entities, chunk status) - self[local_coords] = (0, CHUNK_OK) + self[local_coords] = (0, c.CHUNK_OK) return counter def remove_chunk_entities(self, x, z): @@ -654,23 +461,23 @@ def rescan_entities(self, options): """ - for c in list(self.keys()): + for ck in list(self.keys()): # for safety reasons use a temporary list to generate the # new tuple t = [0, 0] - if self[c][TUPLE_STATUS] in (CHUNK_TOO_MANY_ENTITIES, CHUNK_OK): + if self[ck][c.TUPLE_STATUS] in (c.CHUNK_TOO_MANY_ENTITIES, c.CHUNK_OK): # only touch the ok chunks and the too many entities chunk - if self[c][TUPLE_NUM_ENTITIES] > options.entity_limit: + if self[ck][c.TUPLE_NUM_ENTITIES] > options.entity_limit: # now it's a too many entities problem - t[TUPLE_NUM_ENTITIES] = self[c][TUPLE_NUM_ENTITIES] - t[TUPLE_STATUS] = CHUNK_TOO_MANY_ENTITIES + t[c.TUPLE_NUM_ENTITIES] = self[ck][c.TUPLE_NUM_ENTITIES] + t[c.TUPLE_STATUS] = c.CHUNK_TOO_MANY_ENTITIES - elif self[c][TUPLE_NUM_ENTITIES] <= options.entity_limit: + elif self[c][c.TUPLE_NUM_ENTITIES] <= options.entity_limit: # the new limit says it's a normal chunk - t[TUPLE_NUM_ENTITIES] = self[c][TUPLE_NUM_ENTITIES] - t[TUPLE_STATUS] = CHUNK_OK + t[c.TUPLE_NUM_ENTITIES] = self[ck][c.TUPLE_NUM_ENTITIES] + t[c.TUPLE_STATUS] = c.CHUNK_OK - self[c] = tuple(t) + self[ck] = tuple(t) class DataSet: @@ -771,7 +578,7 @@ def __init__(self, path, title, *args, **kwargs): # stores the counts of files self._counts = {} - for s in DATAFILE_STATUSES: + for s in c.DATAFILE_STATUSES: self._counts[s] = 0 @property @@ -779,7 +586,7 @@ def has_problems(self): """ Returns True if the dataset has problems and false otherwise. """ for d in self._set.values(): - if d.status in DATAFILE_PROBLEMS: + if d.status in c.DATAFILE_PROBLEMS: return True return False @@ -797,7 +604,7 @@ def summary(self): """ Return a summary of problems found in this set. """ text = "" - bad_data_files = [i for i in list(self._set.values()) if i.status in DATAFILE_PROBLEMS] + bad_data_files = [i for i in list(self._set.values()) if i.status in c.DATAFILE_PROBLEMS] for f in bad_data_files: text += "\t" + f.oneliner_status text += "\n" @@ -826,16 +633,16 @@ def __init__(self, regionset_path=None, region_list=[]): r = ScannedRegionFile(path) self._set[r.get_coords()] = r - except InvalidFileName as e: + except InvalidFileName: print("Warning: The file {0} is not a valid name for a region. I'll skip it.".format(path)) # region and chunk counters with all the data from the scan self._region_counters = {} - for status in REGION_STATUSES: + for status in c.REGION_STATUSES: self._region_counters[status] = 0 self._chunk_counters = {} - for status in CHUNK_STATUSES: + for status in c.CHUNK_STATUSES: self._chunk_counters[status] = 0 # has this regionset been scanned? 
@@ -854,7 +661,7 @@ def get_name(self): dim_directory = self._get_dimension_directory() if dim_directory: try: - return DIMENSION_NAMES[dim_directory] + return c.DIMENSION_NAMES[dim_directory] except: return dim_directory else: @@ -884,7 +691,7 @@ def _update_counts(self, scanned_regionfile): self._region_counters[scanned_regionfile.status] += 1 - for status in CHUNK_STATUSES: + for status in c.CHUNK_STATUSES: self._chunk_counters[status] += scanned_regionfile.count_chunks(status) def _replace_in_data_structure(self, data): @@ -902,11 +709,11 @@ def __str__(self): def has_problems(self): """ Returns True if the regionset has chunk or region problems and false otherwise. """ - for s in REGION_PROBLEMS: + for s in c.REGION_PROBLEMS: if self.count_regions(s): return True - for s in CHUNK_PROBLEMS: + for s in c.CHUNK_PROBLEMS: if self.count_chunks(s): return True @@ -919,7 +726,7 @@ def list_regions(self, status=None): """ Returns a list of all the ScannedRegionFile objects with 'status'. Keyword arguments: - - status -- The region file status. See REGION_STATUSES + - status -- The region file status. See c.REGION_STATUSES If status = None it returns all the objects. @@ -937,7 +744,7 @@ def list_regions(self, status=None): def count_regions(self, status=None): """ Return the number of region files with status. - - status -- The region file status. See REGION_STATUSES + - status -- The region file status. See c.REGION_STATUSES If none returns the total number of region files in this regionset. @@ -945,7 +752,7 @@ def count_regions(self, status=None): counter = 0 if status is None: - for s in REGION_STATUSES: + for s in c.REGION_STATUSES: counter += self._region_counters[s] else: counter = self._region_counters[status] @@ -955,26 +762,30 @@ def count_regions(self, status=None): def count_chunks(self, status=None): """ Returns the number of chunks with the given status. - - status -- The chunk status to count. See CHUNK_STATUSES + Keyword arguments: + - status -- The chunk status to count. See c.CHUNK_STATUSES + Return: + - counter -- Integer with the number of chunks removed + If status is None returns the number of chunks in this region file. - + """ - c = 0 + counter = 0 if status is None: - for s in CHUNK_STATUSES: - c += self._chunk_counters[s] + for s in c.CHUNK_STATUSES: + counter += self._chunk_counters[s] else: - c = self._chunk_counters[status] + counter = self._chunk_counters[status] - return c + return counter def list_chunks(self, status=None): """ Returns a list of all the chunk tuples with 'status'. Keyword arguments: - - status -- The chunk status to list. See CHUNK_STATUSES + - status -- The chunk status to list. See c.CHUNK_STATUSES If status = None it returns all the chunk tuples. @@ -1039,7 +850,7 @@ def remove_problematic_chunks(self, status): """ Removes all the chunks with the given status. Keyword arguments: - - status -- The chunk status to remove. See CHUNK_STATUSES for a list of possible statuses. + - status -- The chunk status to remove. See c.CHUNK_STATUSES for a list of possible statuses. Return: - counter -- Integer with the number of chunks removed @@ -1059,7 +870,7 @@ def fix_problematic_chunks(self, status): """ Try to fix all the chunks with the given problem. Keyword arguments: - - status -- The chunk status to fix. See CHUNK_STATUSES for a list of possible statuses. + - status -- The chunk status to fix. See c.CHUNK_STATUSES for a list of possible statuses. Return: - counter -- Integer with the number of chunks fixed. 
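# Illustrative sketch (not part of the patch): interactive.py validates user
# input against the values of c.CHUNK_PROBLEMS_ARGS; this hypothetical helper
# shows the same mapping in the opposite direction (argument string -> status code).
import regionfixer_core.constants as c

def chunk_status_from_arg(arg):
    """ Return the numeric chunk status for an argument such as 'corrupted'. """
    for status, name in c.CHUNK_PROBLEMS_ARGS.items():
        if name == arg:
            return status
    raise ValueError("Unknown chunk problem argument: {0}".format(arg))

assert chunk_status_from_arg('sharing') == c.CHUNK_SHARED_OFFSET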
@@ -1088,7 +899,7 @@ def remove_entities(self): return counter def rescan_entities(self, options): - """ Updates the CHUNK_TOO_MANY_ENTITIES status of all the chunks in the RegionSet. + """ Updates the c.CHUNK_TOO_MANY_ENTITIES status of all the chunks in the RegionSet. This should be ran when the option entity limit is changed. """ @@ -1117,7 +928,7 @@ def generate_report(self, standalone): # collect chunk data chunk_counts = {} has_chunk_problems = False - for p in CHUNK_PROBLEMS: + for p in c.CHUNK_PROBLEMS: chunk_counts[p] = self.count_chunks(p) if chunk_counts[p] != 0: has_chunk_problems = True @@ -1126,7 +937,7 @@ def generate_report(self, standalone): # collect region data region_counts = {} has_region_problems = False - for p in REGION_PROBLEMS: + for p in c.REGION_PROBLEMS: region_counts[p] = self.count_regions(p) if region_counts[p] != 0: has_region_problems = True @@ -1141,9 +952,9 @@ def generate_report(self, standalone): if has_chunk_problems: table_data = [] table_data.append(['Problem', 'Count']) - for p in CHUNK_PROBLEMS: + for p in c.CHUNK_PROBLEMS: if chunk_counts[p] is not 0: - table_data.append([CHUNK_STATUS_TEXT[p], chunk_counts[p]]) + table_data.append([c.CHUNK_STATUS_TEXT[p], chunk_counts[p]]) table_data.append(['Total', chunk_counts['TOTAL']]) text += table(table_data) else: @@ -1154,9 +965,9 @@ def generate_report(self, standalone): if has_region_problems: table_data = [] table_data.append(['Problem', 'Count']) - for p in REGION_PROBLEMS: + for p in c.REGION_PROBLEMS: if region_counts[p] is not 0: - table_data.append([REGION_STATUS_TEXT[p], region_counts[p]]) + table_data.append([c.REGION_STATUS_TEXT[p], region_counts[p]]) table_data.append(['Total', region_counts['TOTAL']]) text += table(table_data) @@ -1171,7 +982,7 @@ def remove_problematic_regions(self, status): """ Removes all the regions files with the given status. See the warning! Keyword arguments: - - status -- Status of the region files to remove. See REGION_STATUSES for a list. + - status -- Status of the region files to remove. See c.REGION_STATUSES for a list. Return: - counter -- An integer with the amount of removed region files. 
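# Illustrative sketch (not part of the patch): every chunk entry stored in a
# ScannedRegionFile is a plain tuple (number_of_entities, status); the two
# TUPLE_* constants are the indexes into it. The entity count below is made up.
import regionfixer_core.constants as c

chunk_entry = (4500, c.CHUNK_TOO_MANY_ENTITIES)
assert chunk_entry[c.TUPLE_NUM_ENTITIES] == 4500
assert chunk_entry[c.TUPLE_STATUS] == c.CHUNK_TOO_MANY_ENTITIES
print(c.CHUNK_STATUS_TEXT[chunk_entry[c.TUPLE_STATUS]])  # -> "Too many entities"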
@@ -1214,17 +1025,17 @@ def __init__(self, world_path): self.level_data = nbt.NBTFile(level_dat_path)["Data"] self.name = self.level_data["LevelName"].value self.scanned_level = ScannedDataFile(level_dat_path) - self.scanned_level.status = DATAFILE_OK - except Exception as e: + self.scanned_level.status = c.DATAFILE_OK + except Exception: self.name = None self.scanned_level = ScannedDataFile(level_dat_path) - self.scanned_level.status = DATAFILE_UNREADABLE + self.scanned_level.status = c.DATAFILE_UNREADABLE else: self.level_file = None self.level_data = None self.name = None self.scanned_level = ScannedDataFile(level_dat_path) - self.scanned_level.status = DATAFILE_UNREADABLE + self.scanned_level.status = c.DATAFILE_UNREADABLE # Player files self.datafilesets = [] @@ -1273,7 +1084,7 @@ def has_problems(self): """ - if self.scanned_level.status in DATAFILE_PROBLEMS: + if self.scanned_level.status in c.DATAFILE_PROBLEMS: return True for d in self.datafilesets: @@ -1320,18 +1131,18 @@ def summary(self): # leve.dat and data files final += "\nlevel.dat:\n" - if self.scanned_level.status not in DATAFILE_PROBLEMS: + if self.scanned_level.status not in c.DATAFILE_PROBLEMS: final += "\t\'level.dat\' is readable\n" else: - final += "\t[WARNING]: \'level.dat\' isn't readable, error: {0}\n".format(DATAFILE_STATUS_TEXT[self.scanned_level.status]) + final += "\t[WARNING]: \'level.dat\' isn't readable, error: {0}\n".format(c.DATAFILE_STATUS_TEXT[self.scanned_level.status]) sets = [self.players, self.old_players, self.data_files] - for set in sets: - final += set.title - text = set.summary() + for s in sets: + final += s.title + text = s.summary() final += text if text else "All files ok.\n" final += "\n" @@ -1370,8 +1181,8 @@ def count_regions(self, status=None): """ Returns an integer with the count of region files with status. Keyword arguments: - - status -- An integer from REGION_STATUSES to region files with that status. - For a list os status see REGION_STATUSES. + - status -- An integer from c.REGION_STATUSES to region files with that status. + For a list os status see c.REGION_STATUSES. Return: - counter -- An integer with the number of region files with the given status. @@ -1387,8 +1198,8 @@ def count_chunks(self, status=None): """ Returns an integer with the count of chunks with 'status'. Keyword arguments: - - status -- An integer from CHUNK_STATUSES to count chunks with that status. - For a list of status see CHUNK_STATUSES. + - status -- An integer from c.CHUNK_STATUSES to count chunks with that status. + For a list of status see c.CHUNK_STATUSES. Return: - counter -- An integer with the number of chunks with the given status. @@ -1407,7 +1218,7 @@ def replace_problematic_chunks(self, backup_worlds, status, entity_limit, delete - backup_worlds -- A list of World objects to use as backups. Backup worlds will be used in a ordered way. - status -- An integer indicating the status of chunks to be replaced. - See CHUNK_STATUSES for a complete list. + See c.CHUNK_STATUSES for a complete list. - entity_limit -- The threshold to consider a chunk with the status TOO_MANY_ENTITIES. - delete_entities -- Boolean indicating if the chunks with too_many_entities should have their entities removed. @@ -1434,9 +1245,9 @@ def replace_problematic_chunks(self, backup_worlds, status, entity_limit, delete if bad_chunks and b_regionset._get_dimension_directory() != regionset._get_dimension_directory(): print("The regionset \'{0}\' doesn't exist in the backup directory. 
Skipping this backup directory.".format(regionset._get_dimension_directory())) else: - for c in bad_chunks: - global_coords = c[0] - status_tuple = c[1] + for ck in bad_chunks: + global_coords = ck[0] + status_tuple = ck[1] local_coords = _get_local_chunk_coords(*global_coords) print("\n{0:-^60}".format(' New chunk to replace. Coords: x = {0}; z = {1} '.format(*global_coords))) @@ -1463,11 +1274,11 @@ def replace_problematic_chunks(self, backup_worlds, status, entity_limit, delete # Retrive the status from status_tuple if status_tuple == None: - status = CHUNK_NOT_CREATED + status = c.CHUNK_NOT_CREATED else: - status = status_tuple[TUPLE_STATUS] + status = status_tuple[c.TUPLE_STATUS] - if status == CHUNK_OK: + if status == c.CHUNK_OK: backup_region_file = region.RegionFile(backup_region_path) working_chunk = backup_region_file.get_chunk(local_coords[0], local_coords[1]) @@ -1483,7 +1294,7 @@ def replace_problematic_chunks(self, backup_worlds, status, entity_limit, delete print("Chunk replaced using backup dir: {0}".format(backup.path)) else: - print("Can't use this backup directory, the chunk has the status: {0}".format(CHUNK_STATUS_TEXT[status])) + print("Can't use this backup directory, the chunk has the status: {0}".format(c.CHUNK_STATUS_TEXT[status])) continue else: @@ -1495,7 +1306,7 @@ def remove_problematic_chunks(self, status): """ Removes all the chunks with the given status. Keyword arguments: - - status -- The chunk status to remove. See CHUNK_STATUSES for a list of possible statuses. + - status -- The chunk status to remove. See c.CHUNK_STATUSES for a list of possible statuses. Return: - counter -- Integer with the number of chunks removed @@ -1513,7 +1324,7 @@ def fix_problematic_chunks(self, status): """ Try to fix all the chunks with the given status. Keyword arguments: - - status -- The chunk status to fix. See CHUNK_STATUSES for a list of possible statuses. + - status -- The chunk status to fix. See c.CHUNK_STATUSES for a list of possible statuses. Return: - counter -- Integer with the number of chunks fixed. @@ -1534,10 +1345,11 @@ def replace_problematic_regions(self, backup_worlds, status, entity_limit, delet - backup_worlds -- A list of World objects to use as backups. Backup worlds will be used in a ordered way. - status -- An integer indicating the status of region files to be replaced. - See REGION_STATUSES for a complete list. + See c.REGION_STATUSES for a complete list. - entity_limit -- The threshold to consider a chunk with the status TOO_MANY_ENTITIES. + (variable not used, just for inputs to be homogeneous) - delete_entities -- Boolean indicating if the chunks with too_many_entities should have - their entities removed. + their entities removed. (variable not used, just for inputs to be homogeneous) Return: - counter -- An integer with the number of chunks replaced. @@ -1594,7 +1406,7 @@ def remove_problematic_regions(self, status): """ Removes all the regions files with the given status. See the warning! Keyword arguments: - - status -- Status of the region files to remove. See REGION_STATUSES for a list. + - status -- Status of the region files to remove. See c.REGION_STATUSES for a list. Return: - counter -- An integer with the amount of removed region files. @@ -1623,7 +1435,7 @@ def remove_entities(self): return counter def rescan_entities(self, options): - """ Updates the CHUNK_TOO_MANY_ENTITIES status of all the chunks in the RegionSet. + """ Updates the c.CHUNK_TOO_MANY_ENTITIES status of all the chunks in the RegionSet. 
This should be ran when the option entity limit is changed. @@ -1653,7 +1465,7 @@ def generate_report(self, standalone): # collect chunk data chunk_counts = {} has_chunk_problems = False - for p in CHUNK_PROBLEMS: + for p in c.CHUNK_PROBLEMS: chunk_counts[p] = self.count_chunks(p) if chunk_counts[p] != 0: has_chunk_problems = True @@ -1662,7 +1474,7 @@ def generate_report(self, standalone): # collect region data region_counts = {} has_region_problems = False - for p in REGION_PROBLEMS: + for p in c.REGION_PROBLEMS: region_counts[p] = self.count_regions(p) if region_counts[p] != 0: has_region_problems = True @@ -1674,8 +1486,8 @@ def generate_report(self, standalone): # add all the player files with problems text += "\nUnreadable player files:\n" - broken_players = [p for p in self.players._get_list() if p.status in DATAFILE_PROBLEMS] - broken_players.extend([p for p in self.old_players._get_list() if p.status in DATAFILE_PROBLEMS]) + broken_players = [p for p in self.players._get_list() if p.status in c.DATAFILE_PROBLEMS] + broken_players.extend([p for p in self.old_players._get_list() if p.status in c.DATAFILE_PROBLEMS]) if broken_players: broken_player_files = [p.filename for p in broken_players] text += "\n".join(broken_player_files) @@ -1685,7 +1497,7 @@ def generate_report(self, standalone): # Now all the data files text += "\nUnreadable data files:\n" - broken_data_files = [d for d in self.data_files._get_list() if d.status in DATAFILE_PROBLEMS] + broken_data_files = [d for d in self.data_files._get_list() if d.status in c.DATAFILE_PROBLEMS] if broken_data_files: broken_data_filenames = [p.filename for p in broken_data_files] text += "\n".join(broken_data_filenames) @@ -1698,9 +1510,9 @@ def generate_report(self, standalone): if has_chunk_problems: table_data = [] table_data.append(['Problem', 'Count']) - for p in CHUNK_PROBLEMS: + for p in c.CHUNK_PROBLEMS: if chunk_counts[p] is not 0: - table_data.append([CHUNK_STATUS_TEXT[p], chunk_counts[p]]) + table_data.append([c.CHUNK_STATUS_TEXT[p], chunk_counts[p]]) table_data.append(['Total', chunk_counts['TOTAL']]) text += table(table_data) else: @@ -1711,9 +1523,9 @@ def generate_report(self, standalone): if has_region_problems: table_data = [] table_data.append(['Problem', 'Count']) - for p in REGION_PROBLEMS: + for p in c.REGION_PROBLEMS: if region_counts[p] is not 0: - table_data.append([REGION_STATUS_TEXT[p], region_counts[p]]) + table_data.append([c.REGION_STATUS_TEXT[p], region_counts[p]]) table_data.append(['Total', region_counts['TOTAL']]) text += table(table_data) From 779917b73ea2a86c373db6a56b7243b6f9e6c9f1 Mon Sep 17 00:00:00 2001 From: Alejandro Aguilera Date: Tue, 21 Jul 2020 11:32:19 +0200 Subject: [PATCH 119/151] Add support for scanning poi region files. 
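POI region files live in the world's "poi/" folder and appear to use the same
region container as ordinary level regions, so this commit simply feeds them to
the existing region scanner: world.py now also builds a RegionSet from "poi/",
and "poi" is added to DIMENSION_NAMES so reports label it "POIs". Inside a
region file, scan_chunk() tells the two chunk kinds apart by the root NBT tag:
"Level" at the root means a classic level chunk, "Sections" means a POI chunk.
A rough sketch of that rule (illustrative only, the helper name is made up; the
real code with full error handling is in scan_chunk()):

    def chunk_kind(chunk_root):
        # chunk_root is the parsed NBT root of a chunk
        if "Level" in chunk_root:
            return "level"
        if "Sections" in chunk_root:
            return "poi"
        raise AssertionError("Unrecognized scanned chunk in scan_chunk().")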
--- regionfixer_core/constants.py | 3 +- regionfixer_core/scan.py | 106 +++++++++++++++++++++++----------- regionfixer_core/world.py | 2 + 3 files changed, 75 insertions(+), 36 deletions(-) diff --git a/regionfixer_core/constants.py b/regionfixer_core/constants.py index a438390..33b8ea1 100644 --- a/regionfixer_core/constants.py +++ b/regionfixer_core/constants.py @@ -239,5 +239,6 @@ # Dimension names: DIMENSION_NAMES = {"region": "Overworld", "DIM1": "The End", - "DIM-1": "Nether" + "DIM-1": "Nether", + "poi": "POIs" } diff --git a/regionfixer_core/scan.py b/regionfixer_core/scan.py index 72d1c22..c2bae94 100644 --- a/regionfixer_core/scan.py +++ b/regionfixer_core/scan.py @@ -895,23 +895,83 @@ def scan_chunk(region_file, coords, global_coords, entity_limit): the status described by the CHUNK_* variables in world.py - If the chunk does not exist (is not yet created it returns None + If the chunk does not exist (is not yet created it returns None) + + This function also scan the chunks contained in the POI region files. + """ + el = entity_limit + try: chunk = region_file.get_chunk(*coords) - data_coords = world.get_chunk_data_coords(chunk) - num_entities = len(chunk["Level"]["Entities"]) - if data_coords != global_coords: - # wrong located chunk - status = c.CHUNK_WRONG_LOCATED - elif num_entities > el: - # too many entities in the chunk - status = c.CHUNK_TOO_MANY_ENTITIES - else: - # chunk ok + if "Level" in chunk: + # to know if is a poi chunk or a level chunk check the contents + # if 'Level' is at root is a level chunk + + # Level chunk + try: + data_coords = world.get_chunk_data_coords(chunk) + num_entities = len(chunk["Level"]["Entities"]) + if data_coords != global_coords: + # wrong located chunk + status = c.CHUNK_WRONG_LOCATED + elif num_entities > el: + # too many entities in the chunk + status = c.CHUNK_TOO_MANY_ENTITIES + else: + # chunk ok + status = c.CHUNK_OK + + ############################ + # Chunk error detection + ############################ + except KeyError: + # chunk with the mandatory tag Entities missing + status = c.CHUNK_MISSING_ENTITIES_TAG + chunk = None + data_coords = None + global_coords = world.get_global_chunk_coords(split(region_file.filename)[1], coords[0], coords[1]) + num_entities = None + + except UnicodeDecodeError: + # TODO: This should another kind of error, it's now being handled as corrupted chunk + status = c.CHUNK_CORRUPTED + chunk = None + data_coords = None + global_coords = world.get_global_chunk_coords(split(region_file.filename)[1], coords[0], coords[1]) + num_entities = None + + except TypeError: + # TODO: This should another kind of error, it's now being handled as corrupted chunk + status = c.CHUNK_CORRUPTED + chunk = None + data_coords = None + global_coords = world.get_global_chunk_coords(split(region_file.filename)[1], coords[0], coords[1]) + num_entities = None + + elif "Sections" in chunk: + # To check if it's a POI chunk check for the tag "Sections" + # If we give a look to the wiki: + # https://minecraft.gamepedia.com/Java_Edition_level_format#poi_format + # We can see that there are two TAGs at root of a POI, "Data" and "DataVersion", but + # in my tests the TAGs at root are "Sections and "DataVersion", no trace of "Data". + # + # So, let's use "Sections" as a differentiating factor + + # POI chunk + data_coords = None + num_entities = None status = c.CHUNK_OK + + else: + # what is this? 
we shouldn't reach this part of the code, as far as + # we know there is only POI chunks and Level chunks + raise AssertionError("Unrecognized scanned chunk in scan_chunk().") + ############################################### + # POI chunk and Level chunk common errors + ############################################### except InconceivedChunk: # chunk not created chunk = None @@ -943,30 +1003,6 @@ def scan_chunk(region_file, coords, global_coords, entity_limit): global_coords = world.get_global_chunk_coords(split(region_file.filename)[1], coords[0], coords[1]) num_entities = None - except KeyError: - # chunk with the mandatory tag Entities missing - status = c.CHUNK_MISSING_ENTITIES_TAG - chunk = None - data_coords = None - global_coords = world.get_global_chunk_coords(split(region_file.filename)[1], coords[0], coords[1]) - num_entities = None - - except UnicodeDecodeError: - # TODO: This should another kind of error, it's now being handled as corrupted chunk - status = c.CHUNK_CORRUPTED - chunk = None - data_coords = None - global_coords = world.get_global_chunk_coords(split(region_file.filename)[1], coords[0], coords[1]) - num_entities = None - - except TypeError: - # TODO: This should another kind of error, it's now being handled as corrupted chunk - status = c.CHUNK_CORRUPTED - chunk = None - data_coords = None - global_coords = world.get_global_chunk_coords(split(region_file.filename)[1], coords[0], coords[1]) - num_entities = None - return chunk, (num_entities, status) if status != c.CHUNK_NOT_CREATED else None diff --git a/regionfixer_core/world.py b/regionfixer_core/world.py index d01ea11..4ea669c 100644 --- a/regionfixer_core/world.py +++ b/regionfixer_core/world.py @@ -1016,6 +1016,8 @@ def __init__(self, world_path): self.regionsets.append(RegionSet(join(self.path, "region/"))) for directory in glob(join(self.path, "DIM*/region")): self.regionsets.append(RegionSet(join(self.path, directory))) + # TODO: let's scan POI as region files maybe it's enough, + self.regionsets.append(RegionSet(join(self.path, "poi/"))) # level.dat # Let's scan level.dat here so we can extract the world name From cab35cdc4f2621b238cd4dae8db685247f8d129c Mon Sep 17 00:00:00 2001 From: Alejandro Aguilera Date: Tue, 21 Jul 2020 19:29:19 +0200 Subject: [PATCH 120/151] More improvements to comments. Remove old unused function. --- regionfixer_core/scan.py | 26 ++-- regionfixer_core/util.py | 4 +- regionfixer_core/world.py | 266 +++++++++++++++++++++----------------- 3 files changed, 159 insertions(+), 137 deletions(-) diff --git a/regionfixer_core/scan.py b/regionfixer_core/scan.py index c2bae94..32a4fc1 100644 --- a/regionfixer_core/scan.py +++ b/regionfixer_core/scan.py @@ -52,7 +52,7 @@ class ChildProcessException(Exception): """ Raised when a child process has problems. - Keyword arguments: + Inputs: - partial_scanned_file -- ScannedObject from world.py partially filled with the results of the scan - exc_type -- Type of the exception being handled, extracted from sys.exc_info() @@ -156,7 +156,7 @@ def multiprocess_scan_regionfile(region_file): def _mp_data_pool_init(d): """ Function to initialize the multiprocessing in scan_dataset. - Keyword arguments: + Inputs: - d -- Dictionary containing the information to copy to the function of the child process. This function adds the queue to each of the child processes objects. This queue @@ -172,7 +172,7 @@ def _mp_data_pool_init(d): def _mp_regionset_pool_init(d): """ Function to initialize the multiprocessing in scan_regionset. 
- Keyword arguments: + Inputs: - d -- Dictionary containing the information to copy to the function of the child process. This function adds the queue to each of the child processes objects. This queue @@ -194,7 +194,7 @@ def _mp_regionset_pool_init(d): class AsyncScanner: """ Class to derive all the scanner classes from. - Keyword arguments: + Inputs: - data_structure -- Is one of the objects in world: DataSet, RegionSet - processes -- Integer with the number of child processes to use for the scan - scan_function -- Function used to scan the data @@ -291,7 +291,7 @@ def terminate(self): def raise_child_exception(self, exception_tuple): """ Raises a ChildProcessException. - Keyword arguments: + Inputs: - exception_tuple -- Tuple containing all the information about the exception of the child process. @@ -388,7 +388,7 @@ def __len__(self): class AsyncDataScanner(AsyncScanner): """ Scan a DataFileSet and fill the data structure. - Keyword arguments: + Inputs: - data_structure -- A DataFileSet from world.py containing the files to scan - processes -- An integer with the number of child processes to use @@ -412,7 +412,7 @@ def update_str_last_scanned(self, data): class AsyncRegionsetScanner(AsyncScanner): """ Scan a RegionSet and fill the data structure. - Keyword arguments: + Inputs: - data_structure -- A RegionSet from world.py containing the files to scan - processes -- An integer with the number of child processes to use - entity_limit -- An integer, threshold of entities for a chunk to be considered @@ -449,7 +449,7 @@ def update_str_last_scanned(self, r): class AsyncWorldRegionScanner: """ Wrapper around the calls of AsyncScanner the whole world. - Keyword arguments: + Inputs: - world_obj -- A World object from world.py - processes -- An integer with the number of child processes to use - entity_limit -- An integer, threshold of entities for a chunk to be considered @@ -592,7 +592,7 @@ def __len__(self): def console_scan_loop(scanners, scan_titles, verbose): """ Scan all the AsyncScanner object printing status to console. - Keyword arguments: + Inputs: - scanners -- List of AsyncScanner objects to scan. - scan_titles -- List of string with the names of the world/regionsets in the same order as in scanners. @@ -643,7 +643,7 @@ def console_scan_world(world_obj, processes, entity_limit, remove_entities, verbose): """ Scans a world folder prints status to console. - Keyword arguments: + Inputs: - world_obj -- World object from world.py that will be scanned - processes -- An integer with the number of child processes to use - entity_limit -- An integer, threshold of entities for a chunk to be considered @@ -698,7 +698,7 @@ def console_scan_world(world_obj, processes, entity_limit, remove_entities, def console_scan_regionset(regionset, processes, entity_limit, remove_entities, verbose): """ Scan a regionset printing status to console. - Keyword arguments: + Inputs: - regionset -- RegionSet object from world.py that will be scanned - processes -- An integer with the number of child processes to use - entity_limit -- An integer, threshold of entities for a chunk to be considered @@ -721,7 +721,7 @@ def console_scan_regionset(regionset, processes, entity_limit, remove_entities, def scan_data(scanned_dat_file): """ Try to parse the nbt data file, and fill the scanned object. - Keyword arguments: + Inputs: - scanned_dat_file -- ScannedDataFile object from world.py. 
If something is wrong it will return a tuple with useful info @@ -764,7 +764,7 @@ def scan_data(scanned_dat_file): def scan_region_file(scanned_regionfile_obj, entity_limit, remove_entities): """ Scan a region file filling the ScannedRegionFile object - Keyword arguments: + Inputs: - scanned_regionfile_obj -- ScannedRegionfile object from world.py that will be scanned - entity_limit -- An integer, threshold of entities for a chunk to be considered with too many entities diff --git a/regionfixer_core/util.py b/regionfixer_core/util.py index f4d8e41..4859a6d 100644 --- a/regionfixer_core/util.py +++ b/regionfixer_core/util.py @@ -29,7 +29,7 @@ def get_str_from_traceback(ty, value, tb): """ Return a string from a traceback plus exception. - Keyword arguments: + Inputs: - ty -- Exception type - value -- value of the traceback - tb -- Traceback @@ -117,7 +117,7 @@ def entitle(text, level=0): def table(columns): """ Generates a text containing a pretty table. - Keyword argument: + Input: - columns -- A list containing lists in which each one of the is a column of the table. diff --git a/regionfixer_core/world.py b/regionfixer_core/world.py index 4ea669c..64ead6f 100644 --- a/regionfixer_core/world.py +++ b/regionfixer_core/world.py @@ -25,7 +25,6 @@ from os.path import join, split, exists, isfile from os import remove from shutil import copy -import time import zlib import nbt.region as region @@ -45,7 +44,8 @@ class InvalidFileName(IOError): class ScannedDataFile: """ Stores all the information of a scanned data file. - Only needs the path of the data file to be initialized. + Inputs: + - path -- String with the path of the data file. Defaults to None. """ def __init__(self, path=None): @@ -83,11 +83,12 @@ class ScannedRegionFile: Keywords arguments: - path -- A string with the path of the region file - - time -- The time at which the region file has been scanned. - None by default. + - scanned_time -- Float, time as returned by bult-in time module. The time + at which the region file has been scanned. None by default. + """ - def __init__(self, path, time=None): + def __init__(self, path, scanned_time=None): # general region file info self.path = path self.filename = split(path)[1] @@ -107,7 +108,7 @@ def __init__(self, path, time=None): self._counts[s] = 0 # time when the scan for this file finished - self.scan_time = time + self.scan_time = scanned_time # The status of the region file. self.status = None @@ -135,7 +136,7 @@ def oneliner_status(self): def __str__(self): text = "Path: {0}".format(self.path) scanned = False - if time: + if self.scan_time: scanned = True text += "\nScanned: {0}".format(scanned) @@ -155,7 +156,7 @@ def get_coords(self): - coordX, coordZ -- Integers with the x and z coordinates of the region file. - Either parse the region file name or uses the stored ones. + Either parse the region file name or uses the stored in the object. """ @@ -177,8 +178,8 @@ def keys(self): """Returns a list with all the local coordinates (header coordinates). Return: - - list -- A list with all the chunk coordinates extracted form the - region file header + - list -- A list with all the local chunk coordinates extracted form the + region file header as integer tuples """ return list(self._chunks.keys()) @@ -212,8 +213,9 @@ def get_path(self): def count_chunks(self, status=None): """ Counts chunks in the region file with the given problem. - Keyword arguments: - - status -- This is the status of the chunk to count for. 
See c.CHUNK_PROBLEMS + Inputs: + - status -- Integer with the status of the chunk to count for. See + CHUNK_PROBLEMS in constants.py. Return: - counter -- Integer with the number of chunks with that status @@ -235,9 +237,12 @@ def count_chunks(self, status=None): def get_global_chunk_coords(self, chunkX, chunkZ): """ Takes the chunk local coordinates and returns its global coordinates. - Keyword arguments: - - chunkX -- Local X chunk coordinate. - - chunkZ -- Local Z chunk coordinate. + Inputs: + - chunkX -- Integer, local X chunk coordinate. + - chunkZ -- Integer, local Z chunk coordinate. + + Return: + - chunkX, chunkZ -- Integers with the x and z global chunk coordinates """ @@ -250,8 +255,9 @@ def get_global_chunk_coords(self, chunkX, chunkZ): def list_chunks(self, status=None): """ Returns a list of tuples of chunks for all the chunks with 'status'. - Keyword arguments: - - status -- Defaults to None. Status of the chunk to list, see c.CHUNK_STATUSES + Inputs: + - status -- Defaults to None. Integer with the status of the chunk to list, + see CHUNK_STATUSES in constants.py Return: - list - List with tuples like (global_coordinates, status_tuple) where status @@ -274,8 +280,12 @@ def list_chunks(self, status=None): def summary(self): """ Returns a summary of all the problematic chunks. - The summary is a string with region file, global coordinates, local coordinates, - and status of every problematic chunk, in a subtree like format. + Return: + - text -- Human readable string with the summary of the scan. + + The summary is a human readable string with region file, global + coordinates, local coordinates, and status of every problematic + chunk, in a subtree like format. """ @@ -300,8 +310,9 @@ def summary(self): def remove_problematic_chunks(self, status): """ Removes all the chunks with the given status - Keyword arguments: - - status -- Status of the chunks to remove. See c.CHUNK_STATUSES. + Inputs: + - status -- Integer with the status of the chunks to remove. + See CHUNK_STATUSES in constants.py Return: - counter -- An integer with the amount of removed chunks. @@ -311,7 +322,7 @@ def remove_problematic_chunks(self, status): counter = 0 bad_chunks = self.list_chunks(status) for ck in bad_chunks: - global_coords = c[0] + global_coords = ck[0] local_coords = _get_local_chunk_coords(*global_coords) region_file = region.RegionFile(self.path) region_file.unlink_chunk(*local_coords) @@ -325,18 +336,24 @@ def remove_problematic_chunks(self, status): def fix_problematic_chunks(self, status): """ This fixes problems in chunks that can be somehow fixed. - Keyword arguments: - - status -- Status of the chunks to fix. See c.FIXABLE_CHUNK_PROBLEMS + Inputs: + - status -- Integer with the status of the chunks to fix. See + FIXABLE_CHUNK_PROBLEMS in constants.py Return: - counter -- An integer with the amount of fixed chunks. - Right now it only fixes chunks missing the TAG_List Entities and wrong located chunks. + Right now it only fixes chunks missing the TAG_List Entities, wrong located chunks and + in some cases corrupted chunks. -TAG_List is fixed by adding said tag. -Wrong located chunks are relocated to the data coordinates stored in the zip stream. We suppose these coordinates are right because the data has checksum. + + -Corrupted chunks: tries to read the the compressed stream byte by byte until it raises + exception. After that compares the size of the compressed chunk stored in the region file + with the compressed chunk extracted from the strem, if they are the same it's good to go! 
""" @@ -431,9 +448,9 @@ def remove_entities(self): def remove_chunk_entities(self, x, z): """ Takes a chunk local coordinates and remove its entities. - Keyword arguments: - - x -- X local coordinate of the chunk - - z -- Z local coordinate of the chunk + Inputs: + - x -- Integer with the X local (header) coordinate of the chunk + - z -- Integer with the Z local (header) coordinate of the chunk Return: - counter -- An integer with the number of entities removed. @@ -455,7 +472,7 @@ def remove_chunk_entities(self, x, z): def rescan_entities(self, options): """ Updates the status of all the chunks after changing entity_limit. - Keyword arguments: + Inputs: - options -- argparse arguments, the whole argparse.ArgumentParser() object as used by regionfixer.py @@ -483,14 +500,15 @@ def rescan_entities(self, options): class DataSet: """ Stores data items to be scanned by AsyncScanner in scan.py. - typevalue is the type of the class to store in the set. When setting it will be - asserted if it is of that type + Inputs: + - typevalue -- The type of the class to store in the set. In initialization it will be + asserted if it is of that type The data will be stored in the self._set dictionary. Implemented private methods are: __getitem__, __setitem__, _get_list, __len__. - Three methods should be implemented to work with a DataSet, two of the mandatory: + Three methods should be overridden to work with a DataSet, two of the mandatory: - _replace_in_data_structure -- (mandatory) Should be created because during the scan the different processes create copies of the original data, so replacing it in the original data set is mandatory in order to keep everything working. @@ -498,7 +516,8 @@ class DataSet: - _update_counts -- (mandatory) Makes sure that the DataSet stores all the counts and that it is not needed to loop through all of them to know the real count. - - has_problems -- Should return True only if any element of the set has problems + - has_problems -- (optional but used) Should return True only if any element + of the set has problems """ @@ -540,7 +559,7 @@ def has_problems(self): def _replace_in_data_structure(self, data, key): """ For multiprocessing. Replaces the data in the set with the new data. - Keyword arguments: + Inputs: - data -- Value of the data to be stored - key -- Key in which to store the data @@ -560,7 +579,7 @@ def _update_counts(self, s): class DataFileSet(DataSet): """ DataSet for Minecraft data files (.dat). - Keyword arguments: + Inputs: - path -- Path to the folder containing data files - title -- Some user readable string to represent the DataSet """ @@ -614,7 +633,7 @@ def summary(self): class RegionSet(DataSet): """Stores an arbitrary number of region files and their scan results. - Keyword arguments: + Inputs: - regionset_path -- Path to the folder containing region files - region_list -- List of paths to all the region files """ @@ -725,9 +744,12 @@ def keys(self): def list_regions(self, status=None): """ Returns a list of all the ScannedRegionFile objects with 'status'. - Keyword arguments: + Inputs: - status -- The region file status. See c.REGION_STATUSES + Return: + - t -- List with all the ScannedRegionFile objects with that status + If status = None it returns all the objects. """ @@ -744,8 +766,12 @@ def list_regions(self, status=None): def count_regions(self, status=None): """ Return the number of region files with status. + Inputs: - status -- The region file status. 
See c.REGION_STATUSES + Return: + - counter -- Integer with the number of regions with that status + If none returns the total number of region files in this regionset. """ @@ -762,8 +788,9 @@ def count_regions(self, status=None): def count_chunks(self, status=None): """ Returns the number of chunks with the given status. - Keyword arguments: - - status -- The chunk status to count. See c.CHUNK_STATUSES + Inputs: + - status -- Integer with the chunk status to count. See + c.CHUNK_STATUSES in constants.py Return: - counter -- Integer with the number of chunks removed @@ -784,9 +811,14 @@ def count_chunks(self, status=None): def list_chunks(self, status=None): """ Returns a list of all the chunk tuples with 'status'. - Keyword arguments: + Inputs: - status -- The chunk status to list. See c.CHUNK_STATUSES + Return: + - l -- List with tuples like (global_coordinates, status_tuple) where status + tuple is (number_of_entities, status). For more details see + ScannedRegionFile.list_chunks() + If status = None it returns all the chunk tuples. """ @@ -799,8 +831,12 @@ def list_chunks(self, status=None): def summary(self): """ Returns a string with a summary of the problematic chunks. - The summary contains global coords, local coords, data coords and status. - + Return: + - text -- String, human readable text with information about the scan. + + The summary contains global coordinates, local coordinates, + data coordinates and status. + """ text = "" @@ -813,32 +849,35 @@ def summary(self): return text def locate_chunk(self, global_coords): - """ Takes the global coordinates of a chunk and returns its location. + """ Takes the global coordinates of a chunk and returns where is it. - Keyword arguments: - - global_coords -- Global chunk coordinates of the chunk to locate. + Inputs: + - global_coords -- Tuple of two integers with the global chunk coordinates to locate. Return: - - filename -- Filename where the chunk is stored - - local_coords -- Local coordinates of the chunk in the region file - + - path -- String, with the path of the region file where + the chunk is stored + - local_coords -- Tuple of two integers with local coordinates of the + chunk in the region file + """ - filename = self.path + get_chunk_region(*global_coords) + path = self.path + get_chunk_region(*global_coords) local_coords = _get_local_chunk_coords(*global_coords) - return filename, local_coords + return path, local_coords def locate_region(self, coords): """ Returns a string with the path of the region file. - Keyword arguments: - - coords -- Global region coordinates of the region file to locate in - this RegionSet. + Inputs: + - coords -- Tuple of two integers with the global region coordinates of the region + file to locate in this RegionSet. Return: - region_name -- String containing the path of the region file or None if it doesn't exist + """ x, z = coords @@ -849,8 +888,9 @@ def locate_region(self, coords): def remove_problematic_chunks(self, status): """ Removes all the chunks with the given status. - Keyword arguments: - - status -- The chunk status to remove. See c.CHUNK_STATUSES for a list of possible statuses. + Inputs: + - status -- Integer with the chunk status to remove. See c.CHUNK_STATUSES + in constants.py for a list of possible statuses. Return: - counter -- Integer with the number of chunks removed @@ -869,8 +909,9 @@ def remove_problematic_chunks(self, status): def fix_problematic_chunks(self, status): """ Try to fix all the chunks with the given problem. 
- Keyword arguments: - - status -- The chunk status to fix. See c.CHUNK_STATUSES for a list of possible statuses. + Inputs: + - status -- Integer with the chunk status to fix. See c.CHUNK_STATUSES in constants.py + for a list of possible statuses. Return: - counter -- Integer with the number of chunks fixed. @@ -910,7 +951,7 @@ def rescan_entities(self, options): def generate_report(self, standalone): """ Generates a report with the results of the scan. - Keyword arguments: + Inputs: - standalone -- If true the report will be a human readable String. If false the report will be a dictionary with all the counts of chunks and regions. @@ -981,8 +1022,9 @@ def generate_report(self, standalone): def remove_problematic_regions(self, status): """ Removes all the regions files with the given status. See the warning! - Keyword arguments: - - status -- Status of the region files to remove. See c.REGION_STATUSES for a list. + Inputs: + - status -- Integer with the status of the region files to remove. + See c.REGION_STATUSES in constants.py for a list. Return: - counter -- An integer with the amount of removed region files. @@ -1000,11 +1042,12 @@ def remove_problematic_regions(self, status): class World: """ This class stores information and scan results for a Minecraft world. - Keyword arguments: + Inputs: - world_path -- String with the path of the world. Once scanned, stores all the problems found in it. It also has all the tools needed to modify the world. + """ def __init__(self, world_path): @@ -1166,7 +1209,7 @@ def get_name(self): """ Returns a string with the name of the world. Return: - - name -- Either the world name as found in level.dat or the last + - name -- String with either the world name as found in level.dat or the last directory in the world path. """ @@ -1182,9 +1225,9 @@ def get_name(self): def count_regions(self, status=None): """ Returns an integer with the count of region files with status. - Keyword arguments: + Inputs: - status -- An integer from c.REGION_STATUSES to region files with that status. - For a list os status see c.REGION_STATUSES. + For a list of status see REGION_STATUSES in constants.py Return: - counter -- An integer with the number of region files with the given status. @@ -1199,7 +1242,7 @@ def count_regions(self, status=None): def count_chunks(self, status=None): """ Returns an integer with the count of chunks with 'status'. - Keyword arguments: + Inputs: - status -- An integer from c.CHUNK_STATUSES to count chunks with that status. For a list of status see c.CHUNK_STATUSES. @@ -1207,6 +1250,7 @@ def count_chunks(self, status=None): - counter -- An integer with the number of chunks with the given status. """ + counter = 0 for r in self.regionsets: count = r.count_chunks(status) @@ -1216,14 +1260,15 @@ def count_chunks(self, status=None): def replace_problematic_chunks(self, backup_worlds, status, entity_limit, delete_entities): """ Replaces problematic chunks using backups. - Keyword arguments: + Inputs: - backup_worlds -- A list of World objects to use as backups. Backup worlds will be used in a ordered way. - status -- An integer indicating the status of chunks to be replaced. - See c.CHUNK_STATUSES for a complete list. + See CHUNK_STATUSES in constants.py for a complete list. - entity_limit -- The threshold to consider a chunk with the status TOO_MANY_ENTITIES. - delete_entities -- Boolean indicating if the chunks with too_many_entities should have their entities removed. + Return: - counter -- An integer with the number of chunks replaced. 
@@ -1307,8 +1352,9 @@ def replace_problematic_chunks(self, backup_worlds, status, entity_limit, delete def remove_problematic_chunks(self, status): """ Removes all the chunks with the given status. - Keyword arguments: - - status -- The chunk status to remove. See c.CHUNK_STATUSES for a list of possible statuses. + Inputs: + - status -- Integer with the chunk status to remove. See CHUNK_STATUSES in constants.py + for a list of possible statuses. Return: - counter -- Integer with the number of chunks removed @@ -1325,8 +1371,9 @@ def remove_problematic_chunks(self, status): def fix_problematic_chunks(self, status): """ Try to fix all the chunks with the given status. - Keyword arguments: - - status -- The chunk status to fix. See c.CHUNK_STATUSES for a list of possible statuses. + Inputs: + - status -- Integer with the chunk status to remove. See CHUNK_STATUSES in constants.py + for a list of possible statuses. Return: - counter -- Integer with the number of chunks fixed. @@ -1343,7 +1390,7 @@ def fix_problematic_chunks(self, status): def replace_problematic_regions(self, backup_worlds, status, entity_limit, delete_entities): """ Replaces problematic region files using backups. - Keyword arguments: + Inputs: - backup_worlds -- A list of World objects to use as backups. Backup worlds will be used in a ordered way. - status -- An integer indicating the status of region files to be replaced. @@ -1356,7 +1403,7 @@ def replace_problematic_regions(self, backup_worlds, status, entity_limit, delet - counter -- An integer with the number of chunks replaced. Note: entity_limit and delete_entities are not really used here. They are just there to make all - the methods homogeneus. + the methods homogeneous. """ @@ -1407,8 +1454,9 @@ def replace_problematic_regions(self, backup_worlds, status, entity_limit, delet def remove_problematic_regions(self, status): """ Removes all the regions files with the given status. See the warning! - Keyword arguments: - - status -- Status of the region files to remove. See c.REGION_STATUSES for a list. + Inputs: + - status -- Integer with the status of the region files to remove. + See REGION_STATUSES in constants. py for a list. Return: - counter -- An integer with the amount of removed region files. @@ -1437,7 +1485,7 @@ def remove_entities(self): return counter def rescan_entities(self, options): - """ Updates the c.CHUNK_TOO_MANY_ENTITIES status of all the chunks in the RegionSet. + """ Updates the CHUNK_TOO_MANY_ENTITIES status of all the chunks in the RegionSet. This should be ran when the option entity limit is changed. @@ -1449,8 +1497,8 @@ def rescan_entities(self, options): def generate_report(self, standalone): """ Generates a report with the results of the scan. - Keyword arguments: - - standalone -- If true the report will be a human readable String. If false the + Inputs: + - standalone -- Boolean, if true the report will be a human readable String. If false the report will be a dictionary with all the counts of chunks and regions. Return if standalone = True: @@ -1540,33 +1588,6 @@ def generate_report(self, standalone): -def parse_chunk_list(chunk_list, world_obj): - """ Generate a list of chunks to use with world.delete_chunk_list. 
- - It takes a list of global chunk coordinates and generates a list of - tuples containing: - - (region fullpath, chunk X, chunk Z) - - """ - # this is not used right now - parsed_list = [] - for line in chunk_list: - try: - chunk = eval(line) - except: - print("The chunk {0} is not valid.".format(line)) - continue - region_name = get_chunk_region(chunk[0], chunk[1]) - fullpath = join(world_obj.world_path, "region", region_name) - if fullpath in world_obj.all_mca_files: - parsed_list.append((fullpath, chunk[0], chunk[1])) - else: - print("The chunk {0} should be in the region file {1} and this region files doesn't extist!".format(chunk, fullpath)) - - return parsed_list - - def parse_paths(args): """ Parse a list of paths to and returns World and a RegionSet objects. @@ -1614,13 +1635,14 @@ def parse_world_list(world_path_list): """ Parses a world path list. Returns a list of World objects. Keywords arguments: - world_path_list -- A list of paths where minecraft worlds are supposed to be + world_path_list -- A list of string with paths where minecraft worlds are supposed to be Return: world_list -- A list of World objects using the paths from the input Parses a world path list checking if they exists and are a minecraft - world folders. Returns a list of World objects. + world folders. Returns a list of World objects. Prints errors for the + paths that are not minecraft worlds. """ world_list = [] @@ -1649,8 +1671,8 @@ def delete_entities(region_file, x, z): """ Removes entities in chunks with the status TOO_MANY_ENTITIES. Keyword entities: - - x -- X local coordinate of the chunk in the region files - - z -- Z local coordinate of the chunk in the region files + - x -- Integer, X local coordinate of the chunk in the region files + - z -- Integer, Z local coordinate of the chunk in the region files - region_file -- RegionFile object where the chunk is stored Return: @@ -1672,9 +1694,9 @@ def delete_entities(region_file, x, z): def _get_local_chunk_coords(chunkx, chunkz): """ Gives the chunk local coordinates from the global coordinates. - Keyword arguments: - - chunkx -- X chunk global coordinate in the world. - - chunkz -- Z chunk global coordinate in the world. + Inputs: + - chunkx -- Integer, X chunk global coordinate in the world. + - chunkz -- Integer, Z chunk global coordinate in the world. Return: - x, z -- X and Z local coordinates of the chunk in the region file. @@ -1687,9 +1709,9 @@ def _get_local_chunk_coords(chunkx, chunkz): def get_chunk_region(chunkX, chunkZ): """ Returns the name of the region file given global chunk coordinates. - Keyword arguments: - - chunkx -- X chunk global coordinate in the world. - - chunkz -- Z chunk global coordinate in the world. + Inputs: + - chunkx -- Integer, X chunk global coordinate in the world. + - chunkz -- Integer, Z chunk global coordinate in the world. Return: - region_name -- A string with the name of the region file where the chunk @@ -1708,8 +1730,8 @@ def get_chunk_region(chunkX, chunkZ): def get_chunk_data_coords(nbt_file): """ Gets and returns the coordinates stored in the NBT structure of the chunk. - Keyword arguments: - - nbt_file -- An NBT file. + Inputs: + - nbt_file -- An NBT file. From the nbt module. Return: - coordX, coordZ -- Integers with the X and Z global coordinates of the chunk. @@ -1730,7 +1752,7 @@ def get_chunk_data_coords(nbt_file): def get_region_coords(filename): """ Get and return a region file coordinates from path. - Keyword arguments: + Inputs: - filename -- Filename or path of the region file. 
Return: @@ -1748,10 +1770,10 @@ def get_region_coords(filename): def get_global_chunk_coords(region_name, chunkX, chunkZ): """ Get and return a region file coordinates from path. - Keyword arguments: - - region_name -- Filename or path of the region file. - - chunkX -- X local coordinate of the chunk - - chunkZ -- Z local coordinate of the chunk + Inputs: + - region_name -- String with filename or path of the region file. + - chunkX -- Integer, X local coordinate of the chunk + - chunkZ -- Integer, Z local coordinate of the chunk Return: - coordX, coordZ -- X and z global coordinates of the From a69b4a7219ab6550ce5f28352c7e2c7119418c53 Mon Sep 17 00:00:00 2001 From: Alejandro Aguilera Date: Tue, 21 Jul 2020 23:53:54 +0200 Subject: [PATCH 121/151] Remove interative mode. Right now is broken. --- regionfixer.py | 25 +++++++++++++------------ 1 file changed, 13 insertions(+), 12 deletions(-) diff --git a/regionfixer.py b/regionfixer.py index 9246199..e5767ac 100644 --- a/regionfixer.py +++ b/regionfixer.py @@ -328,13 +328,15 @@ def main(): action='store_true', default=False) - parser.add_argument('--interactive', - '-i', - help='Enter in interactive mode, where you can scan, see the ' - 'problems, and fix them in a terminal like mode', - dest='interactive', - default=False, - action='store_true', ) + #=========================================================================== + # parser.add_argument('--interactive', + # '-i', + # help='Enter in interactive mode, where you can scan, see the ' + # 'problems, and fix them in a terminal like mode', + # dest='interactive', + # default=False, + # action='store_true', ) + #=========================================================================== parser.add_argument('--log', '-l', @@ -390,11 +392,10 @@ def main(): args.replace_shared_offset any_region_replace_option = args.replace_too_small - if args.interactive or args.summary: + if False or args.summary: # removed interactive mode args.interactive if any_chunk_replace_option or any_region_replace_option: - parser.error('Error: Can\'t use the options --replace-* , --delete-* and ' - '--log with --interactive. You can choose all this ' - 'while in the interactive mode.') + parser.error('Error: Can\'t use the options --replace-* , --delete-* with ' + '--log') else: # Not options.interactive @@ -433,7 +434,7 @@ def main(): # The scanning process starts found_problems_in_regionsets = False found_problems_in_worlds = False - if args.interactive: + if False: # removed args.interactive ci = InteractiveLoop(world_list, regionset, args, backup_worlds) ci.cmdloop() return c.RV_OK From 10e422943645ad29aba69ef81a915507aaac753c Mon Sep 17 00:00:00 2001 From: Alejandro Aguilera Date: Wed, 22 Jul 2020 00:25:18 +0200 Subject: [PATCH 122/151] Improve the help provided by the --help option. --- regionfixer.py | 84 +++++++++++++++++++++++++------------------------- 1 file changed, 42 insertions(+), 42 deletions(-) diff --git a/regionfixer.py b/regionfixer.py index e5767ac..ef8fc6b 100644 --- a/regionfixer.py +++ b/regionfixer.py @@ -42,7 +42,7 @@ def fix_bad_chunks(options, scanned_obj): """ Fixes chunks that can be repaired. - Keywords arguments: + Inputs: options -- argparse arguments, the whole argparse.ArgumentParser() object scanned_obj -- this can be a RegionSet or World objects from world.py @@ -78,7 +78,7 @@ def fix_bad_chunks(options, scanned_obj): def delete_bad_chunks(options, scanned_obj): """ Takes a scanned object and deletes all the bad chunks. 
- Keywords arguments + Inputs: options -- argparse arguments, the whole argparse.ArgumentParser() object scanned_obj -- this can be a RegionSet or World objects from world.py @@ -114,7 +114,7 @@ def delete_bad_chunks(options, scanned_obj): def delete_bad_regions(options, scanned_obj): """ Takes a scanned object and deletes all bad region files. - Keywords arguments: + Inputs: options -- argparse arguments, the whole argparse.ArgumentParser() object scanned_obj -- this can be a RegionSet or World objects from world.py @@ -177,71 +177,68 @@ def main(): parser.add_argument('--replace-corrupted', '--rc', - help='Tries to replace the corrupted chunks using the backup' - ' directories. This option can be only used scanning one' - ' world.', + help='Try to replace the corrupted chunks using the backup' + ' directories. Can be only used scanning one world.', default=False, dest='replace_corrupted', action='store_true') parser.add_argument('--replace-wrong-located', '--rw', - help='Tries to replace the wrong located chunks using the ' - 'backup directories. This option can be only used scanning' - ' one world.', + help='Try to replace the wrong located chunks using the ' + 'backup directories. Can be only used scanning one ' + 'world.', default=False, dest='replace_wrong_located', action='store_true') parser.add_argument('--replace-entities', '--re', - help='Tries to replace the chunks with too many entities using ' - 'the backup directories. This option can be only used ' - 'scanning one world.', + help='Try to replace the chunks with too many entities using ' + 'the backup directories. Can be only used scanning ' + 'one world.', default=False, dest='replace_entities', action='store_true') parser.add_argument('--replace-shared-offset', '--rs', - help='Tries to replace the chunks with a shared offset using ' - 'the backup directories. This option can be only used' - 'scanning one world.', + help='Try to replace the chunks with a shared offset using ' + 'the backup directories. Can be only used scanning ' + 'one world.', default=False, dest='replace_shared_offset', action='store_true') parser.add_argument('--replace-too-small', '--rt', - help='Tries to replace the region files that are too small to ' - 'be actually be a region file using the backup ' - 'directories. This option can be only used scanning one ' - 'world.', + help='Try to replace the region files that are too small to ' + 'be actually be a region file using the backup directories.' + 'Can be only used scanning one world.', default=False, dest='replace_too_small', action='store_true') parser.add_argument('--delete-corrupted', '--dc', - help='[WARNING!] This option deletes! This option will delete ' - 'all the corrupted chunks. Used with --replace-corrupted ' - 'or --replace-wrong-located it will delete all the ' - 'non-replaced chunks.', + help='[WARNING!] This option deletes! Delete all the corrupted ' + 'chunks. Used with --replace-corrupted or --replace-wrong-located' + ' will delete all the non-replaced chunks.', action='store_true', default=False) parser.add_argument('--delete-wrong-located', '--dw', - help=('[WARNING!] This option deletes!' - 'The same as --delete-corrupted but for wrong ' - 'located chunks'), + help='[WARNING!] This option deletes! Delete all the wrong located ' + 'chunks. Used with --replace-corrupted or --replace-wrong-located' + ' will delete all the non-replaced chunks.', action='store_true', default=False, dest='delete_wrong_located') parser.add_argument('--delete-entities', '--de', - help='[WARNING!] 
This option deletes! This option deletes ALL ' + help='[WARNING!] This option deletes! Delete ALL ' 'the entities in chunks with more entities than ' '--entity-limit (300 by default). In a Minecraft ' 'entities are mostly mobs and items dropped in the ' @@ -255,17 +252,17 @@ def main(): parser.add_argument('--delete-shared-offset', '--ds', - help='[WARNING!] This option deletes! This option will delete ' - 'all the chunk with status shared offset. It will remove ' - 'the region header for the false chunk, note that you ' - 'don\'t loos any chunk doing this.', + help='[WARNING!] This option deletes! Delete all the chunk ' + 'with status shared offset. It will remove the region ' + 'header for the false chunk, note that you ' + 'don\'t loose any chunk doing this.', action='store_true', default=False, dest='delete_shared_offset') parser.add_argument('--delete-missing-tag', '--dmt', - help='[WARNING!] This option deletes! Removes any chunks ' + help='[WARNING!] This option deletes! Remove any chunks ' 'with the mandatory entities tag missing.', dest='delete_missing_tag', default=False, @@ -273,22 +270,23 @@ def main(): parser.add_argument('--fix-corrupted', '--fc', - help='Tries to fix chunks that are corrupted. This will try to decompress as much as possible from' - 'the data stream and see if the size is reasonable.', + help='Try to fix chunks that are corrupted by extracting as much ' + 'information as possible', dest='fix_corrupted', default=False, action='store_true') parser.add_argument('--fix-missing-tag', '--fm', - help='Fixes chunks that have the Entities tag missing. This will add the missing tag.', + help='Fix chunks that have the Entities tag missing. This will add ' + 'the missing tag.', dest='fix_missing_tag', default=False, action='store_true') parser.add_argument('--fix-wrong-located', '--fw', - help='Fixes chunks that have that are wrong located. This will save them in the coordinates ' + help='Fix chunks that are wrong located. This will save them in the coordinates ' 'stored in their data.', dest='fix_wrong_located', default=False, @@ -296,7 +294,7 @@ def main(): parser.add_argument('--delete-too-small', '--dt', - help='[WARNING!] This option deletes! Removes any region files ' + help='[WARNING!] This option deletes! Remove any region files ' 'found to be too small to actually be a region file.', dest='delete_too_small', default=False, @@ -319,12 +317,14 @@ def main(): type=int, default=1) + status_abbr = "" + for status in c.CHUNK_PROBLEMS: + status_abbr += "{0}: {1}; ".format(c.CHUNK_PROBLEMS_ABBR[status], c.CHUNK_STATUS_TEXT[status]) parser.add_argument('--verbose', '-v', - help='Don\'t use a progress bar, instead print a line per ' - 'scanned region file with results information. The ' - 'letters mean c: corrupted; w: wrong located; t: total of ' - 'chunks; tme: too many entities problem', + help=('Don\'t use a progress bar, instead print a line per ' + 'scanned file with results information. The ' + 'letters mean:\n') + status_abbr, action='store_true', default=False) @@ -340,7 +340,7 @@ def main(): parser.add_argument('--log', '-l', - help='Saves a log of all the problems found in the specified ' + help='Save a log of all the problems found in the specified ' 'file. The log file contains all the problems found with ' 'this information: region file, chunk coordinates and ' 'problem. 
Use \'-\' as name to show the log at the end ' From dd0c799684a142cbd79f667e4edcd3110852d4e2 Mon Sep 17 00:00:00 2001 From: Alejandro Aguilera Date: Thu, 23 Jul 2020 10:59:51 +0200 Subject: [PATCH 123/151] Remove some syntaxis warnings. Fix the new algorithm for fixing chunks, it was trying to read chunk outside the region file. --- regionfixer_core/world.py | 50 +++++++++++++++++++++++---------------- 1 file changed, 30 insertions(+), 20 deletions(-) diff --git a/regionfixer_core/world.py b/regionfixer_core/world.py index 64ead6f..0a4ca21 100644 --- a/regionfixer_core/world.py +++ b/regionfixer_core/world.py @@ -377,26 +377,36 @@ def fix_problematic_chunks(self, status): # read the data raw m = region_file.metadata[local_coords[0], local_coords[1]] region_file.file.seek(m.blockstart * region.SECTOR_LENGTH + 5) - raw_chunk = region_file.file.read(m.length - 1) - # decompress byte by byte so we can get as much as we can before the error happens - try: + # these status doesn't provide a good enough data, we could end up reading garbage + if m.status not in (region.STATUS_CHUNK_IN_HEADER, region.STATUS_CHUNK_MISMATCHED_LENGTHS, + region.STATUS_CHUNK_OUT_OF_FILE, region.STATUS_CHUNK_OVERLAPPING, + region.STATUS_CHUNK_ZERO_LENGTH): + # get the raw data of the chunk + raw_chunk = region_file.file.read(m.length - 1) + # decompress byte by byte so we can get as much as we can before the error happens dc = zlib.decompressobj() out = "" for i in raw_chunk: out += dc.decompress(i) - except: - pass - # compare the sizes of the new compressed strem and the old one to see if we've got something good - cdata = zlib.compress(out.encode()) - if len(cdata) == len(raw_chunk): - # the chunk is probably good, write it in the region file - region_file.write_blockdata(local_coords[0], local_coords[1], out) - print("The chunk {0},{1} in region file {2} was fixed successfully.".format(local_coords[0], local_coords[1], self.filename)) - else: - print("The chunk {0},{1} in region file {2} couldn't be fixed.".format(local_coords[0], local_coords[1], self.filename)) - #print("Extracted: " + str(len(out))) - #print("Size of the compressed stream: " + str(len(raw_chunk))) - + # compare the sizes of the new compressed strem and the old one to see if we've got something good + cdata = zlib.compress(out.encode()) + if len(cdata) == len(raw_chunk): + # the chunk is probably good, write it in the region file + region_file.write_blockdata(local_coords[0], local_coords[1], out) + print("The chunk {0},{1} in region file {2} was fixed successfully.".format(local_coords[0], local_coords[1], self.filename)) + else: + print("The chunk {0},{1} in region file {2} couldn't be fixed.".format(local_coords[0], local_coords[1], self.filename)) + #======================================================= + # print("Extracted: " + str(len(out))) + # print("Size of the compressed stream: " + str(len(raw_chunk))) + #======================================================= + except region.ChunkHeaderError: + # usually a chunk with zero length, pass + print("The chunk {0},{1} in region file {2} couldn't be fixed.".format(local_coords[0], local_coords[1], self.filename)) + except region.RegionHeaderError: + # usually a chunk with zero length, pass + print("The chunk {0},{1} in region file {2} couldn't be fixed.".format(local_coords[0], local_coords[1], self.filename)) + if status == c.CHUNK_MISSING_ENTITIES_TAG: # The arguments to create the empty TAG_List have been somehow extracted by comparing # the tag list from a healthy chunk with the one 
created by nbt @@ -994,7 +1004,7 @@ def generate_report(self, standalone): table_data = [] table_data.append(['Problem', 'Count']) for p in c.CHUNK_PROBLEMS: - if chunk_counts[p] is not 0: + if chunk_counts[p] != 0: table_data.append([c.CHUNK_STATUS_TEXT[p], chunk_counts[p]]) table_data.append(['Total', chunk_counts['TOTAL']]) text += table(table_data) @@ -1007,7 +1017,7 @@ def generate_report(self, standalone): table_data = [] table_data.append(['Problem', 'Count']) for p in c.REGION_PROBLEMS: - if region_counts[p] is not 0: + if region_counts[p] != 0: table_data.append([c.REGION_STATUS_TEXT[p], region_counts[p]]) table_data.append(['Total', region_counts['TOTAL']]) text += table(table_data) @@ -1561,7 +1571,7 @@ def generate_report(self, standalone): table_data = [] table_data.append(['Problem', 'Count']) for p in c.CHUNK_PROBLEMS: - if chunk_counts[p] is not 0: + if chunk_counts[p] != 0: table_data.append([c.CHUNK_STATUS_TEXT[p], chunk_counts[p]]) table_data.append(['Total', chunk_counts['TOTAL']]) text += table(table_data) @@ -1574,7 +1584,7 @@ def generate_report(self, standalone): table_data = [] table_data.append(['Problem', 'Count']) for p in c.REGION_PROBLEMS: - if region_counts[p] is not 0: + if region_counts[p] != 0: table_data.append([c.REGION_STATUS_TEXT[p], region_counts[p]]) table_data.append(['Total', region_counts['TOTAL']]) text += table(table_data) From e14efd834d438acff298fb2c7a9ca1b1f55fb566 Mon Sep 17 00:00:00 2001 From: Alejandro Aguilera Date: Thu, 23 Jul 2020 11:37:12 +0200 Subject: [PATCH 124/151] Add last directory of file to the verbose output. --- regionfixer_core/scan.py | 5 +++-- regionfixer_core/world.py | 11 ++++++----- 2 files changed, 9 insertions(+), 7 deletions(-) diff --git a/regionfixer_core/scan.py b/regionfixer_core/scan.py index 32a4fc1..39b2ac7 100644 --- a/regionfixer_core/scan.py +++ b/regionfixer_core/scan.py @@ -25,7 +25,7 @@ import sys import logging import multiprocessing -from os.path import split, abspath +from os.path import split, abspath, join from time import sleep, time from copy import copy from traceback import extract_tb @@ -623,7 +623,8 @@ def console_scan_loop(scanners, scan_titles, verbose): else: status = "(" + result.oneliner_status + ")" fn = result.filename - print("Scanned {0: <12} {1:.<43} {2}/{3}".format(fn, status, counter, total)) + fol = result.folder + print("Scanned {0: <12} {1:.<43} {2}/{3}".format(join(fol, fn), status, counter, total)) if not verbose: pbar.finish() except KeyboardInterrupt as e: diff --git a/regionfixer_core/world.py b/regionfixer_core/world.py index 0a4ca21..0ba4207 100644 --- a/regionfixer_core/world.py +++ b/regionfixer_core/world.py @@ -53,6 +53,7 @@ def __init__(self, path=None): self.path = path if self.path and exists(self.path): self.filename = split(path)[1] + self.folder = split(split(path)[0])[1] else: self.filename = None # The status of the region file. 
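
For reference, the nested split() call above leaves self.folder holding the name of the directory that directly contains the file; a quick illustration with a purely hypothetical path:

from os.path import split

path = "saves/MyWorld/DIM-1/region/r.0.1.mca"  # hypothetical example path
print(split(path)[1])             # 'r.0.1.mca' -> filename
print(split(split(path)[0])[1])   # 'region'    -> folder shown in the verbose output
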
@@ -92,7 +93,7 @@ def __init__(self, path, scanned_time=None): # general region file info self.path = path self.filename = split(path)[1] - self.folder = split(path)[0] + self.folder = split(split(path)[0])[1] self.x = self.z = None self.x, self.z = self.get_coords() self.coords = (self.x, self.z) @@ -393,19 +394,19 @@ def fix_problematic_chunks(self, status): if len(cdata) == len(raw_chunk): # the chunk is probably good, write it in the region file region_file.write_blockdata(local_coords[0], local_coords[1], out) - print("The chunk {0},{1} in region file {2} was fixed successfully.".format(local_coords[0], local_coords[1], self.filename)) + print("The chunk {0},{1} in region file {2} was fixed successfully.".format(local_coords[0], local_coords[1], join(self.folder,self.filename))) else: - print("The chunk {0},{1} in region file {2} couldn't be fixed.".format(local_coords[0], local_coords[1], self.filename)) + print("The chunk {0},{1} in region file {2} couldn't be fixed.".format(local_coords[0], local_coords[1], join(self.folder,self.filename))) #======================================================= # print("Extracted: " + str(len(out))) # print("Size of the compressed stream: " + str(len(raw_chunk))) #======================================================= except region.ChunkHeaderError: # usually a chunk with zero length, pass - print("The chunk {0},{1} in region file {2} couldn't be fixed.".format(local_coords[0], local_coords[1], self.filename)) + print("The chunk {0},{1} in region file {2} couldn't be fixed.".format(local_coords[0], local_coords[1], join(self.folder,self.filename))) except region.RegionHeaderError: # usually a chunk with zero length, pass - print("The chunk {0},{1} in region file {2} couldn't be fixed.".format(local_coords[0], local_coords[1], self.filename)) + print("The chunk {0},{1} in region file {2} couldn't be fixed.".format(local_coords[0], local_coords[1], join(self.folder,self.filename))) if status == c.CHUNK_MISSING_ENTITIES_TAG: # The arguments to create the empty TAG_List have been somehow extracted by comparing From 503efd26da9f024630d4f3b9b9e0ce085825b623 Mon Sep 17 00:00:00 2001 From: Alejandro Aguilera Date: Mon, 7 Sep 2020 11:52:29 +0200 Subject: [PATCH 125/151] Move except statement to the right place. 
--- regionfixer_core/scan.py | 18 +++++++++--------- 1 file changed, 9 insertions(+), 9 deletions(-) diff --git a/regionfixer_core/scan.py b/regionfixer_core/scan.py index 39b2ac7..3c0d1c3 100644 --- a/regionfixer_core/scan.py +++ b/regionfixer_core/scan.py @@ -934,15 +934,7 @@ def scan_chunk(region_file, coords, global_coords, entity_limit): data_coords = None global_coords = world.get_global_chunk_coords(split(region_file.filename)[1], coords[0], coords[1]) num_entities = None - - except UnicodeDecodeError: - # TODO: This should another kind of error, it's now being handled as corrupted chunk - status = c.CHUNK_CORRUPTED - chunk = None - data_coords = None - global_coords = world.get_global_chunk_coords(split(region_file.filename)[1], coords[0], coords[1]) - num_entities = None - + except TypeError: # TODO: This should another kind of error, it's now being handled as corrupted chunk status = c.CHUNK_CORRUPTED @@ -1004,6 +996,14 @@ def scan_chunk(region_file, coords, global_coords, entity_limit): global_coords = world.get_global_chunk_coords(split(region_file.filename)[1], coords[0], coords[1]) num_entities = None + except UnicodeDecodeError: + # TODO: This should another kind of error, it's now being handled as corrupted chunk + status = c.CHUNK_CORRUPTED + chunk = None + data_coords = None + global_coords = world.get_global_chunk_coords(split(region_file.filename)[1], coords[0], coords[1]) + num_entities = None + return chunk, (num_entities, status) if status != c.CHUNK_NOT_CREATED else None From 1fb93a40a15e5da4153d57824ec916d3493d8e54 Mon Sep 17 00:00:00 2001 From: Pisich <62033118+Pisich@users.noreply.github.com> Date: Thu, 10 Sep 2020 21:05:32 -0500 Subject: [PATCH 126/151] Minor changes to README Fixed some spelling and grammatical errors. --- README.rst | 12 ++++++------ 1 file changed, 6 insertions(+), 6 deletions(-) diff --git a/README.rst b/README.rst index fd71abb..98c99a4 100644 --- a/README.rst +++ b/README.rst @@ -6,7 +6,7 @@ By Alejandro Aguilera (Fenixin) Locates problems and tries to fix Minecraft worlds (or region files). -Tries to fix corrupted chunks in region files using old backup copies +Minecraft Region Fixer tries to fix corrupted chunks in region files using old backup copies of the Minecraft world. If you don't have a copy, you can eliminate the corrupted chunks making Minecraft regenerate them. @@ -24,14 +24,14 @@ https://www.minecraftforum.net/forums/mapping-and-modding-java-edition/minecraft Supported platforms =================== This program only works with Python 3.x, and DOESN'T work with -python 2.x. There was a windows exe in older versions, but right +python 2.x. There was a Windows .exe for older versions, but right now you need to install the python interpreter to run this program. Notes ===== Older versions of Minecraft had big problems when loading broken -worlds. Newer versions of Minecraft are doing improving the way +worlds. Newer versions of Minecraft are improving the way they deal with corruption and other things. 
Region-Fixer still is useful for replacing chunks/regions with a @@ -54,7 +54,7 @@ Suggestions and bugs should go to the github page: https://github.com/Fenixin/Minecraft-Region-Fixer -Feedback and questions should go preferably to the forums posts: +Feedback and questions should preferably go to these forums posts: (server administration) https://www.minecraftforum.net/forums/support/server-support-and/1903200-minecraft-region-fixer @@ -76,11 +76,11 @@ See CONTRIBUTORS.txt Warning ======= -This program has been tested with a lot of worlds, but there may be +This program has been tested with a lot of worlds, but there may exist bugs, so please, MAKE A BACKUP OF YOUR WORLD BEFORE RUNNING it, I'M NOT RESPONSIBLE OF WHAT HAPPENS TO YOUR WORLD. Other way to say it is USE THIS TOOL AT YOUR OWN RISK. -Think that you are playing with you precious saved games :P . +Think that you are playing with your precious saved games :P . Good luck! :) From c2d7f51cca5086cd5cd7273f292b539ad48ca633 Mon Sep 17 00:00:00 2001 From: Alejandro Aguilera Date: Sat, 17 Oct 2020 23:26:12 +0200 Subject: [PATCH 127/151] Fix variable name in world.py:summary --- regionfixer_core/world.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/regionfixer_core/world.py b/regionfixer_core/world.py index 0ba4207..ef78d2b 100644 --- a/regionfixer_core/world.py +++ b/regionfixer_core/world.py @@ -303,7 +303,7 @@ def summary(self): text += " |-+-Chunk coords: header {0}, global {1}.\n".format(h_coords, g_coords) text += " | +-Status: {0}\n".format(c.CHUNK_STATUS_TEXT[status]) if self[ck][c.TUPLE_STATUS] == c.CHUNK_TOO_MANY_ENTITIES: - text += " | +-No. entities: {0}\n".format(self[c][c.TUPLE_NUM_ENTITIES]) + text += " | +-No. entities: {0}\n".format(self[ck][c.TUPLE_NUM_ENTITIES]) text += " |\n" return text From f026328266e8842d802c4d5bbbd7c05ae4dcc6ce Mon Sep 17 00:00:00 2001 From: Alejandro Aguilera Date: Sat, 17 Oct 2020 23:56:57 +0200 Subject: [PATCH 128/151] For now add UnicodeDecodeError to the list of unfixable chunks. 
--- regionfixer_core/world.py | 9 +++------ 1 file changed, 3 insertions(+), 6 deletions(-) diff --git a/regionfixer_core/world.py b/regionfixer_core/world.py index ef78d2b..2c3670a 100644 --- a/regionfixer_core/world.py +++ b/regionfixer_core/world.py @@ -401,13 +401,10 @@ def fix_problematic_chunks(self, status): # print("Extracted: " + str(len(out))) # print("Size of the compressed stream: " + str(len(raw_chunk))) #======================================================= - except region.ChunkHeaderError: - # usually a chunk with zero length, pass + except (region.ChunkHeaderError, region.RegionHeaderError, UnicodeDecodeError): + # usually a chunk with zero length in the first two cases, or veeery broken chunk in the third print("The chunk {0},{1} in region file {2} couldn't be fixed.".format(local_coords[0], local_coords[1], join(self.folder,self.filename))) - except region.RegionHeaderError: - # usually a chunk with zero length, pass - print("The chunk {0},{1} in region file {2} couldn't be fixed.".format(local_coords[0], local_coords[1], join(self.folder,self.filename))) - + if status == c.CHUNK_MISSING_ENTITIES_TAG: # The arguments to create the empty TAG_List have been somehow extracted by comparing # the tag list from a healthy chunk with the one created by nbt From 80f2c325a15320db94b37a41db4c1b12fab85b1f Mon Sep 17 00:00:00 2001 From: Alejandro Aguilera Date: Sun, 18 Oct 2020 00:13:29 +0200 Subject: [PATCH 129/151] Bump version number to 0.3.4 --- regionfixer_core/version.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/regionfixer_core/version.py b/regionfixer_core/version.py index cdedb71..8dd4a00 100644 --- a/regionfixer_core/version.py +++ b/regionfixer_core/version.py @@ -21,5 +21,5 @@ # along with this program. If not, see . 
# -version_string = "0.3.3" +version_string = "0.3.4" version_numbers = version_string.split('.') From d19a8ff0288c9fa28d619098e5b04a15fa64c65a Mon Sep 17 00:00:00 2001 From: 734F96 <50321866+734F96@users.noreply.github.com> Date: Mon, 1 Nov 2021 22:48:58 +0100 Subject: [PATCH 130/151] Since 1.17, chunks may not have the "Entities" tag --- regionfixer_core/scan.py | 13 +++++++++++-- 1 file changed, 11 insertions(+), 2 deletions(-) diff --git a/regionfixer_core/scan.py b/regionfixer_core/scan.py index 3c0d1c3..d415625 100644 --- a/regionfixer_core/scan.py +++ b/regionfixer_core/scan.py @@ -913,11 +913,20 @@ def scan_chunk(region_file, coords, global_coords, entity_limit): # Level chunk try: data_coords = world.get_chunk_data_coords(chunk) - num_entities = len(chunk["Level"]["Entities"]) + + # Since snapshot 20w45a (1.17), entities MAY BE separated + if chunk["DataVersion"].value >= 2681 : + if "Entities" in chunk["Level"] : + num_entities = len(chunk["Level"]["Entities"]) + else : + num_entities = None + else : + num_entities = len(chunk["Level"]["Entities"]) + if data_coords != global_coords: # wrong located chunk status = c.CHUNK_WRONG_LOCATED - elif num_entities > el: + elif num_entities != None and num_entities > el: # too many entities in the chunk status = c.CHUNK_TOO_MANY_ENTITIES else: From 965ac6761cf2331a20db426c5981cd711ddaf2ca Mon Sep 17 00:00:00 2001 From: 734F96 <50321866+734F96@users.noreply.github.com> Date: Tue, 2 Nov 2021 14:43:06 +0100 Subject: [PATCH 131/151] Scan new "entities" folders (>=1.17), fix scan of "poi" folders In fact, "poi" and "entities" can be in any dimension --- regionfixer_core/constants.py | 14 +++- regionfixer_core/scan.py | 40 +++++++-- regionfixer_core/world.py | 147 +++++++++++++++++++++++++--------- 3 files changed, 152 insertions(+), 49 deletions(-) diff --git a/regionfixer_core/constants.py b/regionfixer_core/constants.py index 33b8ea1..26bc645 100644 --- a/regionfixer_core/constants.py +++ b/regionfixer_core/constants.py @@ -237,8 +237,16 @@ CHUNK_PROBLEMS_ARGS[problem])) # Dimension names: -DIMENSION_NAMES = {"region": "Overworld", +DIMENSION_NAMES = {"": "Overworld", "DIM1": "The End", - "DIM-1": "Nether", - "poi": "POIs" + "DIM-1": "Nether" } + +# Region files types +LEVEL_DIR = "region" +POI_DIR = "poi" +ENTITIES_DIR = "entities" +REGION_TYPES_NAMES = {LEVEL_DIR: ("level/region", "Level/Region"), + POI_DIR: ("POIs", "POIs"), + ENTITIES_DIR: ("entities", "Entities" ) + } diff --git a/regionfixer_core/scan.py b/regionfixer_core/scan.py index 3c0d1c3..8206f7d 100644 --- a/regionfixer_core/scan.py +++ b/regionfixer_core/scan.py @@ -663,11 +663,15 @@ def console_scan_world(world_obj, processes, entity_limit, remove_entities, # Scan the world directory print("World info:") - print(("There are {0} region files, {1} player files and {2} data" - " files in the world directory.").format( - w.get_number_regions(), - len(w.players) + len(w.old_players), - len(w.data_files))) + counters = w.get_number_regions() + if c.LEVEL_DIR in counters: + print(" - {0} region/level files,".format(counters[c.LEVEL_DIR])) + if c.POI_DIR in counters: + print(" - {0} POI files,".format(counters[c.POI_DIR])) + if c.ENTITIES_DIR in counters: + print(" - {0} entities files,".format(counters[c.ENTITIES_DIR])) + print(" - {0} player files,".format(len(w.players) + len(w.old_players))) + print(" - and {0} data files.".format(len(w.data_files))) # check the level.dat print("\n{0:-^60}".format(' Checking level.dat ')) @@ -691,7 +695,7 @@ def console_scan_world(world_obj, 
processes, entity_limit, remove_entities, scan_titles = [' Scanning UUID player files ', ' Scanning old format player files ', ' Scanning structures and map data files ', - ' Scanning region files '] + ' Scanning region, POI and entities files '] console_scan_loop(scanners, scan_titles, verbose) w.scanned = True @@ -956,10 +960,30 @@ def scan_chunk(region_file, coords, global_coords, entity_limit): data_coords = None num_entities = None status = c.CHUNK_OK - + + elif "Entities" in chunk: + # To check if it's a entities chunk check for the tag "Entities" + # If entities are in the region files, the tag "Entities" is in "Level" + # https://minecraft.fandom.com/wiki/Entity_format + # We use "Entities" as a differentiating factor + + # Entities chunk + data_coords = world.get_chunk_data_coords(chunk) + num_entities = len(chunk["Entities"]) + + if data_coords != global_coords: + # wrong located chunk + status = c.CHUNK_WRONG_LOCATED + elif num_entities > el: + # too many entities in the chunk + status = c.CHUNK_TOO_MANY_ENTITIES + else: + # chunk ok + status = c.CHUNK_OK + else: # what is this? we shouldn't reach this part of the code, as far as - # we know there is only POI chunks and Level chunks + # we know there is only POI chunks, Entities chunks, and Level chunks raise AssertionError("Unrecognized scanned chunk in scan_chunk().") ############################################### diff --git a/regionfixer_core/world.py b/regionfixer_core/world.py index 0ba4207..266e5ba 100644 --- a/regionfixer_core/world.py +++ b/regionfixer_core/world.py @@ -86,14 +86,15 @@ class ScannedRegionFile: - path -- A string with the path of the region file - scanned_time -- Float, time as returned by bult-in time module. The time at which the region file has been scanned. None by default. + - folder -- Used to enhance print() """ - def __init__(self, path, scanned_time=None): + def __init__(self, path, scanned_time=None, folder=""): # general region file info self.path = path self.filename = split(path)[1] - self.folder = split(split(path)[0])[1] + self.folder = folder self.x = self.z = None self.x, self.z = self.get_coords() self.coords = (self.x, self.z) @@ -646,11 +647,18 @@ class RegionSet(DataSet): Inputs: - regionset_path -- Path to the folder containing region files + IT MUST NOT END WITH A SLASH ("/") - region_list -- List of paths to all the region files + - overworld -- Tweak to tell it's a dimension and not the overworld """ - def __init__(self, regionset_path=None, region_list=[]): + def __init__(self, regionset_path=None, region_list=[], overworld=True): DataSet.__init__(self, ScannedRegionFile) + # Otherwise, problems in _get_dimension_directory() and _get_region_type_directory() + if regionset_path != None : + assert regionset_path[-1] != "/" + self.overworld = overworld + if regionset_path: self.path = regionset_path self.region_list = glob(join(self.path, "r.*.*.mca")) @@ -660,11 +668,15 @@ def __init__(self, regionset_path=None, region_list=[]): self._set = {} for path in self.region_list: try: - r = ScannedRegionFile(path) + r = ScannedRegionFile(path, folder=self._get_dim_type_string()) self._set[r.get_coords()] = r except InvalidFileName: - print("Warning: The file {0} is not a valid name for a region. I'll skip it.".format(path)) + try : + region_type = c.REGION_TYPES_NAMES[self._get_region_type_directory()][0] + except: + region_type = "region (?)" + print("Warning: The file {0} is not a valid name for a {1} file. 
I'll skip it.".format(path, region_type)) # region and chunk counters with all the data from the scan self._region_counters = {} @@ -689,16 +701,18 @@ def get_name(self): """ dim_directory = self._get_dimension_directory() - if dim_directory: - try: - return c.DIMENSION_NAMES[dim_directory] - except: - return dim_directory + region_type_directory = self._get_region_type_directory() + if dim_directory and region_type_directory: + try: dim_directory = c.DIMENSION_NAMES[dim_directory] + except: dim_directory = "\"" + dim_directory + "\"" + try: region_type_directory = c.REGION_TYPES_NAMES[region_type_directory][1] + except: region_type_directory = "\"" + region_type_directory + "\"" + return "{0} files for {1}".format(region_type_directory, dim_directory) else: return "" def _get_dimension_directory(self): - """ Returns a string with the directory containing the RegionSet. + """ Returns a string with the parent directory containing the RegionSet. If there is no such a directory returns None. If it's composed of sparse region files returns 'regionset'. @@ -706,14 +720,35 @@ def _get_dimension_directory(self): """ if self.path: - rest, region = split(self.path) + if self.overworld : + return "" + rest, type_dir = split(self.path) rest, dim_path = split(rest) - if dim_path == "": - dim_path = split(rest)[1] return dim_path else: return None + def _get_region_type_directory(self): + """ Returns a string with the directory containing the RegionSet. + + If there is no such a directory returns None. If it's composed + of sparse region files returns 'regionset'. + """ + + if self.path: + rest, type_dir = split(self.path) + return type_dir + else: + return None + + def _get_dim_type_string(self) : + dim = self._get_dimension_directory() + rg_type = self._get_region_type_directory() + string = "" + if rg_type != None : string = rg_type + if dim != None and dim != "" : string = dim + "/" + rg_type + return string + def _update_counts(self, scanned_regionfile): """ Updates the counters of the regionset with the new regionfile. 
""" @@ -873,7 +908,7 @@ def locate_chunk(self, global_coords): """ - path = self.path + get_chunk_region(*global_coords) + path = join(self.path, get_chunk_region(*global_coords)) local_coords = _get_local_chunk_coords(*global_coords) return path, local_coords @@ -1067,11 +1102,17 @@ def __init__(self, world_path): # list with RegionSets self.regionsets = [] - self.regionsets.append(RegionSet(join(self.path, "region/"))) + self.regionsets.append(RegionSet(join(self.path, "region"))) for directory in glob(join(self.path, "DIM*/region")): - self.regionsets.append(RegionSet(join(self.path, directory))) - # TODO: let's scan POI as region files maybe it's enough, - self.regionsets.append(RegionSet(join(self.path, "poi/"))) + self.regionsets.append(RegionSet(directory, overworld=False)) + + self.regionsets.append(RegionSet(join(self.path, "poi"))) + for directory in glob(join(self.path, "DIM*/poi")): + self.regionsets.append(RegionSet(directory, overworld=False)) + + self.regionsets.append(RegionSet(join(self.path, "entities"))) + for directory in glob(join(self.path, "DIM*/entities")): + self.regionsets.append(RegionSet(directory, overworld=False)) # level.dat # Let's scan level.dat here so we can extract the world name @@ -1124,10 +1165,16 @@ def __init__(self, world_path): self.scanned = False def __str__(self): + counters = get_number_regions() text = "World information:\n" text += " World path: {0}\n".format(self.path) text += " World name: {0}\n".format(self.name) - text += " Region files: {0}\n".format(self.get_number_regions()) + if c.LEVEL_DIR in counters : + text += " Region/Level files: {0}\n".format(counters[c.LEVEL_DIR]) + if c.POI_DIR in counters : + text += " POI files: {0}\n".format(counters[c.POI_DIR]) + if c.ENTITIES_DIR in counters : + text += " Entities files: {0}\n".format(counters[c.ENTITIES_DIR]) text += " Scanned: {0}".format(str(self.scanned)) return text @@ -1154,18 +1201,21 @@ def has_problems(self): return False def get_number_regions(self): - """ Returns a integer with the number of regions files in this world + """ Returns a dictionnary with the number of regions files in this world Return: - - counter -- An integer with the amount of region files. + - counters -- An dictionnary with the amount of region files. """ - counter = 0 + counters = {} for dim in self.regionsets: - counter += len(dim) + region_type = dim._get_region_type_directory() + if not region_type in counters : + counters[region_type] = 0 + counters[region_type] += len(dim) - return counter + return counters def summary(self): """ Returns a string with a summary of the problems in this world. 
@@ -1290,9 +1340,10 @@ def replace_problematic_chunks(self, backup_worlds, status, entity_limit, delete for regionset in self.regionsets: for backup in backup_worlds: # choose the correct regionset based on the dimension - # folder name + # folder name and the type name (region, POI and entities) for temp_regionset in backup.regionsets: - if temp_regionset._get_dimension_directory() == regionset._get_dimension_directory(): + if ( temp_regionset._get_dimension_directory() == regionset._get_dimension_directory() and + temp_regionset._get_region_type_directory() == regionset._get_region_type_directory()): b_regionset = temp_regionset break @@ -1300,8 +1351,10 @@ def replace_problematic_chunks(self, backup_worlds, status, entity_limit, delete # iterates the list returned by list_chunks() bad_chunks = regionset.list_chunks(status) - if bad_chunks and b_regionset._get_dimension_directory() != regionset._get_dimension_directory(): - print("The regionset \'{0}\' doesn't exist in the backup directory. Skipping this backup directory.".format(regionset._get_dimension_directory())) + if ( bad_chunks and + b_regionset._get_dimension_directory() != regionset._get_dimension_directory() and + b_regionset._get_region_type_directory() != regionset._get_region_type_directory() ): + print("The regionset \'{0}\' doesn't exist in the backup directory. Skipping this backup directory.".format(regionset._get_dim_type_string())) else: for ck in bad_chunks: global_coords = ck[0] @@ -1422,15 +1475,18 @@ def replace_problematic_regions(self, backup_worlds, status, entity_limit, delet for regionset in self.regionsets: for backup in backup_worlds: # choose the correct regionset based on the dimension - # folder name + # folder name and the type name (region, POI and entities) for temp_regionset in backup.regionsets: - if temp_regionset._get_dimension_directory() == regionset._get_dimension_directory(): + if ( temp_regionset._get_dimension_directory() == regionset._get_dimension_directory() and + temp_regionset._get_region_type_directory() == regionset._get_region_type_directory()): b_regionset = temp_regionset break bad_regions = regionset.list_regions(status) - if bad_regions and b_regionset._get_dimension_directory() != regionset._get_dimension_directory(): - print("The regionset \'{0}\' doesn't exist in the backup directory. Skipping this backup directory.".format(regionset._get_dimension_directory())) + if ( bad_chunks and + b_regionset._get_dimension_directory() != regionset._get_dimension_directory() and + b_regionset._get_region_type_directory() != regionset._get_region_type_directory() ): + print("The regionset \'{0}\' doesn't exist in the backup directory. Skipping this backup directory.".format(regionset._get_dim_type_string())) else: for r in bad_regions: print("\n{0:-^60}".format(' New region file to replace! 
Coords {0} '.format(r.get_coords()))) @@ -1694,9 +1750,15 @@ def delete_entities(region_file, x, z): """ chunk = region_file.get_chunk(x, z) - counter = len(chunk['Level']['Entities']) empty_tag_list = nbt.TAG_List(nbt.TAG_Byte, '', 'Entities') - chunk['Level']['Entities'] = empty_tag_list + if 'Level' in chunk : # Region file + counter = len(chunk['Level']['Entities']) + chunk['Level']['Entities'] = empty_tag_list + elif 'Entities' in chunk : # Entities file (>=1.17) + counter = len(chunk['Entities']) + chunk['Entities'] = empty_tag_list + else : + raise AssertionError("Unrecognized chunk in delete_entities().") region_file.write_chunk(x, z, chunk) return counter @@ -1752,10 +1814,19 @@ def get_chunk_data_coords(nbt_file): """ - level = nbt_file.__getitem__('Level') + # Region file + if 'Level' in nbt_file : + level = nbt_file.__getitem__('Level') + + coordX = level.__getitem__('xPos').value + coordZ = level.__getitem__('zPos').value + + # Entities file : + elif 'Entities' in nbt_file : + coordX, coordZ = nbt_file.__getitem__('Position').value - coordX = level.__getitem__('xPos').value - coordZ = level.__getitem__('zPos').value + else : + raise AssertionError("Unrecognized chunk in get_chunk_data_coords().") return coordX, coordZ From e609d03a2a1e9fd8996139756888ed53b5eed459 Mon Sep 17 00:00:00 2001 From: Alejandro Aguilera Date: Sat, 6 Nov 2021 22:14:14 +0100 Subject: [PATCH 132/151] Fix not printing Overworld in log. --- regionfixer_core/world.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/regionfixer_core/world.py b/regionfixer_core/world.py index 266e5ba..d32a19f 100644 --- a/regionfixer_core/world.py +++ b/regionfixer_core/world.py @@ -702,7 +702,7 @@ def get_name(self): dim_directory = self._get_dimension_directory() region_type_directory = self._get_region_type_directory() - if dim_directory and region_type_directory: + if (dim_directory or self.overworld) and region_type_directory: try: dim_directory = c.DIMENSION_NAMES[dim_directory] except: dim_directory = "\"" + dim_directory + "\"" try: region_type_directory = c.REGION_TYPES_NAMES[region_type_directory][1] From 79fe30b8c9eed2eeaf8ad4e4badbac570f5cbcb2 Mon Sep 17 00:00:00 2001 From: Alejandro Aguilera Date: Sat, 6 Nov 2021 22:15:23 +0100 Subject: [PATCH 133/151] Fix wrong variable name in ScannedRegionFile.summary --- regionfixer_core/world.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/regionfixer_core/world.py b/regionfixer_core/world.py index d32a19f..17dd13f 100644 --- a/regionfixer_core/world.py +++ b/regionfixer_core/world.py @@ -304,7 +304,7 @@ def summary(self): text += " |-+-Chunk coords: header {0}, global {1}.\n".format(h_coords, g_coords) text += " | +-Status: {0}\n".format(c.CHUNK_STATUS_TEXT[status]) if self[ck][c.TUPLE_STATUS] == c.CHUNK_TOO_MANY_ENTITIES: - text += " | +-No. entities: {0}\n".format(self[c][c.TUPLE_NUM_ENTITIES]) + text += " | +-No. entities: {0}\n".format(self[ck][c.TUPLE_NUM_ENTITIES]) text += " |\n" return text From 1df5cc8bb852508f570c366c1fb921e32bd80682 Mon Sep 17 00:00:00 2001 From: Alejandro Aguilera Date: Sat, 6 Nov 2021 22:19:52 +0100 Subject: [PATCH 134/151] Improve logging by adding region folder. 
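The hunk that follows builds the log line with os.path.split and os.path.join; a minimal sketch of those two calls, assuming a hypothetical regionset path 'saves/MyWorld/DIM-1/region' and POSIX separators:

    >>> from os.path import split, join
    >>> rest, dimension_folder = split('saves/MyWorld/DIM-1/region')
    >>> dimension_folder
    'region'
    >>> join(dimension_folder, 'r.0.0.mca')   # a region file name such as r.0.0.mca
    'region/r.0.0.mca'

Note that only the innermost folder survives in the prefix, which is what PATCH 136 below refines by printing the dimension folder as well.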
--- regionfixer_core/world.py | 5 ++++- 1 file changed, 4 insertions(+), 1 deletion(-) diff --git a/regionfixer_core/world.py b/regionfixer_core/world.py index 17dd13f..0667204 100644 --- a/regionfixer_core/world.py +++ b/regionfixer_core/world.py @@ -889,7 +889,10 @@ def summary(self): for r in list(self.keys()): if not self[r].has_problems: continue - text += "Region file: {0}\n".format(self[r].filename) + if self.path: rest, dimension_folder = split(self.path) + else: dimension_folder = "" + text += "Region file: {0}\n".format(join(dimension_folder,self[r].filename)) + text += self[r].summary() text += " +\n\n" return text From a89831d8871e680828281293573029879c416c8c Mon Sep 17 00:00:00 2001 From: Alejandro Aguilera Date: Sat, 6 Nov 2021 22:21:13 +0100 Subject: [PATCH 135/151] Bump version number to 0.3.4 --- regionfixer_core/version.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/regionfixer_core/version.py b/regionfixer_core/version.py index cdedb71..8dd4a00 100644 --- a/regionfixer_core/version.py +++ b/regionfixer_core/version.py @@ -21,5 +21,5 @@ # along with this program. If not, see . # -version_string = "0.3.3" +version_string = "0.3.4" version_numbers = version_string.split('.') From 2b4ad3593ae1fca4098cb76c94caca03094afa38 Mon Sep 17 00:00:00 2001 From: 734F96 <50321866+734F96@users.noreply.github.com> Date: Sat, 6 Nov 2021 23:10:09 +0100 Subject: [PATCH 136/151] Fix region folder in log files We display the "type" folder ("region", "poi" or "entities"), as well as the dimension folder if it's not the overworld --- regionfixer_core/world.py | 4 +--- 1 file changed, 1 insertion(+), 3 deletions(-) diff --git a/regionfixer_core/world.py b/regionfixer_core/world.py index 0667204..4e64d09 100644 --- a/regionfixer_core/world.py +++ b/regionfixer_core/world.py @@ -889,9 +889,7 @@ def summary(self): for r in list(self.keys()): if not self[r].has_problems: continue - if self.path: rest, dimension_folder = split(self.path) - else: dimension_folder = "" - text += "Region file: {0}\n".format(join(dimension_folder,self[r].filename)) + text += "Region file: {0}\n".format(join(self._get_dim_type_string(),self[r].filename)) text += self[r].summary() text += " +\n\n" From b1fc8c2a3c8f1129b0f60dc38e0e62c2d5c9a60d Mon Sep 17 00:00:00 2001 From: Alejandro Aguilera Date: Sun, 7 Nov 2021 10:18:43 +0100 Subject: [PATCH 137/151] Update nbt library to last version. Fixes compatibility issue with python 3.10. --- nbt/chunk.py | 367 ++++++++++++++++++++++++++++++++++++++++++-------- nbt/nbt.py | 22 ++- nbt/region.py | 34 ++++- nbt/world.py | 16 ++- 4 files changed, 364 insertions(+), 75 deletions(-) diff --git a/nbt/chunk.py b/nbt/chunk.py index 484223d..5444144 100644 --- a/nbt/chunk.py +++ b/nbt/chunk.py @@ -1,23 +1,107 @@ """ Handles a single chunk of data (16x16x128 blocks) from a Minecraft save. -WARNING: Chunk is currently McRegion only. -You likely should not use chunk, but instead just get the NBT datastructure, -and do the appropriate lookups and block conversions yourself. - -The authors decided to focus on NBT datastructure and Region files, -and are not actively working on chunk.py. -Code contributions to chunk.py are welcomed! 
+For more information about the chunck format: +https://minecraft.gamepedia.com/Chunk_format """ + from io import BytesIO -from struct import pack, unpack -import array, math +from struct import pack +from math import ceil +import array + + +# Legacy numeric block identifiers +# mapped to alpha identifiers in best effort +# See https://minecraft.gamepedia.com/Java_Edition_data_values/Pre-flattening +# TODO: move this map into a separate file + +block_ids = { + 0: 'air', + 1: 'stone', + 2: 'grass_block', + 3: 'dirt', + 4: 'cobblestone', + 5: 'oak_planks', + 6: 'sapling', + 7: 'bedrock', + 8: 'flowing_water', + 9: 'water', + 10: 'flowing_lava', + 11: 'lava', + 12: 'sand', + 13: 'gravel', + 14: 'gold_ore', + 15: 'iron_ore', + 16: 'coal_ore', + 17: 'oak_log', + 18: 'oak_leaves', + 19: 'sponge', + 20: 'glass', + 21: 'lapis_ore', + 24: 'sandstone', + 30: 'cobweb', + 31: 'grass', + 32: 'dead_bush', + 35: 'white_wool', + 37: 'dandelion', + 38: 'poppy', + 39: 'brown_mushroom', + 40: 'red_mushroom', + 43: 'stone_slab', + 44: 'stone_slab', + 47: 'bookshelf', + 48: 'mossy_cobblestone', + 49: 'obsidian', + 50: 'torch', + 51: 'fire', + 52: 'spawner', + 53: 'oak_stairs', + 54: 'chest', + 56: 'diamond_ore', + 58: 'crafting_table', + 59: 'wheat', + 60: 'farmland', + 61: 'furnace', + 62: 'furnace', + 63: 'sign', # will change to oak_sign in 1.14 + 64: 'oak_door', + 65: 'ladder', + 66: 'rail', + 67: 'cobblestone_stairs', + 72: 'oak_pressure_plate', + 73: 'redstone_ore', + 74: 'redstone_ore', + 78: 'snow', + 79: 'ice', + 81: 'cactus', + 82: 'clay', + 83: 'sugar_cane', + 85: 'oak_fence', + 86: 'pumpkin', + 91: 'lit_pumpkin', + 101: 'iron_bars', + 102: 'glass_pane', + } + + +def block_id_to_name(bid): + try: + name = block_ids[bid] + except KeyError: + name = 'unknown_%d' % (bid,) + print("warning: unknown block id %i" % bid) + print("hint: add that block to the 'block_ids' map") + return name + + +# Generic Chunk class Chunk(object): """Class for representing a single chunk.""" def __init__(self, nbt): - chunk_data = nbt['Level'] - self.coords = chunk_data['xPos'],chunk_data['zPos'] + self.chunk_data = nbt['Level'] + self.coords = self.chunk_data['xPos'],self.chunk_data['zPos'] def get_coords(self): """Return the coordinates of this chunk.""" @@ -28,12 +112,224 @@ def __repr__(self): return "Chunk("+str(self.coords[0])+","+str(self.coords[1])+")" +# Chunk in Region old format + class McRegionChunk(Chunk): + + def __init__(self, nbt): + Chunk.__init__(self, nbt) + self.blocks = BlockArray(self.chunk_data['Blocks'].value, self.chunk_data['Data'].value) + + def get_max_height(self): + return 127 + + def get_block(self, x, y, z): + name = block_id_to_name(self.blocks.get_block(x, y, z)) + return name + + def iter_block(self): + for y in range(0, 128): + for z in range(0, 16): + for x in range(0, 16): + yield self.get_block(x, y, z) + + +# Section in Anvil new format + +class AnvilSection(object): + + def __init__(self, nbt, version): + self.names = [] + self.indexes = [] + + # Is the section flattened ? 
+ # See https://minecraft.gamepedia.com/1.13/Flattening + + if version == 0 or version == 1343: # 1343 = MC 1.12.2 + self._init_array(nbt) + elif version >= 1631 and version <= 2230: # MC 1.13 to MC 1.15.2 + self._init_index_unpadded(nbt) + elif version >= 2566 and version <= 2730: # MC 1.16.0 to MC 1.17.2 (latest tested version) + self._init_index_padded(nbt) + else: + raise NotImplementedError() + + # Section contains 4096 blocks whatever data version + + assert len(self.indexes) == 4096 + + + # Decode legacy section + # Contains an array of block numeric identifiers + + def _init_array(self, nbt): + bids = [] + for bid in nbt['Blocks'].value: + try: + i = bids.index(bid) + except ValueError: + bids.append(bid) + i = len(bids) - 1 + self.indexes.append(i) + + for bid in bids: + bname = block_id_to_name(bid) + self.names.append(bname) + + + # Decode modern section + # Contains palette of block names and indexes packed with run-on between elements (pre 1.16 format) + + def _init_index_unpadded(self, nbt): + + for p in nbt['Palette']: + name = p['Name'].value + self.names.append(name) + + states = nbt['BlockStates'].value + + # Block states are packed into an array of longs + # with variable number of bits per block (min: 4) + + num_bits = (len(self.names) - 1).bit_length() + if num_bits < 4: num_bits = 4 + assert num_bits == len(states) * 64 / 4096 + mask = pow(2, num_bits) - 1 + + i = 0 + bits_left = 64 + curr_long = states[0] + + for _ in range(0,4096): + if bits_left == 0: + i = i + 1 + curr_long = states[i] + bits_left = 64 + + if num_bits <= bits_left: + self.indexes.append(curr_long & mask) + curr_long = curr_long >> num_bits + bits_left = bits_left - num_bits + else: + i = i + 1 + next_long = states[i] + remaining_bits = num_bits - bits_left + + next_long = (next_long & (pow(2, remaining_bits) - 1)) << bits_left + curr_long = (curr_long & (pow(2, bits_left) - 1)) + self.indexes.append(next_long | curr_long) + + curr_long = states[i] + curr_long = curr_long >> remaining_bits + bits_left = 64 - remaining_bits + + + # Decode modern section + # Contains palette of block names and indexes packed with padding if elements don't fit (post 1.16 format) + + def _init_index_padded(self, nbt): + + for p in nbt['Palette']: + name = p['Name'].value + self.names.append(name) + + states = nbt['BlockStates'].value + num_bits = (len(self.names) - 1).bit_length() + if num_bits < 4: num_bits = 4 + mask = 2**num_bits - 1 + + indexes_per_element = 64 // num_bits + last_state_elements = 4096 % indexes_per_element + if last_state_elements == 0: last_state_elements = indexes_per_element + + assert len(states) == ceil(4096 / indexes_per_element) + + for i in range(len(states)-1): + long = states[i] + + for _ in range(indexes_per_element): + self.indexes.append(long & mask) + long = long >> num_bits + + + long = states[-1] + for _ in range(last_state_elements): + self.indexes.append(long & mask) + long = long >> num_bits + + + + def get_block(self, x, y, z): + # Blocks are stored in YZX order + i = y * 256 + z * 16 + x + p = self.indexes[i] + return self.names[p] + + + def iter_block(self): + for i in range(0, 4096): + p = self.indexes[i] + yield self.names[p] + + +# Chunck in Anvil new format + +class AnvilChunk(Chunk): + def __init__(self, nbt): Chunk.__init__(self, nbt) - self.blocks = BlockArray(nbt['Level']['Blocks'].value, nbt['Level']['Data'].value) -# TODO: Add class AnvilChunk(Chunk) + # Started to work on this class with MC version 1.13.2 + # so with the chunk data version 1631 + # Backported to 
first Anvil version (= 0) from examples + # Could work with other versions, but has to be tested first + + try: + version = nbt['DataVersion'].value + if version != 1343 and not (version >= 1631 or version <= 2730): + raise NotImplementedError('DataVersion %d not implemented' % (version,)) + except KeyError: + version = 0 + + # Load all sections + + self.sections = {} + if 'Sections' in self.chunk_data: + for s in self.chunk_data['Sections']: + if "BlockStates" in s.keys(): # sections may only contain lighting information + self.sections[s['Y'].value] = AnvilSection(s, version) + + + def get_section(self, y): + """Get a section from Y index.""" + if y in self.sections: + return self.sections[y] + + return None + + + def get_max_height(self): + ymax = 0 + for y in self.sections.keys(): + if y > ymax: ymax = y + return ymax * 16 + 15 + + + def get_block(self, x, y, z): + """Get a block from relative x,y,z.""" + sy,by = divmod(y, 16) + section = self.get_section(sy) + if section == None: + return None + + return section.get_block(x, by, z) + + + def iter_block(self): + for s in self.sections.values(): + for b in s.iter_block(): + yield b + class BlockArray(object): """Convenience class for dealing with a Block/data byte array.""" @@ -49,28 +345,6 @@ def __init__(self, blocksBytes=None, dataBytes=None): else: self.dataList = [0]*16384 # Create an empty data list (32768 4-bit entries of zero make 16384 byte entries) - # Get all block entries - def get_all_blocks(self): - """Return the blocks that are in this BlockArray.""" - return self.blocksList - - # Get all data entries - def get_all_data(self): - """Return the data of all the blocks in this BlockArray.""" - bits = [] - for b in self.dataList: - # The first byte of the Blocks arrays correspond - # to the LEAST significant bits of the first byte of the Data. - # NOT to the MOST significant bits, as you might expected. - bits.append(b & 15) # Little end of the byte - bits.append((b >> 4) & 15) # Big end of the byte - return bits - - # Get all block entries and data entries as tuples - def get_all_blocks_and_data(self): - """Return both blocks and data, packed together as tuples.""" - return list(zip(self.get_all_blocks(), self.get_all_data())) - def get_blocks_struct(self): """Return a dictionary with block ids keyed to (x, y, z).""" cur_x = 0 @@ -186,26 +460,3 @@ def get_block(self, x,y,z, coord=False): offset = y + z*128 + x*128*16 if (coord == False) else coord[1] + coord[2]*128 + coord[0]*128*16 return self.blocksList[offset] - - # Get a given X,Y,Z or a tuple of three coordinates - def get_data(self, x,y,z, coord=False): - """Return the data of the block at x, y, z.""" - offset = y + z*128 + x*128*16 if (coord == False) else coord[1] + coord[2]*128 + coord[0]*128*16 - # The first byte of the Blocks arrays correspond - # to the LEAST significant bits of the first byte of the Data. - # NOT to the MOST significant bits, as you might expected. 
- if (offset % 2 == 1): - # offset is odd - index = (offset-1)//2 - b = self.dataList[index] - return b & 15 # Get little (last 4 bits) end of byte - else: - # offset is even - index = offset//2 - b = self.dataList[index] - return (b >> 4) & 15 # Get big end (first 4 bits) of byte - - def get_block_and_data(self, x,y,z, coord=False): - """Return the tuple of (id, data) for the block at x, y, z""" - return (self.get_block(x,y,z,coord),self.get_data(x,y,z,coord)) - diff --git a/nbt/nbt.py b/nbt/nbt.py index 46ccac1..861a385 100644 --- a/nbt/nbt.py +++ b/nbt/nbt.py @@ -1,10 +1,16 @@ """ Handle the NBT (Named Binary Tag) data format + +For more information about the NBT format: +https://minecraft.gamepedia.com/NBT_format """ from struct import Struct, error as StructError from gzip import GzipFile -from collections import MutableMapping, MutableSequence, Sequence +try: + from collections.abc import MutableMapping, MutableSequence, Sequence +except ImportError: # for Python 2.7 + from collections import MutableMapping, MutableSequence, Sequence import sys _PY3 = sys.version_info >= (3,) @@ -61,6 +67,10 @@ def valuestr(self): returns a summary.""" return unicode(self.value) + def namestr(self): + """Return Unicode string of tag name.""" + return unicode(self.name) + def pretty_tree(self, indent=0): """Return formated Unicode string of self, where iterable items are recursively listed in detail.""" @@ -471,7 +481,10 @@ def __init__(self, buffer=None, name=None): # TODO: add a value parameter as well super(TAG_Compound, self).__init__() self.tags = [] - self.name = "" + if name: + self.name = name + else: + self.name = "" if buffer: self._parse_buffer(buffer) @@ -637,12 +650,14 @@ def __init__(self, filename=None, buffer=None, fileobj=None): def parse_file(self, filename=None, buffer=None, fileobj=None): """Completely parse a file, extracting all tags.""" + closefile = True if filename: self.file = GzipFile(filename, 'rb') elif buffer: if hasattr(buffer, 'name'): self.filename = buffer.name self.file = buffer + closefile = False elif fileobj: if hasattr(fileobj, 'name'): self.filename = fileobj.name @@ -654,7 +669,8 @@ def parse_file(self, filename=None, buffer=None, fileobj=None): name = TAG_String(buffer=self.file).value self._parse_buffer(self.file) self.name = name - self.file.close() + if closefile: + self.file.close() else: raise MalformedFileError( "First record is not a Compound Tag") diff --git a/nbt/region.py b/nbt/region.py index 90338ac..224099b 100644 --- a/nbt/region.py +++ b/nbt/region.py @@ -1,17 +1,20 @@ """ Handle a region file, containing 32x32 chunks. -For more info of the region file format look: -http://www.minecraftwiki.net/wiki/Region_file_format + +For more information about the region file format: +https://minecraft.gamepedia.com/Region_file_format """ from .nbt import NBTFile, MalformedFileError from struct import pack, unpack -from collections import Mapping +try: + from collections.abc import Mapping +except ImportError: # for Python 2.7 + from collections import Mapping import zlib import gzip from io import BytesIO -import math, time -from os.path import getsize +import time from os import SEEK_END # constants @@ -190,7 +193,7 @@ class RegionFile(object): """Constant indicating an normal status: the chunk does not exist. Deprecated. Use :const:`nbt.region.STATUS_CHUNK_NOT_CREATED` instead.""" - def __init__(self, filename=None, fileobj=None): + def __init__(self, filename=None, fileobj=None, chunkclass = None): """ Read a region file by filename or file object. 
If a fileobj is specified, it is not closed after use; it is the callers responibility to close it. @@ -198,6 +201,7 @@ def __init__(self, filename=None, fileobj=None): self.file = None self.filename = None self._closefile = False + self.chunkclass = chunkclass if filename: self.filename = filename self.file = open(filename, 'r+b') # open for read and write in binary mode @@ -477,7 +481,23 @@ def iter_chunks(self): yield self.get_chunk(m.x, m.z) except RegionFileFormatError: pass - + + # The following method will replace 'iter_chunks' + # but the previous is kept for the moment + # until the users update their code + + def iter_chunks_class(self): + """ + Yield each readable chunk present in the region. + Chunks that can not be read for whatever reason are silently skipped. + This function returns a :class:`nbt.chunk.Chunk` instance. + """ + for m in self.get_metadata(): + try: + yield self.chunkclass(self.get_chunk(m.x, m.z)) + except RegionFileFormatError: + pass + def __iter__(self): return self.iter_chunks() diff --git a/nbt/world.py b/nbt/world.py index 3689dfa..607e422 100644 --- a/nbt/world.py +++ b/nbt/world.py @@ -1,5 +1,8 @@ """ Handles a Minecraft world save using either the Anvil or McRegion format. + +For more information about the world format: +https://minecraft.gamepedia.com/Level_format """ import os, glob, re @@ -100,7 +103,7 @@ def iter_regions(self): else: # It is not yet cached. # Get file, but do not cache later. - regionfile = region.RegionFile(self.regionfiles[(x,z)]) + regionfile = region.RegionFile(self.regionfiles[(x,z)], chunkclass = self.chunkclass) regionfile.loc = Location(x=x,z=z) close_after_use = True try: @@ -122,7 +125,7 @@ def call_for_each_region(self, callback_function, boundingbox=None): See [What can be pickled and unpickled?](https://docs.python.org/library/pickle.html#what-can-be-pickled-and-unpickled) in the Python documentation for limitation on the output of `callback_function()`. """ - raise NotImplemented() + raise NotImplementedError() def get_nbt(self,x,z): """ @@ -143,7 +146,7 @@ def set_nbt(self,x,z,nbt): adds it to the Regionfile. May create a new Regionfile if that did not exist yet. nbt must be a nbt.NBTFile instance, not a Chunk or regular TAG_Compound object. """ - raise NotImplemented() + raise NotImplementedError() # TODO: implement def iter_nbt(self): @@ -170,7 +173,7 @@ def call_for_each_nbt(self, callback_function, boundingbox=None): See [What can be pickled and unpickled?](https://docs.python.org/library/pickle.html#what-can-be-pickled-and-unpickled) in the Python documentation for limitation on the output of `callback_function()`. 
""" - raise NotImplemented() + raise NotImplementedError() def get_chunk(self,x,z): """ @@ -239,8 +242,7 @@ class AnvilWorldFolder(_BaseWorldFolder): """Represents a world save using the new Anvil format.""" type = "Anvil" extension = 'mca' - chunkclass = chunk.Chunk - # chunkclass = chunk.AnvilChunk # TODO: change to AnvilChunk when done + chunkclass = chunk.AnvilChunk class _WorldFolderFactory(object): @@ -254,7 +256,7 @@ def __call__(self, *args, **kwargs): wf = cls(*args, **kwargs) if wf.nonempty(): # Check if the world is non-empty return wf - raise UnknownWorldFormat("Empty world or unknown format: %r" % world_folder) + raise UnknownWorldFormat("Empty world or unknown format") WorldFolder = _WorldFolderFactory([AnvilWorldFolder, McRegionWorldFolder]) """ From d8b3b379bd15e1e6c29ae07b1aea63bba2eec4fd Mon Sep 17 00:00:00 2001 From: Alejandro Aguilera Date: Sun, 7 Nov 2021 11:33:47 +0100 Subject: [PATCH 138/151] Add wildcard parsing for windows shell using glob. --- regionfixer_core/world.py | 9 +++++++++ 1 file changed, 9 insertions(+) diff --git a/regionfixer_core/world.py b/regionfixer_core/world.py index 799356c..d4d6977 100644 --- a/regionfixer_core/world.py +++ b/regionfixer_core/world.py @@ -1664,6 +1664,15 @@ def parse_paths(args): RegionSet -- A RegionSet object with all the regionfiles found in args """ + # windows shell doesn't parse wildcards, parse them here using glob + expanded_args = [] + for arg in args: + earg = glob(arg) + # glob eats away any argument that doesn't match a file, keep those, they will be world folders + if earg: expanded_args.extend(earg) + else: expanded_args.append(arg) + args = expanded_args + # parese the list of region files and worlds paths world_list = [] region_list = [] From e317aa170083f4844895fc527f86fed1aa92c268 Mon Sep 17 00:00:00 2001 From: 734F96 <50321866+734F96@users.noreply.github.com> Date: Mon, 8 Nov 2021 15:24:24 +0100 Subject: [PATCH 139/151] Support for snapshot 21w43a (Minecraft 1.18) In this snapshot, the "Level" tag doesn't exist anymore It means that for region chunks (Not entities or POI), data is now stored at the root of the chunk Changes in the code : - Detect chunk type with a function ("get_chunk_type()"), to avoid code duplication - Adjustements on REGION chunks which do not have the "Level" tag - Fix entities deletion in 1.17 entities chunks and 1.18 level chunks (Which shouldn't happen) Important note : I use the tag "DataVersion", which I think is the best thing to do --- regionfixer_core/scan.py | 24 ++++++++--- regionfixer_core/world.py | 89 +++++++++++++++++++++++++++++++-------- 2 files changed, 89 insertions(+), 24 deletions(-) diff --git a/regionfixer_core/scan.py b/regionfixer_core/scan.py index bd6ffe0..1047305 100644 --- a/regionfixer_core/scan.py +++ b/regionfixer_core/scan.py @@ -910,7 +910,9 @@ def scan_chunk(region_file, coords, global_coords, entity_limit): try: chunk = region_file.get_chunk(*coords) - if "Level" in chunk: + chunk_type = world.get_chunk_type(chunk) + + if chunk_type == c.LEVEL_DIR: # to know if is a poi chunk or a level chunk check the contents # if 'Level' is at root is a level chunk @@ -920,10 +922,18 @@ def scan_chunk(region_file, coords, global_coords, entity_limit): # Since snapshot 20w45a (1.17), entities MAY BE separated if chunk["DataVersion"].value >= 2681 : - if "Entities" in chunk["Level"] : + num_entities = None + + # Since snapshot 21w43a (1.18), "Level" tag doesn't exist anymore + # According to the wiki, an "entities" tag can still be there (But I've never seen it) + if 
chunk["DataVersion"].value >= 2844 : + if "entities" in chunk : + num_entities = len(chunk["entities"]) + + # >= 20w45a and < 21w43a + # Don't check if "Level" tag exist, at this point, it should exist + elif "Entities" in chunk["Level"] : num_entities = len(chunk["Level"]["Entities"]) - else : - num_entities = None else : num_entities = len(chunk["Level"]["Entities"]) @@ -956,7 +966,7 @@ def scan_chunk(region_file, coords, global_coords, entity_limit): global_coords = world.get_global_chunk_coords(split(region_file.filename)[1], coords[0], coords[1]) num_entities = None - elif "Sections" in chunk: + elif chunk_type == c.POI_DIR: # To check if it's a POI chunk check for the tag "Sections" # If we give a look to the wiki: # https://minecraft.gamepedia.com/Java_Edition_level_format#poi_format @@ -970,7 +980,7 @@ def scan_chunk(region_file, coords, global_coords, entity_limit): num_entities = None status = c.CHUNK_OK - elif "Entities" in chunk: + elif chunk_type == c.ENTITIES_DIR: # To check if it's a entities chunk check for the tag "Entities" # If entities are in the region files, the tag "Entities" is in "Level" # https://minecraft.fandom.com/wiki/Entity_format @@ -993,7 +1003,7 @@ def scan_chunk(region_file, coords, global_coords, entity_limit): else: # what is this? we shouldn't reach this part of the code, as far as # we know there is only POI chunks, Entities chunks, and Level chunks - raise AssertionError("Unrecognized scanned chunk in scan_chunk().") + raise AssertionError("Unsupported chunk type in scan_chunk().") ############################################### # POI chunk and Level chunk common errors diff --git a/regionfixer_core/world.py b/regionfixer_core/world.py index d4d6977..c7da0fe 100644 --- a/regionfixer_core/world.py +++ b/regionfixer_core/world.py @@ -409,7 +409,16 @@ def fix_problematic_chunks(self, status): if status == c.CHUNK_MISSING_ENTITIES_TAG: # The arguments to create the empty TAG_List have been somehow extracted by comparing # the tag list from a healthy chunk with the one created by nbt - chunk['Level']['Entities'] = TAG_List(name='Entities', type=nbt._TAG_End) + chunk_type = get_chunk_type(chunk) + if chunk_type == c.LEVEL_DIR : + if chunk["DataVersion"].value >= 2844 : # Snapshot 21w43a (1.18) + chunk['entities'] = TAG_List(name='entities', type=nbt._TAG_End) + else : + chunk['Level']['Entities'] = TAG_List(name='Entities', type=nbt._TAG_End) + elif chunk_type == c.ENTITIES_DIR : + chunk['Entities'] = TAG_List(name='Entities', type=nbt._TAG_End) + else : + raise AssertionError("Unsupported chunk type.") region_file.write_chunk(local_coords[0],local_coords[1], chunk) # create the new status tuple @@ -469,14 +478,7 @@ def remove_chunk_entities(self, x, z): """ - region_file = region.RegionFile(self.path) - chunk = region_file.get_chunk(x, z) - counter = len(chunk['Level']['Entities']) - empty_tag_list = nbt.TAG_List(nbt.TAG_Byte, '', 'Entities') - chunk['Level']['Entities'] = empty_tag_list - region_file.write_chunk(x, z, chunk) - - return counter + return delete_entities( region.RegionFile(self.path), x, z ) def rescan_entities(self, options): """ Updates the status of all the chunks after changing entity_limit. 
@@ -1757,15 +1759,24 @@ def delete_entities(region_file, x, z): """ chunk = region_file.get_chunk(x, z) + chunk_type = get_chunk_type(chunk) empty_tag_list = nbt.TAG_List(nbt.TAG_Byte, '', 'Entities') - if 'Level' in chunk : # Region file - counter = len(chunk['Level']['Entities']) - chunk['Level']['Entities'] = empty_tag_list - elif 'Entities' in chunk : # Entities file (>=1.17) + + if chunk_type == c.LEVEL_DIR : # Region file + if chunk["DataVersion"].value >= 2844 : # Snapshot 21w43a (1.18) + counter = len(chunk['entities']) + chunk['entities'] = empty_tag_list + else : + counter = len(chunk['Level']['Entities']) + chunk['Level']['Entities'] = empty_tag_list + + elif chunk_type == c.ENTITIES_DIR : # Entities file (>=1.17) counter = len(chunk['Entities']) chunk['Entities'] = empty_tag_list + else : - raise AssertionError("Unrecognized chunk in delete_entities().") + raise AssertionError("Unsupported chunk type in delete_entities().") + region_file.write_chunk(x, z, chunk) return counter @@ -1807,6 +1818,44 @@ def get_chunk_region(chunkX, chunkZ): return region_name +def get_chunk_type(chunk): + """Get the type of the chunk (Region/level, POIs or entities) + + Input: + - chunk -- A chunk, from the NBT module + + Return: + - type -- The chunk type (LEVEL_DIR, POI_DIR or ENTITIES_DIR) + """ + + # DataVersion was introduced in snapshot 15w32a (1.9) + # https://minecraft.fandom.com/wiki/Data_version + data_version = 0 + if "DataVersion" in chunk: + data_version = chunk["DataVersion"].value + + # Region/level < 21w43a (1.17) + if data_version < 2844 and "Level" in chunk: + return c.LEVEL_DIR + + # Region/level >= 21w43a (1.18) + # The "or" is important, because some tags doesn't seem to be mandatory + if data_version >= 2844 and ("structures" in chunk or "sections" in chunk): + return c.LEVEL_DIR + + # POIs >= 1.14 (Which snapshot ?) + # I couldn't find when POI files were added + # But it's certainly a snapshot after 18w43a (DataVersion = 1901) + if data_version >= 1901 and "Sections" in chunk: + return c.POI_DIR + + # Entities >= 20w45a (1.17) + if data_version >= 2681 and "Entities" in chunk: + return c.ENTITIES_DIR + + raise AssertionError("Unrecognized chunk type in get_chunk_type().") + + def get_chunk_data_coords(nbt_file): """ Gets and returns the coordinates stored in the NBT structure of the chunk. @@ -1821,15 +1870,21 @@ def get_chunk_data_coords(nbt_file): """ + chunk_type = get_chunk_type(nbt_file) + # Region file - if 'Level' in nbt_file : - level = nbt_file.__getitem__('Level') + if chunk_type == c.LEVEL_DIR : + # Since snapshot 21w43a (1.18), "Level" tag doesn't exist anymore + if nbt_file["DataVersion"].value >= 2844 : + level = nbt_file + else : + level = nbt_file.__getitem__('Level') coordX = level.__getitem__('xPos').value coordZ = level.__getitem__('zPos').value # Entities file : - elif 'Entities' in nbt_file : + elif chunk_type == c.ENTITIES_DIR : coordX, coordZ = nbt_file.__getitem__('Position').value else : From cd38d67d08a9dd92e8eb607170804456e4f6cc5a Mon Sep 17 00:00:00 2001 From: Alejandro Aguilera Date: Sun, 19 Dec 2021 22:41:18 +0100 Subject: [PATCH 140/151] Bump version number. --- regionfixer_core/version.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/regionfixer_core/version.py b/regionfixer_core/version.py index 8dd4a00..1184ca9 100644 --- a/regionfixer_core/version.py +++ b/regionfixer_core/version.py @@ -21,5 +21,5 @@ # along with this program. If not, see . 
# -version_string = "0.3.4" +version_string = "0.3.5" version_numbers = version_string.split('.') From 995638002a7d3faa2dc50cd2046985238c879ed1 Mon Sep 17 00:00:00 2001 From: sandtechnology Date: Wed, 22 Dec 2021 18:12:54 +0800 Subject: [PATCH 141/151] Fix errors when fixing worlds below 1.9 worlds below 1.9 does not have DataVersion tag, so check it before get it it also causing wrong detection about missing entity tags --- regionfixer_core/scan.py | 2 +- regionfixer_core/world.py | 6 +++--- 2 files changed, 4 insertions(+), 4 deletions(-) diff --git a/regionfixer_core/scan.py b/regionfixer_core/scan.py index 1047305..f7747b0 100644 --- a/regionfixer_core/scan.py +++ b/regionfixer_core/scan.py @@ -921,7 +921,7 @@ def scan_chunk(region_file, coords, global_coords, entity_limit): data_coords = world.get_chunk_data_coords(chunk) # Since snapshot 20w45a (1.17), entities MAY BE separated - if chunk["DataVersion"].value >= 2681 : + if "DataVersion" in chunk and chunk["DataVersion"].value >= 2681 : num_entities = None # Since snapshot 21w43a (1.18), "Level" tag doesn't exist anymore diff --git a/regionfixer_core/world.py b/regionfixer_core/world.py index c7da0fe..f07e0c4 100644 --- a/regionfixer_core/world.py +++ b/regionfixer_core/world.py @@ -411,7 +411,7 @@ def fix_problematic_chunks(self, status): # the tag list from a healthy chunk with the one created by nbt chunk_type = get_chunk_type(chunk) if chunk_type == c.LEVEL_DIR : - if chunk["DataVersion"].value >= 2844 : # Snapshot 21w43a (1.18) + if "DataVersion" in chunk and chunk["DataVersion"].value >= 2844 : # Snapshot 21w43a (1.18) chunk['entities'] = TAG_List(name='entities', type=nbt._TAG_End) else : chunk['Level']['Entities'] = TAG_List(name='Entities', type=nbt._TAG_End) @@ -1763,7 +1763,7 @@ def delete_entities(region_file, x, z): empty_tag_list = nbt.TAG_List(nbt.TAG_Byte, '', 'Entities') if chunk_type == c.LEVEL_DIR : # Region file - if chunk["DataVersion"].value >= 2844 : # Snapshot 21w43a (1.18) + if "DataVersion" in chunk and chunk["DataVersion"].value >= 2844 : # Snapshot 21w43a (1.18) counter = len(chunk['entities']) chunk['entities'] = empty_tag_list else : @@ -1875,7 +1875,7 @@ def get_chunk_data_coords(nbt_file): # Region file if chunk_type == c.LEVEL_DIR : # Since snapshot 21w43a (1.18), "Level" tag doesn't exist anymore - if nbt_file["DataVersion"].value >= 2844 : + if "DataVersion" in nbt_file and nbt_file["DataVersion"].value >= 2844 : level = nbt_file else : level = nbt_file.__getitem__('Level') From 8050259be31fc735a1b4804f780941a3d54f629a Mon Sep 17 00:00:00 2001 From: Alejandro Aguilera Date: Sun, 26 Dec 2021 22:30:44 +0100 Subject: [PATCH 142/151] Update nbt to 1.5.1 --- nbt/CONTRIBUTORS.txt | 6 ++ nbt/README.txt | 247 ++++++++++++++++++++++++++----------------- nbt/__init__.py | 2 +- nbt/region.py | 3 + nbt/world.py | 2 +- 5 files changed, 159 insertions(+), 101 deletions(-) diff --git a/nbt/CONTRIBUTORS.txt b/nbt/CONTRIBUTORS.txt index a0ec799..d1f6a19 100644 --- a/nbt/CONTRIBUTORS.txt +++ b/nbt/CONTRIBUTORS.txt @@ -2,14 +2,20 @@ d0sboots (David Walker) dtrauma (Thomas Roesner) Fenixin (Alejandro Aguilera) fwaggle (Jamie Fraser) +jlsajfj (Joseph) k1988 (Terry Zhao) kamyu2 MacFreek (Freek Dijkstra) +MFLD.fr MidnightLightning (Brooks Boyd) MostAwesomeDude (Corbin Simpson) psolyca (Damien) +s-leroux (Sylvain Leroux) SBliven (Spencer Bliven) +steffen-kiess (Steffen Kieß) Stumpylog (Trenton Holmes) suresttexas00 (Surest Texas) tWoolie (Thomas Woolford) +underscoren (Marius Steffens) Xgkkp +Zachy (Zachary Howard) diff 
--git a/nbt/README.txt b/nbt/README.txt index 0b09590..668849d 100644 --- a/nbt/README.txt +++ b/nbt/README.txt @@ -1,99 +1,148 @@ -This is a Named Binary Tag parser based upon the specification by Markus Persson. - -From The spec: - "NBT (Named Binary Tag) is a tag based binary format designed to carry large - amounts of binary data with smaller amounts of additional data. - An NBT file consists of a single GZIPped Named Tag of type TAG_Compound." - -read the full spec at http://www.minecraft.net/docs/NBT.txt - -Usage: - 1) Reading files. - - The easiest way to read an nbt file is to instantiate an NBTFile object e.g. - - >>> import nbt - >>> nbtfile = nbt.NBTFile("bigtest.nbt",'rb') - >>> nbtfile.name - u'Level' - >>> nbtfile["nested compound test"].tag_info() - TAG_Compound("nested compound test"): 2 Entries - >>> for tag in nbtfile["nested compound test"]["ham"].tags: - ... print(tag.tag_info()) - ... - TAG_String("name"): Hampus - TAG_Float("value"): 0.75 - >>> [tag.value for tag in nbtfile["listTest (long)"].value] - [11, 12, 13, 14, 15] - - Files can also be read from a fileobj (file-like object that contains a compressed - stream) or a buffer (file-like object that contains an uncompressed stream of NBT - Tags) which can be accomplished thusly: - - >>> import nbt - >>> nbtfile = NBTFile(fileobj=previously_opened_file) - # or.... - >>> nbtfile = NBTFile(buffer=net_socket.makefile()) - - 2) Writing files. - - Writing files is easy too! if you have a NBTFile object, simply call it's - write_file() method. If the NBTFile was instantiated with a filename, then - write_file needs no extra arguments. It just works. If however you created a new - file object from scratch (or even if you just want to save it somewhere else) - call write_file('path\to\new\file.nbt') - - >>> import nbt - >>> nbtfile = nbt.NBTFile("bigtest.nbt",'rb') - >>> nbtfile["listTest (compound)"].tags[0]["name"].value = "Different name" - >>> nbtfile.write_file("newnbtfile.nbt") - - It is also possible to write to a buffer or fileobj using the same keyword args. - - >>> nbtfile.write_file(fileobj = my_file) #compressed - >>> nbtfile.write_file(buffer = sock.makefile()) #uncompressed - - 3) Creating files - - Creating files is trickier but ultimately should give you no issue, as long as - you have read the NBT spec (hint.. it's very short). Also be sure to note that - the NBTFile object is actually a TAG_Compound with some wrapper features, so - you can use all the standard tag features - - >>> from nbt import * - >>> nbtfile = NBTFile() - - first, don't forget to name the top level tag - - >>> nbtfile.name = "My Top Level Tag" - >>> nbtfile.tags.append(TAG_Float(name="My Float Name", value=3.152987593947)) - >>> mylist = TAG_List(name="TestList", type=TAG_Long) #type needs to be pre-declared! 
- >>> mylist.tags.append(TAG_Long(100)) - >>> mylist.tags.extend([TAG_Long(120),TAG_Long(320),TAG_Long(19)]) - >>> nbtfile.tags.append(mylist) - >>> print(nbtfile.pretty_tree()) - TAG_Compound("My Top Level Tag"): 2 Entries - { - TAG_Float("My Float Name"): 3.15298759395 - TAG_List("TestList"): 4 entries of type TAG_Long - { - TAG_Long: 100 - TAG_Long: 120 - TAG_Long: 320 - TAG_Long: 19 - } - } - >>> nbtfile["TestList"].tags.sort(key = lambda tag: tag.value) - >>> print(nbtfile.pretty_tree()) - TAG_Compound("My Top Level Tag"): 2 Entries - { - TAG_Float("My FloatName"): 3.15298759395 - TAG_List("TestList"): 4 entries of type TAG_Long - { - TAG_Long: 19 - TAG_Long: 100 - TAG_Long: 120 - TAG_Long: 320 - } - } - >>> nbtfile.write_file("mynbt.dat") +========================== +The NBT library for Python +========================== + +Forewords +========= + +This is mainly a `Named Binary Tag` parser & writer library. + +From the initial specification by Markus Persson:: + + NBT (Named Binary Tag) is a tag based binary format designed to carry large + amounts of binary data with smaller amounts of additional data. + An NBT file consists of a single GZIPped Named Tag of type TAG_Compound. + +Current specification is on the official [Minecraft Wiki](https://minecraft.gamepedia.com/NBT_format). + +This library is very suited to inspect & edit the Minecraft data files. Provided +examples demonstrate how to: +- get player and world statistics, +- list mobs, chest contents, biomes, +- draw a simple world map, +- etc. + +.. image:: world.png + +*Note: Examples are just here to help using and testing the library. +Developing Minecraft tools is out of the scope of this project.* + + +Status +====== + +The library supports all the currently known tag types (including the arrays +of 'Integer' and 'Long'), and the examples work with the McRegion, +pre-"flattened" and "flattened" Anvil formats. + +Last update was tested on Minecraft version **1.13.2**. + + +Dependencies +============ + +The library, the tests and the examples are only using the Python core library, +except `curl` for downloading some test reference data and `PIL` (Python +Imaging Library) for the `map` example. + +Supported Python releases: 2.7, 3.4 to 3.7 + + +Usage +===== + +Reading files +------------- + +The easiest way to read an nbt file is to instantiate an NBTFile object e.g.:: + + >>> from nbt import nbt + >>> nbtfile = nbt.NBTFile("bigtest.nbt",'rb') + >>> nbtfile.name + u'Level' + >>> nbtfile["nested compound test"].tag_info() + TAG_Compound("nested compound test"): 2 Entries + >>> for tag in nbtfile["nested compound test"]["ham"].tags: + ... print(tag.tag_info()) + ... + TAG_String("name"): Hampus + TAG_Float("value"): 0.75 + >>> [tag.value for tag in nbtfile["listTest (long)"].value] + [11, 12, 13, 14, 15] + +Files can also be read from a fileobj (file-like object that contains a compressed +stream) or a buffer (file-like object that contains an uncompressed stream of NBT +Tags) which can be accomplished thusly:: + + >>> from nbt.nbt import * + >>> nbtfile = NBTFile(fileobj=previously_opened_file) + # or.... + >>> nbtfile = NBTFile(buffer=net_socket.makefile()) + + +Writing files +------------- + +Writing files is easy too! if you have a NBTFile object, simply call it's +write_file() method. If the NBTFile was instantiated with a filename, then +write_file needs no extra arguments. It just works. 
If however you created a new +file object from scratch (or even if you just want to save it somewhere else) +call write_file('path\to\new\file.nbt'):: + + >>> from nbt import nbt + >>> nbtfile = nbt.NBTFile("bigtest.nbt",'rb') + >>> nbtfile["listTest (compound)"].tags[0]["name"].value = "Different name" + >>> nbtfile.write_file("newnbtfile.nbt") + +It is also possible to write to a buffer or fileobj using the same keyword args:: + + >>> nbtfile.write_file(fileobj = my_file) #compressed + >>> nbtfile.write_file(buffer = sock.makefile()) #uncompressed + + +Creating files +-------------- + +Creating files is trickier but ultimately should give you no issue, as long as +you have read the NBT spec (hint.. it's very short). Also be sure to note that +the NBTFile object is actually a TAG_Compound with some wrapper features, so +you can use all the standard tag features:: + + >>> from nbt.nbt import * + >>> nbtfile = NBTFile() + + +First, don't forget to name the top level tag:: + + >>> nbtfile.name = "My Top Level Tag" + >>> nbtfile.tags.append(TAG_Float(name="My Float Name", value=3.152987593947)) + >>> mylist = TAG_List(name="TestList", type=TAG_Long) #type needs to be pre-declared! + >>> mylist.tags.append(TAG_Long(100)) + >>> mylist.tags.extend([TAG_Long(120),TAG_Long(320),TAG_Long(19)]) + >>> nbtfile.tags.append(mylist) + >>> print(nbtfile.pretty_tree()) + TAG_Compound("My Top Level Tag"): 2 Entries + { + TAG_Float("My Float Name"): 3.15298759395 + TAG_List("TestList"): 4 entries of type TAG_Long + { + TAG_Long: 100 + TAG_Long: 120 + TAG_Long: 320 + TAG_Long: 19 + } + } + >>> nbtfile["TestList"].tags.sort(key = lambda tag: tag.value) + >>> print(nbtfile.pretty_tree()) + TAG_Compound("My Top Level Tag"): 2 Entries + { + TAG_Float("My FloatName"): 3.15298759395 + TAG_List("TestList"): 4 entries of type TAG_Long + { + TAG_Long: 19 + TAG_Long: 100 + TAG_Long: 120 + TAG_Long: 320 + } + } + >>> nbtfile.write_file("mynbt.dat") diff --git a/nbt/__init__.py b/nbt/__init__.py index e0e92ad..dd5211d 100644 --- a/nbt/__init__.py +++ b/nbt/__init__.py @@ -4,7 +4,7 @@ # Documentation only automatically includes functions specified in __all__. # If you add more functions, please manually include them in doc/index.rst. -VERSION = (1, 5, 0) +VERSION = (1, 5, 1) """NBT version as tuple. Note that the major and minor revision number are always present, but the patch identifier (the 3rd number) is only used in 1.4.""" diff --git a/nbt/region.py b/nbt/region.py index 224099b..765af8c 100644 --- a/nbt/region.py +++ b/nbt/region.py @@ -201,6 +201,8 @@ def __init__(self, filename=None, fileobj=None, chunkclass = None): self.file = None self.filename = None self._closefile = False + self.closed = False + """Set to true if `close()` was successfully called on that region""" self.chunkclass = chunkclass if filename: self.filename = filename @@ -290,6 +292,7 @@ def close(self): if self._closefile: try: self.file.close() + self.closed = True except IOError: pass diff --git a/nbt/world.py b/nbt/world.py index 607e422..34877ed 100644 --- a/nbt/world.py +++ b/nbt/world.py @@ -76,7 +76,7 @@ def nonempty(self): def get_region(self, x,z): """Get a region using x,z coordinates of a region. 
Cache results.""" - if (x,z) not in self.regions: + if (x,z) not in self.regions or self.regions[x,z].closed: if (x,z) in self.regionfiles: self.regions[(x,z)] = region.RegionFile(self.regionfiles[(x,z)]) else: From 627045e99e768ebd3d8dc7c9a29213c81e230554 Mon Sep 17 00:00:00 2001 From: Alejandro Aguilera Date: Sun, 26 Dec 2021 23:03:20 +0100 Subject: [PATCH 143/151] Add EOFError to the list of exceptions for data files. --- regionfixer_core/scan.py | 3 +++ 1 file changed, 3 insertions(+) diff --git a/regionfixer_core/scan.py b/regionfixer_core/scan.py index f7747b0..6e0f140 100644 --- a/regionfixer_core/scan.py +++ b/regionfixer_core/scan.py @@ -757,6 +757,9 @@ def scan_data(scanned_dat_file): s.status = c.DATAFILE_UNREADABLE except TypeError: s.status = c.DATAFILE_UNREADABLE + except EOFError: + # There is a compressed stream in the file but ends abruptly + s.status = c.DATAFILE_UNREADABLE except: s.status = c.DATAFILE_UNREADABLE From 4a2c29db42b6892ffb327853682f6a7497111beb Mon Sep 17 00:00:00 2001 From: Alejandro Aguilera Date: Sun, 26 Dec 2021 23:22:42 +0100 Subject: [PATCH 144/151] Update contributors list. --- CONTRIBUTORS.txt | 11 +++++++++-- 1 file changed, 9 insertions(+), 2 deletions(-) diff --git a/CONTRIBUTORS.txt b/CONTRIBUTORS.txt index 69f06c6..9cb2cc9 100644 --- a/CONTRIBUTORS.txt +++ b/CONTRIBUTORS.txt @@ -5,6 +5,13 @@ Fenixin (Alejandro Aguilera) - Main developer Contributors (in no particular order): aheadley (Alex Headley) - First multiprocessing version of Region Fixer. -carlallen (Carl Allen) - Fix problem in MacOS +734F96 (Lavander) - Update RegionFixer for Minecraft 1.18 +sleiss (Simon Leiß) - Fix typos kbn (Kristian Berge) - Small fixes -macfreek (Freek Dijkstra) - Fixes and lots of help \ No newline at end of file +KasperFranz (Kasper Sanguesa-Franz) - Fix typo in readme +macfreek (Freek Dijkstra) - Fixes and lots of help +Pisich (carloser) - Changes to the readme +carlallen (Carl Allen) - Fix problem in MacOS +charlyhue (Charly Hue) - Fix logging with onliners +andm (andm) - Fix typos +sandtechnology (sandtechnology) - Fix problem scanning old worlds From bfafd378ceb65116e4ea48cab24f1e6394051978 Mon Sep 17 00:00:00 2001 From: Alejandro Aguilera Date: Sun, 26 Dec 2021 23:30:07 +0100 Subject: [PATCH 145/151] Fix but while replacing regions. Fix missing self. in World.__str__ --- regionfixer_core/world.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/regionfixer_core/world.py b/regionfixer_core/world.py index f07e0c4..8cfe81d 100644 --- a/regionfixer_core/world.py +++ b/regionfixer_core/world.py @@ -1165,7 +1165,7 @@ def __init__(self, world_path): self.scanned = False def __str__(self): - counters = get_number_regions() + counters = self.get_number_regions() text = "World information:\n" text += " World path: {0}\n".format(self.path) text += " World name: {0}\n".format(self.name) @@ -1483,7 +1483,7 @@ def replace_problematic_regions(self, backup_worlds, status, entity_limit, delet break bad_regions = regionset.list_regions(status) - if ( bad_chunks and + if ( bad_regions and b_regionset._get_dimension_directory() != regionset._get_dimension_directory() and b_regionset._get_region_type_directory() != regionset._get_region_type_directory() ): print("The regionset \'{0}\' doesn't exist in the backup directory. 
Skipping this backup directory.".format(regionset._get_dim_type_string())) From 27f1b5689f42cf1d2b7a6659ab251eb0abdf55c8 Mon Sep 17 00:00:00 2001 From: Alejandro Aguilera Date: Mon, 27 Dec 2021 13:29:45 +0100 Subject: [PATCH 146/151] Add argument to include paths to scan from a text file. --- regionfixer.py | 34 ++++++++++++++++++++++++++++++++-- 1 file changed, 32 insertions(+), 2 deletions(-) diff --git a/regionfixer.py b/regionfixer.py index ef8fc6b..06d36e3 100644 --- a/regionfixer.py +++ b/regionfixer.py @@ -160,6 +160,16 @@ def main(): usage=usage, epilog=epilog) + parser.add_argument('--text-file-input', + '--tf', + help=('Path to a text file with a list of world folders and region ' + 'files. One line per element, wildcards can be used, empty lines' + 'will be ignored and # can be used at the start of a line as comment' + '. These will be treated the same as adding paths to command input.'), + metavar='', + type=str, + dest='text_file_input', + default=None) parser.add_argument('--backups', '-b', @@ -372,8 +382,28 @@ def main(): getpass("Press enter to continue:") return c.RV_CRASH - - world_list, regionset = world.parse_paths(args.paths) + # First, read paths from file + if args.text_file_input: + try: + tf = open(args.text_file_input, 'r') + path_lines = tf.readlines() + tmp = [] + # Process it + for i in range(len(path_lines)): + # Remove end of lines characters + line = path_lines[i].replace('\n', '') + # Remove comment lines and empty lines + if line and "#" not in line: + tmp.append(line) + + path_lines = tmp + tf.close() + + except: + print("Something went wrong while reading the text file input!") + + # Parse all the paths, from text file and command input + world_list, regionset = world.parse_paths(args.paths + path_lines) # print greetings an version number print("\nWelcome to Region Fixer!") From 13745561d2f8bc341680dffa5c29735351cbaff9 Mon Sep 17 00:00:00 2001 From: Alejandro Aguilera Date: Mon, 27 Dec 2021 13:45:00 +0100 Subject: [PATCH 147/151] Fix log writing not giving enough space between worlds. --- regionfixer_core/world.py | 2 ++ 1 file changed, 2 insertions(+) diff --git a/regionfixer_core/world.py b/regionfixer_core/world.py index 8cfe81d..d23413a 100644 --- a/regionfixer_core/world.py +++ b/regionfixer_core/world.py @@ -1264,6 +1264,8 @@ def summary(self): chunk_info += text if text else "" final += chunk_info if chunk_info else "All the chunks are ok." + final += "\n\n" + return final def get_name(self): From 80396db4ba2b13cd010bd70168193fff858e537d Mon Sep 17 00:00:00 2001 From: Alejandro Aguilera Date: Mon, 27 Dec 2021 13:48:47 +0100 Subject: [PATCH 148/151] Fix missing variable declaration. --- regionfixer.py | 1 + 1 file changed, 1 insertion(+) diff --git a/regionfixer.py b/regionfixer.py index 06d36e3..072fc5c 100644 --- a/regionfixer.py +++ b/regionfixer.py @@ -383,6 +383,7 @@ def main(): return c.RV_CRASH # First, read paths from file + path_lines = [] if args.text_file_input: try: tf = open(args.text_file_input, 'r') From 81a9183dce9bd7eb433d1085c4c248b2694efdda Mon Sep 17 00:00:00 2001 From: Alejandro Aguilera Date: Fri, 1 Jul 2022 12:12:16 +0200 Subject: [PATCH 149/151] Change UTF8 coding-decoding to MUTF8. Fixes a few long standing issues. 
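The practical difference is that MUTF-8 (the JVM's modified UTF-8, used for NBT strings) encodes embedded NUL characters as the two-byte sequence C0 80 and supplementary code points as six-byte surrogate pairs, both of which a strict UTF-8 codec rejects. A minimal sketch with the mutf8 helpers bundled by this patch, for an arbitrary string containing a NUL (output per the included implementation):

    >>> from mutf8 import encode_modified_utf8, decode_modified_utf8
    >>> encode_modified_utf8('a\x00b')        # NUL becomes C0 80, never a raw 0x00 byte
    b'a\xc0\x80b'
    >>> decode_modified_utf8(b'a\xc0\x80b')   # bytes that a strict 'utf-8' decode would reject
    'a\x00b'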
--- mutf8/LICENSE | 19 ++++ mutf8/README.md | 82 +++++++++++++++ mutf8/__init__.py | 21 ++++ mutf8/cmutf8.c | 256 ++++++++++++++++++++++++++++++++++++++++++++++ mutf8/mutf8.py | 147 ++++++++++++++++++++++++++ nbt/nbt.py | 9 +- 6 files changed, 532 insertions(+), 2 deletions(-) create mode 100644 mutf8/LICENSE create mode 100644 mutf8/README.md create mode 100644 mutf8/__init__.py create mode 100644 mutf8/cmutf8.c create mode 100644 mutf8/mutf8.py diff --git a/mutf8/LICENSE b/mutf8/LICENSE new file mode 100644 index 0000000..49e36a9 --- /dev/null +++ b/mutf8/LICENSE @@ -0,0 +1,19 @@ +Copyright (c) 2012-2015 Tyler Kennedy . All rights reserved. + +Permission is hereby granted, free of charge, to any person obtaining a +copy of this software and associated documentation files (the "Software"), +to deal in the Software without restriction, including without limitation +the rights to use, copy, modify, merge, publish, distribute, sublicense, +and/or sell copies of the Software, and to permit persons to whom the +Software is furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in +all copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING +FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER +DEALINGS IN THE SOFTWARE. diff --git a/mutf8/README.md b/mutf8/README.md new file mode 100644 index 0000000..cf6a2ca --- /dev/null +++ b/mutf8/README.md @@ -0,0 +1,82 @@ +![Tests](https://github.com/TkTech/mutf8/workflows/Tests/badge.svg?branch=master) + +# mutf-8 + +This package contains simple pure-python as well as C encoders and decoders for +the MUTF-8 character encoding. In most cases, you can also parse the even-rarer +CESU-8. + +These days, you'll most likely encounter MUTF-8 when working on files or +protocols related to the JVM. Strings in a Java `.class` file are encoded using +MUTF-8, strings passed by the JNI, as well as strings exported by the object +serializer. + +This library was extracted from [Lawu][], a Python library for working with JVM +class files. + +## 🎉 Installation + +Install the package from PyPi: + +``` +pip install mutf8 +``` + +Binary wheels are available for the following: + +| | py3.6 | py3.7 | py3.8 | py3.9 | +| ---------------- | ----- | ----- | ----- | ----- | +| OS X (x86_64) | y | y | y | y | +| Windows (x86_64) | y | y | y | y | +| Linux (x86_64) | y | y | y | y | + +If binary wheels are not available, it will attempt to build the C extension +from source with any C99 compiler. If it could not build, it will fall back +to a pure-python version. + +## Usage + +Encoding and decoding is simple: + +```python +from mutf8 import encode_modified_utf8, decode_modified_utf8 + +unicode = decode_modified_utf8(byte_like_object) +bytes = encode_modified_utf8(unicode) +``` + +This module *does not* register itself globally as a codec, since importing +should be side-effect-free. + +## 📈 Benchmarks + +The C extension is significantly faster - often 20x to 40x faster. 
+ + + +### MUTF-8 Decoding +| Name | Min (μs) | Max (μs) | StdDev | Ops | +|------------------------------|------------|------------|----------|---------------| +| cmutf8-decode_modified_utf8 | 0.00009 | 0.00080 | 0.00000 | 9957678.56358 | +| pymutf8-decode_modified_utf8 | 0.00190 | 0.06040 | 0.00000 | 450455.96019 | + +### MUTF-8 Encoding +| Name | Min (μs) | Max (μs) | StdDev | Ops | +|------------------------------|------------|------------|----------|----------------| +| cmutf8-encode_modified_utf8 | 0.00008 | 0.00151 | 0.00000 | 11897361.05101 | +| pymutf8-encode_modified_utf8 | 0.00180 | 0.16650 | 0.00000 | 474390.98091 | + + +## C Extension + +The C extension is optional. If a binary package is not available, or a C +compiler is not present, the pure-python version will be used instead. If you +want to ensure you're using the C version, import it directly: + +```python +from mutf8.cmutf8 import decode_modified_utf8 + +decode_modified_utf(b'\xED\xA1\x80\xED\xB0\x80') +``` + +[Lawu]: https://github.com/tktech/lawu diff --git a/mutf8/__init__.py b/mutf8/__init__.py new file mode 100644 index 0000000..943dc4d --- /dev/null +++ b/mutf8/__init__.py @@ -0,0 +1,21 @@ +""" +Utility methods for handling oddities in character encoding encountered +when parsing and writing JVM ClassFiles or object serialization archives. + +MUTF-8 is the same as CESU-8, but with different encoding for 0x00 bytes. + +.. note:: + + http://bugs.python.org/issue2857 was an attempt in 2008 to get support + for MUTF-8/CESU-8 into the python core. +""" + + +try: + from mutf8.cmutf8 import decode_modified_utf8, encode_modified_utf8 +except ImportError: + from mutf8.mutf8 import decode_modified_utf8, encode_modified_utf8 + + +# Shut up linters. +ALL_IMPORTS = [decode_modified_utf8, encode_modified_utf8] diff --git a/mutf8/cmutf8.c b/mutf8/cmutf8.c new file mode 100644 index 0000000..e05ddf3 --- /dev/null +++ b/mutf8/cmutf8.c @@ -0,0 +1,256 @@ +#define PY_SSIZE_T_CLEAN +#include +#include + +PyDoc_STRVAR(decode_doc, + "Decodes a bytestring containing MUTF-8 as defined in section\n" + "4.4.7 of the JVM specification.\n\n" + ":param s: A byte/buffer-like to be converted.\n" + ":returns: A unicode representation of the original string."); +static PyObject * +decode_modified_utf8(PyObject *self, PyObject *args) +{ +#define return_err(_msg) \ + do { \ + PyObject *exc = PyObject_CallFunction(PyExc_UnicodeDecodeError, \ + "sy#nns", "mutf-8", view.buf, \ + view.len, ix, ix + 1, _msg); \ + if (exc != NULL) { \ + PyCodec_StrictErrors(exc); \ + Py_DECREF(exc); \ + } \ + PyMem_Free(cp_out); \ + PyBuffer_Release(&view); \ + return NULL; \ + } while (0) + + Py_buffer view; + + if (!PyArg_ParseTuple(args, "y*", &view)) { + return NULL; + } + + // MUTF-8 input. + uint8_t *buf = (uint8_t *)view.buf; + // Array of temporary UCS-4 codepoints. + // There's no point using PyUnicode_new and _WriteChar, because + // it requires us to have iterated the string to get the maximum unicode + // codepoint and count anyways. + Py_UCS4 *cp_out = PyMem_Calloc(view.len, sizeof(Py_UCS4)); + if (!cp_out) { + return PyErr_NoMemory(); + } + + // # of codepoints we found & current index into cp_out. + Py_ssize_t cp_count = 0; + + for (Py_ssize_t ix = 0; ix < view.len; ix++) { + Py_UCS4 x = buf[ix]; + + if (x == 0) { + return_err("Embedded NULL byte in input."); + } + else if (x < 0x80) { + // ASCII/one-byte codepoint. + x &= 0x7F; + } + else if ((x & 0xE0) == 0xC0) { + // Two-byte codepoint. 
+ if (ix + 1 >= view.len) { + return_err( + "2-byte codepoint started, but input too short" + " to finish."); + } + x = ((x & 0x1F) << 0x06 | (buf[ix + 1] & 0x3F)); + ix++; + } + else if ((x & 0xF0) == 0xE0) { + // Three-byte codepoint. + if (ix + 2 >= view.len) { + return_err( + "3-byte or 6-byte codepoint started, but input too short" + " to finish."); + } + uint8_t b2 = buf[ix + 1]; + uint8_t b3 = buf[ix + 2]; + + if (x == 0xED && (b2 & 0xF0) == 0xA0) { + if (ix + 5 >= view.len) { + return_err( + "6-byte codepoint started, but input too short" + " to finish."); + } + + // Possible six-byte codepoint. + uint8_t b4 = buf[ix + 3]; + uint8_t b5 = buf[ix + 4]; + uint8_t b6 = buf[ix + 5]; + + if (b4 == 0xED && (b5 & 0xF0) == 0xB0) { + // Definite six-byte codepoint. + x = ( + 0x10000 | + (b2 & 0x0F) << 0x10 | + (b3 & 0x3F) << 0x0A | + (b5 & 0x0F) << 0x06 | + (b6 & 0x3F) + ); + ix += 5; + cp_out[cp_count++] = x; + continue; + } + } + + x = ( + (x & 0x0F) << 0x0C | + (b2 & 0x3F) << 0x06 | + (b3 & 0x3F) + ); + + ix += 2; + } + cp_out[cp_count++] = x; + } + + PyObject *out = + PyUnicode_FromKindAndData(PyUnicode_4BYTE_KIND, cp_out, cp_count); + + PyMem_Free(cp_out); + PyBuffer_Release(&view); + return out; +#undef return_err +} + +inline Py_ssize_t _encoded_size(void *data, Py_ssize_t length, int kind) { + Py_ssize_t byte_count = 0; + + for (Py_ssize_t i = 0; i < length; i++) { + Py_UCS4 cp = PyUnicode_READ(kind, data, i); + if (cp == 0x00) { + // NULLs will get encoded as C0 80. + byte_count += 2; + } else if (cp <= 0x7F) { + byte_count++; + } else if (cp <= 0x7FF) { + byte_count += 2; + } else if (cp <= 0xFFFF) { + byte_count += 3; + } else { + byte_count += 6; + } + } + + return byte_count; +} + +PyDoc_STRVAR(encoded_size_doc, + "Returns the number of bytes required to store the given\n" + "unicode string when encoded as MUTF-8.\n\n" + ":param u: Unicode string to be converted.\n" + ":returns: The number of bytes required."); +static PyObject * +encoded_size(PyObject *self, PyObject *args) +{ + PyObject *src = NULL; + + if (!PyArg_ParseTuple(args, "U", &src)) { + return NULL; + } + + return PyLong_FromSsize_t( + _encoded_size( + PyUnicode_DATA(src), + PyUnicode_GET_LENGTH(src), + PyUnicode_KIND(src) + ) + ); +} + +PyDoc_STRVAR(encode_doc, + "Encodes a unicode string as MUTF-8 as defined in section\n" + "4.4.7 of the JVM specification.\n\n" + ":param u: Unicode string to be converted.\n" + ":returns: The encoded string as a `bytes` object."); +static PyObject * +encode_modified_utf8(PyObject *self, PyObject *args) +{ + PyObject *src = NULL; + + if (!PyArg_ParseTuple(args, "U", &src)) { + return NULL; + } + + void *data = PyUnicode_DATA(src); + Py_ssize_t length = PyUnicode_GET_LENGTH(src); + int kind = PyUnicode_KIND(src); + char *byte_out = PyMem_Calloc(_encoded_size(data, length, kind), 1); + + if (!byte_out) { + return PyErr_NoMemory(); + } + + Py_ssize_t byte_count = 0; + + for (Py_ssize_t i = 0; i < length; i++) { + Py_UCS4 cp = PyUnicode_READ(kind, data, i); + if (cp == 0x00) { + // NULL byte encoding shortcircuit. + byte_out[byte_count++] = 0xC0; + byte_out[byte_count++] = 0x80; + } + else if (cp <= 0x7F) { + // ASCII + byte_out[byte_count++] = cp; + } + else if (cp <= 0x7FF) { + // Two-byte codepoint. 
+ byte_out[byte_count++] = (0xC0 | (0x1F & (cp >> 0x06))); + byte_out[byte_count++] = (0x80 | (0x3F & cp)); + } + else if (cp <= 0xFFFF) { + // Three-byte codepoint + byte_out[byte_count++] = (0xE0 | (0x0F & (cp >> 0x0C))); + byte_out[byte_count++] = (0x80 | (0x3F & (cp >> 0x06))); + byte_out[byte_count++] = (0x80 | (0x3F & cp)); + } + else { + // "Two-times-three" byte codepoint. + byte_out[byte_count++] = 0xED; + byte_out[byte_count++] = 0xA0 | ((cp >> 0x10) & 0x0F); + byte_out[byte_count++] = 0x80 | ((cp >> 0x0A) & 0x3F); + byte_out[byte_count++] = 0xED; + byte_out[byte_count++] = 0xB0 | ((cp >> 0x06) & 0x0F); + byte_out[byte_count++] = 0x80 | (cp & 0x3F); + } + } + + PyObject *out = PyBytes_FromStringAndSize(byte_out, byte_count); + PyMem_Free(byte_out); + return out; +} + +static PyMethodDef module_methods[] = { + {"decode_modified_utf8", decode_modified_utf8, METH_VARARGS, decode_doc}, + {"encode_modified_utf8", encode_modified_utf8, METH_VARARGS, encode_doc}, + {"encoded_size", encoded_size, METH_VARARGS, encoded_size_doc}, + {NULL, NULL, 0, NULL}}; + +static struct PyModuleDef cmutf8_module = { + PyModuleDef_HEAD_INIT, + "mutf8.cmutf8", + PyDoc_STR("Encoders and decoders for the MUTF-8 encoding."), + -1, + module_methods, +}; + +PyMODINIT_FUNC +PyInit_cmutf8(void) +{ + PyObject *m; + + m = PyModule_Create(&cmutf8_module); + if (m == NULL) + return NULL; + + return m; +} diff --git a/mutf8/mutf8.py b/mutf8/mutf8.py new file mode 100644 index 0000000..ceec8f5 --- /dev/null +++ b/mutf8/mutf8.py @@ -0,0 +1,147 @@ +def decode_modified_utf8(s: bytes) -> str: + """ + Decodes a bytestring containing modified UTF-8 as defined in section + 4.4.7 of the JVM specification. + + :param s: bytestring to be converted. + :returns: A unicode representation of the original string. + """ + s_out = [] + s_len = len(s) + s_ix = 0 + + while s_ix < s_len: + b1 = s[s_ix] + s_ix += 1 + + if b1 == 0: + raise UnicodeDecodeError( + 'mutf-8', + s, + s_ix - 1, + s_ix, + 'Embedded NULL byte in input.' + ) + if b1 < 0x80: + # ASCII/one-byte codepoint. + s_out.append(chr(b1)) + elif (b1 & 0xE0) == 0xC0: + # Two-byte codepoint. + if s_ix >= s_len: + raise UnicodeDecodeError( + 'mutf-8', + s, + s_ix - 1, + s_ix, + '2-byte codepoint started, but input too short to' + ' finish.' + ) + + s_out.append( + chr( + (b1 & 0x1F) << 0x06 | + (s[s_ix] & 0x3F) + ) + ) + s_ix += 1 + elif (b1 & 0xF0) == 0xE0: + # Three-byte codepoint. + if s_ix + 1 >= s_len: + raise UnicodeDecodeError( + 'mutf-8', + s, + s_ix - 1, + s_ix, + '3-byte or 6-byte codepoint started, but input too' + ' short to finish.' + ) + + b2 = s[s_ix] + b3 = s[s_ix + 1] + + if b1 == 0xED and (b2 & 0xF0) == 0xA0: + # Possible six-byte codepoint. + if s_ix + 4 >= s_len: + raise UnicodeDecodeError( + 'mutf-8', + s, + s_ix - 1, + s_ix, + '3-byte or 6-byte codepoint started, but input too' + ' short to finish.' + ) + + b4 = s[s_ix + 2] + b5 = s[s_ix + 3] + b6 = s[s_ix + 4] + + if b4 == 0xED and (b5 & 0xF0) == 0xB0: + # Definite six-byte codepoint. + s_out.append( + chr( + 0x10000 | + (b2 & 0x0F) << 0x10 | + (b3 & 0x3F) << 0x0A | + (b5 & 0x0F) << 0x06 | + (b6 & 0x3F) + ) + ) + s_ix += 5 + continue + + s_out.append( + chr( + (b1 & 0x0F) << 0x0C | + (b2 & 0x3F) << 0x06 | + (b3 & 0x3F) + ) + ) + s_ix += 2 + else: + raise RuntimeError + + return u''.join(s_out) + + +def encode_modified_utf8(u: str) -> bytes: + """ + Encodes a unicode string as modified UTF-8 as defined in section 4.4.7 + of the JVM specification. + + :param u: unicode string to be converted. 
+ :returns: A decoded bytearray. + """ + final_string = bytearray() + + for c in (ord(char) for char in u): + if c == 0x00: + # NULL byte encoding shortcircuit. + final_string.extend([0xC0, 0x80]) + elif c <= 0x7F: + # ASCII + final_string.append(c) + elif c <= 0x7FF: + # Two-byte codepoint. + final_string.extend([ + (0xC0 | (0x1F & (c >> 0x06))), + (0x80 | (0x3F & c)) + ]) + elif c <= 0xFFFF: + # Three-byte codepoint. + final_string.extend([ + (0xE0 | (0x0F & (c >> 0x0C))), + (0x80 | (0x3F & (c >> 0x06))), + (0x80 | (0x3F & c)) + ]) + else: + # Six-byte codepoint. + final_string.extend([ + 0xED, + 0xA0 | ((c >> 0x10) & 0x0F), + 0x80 | ((c >> 0x0A) & 0x3f), + 0xED, + 0xb0 | ((c >> 0x06) & 0x0f), + 0x80 | (c & 0x3f) + ]) + + return bytes(final_string) diff --git a/nbt/nbt.py b/nbt/nbt.py index 861a385..3622e99 100644 --- a/nbt/nbt.py +++ b/nbt/nbt.py @@ -7,6 +7,9 @@ from struct import Struct, error as StructError from gzip import GzipFile + +from mutf8 import encode_modified_utf8, decode_modified_utf8 + try: from collections.abc import MutableMapping, MutableSequence, Sequence except ImportError: # for Python 2.7 @@ -360,10 +363,12 @@ def _parse_buffer(self, buffer): read = buffer.read(length.value) if len(read) != length.value: raise StructError() - self.value = read.decode("utf-8") + #self.value = read.decode("utf-8") + self.value = decode_modified_utf8(read) def _render_buffer(self, buffer): - save_val = self.value.encode("utf-8") + #save_val = self.value.encode("utf-8") + save_val = encode_modified_utf8(self.value) length = TAG_Short(len(save_val)) length._render_buffer(buffer) buffer.write(save_val) From 5ca1a70cd44dbb18c7058af1759d627cf72ed636 Mon Sep 17 00:00:00 2001 From: Alejandro Aguilera Date: Sat, 2 Jul 2022 10:18:19 +0200 Subject: [PATCH 150/151] Arguments are now autogenerated from constants.py. --- regionfixer.py | 127 +++++++++++----------------------- regionfixer_core/constants.py | 15 ++-- 2 files changed, 46 insertions(+), 96 deletions(-) diff --git a/regionfixer.py b/regionfixer.py index 072fc5c..ba1baf0 100644 --- a/regionfixer.py +++ b/regionfixer.py @@ -185,66 +185,47 @@ def main(): dest='backups', default=None) - parser.add_argument('--replace-corrupted', - '--rc', - help='Try to replace the corrupted chunks using the backup' - ' directories. Can be only used scanning one world.', - default=False, - dest='replace_corrupted', - action='store_true') - - parser.add_argument('--replace-wrong-located', - '--rw', - help='Try to replace the wrong located chunks using the ' - 'backup directories. Can be only used scanning one ' - 'world.', - default=False, - dest='replace_wrong_located', - action='store_true') - - parser.add_argument('--replace-entities', - '--re', - help='Try to replace the chunks with too many entities using ' - 'the backup directories. Can be only used scanning ' - 'one world.', - default=False, - dest='replace_entities', - action='store_true') - - parser.add_argument('--replace-shared-offset', - '--rs', - help='Try to replace the chunks with a shared offset using ' - 'the backup directories. Can be only used scanning ' - 'one world.', - default=False, - dest='replace_shared_offset', - action='store_true') - - parser.add_argument('--replace-too-small', - '--rt', - help='Try to replace the region files that are too small to ' - 'be actually be a region file using the backup directories.' 
- 'Can be only used scanning one world.', - default=False, - dest='replace_too_small', - action='store_true') - - parser.add_argument('--delete-corrupted', - '--dc', - help='[WARNING!] This option deletes! Delete all the corrupted ' - 'chunks. Used with --replace-corrupted or --replace-wrong-located' - ' will delete all the non-replaced chunks.', - action='store_true', - default=False) - - parser.add_argument('--delete-wrong-located', - '--dw', - help='[WARNING!] This option deletes! Delete all the wrong located ' - 'chunks. Used with --replace-corrupted or --replace-wrong-located' - ' will delete all the non-replaced chunks.', - action='store_true', - default=False, - dest='delete_wrong_located') + for solvable_status in c.CHUNK_PROBLEMS_SOLUTIONS: + if c.CHUNK_SOLUTION_REMOVE in c.CHUNK_PROBLEMS_SOLUTIONS[solvable_status]: + parser.add_argument('--delete-' + c.CHUNK_PROBLEMS_ARGS[solvable_status], + '--d' + c.CHUNK_PROBLEMS_ABBR[solvable_status], + help='[WARNING!] This option deletes! Delete all chunks with ' + 'status: ' + c.CHUNK_STATUS_TEXT[solvable_status], + action='store_true', + default=False) + if c.CHUNK_SOLUTION_REPLACE in c.CHUNK_PROBLEMS_SOLUTIONS[solvable_status]: + parser.add_argument('--replace-' + c.CHUNK_PROBLEMS_ARGS[solvable_status], + '--r' + c.CHUNK_PROBLEMS_ABBR[solvable_status], + help='This option can be only used while scanning one world. ' + 'Try to replace the problematic chunks with the status "{0}" ' + 'using backup directories.'.format(c.CHUNK_STATUS_TEXT[solvable_status]), + action='store_true', + default=False) + if c.CHUNK_SOLUTION_RELOCATE_USING_DATA in c.CHUNK_PROBLEMS_SOLUTIONS[solvable_status]: + parser.add_argument('--relocate-' + c.CHUNK_PROBLEMS_ARGS[solvable_status], + '--rl' + c.CHUNK_PROBLEMS_ABBR[solvable_status], + help='This option can be only used while scanning one world. ' + 'Try to replace the problematic chunks with the status "{0}" ' + 'using backup directories.'.format(c.CHUNK_STATUS_TEXT[solvable_status]), + action='store_true', + default=False) + + for solvable_status in c.REGION_PROBLEMS_SOLUTIONS: + if c.REGION_SOLUTION_REMOVE in c.REGION_PROBLEMS_SOLUTIONS[solvable_status]: + parser.add_argument('--delete-' + c.REGION_PROBLEMS_ARGS[solvable_status], + '--d' + c.REGION_PROBLEMS_ABBR[solvable_status], + help='[WARNING!] This option deletes! Delete all chunks with ' + 'status: ' + c.REGION_STATUS_TEXT[solvable_status], + action='store_true', + default=False) + if c.REGION_SOLUTION_REPLACE in c.REGION_PROBLEMS_SOLUTIONS[solvable_status]: + parser.add_argument('--replace-' + c.REGION_PROBLEMS_ARGS[solvable_status], + '--r' + c.REGION_PROBLEMS_ABBR[solvable_status], + help='This option can be only used while scanning one world. ' + 'Try to replace the problematic chunks with the status "{0}" ' + 'using backup directories.'.format(c.REGION_STATUS_TEXT[solvable_status]), + action='store_true', + default=False) parser.add_argument('--delete-entities', '--de', @@ -260,24 +241,6 @@ def main(): default=False, dest='delete_entities') - parser.add_argument('--delete-shared-offset', - '--ds', - help='[WARNING!] This option deletes! Delete all the chunk ' - 'with status shared offset. It will remove the region ' - 'header for the false chunk, note that you ' - 'don\'t loose any chunk doing this.', - action='store_true', - default=False, - dest='delete_shared_offset') - - parser.add_argument('--delete-missing-tag', - '--dmt', - help='[WARNING!] This option deletes! 
Remove any chunks ' - 'with the mandatory entities tag missing.', - dest='delete_missing_tag', - default=False, - action='store_true') - parser.add_argument('--fix-corrupted', '--fc', help='Try to fix chunks that are corrupted by extracting as much ' @@ -302,14 +265,6 @@ def main(): default=False, action='store_true') - parser.add_argument('--delete-too-small', - '--dt', - help='[WARNING!] This option deletes! Remove any region files ' - 'found to be too small to actually be a region file.', - dest='delete_too_small', - default=False, - action='store_true') - parser.add_argument('--entity-limit', '--el', help='Specify the limit for the --delete-entities option ' diff --git a/regionfixer_core/constants.py b/regionfixer_core/constants.py index 26bc645..46951a7 100644 --- a/regionfixer_core/constants.py +++ b/regionfixer_core/constants.py @@ -76,10 +76,10 @@ # arguments used in the options CHUNK_PROBLEMS_ARGS = {CHUNK_CORRUPTED: 'corrupted', - CHUNK_WRONG_LOCATED: 'wrong', + CHUNK_WRONG_LOCATED: 'wrong-located', CHUNK_TOO_MANY_ENTITIES: 'entities', - CHUNK_SHARED_OFFSET: 'sharing', - CHUNK_MISSING_ENTITIES_TAG: 'miss_tag' + CHUNK_SHARED_OFFSET: 'shared-offset', + CHUNK_MISSING_ENTITIES_TAG: 'missing_tag' } # used in some places where there is less space @@ -100,7 +100,7 @@ CHUNK_PROBLEMS_SOLUTIONS = {CHUNK_CORRUPTED: [CHUNK_SOLUTION_REMOVE, CHUNK_SOLUTION_REPLACE], CHUNK_WRONG_LOCATED: [CHUNK_SOLUTION_REMOVE, CHUNK_SOLUTION_REPLACE, CHUNK_SOLUTION_RELOCATE_USING_DATA], - CHUNK_TOO_MANY_ENTITIES: [CHUNK_SOLUTION_REMOVE_ENTITIES], + CHUNK_TOO_MANY_ENTITIES: [CHUNK_SOLUTION_REMOVE_ENTITIES, CHUNK_SOLUTION_REPLACE], CHUNK_SHARED_OFFSET: [CHUNK_SOLUTION_REMOVE, CHUNK_SOLUTION_REPLACE], CHUNK_MISSING_ENTITIES_TAG: [CHUNK_SOLUTION_REMOVE, CHUNK_SOLUTION_REPLACE]} @@ -168,9 +168,7 @@ REGION_SOLUTION_REMOVE = 151 REGION_SOLUTION_REPLACE = 152 -REGION_PROBLEMS_SOLUTIONS = {REGION_TOO_SMALL: [REGION_SOLUTION_REMOVE, REGION_SOLUTION_REPLACE], - REGION_UNREADABLE: [REGION_SOLUTION_REMOVE, REGION_SOLUTION_REPLACE] - } +REGION_PROBLEMS_SOLUTIONS = {REGION_TOO_SMALL: [REGION_SOLUTION_REMOVE, REGION_SOLUTION_REPLACE]} # list with problem, status-text, problem arg tuples REGION_PROBLEMS_ITERATOR = [] @@ -182,9 +180,6 @@ except KeyError: pass -REGION_PROBLEMS_ARGS = {REGION_TOO_SMALL: 'too-small'} - - # ------------------ From d890085536649fdc6ee6e3053e01f89bf16ea4e0 Mon Sep 17 00:00:00 2001 From: Alejandro Aguilera Date: Sat, 2 Jul 2022 10:19:30 +0200 Subject: [PATCH 151/151] Bump version number. --- regionfixer_core/version.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/regionfixer_core/version.py b/regionfixer_core/version.py index 1184ca9..75458ac 100644 --- a/regionfixer_core/version.py +++ b/regionfixer_core/version.py @@ -21,5 +21,5 @@ # along with this program. If not, see . # -version_string = "0.3.5" +version_string = "0.3.6" version_numbers = version_string.split('.')