bin: fix issues found by Ruff linter
Change-Id: Ic81d21a90059f5db87543c8ce2abb8007f278f14
Reviewed-on: https://gerrit.libreoffice.org/c/core/+/171008
Tested-by: Jenkins
Reviewed-by: Ilmari Lauhakangas <ilmari.lauhakangas@libreoffice.org>
Tested-by: Ilmari Lauhakangas <ilmari.lauhakangas@libreoffice.org>
parent 8ee0b01f22
commit 5fc6ef5990
5 changed files with 191 additions and 100 deletions
@@ -1,6 +1,10 @@
 #!/usr/bin/env python
 
-import json, re, subprocess, sys, urllib3
+import json
+import re
+import subprocess
+import sys
+import urllib3
 
 http = urllib3.PoolManager()
 
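Note: the hunk above splits a combined import statement into one import per line, the style enforced by pycodestyle's E401 check ("multiple imports on one line"), which Ruff also implements; the rule code is inferred here, since the commit message only says the issues were found by Ruff. A minimal sketch of the two spellings, using standard-library modules only:

    # flagged form (several modules in one statement):
    #   import json, re, subprocess, sys
    # preferred form, as in the diff:
    import json
    import re
    import subprocess
    import sys

    print(json.dumps({"imported": [re.__name__, subprocess.__name__, sys.__name__]}))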
@@ -7,7 +7,6 @@
 
 import subprocess
 import re
-import multiprocessing
 
 exported_symbols1 = set()
 imported_symbols1 = set()
@@ -46,7 +45,8 @@ with subprocess_find.stdout as txt:
             # 0000000000000000 DF *UND* 0000000000000000 _ZN16FilterConfigItem10WriteInt32ERKN3rtl8OUStringEi
             for line2_bytes in txt2:
                 line2 = line2_bytes.strip().decode("utf-8")
-                if "*UND*" not in line2: continue
+                if "*UND*" not in line2:
+                    continue
                 tokens = line2.split(" ")
                 sym = tokens[len(tokens)-1].strip()
                 imported_symbols1.add(sym)
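Note: several hunks in this script split single-line conditionals such as `if ...: continue` onto two lines. That is the pattern pycodestyle reports as E701 ("multiple statements on one line (colon)") and Ruff flags the same way; the rule code is again an inference from the diff. A minimal sketch with made-up objdump-style input:

    lines = ["0000 DF *UND* 0000 _ZN3foo3barEv", "0000 g DF .text 0000 main"]
    imported = set()
    for line in lines:
        # single-line form flagged by the linter: if "*UND*" not in line: continue
        if "*UND*" not in line:
            continue
        imported.add(line.split(" ")[-1].strip())
    print(imported)  # {'_ZN3foo3barEv'}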
@@ -76,18 +76,24 @@ progress = 0
 progress_max_len = len(imported_symbols1) + len(exported_symbols1)
 for sym in imported_symbols1:
     progress += 1
-    if (progress % 128 == 0): print( str(int(progress * 100 / progress_max_len)) + "%")
+    if (progress % 128 == 0):
+        print( str(int(progress * 100 / progress_max_len)) + "%")
     filtered_sym = subprocess.check_output(["c++filt", sym]).strip().decode("utf-8")
-    if filtered_sym.startswith("non-virtual thunk to "): filtered_sym = filtered_sym[21:]
-    elif filtered_sym.startswith("virtual thunk to "): filtered_sym = filtered_sym[17:]
+    if filtered_sym.startswith("non-virtual thunk to "):
+        filtered_sym = filtered_sym[21:]
+    elif filtered_sym.startswith("virtual thunk to "):
+        filtered_sym = filtered_sym[17:]
     imported_symbols2.add(filtered_sym)
 progress = 0
 for sym in exported_symbols1:
     progress += 1
-    if (progress % 128 == 0): print( str(int(progress * 100 / progress_max_len)) + "%")
+    if (progress % 128 == 0):
+        print( str(int(progress * 100 / progress_max_len)) + "%")
     filtered_sym = subprocess.check_output(["c++filt", sym]).strip().decode("utf-8")
-    if filtered_sym.startswith("non-virtual thunk to "): filtered_sym = filtered_sym[21:]
-    elif filtered_sym.startswith("virtual thunk to "): filtered_sym = filtered_sym[17:]
+    if filtered_sym.startswith("non-virtual thunk to "):
+        filtered_sym = filtered_sym[21:]
+    elif filtered_sym.startswith("virtual thunk to "):
+        filtered_sym = filtered_sym[17:]
     exported_symbols2.add(filtered_sym)
 
 
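Note: the loop above shells out to `c++filt` once per symbol via `subprocess.check_output` to demangle it, then strips the "non-virtual thunk to " / "virtual thunk to " prefixes (21 and 17 characters long, which is where the magic numbers come from). A minimal sketch of that step, assuming `c++filt` from binutils is on the PATH; the example symbol is taken from the objdump comment in an earlier hunk:

    import subprocess

    sym = "_ZN16FilterConfigItem10WriteInt32ERKN3rtl8OUStringEi"
    # c++filt prints the demangled C++ name for a mangled symbol.
    filtered_sym = subprocess.check_output(["c++filt", sym]).strip().decode("utf-8")
    if filtered_sym.startswith("non-virtual thunk to "):
        filtered_sym = filtered_sym[len("non-virtual thunk to "):]  # same as [21:]
    elif filtered_sym.startswith("virtual thunk to "):
        filtered_sym = filtered_sym[len("virtual thunk to "):]      # same as [17:]
    print(filtered_sym)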
@@ -100,7 +106,8 @@ print("unused_exports = " + str(len(unused_exports)))
 can_be_hidden_count = dict()
 for sym in exported_symbols2:
     i = sym.rfind("::")
-    if i == -1: continue
+    if i == -1:
+        continue
     clz = sym[:i]
     if clz in can_be_hidden_count:
         can_be_hidden_count[clz] = can_be_hidden_count[clz] + 1
@@ -108,7 +115,8 @@ for sym in exported_symbols2:
         can_be_hidden_count[clz] = 1
 for sym in imported_symbols2:
     i = sym.rfind("::")
-    if i == -1: continue
+    if i == -1:
+        continue
     clz = sym[:i]
     if clz in can_be_hidden_count:
         can_be_hidden_count[clz] = can_be_hidden_count[clz] - 1
@@ -123,7 +131,8 @@ for clz in can_be_hidden_count:
 can_be_hidden_list.sort(reverse=True)
 with open("bin/find-mergedlib-can-be-private-symbols.classes.results", "wt") as f:
     for i in can_be_hidden_list:
-        if i[0] < 10: break
+        if i[0] < 10:
+            break
         f.write(str(i[0]) + " " + i[1] + "\n")
 
 
@@ -131,85 +140,163 @@ with open("bin/find-mergedlib-can-be-private-symbols.functions.results", "wt") as f:
     for sym in sorted(unused_exports):
         # Filter out most of the noise.
         # No idea where these are coming from, but not our code.
-        if sym.startswith("CERT_"): continue
-        elif sym.startswith("DER_"): continue
-        elif sym.startswith("FORM_"): continue
-        elif sym.startswith("FPDF"): continue
-        elif sym.startswith("HASH_"): continue
-        elif sym.startswith("Hunspell_"): continue
-        elif sym.startswith("LL_"): continue
-        elif sym.startswith("LP_"): continue
-        elif sym.startswith("LU"): continue
-        elif sym.startswith("MIP"): continue
-        elif sym.startswith("MPS"): continue
-        elif sym.startswith("NSS"): continue
-        elif sym.startswith("NSC_"): continue
-        elif sym.startswith("PK11"): continue
-        elif sym.startswith("PL_"): continue
-        elif sym.startswith("PQ"): continue
-        elif sym.startswith("PBE_"): continue
-        elif sym.startswith("PORT_"): continue
-        elif sym.startswith("PRP_"): continue
-        elif sym.startswith("PR_"): continue
-        elif sym.startswith("PT_"): continue
-        elif sym.startswith("QS_"): continue
-        elif sym.startswith("REPORT_"): continue
-        elif sym.startswith("RSA_"): continue
-        elif sym.startswith("SEC"): continue
-        elif sym.startswith("SGN"): continue
-        elif sym.startswith("SOS"): continue
-        elif sym.startswith("SSL_"): continue
-        elif sym.startswith("VFY_"): continue
-        elif sym.startswith("_PR_"): continue
-        elif sym.startswith("ber_"): continue
-        elif sym.startswith("bfp_"): continue
-        elif sym.startswith("ldap_"): continue
-        elif sym.startswith("ne_"): continue
-        elif sym.startswith("opj_"): continue
-        elif sym.startswith("pg_"): continue
-        elif sym.startswith("pq"): continue
-        elif sym.startswith("presolve_"): continue
-        elif sym.startswith("sqlite3_"): continue
-        elif sym.startswith("libepubgen::"): continue
-        elif sym.startswith("lucene::"): continue
-        elif sym.startswith("Hunspell::"): continue
-        elif sym.startswith("sk_"): continue
-        elif sym.startswith("_Z"): continue
+        if sym.startswith("CERT_"):
+            continue
+        elif sym.startswith("DER_"):
+            continue
+        elif sym.startswith("FORM_"):
+            continue
+        elif sym.startswith("FPDF"):
+            continue
+        elif sym.startswith("HASH_"):
+            continue
+        elif sym.startswith("Hunspell_"):
+            continue
+        elif sym.startswith("LL_"):
+            continue
+        elif sym.startswith("LP_"):
+            continue
+        elif sym.startswith("LU"):
+            continue
+        elif sym.startswith("MIP"):
+            continue
+        elif sym.startswith("MPS"):
+            continue
+        elif sym.startswith("NSS"):
+            continue
+        elif sym.startswith("NSC_"):
+            continue
+        elif sym.startswith("PK11"):
+            continue
+        elif sym.startswith("PL_"):
+            continue
+        elif sym.startswith("PQ"):
+            continue
+        elif sym.startswith("PBE_"):
+            continue
+        elif sym.startswith("PORT_"):
+            continue
+        elif sym.startswith("PRP_"):
+            continue
+        elif sym.startswith("PR_"):
+            continue
+        elif sym.startswith("PT_"):
+            continue
+        elif sym.startswith("QS_"):
+            continue
+        elif sym.startswith("REPORT_"):
+            continue
+        elif sym.startswith("RSA_"):
+            continue
+        elif sym.startswith("SEC"):
+            continue
+        elif sym.startswith("SGN"):
+            continue
+        elif sym.startswith("SOS"):
+            continue
+        elif sym.startswith("SSL_"):
+            continue
+        elif sym.startswith("VFY_"):
+            continue
+        elif sym.startswith("_PR_"):
+            continue
+        elif sym.startswith("ber_"):
+            continue
+        elif sym.startswith("bfp_"):
+            continue
+        elif sym.startswith("ldap_"):
+            continue
+        elif sym.startswith("ne_"):
+            continue
+        elif sym.startswith("opj_"):
+            continue
+        elif sym.startswith("pg_"):
+            continue
+        elif sym.startswith("pq"):
+            continue
+        elif sym.startswith("presolve_"):
+            continue
+        elif sym.startswith("sqlite3_"):
+            continue
+        elif sym.startswith("libepubgen::"):
+            continue
+        elif sym.startswith("lucene::"):
+            continue
+        elif sym.startswith("Hunspell::"):
+            continue
+        elif sym.startswith("sk_"):
+            continue
+        elif sym.startswith("_Z"):
+            continue
         # dynamically loaded
-        elif sym.endswith("get_implementation"): continue
-        elif sym.endswith("component_getFactory"): continue
-        elif sym == "CreateUnoWrapper": continue
-        elif sym == "ExportDOC": continue
-        elif sym == "ExportRTF": continue
-        elif sym == "GetSaveWarningOfMSVBAStorage_ww8": continue
-        elif sym == "GetSpecialCharsForEdit": continue
-        elif sym.startswith("Import"): continue
-        elif sym.startswith("Java_com_sun_star_"): continue
-        elif sym.startswith("TestImport"): continue
-        elif sym.startswith("getAllCalendars_"): continue
-        elif sym.startswith("getAllCurrencies_"): continue
-        elif sym.startswith("getAllFormats"): continue
-        elif sym.startswith("getBreakIteratorRules_"): continue
-        elif sym.startswith("getCollationOptions_"): continue
-        elif sym.startswith("getCollatorImplementation_"): continue
-        elif sym.startswith("getContinuousNumberingLevels_"): continue
-        elif sym.startswith("getDateAcceptancePatterns_"): continue
-        elif sym.startswith("getForbiddenCharacters_"): continue
-        elif sym.startswith("getIndexAlgorithm_"): continue
-        elif sym.startswith("getLCInfo_"): continue
-        elif sym.startswith("getLocaleItem_"): continue
-        elif sym.startswith("getOutlineNumberingLevels_"): continue
-        elif sym.startswith("getReservedWords_"): continue
-        elif sym.startswith("getSTC_"): continue
-        elif sym.startswith("getSearchOptions_"): continue
-        elif sym.startswith("getTransliterations_"): continue
-        elif sym.startswith("getUnicodeScripts_"): continue
-        elif sym.startswith("lok_"): continue
+        elif sym.endswith("get_implementation"):
+            continue
+        elif sym.endswith("component_getFactory"):
+            continue
+        elif sym == "CreateUnoWrapper":
+            continue
+        elif sym == "ExportDOC":
+            continue
+        elif sym == "ExportRTF":
+            continue
+        elif sym == "GetSaveWarningOfMSVBAStorage_ww8":
+            continue
+        elif sym == "GetSpecialCharsForEdit":
+            continue
+        elif sym.startswith("Import"):
+            continue
+        elif sym.startswith("Java_com_sun_star_"):
+            continue
+        elif sym.startswith("TestImport"):
+            continue
+        elif sym.startswith("getAllCalendars_"):
+            continue
+        elif sym.startswith("getAllCurrencies_"):
+            continue
+        elif sym.startswith("getAllFormats"):
+            continue
+        elif sym.startswith("getBreakIteratorRules_"):
+            continue
+        elif sym.startswith("getCollationOptions_"):
+            continue
+        elif sym.startswith("getCollatorImplementation_"):
+            continue
+        elif sym.startswith("getContinuousNumberingLevels_"):
+            continue
+        elif sym.startswith("getDateAcceptancePatterns_"):
+            continue
+        elif sym.startswith("getForbiddenCharacters_"):
+            continue
+        elif sym.startswith("getIndexAlgorithm_"):
+            continue
+        elif sym.startswith("getLCInfo_"):
+            continue
+        elif sym.startswith("getLocaleItem_"):
+            continue
+        elif sym.startswith("getOutlineNumberingLevels_"):
+            continue
+        elif sym.startswith("getReservedWords_"):
+            continue
+        elif sym.startswith("getSTC_"):
+            continue
+        elif sym.startswith("getSearchOptions_"):
+            continue
+        elif sym.startswith("getTransliterations_"):
+            continue
+        elif sym.startswith("getUnicodeScripts_"):
+            continue
+        elif sym.startswith("lok_"):
+            continue
         # UDK API
-        elif sym.startswith("osl_"): continue
-        elif sym.startswith("rtl_"): continue
-        elif sym.startswith("typelib_"): continue
-        elif sym.startswith("typereg_"): continue
-        elif sym.startswith("uno_"): continue
+        elif sym.startswith("osl_"):
+            continue
+        elif sym.startswith("rtl_"):
+            continue
+        elif sym.startswith("typelib_"):
+            continue
+        elif sym.startswith("typereg_"):
+            continue
+        elif sym.startswith("uno_"):
+            continue
         f.write(sym + "\n")
 
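Note: the long if/elif chain above skips symbols that belong to bundled third-party libraries (NSS, NSPR, Hunspell, sqlite3, lucene, ...) or that are looked up dynamically (UNO component factories, locale data getters, the UDK C API), so they are never reported as hideable exports. The two-line form of each branch is the same E701-style fix as in the earlier hunks. As an aside that is not part of this commit, `str.startswith` also accepts a tuple of prefixes, which is one way such a chain can be condensed; a small sketch with an abbreviated, hypothetical prefix list:

    NOISE_PREFIXES = ("CERT_", "NSS", "PK11", "sqlite3_", "lucene::")  # abbreviated

    def is_noise(sym):
        # startswith() with a tuple checks all prefixes in one call.
        return sym.startswith(NOISE_PREFIXES) or sym.endswith("get_implementation")

    print(is_noise("sqlite3_open"))      # True
    print(is_noise("SfxPoolItem::Who"))  # False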
@@ -71,7 +71,7 @@ def add_parent_styles(usedstyles, styles):
 def remove_unused_styles(root, usedstyles, styles, name):
     for style in styles:
         print(style.get("{urn:oasis:names:tc:opendocument:xmlns:style:1.0}name"))
-        if not(style.get("{urn:oasis:names:tc:opendocument:xmlns:style:1.0}name") in usedstyles):
+        if style.get("{urn:oasis:names:tc:opendocument:xmlns:style:1.0}name") not in usedstyles:
             print("removing unused " + name + " " + style.get("{urn:oasis:names:tc:opendocument:xmlns:style:1.0}name"))
             # it is really dumb that there is no parent pointer in dom
             try:
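Note: this hunk and the remaining ones in this file rewrite `if not(x in y):` as `if x not in y:`. That corresponds to the pycodestyle/Ruff rule E713 ("test for membership should be 'not in x'"); the rule code is inferred from the shape of the change. Both forms are equivalent, the second just reads as a single membership test:

    usedstyles = {"Standard", "Heading 1"}
    name = "Footnote"

    # flagged form:
    if not(name in usedstyles):
        print("removing unused style " + name)

    # preferred form, as in the diff:
    if name not in usedstyles:
        print("removing unused style " + name)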
@@ -82,7 +82,7 @@ def remove_unused_styles(root, usedstyles, styles, name):
 def remove_unused_drawings(root, useddrawings, drawings, name):
     for drawing in drawings:
         print(drawing.get("{urn:oasis:names:tc:opendocument:xmlns:drawing:1.0}name"))
-        if not(drawing.get("{urn:oasis:names:tc:opendocument:xmlns:drawing:1.0}name") in useddrawings):
+        if drawing.get("{urn:oasis:names:tc:opendocument:xmlns:drawing:1.0}name") not in useddrawings:
             print("removing unused " + name + " " + drawing.get("{urn:oasis:names:tc:opendocument:xmlns:drawing:1.0}name"))
             root.find(".//{urn:oasis:names:tc:opendocument:xmlns:office:1.0}styles").remove(drawing)
 
@@ -132,7 +132,7 @@ def remove_unused(root):
             usedmasterpages.add(mp.get("{urn:oasis:names:tc:opendocument:xmlns:style:1.0}next-style-name"))
     # remove unused masterpages
     for mp in root.findall(".//{urn:oasis:names:tc:opendocument:xmlns:style:1.0}master-page"):
-        if not(mp.get("{urn:oasis:names:tc:opendocument:xmlns:style:1.0}name") in usedmasterpages):
+        if mp.get("{urn:oasis:names:tc:opendocument:xmlns:style:1.0}name") not in usedmasterpages:
             print("removing unused master page " + mp.get("{urn:oasis:names:tc:opendocument:xmlns:style:1.0}name"))
             # there is no way to get the parent element???
             root.find(".//{urn:oasis:names:tc:opendocument:xmlns:office:1.0}master-styles").remove(mp)
@@ -370,7 +370,7 @@ def remove_unused(root):
     collect_all_attribute(usedfonts, "{urn:oasis:names:tc:opendocument:xmlns:style:1.0}font-name-complex")
     fonts = root.findall(".//{urn:oasis:names:tc:opendocument:xmlns:style:1.0}font-face")
     for font in fonts:
-        if not(font.get("{urn:oasis:names:tc:opendocument:xmlns:style:1.0}name") in usedfonts):
+        if font.get("{urn:oasis:names:tc:opendocument:xmlns:style:1.0}name") not in usedfonts:
             print("removing unused font-face " + font.get("{urn:oasis:names:tc:opendocument:xmlns:style:1.0}name"))
             root.find(".//{urn:oasis:names:tc:opendocument:xmlns:office:1.0}font-face-decls").remove(font)
 
@@ -393,7 +393,7 @@ def remove_unused(root):
     for field in root.findall(".//{urn:oasis:names:tc:opendocument:xmlns:text:1.0}user-field-input"):
         useduserfields.add(field.get("{urn:oasis:names:tc:opendocument:xmlns:text:1.0}name"))
     for field in root.findall(".//{urn:oasis:names:tc:opendocument:xmlns:text:1.0}user-field-decl"):
-        if not(field.get("{urn:oasis:names:tc:opendocument:xmlns:text:1.0}name") in useduserfields):
+        if field.get("{urn:oasis:names:tc:opendocument:xmlns:text:1.0}name") not in useduserfields:
             print("removing unused user-field-decl " + field.get("{urn:oasis:names:tc:opendocument:xmlns:text:1.0}name"))
             root.find(".//{urn:oasis:names:tc:opendocument:xmlns:text:1.0}user-field-decls").remove(field)
 
@@ -10,11 +10,11 @@
 # a consistent look for dialogs
 
 import sys
+import xml.etree.ElementTree as ET
+import re
 # Force python XML parser not faster C accelerators
 # because we can't hook the C implementation
 sys.modules['_elementtree'] = None
-import xml.etree.ElementTree as ET
-import re
 
 DEFAULT_WARNING_STR = 'Lint assertion failed'
 
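Note on the comments in this hunk: putting `None` into `sys.modules` for a module name makes a later import of that name raise ImportError, so blocking `_elementtree` forces `xml.etree.ElementTree` to fall back to its pure-Python parser, which is what lets the script's `LineNumberingParser` subclass hook element parsing. The block only affects imports executed after the assignment. A minimal standalone sketch of the mechanism (not code from the script itself):

    import sys

    # Block the C accelerator so ElementTree uses its pure-Python parser.
    sys.modules['_elementtree'] = None

    import xml.etree.ElementTree as ET  # must run after the line above

    root = ET.fromstring("<dialog><child/></dialog>")
    print(ET.XMLParser, root.tag)  # pure-Python XMLParser class, 'dialog'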
@@ -51,7 +51,7 @@ class LineNumberingParser(ET.XMLParser):
 
 def lint_assert(predicate, warning=DEFAULT_WARNING_STR, node=None):
     if not predicate:
-        if not(node is None):
+        if node is not None:
             print(sys.argv[1] + ":" + str(node._start_line_number) + ": " + warning)
         else:
             print(sys.argv[1] + ": " + warning)
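Note: the identity-test rewrites in this file and the next replace `not(x is None)` with `x is not None`, which pycodestyle/Ruff reports as E714 ("test for object identity should be 'is not'"); the rule code is inferred. A minimal sketch:

    def describe(node=None):
        # flagged form:   if not(node is None):
        # preferred form, as in the diff:
        if node is not None:
            return "node: " + repr(node)
        return "no node"

    print(describe())          # no node
    print(describe("GtkBox"))  # node: 'GtkBox'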
@@ -59,7 +59,7 @@ def lint_assert(predicate, warning=DEFAULT_WARNING_STR, node=None):
 def check_top_level_widget(element):
     # check widget type
     widget_type = element.attrib['class']
-    if not(widget_type in POSSIBLE_TOP_LEVEL_WIDGETS):
+    if widget_type not in POSSIBLE_TOP_LEVEL_WIDGETS:
         return
 
     # check border_width property
@@ -64,7 +64,7 @@ class Trace:
         self.clock = Trace.clock
 
 def addTrace(traces, lines):
-    if not(traces is None) and len(lines) > 0:
+    if traces is not None and len(lines) > 0:
         traces.append(Trace(lines))
 
 def readGdbLog(infile):