#!/usr/bin/env python
# Copyright (C) 2022 The Qt Company Ltd.
# SPDX-License-Identifier: LicenseRef-Qt-Commercial OR GPL-3.0 WITH Qt-GPL-exception-1.0
# See argparse description in main
#
# Run on all .ts files in Qt Creator from qtcreator root dir:
# for tsfile in share/qtcreator/translations/qtcreator_*.ts; do python scripts/scrubts.py $tsfile -context FooBar; done
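# Run on a single .ts file: python scripts/scrubts.py <path/to/file.ts> -context <ContextName>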
import argparse
import pathlib
import sys
from dataclasses import dataclass
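

# Merges adjacent occurrences of the scrubbed <context> into a single block and drops
# <message> entries that are exact (whole-tag) duplicates within it. Prints a short
# summary and returns the rewritten list of lines.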
def rewriteLines(input, scrubbedContext, tsFilePath):
    result = []
    previouslyInContext = False
    contextWasPresent = False
    messageHashes = []
    mergedContextsCount = 0
    removedDuplicatesCount = 0

    lineIter = iter(input)
    for line in lineIter:
        # Context merging
        if line.count(r"</name>") == 1: # Any new context
            if line.count(scrubbedContext + r"</name>") == 1: # Is it the context being scrubbed?
                contextWasPresent = True
                if previouslyInContext: # Previous context was a scrubbed context, so merge them
                    mergedContextsCount += 1
                    result = result[ : -2] # Remove recent: </context>\n<context>
                    continue # ...and skip this input line
                else:
                    previouslyInContext = True
            else:
                previouslyInContext = False

        # Message de-duplicating
        if previouslyInContext and line.count(r"<message") == 1: # message in scrubbed context
            # Iterate through message
            messageLines = [line]
            for messageLine in lineIter:
                messageLines.append(messageLine)
                if messageLine.count(r"</message>") == 1: # message finished
                    break
            # Duplication check
            messageHash = hash(str(messageLines))
            if messageHash not in messageHashes:
                result = result + messageLines
                messageHashes.append(messageHash) # Append if not a duplicate
            else:
                removedDuplicatesCount += 1
            continue

        result.append(line)

    if not contextWasPresent:
        error = f"Context \"{scrubbedContext}\" was not found in {tsFilePath}"
        sys.exit(error)

    print (f"{tsFilePath}:")
    print (f" {removedDuplicatesCount} identical duplicate message(s) removed.")
    print (f" {mergedContextsCount} occurrence(s) of context \"{scrubbedContext}\" merged.")

    return result
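

# Lists the messages in the scrubbed context that still share an identical <source>
# after rewriting, printing each remaining translation with its file:linenumber so
# they can be resolved manually.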
def findDistinctDuplicates(input, scrubbedContext, tsFilePath):
    inContext = False
    inputLineNr = 0

    @dataclass
    class Translation:
        lineNr: int
        translationXml: []

    @dataclass
    class Source:
        sourceXml: str
        translations: []

    messages = {}
    lineIter = iter(input)
    for line in lineIter:
        inputLineNr += 1
        if line.count(r"</name>") == 1: # Any new context
            inContext = (line.count(scrubbedContext + r"</name>") == 1)
            continue
        if line.count(r"<message") == 0:
            continue
        if inContext:
            sourceXml = []
            lineNr = inputLineNr
            for sourceLine in lineIter: # <source>..</source> (possibly multi-line)
                inputLineNr += 1
                sourceXml.append(sourceLine)
                if sourceLine.count(r"</source>") == 1:
                    break
            sourceXmlHash = hash(str(sourceXml))
            translationXml = []
            for translationLine in lineIter: # <translation>..</translation> (possibly multi-line)
                inputLineNr += 1
                translationXml.append(translationLine)
                if translationLine.count(r"</translation>") == 1:
                    break
            translation = Translation(lineNr, translationXml)
            if sourceXmlHash in messages:
                messages[sourceXmlHash].translations.append(translation)
            else:
                messages[sourceXmlHash] = Source(sourceXml, [translation])

    for sourceId in messages:
        source = messages[sourceId]
        translationsCount = len(source.translations)
        if translationsCount > 1:
            print (f"\n{translationsCount} duplicates for source:")
            for sourceXmlLine in source.sourceXml:
                print (sourceXmlLine.rstrip())
            for translation in source.translations:
                print (f"\n{tsFilePath}:{translation.lineNr}")
                for translationXmlLine in translation.translationXml:
                    print (translationXmlLine.rstrip())
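

# Reads the .ts file, writes the scrubbed result back only if anything changed, and
# then reports the remaining same-source duplicates.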
def processTsFile(tsFilePath, scrubbedContext):
    with open(tsFilePath, 'r') as tsInputFile:
        lines = tsInputFile.readlines()

    result = rewriteLines(lines, scrubbedContext, tsFilePath)

    if lines != result:
        with open(tsFilePath, 'w') as tsOutputFile:
            for line in result:
                tsOutputFile.write(line)

    findDistinctDuplicates(result, scrubbedContext, tsFilePath)


def main():
    parser = argparse.ArgumentParser(
        description='''Rewrites a .ts file, removing identical duplicate messages of a specified
                       translation context and joining adjacent occurrences of that context.
                       Unlike lrelease and lconvert, this script does an exact comparison of the
                       whole <message/> xml tag when removing duplicates.
                       Subsequently, the remaining duplicate messages with identical source but
                       different translation are listed with filename:linenumber.''')
    parser.add_argument('tsfile',
                        help='The .ts file to be processed.',
                        type=pathlib.Path)
    parser.add_argument('-context',
                        help='Translation context to be scrubbed.',
                        required=True)

    args = parser.parse_args()
    processTsFile(args.tsfile, args.context)
    return 0


if __name__ == '__main__':
    sys.exit(main())