python-l20n-4.0.0a1/.gitignore

l20n.egg-info/
build/
dist/
*.pyc

python-l20n-4.0.0a1/.gitmodules

[submodule "tests/l20n-syntax-fixtures"]
    path = tests/l20n-syntax-fixtures
    url = https://github.com/l20n/l20n-syntax-fixtures.git

python-l20n-4.0.0a1/LICENSE

Copyright 2016 Mozilla Foundation

Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at

    http://www.apache.org/licenses/LICENSE-2.0

Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.

python-l20n-4.0.0a1/README.rst

L20n
====

Python library to support parsing and serializing files for localization
through the l20n localization infrastructure.

Learn more about l20n on https://l20n.org.
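A minimal usage sketch of the two entry points this package exposes,
FTLParser and FTLSerializer (defined in l20n/format/parser.py and
l20n/format/serializer.py below); the sample FTL string and entity name
are illustrative only.

    from l20n.format.parser import FTLParser
    from l20n.format.serializer import FTLSerializer

    source = u'brand-name = Firefox\n'

    # parseResource returns a plain-dict AST plus a list of parse errors.
    ast, errors = FTLParser().parseResource(source)

    # serialize accepts the same dict form and renders it back to FTL text.
    print(FTLSerializer().serialize(ast))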

python-l20n-4.0.0a1/l20n/__init__.py

python-l20n-4.0.0a1/l20n/format/__init__.py

python-l20n-4.0.0a1/l20n/format/ast.py

import json


def attr2json(attr):
    if isinstance(attr, Node):
        return attr.toJSON()
    elif isinstance(attr, list):
        return [attr2json(i) for i in attr]
    else:
        return attr


class Node(object):
    def __init__(self):
        self.type = self.__class__.__name__

    def toJSON(self):
        fields = {}
        for key in vars(self):
            attr = getattr(self, key)
            fields[key] = attr2json(attr)
        return fields

    def __str__(self):
        return json.dumps(self.toJSON())

    def setPosition(self, start, end):
        self._pos = {
            "start": start,
            "end": end
        }


class Resource(Node):
    def __init__(self, body=None, comment=None):
        super(Resource, self).__init__()
        self.body = body or []
        self.comment = comment


class Entry(Node):
    def __init__(self):
        super(Entry, self).__init__()


class Identifier(Node):
    def __init__(self, name):
        super(Identifier, self).__init__()
        self.name = name


class Section(Node):
    def __init__(self, key, body=None, comment=None):
        super(Section, self).__init__()
        self.key = key
        self.body = body or []
        self.comment = comment


class Pattern(Node):
    def __init__(self, source, elements):
        super(Pattern, self).__init__()
        self.source = source
        self.elements = elements


class Member(Node):
    def __init__(self, key, value, default=False):
        super(Member, self).__init__()
        self.key = key
        self.value = value
        self.default = default


class Entity(Entry):
    def __init__(self, id, value=None, traits=None, comment=None):
        super(Entity, self).__init__()
        self.id = id
        self.value = value
        self.traits = traits or []
        self.comment = comment


class Placeable(Node):
    def __init__(self, expressions):
        super(Placeable, self).__init__()
        self.expressions = expressions


class SelectExpression(Node):
    def __init__(self, expression, variants=None):
        super(SelectExpression, self).__init__()
        self.expression = expression
        self.variants = variants


class MemberExpression(Node):
    def __init__(self, obj, keyword):
        super(MemberExpression, self).__init__()
        self.object = obj
        self.keyword = keyword


class CallExpression(Node):
    def __init__(self, callee, args):
        super(CallExpression, self).__init__()
        self.callee = callee
        self.args = args


class ExternalArgument(Node):
    def __init__(self, name):
        super(ExternalArgument, self).__init__()
        self.name = name


class KeyValueArg(Node):
    def __init__(self, name, value):
        super(KeyValueArg, self).__init__()
        self.name = name
        self.value = value


class EntityReference(Identifier):
    def __init__(self, name):
        super(EntityReference, self).__init__(name)


class FunctionReference(Identifier):
    def __init__(self, name):
        super(FunctionReference, self).__init__(name)


class Keyword(Identifier):
    def __init__(self, name, namespace=None):
        super(Keyword, self).__init__(name)
        self.namespace = namespace


class Number(Node):
    def __init__(self, value):
        super(Number, self).__init__()
        self.value = value


class TextElement(Node):
    def __init__(self, value):
        super(TextElement, self).__init__()
        self.value = value


class Comment(Node):
    def __init__(self, content):
        super(Comment, self).__init__()
        self.content = content


class JunkEntry(Node):
    def __init__(self, content):
        super(JunkEntry, self).__init__()
        self.content = content
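A short, made-up illustration of how these node classes serialize: every
attribute assigned in __init__ (including the type name) ends up in the dict
returned by toJSON(), and __str__ json-dumps that dict; this is the same dict
shape the parser below hands back to callers.

    from l20n.format import ast

    pattern = ast.Pattern('Hello', [ast.TextElement('Hello')])
    entity = ast.Entity(ast.Identifier('greeting'), pattern)

    # Prints a JSON object holding the node's 'type', 'id', 'value',
    # 'traits' and 'comment' fields.
    print(entity)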

python-l20n-4.0.0a1/l20n/format/parser.py

from . import ast


class L10nError(Exception):
    def __init__(self, message, pos=None, context=None):
        self.name = 'L10nError'
        self.message = message
        self.pos = pos
        self.context = context


MAX_PLACEABLES = 100


class ParseContext():
    def __init__(self, string):
        self._source = string
        self._index = 0
        self._length = len(string)
        self._lastGoodEntryEnd = 0

    def _isIdentifierStart(self, cc):
        return (cc >= 97 and cc <= 122) or \
               (cc >= 65 and cc <= 90) or \
               cc == 95

    def _getch(self, pos=None, offset=0):
        if pos is None:
            pos = self._index
        if pos + offset >= self._length:
            return ''
        return self._source[pos + offset]

    def _getcc(self, pos=None, offset=0):
        if pos is None:
            pos = self._index
        if pos + offset >= self._length:
            return -1
        return ord(self._source[pos + offset])

    def getResource(self):
        resource = ast.Resource()
        errors = []
        comment = None

        section = resource.body

        if self._getch() == '#':
            comment = self.getComment()

            cc = self._getcc()
            if not self._isIdentifierStart(cc):
                resource.comment = comment
                comment = None

        self.getWS()

        while self._index < self._length:
            try:
                entry = self.getEntry(comment)
                if entry.type == 'Section':
                    resource.body.append(entry)
                    section = entry.body
                else:
                    section.append(entry)
                self._lastGoodEntryEnd = self._index
                comment = None
            except L10nError as e:
                errors.append(e)
                section.append(self.getJunkEntry())

            self.getWS()

        return [resource, errors]

    def getEntry(self, comment=None):
        if self._index != 0 and \
                self._source[self._index - 1] != '\n':
            raise self.error('Expected new line and a new entry')

        if comment is None and self._getch() == '#':
            comment = self.getComment()

        self.getLineWS()

        if self._getch() == '[':
            return self.getSection(comment)

        if self._index < self._length and \
                self._getch() != '\n':
            return self.getEntity(comment)
        return comment

    def getSection(self, comment=None):
        self._index += 1
        if self._getch() != '[':
            raise self.error('Expected "[[" to open a section')

        self._index += 1

        self.getLineWS()

        key = self.getKeyword()

        self.getLineWS()

        if self._getch() != ']' or \
                self._getch(None, 1) != ']':
            raise self.error('Expected "]]" to close a section')

        self._index += 2

        return ast.Section(key, [], comment)

    def getEntity(self, comment=None):
        id = self.getIdentifier()

        members = []
        value = None

        self.getLineWS()

        ch = self._getch()

        if ch != '=':
            raise self.error('Expected "=" after Entity ID')
        self._index += 1
        ch = self._getch()

        self.getLineWS()

        value = self.getPattern()

        ch = self._getch()

        if ch == '\n':
            self._index += 1
            self.getLineWS()
            ch = self._getch()

        if (ch == '[' and self._source[self._index + 1] != '[') or \
                ch == '*':
            members = self.getMembers()
        elif value is None:
            raise self.error('Expected a value (like: " = value") or a ' +
                             'trait (like: "[key] value")')

        return ast.Entity(id, value, members, comment)

    def getWS(self):
        cc = self._getcc()

        while cc == 32 or cc == 10 or cc == 0 or cc == 13:
            self._index += 1
            cc = self._getcc()

    def getLineWS(self):
        cc = self._getcc()

        while cc == 32 or cc == 9:
            self._index += 1
            cc = self._getcc()

    def getIdentifier(self):
        name = ''
        start = self._index
        cc = self._getcc()

        if self._isIdentifierStart(cc):
            self._index += 1
            cc = self._getcc()
        elif len(name) == 0:
            raise self.error(
                'Expected an identifier (starting with [a-zA-Z_])')

        while (cc >= 97 and cc <= 122) or \
              (cc >= 65 and cc <= 90) or \
              (cc >= 48 and cc <= 57) or \
              cc == 95 or cc == 45:
            self._index += 1
            cc = self._getcc()

        name += self._source[start:self._index]

        return ast.Identifier(name)

    def getKeyword(self):
        name = ''
        namespace = self.getIdentifier().name

        if self._getch() == '/':
            self._index += 1
        elif namespace:
            name = namespace
            namespace = None

        start = self._index
        cc = self._getcc()

        if self._isIdentifierStart(cc):
            self._index += 1
            cc = self._getcc()
        elif len(name) == 0:
            raise self.error(
                'Expected an identifier (starting with [a-zA-Z_])')

        while (cc >= 97 and cc <= 122) or \
              (cc >= 65 and cc <= 90) or \
              (cc >= 48 and cc <= 57) or \
              cc == 95 or cc == 45 or cc == 32:
            self._index += 1
            cc = self._getcc()

        name += self._source[start:self._index].rstrip()

        return ast.Keyword(name, namespace)

    def getPattern(self):
        buffer = ''
        source = ''
        content = []
        quoteDelimited = None
        firstLine = True

        ch = self._getch()

        if ch == '\\':
            ch2 = self._getch(None, 1)
            if ch2 == '"' or ch2 == '{' or ch2 == '\\':
                self._index += 1
                buffer += self._getch()
                self._index += 1
                ch = self._getch()
        elif ch == '"':
            quoteDelimited = True
            self._index += 1
            ch = self._getch()

        while self._index < self._length:
            if ch == '\n':
                if quoteDelimited:
                    raise self.error('Unclosed string')
                self._index += 1
                self.getLineWS()
                if self._getch() != '|':
                    break
                if firstLine and len(buffer):
                    raise self.error('Multiline string should have the ID ' +
                                     'empty')
                firstLine = False
                self._index += 1
                if self._getch() == ' ':
                    self._index += 1
                if len(buffer):
                    buffer += '\n'
                ch = self._getch()
                continue
            elif ch == '\\':
                ch2 = self._getch(None, 1)
                if (quoteDelimited and ch2 == '"') or ch2 == '{':
                    ch = ch2
                    self._index += 1
            elif quoteDelimited and ch == '"':
                self._index += 1
                quoteDelimited = False
                break
            elif ch == '{':
                if len(buffer):
                    content.append(ast.TextElement(buffer))
                source += buffer
                buffer = ''
                start = self._index
                content.append(self.getPlaceable())
                source += self._source[start:self._index]
                ch = self._getch()
                continue

            if ch:
                buffer += ch
            self._index += 1
            ch = self._getch()

        if quoteDelimited:
            raise self.error('Unclosed string')

        if len(buffer):
            source += buffer
            content.append(ast.TextElement(buffer))

        if len(content) == 0:
            if quoteDelimited is not None:
                content.append(ast.TextElement(source))
            else:
                return None

        pattern = ast.Pattern(source, content)
        pattern._quoteDelim = quoteDelimited is not None
        return pattern

    def getPlaceable(self):
        self._index += 1

        expressions = []

        self.getLineWS()

        while self._index < self._length:
            start = self._index
            try:
                expressions.append(self.getPlaceableExpression())
            except L10nError as e:
                raise self.error(e.description, start)
            self.getWS()
            if self._getch() == '}':
                self._index += 1
                break
            elif self._getch() == ',':
                self._index += 1
                self.getWS()
            else:
                raise self.error('Expected "}" or ","')

        return ast.Placeable(expressions)

    def getPlaceableExpression(self):
        selector = self.getCallExpression()
        members = None

        self.getWS()

        if self._getch() != '}' and \
                self._getch() != ',':
            if self._getch() != '-' or \
                    self._getch(None, 1) != '>':
                raise self.error('Expected "}", "," or "->"')
            self._index += 2

            self.getLineWS()

            if self._getch() != '\n':
                raise self.error('Members should be listed in a new line')

            self.getWS()

            members = self.getMembers()

            if len(members) == 0:
                raise self.error('Expected members for the select expression')

        if members is None:
            return selector
        return ast.SelectExpression(selector, members)

    def getCallExpression(self):
        exp = self.getMemberExpression()

        if self._getch() != '(':
            return exp

        self._index += 1

        args = self.getCallArgs()

        self._index += 1

        if isinstance(exp, ast.EntityReference):
            exp = ast.FunctionReference(exp.name)

        return ast.CallExpression(exp, args)

    def getCallArgs(self):
        args = []

        if self._getch() == ')':
            return args

        while self._index < self._length:
            self.getLineWS()

            exp = self.getCallExpression()

            if not isinstance(exp, ast.EntityReference):
                args.append(exp)
            else:
                self.getLineWS()

                if self._getch() == ':':
                    self._index += 1
                    self.getLineWS()

                    val = self.getCallExpression()

                    if isinstance(val, ast.EntityReference) or \
                            isinstance(val, ast.MemberExpression):
                        self._index = \
                            self._source.rfind('=', 0, self._index) + 1
                        raise self.error('Expected string in quotes')

                    args.append(ast.KeyValueArg(exp.name, val))
                else:
                    args.append(exp)

            self.getLineWS()

            if self._getch() == ')':
                break
            elif self._getch() == ',':
                self._index += 1
            else:
                raise self.error('Expected "," or ")"')

        return args

    def getNumber(self):
        num = ''
        cc = self._getcc()

        if cc == 45:
            num += '-'
            self._index += 1
            cc = self._getcc()

        if cc < 48 or cc > 57:
            raise self.error('Unknown literal "' + num + '"')

        while cc >= 48 and cc <= 57:
            num += self._source[self._index]
            self._index += 1
            cc = self._getcc()

        if cc == 46:
            num += self._getch()
            self._index += 1
            cc = self._getcc()

            if cc < 48 or cc > 57:
                raise self.error('Unknown literal "' + num + '"')

            while cc >= 48 and cc <= 57:
                num += self._source[self._index]
                self._index += 1
                cc = self._getcc()

        return ast.Number(num)

    def getMemberExpression(self):
        exp = self.getLiteral()

        while self._getch() == '[':
            keyword = self.getMemberKey()
            exp = ast.MemberExpression(exp, keyword)

        return exp

    def getMembers(self):
        members = []

        while self._index < self._length:
            if (self._getch() != '[' or self._getch(None, 1) == '[') and \
                    self._getch() != '*':
                break

            default = False

            if self._getch() == '*':
                self._index += 1
                default = True

            if self._getch() != '[':
                raise self.error('Expected "["')

            key = self.getMemberKey()

            self.getLineWS()

            value = self.getPattern()

            member = ast.Member(key, value, default)
            members.append(member)

            self.getWS()

        return members

    def getMemberKey(self):
        self._index += 1

        cc = self._getcc()
        literal = None
        if (cc >= 48 and cc <= 57) or cc == 45:
            literal = self.getNumber()
        else:
            literal = self.getKeyword()

        if self._getch() != ']':
            raise self.error('Expected "]"')

        self._index += 1
        return literal

    def getLiteral(self):
        cc = self._getcc()
        if (cc >= 48 and cc <= 57) or cc == 45:
            return self.getNumber()
        elif cc == 34:
            return self.getPattern()
        elif cc == 36:
            self._index += 1
            name = self.getIdentifier().name
            return ast.ExternalArgument(name)

        name = self.getIdentifier().name
        return ast.EntityReference(name)

    def getComment(self):
        self._index += 1
        if self._getch() == ' ':
            self._index += 1

        content = ''

        eol = self._source.find('\n', self._index)

        content += self._source[self._index:eol]

        while eol != -1 and self._getch(eol + 1) == '#':
            self._index = eol + 2

            if self._getch() == ' ':
                self._index += 1

            eol = self._source.find('\n', self._index)

            if eol == -1:
                break

            content += '\n' + self._source[self._index:eol]

        if eol == -1:
            self._index = self._length
        else:
            self._index = eol + 1

        return ast.Comment(content)

    def error(self, message, start=None):
        pos = self._index

        if start is None:
            start = pos

        start = self._findEntityStart(start)

        context = self._source[start: pos + 10]

        msg = '\n\n ' + message + '\nat pos ' + str(pos) + \
            ':\n------\n...' + context + '\n------'
        err = L10nError(msg)

        # row = len(self._source[0:pos].split('\n'))
        # col = pos - self._source.rfind('\n', 0, pos - 1)
        # err._pos = {start: pos, end: None, col: col, row: row}
        err.offset = pos - start
        err.description = message
        err.context = context
        return err

    def getJunkEntry(self):
        pos = self._index

        nextEntity = self._findNextEntryStart(pos)

        if nextEntity == -1:
            nextEntity = self._length

        self._index = nextEntity

        entityStart = self._findEntityStart(pos)

        if entityStart < self._lastGoodEntryEnd:
            entityStart = self._lastGoodEntryEnd

        junk = ast.JunkEntry(self._source[entityStart:nextEntity])

        return junk

    def _findEntityStart(self, pos):
        start = pos

        while True:
            end = start - 2
            if end < 0:
                end = 0

            start = self._source.rfind('\n', 0, end)

            if start == -1 or start == 0:
                start = 0
                break

            cc = self._getcc(start + 1)

            if self._isIdentifierStart(cc):
                start += 1
                break

        return start

    def _findNextEntryStart(self, pos):
        start = pos

        while True:
            if start == 0 or self._getch(start - 1) == '\n':
                cc = self._getcc(start)

                if self._isIdentifierStart(cc) or cc == 35 or cc == 91:
                    break

            start = self._source.find('\n', start)

            if start == -1:
                break

            start += 1

        return start


class FTLParser():
    def parseResource(self, string):
        parseContext = ParseContext(string)
        [ast, errors] = parseContext.getResource()
        return [ast.toJSON(), errors]

python-l20n-4.0.0a1/l20n/format/serializer.py

class FTLSerializer():
    def serialize(self, ast):
        body = ast['body']
        comment = ast['comment']
        string = u''
        if comment is not None:
            string += self.dumpComment(comment) + u'\n\n'
        for entry in body:
            string += self.dumpEntry(entry)
        return string

    def dumpEntry(self, entry):
        if entry['type'] == 'Entity':
            return self.dumpEntity(entry) + u'\n'
        elif entry['type'] == 'Comment':
            return self.dumpComment(entry) + u'\n\n'
        elif entry['type'] == 'Section':
            return self.dumpSection(entry) + u'\n'
        elif entry['type'] == 'JunkEntry':
            return u''
        else:
            print(entry)
            raise Exception('Unknown entry type.')
        return u''

    def dumpEntity(self, entity):
        str = u''

        if entity['comment']:
            str += u'\n' + self.dumpComment(entity['comment']) + u'\n'

        id = self.dumpIdentifier(entity['id'])
        value = self.dumpPattern(entity['value'])

        if len(entity['traits']):
            traits = self.dumpMembers(entity['traits'], 2)
            str += u'{} = {}\n{}'.format(id, value, traits)
        else:
            str += u'{} = {}'.format(id, value)
        return str

    def dumpComment(self, comment):
        return u'# {}'.format(comment['content'].replace('\n', u'\n# '))

    def dumpSection(self, section):
        comment = u'{}\n'.format(self.dumpComment(
            section['comment'])) if section['comment'] else u''
        sec = self.dumpKeyword(section['key'])
        str = u'\n\n{}[[ {} ]]\n\n'.format(comment, sec)

        for entry in section['body']:
            str += self.dumpEntry(entry)

        return str

    def dumpIdentifier(self, id):
        return id['name']

    def dumpKeyword(self, kw):
        if kw['namespace']:
            return u'{}/{}'.format(kw['namespace'], kw['name'])
        return kw['name']

    def dumpPattern(self, pattern):
        if pattern is None:
            return u''
        if pattern['_quoteDelim']:
            return u'"{}"'.format(pattern['source'])
        str = u''
        for elem in pattern['elements']:
            if elem['type'] == 'TextElement':
                if '\n' in elem['value']:
                    str += u'\n | {}'.format(
                        elem['value'].replace('\n', '\n | '))
                else:
                    str += elem['value']
            elif elem['type'] == 'Placeable':
                str += self.dumpPlaceable(elem)
        return str

    def dumpPlaceable(self, placeable):
        source = u', '.join(map(self.dumpExpression,
                                placeable['expressions']))

        if source.endswith('\n'):
            return u'{{ {}}}'.format(source)
        return u'{{ {} }}'.format(source)

    def dumpExpression(self, exp):
        if exp['type'] == 'Identifier' or \
           exp['type'] == 'FunctionReference' or \
           exp['type'] == 'EntityReference':
            return self.dumpIdentifier(exp)
        if exp['type'] == 'ExternalArgument':
            return u'${}'.format(self.dumpIdentifier(exp))
        elif exp['type'] == 'SelectExpression':
            sel = self.dumpExpression(exp['expression'])
            variants = self.dumpMembers(exp['variants'], 2)
            return u'{} ->\n{}\n'.format(sel, variants)
        elif exp['type'] == 'CallExpression':
            id = self.dumpExpression(exp['callee'])
            args = self.dumpCallArgs(exp['args'])
            return u'{}({})'.format(id, args)
        elif exp['type'] == 'Pattern':
            return self.dumpPattern(exp)
        elif exp['type'] == 'Number':
            return exp['value']
        elif exp['type'] == 'Keyword':
            return self.dumpKeyword(exp)
        elif exp['type'] == 'MemberExpression':
            obj = self.dumpExpression(exp['object'])
            key = self.dumpExpression(exp['keyword'])
            return u'{}[{}]'.format(obj, key)

    def dumpCallArgs(self, args):
        return u', '.join(map(
            lambda arg: u'{}: {}'.format(arg['name'],
                                         self.dumpExpression(arg['value']))
            if arg['type'] == 'KeyValueArg' else self.dumpExpression(arg),
            args))

    def dumpMembers(self, members, indent):
        return u'\n'.join(map(lambda member: u'{}[{}] {}'.format(
            u' ' * (indent - 1) + u'*' if member['default'] else u' ' * indent,
            self.dumpExpression(member['key']),
            self.dumpPattern(member['value'])
        ), members))

python-l20n-4.0.0a1/setup.cfg

[bdist_wheel]
# This flag says that the code is written to work on both Python 2 and Python
# 3.
universal=1

python-l20n-4.0.0a1/setup.py

#!/usr/bin/env python

from setuptools import setup

setup(name='l20n',
      version='4.0.0a1',
      description='Python L20n library',
      author='Zibi Braniecki',
      author_email='gandalf@mozilla.com',
      license='APL 2',
      url='https://github.com/l20n/python-l20n',
      classifiers=[
          'Development Status :: 3 - Alpha',
          'Intended Audience :: Developers',
          'License :: OSI Approved :: Apache Software License',
          'Programming Language :: Python :: 2.6',
          'Programming Language :: Python :: 2.7',
          'Programming Language :: Python :: 3.5',
      ],
      packages=['l20n', 'l20n.format'],
      install_requires=[
          'six'
      ]
      )

python-l20n-4.0.0a1/tests/__init__.py

python-l20n-4.0.0a1/tests/l20n-syntax-fixtures/

python-l20n-4.0.0a1/tests/test_format.py

import unittest
import os
import codecs
import json

from six import with_metaclass

from l20n.format.parser import FTLParser
from l20n.format.serializer import FTLSerializer

fixtures = os.path.join(
    os.path.dirname(__file__), 'l20n-syntax-fixtures', 'parser', 'ftl')


def read_file(path):
    with codecs.open(path, 'r', encoding='utf-8') as file:
        text = file.read()
    return text


class TestMeta(type):
    '''Metaclass to add test discovery'''
    def __new__(mcs, name, bases, dict):

        def gen_test(ftl):
            def test(self):
                self.process(ftl)
            return test

        for f in os.listdir(fixtures):
            ftl, ext = os.path.splitext(f)
            if ext != '.ftl':
                continue
            test_name = 'test_%s' % ftl.replace('-', '_')
            dict[test_name] = gen_test(ftl)

        return type.__new__(mcs, name, bases, dict)


class L20nParserTestCase(with_metaclass(TestMeta, unittest.TestCase)):

    def setUp(self):
        self.parser = FTLParser()
        self.maxDiff = None

    def process(self, ftl):
        ftlPath = os.path.join(fixtures, ftl + '.ftl')
        jsonPath = os.path.join(fixtures, ftl + '.ast.json')

        source = read_file(ftlPath)
        jsonSource = read_file(jsonPath)

        [ast, errors] = self.parser.parseResource(source)
        refAST = json.loads(jsonSource)

        self.assertEqual(ast, refAST, 'Error in fixture: ' + ftl)


class L20nSerializerTestCase(L20nParserTestCase):

    def setUp(self):
        L20nParserTestCase.setUp(self)
        self.serializer = FTLSerializer()

    def process(self, ftl):
        if 'error' in ftl:
            self.skipTest('Error tests not run in Serializer')

        ftlPath = os.path.join(fixtures, ftl + '.ftl')
        source = read_file(ftlPath)

        [ast, errors] = self.parser.parseResource(source)
        out = self.serializer.serialize(ast)

        [ast2, errors] = self.parser.parseResource(out)

        self.assertEqual(ast['body'], ast2['body'], 'Error in fixture: ' + ftl)


if __name__ == '__main__':
    unittest.main()

python-l20n-4.0.0a1/tools/compare-prop-lol.py

#!/usr/bin/python
import sys
import codecs
import mozilla.format.properties.parser
import l20n.format.lol.parser
import l20n.format.lol.ast
from collections import OrderedDict


def read_file(path):
    with codecs.open(path, 'r', encoding='utf-8') as file:
        text = file.read()
    return text


def update_id(id):
    return id.replace('-', '_')


def update_prop(prop):
    newprop = {}
    for i in prop.keys():
        id = update_id(i)
        val = prop[i]['value']
        if id.find('.') != -1:
            id, attr = id.split('.')
            if id not in newprop:
                newprop[id] = {'id': id, 'value': None, 'attrs': {}}
            newprop[id]['attrs'][attr] = val
        else:
            if id not in newprop:
                newprop[id] = {'id': id, 'value': None, 'attrs': {}}
            newprop[id]['value'] = val
    return newprop


def compare(path1, path2):
    diff = {
        'obsolete': [],
        'missing': [],
        'modified': []
    }
    prop_source = read_file(path1)
    lol_source = read_file(path2)

    prop_parser = mozilla.format.properties.parser.Parser()
    prop = prop_parser.parse_to_entitylist(prop_source)
    prop = update_prop(prop)

    lol_parser = l20n.format.lol.parser.Parser()
    lol = lol_parser.parse(lol_source)

    lol_entities = OrderedDict()
    for i in lol.body:
        if isinstance(i, l20n.format.lol.ast.Entity):
            lol_entities[i.id.name] = i

    for i in prop.keys():
        if i not in lol_entities:
            diff['missing'].append(prop[i])
        else:
            val = lol_entities[i].value
            if val is not None:
                val = str(val)
            if prop[i]['value'] != val:
                ediff = {
                    'id': i,
                    'value': [prop[i]['value'], val],
                    'attrs': OrderedDict()
                }
                diff['modified'].append(ediff)

    for i in lol_entities.keys():
        if i not in prop:
            diff['obsolete'].append(lol_entities[i])

    return diff


if __name__ == "__main__":
    diff = compare(sys.argv[1], sys.argv[2])
    if diff['missing']:
        print('missing:')
        for i in diff['missing']:
            print(' %s' % i['id'])
    if diff['obsolete']:
        print('obsolete:')
        for i in diff['obsolete']:
            print(' %s' % i.id)
    if diff['modified']:
        print('modified:')
        for i in diff['modified']:
            print(' %s - {"%s" -> "%s"}' % (i['id'],
                                            i['value'][0],
                                            i['value'][1]))

python-l20n-4.0.0a1/tools/compiler.py

import sys
import os

from l20n.compiler.js import Compiler
from l20n.format.lol.parser import Parser
from pyjs.serializer import Serializer


def read_file(path):
    with file(path) as f:
        return f.read()


def get_lol(path):
    s = read_file(path)
    parser = Parser()
    lol = parser.parse(s)
    return lol


def compile(path, output=None):
    lol = get_lol(path)
    compiler = Compiler()
    js = compiler.compile(lol)
    string = Serializer.dump_program(js)
    if output == 'console':
        print(string)
        return
    if output is None:
        output = os.path.splitext(path)[0]
    output = '%s.%s' % (output, 'j20n')
    f = open(output, mode='w')
    f.write(string)
    f.close()
    return


if __name__ == '__main__':
    if len(sys.argv) > 2:
        compile(sys.argv[1], sys.argv[2])
    else:
        compile(sys.argv[1])

python-l20n-4.0.0a1/tools/convert-gaia.py

#!/usr/bin/python
import re
import os
import codecs

import l20n.format.lol.ast as ast
import l20n.format.lol.serializer as serializer


def read_file(path):
    with codecs.open(path, 'r', encoding='utf-8') as file:
        text = file.read()
    return text


def write_file(path, s):
    f = codecs.open(path, encoding='utf_8', mode='w+')
    f.write(s)
    f.close()


class PropertiesConverter:
    patterns = {
        'entity': re.compile('([^=]+)=(.+)'),
        'locale': re.compile('\[([a-zA-Z\-]+)\]'),
        'comment': re.compile('#(.*)')
    }

    def __init__(self, s, locale=None):
        self.s = s.split('\n')
        self.lols = {}
        self._current_locale = locale
        if locale:
            self.lols[locale] = ast.LOL()

    def parse(self):
        for line in self.s:
            self.get_token(line)
        return self.lols

    def get_token(self, line):
        s = line.strip()
        if len(s) == 0:
            return
        if s[0] == '[':
            locale = self.get_locale(s)
            return locale
        if s[0] == '#':
            return self.get_comment(s)
        return self.get_entity(s)

    def get_locale(self, line):
        m = self.patterns['locale'].match(line)
        if m:
            locale = m.group(1)
            self.lols[locale] = ast.LOL()
            self._current_locale = locale
            #print('locale: %s' % locale)

    def get_comment(self, line):
        m = self.patterns['comment'].match(line)
        if m:
            comment = m.group(1)
            #print('comment: %s' % comment)
            c = ast.Comment(comment)
            self.lols[self._current_locale].body.append(c)

    def get_entity(self, line):
        m = self.patterns['entity'].match(line)
        if m:
            id = m.group(1).replace('-', '_')
            val = m.group(2)
            #print("entity %s = %s" % (id, val))
            id = ast.Identifier(id)
            entity = ast.Entity(id)
            entity.value = ast.String(val)
            self.lols[self._current_locale].body.append(entity)


def convert_file(path, res_path):
    f = read_file(path)
    pc = PropertiesConverter(f, locale='en-US')
    lols = pc.parse()
    ser = serializer.Serializer()
    for (loc, lol) in lols.items():
        s = ser.serialize(lol, default=True)
        write_file(res_path, s)


def convert(paths, app, source_locale):
    locales_path = os.path.join(paths['gaia'], 'apps', app, 'locales')
    source_locale_path = os.path.join(
        locales_path, '%s.%s.properties' % (app, source_locale))
    #convert_file(source_locale_path, os.path.join(locales_path, '%s.%s.lol' % (app, source_locale)))

    locales_path = os.path.join(paths['gaia'], 'shared', 'locales', 'branding')
    source_locale_path = os.path.join(locales_path, 'official',
                                      'branding.en-US.properties')
    convert_file(source_locale_path,
                 os.path.join(locales_path, 'branding.en-US.lol'))

    locales_path = os.path.join(paths['gaia'], 'shared', 'locales', 'date')
    source_locale_path = os.path.join(locales_path, 'date.en-US.properties')
    #convert_file(source_locale_path, os.path.join(locales_path, 'date.en-US.lol'))

    locales_path = os.path.join(paths['gaia'], 'shared', 'locales',
                                'permissions')
    source_locale_path = os.path.join(locales_path,
                                      'permissions.en-US.properties')
    convert_file(source_locale_path,
                 os.path.join(locales_path, 'permissions.en-US.lol'))

    locales_path = os.path.join(paths['gaia'], 'shared', 'locales', 'tz')
    source_locale_path = os.path.join(locales_path, 'tz.en-US.properties')
    convert_file(source_locale_path,
                 os.path.join(locales_path, 'tz.en-US.lol'))


if __name__ == '__main__':
    paths = {
        'gaia': '/Users/zbraniecki/projects/gaia',
    }
    app = 'settings'
    source_locale = 'en-US'
    convert(paths, app, source_locale)

python-l20n-4.0.0a1/tools/dump_lol.py

import argparse

from l20n.format.lol.parser import Parser
import pyast.dump.raw, pyast.dump.js


def read_file(filename, charset='utf-8', errors='strict'):
    with open(filename, 'rb') as f:
        return f.read().decode(charset, errors)


def dump_lol(path, t):
    source = read_file(path)
    p = Parser()
    lol = p.parse(source)
    if t == 'raw':
        print(pyast.dump.raw.dump(lol))
    else:
        print(pyast.dump.js.dump(lol))


if __name__ == '__main__':
    parser = argparse.ArgumentParser(
        description='Dump LOL\'s AST.',
        prog="dump_lol")
    parser.add_argument('path', type=str,
                        help='path to lol file')
    parser.add_argument('--type', '-t', type=str,
                        choices=('json', 'raw'),
                        default='raw',
                        help='output format')
    args = parser.parse_args()

    dump_lol(args.path, args.type)

python-l20n-4.0.0a1/tools/env/bootstrap-locale.py

import os
import l20n.format.lol.parser as parser
import l20n.format.lol.serializer as serializer
import l20n.format.lol.ast as ast
import codecs


def read_file(path):
    with file(path) as f:
        return f.read()


def write_file(path, s):
    f = codecs.open(path, encoding='utf_8', mode='w+')
    f.write(s)
    f.close()


def get_source_locale(path):
    #from dulwich.client import TCPGitClient
    #os.makedirs(path)
    #client = TCPGitClient(server_address, server_port)
    pass


repo_paths = {
    'mozilla': {
        'gaia': {
            ''
        }
    }
}

project = "mozilla/firefox"

ser = serializer.Serializer()


def bootstrap_lol(lol):
    elems = len(lol.body)
    i = 0
    while i /* sleep menu*/

python-l20n-4.0.0a1/tools/env/data/pl/homescreen.lol.orig

/* sleep menu*/

python-l20n-4.0.0a1/tools/env/data/pl/homescreen.lol.orig2

/* sleep menu*/

python-l20n-4.0.0a1/tools/env/data/pl/homescreen.lol2

/* sleep menu*/

python-l20n-4.0.0a1/tools/env/locale-status.py

import os
import codecs
import sys

import l20n.format.lol.parser as parser
import l20n.format.lol.ast as ast
import l10ndiff

if sys.version >= '3':
    basestring = str
    string = str
else:
    string = unicode


def read_file(path):
    with codecs.open(path, 'r', encoding='utf-8') as file:
        text = file.read()
    return text


def write_file(path, s):
    f = codecs.open(path, encoding='utf_8', mode='w+')
    f.write(s)
    f.close()


def reduce_complex_string(s):
    if isinstance(s, ast.ComplexString):
        return string(s)
    elif isinstance(s, ast.String):
        return s.content
    elif s is None:
        return s
    raise Exception('Unexpected value type: %r' % s)


def diff_entity(source, orig, trans):
    sval = reduce_complex_string(source)
    oval = reduce_complex_string(orig)
    trans = reduce_complex_string(trans)

    if oval is None:
        return 'added'
    if trans is None:
        return 'nottranslated'
    if sval is None:
        return 'removed'
    if not sval == oval:
        return 'outdated'
    return 'uptodate'


def get_entity_dict(lol):
    res = {}
    for entry in lol.body:
        if isinstance(entry, ast.Entity):
            if entry.value is None:
                res[entry.id.name] = None
            elif isinstance(entry.value, ast.String):
                res[entry.id.name] = entry.value
    return res


def do_stats(result):
    res = {
        'all': 0.0,
        'translated': 0.0,
        'outdated': 0.0,
        'obsolete': 0.0,
        'added': 0.0,
        'untranslated': 0.0
    }
    for t in result.keys():
        if t == 'nottranslated':
            res['untranslated'] += len(result[t])
        elif t == 'outdated':
            res['outdated'] += len(result[t])
        elif t == 'uptodate':
            res['translated'] += len(result[t])
        elif t == 'added':
            res['added'] += len(result[t])
        elif t == 'obsolete':
            res['obsolete'] += len(result[t])
        res['all'] += len(result[t])
    return res


def print_result(rname, result):
    print(' %s' % rname)
    for k in result.keys():
        if not len(result[k]):
            continue
        print(' %s:' % k)
        for i in result[k].keys():
            print(' %s' % i)
    stats = do_stats(result)
    print('\n===== stats =====')
    print('entities: %d' % stats['all'])
    print('translated: %d (%.1f%%), \
outdated: %d (%.1f%%), \
obsolete: %d (%.1f%%), \
missing: %d (%.1f%%), \
untranslated: %d (%.1f%%)' % (stats['translated'],
                              stats['translated']/stats['all']*100,
                              stats['outdated'],
                              stats['outdated']/stats['all']*100,
                              stats['obsolete'],
                              stats['obsolete']/stats['all']*100,
                              stats['added'],
                              stats['added']/stats['all']*100,
                              stats['untranslated'],
                              stats['untranslated']/stats['all']*100))


def locale_status():
    source_locale = 'en-US'
    locale = 'pl'
    module = 'homescreen'
    mpath = '/Users/zbraniecki/projects/mozilla/gaia/apps/homescreen'

    orig_file = read_file(os.path.join('data', locale, '%s.lol.orig' % module))
    trans_file = read_file(os.path.join('data', locale, '%s.lol' % module))
    source_file = read_file(os.path.join(mpath, 'locale',
                                         '%s.lol' % source_locale))

    result = {
        'nottranslated': {},
        'outdated': {},
        'obsolete': {},
        'added': {},
        'uptodate': {},
    }

    p = parser.Parser()
    orig_lol = p.parse(orig_file)
    trans_lol = p.parse(trans_file)
    source_lol = p.parse(source_file)

    l10ndiff.list(orig_lol, trans_lol)

    orig_dict = get_entity_dict(orig_lol)
    trans_dict = get_entity_dict(trans_lol)
    source_dict = get_entity_dict(source_lol)

    for k, entity in source_dict.items():
        res = diff_entity(source_dict[k],
                          orig_dict.get(k, None),
                          trans_dict.get(k, None))
        result[res][k] = entity

    for k, entity in orig_dict.items():
        if k not in source_dict:
            result['obsolete'][k] = entity

    print_result('homescreen', result)


if __name__ == '__main__':
    locale_status()

python-l20n-4.0.0a1/tools/env/test.py

import os
import codecs
from collections import OrderedDict

import l20n.format.lol.parser as parser
import l20n.format.lol.serializer as serializer
import l20n.format.lol.ast as ast
import l10ndiff


def read_file(path):
    with codecs.open(path, 'r', encoding='utf-8') as file:
        text = file.read()
    return text


def get_entity_dict(lol):
    res = OrderedDict()
    for entry in lol.body:
        if isinstance(entry, ast.Entity):
            res[entry.id.name] = entry
    return res


def compare_resources(*paths):
    pass


def update_locale():
    source_locale = 'en-US'
    locale = 'pl'
    module = 'homescreen'
    mpath = '/Users/zbraniecki/projects/mozilla/gaia/apps/homescreen'

    orig_file = read_file(os.path.join('data', locale, '%s.lol.orig' % module))
    trans_file = read_file(os.path.join('data', locale, '%s.lol' % module))
    source_file = read_file(os.path.join(mpath, 'locale',
                                         '%s.lol' % source_locale))

    result = {
        'nottranslated': {},
        'outdated': {},
        'obsolete': {},
        'added': {},
        'uptodate': {},
    }

    p = parser.Parser()
    orig_lol = p.parse(orig_file)
    trans_lol = p.parse(trans_file)
    source_lol = p.parse(source_file)

    orig_dict = get_entity_dict(orig_lol)
    trans_dict = get_entity_dict(trans_lol)
    source_dict = get_entity_dict(source_lol)

    ldiff = l10ndiff.lists(orig_dict, trans_dict, values=False)

    for key in ldiff:
        print('%s: %s' % (key, ldiff[key]))


def test():
    e1 = ast.Entity(id=ast.Identifier('foo'))
    e1.value = ast.Array([ast.String('c'), ast.String('b')])
    #e1.value = ast.String('faa2')
    val = ast.Array(content=[ast.String('f'), ast.String('g')])
    attr = ast.Attribute(key=ast.Identifier('l'), value=val)
    e1.attrs['l'] = attr

    e2 = ast.Entity(id=ast.Identifier('foo'))
    e2.value = ast.Array([ast.String('c'), ast.String('b')])
    #e2.value = ast.String('faa')
    val = ast.Array(content=[ast.String('p'), ast.String('g')])
    attr = ast.Attribute(key=ast.Identifier('l'), value=val)
    e2.attrs['l'] = attr

    e3 = ast.Entity(id=ast.Identifier('foo'))
    e3.value = ast.Array([ast.String('c'), ast.String('b')])
    #e3.value = ast.String('faa')
    val = ast.Array(content=[ast.String('f'), ast.String('g')])
    attr = ast.Attribute(key=ast.Identifier('l'), value=val)
    e3.attrs['l'] = attr

    ediff = l10ndiff.entities(e1, e2, e3)
    print(ediff)


if __name__ == '__main__':
    #test()
    update_locale()

python-l20n-4.0.0a1/tools/env/update-locale.py

import os
import codecs
from collections import OrderedDict

import l20n.format.lol.parser as parser
import l20n.format.lol.serializer as serializer
import l20n.format.lol.ast as ast
import pyast
import l10ndiff


def read_file(path):
    with codecs.open(path, 'r', encoding='utf-8') as file:
        text = file.read()
    return text

def write_file(path, s):
    f = codecs.open(path, encoding='utf_8', mode='w+')
    f.write(s)
    f.close()


def reduce_complex_string(s):
    if isinstance(s, ast.ComplexString):
        return unicode(s)
    elif isinstance(s, ast.String):
        return s.content
    elif s is None:
        return s
    raise Exception('Unexpected value type: %r' % s)


def add_entity(lol, k, value):
    id = ast.Identifier(k)
    entity = ast.Entity(id)
    entity.value = value
    lol.body.append(entity)


def remove_entity(lol, id):
    for n, elem in enumerate(lol.body):
        if isinstance(elem, ast.Entity):
            if elem.id.name == id:
                del lol.body[n]


def update_entity(lol, id, entity):
    pass


def get_entity_dict(lol):
    res = OrderedDict()
    for entry in lol.body:
        if isinstance(entry, ast.Entity):
            res[entry.id.name] = entry
    return res


def get_entity_pos(lol, eid):
    pos = -1
    i = -1
    for entry in lol.body:
        i += 1
        if isinstance(entry, ast.Entity):
            if entry.id.name == eid:
                pos = i
                break
    return pos


def locate_pos(lol, pos):
    after = get_entity_pos(lol, pos['after'])
    if after == -1:
        before = get_entity_pos(lol, pos['before'])
        return before
    return after + 1


def apply_ediff(lol, ediff):
    pass


def apply_ldiff(lol, ldiff, source=0, result=1):
    for key, hunk in ldiff.items():
        #print(key)
        #print(hunk)
        if 'added' in hunk['flags']:
            if hunk['elem'][source] is None:
                # inject new entity
                pos = locate_pos(lol, hunk['pos'])
                lol.body.insert(pos, hunk['elem'][result])
                #print(pos)
                pass
            if hunk['elem'][result] is None:
                # removing obsolete entity
                pos = get_entity_pos(lol, key)
                del lol.body[pos]
                del lol._template_body[pos]
        if 'present' in hunk['flags']:
            print(hunk)
            if 'value' in hunk['elem']:
                pos = get_entity_pos(lol, key)
                if lol.body[pos].value is None:
                    pass
                else:
                    lol.body[pos].value.content = \
                        hunk['elem']['value']['content'][result]
                    print(lol.body[pos].value.content)
    return


def update_locale():
    source_locale = 'en-US'
    locale = 'pl'
    module = 'homescreen'
    mpath = '/Users/zbraniecki/projects/mozilla/gaia/apps/homescreen'

    orig_file = read_file(os.path.join('data', locale, '%s.lol.orig' % module))
    trans_file = read_file(os.path.join('data', locale, '%s.lol' % module))
    source_file = read_file(os.path.join(mpath, 'locale',
                                         '%s.lol' % source_locale))

    result = {
        'nottranslated': {},
        'outdated': {},
        'obsolete': {},
        'added': {},
        'uptodate': {},
    }

    p = parser.Parser()
    s = serializer.Serializer()
    orig_lol = p.parse(orig_file)
    trans_lol = p.parse(trans_file)
    source_lol = p.parse(source_file)

    orig_dict = get_entity_dict(orig_lol)
    trans_dict = get_entity_dict(trans_lol)
    source_dict = get_entity_dict(source_lol)

    # deal with added/removed entities
    ldiff = l10ndiff.lists(trans_dict, source_dict, values=False)
    apply_ldiff(trans_lol, ldiff)

    # deal with modified entities
    ldiff = l10ndiff.lists(orig_dict, source_dict, values=True)
    ldiff2 = {}
    for key in ldiff:
        if 'present' in ldiff[key]['flags']:
            ldiff2[key] = ldiff[key]
        #print('%s: %s' % (key, ldiff2[key]))
    print('---')
    print(trans_lol)
    print('---')
    apply_ldiff(trans_lol, ldiff2)
    print('====')
    print(trans_lol)
    print('====')

    #new_trans_lol = s.serialize(trans_lol)
    #new_orig_lol = s.serialize(orig_lol)

    #write_file(os.path.join('data', locale, '%s.lol.orig2' % module), new_orig_lol)
    #write_file(os.path.join('data', locale, '%s.lol2' % module), new_trans_lol)

    #print_result('homescreen', result)


class Example(pyast.Node):
    seq = pyast.seq(pyast.re("[a-z]{2}"))


if __name__ == '__main__':
    e = Example(['foo'])
    #update_locale()

python-l20n-4.0.0a1/tools/extract.py

#!/usr/bin/env python2
import sys

from l20n.format.lol import ast
from l20n.format.lol.serializer import Serializer
from BeautifulSoup import BeautifulSoup

ATTRS = (
    'id',
    'data-l10n-id',
    'type',
    'class',
    'style',
    'href',
)

isloc = lambda attr: attr[0] not in ATTRS


def extract(node):
    id = ast.Identifier(node['data-l10n-id'])
    attrs2 = filter(isloc, node.attrs)
    attrs = {}
    for i in attrs2:
        attrs[i[0]] = ast.Attribute(ast.Identifier(i[0]),
                                    ast.String(i[1]))
    for child in node.findAll():
        child.attrs = filter(isloc, child.attrs)
    value = ast.String(node.renderContents())
    return id, value, attrs


if __name__ == '__main__':
    f = open(sys.argv[1], 'r')
    dom = BeautifulSoup(f)
    nodes = dom.findAll(True, {'data-l10n-id': True})
    lol = ast.LOL()
    for node in nodes:
        id, value, attrs = extract(node)
        entity = ast.Entity(id, None, value, attrs)
        lol.body.append(entity)
    print(Serializer.serialize(lol))

python-l20n-4.0.0a1/tools/inject_entity.py

from l20n.format.lol.parser import Parser
from l20n.format.lol.serializer import Serializer
import l20n.format.lol.ast as ast
import sys


def read_file(path):
    with file(path) as f:
        return f.read()


def get_lol(path):
    s = read_file(path)
    parser = Parser()
    lol = parser.parse(s)
    return lol


def inject(path):
    lol = get_lol(path)
    #lol = ast.LOL()
    #entity1 = ast.Entity(id=ast.Identifier('foo'), value=ast.String('flex'))
    #entity2 = ast.Entity(id=ast.Identifier('foo2'))
    #exp1 = ast.BinaryExpression(
    #    ast.BinaryOperator('+'),
    #    ast.ParenthesisExpression(ast.Literal(1)),
    #    ast.UnaryExpression(
    #        ast.UnaryOperator('!'),
    #        ast.Literal(2)
    #    )
    #)
    #macro1 = ast.Macro(id=ast.Identifier('foo3'), args=[ast.Identifier('a'),
    #                                                    ast.Identifier('b')],
    #                   expression=exp1)
    #com1 = ast.Comment("foo")
    #lol.body.append(entity1)
    #lol.body.append(com1)
    #lol.body.append(entity2)
    #lol.body.append(macro1)
    serializer = Serializer()
    string = serializer.serialize(lol)
    print(string)


if __name__ == '__main__':
    inject(sys.argv[1])

python-l20n-4.0.0a1/tools/lol-status.py

from l20n.format.lol.parser import Parser
import l20n.format.lol.ast as ast
import sys
from collections import OrderedDict


def read_file(path):
    with file(path) as f:
        return f.read()


def get_lol(path):
    s = read_file(path)
    parser = Parser()
    lol = parser.parse(s)
    return lol


def get_entities(lol):
    entities = OrderedDict()
    for elem in lol.body:
        if isinstance(elem, ast.Entity):
            entities[elem.id.name] = elem
    return entities


def compare_values(value1, value2):
    if type(value1) != type(value2):
        return False
    if not value1:
        if not value2:
            return True
        return False
    if isinstance(value1, ast.String):
        if value1.content == value2.content:
            return True
        return False
    if isinstance(value1, ast.Array):
        if len(value1.content) != len(value2.content):
            return False
        for k, v in enumerate(value1.content):
            if not compare_values(value1.content[k], value2.content[k]):
                return False
        return True
        if value1.content == value2.content:
            return True
        return False
    if isinstance(value1, ast.Hash):
        if len(value1.content) != len(value2.content):
            return False
        for k, v in enumerate(value1.content):
            if not compare_values(v.value, value2.content[k].value):
                return False
        return True


def compare_entities(entity1, entity2):
    value1 = entity1.value
    value2 = entity2.value
    return compare_values(value1, value2)


def get_status(path):
    summary = {}
    lol = get_lol(path)
    summary['entities'] = 0
    summary['macros'] = 0
    summary['ids'] = set()
    for i in lol.body:
        if isinstance(i, ast.Entity):
            summary['entities'] += 1
            summary['ids'].add(i.id.name)
        elif isinstance(i, ast.Macro):
            summary['macros'] += 1
            summary['ids'].add(i.id.name)
    return summary


def get_status_against_source(path, path2):
    summary = {}
    lol = get_lol(path)    # l10n file
    lol2 = get_lol(path2)  # new source
    entities1 = get_entities(lol)
    entities2 = get_entities(lol2)
    summary['entities'] = {'missing': [], 'obsolete': []}
    for key in entities1.keys():
        if key not in entities2:
            summary['entities']['obsolete'].append(key)
        else:
            del entities2[key]
    for key in entities2:
        summary['entities']['missing'].append(key)
    return summary


def get_status_against_two_sources(path, path2, path3):
    summary = {}
    lol = get_lol(path)    # l10n file
    lol2 = get_lol(path2)  # new source
    lol3 = get_lol(path3)  # old source
    entities1 = get_entities(lol)
    entities2 = get_entities(lol2)
    entities3 = get_entities(lol3)
    summary['entities'] = {'missing': [], 'obsolete': [], 'modified': []}
    for key in entities2.keys():
        if key in entities3:
            if not compare_entities(entities2[key], entities3[key]):
                summary['entities']['modified'].append(key)
    for key in entities1.keys():
        if key not in entities2:
            summary['entities']['obsolete'].append(key)
        else:
            del entities2[key]
    for key in entities2:
        summary['entities']['missing'].append(key)
    return summary


if __name__ == '__main__':
    if len(sys.argv) < 2:
        print('Please provide at least the path to LOL file')
    elif len(sys.argv) < 3:
        summary = get_status(sys.argv[1])
        print(summary)
    elif len(sys.argv) < 4:
        summary = get_status_against_source(sys.argv[1], sys.argv[2])
        print(summary)
    else:
        summary = get_status_against_two_sources(sys.argv[1],
                                                 sys.argv[2],
                                                 sys.argv[3])
        print(summary)

python-l20n-4.0.0a1/tools/parse.py

#!/usr/bin/python
import sys
sys.path.append('./')

import codecs
import ftl.format.parser
import ftl.format.ast
import json


def read_file(path):
    with codecs.open(path, 'r', encoding='utf-8') as file:
        text = file.read()
    return text


def print_ast(fileType, data):
    ftlParser = ftl.format.parser.FTLParser()
    [ast, errors] = ftlParser.parseResource(data)

    print(json.dumps(ast, indent=2, ensure_ascii=False))
    print('Errors:')
    for error in errors:
        print(error.message)


if __name__ == "__main__":
    file_type = 'ftl'
    f = read_file(sys.argv[1])
    print_ast(file_type, f)

python-l20n-4.0.0a1/tools/serialize.py

#!/usr/bin/python
# coding=utf-8

import sys, os
sys.path.append('./')

import codecs
import ftl.format.parser
import ftl.format.serializer
import json


def read_file(path):
    with codecs.open(path, 'r', encoding='utf-8') as file:
        text = file.read()
    return text


def print_l20n(fileType, data):
    l20nSerializer = ftl.format.serializer.FTLSerializer()

    result = None
    if fileType == 'json':
        result = l20nSerializer.serialize(json.loads(data))
    elif fileType == 'ftl':
        #print('----- ORIGINAL -----')
        #print(data)
        l20nParser = ftl.format.parser.FTLParser()
        #print('----- AST -----')
        [ast, errors] = l20nParser.parseResource(data)
        #print(json.dumps(ast, indent=2, ensure_ascii=False))
        #print('--------------------')
        result = l20nSerializer.serialize(ast)

    print(result.encode('utf-8'))


if __name__ == "__main__":
    fileName, fileExtension = os.path.splitext(sys.argv[1])
    f = read_file(sys.argv[1])
    print_l20n(fileExtension[1:], f)

python-l20n-4.0.0a1/tools/validate.py

from l20n.format.lol.parser import Parser, ParserError
import l20n.format.lol.ast as ast
import pyast
import sys


def read_file(path):
    with file(path) as f:
        return f.read()


def get_lol(path):
    s = read_file(path)
    parser = Parser()
    lol = parser.parse(s)
    print(pyast.dump(lol))
    return lol


def validate(path):
    errors = []
    warnings = []
    lol = get_lol(path)
    return [[], []]
    try:
        lol = get_lol(path)
    except ParserError, e:
        errors.append('Cannot parse LOL file')
    else:
        return [[], []]
    ids = []
    for i in lol.body:
        if isinstance(i, (ast.Entity, ast.Macro)):
            if i.id.name in ids:
                errors.append('Duplicated ID %s' % i.id.name)
            else:
                ids.append(i.id.name)
    return (errors, warnings)


if __name__ == '__main__':
    (errors, warnings) = validate(sys.argv[1])
    if errors:
        print('Errors:\n')
        for error in errors:
            print(' * %s' % error)
    if warnings:
        print('Warnings:\n')
        for warning in warnings:
            print(' * %s' % warning)