1*e1fe3e4aSElliott Hughesfrom fontTools.feaLib.error import FeatureLibError 2*e1fe3e4aSElliott Hughesfrom fontTools.feaLib.lexer import Lexer, IncludingLexer, NonIncludingLexer 3*e1fe3e4aSElliott Hughesfrom fontTools.feaLib.variableScalar import VariableScalar 4*e1fe3e4aSElliott Hughesfrom fontTools.misc.encodingTools import getEncoding 5*e1fe3e4aSElliott Hughesfrom fontTools.misc.textTools import bytechr, tobytes, tostr 6*e1fe3e4aSElliott Hughesimport fontTools.feaLib.ast as ast 7*e1fe3e4aSElliott Hughesimport logging 8*e1fe3e4aSElliott Hughesimport os 9*e1fe3e4aSElliott Hughesimport re 10*e1fe3e4aSElliott Hughes 11*e1fe3e4aSElliott Hughes 12*e1fe3e4aSElliott Hugheslog = logging.getLogger(__name__) 13*e1fe3e4aSElliott Hughes 14*e1fe3e4aSElliott Hughes 15*e1fe3e4aSElliott Hughesclass Parser(object): 16*e1fe3e4aSElliott Hughes """Initializes a Parser object. 17*e1fe3e4aSElliott Hughes 18*e1fe3e4aSElliott Hughes Example: 19*e1fe3e4aSElliott Hughes 20*e1fe3e4aSElliott Hughes .. code:: python 21*e1fe3e4aSElliott Hughes 22*e1fe3e4aSElliott Hughes from fontTools.feaLib.parser import Parser 23*e1fe3e4aSElliott Hughes parser = Parser(file, font.getReverseGlyphMap()) 24*e1fe3e4aSElliott Hughes parsetree = parser.parse() 25*e1fe3e4aSElliott Hughes 26*e1fe3e4aSElliott Hughes Note: the ``glyphNames`` iterable serves a double role to help distinguish 27*e1fe3e4aSElliott Hughes glyph names from ranges in the presence of hyphens and to ensure that glyph 28*e1fe3e4aSElliott Hughes names referenced in a feature file are actually part of a font's glyph set. 29*e1fe3e4aSElliott Hughes If the iterable is left empty, no glyph name in glyph set checking takes 30*e1fe3e4aSElliott Hughes place, and all glyph tokens containing hyphens are treated as literal glyph 31*e1fe3e4aSElliott Hughes names, not as ranges. (Adding a space around the hyphen can, in any case, 32*e1fe3e4aSElliott Hughes help to disambiguate ranges from glyph names containing hyphens.) 33*e1fe3e4aSElliott Hughes 34*e1fe3e4aSElliott Hughes By default, the parser will follow ``include()`` statements in the feature 35*e1fe3e4aSElliott Hughes file. To turn this off, pass ``followIncludes=False``. Pass a directory string as 36*e1fe3e4aSElliott Hughes ``includeDir`` to explicitly declare a directory to search included feature files 37*e1fe3e4aSElliott Hughes in. 38*e1fe3e4aSElliott Hughes """ 39*e1fe3e4aSElliott Hughes 40*e1fe3e4aSElliott Hughes extensions = {} 41*e1fe3e4aSElliott Hughes ast = ast 42*e1fe3e4aSElliott Hughes SS_FEATURE_TAGS = {"ss%02d" % i for i in range(1, 20 + 1)} 43*e1fe3e4aSElliott Hughes CV_FEATURE_TAGS = {"cv%02d" % i for i in range(1, 99 + 1)} 44*e1fe3e4aSElliott Hughes 45*e1fe3e4aSElliott Hughes def __init__( 46*e1fe3e4aSElliott Hughes self, featurefile, glyphNames=(), followIncludes=True, includeDir=None, **kwargs 47*e1fe3e4aSElliott Hughes ): 48*e1fe3e4aSElliott Hughes if "glyphMap" in kwargs: 49*e1fe3e4aSElliott Hughes from fontTools.misc.loggingTools import deprecateArgument 50*e1fe3e4aSElliott Hughes 51*e1fe3e4aSElliott Hughes deprecateArgument("glyphMap", "use 'glyphNames' (iterable) instead") 52*e1fe3e4aSElliott Hughes if glyphNames: 53*e1fe3e4aSElliott Hughes raise TypeError( 54*e1fe3e4aSElliott Hughes "'glyphNames' and (deprecated) 'glyphMap' are " "mutually exclusive" 55*e1fe3e4aSElliott Hughes ) 56*e1fe3e4aSElliott Hughes glyphNames = kwargs.pop("glyphMap") 57*e1fe3e4aSElliott Hughes if kwargs: 58*e1fe3e4aSElliott Hughes raise TypeError( 59*e1fe3e4aSElliott Hughes "unsupported keyword argument%s: %s" 60*e1fe3e4aSElliott Hughes % ("" if len(kwargs) == 1 else "s", ", ".join(repr(k) for k in kwargs)) 61*e1fe3e4aSElliott Hughes ) 62*e1fe3e4aSElliott Hughes 63*e1fe3e4aSElliott Hughes self.glyphNames_ = set(glyphNames) 64*e1fe3e4aSElliott Hughes self.doc_ = self.ast.FeatureFile() 65*e1fe3e4aSElliott Hughes self.anchors_ = SymbolTable() 66*e1fe3e4aSElliott Hughes self.glyphclasses_ = SymbolTable() 67*e1fe3e4aSElliott Hughes self.lookups_ = SymbolTable() 68*e1fe3e4aSElliott Hughes self.valuerecords_ = SymbolTable() 69*e1fe3e4aSElliott Hughes self.symbol_tables_ = {self.anchors_, self.valuerecords_} 70*e1fe3e4aSElliott Hughes self.next_token_type_, self.next_token_ = (None, None) 71*e1fe3e4aSElliott Hughes self.cur_comments_ = [] 72*e1fe3e4aSElliott Hughes self.next_token_location_ = None 73*e1fe3e4aSElliott Hughes lexerClass = IncludingLexer if followIncludes else NonIncludingLexer 74*e1fe3e4aSElliott Hughes self.lexer_ = lexerClass(featurefile, includeDir=includeDir) 75*e1fe3e4aSElliott Hughes self.missing = {} 76*e1fe3e4aSElliott Hughes self.advance_lexer_(comments=True) 77*e1fe3e4aSElliott Hughes 78*e1fe3e4aSElliott Hughes def parse(self): 79*e1fe3e4aSElliott Hughes """Parse the file, and return a :class:`fontTools.feaLib.ast.FeatureFile` 80*e1fe3e4aSElliott Hughes object representing the root of the abstract syntax tree containing the 81*e1fe3e4aSElliott Hughes parsed contents of the file.""" 82*e1fe3e4aSElliott Hughes statements = self.doc_.statements 83*e1fe3e4aSElliott Hughes while self.next_token_type_ is not None or self.cur_comments_: 84*e1fe3e4aSElliott Hughes self.advance_lexer_(comments=True) 85*e1fe3e4aSElliott Hughes if self.cur_token_type_ is Lexer.COMMENT: 86*e1fe3e4aSElliott Hughes statements.append( 87*e1fe3e4aSElliott Hughes self.ast.Comment(self.cur_token_, location=self.cur_token_location_) 88*e1fe3e4aSElliott Hughes ) 89*e1fe3e4aSElliott Hughes elif self.is_cur_keyword_("include"): 90*e1fe3e4aSElliott Hughes statements.append(self.parse_include_()) 91*e1fe3e4aSElliott Hughes elif self.cur_token_type_ is Lexer.GLYPHCLASS: 92*e1fe3e4aSElliott Hughes statements.append(self.parse_glyphclass_definition_()) 93*e1fe3e4aSElliott Hughes elif self.is_cur_keyword_(("anon", "anonymous")): 94*e1fe3e4aSElliott Hughes statements.append(self.parse_anonymous_()) 95*e1fe3e4aSElliott Hughes elif self.is_cur_keyword_("anchorDef"): 96*e1fe3e4aSElliott Hughes statements.append(self.parse_anchordef_()) 97*e1fe3e4aSElliott Hughes elif self.is_cur_keyword_("languagesystem"): 98*e1fe3e4aSElliott Hughes statements.append(self.parse_languagesystem_()) 99*e1fe3e4aSElliott Hughes elif self.is_cur_keyword_("lookup"): 100*e1fe3e4aSElliott Hughes statements.append(self.parse_lookup_(vertical=False)) 101*e1fe3e4aSElliott Hughes elif self.is_cur_keyword_("markClass"): 102*e1fe3e4aSElliott Hughes statements.append(self.parse_markClass_()) 103*e1fe3e4aSElliott Hughes elif self.is_cur_keyword_("feature"): 104*e1fe3e4aSElliott Hughes statements.append(self.parse_feature_block_()) 105*e1fe3e4aSElliott Hughes elif self.is_cur_keyword_("conditionset"): 106*e1fe3e4aSElliott Hughes statements.append(self.parse_conditionset_()) 107*e1fe3e4aSElliott Hughes elif self.is_cur_keyword_("variation"): 108*e1fe3e4aSElliott Hughes statements.append(self.parse_feature_block_(variation=True)) 109*e1fe3e4aSElliott Hughes elif self.is_cur_keyword_("table"): 110*e1fe3e4aSElliott Hughes statements.append(self.parse_table_()) 111*e1fe3e4aSElliott Hughes elif self.is_cur_keyword_("valueRecordDef"): 112*e1fe3e4aSElliott Hughes statements.append(self.parse_valuerecord_definition_(vertical=False)) 113*e1fe3e4aSElliott Hughes elif ( 114*e1fe3e4aSElliott Hughes self.cur_token_type_ is Lexer.NAME 115*e1fe3e4aSElliott Hughes and self.cur_token_ in self.extensions 116*e1fe3e4aSElliott Hughes ): 117*e1fe3e4aSElliott Hughes statements.append(self.extensions[self.cur_token_](self)) 118*e1fe3e4aSElliott Hughes elif self.cur_token_type_ is Lexer.SYMBOL and self.cur_token_ == ";": 119*e1fe3e4aSElliott Hughes continue 120*e1fe3e4aSElliott Hughes else: 121*e1fe3e4aSElliott Hughes raise FeatureLibError( 122*e1fe3e4aSElliott Hughes "Expected feature, languagesystem, lookup, markClass, " 123*e1fe3e4aSElliott Hughes 'table, or glyph class definition, got {} "{}"'.format( 124*e1fe3e4aSElliott Hughes self.cur_token_type_, self.cur_token_ 125*e1fe3e4aSElliott Hughes ), 126*e1fe3e4aSElliott Hughes self.cur_token_location_, 127*e1fe3e4aSElliott Hughes ) 128*e1fe3e4aSElliott Hughes # Report any missing glyphs at the end of parsing 129*e1fe3e4aSElliott Hughes if self.missing: 130*e1fe3e4aSElliott Hughes error = [ 131*e1fe3e4aSElliott Hughes " %s (first found at %s)" % (name, loc) 132*e1fe3e4aSElliott Hughes for name, loc in self.missing.items() 133*e1fe3e4aSElliott Hughes ] 134*e1fe3e4aSElliott Hughes raise FeatureLibError( 135*e1fe3e4aSElliott Hughes "The following glyph names are referenced but are missing from the " 136*e1fe3e4aSElliott Hughes "glyph set:\n" + ("\n".join(error)), 137*e1fe3e4aSElliott Hughes None, 138*e1fe3e4aSElliott Hughes ) 139*e1fe3e4aSElliott Hughes return self.doc_ 140*e1fe3e4aSElliott Hughes 141*e1fe3e4aSElliott Hughes def parse_anchor_(self): 142*e1fe3e4aSElliott Hughes # Parses an anchor in any of the four formats given in the feature 143*e1fe3e4aSElliott Hughes # file specification (2.e.vii). 144*e1fe3e4aSElliott Hughes self.expect_symbol_("<") 145*e1fe3e4aSElliott Hughes self.expect_keyword_("anchor") 146*e1fe3e4aSElliott Hughes location = self.cur_token_location_ 147*e1fe3e4aSElliott Hughes 148*e1fe3e4aSElliott Hughes if self.next_token_ == "NULL": # Format D 149*e1fe3e4aSElliott Hughes self.expect_keyword_("NULL") 150*e1fe3e4aSElliott Hughes self.expect_symbol_(">") 151*e1fe3e4aSElliott Hughes return None 152*e1fe3e4aSElliott Hughes 153*e1fe3e4aSElliott Hughes if self.next_token_type_ == Lexer.NAME: # Format E 154*e1fe3e4aSElliott Hughes name = self.expect_name_() 155*e1fe3e4aSElliott Hughes anchordef = self.anchors_.resolve(name) 156*e1fe3e4aSElliott Hughes if anchordef is None: 157*e1fe3e4aSElliott Hughes raise FeatureLibError( 158*e1fe3e4aSElliott Hughes 'Unknown anchor "%s"' % name, self.cur_token_location_ 159*e1fe3e4aSElliott Hughes ) 160*e1fe3e4aSElliott Hughes self.expect_symbol_(">") 161*e1fe3e4aSElliott Hughes return self.ast.Anchor( 162*e1fe3e4aSElliott Hughes anchordef.x, 163*e1fe3e4aSElliott Hughes anchordef.y, 164*e1fe3e4aSElliott Hughes name=name, 165*e1fe3e4aSElliott Hughes contourpoint=anchordef.contourpoint, 166*e1fe3e4aSElliott Hughes xDeviceTable=None, 167*e1fe3e4aSElliott Hughes yDeviceTable=None, 168*e1fe3e4aSElliott Hughes location=location, 169*e1fe3e4aSElliott Hughes ) 170*e1fe3e4aSElliott Hughes 171*e1fe3e4aSElliott Hughes x, y = self.expect_number_(variable=True), self.expect_number_(variable=True) 172*e1fe3e4aSElliott Hughes 173*e1fe3e4aSElliott Hughes contourpoint = None 174*e1fe3e4aSElliott Hughes if self.next_token_ == "contourpoint": # Format B 175*e1fe3e4aSElliott Hughes self.expect_keyword_("contourpoint") 176*e1fe3e4aSElliott Hughes contourpoint = self.expect_number_() 177*e1fe3e4aSElliott Hughes 178*e1fe3e4aSElliott Hughes if self.next_token_ == "<": # Format C 179*e1fe3e4aSElliott Hughes xDeviceTable = self.parse_device_() 180*e1fe3e4aSElliott Hughes yDeviceTable = self.parse_device_() 181*e1fe3e4aSElliott Hughes else: 182*e1fe3e4aSElliott Hughes xDeviceTable, yDeviceTable = None, None 183*e1fe3e4aSElliott Hughes 184*e1fe3e4aSElliott Hughes self.expect_symbol_(">") 185*e1fe3e4aSElliott Hughes return self.ast.Anchor( 186*e1fe3e4aSElliott Hughes x, 187*e1fe3e4aSElliott Hughes y, 188*e1fe3e4aSElliott Hughes name=None, 189*e1fe3e4aSElliott Hughes contourpoint=contourpoint, 190*e1fe3e4aSElliott Hughes xDeviceTable=xDeviceTable, 191*e1fe3e4aSElliott Hughes yDeviceTable=yDeviceTable, 192*e1fe3e4aSElliott Hughes location=location, 193*e1fe3e4aSElliott Hughes ) 194*e1fe3e4aSElliott Hughes 195*e1fe3e4aSElliott Hughes def parse_anchor_marks_(self): 196*e1fe3e4aSElliott Hughes # Parses a sequence of ``[<anchor> mark @MARKCLASS]*.`` 197*e1fe3e4aSElliott Hughes anchorMarks = [] # [(self.ast.Anchor, markClassName)*] 198*e1fe3e4aSElliott Hughes while self.next_token_ == "<": 199*e1fe3e4aSElliott Hughes anchor = self.parse_anchor_() 200*e1fe3e4aSElliott Hughes if anchor is None and self.next_token_ != "mark": 201*e1fe3e4aSElliott Hughes continue # <anchor NULL> without mark, eg. in GPOS type 5 202*e1fe3e4aSElliott Hughes self.expect_keyword_("mark") 203*e1fe3e4aSElliott Hughes markClass = self.expect_markClass_reference_() 204*e1fe3e4aSElliott Hughes anchorMarks.append((anchor, markClass)) 205*e1fe3e4aSElliott Hughes return anchorMarks 206*e1fe3e4aSElliott Hughes 207*e1fe3e4aSElliott Hughes def parse_anchordef_(self): 208*e1fe3e4aSElliott Hughes # Parses a named anchor definition (`section 2.e.viii <https://adobe-type-tools.github.io/afdko/OpenTypeFeatureFileSpecification.html#2.e.vii>`_). 209*e1fe3e4aSElliott Hughes assert self.is_cur_keyword_("anchorDef") 210*e1fe3e4aSElliott Hughes location = self.cur_token_location_ 211*e1fe3e4aSElliott Hughes x, y = self.expect_number_(), self.expect_number_() 212*e1fe3e4aSElliott Hughes contourpoint = None 213*e1fe3e4aSElliott Hughes if self.next_token_ == "contourpoint": 214*e1fe3e4aSElliott Hughes self.expect_keyword_("contourpoint") 215*e1fe3e4aSElliott Hughes contourpoint = self.expect_number_() 216*e1fe3e4aSElliott Hughes name = self.expect_name_() 217*e1fe3e4aSElliott Hughes self.expect_symbol_(";") 218*e1fe3e4aSElliott Hughes anchordef = self.ast.AnchorDefinition( 219*e1fe3e4aSElliott Hughes name, x, y, contourpoint=contourpoint, location=location 220*e1fe3e4aSElliott Hughes ) 221*e1fe3e4aSElliott Hughes self.anchors_.define(name, anchordef) 222*e1fe3e4aSElliott Hughes return anchordef 223*e1fe3e4aSElliott Hughes 224*e1fe3e4aSElliott Hughes def parse_anonymous_(self): 225*e1fe3e4aSElliott Hughes # Parses an anonymous data block (`section 10 <https://adobe-type-tools.github.io/afdko/OpenTypeFeatureFileSpecification.html#10>`_). 226*e1fe3e4aSElliott Hughes assert self.is_cur_keyword_(("anon", "anonymous")) 227*e1fe3e4aSElliott Hughes tag = self.expect_tag_() 228*e1fe3e4aSElliott Hughes _, content, location = self.lexer_.scan_anonymous_block(tag) 229*e1fe3e4aSElliott Hughes self.advance_lexer_() 230*e1fe3e4aSElliott Hughes self.expect_symbol_("}") 231*e1fe3e4aSElliott Hughes end_tag = self.expect_tag_() 232*e1fe3e4aSElliott Hughes assert tag == end_tag, "bad splitting in Lexer.scan_anonymous_block()" 233*e1fe3e4aSElliott Hughes self.expect_symbol_(";") 234*e1fe3e4aSElliott Hughes return self.ast.AnonymousBlock(tag, content, location=location) 235*e1fe3e4aSElliott Hughes 236*e1fe3e4aSElliott Hughes def parse_attach_(self): 237*e1fe3e4aSElliott Hughes # Parses a GDEF Attach statement (`section 9.b <https://adobe-type-tools.github.io/afdko/OpenTypeFeatureFileSpecification.html#9.b>`_) 238*e1fe3e4aSElliott Hughes assert self.is_cur_keyword_("Attach") 239*e1fe3e4aSElliott Hughes location = self.cur_token_location_ 240*e1fe3e4aSElliott Hughes glyphs = self.parse_glyphclass_(accept_glyphname=True) 241*e1fe3e4aSElliott Hughes contourPoints = {self.expect_number_()} 242*e1fe3e4aSElliott Hughes while self.next_token_ != ";": 243*e1fe3e4aSElliott Hughes contourPoints.add(self.expect_number_()) 244*e1fe3e4aSElliott Hughes self.expect_symbol_(";") 245*e1fe3e4aSElliott Hughes return self.ast.AttachStatement(glyphs, contourPoints, location=location) 246*e1fe3e4aSElliott Hughes 247*e1fe3e4aSElliott Hughes def parse_enumerate_(self, vertical): 248*e1fe3e4aSElliott Hughes # Parse an enumerated pair positioning rule (`section 6.b.ii <https://adobe-type-tools.github.io/afdko/OpenTypeFeatureFileSpecification.html#6.b.ii>`_). 249*e1fe3e4aSElliott Hughes assert self.cur_token_ in {"enumerate", "enum"} 250*e1fe3e4aSElliott Hughes self.advance_lexer_() 251*e1fe3e4aSElliott Hughes return self.parse_position_(enumerated=True, vertical=vertical) 252*e1fe3e4aSElliott Hughes 253*e1fe3e4aSElliott Hughes def parse_GlyphClassDef_(self): 254*e1fe3e4aSElliott Hughes # Parses 'GlyphClassDef @BASE, @LIGATURES, @MARKS, @COMPONENTS;' 255*e1fe3e4aSElliott Hughes assert self.is_cur_keyword_("GlyphClassDef") 256*e1fe3e4aSElliott Hughes location = self.cur_token_location_ 257*e1fe3e4aSElliott Hughes if self.next_token_ != ",": 258*e1fe3e4aSElliott Hughes baseGlyphs = self.parse_glyphclass_(accept_glyphname=False) 259*e1fe3e4aSElliott Hughes else: 260*e1fe3e4aSElliott Hughes baseGlyphs = None 261*e1fe3e4aSElliott Hughes self.expect_symbol_(",") 262*e1fe3e4aSElliott Hughes if self.next_token_ != ",": 263*e1fe3e4aSElliott Hughes ligatureGlyphs = self.parse_glyphclass_(accept_glyphname=False) 264*e1fe3e4aSElliott Hughes else: 265*e1fe3e4aSElliott Hughes ligatureGlyphs = None 266*e1fe3e4aSElliott Hughes self.expect_symbol_(",") 267*e1fe3e4aSElliott Hughes if self.next_token_ != ",": 268*e1fe3e4aSElliott Hughes markGlyphs = self.parse_glyphclass_(accept_glyphname=False) 269*e1fe3e4aSElliott Hughes else: 270*e1fe3e4aSElliott Hughes markGlyphs = None 271*e1fe3e4aSElliott Hughes self.expect_symbol_(",") 272*e1fe3e4aSElliott Hughes if self.next_token_ != ";": 273*e1fe3e4aSElliott Hughes componentGlyphs = self.parse_glyphclass_(accept_glyphname=False) 274*e1fe3e4aSElliott Hughes else: 275*e1fe3e4aSElliott Hughes componentGlyphs = None 276*e1fe3e4aSElliott Hughes self.expect_symbol_(";") 277*e1fe3e4aSElliott Hughes return self.ast.GlyphClassDefStatement( 278*e1fe3e4aSElliott Hughes baseGlyphs, markGlyphs, ligatureGlyphs, componentGlyphs, location=location 279*e1fe3e4aSElliott Hughes ) 280*e1fe3e4aSElliott Hughes 281*e1fe3e4aSElliott Hughes def parse_glyphclass_definition_(self): 282*e1fe3e4aSElliott Hughes # Parses glyph class definitions such as '@UPPERCASE = [A-Z];' 283*e1fe3e4aSElliott Hughes location, name = self.cur_token_location_, self.cur_token_ 284*e1fe3e4aSElliott Hughes self.expect_symbol_("=") 285*e1fe3e4aSElliott Hughes glyphs = self.parse_glyphclass_(accept_glyphname=False) 286*e1fe3e4aSElliott Hughes self.expect_symbol_(";") 287*e1fe3e4aSElliott Hughes glyphclass = self.ast.GlyphClassDefinition(name, glyphs, location=location) 288*e1fe3e4aSElliott Hughes self.glyphclasses_.define(name, glyphclass) 289*e1fe3e4aSElliott Hughes return glyphclass 290*e1fe3e4aSElliott Hughes 291*e1fe3e4aSElliott Hughes def split_glyph_range_(self, name, location): 292*e1fe3e4aSElliott Hughes # Since v1.20, the OpenType Feature File specification allows 293*e1fe3e4aSElliott Hughes # for dashes in glyph names. A sequence like "a-b-c-d" could 294*e1fe3e4aSElliott Hughes # therefore mean a single glyph whose name happens to be 295*e1fe3e4aSElliott Hughes # "a-b-c-d", or it could mean a range from glyph "a" to glyph 296*e1fe3e4aSElliott Hughes # "b-c-d", or a range from glyph "a-b" to glyph "c-d", or a 297*e1fe3e4aSElliott Hughes # range from glyph "a-b-c" to glyph "d".Technically, this 298*e1fe3e4aSElliott Hughes # example could be resolved because the (pretty complex) 299*e1fe3e4aSElliott Hughes # definition of glyph ranges renders most of these splits 300*e1fe3e4aSElliott Hughes # invalid. But the specification does not say that a compiler 301*e1fe3e4aSElliott Hughes # should try to apply such fancy heuristics. To encourage 302*e1fe3e4aSElliott Hughes # unambiguous feature files, we therefore try all possible 303*e1fe3e4aSElliott Hughes # splits and reject the feature file if there are multiple 304*e1fe3e4aSElliott Hughes # splits possible. It is intentional that we don't just emit a 305*e1fe3e4aSElliott Hughes # warning; warnings tend to get ignored. To fix the problem, 306*e1fe3e4aSElliott Hughes # font designers can trivially add spaces around the intended 307*e1fe3e4aSElliott Hughes # split point, and we emit a compiler error that suggests 308*e1fe3e4aSElliott Hughes # how exactly the source should be rewritten to make things 309*e1fe3e4aSElliott Hughes # unambiguous. 310*e1fe3e4aSElliott Hughes parts = name.split("-") 311*e1fe3e4aSElliott Hughes solutions = [] 312*e1fe3e4aSElliott Hughes for i in range(len(parts)): 313*e1fe3e4aSElliott Hughes start, limit = "-".join(parts[0:i]), "-".join(parts[i:]) 314*e1fe3e4aSElliott Hughes if start in self.glyphNames_ and limit in self.glyphNames_: 315*e1fe3e4aSElliott Hughes solutions.append((start, limit)) 316*e1fe3e4aSElliott Hughes if len(solutions) == 1: 317*e1fe3e4aSElliott Hughes start, limit = solutions[0] 318*e1fe3e4aSElliott Hughes return start, limit 319*e1fe3e4aSElliott Hughes elif len(solutions) == 0: 320*e1fe3e4aSElliott Hughes raise FeatureLibError( 321*e1fe3e4aSElliott Hughes '"%s" is not a glyph in the font, and it can not be split ' 322*e1fe3e4aSElliott Hughes "into a range of known glyphs" % name, 323*e1fe3e4aSElliott Hughes location, 324*e1fe3e4aSElliott Hughes ) 325*e1fe3e4aSElliott Hughes else: 326*e1fe3e4aSElliott Hughes ranges = " or ".join(['"%s - %s"' % (s, l) for s, l in solutions]) 327*e1fe3e4aSElliott Hughes raise FeatureLibError( 328*e1fe3e4aSElliott Hughes 'Ambiguous glyph range "%s"; ' 329*e1fe3e4aSElliott Hughes "please use %s to clarify what you mean" % (name, ranges), 330*e1fe3e4aSElliott Hughes location, 331*e1fe3e4aSElliott Hughes ) 332*e1fe3e4aSElliott Hughes 333*e1fe3e4aSElliott Hughes def parse_glyphclass_(self, accept_glyphname, accept_null=False): 334*e1fe3e4aSElliott Hughes # Parses a glyph class, either named or anonymous, or (if 335*e1fe3e4aSElliott Hughes # ``bool(accept_glyphname)``) a glyph name. If ``bool(accept_null)`` then 336*e1fe3e4aSElliott Hughes # also accept the special NULL glyph. 337*e1fe3e4aSElliott Hughes if accept_glyphname and self.next_token_type_ in (Lexer.NAME, Lexer.CID): 338*e1fe3e4aSElliott Hughes if accept_null and self.next_token_ == "NULL": 339*e1fe3e4aSElliott Hughes # If you want a glyph called NULL, you should escape it. 340*e1fe3e4aSElliott Hughes self.advance_lexer_() 341*e1fe3e4aSElliott Hughes return self.ast.NullGlyph(location=self.cur_token_location_) 342*e1fe3e4aSElliott Hughes glyph = self.expect_glyph_() 343*e1fe3e4aSElliott Hughes self.check_glyph_name_in_glyph_set(glyph) 344*e1fe3e4aSElliott Hughes return self.ast.GlyphName(glyph, location=self.cur_token_location_) 345*e1fe3e4aSElliott Hughes if self.next_token_type_ is Lexer.GLYPHCLASS: 346*e1fe3e4aSElliott Hughes self.advance_lexer_() 347*e1fe3e4aSElliott Hughes gc = self.glyphclasses_.resolve(self.cur_token_) 348*e1fe3e4aSElliott Hughes if gc is None: 349*e1fe3e4aSElliott Hughes raise FeatureLibError( 350*e1fe3e4aSElliott Hughes "Unknown glyph class @%s" % self.cur_token_, 351*e1fe3e4aSElliott Hughes self.cur_token_location_, 352*e1fe3e4aSElliott Hughes ) 353*e1fe3e4aSElliott Hughes if isinstance(gc, self.ast.MarkClass): 354*e1fe3e4aSElliott Hughes return self.ast.MarkClassName(gc, location=self.cur_token_location_) 355*e1fe3e4aSElliott Hughes else: 356*e1fe3e4aSElliott Hughes return self.ast.GlyphClassName(gc, location=self.cur_token_location_) 357*e1fe3e4aSElliott Hughes 358*e1fe3e4aSElliott Hughes self.expect_symbol_("[") 359*e1fe3e4aSElliott Hughes location = self.cur_token_location_ 360*e1fe3e4aSElliott Hughes glyphs = self.ast.GlyphClass(location=location) 361*e1fe3e4aSElliott Hughes while self.next_token_ != "]": 362*e1fe3e4aSElliott Hughes if self.next_token_type_ is Lexer.NAME: 363*e1fe3e4aSElliott Hughes glyph = self.expect_glyph_() 364*e1fe3e4aSElliott Hughes location = self.cur_token_location_ 365*e1fe3e4aSElliott Hughes if "-" in glyph and self.glyphNames_ and glyph not in self.glyphNames_: 366*e1fe3e4aSElliott Hughes start, limit = self.split_glyph_range_(glyph, location) 367*e1fe3e4aSElliott Hughes self.check_glyph_name_in_glyph_set(start, limit) 368*e1fe3e4aSElliott Hughes glyphs.add_range( 369*e1fe3e4aSElliott Hughes start, limit, self.make_glyph_range_(location, start, limit) 370*e1fe3e4aSElliott Hughes ) 371*e1fe3e4aSElliott Hughes elif self.next_token_ == "-": 372*e1fe3e4aSElliott Hughes start = glyph 373*e1fe3e4aSElliott Hughes self.expect_symbol_("-") 374*e1fe3e4aSElliott Hughes limit = self.expect_glyph_() 375*e1fe3e4aSElliott Hughes self.check_glyph_name_in_glyph_set(start, limit) 376*e1fe3e4aSElliott Hughes glyphs.add_range( 377*e1fe3e4aSElliott Hughes start, limit, self.make_glyph_range_(location, start, limit) 378*e1fe3e4aSElliott Hughes ) 379*e1fe3e4aSElliott Hughes else: 380*e1fe3e4aSElliott Hughes if "-" in glyph and not self.glyphNames_: 381*e1fe3e4aSElliott Hughes log.warning( 382*e1fe3e4aSElliott Hughes str( 383*e1fe3e4aSElliott Hughes FeatureLibError( 384*e1fe3e4aSElliott Hughes f"Ambiguous glyph name that looks like a range: {glyph!r}", 385*e1fe3e4aSElliott Hughes location, 386*e1fe3e4aSElliott Hughes ) 387*e1fe3e4aSElliott Hughes ) 388*e1fe3e4aSElliott Hughes ) 389*e1fe3e4aSElliott Hughes self.check_glyph_name_in_glyph_set(glyph) 390*e1fe3e4aSElliott Hughes glyphs.append(glyph) 391*e1fe3e4aSElliott Hughes elif self.next_token_type_ is Lexer.CID: 392*e1fe3e4aSElliott Hughes glyph = self.expect_glyph_() 393*e1fe3e4aSElliott Hughes if self.next_token_ == "-": 394*e1fe3e4aSElliott Hughes range_location = self.cur_token_location_ 395*e1fe3e4aSElliott Hughes range_start = self.cur_token_ 396*e1fe3e4aSElliott Hughes self.expect_symbol_("-") 397*e1fe3e4aSElliott Hughes range_end = self.expect_cid_() 398*e1fe3e4aSElliott Hughes self.check_glyph_name_in_glyph_set( 399*e1fe3e4aSElliott Hughes f"cid{range_start:05d}", 400*e1fe3e4aSElliott Hughes f"cid{range_end:05d}", 401*e1fe3e4aSElliott Hughes ) 402*e1fe3e4aSElliott Hughes glyphs.add_cid_range( 403*e1fe3e4aSElliott Hughes range_start, 404*e1fe3e4aSElliott Hughes range_end, 405*e1fe3e4aSElliott Hughes self.make_cid_range_(range_location, range_start, range_end), 406*e1fe3e4aSElliott Hughes ) 407*e1fe3e4aSElliott Hughes else: 408*e1fe3e4aSElliott Hughes glyph_name = f"cid{self.cur_token_:05d}" 409*e1fe3e4aSElliott Hughes self.check_glyph_name_in_glyph_set(glyph_name) 410*e1fe3e4aSElliott Hughes glyphs.append(glyph_name) 411*e1fe3e4aSElliott Hughes elif self.next_token_type_ is Lexer.GLYPHCLASS: 412*e1fe3e4aSElliott Hughes self.advance_lexer_() 413*e1fe3e4aSElliott Hughes gc = self.glyphclasses_.resolve(self.cur_token_) 414*e1fe3e4aSElliott Hughes if gc is None: 415*e1fe3e4aSElliott Hughes raise FeatureLibError( 416*e1fe3e4aSElliott Hughes "Unknown glyph class @%s" % self.cur_token_, 417*e1fe3e4aSElliott Hughes self.cur_token_location_, 418*e1fe3e4aSElliott Hughes ) 419*e1fe3e4aSElliott Hughes if isinstance(gc, self.ast.MarkClass): 420*e1fe3e4aSElliott Hughes gc = self.ast.MarkClassName(gc, location=self.cur_token_location_) 421*e1fe3e4aSElliott Hughes else: 422*e1fe3e4aSElliott Hughes gc = self.ast.GlyphClassName(gc, location=self.cur_token_location_) 423*e1fe3e4aSElliott Hughes glyphs.add_class(gc) 424*e1fe3e4aSElliott Hughes else: 425*e1fe3e4aSElliott Hughes raise FeatureLibError( 426*e1fe3e4aSElliott Hughes "Expected glyph name, glyph range, " 427*e1fe3e4aSElliott Hughes f"or glyph class reference, found {self.next_token_!r}", 428*e1fe3e4aSElliott Hughes self.next_token_location_, 429*e1fe3e4aSElliott Hughes ) 430*e1fe3e4aSElliott Hughes self.expect_symbol_("]") 431*e1fe3e4aSElliott Hughes return glyphs 432*e1fe3e4aSElliott Hughes 433*e1fe3e4aSElliott Hughes def parse_glyph_pattern_(self, vertical): 434*e1fe3e4aSElliott Hughes # Parses a glyph pattern, including lookups and context, e.g.:: 435*e1fe3e4aSElliott Hughes # 436*e1fe3e4aSElliott Hughes # a b 437*e1fe3e4aSElliott Hughes # a b c' d e 438*e1fe3e4aSElliott Hughes # a b c' lookup ChangeC d e 439*e1fe3e4aSElliott Hughes prefix, glyphs, lookups, values, suffix = ([], [], [], [], []) 440*e1fe3e4aSElliott Hughes hasMarks = False 441*e1fe3e4aSElliott Hughes while self.next_token_ not in {"by", "from", ";", ","}: 442*e1fe3e4aSElliott Hughes gc = self.parse_glyphclass_(accept_glyphname=True) 443*e1fe3e4aSElliott Hughes marked = False 444*e1fe3e4aSElliott Hughes if self.next_token_ == "'": 445*e1fe3e4aSElliott Hughes self.expect_symbol_("'") 446*e1fe3e4aSElliott Hughes hasMarks = marked = True 447*e1fe3e4aSElliott Hughes if marked: 448*e1fe3e4aSElliott Hughes if suffix: 449*e1fe3e4aSElliott Hughes # makeotf also reports this as an error, while FontForge 450*e1fe3e4aSElliott Hughes # silently inserts ' in all the intervening glyphs. 451*e1fe3e4aSElliott Hughes # https://github.com/fonttools/fonttools/pull/1096 452*e1fe3e4aSElliott Hughes raise FeatureLibError( 453*e1fe3e4aSElliott Hughes "Unsupported contextual target sequence: at most " 454*e1fe3e4aSElliott Hughes "one run of marked (') glyph/class names allowed", 455*e1fe3e4aSElliott Hughes self.cur_token_location_, 456*e1fe3e4aSElliott Hughes ) 457*e1fe3e4aSElliott Hughes glyphs.append(gc) 458*e1fe3e4aSElliott Hughes elif glyphs: 459*e1fe3e4aSElliott Hughes suffix.append(gc) 460*e1fe3e4aSElliott Hughes else: 461*e1fe3e4aSElliott Hughes prefix.append(gc) 462*e1fe3e4aSElliott Hughes 463*e1fe3e4aSElliott Hughes if self.is_next_value_(): 464*e1fe3e4aSElliott Hughes values.append(self.parse_valuerecord_(vertical)) 465*e1fe3e4aSElliott Hughes else: 466*e1fe3e4aSElliott Hughes values.append(None) 467*e1fe3e4aSElliott Hughes 468*e1fe3e4aSElliott Hughes lookuplist = None 469*e1fe3e4aSElliott Hughes while self.next_token_ == "lookup": 470*e1fe3e4aSElliott Hughes if lookuplist is None: 471*e1fe3e4aSElliott Hughes lookuplist = [] 472*e1fe3e4aSElliott Hughes self.expect_keyword_("lookup") 473*e1fe3e4aSElliott Hughes if not marked: 474*e1fe3e4aSElliott Hughes raise FeatureLibError( 475*e1fe3e4aSElliott Hughes "Lookups can only follow marked glyphs", 476*e1fe3e4aSElliott Hughes self.cur_token_location_, 477*e1fe3e4aSElliott Hughes ) 478*e1fe3e4aSElliott Hughes lookup_name = self.expect_name_() 479*e1fe3e4aSElliott Hughes lookup = self.lookups_.resolve(lookup_name) 480*e1fe3e4aSElliott Hughes if lookup is None: 481*e1fe3e4aSElliott Hughes raise FeatureLibError( 482*e1fe3e4aSElliott Hughes 'Unknown lookup "%s"' % lookup_name, self.cur_token_location_ 483*e1fe3e4aSElliott Hughes ) 484*e1fe3e4aSElliott Hughes lookuplist.append(lookup) 485*e1fe3e4aSElliott Hughes if marked: 486*e1fe3e4aSElliott Hughes lookups.append(lookuplist) 487*e1fe3e4aSElliott Hughes 488*e1fe3e4aSElliott Hughes if not glyphs and not suffix: # eg., "sub f f i by" 489*e1fe3e4aSElliott Hughes assert lookups == [] 490*e1fe3e4aSElliott Hughes return ([], prefix, [None] * len(prefix), values, [], hasMarks) 491*e1fe3e4aSElliott Hughes else: 492*e1fe3e4aSElliott Hughes if any(values[: len(prefix)]): 493*e1fe3e4aSElliott Hughes raise FeatureLibError( 494*e1fe3e4aSElliott Hughes "Positioning cannot be applied in the bactrack glyph sequence, " 495*e1fe3e4aSElliott Hughes "before the marked glyph sequence.", 496*e1fe3e4aSElliott Hughes self.cur_token_location_, 497*e1fe3e4aSElliott Hughes ) 498*e1fe3e4aSElliott Hughes marked_values = values[len(prefix) : len(prefix) + len(glyphs)] 499*e1fe3e4aSElliott Hughes if any(marked_values): 500*e1fe3e4aSElliott Hughes if any(values[len(prefix) + len(glyphs) :]): 501*e1fe3e4aSElliott Hughes raise FeatureLibError( 502*e1fe3e4aSElliott Hughes "Positioning values are allowed only in the marked glyph " 503*e1fe3e4aSElliott Hughes "sequence, or after the final glyph node when only one glyph " 504*e1fe3e4aSElliott Hughes "node is marked.", 505*e1fe3e4aSElliott Hughes self.cur_token_location_, 506*e1fe3e4aSElliott Hughes ) 507*e1fe3e4aSElliott Hughes values = marked_values 508*e1fe3e4aSElliott Hughes elif values and values[-1]: 509*e1fe3e4aSElliott Hughes if len(glyphs) > 1 or any(values[:-1]): 510*e1fe3e4aSElliott Hughes raise FeatureLibError( 511*e1fe3e4aSElliott Hughes "Positioning values are allowed only in the marked glyph " 512*e1fe3e4aSElliott Hughes "sequence, or after the final glyph node when only one glyph " 513*e1fe3e4aSElliott Hughes "node is marked.", 514*e1fe3e4aSElliott Hughes self.cur_token_location_, 515*e1fe3e4aSElliott Hughes ) 516*e1fe3e4aSElliott Hughes values = values[-1:] 517*e1fe3e4aSElliott Hughes elif any(values): 518*e1fe3e4aSElliott Hughes raise FeatureLibError( 519*e1fe3e4aSElliott Hughes "Positioning values are allowed only in the marked glyph " 520*e1fe3e4aSElliott Hughes "sequence, or after the final glyph node when only one glyph " 521*e1fe3e4aSElliott Hughes "node is marked.", 522*e1fe3e4aSElliott Hughes self.cur_token_location_, 523*e1fe3e4aSElliott Hughes ) 524*e1fe3e4aSElliott Hughes return (prefix, glyphs, lookups, values, suffix, hasMarks) 525*e1fe3e4aSElliott Hughes 526*e1fe3e4aSElliott Hughes def parse_ignore_glyph_pattern_(self, sub): 527*e1fe3e4aSElliott Hughes location = self.cur_token_location_ 528*e1fe3e4aSElliott Hughes prefix, glyphs, lookups, values, suffix, hasMarks = self.parse_glyph_pattern_( 529*e1fe3e4aSElliott Hughes vertical=False 530*e1fe3e4aSElliott Hughes ) 531*e1fe3e4aSElliott Hughes if any(lookups): 532*e1fe3e4aSElliott Hughes raise FeatureLibError( 533*e1fe3e4aSElliott Hughes f'No lookups can be specified for "ignore {sub}"', location 534*e1fe3e4aSElliott Hughes ) 535*e1fe3e4aSElliott Hughes if not hasMarks: 536*e1fe3e4aSElliott Hughes error = FeatureLibError( 537*e1fe3e4aSElliott Hughes f'Ambiguous "ignore {sub}", there should be least one marked glyph', 538*e1fe3e4aSElliott Hughes location, 539*e1fe3e4aSElliott Hughes ) 540*e1fe3e4aSElliott Hughes log.warning(str(error)) 541*e1fe3e4aSElliott Hughes suffix, glyphs = glyphs[1:], glyphs[0:1] 542*e1fe3e4aSElliott Hughes chainContext = (prefix, glyphs, suffix) 543*e1fe3e4aSElliott Hughes return chainContext 544*e1fe3e4aSElliott Hughes 545*e1fe3e4aSElliott Hughes def parse_ignore_context_(self, sub): 546*e1fe3e4aSElliott Hughes location = self.cur_token_location_ 547*e1fe3e4aSElliott Hughes chainContext = [self.parse_ignore_glyph_pattern_(sub)] 548*e1fe3e4aSElliott Hughes while self.next_token_ == ",": 549*e1fe3e4aSElliott Hughes self.expect_symbol_(",") 550*e1fe3e4aSElliott Hughes chainContext.append(self.parse_ignore_glyph_pattern_(sub)) 551*e1fe3e4aSElliott Hughes self.expect_symbol_(";") 552*e1fe3e4aSElliott Hughes return chainContext 553*e1fe3e4aSElliott Hughes 554*e1fe3e4aSElliott Hughes def parse_ignore_(self): 555*e1fe3e4aSElliott Hughes # Parses an ignore sub/pos rule. 556*e1fe3e4aSElliott Hughes assert self.is_cur_keyword_("ignore") 557*e1fe3e4aSElliott Hughes location = self.cur_token_location_ 558*e1fe3e4aSElliott Hughes self.advance_lexer_() 559*e1fe3e4aSElliott Hughes if self.cur_token_ in ["substitute", "sub"]: 560*e1fe3e4aSElliott Hughes chainContext = self.parse_ignore_context_("sub") 561*e1fe3e4aSElliott Hughes return self.ast.IgnoreSubstStatement(chainContext, location=location) 562*e1fe3e4aSElliott Hughes if self.cur_token_ in ["position", "pos"]: 563*e1fe3e4aSElliott Hughes chainContext = self.parse_ignore_context_("pos") 564*e1fe3e4aSElliott Hughes return self.ast.IgnorePosStatement(chainContext, location=location) 565*e1fe3e4aSElliott Hughes raise FeatureLibError( 566*e1fe3e4aSElliott Hughes 'Expected "substitute" or "position"', self.cur_token_location_ 567*e1fe3e4aSElliott Hughes ) 568*e1fe3e4aSElliott Hughes 569*e1fe3e4aSElliott Hughes def parse_include_(self): 570*e1fe3e4aSElliott Hughes assert self.cur_token_ == "include" 571*e1fe3e4aSElliott Hughes location = self.cur_token_location_ 572*e1fe3e4aSElliott Hughes filename = self.expect_filename_() 573*e1fe3e4aSElliott Hughes # self.expect_symbol_(";") 574*e1fe3e4aSElliott Hughes return ast.IncludeStatement(filename, location=location) 575*e1fe3e4aSElliott Hughes 576*e1fe3e4aSElliott Hughes def parse_language_(self): 577*e1fe3e4aSElliott Hughes assert self.is_cur_keyword_("language") 578*e1fe3e4aSElliott Hughes location = self.cur_token_location_ 579*e1fe3e4aSElliott Hughes language = self.expect_language_tag_() 580*e1fe3e4aSElliott Hughes include_default, required = (True, False) 581*e1fe3e4aSElliott Hughes if self.next_token_ in {"exclude_dflt", "include_dflt"}: 582*e1fe3e4aSElliott Hughes include_default = self.expect_name_() == "include_dflt" 583*e1fe3e4aSElliott Hughes if self.next_token_ == "required": 584*e1fe3e4aSElliott Hughes self.expect_keyword_("required") 585*e1fe3e4aSElliott Hughes required = True 586*e1fe3e4aSElliott Hughes self.expect_symbol_(";") 587*e1fe3e4aSElliott Hughes return self.ast.LanguageStatement( 588*e1fe3e4aSElliott Hughes language, include_default, required, location=location 589*e1fe3e4aSElliott Hughes ) 590*e1fe3e4aSElliott Hughes 591*e1fe3e4aSElliott Hughes def parse_ligatureCaretByIndex_(self): 592*e1fe3e4aSElliott Hughes assert self.is_cur_keyword_("LigatureCaretByIndex") 593*e1fe3e4aSElliott Hughes location = self.cur_token_location_ 594*e1fe3e4aSElliott Hughes glyphs = self.parse_glyphclass_(accept_glyphname=True) 595*e1fe3e4aSElliott Hughes carets = [self.expect_number_()] 596*e1fe3e4aSElliott Hughes while self.next_token_ != ";": 597*e1fe3e4aSElliott Hughes carets.append(self.expect_number_()) 598*e1fe3e4aSElliott Hughes self.expect_symbol_(";") 599*e1fe3e4aSElliott Hughes return self.ast.LigatureCaretByIndexStatement(glyphs, carets, location=location) 600*e1fe3e4aSElliott Hughes 601*e1fe3e4aSElliott Hughes def parse_ligatureCaretByPos_(self): 602*e1fe3e4aSElliott Hughes assert self.is_cur_keyword_("LigatureCaretByPos") 603*e1fe3e4aSElliott Hughes location = self.cur_token_location_ 604*e1fe3e4aSElliott Hughes glyphs = self.parse_glyphclass_(accept_glyphname=True) 605*e1fe3e4aSElliott Hughes carets = [self.expect_number_(variable=True)] 606*e1fe3e4aSElliott Hughes while self.next_token_ != ";": 607*e1fe3e4aSElliott Hughes carets.append(self.expect_number_(variable=True)) 608*e1fe3e4aSElliott Hughes self.expect_symbol_(";") 609*e1fe3e4aSElliott Hughes return self.ast.LigatureCaretByPosStatement(glyphs, carets, location=location) 610*e1fe3e4aSElliott Hughes 611*e1fe3e4aSElliott Hughes def parse_lookup_(self, vertical): 612*e1fe3e4aSElliott Hughes # Parses a ``lookup`` - either a lookup block, or a lookup reference 613*e1fe3e4aSElliott Hughes # inside a feature. 614*e1fe3e4aSElliott Hughes assert self.is_cur_keyword_("lookup") 615*e1fe3e4aSElliott Hughes location, name = self.cur_token_location_, self.expect_name_() 616*e1fe3e4aSElliott Hughes 617*e1fe3e4aSElliott Hughes if self.next_token_ == ";": 618*e1fe3e4aSElliott Hughes lookup = self.lookups_.resolve(name) 619*e1fe3e4aSElliott Hughes if lookup is None: 620*e1fe3e4aSElliott Hughes raise FeatureLibError( 621*e1fe3e4aSElliott Hughes 'Unknown lookup "%s"' % name, self.cur_token_location_ 622*e1fe3e4aSElliott Hughes ) 623*e1fe3e4aSElliott Hughes self.expect_symbol_(";") 624*e1fe3e4aSElliott Hughes return self.ast.LookupReferenceStatement(lookup, location=location) 625*e1fe3e4aSElliott Hughes 626*e1fe3e4aSElliott Hughes use_extension = False 627*e1fe3e4aSElliott Hughes if self.next_token_ == "useExtension": 628*e1fe3e4aSElliott Hughes self.expect_keyword_("useExtension") 629*e1fe3e4aSElliott Hughes use_extension = True 630*e1fe3e4aSElliott Hughes 631*e1fe3e4aSElliott Hughes block = self.ast.LookupBlock(name, use_extension, location=location) 632*e1fe3e4aSElliott Hughes self.parse_block_(block, vertical) 633*e1fe3e4aSElliott Hughes self.lookups_.define(name, block) 634*e1fe3e4aSElliott Hughes return block 635*e1fe3e4aSElliott Hughes 636*e1fe3e4aSElliott Hughes def parse_lookupflag_(self): 637*e1fe3e4aSElliott Hughes # Parses a ``lookupflag`` statement, either specified by number or 638*e1fe3e4aSElliott Hughes # in words. 639*e1fe3e4aSElliott Hughes assert self.is_cur_keyword_("lookupflag") 640*e1fe3e4aSElliott Hughes location = self.cur_token_location_ 641*e1fe3e4aSElliott Hughes 642*e1fe3e4aSElliott Hughes # format B: "lookupflag 6;" 643*e1fe3e4aSElliott Hughes if self.next_token_type_ == Lexer.NUMBER: 644*e1fe3e4aSElliott Hughes value = self.expect_number_() 645*e1fe3e4aSElliott Hughes self.expect_symbol_(";") 646*e1fe3e4aSElliott Hughes return self.ast.LookupFlagStatement(value, location=location) 647*e1fe3e4aSElliott Hughes 648*e1fe3e4aSElliott Hughes # format A: "lookupflag RightToLeft MarkAttachmentType @M;" 649*e1fe3e4aSElliott Hughes value_seen = False 650*e1fe3e4aSElliott Hughes value, markAttachment, markFilteringSet = 0, None, None 651*e1fe3e4aSElliott Hughes flags = { 652*e1fe3e4aSElliott Hughes "RightToLeft": 1, 653*e1fe3e4aSElliott Hughes "IgnoreBaseGlyphs": 2, 654*e1fe3e4aSElliott Hughes "IgnoreLigatures": 4, 655*e1fe3e4aSElliott Hughes "IgnoreMarks": 8, 656*e1fe3e4aSElliott Hughes } 657*e1fe3e4aSElliott Hughes seen = set() 658*e1fe3e4aSElliott Hughes while self.next_token_ != ";": 659*e1fe3e4aSElliott Hughes if self.next_token_ in seen: 660*e1fe3e4aSElliott Hughes raise FeatureLibError( 661*e1fe3e4aSElliott Hughes "%s can be specified only once" % self.next_token_, 662*e1fe3e4aSElliott Hughes self.next_token_location_, 663*e1fe3e4aSElliott Hughes ) 664*e1fe3e4aSElliott Hughes seen.add(self.next_token_) 665*e1fe3e4aSElliott Hughes if self.next_token_ == "MarkAttachmentType": 666*e1fe3e4aSElliott Hughes self.expect_keyword_("MarkAttachmentType") 667*e1fe3e4aSElliott Hughes markAttachment = self.parse_glyphclass_(accept_glyphname=False) 668*e1fe3e4aSElliott Hughes elif self.next_token_ == "UseMarkFilteringSet": 669*e1fe3e4aSElliott Hughes self.expect_keyword_("UseMarkFilteringSet") 670*e1fe3e4aSElliott Hughes markFilteringSet = self.parse_glyphclass_(accept_glyphname=False) 671*e1fe3e4aSElliott Hughes elif self.next_token_ in flags: 672*e1fe3e4aSElliott Hughes value_seen = True 673*e1fe3e4aSElliott Hughes value = value | flags[self.expect_name_()] 674*e1fe3e4aSElliott Hughes else: 675*e1fe3e4aSElliott Hughes raise FeatureLibError( 676*e1fe3e4aSElliott Hughes '"%s" is not a recognized lookupflag' % self.next_token_, 677*e1fe3e4aSElliott Hughes self.next_token_location_, 678*e1fe3e4aSElliott Hughes ) 679*e1fe3e4aSElliott Hughes self.expect_symbol_(";") 680*e1fe3e4aSElliott Hughes 681*e1fe3e4aSElliott Hughes if not any([value_seen, markAttachment, markFilteringSet]): 682*e1fe3e4aSElliott Hughes raise FeatureLibError( 683*e1fe3e4aSElliott Hughes "lookupflag must have a value", self.next_token_location_ 684*e1fe3e4aSElliott Hughes ) 685*e1fe3e4aSElliott Hughes 686*e1fe3e4aSElliott Hughes return self.ast.LookupFlagStatement( 687*e1fe3e4aSElliott Hughes value, 688*e1fe3e4aSElliott Hughes markAttachment=markAttachment, 689*e1fe3e4aSElliott Hughes markFilteringSet=markFilteringSet, 690*e1fe3e4aSElliott Hughes location=location, 691*e1fe3e4aSElliott Hughes ) 692*e1fe3e4aSElliott Hughes 693*e1fe3e4aSElliott Hughes def parse_markClass_(self): 694*e1fe3e4aSElliott Hughes assert self.is_cur_keyword_("markClass") 695*e1fe3e4aSElliott Hughes location = self.cur_token_location_ 696*e1fe3e4aSElliott Hughes glyphs = self.parse_glyphclass_(accept_glyphname=True) 697*e1fe3e4aSElliott Hughes if not glyphs.glyphSet(): 698*e1fe3e4aSElliott Hughes raise FeatureLibError( 699*e1fe3e4aSElliott Hughes "Empty glyph class in mark class definition", location 700*e1fe3e4aSElliott Hughes ) 701*e1fe3e4aSElliott Hughes anchor = self.parse_anchor_() 702*e1fe3e4aSElliott Hughes name = self.expect_class_name_() 703*e1fe3e4aSElliott Hughes self.expect_symbol_(";") 704*e1fe3e4aSElliott Hughes markClass = self.doc_.markClasses.get(name) 705*e1fe3e4aSElliott Hughes if markClass is None: 706*e1fe3e4aSElliott Hughes markClass = self.ast.MarkClass(name) 707*e1fe3e4aSElliott Hughes self.doc_.markClasses[name] = markClass 708*e1fe3e4aSElliott Hughes self.glyphclasses_.define(name, markClass) 709*e1fe3e4aSElliott Hughes mcdef = self.ast.MarkClassDefinition( 710*e1fe3e4aSElliott Hughes markClass, anchor, glyphs, location=location 711*e1fe3e4aSElliott Hughes ) 712*e1fe3e4aSElliott Hughes markClass.addDefinition(mcdef) 713*e1fe3e4aSElliott Hughes return mcdef 714*e1fe3e4aSElliott Hughes 715*e1fe3e4aSElliott Hughes def parse_position_(self, enumerated, vertical): 716*e1fe3e4aSElliott Hughes assert self.cur_token_ in {"position", "pos"} 717*e1fe3e4aSElliott Hughes if self.next_token_ == "cursive": # GPOS type 3 718*e1fe3e4aSElliott Hughes return self.parse_position_cursive_(enumerated, vertical) 719*e1fe3e4aSElliott Hughes elif self.next_token_ == "base": # GPOS type 4 720*e1fe3e4aSElliott Hughes return self.parse_position_base_(enumerated, vertical) 721*e1fe3e4aSElliott Hughes elif self.next_token_ == "ligature": # GPOS type 5 722*e1fe3e4aSElliott Hughes return self.parse_position_ligature_(enumerated, vertical) 723*e1fe3e4aSElliott Hughes elif self.next_token_ == "mark": # GPOS type 6 724*e1fe3e4aSElliott Hughes return self.parse_position_mark_(enumerated, vertical) 725*e1fe3e4aSElliott Hughes 726*e1fe3e4aSElliott Hughes location = self.cur_token_location_ 727*e1fe3e4aSElliott Hughes prefix, glyphs, lookups, values, suffix, hasMarks = self.parse_glyph_pattern_( 728*e1fe3e4aSElliott Hughes vertical 729*e1fe3e4aSElliott Hughes ) 730*e1fe3e4aSElliott Hughes self.expect_symbol_(";") 731*e1fe3e4aSElliott Hughes 732*e1fe3e4aSElliott Hughes if any(lookups): 733*e1fe3e4aSElliott Hughes # GPOS type 8: Chaining contextual positioning; explicit lookups 734*e1fe3e4aSElliott Hughes if any(values): 735*e1fe3e4aSElliott Hughes raise FeatureLibError( 736*e1fe3e4aSElliott Hughes 'If "lookup" is present, no values must be specified', location 737*e1fe3e4aSElliott Hughes ) 738*e1fe3e4aSElliott Hughes return self.ast.ChainContextPosStatement( 739*e1fe3e4aSElliott Hughes prefix, glyphs, suffix, lookups, location=location 740*e1fe3e4aSElliott Hughes ) 741*e1fe3e4aSElliott Hughes 742*e1fe3e4aSElliott Hughes # Pair positioning, format A: "pos V 10 A -10;" 743*e1fe3e4aSElliott Hughes # Pair positioning, format B: "pos V A -20;" 744*e1fe3e4aSElliott Hughes if not prefix and not suffix and len(glyphs) == 2 and not hasMarks: 745*e1fe3e4aSElliott Hughes if values[0] is None: # Format B: "pos V A -20;" 746*e1fe3e4aSElliott Hughes values.reverse() 747*e1fe3e4aSElliott Hughes return self.ast.PairPosStatement( 748*e1fe3e4aSElliott Hughes glyphs[0], 749*e1fe3e4aSElliott Hughes values[0], 750*e1fe3e4aSElliott Hughes glyphs[1], 751*e1fe3e4aSElliott Hughes values[1], 752*e1fe3e4aSElliott Hughes enumerated=enumerated, 753*e1fe3e4aSElliott Hughes location=location, 754*e1fe3e4aSElliott Hughes ) 755*e1fe3e4aSElliott Hughes 756*e1fe3e4aSElliott Hughes if enumerated: 757*e1fe3e4aSElliott Hughes raise FeatureLibError( 758*e1fe3e4aSElliott Hughes '"enumerate" is only allowed with pair positionings', location 759*e1fe3e4aSElliott Hughes ) 760*e1fe3e4aSElliott Hughes return self.ast.SinglePosStatement( 761*e1fe3e4aSElliott Hughes list(zip(glyphs, values)), 762*e1fe3e4aSElliott Hughes prefix, 763*e1fe3e4aSElliott Hughes suffix, 764*e1fe3e4aSElliott Hughes forceChain=hasMarks, 765*e1fe3e4aSElliott Hughes location=location, 766*e1fe3e4aSElliott Hughes ) 767*e1fe3e4aSElliott Hughes 768*e1fe3e4aSElliott Hughes def parse_position_cursive_(self, enumerated, vertical): 769*e1fe3e4aSElliott Hughes location = self.cur_token_location_ 770*e1fe3e4aSElliott Hughes self.expect_keyword_("cursive") 771*e1fe3e4aSElliott Hughes if enumerated: 772*e1fe3e4aSElliott Hughes raise FeatureLibError( 773*e1fe3e4aSElliott Hughes '"enumerate" is not allowed with ' "cursive attachment positioning", 774*e1fe3e4aSElliott Hughes location, 775*e1fe3e4aSElliott Hughes ) 776*e1fe3e4aSElliott Hughes glyphclass = self.parse_glyphclass_(accept_glyphname=True) 777*e1fe3e4aSElliott Hughes entryAnchor = self.parse_anchor_() 778*e1fe3e4aSElliott Hughes exitAnchor = self.parse_anchor_() 779*e1fe3e4aSElliott Hughes self.expect_symbol_(";") 780*e1fe3e4aSElliott Hughes return self.ast.CursivePosStatement( 781*e1fe3e4aSElliott Hughes glyphclass, entryAnchor, exitAnchor, location=location 782*e1fe3e4aSElliott Hughes ) 783*e1fe3e4aSElliott Hughes 784*e1fe3e4aSElliott Hughes def parse_position_base_(self, enumerated, vertical): 785*e1fe3e4aSElliott Hughes location = self.cur_token_location_ 786*e1fe3e4aSElliott Hughes self.expect_keyword_("base") 787*e1fe3e4aSElliott Hughes if enumerated: 788*e1fe3e4aSElliott Hughes raise FeatureLibError( 789*e1fe3e4aSElliott Hughes '"enumerate" is not allowed with ' 790*e1fe3e4aSElliott Hughes "mark-to-base attachment positioning", 791*e1fe3e4aSElliott Hughes location, 792*e1fe3e4aSElliott Hughes ) 793*e1fe3e4aSElliott Hughes base = self.parse_glyphclass_(accept_glyphname=True) 794*e1fe3e4aSElliott Hughes marks = self.parse_anchor_marks_() 795*e1fe3e4aSElliott Hughes self.expect_symbol_(";") 796*e1fe3e4aSElliott Hughes return self.ast.MarkBasePosStatement(base, marks, location=location) 797*e1fe3e4aSElliott Hughes 798*e1fe3e4aSElliott Hughes def parse_position_ligature_(self, enumerated, vertical): 799*e1fe3e4aSElliott Hughes location = self.cur_token_location_ 800*e1fe3e4aSElliott Hughes self.expect_keyword_("ligature") 801*e1fe3e4aSElliott Hughes if enumerated: 802*e1fe3e4aSElliott Hughes raise FeatureLibError( 803*e1fe3e4aSElliott Hughes '"enumerate" is not allowed with ' 804*e1fe3e4aSElliott Hughes "mark-to-ligature attachment positioning", 805*e1fe3e4aSElliott Hughes location, 806*e1fe3e4aSElliott Hughes ) 807*e1fe3e4aSElliott Hughes ligatures = self.parse_glyphclass_(accept_glyphname=True) 808*e1fe3e4aSElliott Hughes marks = [self.parse_anchor_marks_()] 809*e1fe3e4aSElliott Hughes while self.next_token_ == "ligComponent": 810*e1fe3e4aSElliott Hughes self.expect_keyword_("ligComponent") 811*e1fe3e4aSElliott Hughes marks.append(self.parse_anchor_marks_()) 812*e1fe3e4aSElliott Hughes self.expect_symbol_(";") 813*e1fe3e4aSElliott Hughes return self.ast.MarkLigPosStatement(ligatures, marks, location=location) 814*e1fe3e4aSElliott Hughes 815*e1fe3e4aSElliott Hughes def parse_position_mark_(self, enumerated, vertical): 816*e1fe3e4aSElliott Hughes location = self.cur_token_location_ 817*e1fe3e4aSElliott Hughes self.expect_keyword_("mark") 818*e1fe3e4aSElliott Hughes if enumerated: 819*e1fe3e4aSElliott Hughes raise FeatureLibError( 820*e1fe3e4aSElliott Hughes '"enumerate" is not allowed with ' 821*e1fe3e4aSElliott Hughes "mark-to-mark attachment positioning", 822*e1fe3e4aSElliott Hughes location, 823*e1fe3e4aSElliott Hughes ) 824*e1fe3e4aSElliott Hughes baseMarks = self.parse_glyphclass_(accept_glyphname=True) 825*e1fe3e4aSElliott Hughes marks = self.parse_anchor_marks_() 826*e1fe3e4aSElliott Hughes self.expect_symbol_(";") 827*e1fe3e4aSElliott Hughes return self.ast.MarkMarkPosStatement(baseMarks, marks, location=location) 828*e1fe3e4aSElliott Hughes 829*e1fe3e4aSElliott Hughes def parse_script_(self): 830*e1fe3e4aSElliott Hughes assert self.is_cur_keyword_("script") 831*e1fe3e4aSElliott Hughes location, script = self.cur_token_location_, self.expect_script_tag_() 832*e1fe3e4aSElliott Hughes self.expect_symbol_(";") 833*e1fe3e4aSElliott Hughes return self.ast.ScriptStatement(script, location=location) 834*e1fe3e4aSElliott Hughes 835*e1fe3e4aSElliott Hughes def parse_substitute_(self): 836*e1fe3e4aSElliott Hughes assert self.cur_token_ in {"substitute", "sub", "reversesub", "rsub"} 837*e1fe3e4aSElliott Hughes location = self.cur_token_location_ 838*e1fe3e4aSElliott Hughes reverse = self.cur_token_ in {"reversesub", "rsub"} 839*e1fe3e4aSElliott Hughes ( 840*e1fe3e4aSElliott Hughes old_prefix, 841*e1fe3e4aSElliott Hughes old, 842*e1fe3e4aSElliott Hughes lookups, 843*e1fe3e4aSElliott Hughes values, 844*e1fe3e4aSElliott Hughes old_suffix, 845*e1fe3e4aSElliott Hughes hasMarks, 846*e1fe3e4aSElliott Hughes ) = self.parse_glyph_pattern_(vertical=False) 847*e1fe3e4aSElliott Hughes if any(values): 848*e1fe3e4aSElliott Hughes raise FeatureLibError( 849*e1fe3e4aSElliott Hughes "Substitution statements cannot contain values", location 850*e1fe3e4aSElliott Hughes ) 851*e1fe3e4aSElliott Hughes new = [] 852*e1fe3e4aSElliott Hughes if self.next_token_ == "by": 853*e1fe3e4aSElliott Hughes keyword = self.expect_keyword_("by") 854*e1fe3e4aSElliott Hughes while self.next_token_ != ";": 855*e1fe3e4aSElliott Hughes gc = self.parse_glyphclass_(accept_glyphname=True, accept_null=True) 856*e1fe3e4aSElliott Hughes new.append(gc) 857*e1fe3e4aSElliott Hughes elif self.next_token_ == "from": 858*e1fe3e4aSElliott Hughes keyword = self.expect_keyword_("from") 859*e1fe3e4aSElliott Hughes new = [self.parse_glyphclass_(accept_glyphname=False)] 860*e1fe3e4aSElliott Hughes else: 861*e1fe3e4aSElliott Hughes keyword = None 862*e1fe3e4aSElliott Hughes self.expect_symbol_(";") 863*e1fe3e4aSElliott Hughes if len(new) == 0 and not any(lookups): 864*e1fe3e4aSElliott Hughes raise FeatureLibError( 865*e1fe3e4aSElliott Hughes 'Expected "by", "from" or explicit lookup references', 866*e1fe3e4aSElliott Hughes self.cur_token_location_, 867*e1fe3e4aSElliott Hughes ) 868*e1fe3e4aSElliott Hughes 869*e1fe3e4aSElliott Hughes # GSUB lookup type 3: Alternate substitution. 870*e1fe3e4aSElliott Hughes # Format: "substitute a from [a.1 a.2 a.3];" 871*e1fe3e4aSElliott Hughes if keyword == "from": 872*e1fe3e4aSElliott Hughes if reverse: 873*e1fe3e4aSElliott Hughes raise FeatureLibError( 874*e1fe3e4aSElliott Hughes 'Reverse chaining substitutions do not support "from"', location 875*e1fe3e4aSElliott Hughes ) 876*e1fe3e4aSElliott Hughes if len(old) != 1 or len(old[0].glyphSet()) != 1: 877*e1fe3e4aSElliott Hughes raise FeatureLibError('Expected a single glyph before "from"', location) 878*e1fe3e4aSElliott Hughes if len(new) != 1: 879*e1fe3e4aSElliott Hughes raise FeatureLibError( 880*e1fe3e4aSElliott Hughes 'Expected a single glyphclass after "from"', location 881*e1fe3e4aSElliott Hughes ) 882*e1fe3e4aSElliott Hughes return self.ast.AlternateSubstStatement( 883*e1fe3e4aSElliott Hughes old_prefix, old[0], old_suffix, new[0], location=location 884*e1fe3e4aSElliott Hughes ) 885*e1fe3e4aSElliott Hughes 886*e1fe3e4aSElliott Hughes num_lookups = len([l for l in lookups if l is not None]) 887*e1fe3e4aSElliott Hughes 888*e1fe3e4aSElliott Hughes is_deletion = False 889*e1fe3e4aSElliott Hughes if len(new) == 1 and isinstance(new[0], ast.NullGlyph): 890*e1fe3e4aSElliott Hughes new = [] # Deletion 891*e1fe3e4aSElliott Hughes is_deletion = True 892*e1fe3e4aSElliott Hughes 893*e1fe3e4aSElliott Hughes # GSUB lookup type 1: Single substitution. 894*e1fe3e4aSElliott Hughes # Format A: "substitute a by a.sc;" 895*e1fe3e4aSElliott Hughes # Format B: "substitute [one.fitted one.oldstyle] by one;" 896*e1fe3e4aSElliott Hughes # Format C: "substitute [a-d] by [A.sc-D.sc];" 897*e1fe3e4aSElliott Hughes if not reverse and len(old) == 1 and len(new) == 1 and num_lookups == 0: 898*e1fe3e4aSElliott Hughes glyphs = list(old[0].glyphSet()) 899*e1fe3e4aSElliott Hughes replacements = list(new[0].glyphSet()) 900*e1fe3e4aSElliott Hughes if len(replacements) == 1: 901*e1fe3e4aSElliott Hughes replacements = replacements * len(glyphs) 902*e1fe3e4aSElliott Hughes if len(glyphs) != len(replacements): 903*e1fe3e4aSElliott Hughes raise FeatureLibError( 904*e1fe3e4aSElliott Hughes 'Expected a glyph class with %d elements after "by", ' 905*e1fe3e4aSElliott Hughes "but found a glyph class with %d elements" 906*e1fe3e4aSElliott Hughes % (len(glyphs), len(replacements)), 907*e1fe3e4aSElliott Hughes location, 908*e1fe3e4aSElliott Hughes ) 909*e1fe3e4aSElliott Hughes return self.ast.SingleSubstStatement( 910*e1fe3e4aSElliott Hughes old, new, old_prefix, old_suffix, forceChain=hasMarks, location=location 911*e1fe3e4aSElliott Hughes ) 912*e1fe3e4aSElliott Hughes 913*e1fe3e4aSElliott Hughes # Glyph deletion, built as GSUB lookup type 2: Multiple substitution 914*e1fe3e4aSElliott Hughes # with empty replacement. 915*e1fe3e4aSElliott Hughes if is_deletion and len(old) == 1 and num_lookups == 0: 916*e1fe3e4aSElliott Hughes return self.ast.MultipleSubstStatement( 917*e1fe3e4aSElliott Hughes old_prefix, 918*e1fe3e4aSElliott Hughes old[0], 919*e1fe3e4aSElliott Hughes old_suffix, 920*e1fe3e4aSElliott Hughes (), 921*e1fe3e4aSElliott Hughes forceChain=hasMarks, 922*e1fe3e4aSElliott Hughes location=location, 923*e1fe3e4aSElliott Hughes ) 924*e1fe3e4aSElliott Hughes 925*e1fe3e4aSElliott Hughes # GSUB lookup type 2: Multiple substitution. 926*e1fe3e4aSElliott Hughes # Format: "substitute f_f_i by f f i;" 927*e1fe3e4aSElliott Hughes # 928*e1fe3e4aSElliott Hughes # GlyphsApp introduces two additional formats: 929*e1fe3e4aSElliott Hughes # Format 1: "substitute [f_i f_l] by [f f] [i l];" 930*e1fe3e4aSElliott Hughes # Format 2: "substitute [f_i f_l] by f [i l];" 931*e1fe3e4aSElliott Hughes # http://handbook.glyphsapp.com/en/layout/multiple-substitution-with-classes/ 932*e1fe3e4aSElliott Hughes if not reverse and len(old) == 1 and len(new) > 1 and num_lookups == 0: 933*e1fe3e4aSElliott Hughes count = len(old[0].glyphSet()) 934*e1fe3e4aSElliott Hughes for n in new: 935*e1fe3e4aSElliott Hughes if not list(n.glyphSet()): 936*e1fe3e4aSElliott Hughes raise FeatureLibError("Empty class in replacement", location) 937*e1fe3e4aSElliott Hughes if len(n.glyphSet()) != 1 and len(n.glyphSet()) != count: 938*e1fe3e4aSElliott Hughes raise FeatureLibError( 939*e1fe3e4aSElliott Hughes f'Expected a glyph class with 1 or {count} elements after "by", ' 940*e1fe3e4aSElliott Hughes f"but found a glyph class with {len(n.glyphSet())} elements", 941*e1fe3e4aSElliott Hughes location, 942*e1fe3e4aSElliott Hughes ) 943*e1fe3e4aSElliott Hughes return self.ast.MultipleSubstStatement( 944*e1fe3e4aSElliott Hughes old_prefix, 945*e1fe3e4aSElliott Hughes old[0], 946*e1fe3e4aSElliott Hughes old_suffix, 947*e1fe3e4aSElliott Hughes new, 948*e1fe3e4aSElliott Hughes forceChain=hasMarks, 949*e1fe3e4aSElliott Hughes location=location, 950*e1fe3e4aSElliott Hughes ) 951*e1fe3e4aSElliott Hughes 952*e1fe3e4aSElliott Hughes # GSUB lookup type 4: Ligature substitution. 953*e1fe3e4aSElliott Hughes # Format: "substitute f f i by f_f_i;" 954*e1fe3e4aSElliott Hughes if ( 955*e1fe3e4aSElliott Hughes not reverse 956*e1fe3e4aSElliott Hughes and len(old) > 1 957*e1fe3e4aSElliott Hughes and len(new) == 1 958*e1fe3e4aSElliott Hughes and len(new[0].glyphSet()) == 1 959*e1fe3e4aSElliott Hughes and num_lookups == 0 960*e1fe3e4aSElliott Hughes ): 961*e1fe3e4aSElliott Hughes return self.ast.LigatureSubstStatement( 962*e1fe3e4aSElliott Hughes old_prefix, 963*e1fe3e4aSElliott Hughes old, 964*e1fe3e4aSElliott Hughes old_suffix, 965*e1fe3e4aSElliott Hughes list(new[0].glyphSet())[0], 966*e1fe3e4aSElliott Hughes forceChain=hasMarks, 967*e1fe3e4aSElliott Hughes location=location, 968*e1fe3e4aSElliott Hughes ) 969*e1fe3e4aSElliott Hughes 970*e1fe3e4aSElliott Hughes # GSUB lookup type 8: Reverse chaining substitution. 971*e1fe3e4aSElliott Hughes if reverse: 972*e1fe3e4aSElliott Hughes if len(old) != 1: 973*e1fe3e4aSElliott Hughes raise FeatureLibError( 974*e1fe3e4aSElliott Hughes "In reverse chaining single substitutions, " 975*e1fe3e4aSElliott Hughes "only a single glyph or glyph class can be replaced", 976*e1fe3e4aSElliott Hughes location, 977*e1fe3e4aSElliott Hughes ) 978*e1fe3e4aSElliott Hughes if len(new) != 1: 979*e1fe3e4aSElliott Hughes raise FeatureLibError( 980*e1fe3e4aSElliott Hughes "In reverse chaining single substitutions, " 981*e1fe3e4aSElliott Hughes 'the replacement (after "by") must be a single glyph ' 982*e1fe3e4aSElliott Hughes "or glyph class", 983*e1fe3e4aSElliott Hughes location, 984*e1fe3e4aSElliott Hughes ) 985*e1fe3e4aSElliott Hughes if num_lookups != 0: 986*e1fe3e4aSElliott Hughes raise FeatureLibError( 987*e1fe3e4aSElliott Hughes "Reverse chaining substitutions cannot call named lookups", location 988*e1fe3e4aSElliott Hughes ) 989*e1fe3e4aSElliott Hughes glyphs = sorted(list(old[0].glyphSet())) 990*e1fe3e4aSElliott Hughes replacements = sorted(list(new[0].glyphSet())) 991*e1fe3e4aSElliott Hughes if len(replacements) == 1: 992*e1fe3e4aSElliott Hughes replacements = replacements * len(glyphs) 993*e1fe3e4aSElliott Hughes if len(glyphs) != len(replacements): 994*e1fe3e4aSElliott Hughes raise FeatureLibError( 995*e1fe3e4aSElliott Hughes 'Expected a glyph class with %d elements after "by", ' 996*e1fe3e4aSElliott Hughes "but found a glyph class with %d elements" 997*e1fe3e4aSElliott Hughes % (len(glyphs), len(replacements)), 998*e1fe3e4aSElliott Hughes location, 999*e1fe3e4aSElliott Hughes ) 1000*e1fe3e4aSElliott Hughes return self.ast.ReverseChainSingleSubstStatement( 1001*e1fe3e4aSElliott Hughes old_prefix, old_suffix, old, new, location=location 1002*e1fe3e4aSElliott Hughes ) 1003*e1fe3e4aSElliott Hughes 1004*e1fe3e4aSElliott Hughes if len(old) > 1 and len(new) > 1: 1005*e1fe3e4aSElliott Hughes raise FeatureLibError( 1006*e1fe3e4aSElliott Hughes "Direct substitution of multiple glyphs by multiple glyphs " 1007*e1fe3e4aSElliott Hughes "is not supported", 1008*e1fe3e4aSElliott Hughes location, 1009*e1fe3e4aSElliott Hughes ) 1010*e1fe3e4aSElliott Hughes 1011*e1fe3e4aSElliott Hughes # If there are remaining glyphs to parse, this is an invalid GSUB statement 1012*e1fe3e4aSElliott Hughes if len(new) != 0 or is_deletion: 1013*e1fe3e4aSElliott Hughes raise FeatureLibError("Invalid substitution statement", location) 1014*e1fe3e4aSElliott Hughes 1015*e1fe3e4aSElliott Hughes # GSUB lookup type 6: Chaining contextual substitution. 1016*e1fe3e4aSElliott Hughes rule = self.ast.ChainContextSubstStatement( 1017*e1fe3e4aSElliott Hughes old_prefix, old, old_suffix, lookups, location=location 1018*e1fe3e4aSElliott Hughes ) 1019*e1fe3e4aSElliott Hughes return rule 1020*e1fe3e4aSElliott Hughes 1021*e1fe3e4aSElliott Hughes def parse_subtable_(self): 1022*e1fe3e4aSElliott Hughes assert self.is_cur_keyword_("subtable") 1023*e1fe3e4aSElliott Hughes location = self.cur_token_location_ 1024*e1fe3e4aSElliott Hughes self.expect_symbol_(";") 1025*e1fe3e4aSElliott Hughes return self.ast.SubtableStatement(location=location) 1026*e1fe3e4aSElliott Hughes 1027*e1fe3e4aSElliott Hughes def parse_size_parameters_(self): 1028*e1fe3e4aSElliott Hughes # Parses a ``parameters`` statement used in ``size`` features. See 1029*e1fe3e4aSElliott Hughes # `section 8.b <https://adobe-type-tools.github.io/afdko/OpenTypeFeatureFileSpecification.html#8.b>`_. 1030*e1fe3e4aSElliott Hughes assert self.is_cur_keyword_("parameters") 1031*e1fe3e4aSElliott Hughes location = self.cur_token_location_ 1032*e1fe3e4aSElliott Hughes DesignSize = self.expect_decipoint_() 1033*e1fe3e4aSElliott Hughes SubfamilyID = self.expect_number_() 1034*e1fe3e4aSElliott Hughes RangeStart = 0.0 1035*e1fe3e4aSElliott Hughes RangeEnd = 0.0 1036*e1fe3e4aSElliott Hughes if self.next_token_type_ in (Lexer.NUMBER, Lexer.FLOAT) or SubfamilyID != 0: 1037*e1fe3e4aSElliott Hughes RangeStart = self.expect_decipoint_() 1038*e1fe3e4aSElliott Hughes RangeEnd = self.expect_decipoint_() 1039*e1fe3e4aSElliott Hughes 1040*e1fe3e4aSElliott Hughes self.expect_symbol_(";") 1041*e1fe3e4aSElliott Hughes return self.ast.SizeParameters( 1042*e1fe3e4aSElliott Hughes DesignSize, SubfamilyID, RangeStart, RangeEnd, location=location 1043*e1fe3e4aSElliott Hughes ) 1044*e1fe3e4aSElliott Hughes 1045*e1fe3e4aSElliott Hughes def parse_size_menuname_(self): 1046*e1fe3e4aSElliott Hughes assert self.is_cur_keyword_("sizemenuname") 1047*e1fe3e4aSElliott Hughes location = self.cur_token_location_ 1048*e1fe3e4aSElliott Hughes platformID, platEncID, langID, string = self.parse_name_() 1049*e1fe3e4aSElliott Hughes return self.ast.FeatureNameStatement( 1050*e1fe3e4aSElliott Hughes "size", platformID, platEncID, langID, string, location=location 1051*e1fe3e4aSElliott Hughes ) 1052*e1fe3e4aSElliott Hughes 1053*e1fe3e4aSElliott Hughes def parse_table_(self): 1054*e1fe3e4aSElliott Hughes assert self.is_cur_keyword_("table") 1055*e1fe3e4aSElliott Hughes location, name = self.cur_token_location_, self.expect_tag_() 1056*e1fe3e4aSElliott Hughes table = self.ast.TableBlock(name, location=location) 1057*e1fe3e4aSElliott Hughes self.expect_symbol_("{") 1058*e1fe3e4aSElliott Hughes handler = { 1059*e1fe3e4aSElliott Hughes "GDEF": self.parse_table_GDEF_, 1060*e1fe3e4aSElliott Hughes "head": self.parse_table_head_, 1061*e1fe3e4aSElliott Hughes "hhea": self.parse_table_hhea_, 1062*e1fe3e4aSElliott Hughes "vhea": self.parse_table_vhea_, 1063*e1fe3e4aSElliott Hughes "name": self.parse_table_name_, 1064*e1fe3e4aSElliott Hughes "BASE": self.parse_table_BASE_, 1065*e1fe3e4aSElliott Hughes "OS/2": self.parse_table_OS_2_, 1066*e1fe3e4aSElliott Hughes "STAT": self.parse_table_STAT_, 1067*e1fe3e4aSElliott Hughes }.get(name) 1068*e1fe3e4aSElliott Hughes if handler: 1069*e1fe3e4aSElliott Hughes handler(table) 1070*e1fe3e4aSElliott Hughes else: 1071*e1fe3e4aSElliott Hughes raise FeatureLibError( 1072*e1fe3e4aSElliott Hughes '"table %s" is not supported' % name.strip(), location 1073*e1fe3e4aSElliott Hughes ) 1074*e1fe3e4aSElliott Hughes self.expect_symbol_("}") 1075*e1fe3e4aSElliott Hughes end_tag = self.expect_tag_() 1076*e1fe3e4aSElliott Hughes if end_tag != name: 1077*e1fe3e4aSElliott Hughes raise FeatureLibError( 1078*e1fe3e4aSElliott Hughes 'Expected "%s"' % name.strip(), self.cur_token_location_ 1079*e1fe3e4aSElliott Hughes ) 1080*e1fe3e4aSElliott Hughes self.expect_symbol_(";") 1081*e1fe3e4aSElliott Hughes return table 1082*e1fe3e4aSElliott Hughes 1083*e1fe3e4aSElliott Hughes def parse_table_GDEF_(self, table): 1084*e1fe3e4aSElliott Hughes statements = table.statements 1085*e1fe3e4aSElliott Hughes while self.next_token_ != "}" or self.cur_comments_: 1086*e1fe3e4aSElliott Hughes self.advance_lexer_(comments=True) 1087*e1fe3e4aSElliott Hughes if self.cur_token_type_ is Lexer.COMMENT: 1088*e1fe3e4aSElliott Hughes statements.append( 1089*e1fe3e4aSElliott Hughes self.ast.Comment(self.cur_token_, location=self.cur_token_location_) 1090*e1fe3e4aSElliott Hughes ) 1091*e1fe3e4aSElliott Hughes elif self.is_cur_keyword_("Attach"): 1092*e1fe3e4aSElliott Hughes statements.append(self.parse_attach_()) 1093*e1fe3e4aSElliott Hughes elif self.is_cur_keyword_("GlyphClassDef"): 1094*e1fe3e4aSElliott Hughes statements.append(self.parse_GlyphClassDef_()) 1095*e1fe3e4aSElliott Hughes elif self.is_cur_keyword_("LigatureCaretByIndex"): 1096*e1fe3e4aSElliott Hughes statements.append(self.parse_ligatureCaretByIndex_()) 1097*e1fe3e4aSElliott Hughes elif self.is_cur_keyword_("LigatureCaretByPos"): 1098*e1fe3e4aSElliott Hughes statements.append(self.parse_ligatureCaretByPos_()) 1099*e1fe3e4aSElliott Hughes elif self.cur_token_ == ";": 1100*e1fe3e4aSElliott Hughes continue 1101*e1fe3e4aSElliott Hughes else: 1102*e1fe3e4aSElliott Hughes raise FeatureLibError( 1103*e1fe3e4aSElliott Hughes "Expected Attach, LigatureCaretByIndex, " "or LigatureCaretByPos", 1104*e1fe3e4aSElliott Hughes self.cur_token_location_, 1105*e1fe3e4aSElliott Hughes ) 1106*e1fe3e4aSElliott Hughes 1107*e1fe3e4aSElliott Hughes def parse_table_head_(self, table): 1108*e1fe3e4aSElliott Hughes statements = table.statements 1109*e1fe3e4aSElliott Hughes while self.next_token_ != "}" or self.cur_comments_: 1110*e1fe3e4aSElliott Hughes self.advance_lexer_(comments=True) 1111*e1fe3e4aSElliott Hughes if self.cur_token_type_ is Lexer.COMMENT: 1112*e1fe3e4aSElliott Hughes statements.append( 1113*e1fe3e4aSElliott Hughes self.ast.Comment(self.cur_token_, location=self.cur_token_location_) 1114*e1fe3e4aSElliott Hughes ) 1115*e1fe3e4aSElliott Hughes elif self.is_cur_keyword_("FontRevision"): 1116*e1fe3e4aSElliott Hughes statements.append(self.parse_FontRevision_()) 1117*e1fe3e4aSElliott Hughes elif self.cur_token_ == ";": 1118*e1fe3e4aSElliott Hughes continue 1119*e1fe3e4aSElliott Hughes else: 1120*e1fe3e4aSElliott Hughes raise FeatureLibError("Expected FontRevision", self.cur_token_location_) 1121*e1fe3e4aSElliott Hughes 1122*e1fe3e4aSElliott Hughes def parse_table_hhea_(self, table): 1123*e1fe3e4aSElliott Hughes statements = table.statements 1124*e1fe3e4aSElliott Hughes fields = ("CaretOffset", "Ascender", "Descender", "LineGap") 1125*e1fe3e4aSElliott Hughes while self.next_token_ != "}" or self.cur_comments_: 1126*e1fe3e4aSElliott Hughes self.advance_lexer_(comments=True) 1127*e1fe3e4aSElliott Hughes if self.cur_token_type_ is Lexer.COMMENT: 1128*e1fe3e4aSElliott Hughes statements.append( 1129*e1fe3e4aSElliott Hughes self.ast.Comment(self.cur_token_, location=self.cur_token_location_) 1130*e1fe3e4aSElliott Hughes ) 1131*e1fe3e4aSElliott Hughes elif self.cur_token_type_ is Lexer.NAME and self.cur_token_ in fields: 1132*e1fe3e4aSElliott Hughes key = self.cur_token_.lower() 1133*e1fe3e4aSElliott Hughes value = self.expect_number_() 1134*e1fe3e4aSElliott Hughes statements.append( 1135*e1fe3e4aSElliott Hughes self.ast.HheaField(key, value, location=self.cur_token_location_) 1136*e1fe3e4aSElliott Hughes ) 1137*e1fe3e4aSElliott Hughes if self.next_token_ != ";": 1138*e1fe3e4aSElliott Hughes raise FeatureLibError( 1139*e1fe3e4aSElliott Hughes "Incomplete statement", self.next_token_location_ 1140*e1fe3e4aSElliott Hughes ) 1141*e1fe3e4aSElliott Hughes elif self.cur_token_ == ";": 1142*e1fe3e4aSElliott Hughes continue 1143*e1fe3e4aSElliott Hughes else: 1144*e1fe3e4aSElliott Hughes raise FeatureLibError( 1145*e1fe3e4aSElliott Hughes "Expected CaretOffset, Ascender, " "Descender or LineGap", 1146*e1fe3e4aSElliott Hughes self.cur_token_location_, 1147*e1fe3e4aSElliott Hughes ) 1148*e1fe3e4aSElliott Hughes 1149*e1fe3e4aSElliott Hughes def parse_table_vhea_(self, table): 1150*e1fe3e4aSElliott Hughes statements = table.statements 1151*e1fe3e4aSElliott Hughes fields = ("VertTypoAscender", "VertTypoDescender", "VertTypoLineGap") 1152*e1fe3e4aSElliott Hughes while self.next_token_ != "}" or self.cur_comments_: 1153*e1fe3e4aSElliott Hughes self.advance_lexer_(comments=True) 1154*e1fe3e4aSElliott Hughes if self.cur_token_type_ is Lexer.COMMENT: 1155*e1fe3e4aSElliott Hughes statements.append( 1156*e1fe3e4aSElliott Hughes self.ast.Comment(self.cur_token_, location=self.cur_token_location_) 1157*e1fe3e4aSElliott Hughes ) 1158*e1fe3e4aSElliott Hughes elif self.cur_token_type_ is Lexer.NAME and self.cur_token_ in fields: 1159*e1fe3e4aSElliott Hughes key = self.cur_token_.lower() 1160*e1fe3e4aSElliott Hughes value = self.expect_number_() 1161*e1fe3e4aSElliott Hughes statements.append( 1162*e1fe3e4aSElliott Hughes self.ast.VheaField(key, value, location=self.cur_token_location_) 1163*e1fe3e4aSElliott Hughes ) 1164*e1fe3e4aSElliott Hughes if self.next_token_ != ";": 1165*e1fe3e4aSElliott Hughes raise FeatureLibError( 1166*e1fe3e4aSElliott Hughes "Incomplete statement", self.next_token_location_ 1167*e1fe3e4aSElliott Hughes ) 1168*e1fe3e4aSElliott Hughes elif self.cur_token_ == ";": 1169*e1fe3e4aSElliott Hughes continue 1170*e1fe3e4aSElliott Hughes else: 1171*e1fe3e4aSElliott Hughes raise FeatureLibError( 1172*e1fe3e4aSElliott Hughes "Expected VertTypoAscender, " 1173*e1fe3e4aSElliott Hughes "VertTypoDescender or VertTypoLineGap", 1174*e1fe3e4aSElliott Hughes self.cur_token_location_, 1175*e1fe3e4aSElliott Hughes ) 1176*e1fe3e4aSElliott Hughes 1177*e1fe3e4aSElliott Hughes def parse_table_name_(self, table): 1178*e1fe3e4aSElliott Hughes statements = table.statements 1179*e1fe3e4aSElliott Hughes while self.next_token_ != "}" or self.cur_comments_: 1180*e1fe3e4aSElliott Hughes self.advance_lexer_(comments=True) 1181*e1fe3e4aSElliott Hughes if self.cur_token_type_ is Lexer.COMMENT: 1182*e1fe3e4aSElliott Hughes statements.append( 1183*e1fe3e4aSElliott Hughes self.ast.Comment(self.cur_token_, location=self.cur_token_location_) 1184*e1fe3e4aSElliott Hughes ) 1185*e1fe3e4aSElliott Hughes elif self.is_cur_keyword_("nameid"): 1186*e1fe3e4aSElliott Hughes statement = self.parse_nameid_() 1187*e1fe3e4aSElliott Hughes if statement: 1188*e1fe3e4aSElliott Hughes statements.append(statement) 1189*e1fe3e4aSElliott Hughes elif self.cur_token_ == ";": 1190*e1fe3e4aSElliott Hughes continue 1191*e1fe3e4aSElliott Hughes else: 1192*e1fe3e4aSElliott Hughes raise FeatureLibError("Expected nameid", self.cur_token_location_) 1193*e1fe3e4aSElliott Hughes 1194*e1fe3e4aSElliott Hughes def parse_name_(self): 1195*e1fe3e4aSElliott Hughes """Parses a name record. See `section 9.e <https://adobe-type-tools.github.io/afdko/OpenTypeFeatureFileSpecification.html#9.e>`_.""" 1196*e1fe3e4aSElliott Hughes platEncID = None 1197*e1fe3e4aSElliott Hughes langID = None 1198*e1fe3e4aSElliott Hughes if self.next_token_type_ in Lexer.NUMBERS: 1199*e1fe3e4aSElliott Hughes platformID = self.expect_any_number_() 1200*e1fe3e4aSElliott Hughes location = self.cur_token_location_ 1201*e1fe3e4aSElliott Hughes if platformID not in (1, 3): 1202*e1fe3e4aSElliott Hughes raise FeatureLibError("Expected platform id 1 or 3", location) 1203*e1fe3e4aSElliott Hughes if self.next_token_type_ in Lexer.NUMBERS: 1204*e1fe3e4aSElliott Hughes platEncID = self.expect_any_number_() 1205*e1fe3e4aSElliott Hughes langID = self.expect_any_number_() 1206*e1fe3e4aSElliott Hughes else: 1207*e1fe3e4aSElliott Hughes platformID = 3 1208*e1fe3e4aSElliott Hughes location = self.cur_token_location_ 1209*e1fe3e4aSElliott Hughes 1210*e1fe3e4aSElliott Hughes if platformID == 1: # Macintosh 1211*e1fe3e4aSElliott Hughes platEncID = platEncID or 0 # Roman 1212*e1fe3e4aSElliott Hughes langID = langID or 0 # English 1213*e1fe3e4aSElliott Hughes else: # 3, Windows 1214*e1fe3e4aSElliott Hughes platEncID = platEncID or 1 # Unicode 1215*e1fe3e4aSElliott Hughes langID = langID or 0x0409 # English 1216*e1fe3e4aSElliott Hughes 1217*e1fe3e4aSElliott Hughes string = self.expect_string_() 1218*e1fe3e4aSElliott Hughes self.expect_symbol_(";") 1219*e1fe3e4aSElliott Hughes 1220*e1fe3e4aSElliott Hughes encoding = getEncoding(platformID, platEncID, langID) 1221*e1fe3e4aSElliott Hughes if encoding is None: 1222*e1fe3e4aSElliott Hughes raise FeatureLibError("Unsupported encoding", location) 1223*e1fe3e4aSElliott Hughes unescaped = self.unescape_string_(string, encoding) 1224*e1fe3e4aSElliott Hughes return platformID, platEncID, langID, unescaped 1225*e1fe3e4aSElliott Hughes 1226*e1fe3e4aSElliott Hughes def parse_stat_name_(self): 1227*e1fe3e4aSElliott Hughes platEncID = None 1228*e1fe3e4aSElliott Hughes langID = None 1229*e1fe3e4aSElliott Hughes if self.next_token_type_ in Lexer.NUMBERS: 1230*e1fe3e4aSElliott Hughes platformID = self.expect_any_number_() 1231*e1fe3e4aSElliott Hughes location = self.cur_token_location_ 1232*e1fe3e4aSElliott Hughes if platformID not in (1, 3): 1233*e1fe3e4aSElliott Hughes raise FeatureLibError("Expected platform id 1 or 3", location) 1234*e1fe3e4aSElliott Hughes if self.next_token_type_ in Lexer.NUMBERS: 1235*e1fe3e4aSElliott Hughes platEncID = self.expect_any_number_() 1236*e1fe3e4aSElliott Hughes langID = self.expect_any_number_() 1237*e1fe3e4aSElliott Hughes else: 1238*e1fe3e4aSElliott Hughes platformID = 3 1239*e1fe3e4aSElliott Hughes location = self.cur_token_location_ 1240*e1fe3e4aSElliott Hughes 1241*e1fe3e4aSElliott Hughes if platformID == 1: # Macintosh 1242*e1fe3e4aSElliott Hughes platEncID = platEncID or 0 # Roman 1243*e1fe3e4aSElliott Hughes langID = langID or 0 # English 1244*e1fe3e4aSElliott Hughes else: # 3, Windows 1245*e1fe3e4aSElliott Hughes platEncID = platEncID or 1 # Unicode 1246*e1fe3e4aSElliott Hughes langID = langID or 0x0409 # English 1247*e1fe3e4aSElliott Hughes 1248*e1fe3e4aSElliott Hughes string = self.expect_string_() 1249*e1fe3e4aSElliott Hughes encoding = getEncoding(platformID, platEncID, langID) 1250*e1fe3e4aSElliott Hughes if encoding is None: 1251*e1fe3e4aSElliott Hughes raise FeatureLibError("Unsupported encoding", location) 1252*e1fe3e4aSElliott Hughes unescaped = self.unescape_string_(string, encoding) 1253*e1fe3e4aSElliott Hughes return platformID, platEncID, langID, unescaped 1254*e1fe3e4aSElliott Hughes 1255*e1fe3e4aSElliott Hughes def parse_nameid_(self): 1256*e1fe3e4aSElliott Hughes assert self.cur_token_ == "nameid", self.cur_token_ 1257*e1fe3e4aSElliott Hughes location, nameID = self.cur_token_location_, self.expect_any_number_() 1258*e1fe3e4aSElliott Hughes if nameID > 32767: 1259*e1fe3e4aSElliott Hughes raise FeatureLibError( 1260*e1fe3e4aSElliott Hughes "Name id value cannot be greater than 32767", self.cur_token_location_ 1261*e1fe3e4aSElliott Hughes ) 1262*e1fe3e4aSElliott Hughes platformID, platEncID, langID, string = self.parse_name_() 1263*e1fe3e4aSElliott Hughes return self.ast.NameRecord( 1264*e1fe3e4aSElliott Hughes nameID, platformID, platEncID, langID, string, location=location 1265*e1fe3e4aSElliott Hughes ) 1266*e1fe3e4aSElliott Hughes 1267*e1fe3e4aSElliott Hughes def unescape_string_(self, string, encoding): 1268*e1fe3e4aSElliott Hughes if encoding == "utf_16_be": 1269*e1fe3e4aSElliott Hughes s = re.sub(r"\\[0-9a-fA-F]{4}", self.unescape_unichr_, string) 1270*e1fe3e4aSElliott Hughes else: 1271*e1fe3e4aSElliott Hughes unescape = lambda m: self.unescape_byte_(m, encoding) 1272*e1fe3e4aSElliott Hughes s = re.sub(r"\\[0-9a-fA-F]{2}", unescape, string) 1273*e1fe3e4aSElliott Hughes # We now have a Unicode string, but it might contain surrogate pairs. 1274*e1fe3e4aSElliott Hughes # We convert surrogates to actual Unicode by round-tripping through 1275*e1fe3e4aSElliott Hughes # Python's UTF-16 codec in a special mode. 1276*e1fe3e4aSElliott Hughes utf16 = tobytes(s, "utf_16_be", "surrogatepass") 1277*e1fe3e4aSElliott Hughes return tostr(utf16, "utf_16_be") 1278*e1fe3e4aSElliott Hughes 1279*e1fe3e4aSElliott Hughes @staticmethod 1280*e1fe3e4aSElliott Hughes def unescape_unichr_(match): 1281*e1fe3e4aSElliott Hughes n = match.group(0)[1:] 1282*e1fe3e4aSElliott Hughes return chr(int(n, 16)) 1283*e1fe3e4aSElliott Hughes 1284*e1fe3e4aSElliott Hughes @staticmethod 1285*e1fe3e4aSElliott Hughes def unescape_byte_(match, encoding): 1286*e1fe3e4aSElliott Hughes n = match.group(0)[1:] 1287*e1fe3e4aSElliott Hughes return bytechr(int(n, 16)).decode(encoding) 1288*e1fe3e4aSElliott Hughes 1289*e1fe3e4aSElliott Hughes def parse_table_BASE_(self, table): 1290*e1fe3e4aSElliott Hughes statements = table.statements 1291*e1fe3e4aSElliott Hughes while self.next_token_ != "}" or self.cur_comments_: 1292*e1fe3e4aSElliott Hughes self.advance_lexer_(comments=True) 1293*e1fe3e4aSElliott Hughes if self.cur_token_type_ is Lexer.COMMENT: 1294*e1fe3e4aSElliott Hughes statements.append( 1295*e1fe3e4aSElliott Hughes self.ast.Comment(self.cur_token_, location=self.cur_token_location_) 1296*e1fe3e4aSElliott Hughes ) 1297*e1fe3e4aSElliott Hughes elif self.is_cur_keyword_("HorizAxis.BaseTagList"): 1298*e1fe3e4aSElliott Hughes horiz_bases = self.parse_base_tag_list_() 1299*e1fe3e4aSElliott Hughes elif self.is_cur_keyword_("HorizAxis.BaseScriptList"): 1300*e1fe3e4aSElliott Hughes horiz_scripts = self.parse_base_script_list_(len(horiz_bases)) 1301*e1fe3e4aSElliott Hughes statements.append( 1302*e1fe3e4aSElliott Hughes self.ast.BaseAxis( 1303*e1fe3e4aSElliott Hughes horiz_bases, 1304*e1fe3e4aSElliott Hughes horiz_scripts, 1305*e1fe3e4aSElliott Hughes False, 1306*e1fe3e4aSElliott Hughes location=self.cur_token_location_, 1307*e1fe3e4aSElliott Hughes ) 1308*e1fe3e4aSElliott Hughes ) 1309*e1fe3e4aSElliott Hughes elif self.is_cur_keyword_("VertAxis.BaseTagList"): 1310*e1fe3e4aSElliott Hughes vert_bases = self.parse_base_tag_list_() 1311*e1fe3e4aSElliott Hughes elif self.is_cur_keyword_("VertAxis.BaseScriptList"): 1312*e1fe3e4aSElliott Hughes vert_scripts = self.parse_base_script_list_(len(vert_bases)) 1313*e1fe3e4aSElliott Hughes statements.append( 1314*e1fe3e4aSElliott Hughes self.ast.BaseAxis( 1315*e1fe3e4aSElliott Hughes vert_bases, 1316*e1fe3e4aSElliott Hughes vert_scripts, 1317*e1fe3e4aSElliott Hughes True, 1318*e1fe3e4aSElliott Hughes location=self.cur_token_location_, 1319*e1fe3e4aSElliott Hughes ) 1320*e1fe3e4aSElliott Hughes ) 1321*e1fe3e4aSElliott Hughes elif self.cur_token_ == ";": 1322*e1fe3e4aSElliott Hughes continue 1323*e1fe3e4aSElliott Hughes 1324*e1fe3e4aSElliott Hughes def parse_table_OS_2_(self, table): 1325*e1fe3e4aSElliott Hughes statements = table.statements 1326*e1fe3e4aSElliott Hughes numbers = ( 1327*e1fe3e4aSElliott Hughes "FSType", 1328*e1fe3e4aSElliott Hughes "TypoAscender", 1329*e1fe3e4aSElliott Hughes "TypoDescender", 1330*e1fe3e4aSElliott Hughes "TypoLineGap", 1331*e1fe3e4aSElliott Hughes "winAscent", 1332*e1fe3e4aSElliott Hughes "winDescent", 1333*e1fe3e4aSElliott Hughes "XHeight", 1334*e1fe3e4aSElliott Hughes "CapHeight", 1335*e1fe3e4aSElliott Hughes "WeightClass", 1336*e1fe3e4aSElliott Hughes "WidthClass", 1337*e1fe3e4aSElliott Hughes "LowerOpSize", 1338*e1fe3e4aSElliott Hughes "UpperOpSize", 1339*e1fe3e4aSElliott Hughes ) 1340*e1fe3e4aSElliott Hughes ranges = ("UnicodeRange", "CodePageRange") 1341*e1fe3e4aSElliott Hughes while self.next_token_ != "}" or self.cur_comments_: 1342*e1fe3e4aSElliott Hughes self.advance_lexer_(comments=True) 1343*e1fe3e4aSElliott Hughes if self.cur_token_type_ is Lexer.COMMENT: 1344*e1fe3e4aSElliott Hughes statements.append( 1345*e1fe3e4aSElliott Hughes self.ast.Comment(self.cur_token_, location=self.cur_token_location_) 1346*e1fe3e4aSElliott Hughes ) 1347*e1fe3e4aSElliott Hughes elif self.cur_token_type_ is Lexer.NAME: 1348*e1fe3e4aSElliott Hughes key = self.cur_token_.lower() 1349*e1fe3e4aSElliott Hughes value = None 1350*e1fe3e4aSElliott Hughes if self.cur_token_ in numbers: 1351*e1fe3e4aSElliott Hughes value = self.expect_number_() 1352*e1fe3e4aSElliott Hughes elif self.is_cur_keyword_("Panose"): 1353*e1fe3e4aSElliott Hughes value = [] 1354*e1fe3e4aSElliott Hughes for i in range(10): 1355*e1fe3e4aSElliott Hughes value.append(self.expect_number_()) 1356*e1fe3e4aSElliott Hughes elif self.cur_token_ in ranges: 1357*e1fe3e4aSElliott Hughes value = [] 1358*e1fe3e4aSElliott Hughes while self.next_token_ != ";": 1359*e1fe3e4aSElliott Hughes value.append(self.expect_number_()) 1360*e1fe3e4aSElliott Hughes elif self.is_cur_keyword_("Vendor"): 1361*e1fe3e4aSElliott Hughes value = self.expect_string_() 1362*e1fe3e4aSElliott Hughes statements.append( 1363*e1fe3e4aSElliott Hughes self.ast.OS2Field(key, value, location=self.cur_token_location_) 1364*e1fe3e4aSElliott Hughes ) 1365*e1fe3e4aSElliott Hughes elif self.cur_token_ == ";": 1366*e1fe3e4aSElliott Hughes continue 1367*e1fe3e4aSElliott Hughes 1368*e1fe3e4aSElliott Hughes def parse_STAT_ElidedFallbackName(self): 1369*e1fe3e4aSElliott Hughes assert self.is_cur_keyword_("ElidedFallbackName") 1370*e1fe3e4aSElliott Hughes self.expect_symbol_("{") 1371*e1fe3e4aSElliott Hughes names = [] 1372*e1fe3e4aSElliott Hughes while self.next_token_ != "}" or self.cur_comments_: 1373*e1fe3e4aSElliott Hughes self.advance_lexer_() 1374*e1fe3e4aSElliott Hughes if self.is_cur_keyword_("name"): 1375*e1fe3e4aSElliott Hughes platformID, platEncID, langID, string = self.parse_stat_name_() 1376*e1fe3e4aSElliott Hughes nameRecord = self.ast.STATNameStatement( 1377*e1fe3e4aSElliott Hughes "stat", 1378*e1fe3e4aSElliott Hughes platformID, 1379*e1fe3e4aSElliott Hughes platEncID, 1380*e1fe3e4aSElliott Hughes langID, 1381*e1fe3e4aSElliott Hughes string, 1382*e1fe3e4aSElliott Hughes location=self.cur_token_location_, 1383*e1fe3e4aSElliott Hughes ) 1384*e1fe3e4aSElliott Hughes names.append(nameRecord) 1385*e1fe3e4aSElliott Hughes else: 1386*e1fe3e4aSElliott Hughes if self.cur_token_ != ";": 1387*e1fe3e4aSElliott Hughes raise FeatureLibError( 1388*e1fe3e4aSElliott Hughes f"Unexpected token {self.cur_token_} " f"in ElidedFallbackName", 1389*e1fe3e4aSElliott Hughes self.cur_token_location_, 1390*e1fe3e4aSElliott Hughes ) 1391*e1fe3e4aSElliott Hughes self.expect_symbol_("}") 1392*e1fe3e4aSElliott Hughes if not names: 1393*e1fe3e4aSElliott Hughes raise FeatureLibError('Expected "name"', self.cur_token_location_) 1394*e1fe3e4aSElliott Hughes return names 1395*e1fe3e4aSElliott Hughes 1396*e1fe3e4aSElliott Hughes def parse_STAT_design_axis(self): 1397*e1fe3e4aSElliott Hughes assert self.is_cur_keyword_("DesignAxis") 1398*e1fe3e4aSElliott Hughes names = [] 1399*e1fe3e4aSElliott Hughes axisTag = self.expect_tag_() 1400*e1fe3e4aSElliott Hughes if ( 1401*e1fe3e4aSElliott Hughes axisTag not in ("ital", "opsz", "slnt", "wdth", "wght") 1402*e1fe3e4aSElliott Hughes and not axisTag.isupper() 1403*e1fe3e4aSElliott Hughes ): 1404*e1fe3e4aSElliott Hughes log.warning(f"Unregistered axis tag {axisTag} should be uppercase.") 1405*e1fe3e4aSElliott Hughes axisOrder = self.expect_number_() 1406*e1fe3e4aSElliott Hughes self.expect_symbol_("{") 1407*e1fe3e4aSElliott Hughes while self.next_token_ != "}" or self.cur_comments_: 1408*e1fe3e4aSElliott Hughes self.advance_lexer_() 1409*e1fe3e4aSElliott Hughes if self.cur_token_type_ is Lexer.COMMENT: 1410*e1fe3e4aSElliott Hughes continue 1411*e1fe3e4aSElliott Hughes elif self.is_cur_keyword_("name"): 1412*e1fe3e4aSElliott Hughes location = self.cur_token_location_ 1413*e1fe3e4aSElliott Hughes platformID, platEncID, langID, string = self.parse_stat_name_() 1414*e1fe3e4aSElliott Hughes name = self.ast.STATNameStatement( 1415*e1fe3e4aSElliott Hughes "stat", platformID, platEncID, langID, string, location=location 1416*e1fe3e4aSElliott Hughes ) 1417*e1fe3e4aSElliott Hughes names.append(name) 1418*e1fe3e4aSElliott Hughes elif self.cur_token_ == ";": 1419*e1fe3e4aSElliott Hughes continue 1420*e1fe3e4aSElliott Hughes else: 1421*e1fe3e4aSElliott Hughes raise FeatureLibError( 1422*e1fe3e4aSElliott Hughes f'Expected "name", got {self.cur_token_}', self.cur_token_location_ 1423*e1fe3e4aSElliott Hughes ) 1424*e1fe3e4aSElliott Hughes 1425*e1fe3e4aSElliott Hughes self.expect_symbol_("}") 1426*e1fe3e4aSElliott Hughes return self.ast.STATDesignAxisStatement( 1427*e1fe3e4aSElliott Hughes axisTag, axisOrder, names, self.cur_token_location_ 1428*e1fe3e4aSElliott Hughes ) 1429*e1fe3e4aSElliott Hughes 1430*e1fe3e4aSElliott Hughes def parse_STAT_axis_value_(self): 1431*e1fe3e4aSElliott Hughes assert self.is_cur_keyword_("AxisValue") 1432*e1fe3e4aSElliott Hughes self.expect_symbol_("{") 1433*e1fe3e4aSElliott Hughes locations = [] 1434*e1fe3e4aSElliott Hughes names = [] 1435*e1fe3e4aSElliott Hughes flags = 0 1436*e1fe3e4aSElliott Hughes while self.next_token_ != "}" or self.cur_comments_: 1437*e1fe3e4aSElliott Hughes self.advance_lexer_(comments=True) 1438*e1fe3e4aSElliott Hughes if self.cur_token_type_ is Lexer.COMMENT: 1439*e1fe3e4aSElliott Hughes continue 1440*e1fe3e4aSElliott Hughes elif self.is_cur_keyword_("name"): 1441*e1fe3e4aSElliott Hughes location = self.cur_token_location_ 1442*e1fe3e4aSElliott Hughes platformID, platEncID, langID, string = self.parse_stat_name_() 1443*e1fe3e4aSElliott Hughes name = self.ast.STATNameStatement( 1444*e1fe3e4aSElliott Hughes "stat", platformID, platEncID, langID, string, location=location 1445*e1fe3e4aSElliott Hughes ) 1446*e1fe3e4aSElliott Hughes names.append(name) 1447*e1fe3e4aSElliott Hughes elif self.is_cur_keyword_("location"): 1448*e1fe3e4aSElliott Hughes location = self.parse_STAT_location() 1449*e1fe3e4aSElliott Hughes locations.append(location) 1450*e1fe3e4aSElliott Hughes elif self.is_cur_keyword_("flag"): 1451*e1fe3e4aSElliott Hughes flags = self.expect_stat_flags() 1452*e1fe3e4aSElliott Hughes elif self.cur_token_ == ";": 1453*e1fe3e4aSElliott Hughes continue 1454*e1fe3e4aSElliott Hughes else: 1455*e1fe3e4aSElliott Hughes raise FeatureLibError( 1456*e1fe3e4aSElliott Hughes f"Unexpected token {self.cur_token_} " f"in AxisValue", 1457*e1fe3e4aSElliott Hughes self.cur_token_location_, 1458*e1fe3e4aSElliott Hughes ) 1459*e1fe3e4aSElliott Hughes self.expect_symbol_("}") 1460*e1fe3e4aSElliott Hughes if not names: 1461*e1fe3e4aSElliott Hughes raise FeatureLibError('Expected "Axis Name"', self.cur_token_location_) 1462*e1fe3e4aSElliott Hughes if not locations: 1463*e1fe3e4aSElliott Hughes raise FeatureLibError('Expected "Axis location"', self.cur_token_location_) 1464*e1fe3e4aSElliott Hughes if len(locations) > 1: 1465*e1fe3e4aSElliott Hughes for location in locations: 1466*e1fe3e4aSElliott Hughes if len(location.values) > 1: 1467*e1fe3e4aSElliott Hughes raise FeatureLibError( 1468*e1fe3e4aSElliott Hughes "Only one value is allowed in a " 1469*e1fe3e4aSElliott Hughes "Format 4 Axis Value Record, but " 1470*e1fe3e4aSElliott Hughes f"{len(location.values)} were found.", 1471*e1fe3e4aSElliott Hughes self.cur_token_location_, 1472*e1fe3e4aSElliott Hughes ) 1473*e1fe3e4aSElliott Hughes format4_tags = [] 1474*e1fe3e4aSElliott Hughes for location in locations: 1475*e1fe3e4aSElliott Hughes tag = location.tag 1476*e1fe3e4aSElliott Hughes if tag in format4_tags: 1477*e1fe3e4aSElliott Hughes raise FeatureLibError( 1478*e1fe3e4aSElliott Hughes f"Axis tag {tag} already " "defined.", self.cur_token_location_ 1479*e1fe3e4aSElliott Hughes ) 1480*e1fe3e4aSElliott Hughes format4_tags.append(tag) 1481*e1fe3e4aSElliott Hughes 1482*e1fe3e4aSElliott Hughes return self.ast.STATAxisValueStatement( 1483*e1fe3e4aSElliott Hughes names, locations, flags, self.cur_token_location_ 1484*e1fe3e4aSElliott Hughes ) 1485*e1fe3e4aSElliott Hughes 1486*e1fe3e4aSElliott Hughes def parse_STAT_location(self): 1487*e1fe3e4aSElliott Hughes values = [] 1488*e1fe3e4aSElliott Hughes tag = self.expect_tag_() 1489*e1fe3e4aSElliott Hughes if len(tag.strip()) != 4: 1490*e1fe3e4aSElliott Hughes raise FeatureLibError( 1491*e1fe3e4aSElliott Hughes f"Axis tag {self.cur_token_} must be 4 " "characters", 1492*e1fe3e4aSElliott Hughes self.cur_token_location_, 1493*e1fe3e4aSElliott Hughes ) 1494*e1fe3e4aSElliott Hughes 1495*e1fe3e4aSElliott Hughes while self.next_token_ != ";": 1496*e1fe3e4aSElliott Hughes if self.next_token_type_ is Lexer.FLOAT: 1497*e1fe3e4aSElliott Hughes value = self.expect_float_() 1498*e1fe3e4aSElliott Hughes values.append(value) 1499*e1fe3e4aSElliott Hughes elif self.next_token_type_ is Lexer.NUMBER: 1500*e1fe3e4aSElliott Hughes value = self.expect_number_() 1501*e1fe3e4aSElliott Hughes values.append(value) 1502*e1fe3e4aSElliott Hughes else: 1503*e1fe3e4aSElliott Hughes raise FeatureLibError( 1504*e1fe3e4aSElliott Hughes f'Unexpected value "{self.next_token_}". ' 1505*e1fe3e4aSElliott Hughes "Expected integer or float.", 1506*e1fe3e4aSElliott Hughes self.next_token_location_, 1507*e1fe3e4aSElliott Hughes ) 1508*e1fe3e4aSElliott Hughes if len(values) == 3: 1509*e1fe3e4aSElliott Hughes nominal, min_val, max_val = values 1510*e1fe3e4aSElliott Hughes if nominal < min_val or nominal > max_val: 1511*e1fe3e4aSElliott Hughes raise FeatureLibError( 1512*e1fe3e4aSElliott Hughes f"Default value {nominal} is outside " 1513*e1fe3e4aSElliott Hughes f"of specified range " 1514*e1fe3e4aSElliott Hughes f"{min_val}-{max_val}.", 1515*e1fe3e4aSElliott Hughes self.next_token_location_, 1516*e1fe3e4aSElliott Hughes ) 1517*e1fe3e4aSElliott Hughes return self.ast.AxisValueLocationStatement(tag, values) 1518*e1fe3e4aSElliott Hughes 1519*e1fe3e4aSElliott Hughes def parse_table_STAT_(self, table): 1520*e1fe3e4aSElliott Hughes statements = table.statements 1521*e1fe3e4aSElliott Hughes design_axes = [] 1522*e1fe3e4aSElliott Hughes while self.next_token_ != "}" or self.cur_comments_: 1523*e1fe3e4aSElliott Hughes self.advance_lexer_(comments=True) 1524*e1fe3e4aSElliott Hughes if self.cur_token_type_ is Lexer.COMMENT: 1525*e1fe3e4aSElliott Hughes statements.append( 1526*e1fe3e4aSElliott Hughes self.ast.Comment(self.cur_token_, location=self.cur_token_location_) 1527*e1fe3e4aSElliott Hughes ) 1528*e1fe3e4aSElliott Hughes elif self.cur_token_type_ is Lexer.NAME: 1529*e1fe3e4aSElliott Hughes if self.is_cur_keyword_("ElidedFallbackName"): 1530*e1fe3e4aSElliott Hughes names = self.parse_STAT_ElidedFallbackName() 1531*e1fe3e4aSElliott Hughes statements.append(self.ast.ElidedFallbackName(names)) 1532*e1fe3e4aSElliott Hughes elif self.is_cur_keyword_("ElidedFallbackNameID"): 1533*e1fe3e4aSElliott Hughes value = self.expect_number_() 1534*e1fe3e4aSElliott Hughes statements.append(self.ast.ElidedFallbackNameID(value)) 1535*e1fe3e4aSElliott Hughes self.expect_symbol_(";") 1536*e1fe3e4aSElliott Hughes elif self.is_cur_keyword_("DesignAxis"): 1537*e1fe3e4aSElliott Hughes designAxis = self.parse_STAT_design_axis() 1538*e1fe3e4aSElliott Hughes design_axes.append(designAxis.tag) 1539*e1fe3e4aSElliott Hughes statements.append(designAxis) 1540*e1fe3e4aSElliott Hughes self.expect_symbol_(";") 1541*e1fe3e4aSElliott Hughes elif self.is_cur_keyword_("AxisValue"): 1542*e1fe3e4aSElliott Hughes axisValueRecord = self.parse_STAT_axis_value_() 1543*e1fe3e4aSElliott Hughes for location in axisValueRecord.locations: 1544*e1fe3e4aSElliott Hughes if location.tag not in design_axes: 1545*e1fe3e4aSElliott Hughes # Tag must be defined in a DesignAxis before it 1546*e1fe3e4aSElliott Hughes # can be referenced 1547*e1fe3e4aSElliott Hughes raise FeatureLibError( 1548*e1fe3e4aSElliott Hughes "DesignAxis not defined for " f"{location.tag}.", 1549*e1fe3e4aSElliott Hughes self.cur_token_location_, 1550*e1fe3e4aSElliott Hughes ) 1551*e1fe3e4aSElliott Hughes statements.append(axisValueRecord) 1552*e1fe3e4aSElliott Hughes self.expect_symbol_(";") 1553*e1fe3e4aSElliott Hughes else: 1554*e1fe3e4aSElliott Hughes raise FeatureLibError( 1555*e1fe3e4aSElliott Hughes f"Unexpected token {self.cur_token_}", self.cur_token_location_ 1556*e1fe3e4aSElliott Hughes ) 1557*e1fe3e4aSElliott Hughes elif self.cur_token_ == ";": 1558*e1fe3e4aSElliott Hughes continue 1559*e1fe3e4aSElliott Hughes 1560*e1fe3e4aSElliott Hughes def parse_base_tag_list_(self): 1561*e1fe3e4aSElliott Hughes # Parses BASE table entries. (See `section 9.a <https://adobe-type-tools.github.io/afdko/OpenTypeFeatureFileSpecification.html#9.a>`_) 1562*e1fe3e4aSElliott Hughes assert self.cur_token_ in ( 1563*e1fe3e4aSElliott Hughes "HorizAxis.BaseTagList", 1564*e1fe3e4aSElliott Hughes "VertAxis.BaseTagList", 1565*e1fe3e4aSElliott Hughes ), self.cur_token_ 1566*e1fe3e4aSElliott Hughes bases = [] 1567*e1fe3e4aSElliott Hughes while self.next_token_ != ";": 1568*e1fe3e4aSElliott Hughes bases.append(self.expect_script_tag_()) 1569*e1fe3e4aSElliott Hughes self.expect_symbol_(";") 1570*e1fe3e4aSElliott Hughes return bases 1571*e1fe3e4aSElliott Hughes 1572*e1fe3e4aSElliott Hughes def parse_base_script_list_(self, count): 1573*e1fe3e4aSElliott Hughes assert self.cur_token_ in ( 1574*e1fe3e4aSElliott Hughes "HorizAxis.BaseScriptList", 1575*e1fe3e4aSElliott Hughes "VertAxis.BaseScriptList", 1576*e1fe3e4aSElliott Hughes ), self.cur_token_ 1577*e1fe3e4aSElliott Hughes scripts = [(self.parse_base_script_record_(count))] 1578*e1fe3e4aSElliott Hughes while self.next_token_ == ",": 1579*e1fe3e4aSElliott Hughes self.expect_symbol_(",") 1580*e1fe3e4aSElliott Hughes scripts.append(self.parse_base_script_record_(count)) 1581*e1fe3e4aSElliott Hughes self.expect_symbol_(";") 1582*e1fe3e4aSElliott Hughes return scripts 1583*e1fe3e4aSElliott Hughes 1584*e1fe3e4aSElliott Hughes def parse_base_script_record_(self, count): 1585*e1fe3e4aSElliott Hughes script_tag = self.expect_script_tag_() 1586*e1fe3e4aSElliott Hughes base_tag = self.expect_script_tag_() 1587*e1fe3e4aSElliott Hughes coords = [self.expect_number_() for i in range(count)] 1588*e1fe3e4aSElliott Hughes return script_tag, base_tag, coords 1589*e1fe3e4aSElliott Hughes 1590*e1fe3e4aSElliott Hughes def parse_device_(self): 1591*e1fe3e4aSElliott Hughes result = None 1592*e1fe3e4aSElliott Hughes self.expect_symbol_("<") 1593*e1fe3e4aSElliott Hughes self.expect_keyword_("device") 1594*e1fe3e4aSElliott Hughes if self.next_token_ == "NULL": 1595*e1fe3e4aSElliott Hughes self.expect_keyword_("NULL") 1596*e1fe3e4aSElliott Hughes else: 1597*e1fe3e4aSElliott Hughes result = [(self.expect_number_(), self.expect_number_())] 1598*e1fe3e4aSElliott Hughes while self.next_token_ == ",": 1599*e1fe3e4aSElliott Hughes self.expect_symbol_(",") 1600*e1fe3e4aSElliott Hughes result.append((self.expect_number_(), self.expect_number_())) 1601*e1fe3e4aSElliott Hughes result = tuple(result) # make it hashable 1602*e1fe3e4aSElliott Hughes self.expect_symbol_(">") 1603*e1fe3e4aSElliott Hughes return result 1604*e1fe3e4aSElliott Hughes 1605*e1fe3e4aSElliott Hughes def is_next_value_(self): 1606*e1fe3e4aSElliott Hughes return ( 1607*e1fe3e4aSElliott Hughes self.next_token_type_ is Lexer.NUMBER 1608*e1fe3e4aSElliott Hughes or self.next_token_ == "<" 1609*e1fe3e4aSElliott Hughes or self.next_token_ == "(" 1610*e1fe3e4aSElliott Hughes ) 1611*e1fe3e4aSElliott Hughes 1612*e1fe3e4aSElliott Hughes def parse_valuerecord_(self, vertical): 1613*e1fe3e4aSElliott Hughes if ( 1614*e1fe3e4aSElliott Hughes self.next_token_type_ is Lexer.SYMBOL and self.next_token_ == "(" 1615*e1fe3e4aSElliott Hughes ) or self.next_token_type_ is Lexer.NUMBER: 1616*e1fe3e4aSElliott Hughes number, location = ( 1617*e1fe3e4aSElliott Hughes self.expect_number_(variable=True), 1618*e1fe3e4aSElliott Hughes self.cur_token_location_, 1619*e1fe3e4aSElliott Hughes ) 1620*e1fe3e4aSElliott Hughes if vertical: 1621*e1fe3e4aSElliott Hughes val = self.ast.ValueRecord( 1622*e1fe3e4aSElliott Hughes yAdvance=number, vertical=vertical, location=location 1623*e1fe3e4aSElliott Hughes ) 1624*e1fe3e4aSElliott Hughes else: 1625*e1fe3e4aSElliott Hughes val = self.ast.ValueRecord( 1626*e1fe3e4aSElliott Hughes xAdvance=number, vertical=vertical, location=location 1627*e1fe3e4aSElliott Hughes ) 1628*e1fe3e4aSElliott Hughes return val 1629*e1fe3e4aSElliott Hughes self.expect_symbol_("<") 1630*e1fe3e4aSElliott Hughes location = self.cur_token_location_ 1631*e1fe3e4aSElliott Hughes if self.next_token_type_ is Lexer.NAME: 1632*e1fe3e4aSElliott Hughes name = self.expect_name_() 1633*e1fe3e4aSElliott Hughes if name == "NULL": 1634*e1fe3e4aSElliott Hughes self.expect_symbol_(">") 1635*e1fe3e4aSElliott Hughes return self.ast.ValueRecord() 1636*e1fe3e4aSElliott Hughes vrd = self.valuerecords_.resolve(name) 1637*e1fe3e4aSElliott Hughes if vrd is None: 1638*e1fe3e4aSElliott Hughes raise FeatureLibError( 1639*e1fe3e4aSElliott Hughes 'Unknown valueRecordDef "%s"' % name, self.cur_token_location_ 1640*e1fe3e4aSElliott Hughes ) 1641*e1fe3e4aSElliott Hughes value = vrd.value 1642*e1fe3e4aSElliott Hughes xPlacement, yPlacement = (value.xPlacement, value.yPlacement) 1643*e1fe3e4aSElliott Hughes xAdvance, yAdvance = (value.xAdvance, value.yAdvance) 1644*e1fe3e4aSElliott Hughes else: 1645*e1fe3e4aSElliott Hughes xPlacement, yPlacement, xAdvance, yAdvance = ( 1646*e1fe3e4aSElliott Hughes self.expect_number_(variable=True), 1647*e1fe3e4aSElliott Hughes self.expect_number_(variable=True), 1648*e1fe3e4aSElliott Hughes self.expect_number_(variable=True), 1649*e1fe3e4aSElliott Hughes self.expect_number_(variable=True), 1650*e1fe3e4aSElliott Hughes ) 1651*e1fe3e4aSElliott Hughes 1652*e1fe3e4aSElliott Hughes if self.next_token_ == "<": 1653*e1fe3e4aSElliott Hughes xPlaDevice, yPlaDevice, xAdvDevice, yAdvDevice = ( 1654*e1fe3e4aSElliott Hughes self.parse_device_(), 1655*e1fe3e4aSElliott Hughes self.parse_device_(), 1656*e1fe3e4aSElliott Hughes self.parse_device_(), 1657*e1fe3e4aSElliott Hughes self.parse_device_(), 1658*e1fe3e4aSElliott Hughes ) 1659*e1fe3e4aSElliott Hughes allDeltas = sorted( 1660*e1fe3e4aSElliott Hughes [ 1661*e1fe3e4aSElliott Hughes delta 1662*e1fe3e4aSElliott Hughes for size, delta in (xPlaDevice if xPlaDevice else ()) 1663*e1fe3e4aSElliott Hughes + (yPlaDevice if yPlaDevice else ()) 1664*e1fe3e4aSElliott Hughes + (xAdvDevice if xAdvDevice else ()) 1665*e1fe3e4aSElliott Hughes + (yAdvDevice if yAdvDevice else ()) 1666*e1fe3e4aSElliott Hughes ] 1667*e1fe3e4aSElliott Hughes ) 1668*e1fe3e4aSElliott Hughes if allDeltas[0] < -128 or allDeltas[-1] > 127: 1669*e1fe3e4aSElliott Hughes raise FeatureLibError( 1670*e1fe3e4aSElliott Hughes "Device value out of valid range (-128..127)", 1671*e1fe3e4aSElliott Hughes self.cur_token_location_, 1672*e1fe3e4aSElliott Hughes ) 1673*e1fe3e4aSElliott Hughes else: 1674*e1fe3e4aSElliott Hughes xPlaDevice, yPlaDevice, xAdvDevice, yAdvDevice = (None, None, None, None) 1675*e1fe3e4aSElliott Hughes 1676*e1fe3e4aSElliott Hughes self.expect_symbol_(">") 1677*e1fe3e4aSElliott Hughes return self.ast.ValueRecord( 1678*e1fe3e4aSElliott Hughes xPlacement, 1679*e1fe3e4aSElliott Hughes yPlacement, 1680*e1fe3e4aSElliott Hughes xAdvance, 1681*e1fe3e4aSElliott Hughes yAdvance, 1682*e1fe3e4aSElliott Hughes xPlaDevice, 1683*e1fe3e4aSElliott Hughes yPlaDevice, 1684*e1fe3e4aSElliott Hughes xAdvDevice, 1685*e1fe3e4aSElliott Hughes yAdvDevice, 1686*e1fe3e4aSElliott Hughes vertical=vertical, 1687*e1fe3e4aSElliott Hughes location=location, 1688*e1fe3e4aSElliott Hughes ) 1689*e1fe3e4aSElliott Hughes 1690*e1fe3e4aSElliott Hughes def parse_valuerecord_definition_(self, vertical): 1691*e1fe3e4aSElliott Hughes # Parses a named value record definition. (See section `2.e.v <https://adobe-type-tools.github.io/afdko/OpenTypeFeatureFileSpecification.html#2.e.v>`_) 1692*e1fe3e4aSElliott Hughes assert self.is_cur_keyword_("valueRecordDef") 1693*e1fe3e4aSElliott Hughes location = self.cur_token_location_ 1694*e1fe3e4aSElliott Hughes value = self.parse_valuerecord_(vertical) 1695*e1fe3e4aSElliott Hughes name = self.expect_name_() 1696*e1fe3e4aSElliott Hughes self.expect_symbol_(";") 1697*e1fe3e4aSElliott Hughes vrd = self.ast.ValueRecordDefinition(name, value, location=location) 1698*e1fe3e4aSElliott Hughes self.valuerecords_.define(name, vrd) 1699*e1fe3e4aSElliott Hughes return vrd 1700*e1fe3e4aSElliott Hughes 1701*e1fe3e4aSElliott Hughes def parse_languagesystem_(self): 1702*e1fe3e4aSElliott Hughes assert self.cur_token_ == "languagesystem" 1703*e1fe3e4aSElliott Hughes location = self.cur_token_location_ 1704*e1fe3e4aSElliott Hughes script = self.expect_script_tag_() 1705*e1fe3e4aSElliott Hughes language = self.expect_language_tag_() 1706*e1fe3e4aSElliott Hughes self.expect_symbol_(";") 1707*e1fe3e4aSElliott Hughes return self.ast.LanguageSystemStatement(script, language, location=location) 1708*e1fe3e4aSElliott Hughes 1709*e1fe3e4aSElliott Hughes def parse_feature_block_(self, variation=False): 1710*e1fe3e4aSElliott Hughes if variation: 1711*e1fe3e4aSElliott Hughes assert self.cur_token_ == "variation" 1712*e1fe3e4aSElliott Hughes else: 1713*e1fe3e4aSElliott Hughes assert self.cur_token_ == "feature" 1714*e1fe3e4aSElliott Hughes location = self.cur_token_location_ 1715*e1fe3e4aSElliott Hughes tag = self.expect_tag_() 1716*e1fe3e4aSElliott Hughes vertical = tag in {"vkrn", "vpal", "vhal", "valt"} 1717*e1fe3e4aSElliott Hughes 1718*e1fe3e4aSElliott Hughes stylisticset = None 1719*e1fe3e4aSElliott Hughes cv_feature = None 1720*e1fe3e4aSElliott Hughes size_feature = False 1721*e1fe3e4aSElliott Hughes if tag in self.SS_FEATURE_TAGS: 1722*e1fe3e4aSElliott Hughes stylisticset = tag 1723*e1fe3e4aSElliott Hughes elif tag in self.CV_FEATURE_TAGS: 1724*e1fe3e4aSElliott Hughes cv_feature = tag 1725*e1fe3e4aSElliott Hughes elif tag == "size": 1726*e1fe3e4aSElliott Hughes size_feature = True 1727*e1fe3e4aSElliott Hughes 1728*e1fe3e4aSElliott Hughes if variation: 1729*e1fe3e4aSElliott Hughes conditionset = self.expect_name_() 1730*e1fe3e4aSElliott Hughes 1731*e1fe3e4aSElliott Hughes use_extension = False 1732*e1fe3e4aSElliott Hughes if self.next_token_ == "useExtension": 1733*e1fe3e4aSElliott Hughes self.expect_keyword_("useExtension") 1734*e1fe3e4aSElliott Hughes use_extension = True 1735*e1fe3e4aSElliott Hughes 1736*e1fe3e4aSElliott Hughes if variation: 1737*e1fe3e4aSElliott Hughes block = self.ast.VariationBlock( 1738*e1fe3e4aSElliott Hughes tag, conditionset, use_extension=use_extension, location=location 1739*e1fe3e4aSElliott Hughes ) 1740*e1fe3e4aSElliott Hughes else: 1741*e1fe3e4aSElliott Hughes block = self.ast.FeatureBlock( 1742*e1fe3e4aSElliott Hughes tag, use_extension=use_extension, location=location 1743*e1fe3e4aSElliott Hughes ) 1744*e1fe3e4aSElliott Hughes self.parse_block_(block, vertical, stylisticset, size_feature, cv_feature) 1745*e1fe3e4aSElliott Hughes return block 1746*e1fe3e4aSElliott Hughes 1747*e1fe3e4aSElliott Hughes def parse_feature_reference_(self): 1748*e1fe3e4aSElliott Hughes assert self.cur_token_ == "feature", self.cur_token_ 1749*e1fe3e4aSElliott Hughes location = self.cur_token_location_ 1750*e1fe3e4aSElliott Hughes featureName = self.expect_tag_() 1751*e1fe3e4aSElliott Hughes self.expect_symbol_(";") 1752*e1fe3e4aSElliott Hughes return self.ast.FeatureReferenceStatement(featureName, location=location) 1753*e1fe3e4aSElliott Hughes 1754*e1fe3e4aSElliott Hughes def parse_featureNames_(self, tag): 1755*e1fe3e4aSElliott Hughes """Parses a ``featureNames`` statement found in stylistic set features. 1756*e1fe3e4aSElliott Hughes See section `8.c <https://adobe-type-tools.github.io/afdko/OpenTypeFeatureFileSpecification.html#8.c>`_. 1757*e1fe3e4aSElliott Hughes """ 1758*e1fe3e4aSElliott Hughes assert self.cur_token_ == "featureNames", self.cur_token_ 1759*e1fe3e4aSElliott Hughes block = self.ast.NestedBlock( 1760*e1fe3e4aSElliott Hughes tag, self.cur_token_, location=self.cur_token_location_ 1761*e1fe3e4aSElliott Hughes ) 1762*e1fe3e4aSElliott Hughes self.expect_symbol_("{") 1763*e1fe3e4aSElliott Hughes for symtab in self.symbol_tables_: 1764*e1fe3e4aSElliott Hughes symtab.enter_scope() 1765*e1fe3e4aSElliott Hughes while self.next_token_ != "}" or self.cur_comments_: 1766*e1fe3e4aSElliott Hughes self.advance_lexer_(comments=True) 1767*e1fe3e4aSElliott Hughes if self.cur_token_type_ is Lexer.COMMENT: 1768*e1fe3e4aSElliott Hughes block.statements.append( 1769*e1fe3e4aSElliott Hughes self.ast.Comment(self.cur_token_, location=self.cur_token_location_) 1770*e1fe3e4aSElliott Hughes ) 1771*e1fe3e4aSElliott Hughes elif self.is_cur_keyword_("name"): 1772*e1fe3e4aSElliott Hughes location = self.cur_token_location_ 1773*e1fe3e4aSElliott Hughes platformID, platEncID, langID, string = self.parse_name_() 1774*e1fe3e4aSElliott Hughes block.statements.append( 1775*e1fe3e4aSElliott Hughes self.ast.FeatureNameStatement( 1776*e1fe3e4aSElliott Hughes tag, platformID, platEncID, langID, string, location=location 1777*e1fe3e4aSElliott Hughes ) 1778*e1fe3e4aSElliott Hughes ) 1779*e1fe3e4aSElliott Hughes elif self.cur_token_ == ";": 1780*e1fe3e4aSElliott Hughes continue 1781*e1fe3e4aSElliott Hughes else: 1782*e1fe3e4aSElliott Hughes raise FeatureLibError('Expected "name"', self.cur_token_location_) 1783*e1fe3e4aSElliott Hughes self.expect_symbol_("}") 1784*e1fe3e4aSElliott Hughes for symtab in self.symbol_tables_: 1785*e1fe3e4aSElliott Hughes symtab.exit_scope() 1786*e1fe3e4aSElliott Hughes self.expect_symbol_(";") 1787*e1fe3e4aSElliott Hughes return block 1788*e1fe3e4aSElliott Hughes 1789*e1fe3e4aSElliott Hughes def parse_cvParameters_(self, tag): 1790*e1fe3e4aSElliott Hughes # Parses a ``cvParameters`` block found in Character Variant features. 1791*e1fe3e4aSElliott Hughes # See section `8.d <https://adobe-type-tools.github.io/afdko/OpenTypeFeatureFileSpecification.html#8.d>`_. 1792*e1fe3e4aSElliott Hughes assert self.cur_token_ == "cvParameters", self.cur_token_ 1793*e1fe3e4aSElliott Hughes block = self.ast.NestedBlock( 1794*e1fe3e4aSElliott Hughes tag, self.cur_token_, location=self.cur_token_location_ 1795*e1fe3e4aSElliott Hughes ) 1796*e1fe3e4aSElliott Hughes self.expect_symbol_("{") 1797*e1fe3e4aSElliott Hughes for symtab in self.symbol_tables_: 1798*e1fe3e4aSElliott Hughes symtab.enter_scope() 1799*e1fe3e4aSElliott Hughes 1800*e1fe3e4aSElliott Hughes statements = block.statements 1801*e1fe3e4aSElliott Hughes while self.next_token_ != "}" or self.cur_comments_: 1802*e1fe3e4aSElliott Hughes self.advance_lexer_(comments=True) 1803*e1fe3e4aSElliott Hughes if self.cur_token_type_ is Lexer.COMMENT: 1804*e1fe3e4aSElliott Hughes statements.append( 1805*e1fe3e4aSElliott Hughes self.ast.Comment(self.cur_token_, location=self.cur_token_location_) 1806*e1fe3e4aSElliott Hughes ) 1807*e1fe3e4aSElliott Hughes elif self.is_cur_keyword_( 1808*e1fe3e4aSElliott Hughes { 1809*e1fe3e4aSElliott Hughes "FeatUILabelNameID", 1810*e1fe3e4aSElliott Hughes "FeatUITooltipTextNameID", 1811*e1fe3e4aSElliott Hughes "SampleTextNameID", 1812*e1fe3e4aSElliott Hughes "ParamUILabelNameID", 1813*e1fe3e4aSElliott Hughes } 1814*e1fe3e4aSElliott Hughes ): 1815*e1fe3e4aSElliott Hughes statements.append(self.parse_cvNameIDs_(tag, self.cur_token_)) 1816*e1fe3e4aSElliott Hughes elif self.is_cur_keyword_("Character"): 1817*e1fe3e4aSElliott Hughes statements.append(self.parse_cvCharacter_(tag)) 1818*e1fe3e4aSElliott Hughes elif self.cur_token_ == ";": 1819*e1fe3e4aSElliott Hughes continue 1820*e1fe3e4aSElliott Hughes else: 1821*e1fe3e4aSElliott Hughes raise FeatureLibError( 1822*e1fe3e4aSElliott Hughes "Expected statement: got {} {}".format( 1823*e1fe3e4aSElliott Hughes self.cur_token_type_, self.cur_token_ 1824*e1fe3e4aSElliott Hughes ), 1825*e1fe3e4aSElliott Hughes self.cur_token_location_, 1826*e1fe3e4aSElliott Hughes ) 1827*e1fe3e4aSElliott Hughes 1828*e1fe3e4aSElliott Hughes self.expect_symbol_("}") 1829*e1fe3e4aSElliott Hughes for symtab in self.symbol_tables_: 1830*e1fe3e4aSElliott Hughes symtab.exit_scope() 1831*e1fe3e4aSElliott Hughes self.expect_symbol_(";") 1832*e1fe3e4aSElliott Hughes return block 1833*e1fe3e4aSElliott Hughes 1834*e1fe3e4aSElliott Hughes def parse_cvNameIDs_(self, tag, block_name): 1835*e1fe3e4aSElliott Hughes assert self.cur_token_ == block_name, self.cur_token_ 1836*e1fe3e4aSElliott Hughes block = self.ast.NestedBlock(tag, block_name, location=self.cur_token_location_) 1837*e1fe3e4aSElliott Hughes self.expect_symbol_("{") 1838*e1fe3e4aSElliott Hughes for symtab in self.symbol_tables_: 1839*e1fe3e4aSElliott Hughes symtab.enter_scope() 1840*e1fe3e4aSElliott Hughes while self.next_token_ != "}" or self.cur_comments_: 1841*e1fe3e4aSElliott Hughes self.advance_lexer_(comments=True) 1842*e1fe3e4aSElliott Hughes if self.cur_token_type_ is Lexer.COMMENT: 1843*e1fe3e4aSElliott Hughes block.statements.append( 1844*e1fe3e4aSElliott Hughes self.ast.Comment(self.cur_token_, location=self.cur_token_location_) 1845*e1fe3e4aSElliott Hughes ) 1846*e1fe3e4aSElliott Hughes elif self.is_cur_keyword_("name"): 1847*e1fe3e4aSElliott Hughes location = self.cur_token_location_ 1848*e1fe3e4aSElliott Hughes platformID, platEncID, langID, string = self.parse_name_() 1849*e1fe3e4aSElliott Hughes block.statements.append( 1850*e1fe3e4aSElliott Hughes self.ast.CVParametersNameStatement( 1851*e1fe3e4aSElliott Hughes tag, 1852*e1fe3e4aSElliott Hughes platformID, 1853*e1fe3e4aSElliott Hughes platEncID, 1854*e1fe3e4aSElliott Hughes langID, 1855*e1fe3e4aSElliott Hughes string, 1856*e1fe3e4aSElliott Hughes block_name, 1857*e1fe3e4aSElliott Hughes location=location, 1858*e1fe3e4aSElliott Hughes ) 1859*e1fe3e4aSElliott Hughes ) 1860*e1fe3e4aSElliott Hughes elif self.cur_token_ == ";": 1861*e1fe3e4aSElliott Hughes continue 1862*e1fe3e4aSElliott Hughes else: 1863*e1fe3e4aSElliott Hughes raise FeatureLibError('Expected "name"', self.cur_token_location_) 1864*e1fe3e4aSElliott Hughes self.expect_symbol_("}") 1865*e1fe3e4aSElliott Hughes for symtab in self.symbol_tables_: 1866*e1fe3e4aSElliott Hughes symtab.exit_scope() 1867*e1fe3e4aSElliott Hughes self.expect_symbol_(";") 1868*e1fe3e4aSElliott Hughes return block 1869*e1fe3e4aSElliott Hughes 1870*e1fe3e4aSElliott Hughes def parse_cvCharacter_(self, tag): 1871*e1fe3e4aSElliott Hughes assert self.cur_token_ == "Character", self.cur_token_ 1872*e1fe3e4aSElliott Hughes location, character = self.cur_token_location_, self.expect_any_number_() 1873*e1fe3e4aSElliott Hughes self.expect_symbol_(";") 1874*e1fe3e4aSElliott Hughes if not (0xFFFFFF >= character >= 0): 1875*e1fe3e4aSElliott Hughes raise FeatureLibError( 1876*e1fe3e4aSElliott Hughes "Character value must be between " 1877*e1fe3e4aSElliott Hughes "{:#x} and {:#x}".format(0, 0xFFFFFF), 1878*e1fe3e4aSElliott Hughes location, 1879*e1fe3e4aSElliott Hughes ) 1880*e1fe3e4aSElliott Hughes return self.ast.CharacterStatement(character, tag, location=location) 1881*e1fe3e4aSElliott Hughes 1882*e1fe3e4aSElliott Hughes def parse_FontRevision_(self): 1883*e1fe3e4aSElliott Hughes # Parses a ``FontRevision`` statement found in the head table. See 1884*e1fe3e4aSElliott Hughes # `section 9.c <https://adobe-type-tools.github.io/afdko/OpenTypeFeatureFileSpecification.html#9.c>`_. 1885*e1fe3e4aSElliott Hughes assert self.cur_token_ == "FontRevision", self.cur_token_ 1886*e1fe3e4aSElliott Hughes location, version = self.cur_token_location_, self.expect_float_() 1887*e1fe3e4aSElliott Hughes self.expect_symbol_(";") 1888*e1fe3e4aSElliott Hughes if version <= 0: 1889*e1fe3e4aSElliott Hughes raise FeatureLibError("Font revision numbers must be positive", location) 1890*e1fe3e4aSElliott Hughes return self.ast.FontRevisionStatement(version, location=location) 1891*e1fe3e4aSElliott Hughes 1892*e1fe3e4aSElliott Hughes def parse_conditionset_(self): 1893*e1fe3e4aSElliott Hughes name = self.expect_name_() 1894*e1fe3e4aSElliott Hughes 1895*e1fe3e4aSElliott Hughes conditions = {} 1896*e1fe3e4aSElliott Hughes self.expect_symbol_("{") 1897*e1fe3e4aSElliott Hughes 1898*e1fe3e4aSElliott Hughes while self.next_token_ != "}": 1899*e1fe3e4aSElliott Hughes self.advance_lexer_() 1900*e1fe3e4aSElliott Hughes if self.cur_token_type_ is not Lexer.NAME: 1901*e1fe3e4aSElliott Hughes raise FeatureLibError("Expected an axis name", self.cur_token_location_) 1902*e1fe3e4aSElliott Hughes 1903*e1fe3e4aSElliott Hughes axis = self.cur_token_ 1904*e1fe3e4aSElliott Hughes if axis in conditions: 1905*e1fe3e4aSElliott Hughes raise FeatureLibError( 1906*e1fe3e4aSElliott Hughes f"Repeated condition for axis {axis}", self.cur_token_location_ 1907*e1fe3e4aSElliott Hughes ) 1908*e1fe3e4aSElliott Hughes 1909*e1fe3e4aSElliott Hughes if self.next_token_type_ is Lexer.FLOAT: 1910*e1fe3e4aSElliott Hughes min_value = self.expect_float_() 1911*e1fe3e4aSElliott Hughes elif self.next_token_type_ is Lexer.NUMBER: 1912*e1fe3e4aSElliott Hughes min_value = self.expect_number_(variable=False) 1913*e1fe3e4aSElliott Hughes 1914*e1fe3e4aSElliott Hughes if self.next_token_type_ is Lexer.FLOAT: 1915*e1fe3e4aSElliott Hughes max_value = self.expect_float_() 1916*e1fe3e4aSElliott Hughes elif self.next_token_type_ is Lexer.NUMBER: 1917*e1fe3e4aSElliott Hughes max_value = self.expect_number_(variable=False) 1918*e1fe3e4aSElliott Hughes self.expect_symbol_(";") 1919*e1fe3e4aSElliott Hughes 1920*e1fe3e4aSElliott Hughes conditions[axis] = (min_value, max_value) 1921*e1fe3e4aSElliott Hughes 1922*e1fe3e4aSElliott Hughes self.expect_symbol_("}") 1923*e1fe3e4aSElliott Hughes 1924*e1fe3e4aSElliott Hughes finalname = self.expect_name_() 1925*e1fe3e4aSElliott Hughes if finalname != name: 1926*e1fe3e4aSElliott Hughes raise FeatureLibError('Expected "%s"' % name, self.cur_token_location_) 1927*e1fe3e4aSElliott Hughes return self.ast.ConditionsetStatement(name, conditions) 1928*e1fe3e4aSElliott Hughes 1929*e1fe3e4aSElliott Hughes def parse_block_( 1930*e1fe3e4aSElliott Hughes self, block, vertical, stylisticset=None, size_feature=False, cv_feature=None 1931*e1fe3e4aSElliott Hughes ): 1932*e1fe3e4aSElliott Hughes self.expect_symbol_("{") 1933*e1fe3e4aSElliott Hughes for symtab in self.symbol_tables_: 1934*e1fe3e4aSElliott Hughes symtab.enter_scope() 1935*e1fe3e4aSElliott Hughes 1936*e1fe3e4aSElliott Hughes statements = block.statements 1937*e1fe3e4aSElliott Hughes while self.next_token_ != "}" or self.cur_comments_: 1938*e1fe3e4aSElliott Hughes self.advance_lexer_(comments=True) 1939*e1fe3e4aSElliott Hughes if self.cur_token_type_ is Lexer.COMMENT: 1940*e1fe3e4aSElliott Hughes statements.append( 1941*e1fe3e4aSElliott Hughes self.ast.Comment(self.cur_token_, location=self.cur_token_location_) 1942*e1fe3e4aSElliott Hughes ) 1943*e1fe3e4aSElliott Hughes elif self.cur_token_type_ is Lexer.GLYPHCLASS: 1944*e1fe3e4aSElliott Hughes statements.append(self.parse_glyphclass_definition_()) 1945*e1fe3e4aSElliott Hughes elif self.is_cur_keyword_("anchorDef"): 1946*e1fe3e4aSElliott Hughes statements.append(self.parse_anchordef_()) 1947*e1fe3e4aSElliott Hughes elif self.is_cur_keyword_({"enum", "enumerate"}): 1948*e1fe3e4aSElliott Hughes statements.append(self.parse_enumerate_(vertical=vertical)) 1949*e1fe3e4aSElliott Hughes elif self.is_cur_keyword_("feature"): 1950*e1fe3e4aSElliott Hughes statements.append(self.parse_feature_reference_()) 1951*e1fe3e4aSElliott Hughes elif self.is_cur_keyword_("ignore"): 1952*e1fe3e4aSElliott Hughes statements.append(self.parse_ignore_()) 1953*e1fe3e4aSElliott Hughes elif self.is_cur_keyword_("language"): 1954*e1fe3e4aSElliott Hughes statements.append(self.parse_language_()) 1955*e1fe3e4aSElliott Hughes elif self.is_cur_keyword_("lookup"): 1956*e1fe3e4aSElliott Hughes statements.append(self.parse_lookup_(vertical)) 1957*e1fe3e4aSElliott Hughes elif self.is_cur_keyword_("lookupflag"): 1958*e1fe3e4aSElliott Hughes statements.append(self.parse_lookupflag_()) 1959*e1fe3e4aSElliott Hughes elif self.is_cur_keyword_("markClass"): 1960*e1fe3e4aSElliott Hughes statements.append(self.parse_markClass_()) 1961*e1fe3e4aSElliott Hughes elif self.is_cur_keyword_({"pos", "position"}): 1962*e1fe3e4aSElliott Hughes statements.append( 1963*e1fe3e4aSElliott Hughes self.parse_position_(enumerated=False, vertical=vertical) 1964*e1fe3e4aSElliott Hughes ) 1965*e1fe3e4aSElliott Hughes elif self.is_cur_keyword_("script"): 1966*e1fe3e4aSElliott Hughes statements.append(self.parse_script_()) 1967*e1fe3e4aSElliott Hughes elif self.is_cur_keyword_({"sub", "substitute", "rsub", "reversesub"}): 1968*e1fe3e4aSElliott Hughes statements.append(self.parse_substitute_()) 1969*e1fe3e4aSElliott Hughes elif self.is_cur_keyword_("subtable"): 1970*e1fe3e4aSElliott Hughes statements.append(self.parse_subtable_()) 1971*e1fe3e4aSElliott Hughes elif self.is_cur_keyword_("valueRecordDef"): 1972*e1fe3e4aSElliott Hughes statements.append(self.parse_valuerecord_definition_(vertical)) 1973*e1fe3e4aSElliott Hughes elif stylisticset and self.is_cur_keyword_("featureNames"): 1974*e1fe3e4aSElliott Hughes statements.append(self.parse_featureNames_(stylisticset)) 1975*e1fe3e4aSElliott Hughes elif cv_feature and self.is_cur_keyword_("cvParameters"): 1976*e1fe3e4aSElliott Hughes statements.append(self.parse_cvParameters_(cv_feature)) 1977*e1fe3e4aSElliott Hughes elif size_feature and self.is_cur_keyword_("parameters"): 1978*e1fe3e4aSElliott Hughes statements.append(self.parse_size_parameters_()) 1979*e1fe3e4aSElliott Hughes elif size_feature and self.is_cur_keyword_("sizemenuname"): 1980*e1fe3e4aSElliott Hughes statements.append(self.parse_size_menuname_()) 1981*e1fe3e4aSElliott Hughes elif ( 1982*e1fe3e4aSElliott Hughes self.cur_token_type_ is Lexer.NAME 1983*e1fe3e4aSElliott Hughes and self.cur_token_ in self.extensions 1984*e1fe3e4aSElliott Hughes ): 1985*e1fe3e4aSElliott Hughes statements.append(self.extensions[self.cur_token_](self)) 1986*e1fe3e4aSElliott Hughes elif self.cur_token_ == ";": 1987*e1fe3e4aSElliott Hughes continue 1988*e1fe3e4aSElliott Hughes else: 1989*e1fe3e4aSElliott Hughes raise FeatureLibError( 1990*e1fe3e4aSElliott Hughes "Expected glyph class definition or statement: got {} {}".format( 1991*e1fe3e4aSElliott Hughes self.cur_token_type_, self.cur_token_ 1992*e1fe3e4aSElliott Hughes ), 1993*e1fe3e4aSElliott Hughes self.cur_token_location_, 1994*e1fe3e4aSElliott Hughes ) 1995*e1fe3e4aSElliott Hughes 1996*e1fe3e4aSElliott Hughes self.expect_symbol_("}") 1997*e1fe3e4aSElliott Hughes for symtab in self.symbol_tables_: 1998*e1fe3e4aSElliott Hughes symtab.exit_scope() 1999*e1fe3e4aSElliott Hughes 2000*e1fe3e4aSElliott Hughes name = self.expect_name_() 2001*e1fe3e4aSElliott Hughes if name != block.name.strip(): 2002*e1fe3e4aSElliott Hughes raise FeatureLibError( 2003*e1fe3e4aSElliott Hughes 'Expected "%s"' % block.name.strip(), self.cur_token_location_ 2004*e1fe3e4aSElliott Hughes ) 2005*e1fe3e4aSElliott Hughes self.expect_symbol_(";") 2006*e1fe3e4aSElliott Hughes 2007*e1fe3e4aSElliott Hughes # A multiple substitution may have a single destination, in which case 2008*e1fe3e4aSElliott Hughes # it will look just like a single substitution. So if there are both 2009*e1fe3e4aSElliott Hughes # multiple and single substitutions, upgrade all the single ones to 2010*e1fe3e4aSElliott Hughes # multiple substitutions. 2011*e1fe3e4aSElliott Hughes 2012*e1fe3e4aSElliott Hughes # Check if we have a mix of non-contextual singles and multiples. 2013*e1fe3e4aSElliott Hughes has_single = False 2014*e1fe3e4aSElliott Hughes has_multiple = False 2015*e1fe3e4aSElliott Hughes for s in statements: 2016*e1fe3e4aSElliott Hughes if isinstance(s, self.ast.SingleSubstStatement): 2017*e1fe3e4aSElliott Hughes has_single = not any([s.prefix, s.suffix, s.forceChain]) 2018*e1fe3e4aSElliott Hughes elif isinstance(s, self.ast.MultipleSubstStatement): 2019*e1fe3e4aSElliott Hughes has_multiple = not any([s.prefix, s.suffix, s.forceChain]) 2020*e1fe3e4aSElliott Hughes 2021*e1fe3e4aSElliott Hughes # Upgrade all single substitutions to multiple substitutions. 2022*e1fe3e4aSElliott Hughes if has_single and has_multiple: 2023*e1fe3e4aSElliott Hughes statements = [] 2024*e1fe3e4aSElliott Hughes for s in block.statements: 2025*e1fe3e4aSElliott Hughes if isinstance(s, self.ast.SingleSubstStatement): 2026*e1fe3e4aSElliott Hughes glyphs = s.glyphs[0].glyphSet() 2027*e1fe3e4aSElliott Hughes replacements = s.replacements[0].glyphSet() 2028*e1fe3e4aSElliott Hughes if len(replacements) == 1: 2029*e1fe3e4aSElliott Hughes replacements *= len(glyphs) 2030*e1fe3e4aSElliott Hughes for i, glyph in enumerate(glyphs): 2031*e1fe3e4aSElliott Hughes statements.append( 2032*e1fe3e4aSElliott Hughes self.ast.MultipleSubstStatement( 2033*e1fe3e4aSElliott Hughes s.prefix, 2034*e1fe3e4aSElliott Hughes glyph, 2035*e1fe3e4aSElliott Hughes s.suffix, 2036*e1fe3e4aSElliott Hughes [replacements[i]], 2037*e1fe3e4aSElliott Hughes s.forceChain, 2038*e1fe3e4aSElliott Hughes location=s.location, 2039*e1fe3e4aSElliott Hughes ) 2040*e1fe3e4aSElliott Hughes ) 2041*e1fe3e4aSElliott Hughes else: 2042*e1fe3e4aSElliott Hughes statements.append(s) 2043*e1fe3e4aSElliott Hughes block.statements = statements 2044*e1fe3e4aSElliott Hughes 2045*e1fe3e4aSElliott Hughes def is_cur_keyword_(self, k): 2046*e1fe3e4aSElliott Hughes if self.cur_token_type_ is Lexer.NAME: 2047*e1fe3e4aSElliott Hughes if isinstance(k, type("")): # basestring is gone in Python3 2048*e1fe3e4aSElliott Hughes return self.cur_token_ == k 2049*e1fe3e4aSElliott Hughes else: 2050*e1fe3e4aSElliott Hughes return self.cur_token_ in k 2051*e1fe3e4aSElliott Hughes return False 2052*e1fe3e4aSElliott Hughes 2053*e1fe3e4aSElliott Hughes def expect_class_name_(self): 2054*e1fe3e4aSElliott Hughes self.advance_lexer_() 2055*e1fe3e4aSElliott Hughes if self.cur_token_type_ is not Lexer.GLYPHCLASS: 2056*e1fe3e4aSElliott Hughes raise FeatureLibError("Expected @NAME", self.cur_token_location_) 2057*e1fe3e4aSElliott Hughes return self.cur_token_ 2058*e1fe3e4aSElliott Hughes 2059*e1fe3e4aSElliott Hughes def expect_cid_(self): 2060*e1fe3e4aSElliott Hughes self.advance_lexer_() 2061*e1fe3e4aSElliott Hughes if self.cur_token_type_ is Lexer.CID: 2062*e1fe3e4aSElliott Hughes return self.cur_token_ 2063*e1fe3e4aSElliott Hughes raise FeatureLibError("Expected a CID", self.cur_token_location_) 2064*e1fe3e4aSElliott Hughes 2065*e1fe3e4aSElliott Hughes def expect_filename_(self): 2066*e1fe3e4aSElliott Hughes self.advance_lexer_() 2067*e1fe3e4aSElliott Hughes if self.cur_token_type_ is not Lexer.FILENAME: 2068*e1fe3e4aSElliott Hughes raise FeatureLibError("Expected file name", self.cur_token_location_) 2069*e1fe3e4aSElliott Hughes return self.cur_token_ 2070*e1fe3e4aSElliott Hughes 2071*e1fe3e4aSElliott Hughes def expect_glyph_(self): 2072*e1fe3e4aSElliott Hughes self.advance_lexer_() 2073*e1fe3e4aSElliott Hughes if self.cur_token_type_ is Lexer.NAME: 2074*e1fe3e4aSElliott Hughes return self.cur_token_.lstrip("\\") 2075*e1fe3e4aSElliott Hughes elif self.cur_token_type_ is Lexer.CID: 2076*e1fe3e4aSElliott Hughes return "cid%05d" % self.cur_token_ 2077*e1fe3e4aSElliott Hughes raise FeatureLibError("Expected a glyph name or CID", self.cur_token_location_) 2078*e1fe3e4aSElliott Hughes 2079*e1fe3e4aSElliott Hughes def check_glyph_name_in_glyph_set(self, *names): 2080*e1fe3e4aSElliott Hughes """Adds a glyph name (just `start`) or glyph names of a 2081*e1fe3e4aSElliott Hughes range (`start` and `end`) which are not in the glyph set 2082*e1fe3e4aSElliott Hughes to the "missing list" for future error reporting. 2083*e1fe3e4aSElliott Hughes 2084*e1fe3e4aSElliott Hughes If no glyph set is present, does nothing. 2085*e1fe3e4aSElliott Hughes """ 2086*e1fe3e4aSElliott Hughes if self.glyphNames_: 2087*e1fe3e4aSElliott Hughes for name in names: 2088*e1fe3e4aSElliott Hughes if name in self.glyphNames_: 2089*e1fe3e4aSElliott Hughes continue 2090*e1fe3e4aSElliott Hughes if name not in self.missing: 2091*e1fe3e4aSElliott Hughes self.missing[name] = self.cur_token_location_ 2092*e1fe3e4aSElliott Hughes 2093*e1fe3e4aSElliott Hughes def expect_markClass_reference_(self): 2094*e1fe3e4aSElliott Hughes name = self.expect_class_name_() 2095*e1fe3e4aSElliott Hughes mc = self.glyphclasses_.resolve(name) 2096*e1fe3e4aSElliott Hughes if mc is None: 2097*e1fe3e4aSElliott Hughes raise FeatureLibError( 2098*e1fe3e4aSElliott Hughes "Unknown markClass @%s" % name, self.cur_token_location_ 2099*e1fe3e4aSElliott Hughes ) 2100*e1fe3e4aSElliott Hughes if not isinstance(mc, self.ast.MarkClass): 2101*e1fe3e4aSElliott Hughes raise FeatureLibError( 2102*e1fe3e4aSElliott Hughes "@%s is not a markClass" % name, self.cur_token_location_ 2103*e1fe3e4aSElliott Hughes ) 2104*e1fe3e4aSElliott Hughes return mc 2105*e1fe3e4aSElliott Hughes 2106*e1fe3e4aSElliott Hughes def expect_tag_(self): 2107*e1fe3e4aSElliott Hughes self.advance_lexer_() 2108*e1fe3e4aSElliott Hughes if self.cur_token_type_ is not Lexer.NAME: 2109*e1fe3e4aSElliott Hughes raise FeatureLibError("Expected a tag", self.cur_token_location_) 2110*e1fe3e4aSElliott Hughes if len(self.cur_token_) > 4: 2111*e1fe3e4aSElliott Hughes raise FeatureLibError( 2112*e1fe3e4aSElliott Hughes "Tags cannot be longer than 4 characters", self.cur_token_location_ 2113*e1fe3e4aSElliott Hughes ) 2114*e1fe3e4aSElliott Hughes return (self.cur_token_ + " ")[:4] 2115*e1fe3e4aSElliott Hughes 2116*e1fe3e4aSElliott Hughes def expect_script_tag_(self): 2117*e1fe3e4aSElliott Hughes tag = self.expect_tag_() 2118*e1fe3e4aSElliott Hughes if tag == "dflt": 2119*e1fe3e4aSElliott Hughes raise FeatureLibError( 2120*e1fe3e4aSElliott Hughes '"dflt" is not a valid script tag; use "DFLT" instead', 2121*e1fe3e4aSElliott Hughes self.cur_token_location_, 2122*e1fe3e4aSElliott Hughes ) 2123*e1fe3e4aSElliott Hughes return tag 2124*e1fe3e4aSElliott Hughes 2125*e1fe3e4aSElliott Hughes def expect_language_tag_(self): 2126*e1fe3e4aSElliott Hughes tag = self.expect_tag_() 2127*e1fe3e4aSElliott Hughes if tag == "DFLT": 2128*e1fe3e4aSElliott Hughes raise FeatureLibError( 2129*e1fe3e4aSElliott Hughes '"DFLT" is not a valid language tag; use "dflt" instead', 2130*e1fe3e4aSElliott Hughes self.cur_token_location_, 2131*e1fe3e4aSElliott Hughes ) 2132*e1fe3e4aSElliott Hughes return tag 2133*e1fe3e4aSElliott Hughes 2134*e1fe3e4aSElliott Hughes def expect_symbol_(self, symbol): 2135*e1fe3e4aSElliott Hughes self.advance_lexer_() 2136*e1fe3e4aSElliott Hughes if self.cur_token_type_ is Lexer.SYMBOL and self.cur_token_ == symbol: 2137*e1fe3e4aSElliott Hughes return symbol 2138*e1fe3e4aSElliott Hughes raise FeatureLibError("Expected '%s'" % symbol, self.cur_token_location_) 2139*e1fe3e4aSElliott Hughes 2140*e1fe3e4aSElliott Hughes def expect_keyword_(self, keyword): 2141*e1fe3e4aSElliott Hughes self.advance_lexer_() 2142*e1fe3e4aSElliott Hughes if self.cur_token_type_ is Lexer.NAME and self.cur_token_ == keyword: 2143*e1fe3e4aSElliott Hughes return self.cur_token_ 2144*e1fe3e4aSElliott Hughes raise FeatureLibError('Expected "%s"' % keyword, self.cur_token_location_) 2145*e1fe3e4aSElliott Hughes 2146*e1fe3e4aSElliott Hughes def expect_name_(self): 2147*e1fe3e4aSElliott Hughes self.advance_lexer_() 2148*e1fe3e4aSElliott Hughes if self.cur_token_type_ is Lexer.NAME: 2149*e1fe3e4aSElliott Hughes return self.cur_token_ 2150*e1fe3e4aSElliott Hughes raise FeatureLibError("Expected a name", self.cur_token_location_) 2151*e1fe3e4aSElliott Hughes 2152*e1fe3e4aSElliott Hughes def expect_number_(self, variable=False): 2153*e1fe3e4aSElliott Hughes self.advance_lexer_() 2154*e1fe3e4aSElliott Hughes if self.cur_token_type_ is Lexer.NUMBER: 2155*e1fe3e4aSElliott Hughes return self.cur_token_ 2156*e1fe3e4aSElliott Hughes if variable and self.cur_token_type_ is Lexer.SYMBOL and self.cur_token_ == "(": 2157*e1fe3e4aSElliott Hughes return self.expect_variable_scalar_() 2158*e1fe3e4aSElliott Hughes raise FeatureLibError("Expected a number", self.cur_token_location_) 2159*e1fe3e4aSElliott Hughes 2160*e1fe3e4aSElliott Hughes def expect_variable_scalar_(self): 2161*e1fe3e4aSElliott Hughes self.advance_lexer_() # "(" 2162*e1fe3e4aSElliott Hughes scalar = VariableScalar() 2163*e1fe3e4aSElliott Hughes while True: 2164*e1fe3e4aSElliott Hughes if self.cur_token_type_ == Lexer.SYMBOL and self.cur_token_ == ")": 2165*e1fe3e4aSElliott Hughes break 2166*e1fe3e4aSElliott Hughes location, value = self.expect_master_() 2167*e1fe3e4aSElliott Hughes scalar.add_value(location, value) 2168*e1fe3e4aSElliott Hughes return scalar 2169*e1fe3e4aSElliott Hughes 2170*e1fe3e4aSElliott Hughes def expect_master_(self): 2171*e1fe3e4aSElliott Hughes location = {} 2172*e1fe3e4aSElliott Hughes while True: 2173*e1fe3e4aSElliott Hughes if self.cur_token_type_ is not Lexer.NAME: 2174*e1fe3e4aSElliott Hughes raise FeatureLibError("Expected an axis name", self.cur_token_location_) 2175*e1fe3e4aSElliott Hughes axis = self.cur_token_ 2176*e1fe3e4aSElliott Hughes self.advance_lexer_() 2177*e1fe3e4aSElliott Hughes if not (self.cur_token_type_ is Lexer.SYMBOL and self.cur_token_ == "="): 2178*e1fe3e4aSElliott Hughes raise FeatureLibError( 2179*e1fe3e4aSElliott Hughes "Expected an equals sign", self.cur_token_location_ 2180*e1fe3e4aSElliott Hughes ) 2181*e1fe3e4aSElliott Hughes value = self.expect_number_() 2182*e1fe3e4aSElliott Hughes location[axis] = value 2183*e1fe3e4aSElliott Hughes if self.next_token_type_ is Lexer.NAME and self.next_token_[0] == ":": 2184*e1fe3e4aSElliott Hughes # Lexer has just read the value as a glyph name. We'll correct it later 2185*e1fe3e4aSElliott Hughes break 2186*e1fe3e4aSElliott Hughes self.advance_lexer_() 2187*e1fe3e4aSElliott Hughes if not (self.cur_token_type_ is Lexer.SYMBOL and self.cur_token_ == ","): 2188*e1fe3e4aSElliott Hughes raise FeatureLibError( 2189*e1fe3e4aSElliott Hughes "Expected an comma or an equals sign", self.cur_token_location_ 2190*e1fe3e4aSElliott Hughes ) 2191*e1fe3e4aSElliott Hughes self.advance_lexer_() 2192*e1fe3e4aSElliott Hughes self.advance_lexer_() 2193*e1fe3e4aSElliott Hughes value = int(self.cur_token_[1:]) 2194*e1fe3e4aSElliott Hughes self.advance_lexer_() 2195*e1fe3e4aSElliott Hughes return location, value 2196*e1fe3e4aSElliott Hughes 2197*e1fe3e4aSElliott Hughes def expect_any_number_(self): 2198*e1fe3e4aSElliott Hughes self.advance_lexer_() 2199*e1fe3e4aSElliott Hughes if self.cur_token_type_ in Lexer.NUMBERS: 2200*e1fe3e4aSElliott Hughes return self.cur_token_ 2201*e1fe3e4aSElliott Hughes raise FeatureLibError( 2202*e1fe3e4aSElliott Hughes "Expected a decimal, hexadecimal or octal number", self.cur_token_location_ 2203*e1fe3e4aSElliott Hughes ) 2204*e1fe3e4aSElliott Hughes 2205*e1fe3e4aSElliott Hughes def expect_float_(self): 2206*e1fe3e4aSElliott Hughes self.advance_lexer_() 2207*e1fe3e4aSElliott Hughes if self.cur_token_type_ is Lexer.FLOAT: 2208*e1fe3e4aSElliott Hughes return self.cur_token_ 2209*e1fe3e4aSElliott Hughes raise FeatureLibError( 2210*e1fe3e4aSElliott Hughes "Expected a floating-point number", self.cur_token_location_ 2211*e1fe3e4aSElliott Hughes ) 2212*e1fe3e4aSElliott Hughes 2213*e1fe3e4aSElliott Hughes def expect_decipoint_(self): 2214*e1fe3e4aSElliott Hughes if self.next_token_type_ == Lexer.FLOAT: 2215*e1fe3e4aSElliott Hughes return self.expect_float_() 2216*e1fe3e4aSElliott Hughes elif self.next_token_type_ is Lexer.NUMBER: 2217*e1fe3e4aSElliott Hughes return self.expect_number_() / 10 2218*e1fe3e4aSElliott Hughes else: 2219*e1fe3e4aSElliott Hughes raise FeatureLibError( 2220*e1fe3e4aSElliott Hughes "Expected an integer or floating-point number", self.cur_token_location_ 2221*e1fe3e4aSElliott Hughes ) 2222*e1fe3e4aSElliott Hughes 2223*e1fe3e4aSElliott Hughes def expect_stat_flags(self): 2224*e1fe3e4aSElliott Hughes value = 0 2225*e1fe3e4aSElliott Hughes flags = { 2226*e1fe3e4aSElliott Hughes "OlderSiblingFontAttribute": 1, 2227*e1fe3e4aSElliott Hughes "ElidableAxisValueName": 2, 2228*e1fe3e4aSElliott Hughes } 2229*e1fe3e4aSElliott Hughes while self.next_token_ != ";": 2230*e1fe3e4aSElliott Hughes if self.next_token_ in flags: 2231*e1fe3e4aSElliott Hughes name = self.expect_name_() 2232*e1fe3e4aSElliott Hughes value = value | flags[name] 2233*e1fe3e4aSElliott Hughes else: 2234*e1fe3e4aSElliott Hughes raise FeatureLibError( 2235*e1fe3e4aSElliott Hughes f"Unexpected STAT flag {self.cur_token_}", self.cur_token_location_ 2236*e1fe3e4aSElliott Hughes ) 2237*e1fe3e4aSElliott Hughes return value 2238*e1fe3e4aSElliott Hughes 2239*e1fe3e4aSElliott Hughes def expect_stat_values_(self): 2240*e1fe3e4aSElliott Hughes if self.next_token_type_ == Lexer.FLOAT: 2241*e1fe3e4aSElliott Hughes return self.expect_float_() 2242*e1fe3e4aSElliott Hughes elif self.next_token_type_ is Lexer.NUMBER: 2243*e1fe3e4aSElliott Hughes return self.expect_number_() 2244*e1fe3e4aSElliott Hughes else: 2245*e1fe3e4aSElliott Hughes raise FeatureLibError( 2246*e1fe3e4aSElliott Hughes "Expected an integer or floating-point number", self.cur_token_location_ 2247*e1fe3e4aSElliott Hughes ) 2248*e1fe3e4aSElliott Hughes 2249*e1fe3e4aSElliott Hughes def expect_string_(self): 2250*e1fe3e4aSElliott Hughes self.advance_lexer_() 2251*e1fe3e4aSElliott Hughes if self.cur_token_type_ is Lexer.STRING: 2252*e1fe3e4aSElliott Hughes return self.cur_token_ 2253*e1fe3e4aSElliott Hughes raise FeatureLibError("Expected a string", self.cur_token_location_) 2254*e1fe3e4aSElliott Hughes 2255*e1fe3e4aSElliott Hughes def advance_lexer_(self, comments=False): 2256*e1fe3e4aSElliott Hughes if comments and self.cur_comments_: 2257*e1fe3e4aSElliott Hughes self.cur_token_type_ = Lexer.COMMENT 2258*e1fe3e4aSElliott Hughes self.cur_token_, self.cur_token_location_ = self.cur_comments_.pop(0) 2259*e1fe3e4aSElliott Hughes return 2260*e1fe3e4aSElliott Hughes else: 2261*e1fe3e4aSElliott Hughes self.cur_token_type_, self.cur_token_, self.cur_token_location_ = ( 2262*e1fe3e4aSElliott Hughes self.next_token_type_, 2263*e1fe3e4aSElliott Hughes self.next_token_, 2264*e1fe3e4aSElliott Hughes self.next_token_location_, 2265*e1fe3e4aSElliott Hughes ) 2266*e1fe3e4aSElliott Hughes while True: 2267*e1fe3e4aSElliott Hughes try: 2268*e1fe3e4aSElliott Hughes ( 2269*e1fe3e4aSElliott Hughes self.next_token_type_, 2270*e1fe3e4aSElliott Hughes self.next_token_, 2271*e1fe3e4aSElliott Hughes self.next_token_location_, 2272*e1fe3e4aSElliott Hughes ) = next(self.lexer_) 2273*e1fe3e4aSElliott Hughes except StopIteration: 2274*e1fe3e4aSElliott Hughes self.next_token_type_, self.next_token_ = (None, None) 2275*e1fe3e4aSElliott Hughes if self.next_token_type_ != Lexer.COMMENT: 2276*e1fe3e4aSElliott Hughes break 2277*e1fe3e4aSElliott Hughes self.cur_comments_.append((self.next_token_, self.next_token_location_)) 2278*e1fe3e4aSElliott Hughes 2279*e1fe3e4aSElliott Hughes @staticmethod 2280*e1fe3e4aSElliott Hughes def reverse_string_(s): 2281*e1fe3e4aSElliott Hughes """'abc' --> 'cba'""" 2282*e1fe3e4aSElliott Hughes return "".join(reversed(list(s))) 2283*e1fe3e4aSElliott Hughes 2284*e1fe3e4aSElliott Hughes def make_cid_range_(self, location, start, limit): 2285*e1fe3e4aSElliott Hughes """(location, 999, 1001) --> ["cid00999", "cid01000", "cid01001"]""" 2286*e1fe3e4aSElliott Hughes result = list() 2287*e1fe3e4aSElliott Hughes if start > limit: 2288*e1fe3e4aSElliott Hughes raise FeatureLibError( 2289*e1fe3e4aSElliott Hughes "Bad range: start should be less than limit", location 2290*e1fe3e4aSElliott Hughes ) 2291*e1fe3e4aSElliott Hughes for cid in range(start, limit + 1): 2292*e1fe3e4aSElliott Hughes result.append("cid%05d" % cid) 2293*e1fe3e4aSElliott Hughes return result 2294*e1fe3e4aSElliott Hughes 2295*e1fe3e4aSElliott Hughes def make_glyph_range_(self, location, start, limit): 2296*e1fe3e4aSElliott Hughes """(location, "a.sc", "d.sc") --> ["a.sc", "b.sc", "c.sc", "d.sc"]""" 2297*e1fe3e4aSElliott Hughes result = list() 2298*e1fe3e4aSElliott Hughes if len(start) != len(limit): 2299*e1fe3e4aSElliott Hughes raise FeatureLibError( 2300*e1fe3e4aSElliott Hughes 'Bad range: "%s" and "%s" should have the same length' % (start, limit), 2301*e1fe3e4aSElliott Hughes location, 2302*e1fe3e4aSElliott Hughes ) 2303*e1fe3e4aSElliott Hughes 2304*e1fe3e4aSElliott Hughes rev = self.reverse_string_ 2305*e1fe3e4aSElliott Hughes prefix = os.path.commonprefix([start, limit]) 2306*e1fe3e4aSElliott Hughes suffix = rev(os.path.commonprefix([rev(start), rev(limit)])) 2307*e1fe3e4aSElliott Hughes if len(suffix) > 0: 2308*e1fe3e4aSElliott Hughes start_range = start[len(prefix) : -len(suffix)] 2309*e1fe3e4aSElliott Hughes limit_range = limit[len(prefix) : -len(suffix)] 2310*e1fe3e4aSElliott Hughes else: 2311*e1fe3e4aSElliott Hughes start_range = start[len(prefix) :] 2312*e1fe3e4aSElliott Hughes limit_range = limit[len(prefix) :] 2313*e1fe3e4aSElliott Hughes 2314*e1fe3e4aSElliott Hughes if start_range >= limit_range: 2315*e1fe3e4aSElliott Hughes raise FeatureLibError( 2316*e1fe3e4aSElliott Hughes "Start of range must be smaller than its end", location 2317*e1fe3e4aSElliott Hughes ) 2318*e1fe3e4aSElliott Hughes 2319*e1fe3e4aSElliott Hughes uppercase = re.compile(r"^[A-Z]$") 2320*e1fe3e4aSElliott Hughes if uppercase.match(start_range) and uppercase.match(limit_range): 2321*e1fe3e4aSElliott Hughes for c in range(ord(start_range), ord(limit_range) + 1): 2322*e1fe3e4aSElliott Hughes result.append("%s%c%s" % (prefix, c, suffix)) 2323*e1fe3e4aSElliott Hughes return result 2324*e1fe3e4aSElliott Hughes 2325*e1fe3e4aSElliott Hughes lowercase = re.compile(r"^[a-z]$") 2326*e1fe3e4aSElliott Hughes if lowercase.match(start_range) and lowercase.match(limit_range): 2327*e1fe3e4aSElliott Hughes for c in range(ord(start_range), ord(limit_range) + 1): 2328*e1fe3e4aSElliott Hughes result.append("%s%c%s" % (prefix, c, suffix)) 2329*e1fe3e4aSElliott Hughes return result 2330*e1fe3e4aSElliott Hughes 2331*e1fe3e4aSElliott Hughes digits = re.compile(r"^[0-9]{1,3}$") 2332*e1fe3e4aSElliott Hughes if digits.match(start_range) and digits.match(limit_range): 2333*e1fe3e4aSElliott Hughes for i in range(int(start_range, 10), int(limit_range, 10) + 1): 2334*e1fe3e4aSElliott Hughes number = ("000" + str(i))[-len(start_range) :] 2335*e1fe3e4aSElliott Hughes result.append("%s%s%s" % (prefix, number, suffix)) 2336*e1fe3e4aSElliott Hughes return result 2337*e1fe3e4aSElliott Hughes 2338*e1fe3e4aSElliott Hughes raise FeatureLibError('Bad range: "%s-%s"' % (start, limit), location) 2339*e1fe3e4aSElliott Hughes 2340*e1fe3e4aSElliott Hughes 2341*e1fe3e4aSElliott Hughesclass SymbolTable(object): 2342*e1fe3e4aSElliott Hughes def __init__(self): 2343*e1fe3e4aSElliott Hughes self.scopes_ = [{}] 2344*e1fe3e4aSElliott Hughes 2345*e1fe3e4aSElliott Hughes def enter_scope(self): 2346*e1fe3e4aSElliott Hughes self.scopes_.append({}) 2347*e1fe3e4aSElliott Hughes 2348*e1fe3e4aSElliott Hughes def exit_scope(self): 2349*e1fe3e4aSElliott Hughes self.scopes_.pop() 2350*e1fe3e4aSElliott Hughes 2351*e1fe3e4aSElliott Hughes def define(self, name, item): 2352*e1fe3e4aSElliott Hughes self.scopes_[-1][name] = item 2353*e1fe3e4aSElliott Hughes 2354*e1fe3e4aSElliott Hughes def resolve(self, name): 2355*e1fe3e4aSElliott Hughes for scope in reversed(self.scopes_): 2356*e1fe3e4aSElliott Hughes item = scope.get(name) 2357*e1fe3e4aSElliott Hughes if item: 2358*e1fe3e4aSElliott Hughes return item 2359*e1fe3e4aSElliott Hughes return None 2360