# -*- coding: utf-8 -*-
"""Productions parser used by css and stylesheets classes to parse
text into a cssutils.util.Seq and at the same time retrieving
additional specific cssutils.util.Item objects for later use.

TODO:
    - ProdsParser
        - handle EOF or STOP?
        - handle unknown @rules
        - handle S: maybe save to Seq? parameterized?
        - store['_raw']: always?

    - Sequence:
        - opt first(), naive impl for now

"""
__all__ = ['ProdParser', 'Sequence', 'Choice', 'Prod', 'PreDef']
__docformat__ = 'restructuredtext'
__version__ = '$Id: parse.py 1418 2008-08-09 19:27:50Z cthedot $'

from helper import pushtoken
import cssutils
import itertools
import re
import string
import sys
import types


class ParseError(Exception):
    """Base Exception class for ProdParser (used internally)."""
    pass

class Done(ParseError):
    """Raised if Sequence or Choice is finished and no more Prods left."""
    pass

class Exhausted(ParseError):
    """Raised if Sequence or Choice is finished but token is given."""
    pass

class Missing(ParseError):
    """Raised if Sequence or Choice is not finished but no matching token given."""
    pass

class NoMatch(ParseError):
    """Raised if nothing in Sequence or Choice does match."""
    pass


class Choice(object):
    """A Choice of productions (Sequence or single Prod)."""

    def __init__(self, *prods, **options):
        """
        *prods
            Prod or Sequence objects
        options:
            optional=False
        """
        self._prods = prods

        try:
            self.optional = options['optional']
        except KeyError, e:
            for p in self._prods:
                if p.optional:
                    self.optional = True
                    break
            else:
                self.optional = False

        self.reset()

    def reset(self):
        """Start Choice from zero"""
        self._exhausted = False

    def matches(self, token):
        """Check if token matches"""
        for prod in self._prods:
            if prod.matches(token):
                return True
        return False

    def nextProd(self, token):
        """
        Return:

        - next matching Prod or Sequence
        - ``None`` if any Prod or Sequence is optional and no token matched
        - raise ParseError if nothing matches and all are mandatory
        - raise Exhausted if choice already done

        ``token`` may be None but this occurs when no tokens left."""
        #print u'TEST for %s in %s' % (token, self)
        if not self._exhausted:
            optional = False
            for p in self._prods:
                if p.matches(token):
                    self._exhausted = True
                    p.reset()
                    #print u'FOUND for %s: %s' % (token, p);#print
                    return p
                elif p.optional:
                    optional = True
            else:
                if not optional:
                    # None matched but also None is optional
                    raise NoMatch(u'No match for %s in %s' % (token, self))
                    #raise ParseError(u'No match in %s for %s' % (self, token))
        elif token:
            raise Exhausted(u'Extra token')

    def __repr__(self):
        return "<cssutils.prodsparser.%s object sequence=%r optional=%r at 0x%x>" % (
                self.__class__.__name__, self.__str__(), self.optional, id(self))

    def __str__(self):
        return u'Choice(%s)' % u', '.join([str(x) for x in self._prods])


class Sequence(object):
    """A Sequence of productions (Choice or single Prod)."""
    def __init__(self, *prods, **options):
        """
        *prods
            Prod or Choice or Sequence objects
        **options:
            minmax = lambda: (1, 1)
                callback returning number of times this sequence may run
        """
        self._prods = prods
        try:
            minmax = options['minmax']
        except KeyError:
            minmax = lambda: (1, 1)

        self._min, self._max = minmax()
        if self._max is None:
            # unlimited
            try:
                # py2.6/3
                self._max = sys.maxsize
            except AttributeError:
                # py<2.6
                self._max = sys.maxint

        self._prodcount = len(self._prods)
        self.reset()

    def matches(self, token):
        """Called by Choice to try to find if Sequence matches."""
        for prod in self._prods:
            if prod.matches(token):
                return True
            try:
                if not prod.optional:
                    break
            except AttributeError:
                pass
        return False

    def reset(self):
        """Reset this Sequence if it is nested."""
        self._roundstarted = False
        self._i = 0
        self._round = 0

    def _currentName(self):
        """Return current element of Sequence, used by name"""
        # TODO: current impl first only if 1st if an prod!
        for prod in self._prods[self._i:]:
            if not prod.optional:
                return str(prod)
        else:
            return 'Sequence'

    optional = property(lambda self: self._min == 0)

    def nextProd(self, token):
        """Return

        - next matching Prod or Choice
        - raises ParseError if nothing matches
        - raises Exhausted if sequence already done
        """
        #print u'TEST for %s in %s' % (token, self)
        while self._round < self._max:

            # for this round
            i = self._i
            round = self._round
            p = self._prods[i]
            if i == 0:
                self._roundstarted = False

            # for next round
            self._i += 1
            if self._i == self._prodcount:
                self._round += 1
                self._i = 0

            if p.matches(token):
                self._roundstarted = True
                # reset nested Choice or Prod to use from start
                p.reset()
                #print u'FOUND for %s: %s' % (token, p);#print
                return p

            elif p.optional:
                continue

            elif round < self._min or self._roundstarted: #or (round == 0 and self._min == 0):
                raise Missing(u'Missing token for production %s' % p)

            elif not token:
                if self._roundstarted:
                    raise Missing(u'Missing token for production %s' % p)
                else:
                    raise Done()

            else:
                raise NoMatch(u'No match for %s in %s' % (token, self))

        if token:
            raise Exhausted(u'Extra token')

    def __repr__(self):
        return "<cssutils.prodsparser.%s object sequence=%r optional=%r at 0x%x>" % (
                self.__class__.__name__, self.__str__(), self.optional, id(self))

    def __str__(self):
        return u'Sequence(%s)' % u', '.join([str(x) for x in self._prods])


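# Illustrative note added for documentation only (not part of the original
# cssutils API): the ``minmax`` option makes a whole Sequence repeatable, so a
# comma separated list of idents could be sketched, using the PreDef helpers
# defined at the end of this module, as
#
#   Sequence(PreDef.ident(),
#            Sequence(PreDef.comma(), PreDef.ident(),
#                     minmax=lambda: (0, None)))
#
# The nested Sequence has min == 0 and is therefore optional; it may repeat
# any number of times.
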
class Prod(object):
    """Single Prod in Sequence or Choice."""
    def __init__(self, name, match, optional=False,
                 toSeq=None, toStore=None,
                 stop=False, stopAndKeep=False,
                 stopIfNoMoreMatch=False,
                 nextSor=False, mayEnd=False,
                 storeToken=None,
                 exception=None):
        """
        name
            name used for error reporting
        match callback
            function called with parameters tokentype and tokenvalue
            returning True, False or raising ParseError
        toSeq callback (optional) or False
            calling toSeq(token, tokens) returns the (type_, val) to be
            appended to seq; if not given the unaltered
            (type_, val) == (token[0], token[1]) is appended

            if False nothing is added

        toStore (optional)
            key to save util.Item to store or callback(store, util.Item)
        optional = False
            whether Prod is optional or not
        stop = False
            if True stop parsing of tokens here
        stopAndKeep
            if True stop parsing of tokens here but return stopping
            token in unused tokens
        stopIfNoMoreMatch = False
            stop even if more tokens are available; similar to stopAndKeep
            but only once nothing matches anymore
        nextSor=False
            next is S or other like , or / (CSSValue)
        mayEnd = False
            no token needs to follow even if the Sequence defines one.
            Used for operator ',/ ' currently only

        storeToken = None
            if True toStore saves the simple token tuple and not an Item
            object to store. Old style processing, TODO: resolve

        exception = None
            exception to be raised in case of error, normally SyntaxErr
        """
        self._name = name
        self.match = match
        self.optional = optional
        self.stop = stop
        self.stopAndKeep = stopAndKeep
        self.stopIfNoMoreMatch = stopIfNoMoreMatch
        self.nextSor = nextSor
        self.mayEnd = mayEnd
        self.storeToken = storeToken
        self.exception = exception

        def makeToStore(key):
            "Return a function used by toStore."
            def toStore(store, item):
                "Set or append store item."
                if key in store:
                    _v = store[key]
                    if not isinstance(_v, list):
                        store[key] = [_v]
                    store[key].append(item)
                else:
                    store[key] = item
            return toStore

        if toSeq or toSeq is False:
            # called: seq.append(toSeq(value))
            self.toSeq = toSeq
        else:
            self.toSeq = lambda t, tokens: (t[0], t[1])

        if hasattr(toStore, '__call__'):
            self.toStore = toStore
        elif toStore:
            self.toStore = makeToStore(toStore)
        else:
            # always set!
            self.toStore = None

    def matches(self, token):
        """Return if token matches."""
        if not token:
            return False
        type_, val, line, col = token
        return self.match(type_, val)

    def reset(self):
        pass

    def __str__(self):
        return self._name

    def __repr__(self):
        return "<cssutils.prodsparser.%s object name=%r at 0x%x>" % (
                self.__class__.__name__, self._name, id(self))


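# Illustrative sketch added for documentation only (not part of the original
# cssutils API): how the classes above are meant to be combined.  Token type
# names such as 'NUMBER' and 'PERCENTAGE' are assumed to be the plain strings
# defined by cssutils.cssproductions.CSSProductions, as used elsewhere in
# this module.
#
#   sign = Prod(name=u'unary +-',
#               match=lambda t, v: v in (u'+', u'-'),
#               optional=True)
#   value = Choice(Prod(name=u'number',
#                       match=lambda t, v: t == 'NUMBER'),
#                  Prod(name=u'percentage',
#                       match=lambda t, v: t == 'PERCENTAGE'))
#   signed_value = Sequence(sign, value)
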
# global tokenizer as there is only one!
tokenizer = cssutils.tokenize2.Tokenizer()

# global: saved from subProds
savedTokens = []


class ProdParser(object):
    """Productions parser."""
    def __init__(self, clear=True):
        self.types = cssutils.cssproductions.CSSProductions
        self._log = cssutils.log
        if clear:
            tokenizer.clear()

    def _texttotokens(self, text):
        """Build a generator which is the only thing that is parsed!
        old classes may use lists etc
        """
        if isinstance(text, basestring):
            # DEFAULT, to tokenize strip space
            return tokenizer.tokenize(text.strip())

        elif type(text) == types.GeneratorType:
            # DEFAULT, already tokenized, should be generator
            return text

        elif isinstance(text, tuple):
            # OLD: (token, tokens) or a single token
            if len(text) == 2:
                # (token, tokens)
                return itertools.chain([text[0]], text[1])
            else:
                # single token
                return iter([text])

        elif isinstance(text, list):
            # OLD: generator from list
            return iter(text)

        else:
            # ?
            return text

    def _SorTokens(self, tokens, until=',/'):
        """New tokens generator which has S tokens removed,
        if followed by anything in ``until``, normally a ``,``."""
        for token in tokens:
            if token[0] == self.types.S:
                try:
                    next_ = tokens.next()
                except StopIteration:
                    yield token
                else:
                    if next_[1] in until:
                        # omit S as e.g. ``,`` has been found
                        yield next_
                    elif next_[0] == self.types.COMMENT:
                        # pass COMMENT
                        yield next_
                    else:
                        yield token
                        yield next_

            elif token[0] == self.types.COMMENT:
                # pass COMMENT
                yield token
            else:
                yield token
                break
        # normal mode again
        for token in tokens:
            yield token


    def parse(self, text, name, productions, keepS=False, checkS=False, store=None,
              emptyOk=False, debug=False):
        """
        text (or token generator)
            to parse, will be tokenized if not a generator yet

            may be:
            - a string to be tokenized
            - a single token, a tuple
            - a tuple of (token, tokensGenerator)
            - already tokenized so a tokens generator

        name
            used for logging
        productions
            used to parse tokens
        keepS
            if WS should be added to Seq or just be ignored
        store  UPDATED
            If a Prod defines ``toStore`` the key defined there
            is a key in store to be set or if store[key] is a list
            the next Item is appended here.

            TODO: NEEDED? :
            Key ``raw`` is always added and holds all unprocessed
            values found
        emptyOk
            if True text may be empty, hard to test before as may be generator

        returns
            :wellformed: True or False
            :seq: a filled cssutils.util.Seq object which is NOT readonly yet
            :store: filled keys defined by Prod.toStore
            :unusedtokens: token generator containing tokens not used yet
        """
        tokens = self._texttotokens(text)

        if not tokens:
            self._log.error(u'No content to parse.')
            return False, [], None, None

        seq = cssutils.util.Seq(readonly=False)
        if not store: # store for specific values
            store = {}
        prods = [productions] # stack of productions
        wellformed = True
        # while no real token is found any S are ignored
        started = False
        stopall = False
        prod = None
        # flag if default S handling should be done
        defaultS = True

        stopIfNoMoreMatch = False
        stopIfNoMoreMatchNow = False

        while True:
            # get from savedTokens or normal tokens
            try:
                #print debug, "SAVED", savedTokens
                token = savedTokens.pop()
            except IndexError, e:
                try:
                    token = tokens.next()
                except StopIteration:
                    break

            #print debug, token, stopIfNoMoreMatch

            type_, val, line, col = token

            # default productions
            if type_ == self.types.COMMENT:
                # always append COMMENT
                seq.append(cssutils.css.CSSComment(val),
                           cssutils.css.CSSComment, line, col)

            elif defaultS and type_ == self.types.S and not checkS:
                # append S (but ignore starting ones)
                if not keepS or not started:
                    continue
                else:
                    seq.append(val, type_, line, col)

#            elif type_ == self.types.ATKEYWORD:
#                # @rule
#                r = cssutils.css.CSSUnknownRule(cssText=val)
#                seq.append(r, type(r), line, col)
            elif type_ == self.types.INVALID:
                # invalidate parse
                wellformed = False
                self._log.error(u'Invalid token: %r' % (token,))
                break

            elif type_ == 'EOF':
                # do nothing? (self.types.EOF == True!)
                stopall = True

            else:
                started = True # check S now
                nextSor = False # reset

                try:
                    while True:
                        # find next matching production
                        try:
                            prod = prods[-1].nextProd(token)
                        except (Exhausted, NoMatch), e:
                            # try next
                            prod = None

                        if isinstance(prod, Prod):
                            # found actual Prod, not a Choice or Sequence
                            break
                        elif prod:
                            # nested Sequence, Choice
                            prods.append(prod)
                        else:
                            # nested exhausted, try in parent
                            if len(prods) > 1:
                                prods.pop()
                            else:
                                raise NoMatch('No match')

                except NoMatch, e:
                    if stopIfNoMoreMatch: # and token:
                        #print "\t1stopIfNoMoreMatch", e, token, prod, 'PUSHING'
                        #tokenizer.push(token)
                        savedTokens.append(token)
                        stopIfNoMoreMatchNow = True
                        stopall = True

                    else:
                        wellformed = False
                        self._log.error(u'%s: %s: %r' % (name, e, token))
                    break

                except ParseError, e:
                    # needed???
                    if stopIfNoMoreMatch: # and token:
                        #print "\t2stopIfNoMoreMatch", e, token, prod
                        tokenizer.push(token)
                        stopIfNoMoreMatchNow = True
                        stopall = True

                    else:
                        wellformed = False
                        self._log.error(u'%s: %s: %r' % (name, e, token))
                    break

                else:
                    #print '\t1', debug, 'PROD', prod

                    # may stop next time, once set stays
                    stopIfNoMoreMatch = prod.stopIfNoMoreMatch or stopIfNoMoreMatch

                    # process prod
                    if prod.toSeq and not prod.stopAndKeep:
                        type_, val = prod.toSeq(token, tokens)
                        if val is not None:
                            seq.append(val, type_, line, col)
                            if prod.toStore:
                                if not prod.storeToken:
                                    prod.toStore(store, seq[-1])
                                else:
                                    # workaround for now for old style token
                                    # parsing!
                                    # TODO: remove when all new style
                                    prod.toStore(store, token)

                    if prod.stop:
                        # stop here and ignore following tokens
                        # EOF? or end of e.g. func ")"
                        break

                    if prod.stopAndKeep: # e.g. ;
                        # stop here and ignore following tokens
                        # but keep this token for next run

                        # TODO: CHECK!!!!
                        tokenizer.push(token)
                        tokens = itertools.chain([token], tokens)

                        stopall = True
                        break

                    if prod.nextSor:
                        # following is S or other token (e.g. ",")?
                        # remove S if
                        tokens = self._SorTokens(tokens, ',/')
                        defaultS = False
                    else:
                        defaultS = True

        lastprod = prod
        #print debug, 'parse done', token, stopall, '\n'
        if not stopall:
            # stop immediately

            while True:
                # all productions exhausted?
                try:
                    prod = prods[-1].nextProd(token=None)
                except Done, e:
                    # ok
                    prod = None

                except Missing, e:
                    prod = None
                    # last was a S operator which may End a Sequence, then ok
                    if hasattr(lastprod, 'mayEnd') and not lastprod.mayEnd:
                        wellformed = False
                        self._log.error(u'%s: %s' % (name, e))

                except ParseError, e:
                    prod = None
                    wellformed = False
                    self._log.error(u'%s: %s' % (name, e))

                else:
                    if prods[-1].optional:
                        prod = None
                    elif prod and prod.optional:
                        # ignore optional
                        continue

                if prod and not prod.optional:
                    wellformed = False
                    self._log.error(u'%s: Missing token for production %r'
                                    % (name, str(prod)))
                    break
                elif len(prods) > 1:
                    # nested exhausted, next in parent
                    prods.pop()
                else:
                    break

            if not emptyOk and not len(seq):
                self._log.error(u'No content to parse.')
                return False, [], None, None

        # trim S from end
        seq.rstrip()
        return wellformed, seq, store, tokens


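# Illustrative usage sketch added for documentation only (not part of the
# original cssutils API): ProdParser.parse() expects a Sequence or Choice as
# ``productions`` and returns (wellformed, seq, store, unusedtokens).  The
# token type name 'NUMBER' is assumed to be the plain string defined by
# cssutils.cssproductions.CSSProductions.
#
#   prods = Sequence(Prod(name=u'number',
#                         match=lambda t, v: t == 'NUMBER'),
#                    Prod(name=u'comma',
#                         match=lambda t, v: v == u','),
#                    Prod(name=u'number',
#                         match=lambda t, v: t == 'NUMBER'))
#   wellformed, seq, store, unused = ProdParser().parse(u'1, 2',
#                                                       u'example',
#                                                       prods)
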
class PreDef(object):
    """Predefined Prod definition for use in productions definition
    for ProdParser instances.
    """
    types = cssutils.cssproductions.CSSProductions
    reHexcolor = re.compile(r'^\#(?:[0-9abcdefABCDEF]{3}|[0-9abcdefABCDEF]{6})$')

    @staticmethod
    def calc(toSeq=None, nextSor=False):
        return Prod(name=u'calcfunction',
                    match=lambda t, v: u'calc(' == cssutils.helper.normalize(v),
                    toSeq=toSeq,
                    nextSor=nextSor)

    @staticmethod
    def char(name='char', char=u',', toSeq=None,
             stop=False, stopAndKeep=False, mayEnd=False,
             stopIfNoMoreMatch=False,
             optional=False, # WAS: optional=True,
             nextSor=False):
        "any CHAR"
        return Prod(name=name, match=lambda t, v: v == char, toSeq=toSeq,
                    stop=stop, stopAndKeep=stopAndKeep, mayEnd=mayEnd,
                    stopIfNoMoreMatch=stopIfNoMoreMatch,
                    optional=optional,
                    nextSor=nextSor)

    @staticmethod
    def comma(optional=False, toSeq=None):
        return PreDef.char(u'comma', u',', optional=optional, toSeq=toSeq)

    @staticmethod
    def comment(parent=None):
        return Prod(name=u'comment',
                    match=lambda t, v: t == 'COMMENT',
                    toSeq=lambda t, tokens: (t[0], cssutils.css.CSSComment([t],
                                                                           parentRule=parent)),
                    optional=True
                    )


    @staticmethod
    def dimension(nextSor=False, stop=False):
        return Prod(name=u'dimension',
                    match=lambda t, v: t == PreDef.types.DIMENSION,
                    toSeq=lambda t, tokens: (t[0], cssutils.helper.normalize(t[1])),
                    stop=stop,
                    nextSor=nextSor)

    @staticmethod
    def function(toSeq=None, nextSor=False, toStore=None):
        return Prod(name=u'function',
                    match=lambda t, v: t == PreDef.types.FUNCTION,
                    toStore=toStore,
                    toSeq=toSeq,
                    nextSor=nextSor)

    @staticmethod
    def funcEnd(stop=False, mayEnd=False):
        ")"
        return PreDef.char(u'end FUNC ")"', u')', stop=stop, mayEnd=mayEnd)

    @staticmethod
    def hexcolor(stop=False, nextSor=False):
        "#123 or #123456"
        return Prod(name='HEX color',
                    match=lambda t, v: (
                        t == PreDef.types.HASH and
                        PreDef.reHexcolor.match(v)
                    ),
                    stop=stop,
                    nextSor=nextSor)

    @staticmethod
    def ident(stop=False, toStore=None, nextSor=False):
        return Prod(name=u'ident',
                    match=lambda t, v: t == PreDef.types.IDENT,
                    stop=stop,
                    toStore=toStore,
                    nextSor=nextSor)

    @staticmethod
    def number(stop=False, toSeq=None, nextSor=False):
        return Prod(name=u'number',
                    match=lambda t, v: t == PreDef.types.NUMBER,
                    stop=stop,
                    toSeq=toSeq,
                    nextSor=nextSor)

    @staticmethod
    def percentage(stop=False, toSeq=None, nextSor=False):
        return Prod(name=u'percentage',
                    match=lambda t, v: t == PreDef.types.PERCENTAGE,
                    stop=stop,
                    toSeq=toSeq,
                    nextSor=nextSor)

    @staticmethod
    def string(stop=False, nextSor=False):
        "string delimiters are removed by default"
        return Prod(name=u'string',
                    match=lambda t, v: t == PreDef.types.STRING,
                    toSeq=lambda t, tokens: (t[0], cssutils.helper.stringvalue(t[1])),
                    stop=stop,
                    nextSor=nextSor)

    @staticmethod
    def S(name=u'whitespace', toSeq=None, optional=False):
        return Prod(name=name,
                    match=lambda t, v: t == PreDef.types.S,
                    toSeq=toSeq,
                    optional=optional,
                    mayEnd=True)

    @staticmethod
    def unary(stop=False, toSeq=None, nextSor=False):
        "+ or -"
        return Prod(name=u'unary +-', match=lambda t, v: v in (u'+', u'-'),
                    optional=True,
                    stop=stop,
                    toSeq=toSeq,
                    nextSor=nextSor)

    @staticmethod
    def uri(stop=False, nextSor=False):
        "'url(' and ')' are removed and URI is stripped"
        return Prod(name=u'URI',
                    match=lambda t, v: t == PreDef.types.URI,
                    toSeq=lambda t, tokens: (t[0], cssutils.helper.urivalue(t[1])),
                    stop=stop,
                    nextSor=nextSor)

    @staticmethod
    def unicode_range(stop=False, nextSor=False):
        "u+123456-abc normalized to lower `u`"
        return Prod(name='unicode-range',
                    match=lambda t, v: t == PreDef.types.UNICODE_RANGE,
                    toSeq=lambda t, tokens: (t[0], t[1].lower()),
                    stop=stop,
                    nextSor=nextSor
                    )

    @staticmethod
    def variable(toSeq=None, stop=False, nextSor=False, toStore=None):
        return Prod(name=u'variable',
                    match=lambda t, v: u'var(' == cssutils.helper.normalize(v),
                    toSeq=toSeq,
                    toStore=toStore,
                    stop=stop,
                    nextSor=nextSor)

    # used for MarginRule for now:
    @staticmethod
    def unknownrule(name=u'@', toStore=None):
        """@rule dummy (matches ATKEYWORD to remove unknown rule tokens from
        stream::

            @x;
            @x {...}

        no nested yet!
        """
        def rule(tokens):
            saved = []
            for t in tokens:
                saved.append(t)
                if (t[1] == u'}' or t[1] == u';'):
                    return cssutils.css.CSSUnknownRule(saved)

        return Prod(name=name,
                    match=lambda t, v: t == u'ATKEYWORD',
                    toSeq=lambda t, tokens: (u'CSSUnknownRule',
                                             rule(pushtoken(t, tokens))
                                             ),
                    toStore=toStore
                    )
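
# Illustrative sketch added for documentation only (not part of the original
# cssutils API): the PreDef helpers above are shortcuts for common Prods and
# are meant to be nested into Sequence/Choice productions which are then
# handed to ProdParser.parse(), e.g. for a value that is either a HEX color
# or an identifier:
#
#   color = Choice(PreDef.hexcolor(), PreDef.ident())
#   wellformed, seq, store, unused = ProdParser().parse(u'#fff',
#                                                       u'color',
#                                                       Sequence(color))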