Package cssutils :: Module util
[hide private]
[frames] | [no frames]

Source Code for Module cssutils.util

  1  """base classes for css and stylesheets packages 
  2  """ 
  3  __all__ = [] 
  4  __docformat__ = 'restructuredtext' 
  5  __author__ = '$LastChangedBy: doerwalter $' 
  6  __date__ = '$LastChangedDate: 2007-08-02 22:58:23 +0200 (Do, 02 Aug 2007) $' 
  7  __version__ = '0.9.2a2 $LastChangedRevision: 160 $' 
  8   
  9  import xml.dom 
 10   
 11  from tokenize import Tokenizer 
 12   
 13  import cssutils 
 14   
 15   
class Base(object):
    """
    Base class for most CSS and StyleSheets classes.

    Contains helper objects
        * _log
        * _ttypes

    and helper methods
        * staticmethod: _normalize(x)
        * _checkReadonly()
        * _tokenize()
        * _tokensupto()
        * _valuestr()

    for inheriting classes helping parsing.
    """
    # one shared tokenizer instance for all subclasses
    # (name-mangled to _Base__tokenizer)
    __tokenizer = Tokenizer()

    _log = __tokenizer.log        # shared logger
    _ttypes = __tokenizer.ttypes  # token type constants

    @staticmethod
    def _normalize(x):
        """
        Return x lowercased with every backslash removed,
        e.g. for x == 'c\\olor\\' returns 'color'.

        Used in Token for the normalized value and currently in
        CSSStyleDeclaration.
        """
        return x.replace(u'\\', u'').lower()

    def _checkReadonly(self):
        "Raises xml.dom.NoModificationAllowedErr if rule/... is readonly."
        if hasattr(self, '_readonly') and self._readonly:
            raise xml.dom.NoModificationAllowedErr(
                u'%s is readonly.' % self.__class__)
            # NOTE: a dead "return True" that followed the raise has been
            # removed — it was unreachable and therefore behavior-neutral.
        return False

    def _tokenize(self, textortokens, _fullSheet=False):
        """
        Return a list of tokens for textortokens.

        textortokens may be
            * a list of tokens (returned unchanged),
            * a single Token (wrapped in a list, e.g. a comment),
            * a (unicode) string, or
            * any other object, which is converted via unicode() first
              (None is passed through to the tokenizer untouched).
        """
        if isinstance(textortokens, list):
            return textortokens  # already tokenized
        elif isinstance(textortokens, cssutils.token.Token):
            return [textortokens]  # comment is a single token
        elif isinstance(textortokens, basestring):  # already a string
            return self.__tokenizer.tokenize(textortokens, _fullSheet)
        else:
            if textortokens is not None:
                textortokens = unicode(textortokens)
            return self.__tokenizer.tokenize(textortokens, _fullSheet)

    def _tokensupto(self, tokens,
                    blockstartonly=False,
                    blockendonly=False,
                    propertynameendonly=False,
                    propertyvalueendonly=False,
                    propertypriorityendonly=False,
                    selectorattendonly=False,
                    funcendonly=False):
        """
        Return (resulttokens, endindex): the tokens of ``tokens`` up to
        and including the first token that ends the current construct,
        plus the index of that token.

        Which value ends the construct is chosen by the keyword flags;
        the end may be ``;`` ``}`` ``)`` or other. The default looks for
        an ending ``}`` or ``;``. The end token only counts when all of
        {}, [] and () nest to level zero.
        """
        ends = u';}'

        if blockstartonly:  # {
            ends = u'{'
        if blockendonly:  # }
            ends = u'}'
        elif propertynameendonly:  # : and ; in case of an error
            ends = u':;'
        elif propertyvalueendonly:  # ; or !important
            ends = (u';', u'!important')
        elif propertypriorityendonly:  # ;
            ends = u';'
        elif selectorattendonly:  # ]
            ends = u']'
        elif funcendonly:  # )
            ends = u')'

        brace = bracket = parant = 0  # nesting of {}, [], ()
        if blockstartonly:
            brace = -1  # set to 0 with first {
        resulttokens = []
        i, imax = 0, len(tokens)
        while i < imax:
            t = tokens[i]

            if u'{' == t.value: brace += 1
            elif u'}' == t.value: brace -= 1
            if u'[' == t.value: bracket += 1
            elif u']' == t.value: bracket -= 1
            # function( or single (
            if u'(' == t.value or \
               Base._ttypes.FUNCTION == t.type: parant += 1
            elif u')' == t.value: parant -= 1

            resulttokens.append(t)

            if t.value in ends and (brace == bracket == parant == 0):
                break

            i += 1

        return resulttokens, i

    def _valuestr(self, t):
        """
        Return the string value of t.

        t may be None (returns u''), a string (returned unchanged) or a
        token list (the token values are joined).
        """
        if t is None:
            return u''
        elif isinstance(t, basestring):
            return t
        else:
            return u''.join([x.value for x in t])