#!/usr/bin/env python
"""
Compiler for CML2

by Eric S. Raymond, <esr@thyrsus.com>
"""
import sys

if sys.version[0] < '2':
    print "Python 2.0 or later is required for this program."
    sys.exit(0)

import string, os, getopt, shlex, cPickle, cml, cStringIO

# Globals
rulebase = None
compstate = None

# User-visible strings.  Separated out in order to
# support internationalization.

_eng = {
    "CLIHELP":"""\

Usage: cmlcompile.py [-o output] [-P] [-v]

 -o file   write the result to a specified file
 -P        enable profiling
 -v        increment debug level

""",
}

# Eventually, do more intelligent selection using LOCALE
lang = _eng

class CompilationState:
    def __init__(self):
        self.debug = 0
        self.errors = 0
        self.bad_symbols = {}
        self.bool_tests = []
        self.warndepend = []
        self.explicit_ancestors = {}
        self.derivations = {}
        self.propnames = {}
        self.dfltsyms = []
        # Used by the menu-declaration parser
        self.condition_stack = []   # Stack of active conditions for {} shorthand
        self.property_stack = []    # Stack of property switches
        self.symbol_list = []       # Result list

# Lexical analysis
_keywords = (
    'alias', 'banner', 'choices', 'choicegroup',
    'condition', 'debug', 'default', 'dependent',
    'derive', 'enum', 'expert', 'explanation',
    'give', 'icon', 'like', 'menu',
    'nohelp', 'on', 'prefix', 'prohibit',
    'property', 'range', 'require', 'save',
    'start', 'suppress', 'symbols', 'text',
    'trits', 'unless', 'warndepend', 'when',
    )
_ternaryops = ('?', ':')
_arithops = ('*', '+', '-')
_boolops = ('and', 'or', 'implies')
_relops = ('==', '!=', '<', '>', '>=', '<=')
_termops = ('|', '&', '$')      # min, max, similarity
_operators = _termops + _relops + _boolops + _arithops + _ternaryops + ("(", ")")
_tritvals = ("n", "m", "y")
_atoms = ("trit", "string", "decimal", "hexadecimal")
#_suffixes = ("&", "?", "%", "@", "$")

class Token:
    "CML2's internal token type."
    def __init__(self, type, attr=None):
        self.type = type
        self.attr = attr
        if compstate.debug > 1: print "CML token: ", `self`
    def __repr__(self):
        if self.type == "EOF":
            return "EOF"
        elif self.attr is not None:
            return self.type + "=" + `self.attr`
        else:
            return self.type
    def __cmp__(self, other):
        if isinstance(other, Token):
            typecmp = cmp(self.type, other.type)
            if typecmp or not self.attr:
                return typecmp
            else:
                return cmp(self.attr, other.attr)
        else:
            return cmp(self.type, other)
    def __getitem__(self, i):
        raise IndexError

class lexwrapper(shlex.shlex):
    "Lexer subclass that returns Tokens with type-annotation information."
    def __init__(self, stream, endtok=None):
        self.endtok = endtok
        # Strictly a speed hack.
        name = stream.name
        if endtok:
            contents = stream
        else:
            contents = cStringIO.StringIO(stream.read())
            stream.close()
        shlex.shlex.__init__(self, contents, name)

    def lex_token(self):
        # Get a (type, attr) token tuple, handling inclusion
        raw = self.get_token()
        if type(raw) is not type(""):       # Pushed-back token
            return raw
        elif not raw or raw == self.endtok:
            return Token("EOF")
        elif raw[0] in self.quotes:
            return Token('string', raw[1:-1])
        elif raw in _tritvals:
            return Token('trit', raw)
        elif len(raw) > 2 and \
                raw[0] == '0' and raw[1] == 'x' and raw[2] in string.hexdigits:
            return Token('hexadecimal', long(raw[2:], 16))
        elif raw[0] in string.digits:
            return Token('decimal', int(raw))
        elif raw in ('!', '=', '<', '>'):   # Relational tests
            next = self.get_token()
            if next == '=':
                return Token(raw+next)
            else:
                self.push_token(next)
                return Token(raw)
        elif raw == 'text':
            data = ""
            while 1:
                line = self.instream.readline()
                if line == "" or line == ".\n":     # Terminated by dot.
                    break
                if line[0] == '.':
                    line = line[1:]
                data = data + line
            return Token("text", data)
        elif raw == 'icon':
            data = ""
            while 1:
                line = self.instream.readline()
                if line == "" or line == "\n":      # Terminated by blank line
                    break
                data = data + line
            self.push_token(data)
            return Token(raw)
        elif raw in _keywords or raw in _operators:
            return Token(raw)
        elif compstate.propnames.has_key(raw):
            return Token('property', raw)
        else:
            # Nasty hack alert.  If there is a declared prefix for the
            # rulebase, ignore it as a prefix of names.  This will
            # enable us to be backward-compatible with names like
            # CONFIG_3C515 that have leading numerics when stripped.
            if rulebase.prefix and raw[:len(rulebase.prefix)] == rulebase.prefix:
                raw = raw[len(rulebase.prefix):]
            return Token('word', raw)

    def complain(self, str):
        # Report non-fatal parse error; format like C compiler message.
        if not compstate.debug and not compstate.errors:
            sys.stderr.write('\n')
        sys.stderr.write(self.error_leader() + " " + str + "\n")
        compstate.errors = compstate.errors + 1

    def croak(self, str):
        # Report a fatal parse error and die
        self.complain(str)
        sys.exit(1)

    def demand(self, type, attr=None):
        # Require a given token or token type, croak if we don't get it
        tok = self.lex_token()
        if tok.type == "EOF":
            self.croak("premature EOF")
        elif attr is not None and tok.attr != attr:
            self.croak("syntax error, saw `%s' while expecting `%s'" % (tok, attr))
        elif tok.type != type:
            self.croak("syntax error, expecting token of type `%s' (actually saw %s=%s)" % (type, tok.type, tok.attr))
        else:
            return tok.attr

    def sourcehook(self, newfile):
        # Override the hook in the shlex class
        try:
            if newfile[0] == '"':
                newfile = newfile[1:-1]
            # This implements cpp-like semantics for relative-path inclusion.
            if type(self.infile) is type("") and not os.path.isabs(newfile):
                newfile = os.path.join(os.path.dirname(self.infile), newfile)
            return (newfile, open(newfile, "r"))
        except IOError:
            self.complain("I/O error while opening '%s'" % (newfile,))
            sys.exit(1)
            return None     # Appease pychecker

# Parsing

class ExpressionError:
    "Express a compile-time error."
    def __init__(self, explain):
        self.args = ("expression error " + explain,)
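
# Expression parsing is recursive descent.  Each function below handles one
# precedence level and returns either a Token leaf or a prefix tuple of the
# form (operator, operand, ...).  The call chain, from the outermost
# (loosest-binding) level down to atoms, is:
#
#   parse_expr_inner   additive '+' and '-'
#   parse_ternary      'guard ? trueval : falseval'
#   parse_summand      '*'
#   parse_factor       'implies'
#   parse_disjunct     'or'
#   parse_conjunct     'and'
#   parse_assertion    'not'
#   parse_relational   '==', '!=', '<', '>', '>=', '<='
#   parse_term         '|', '&', '$'  (min, max, similarity)
#   parse_atom         constants, symbol names, parenthesized expressions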
def parse_atom(input):
    if compstate.debug >= 2: print "entering parse_atom..."
    op = input.lex_token()
    if op.type in _atoms:
        if compstate.debug >= 2: print "parse_atom returns", op.attr
        return op
    elif op.type == '(':
        sub = parse_expr_inner(input)
        close = input.lex_token()
        if close != ')':
            raise ExpressionError, "while expecting a close paren"
        else:
            if compstate.debug >= 2: print "parse_atom returns singleton", sub
            return sub
    elif op.type in _keywords:
        raise ExpressionError, "keyword %s while expecting atom" % op.type
    elif op.type == 'word':
        if compstate.debug >= 2: print "parse_atom returns", op.attr
        return op

def parse_term(input):
    if compstate.debug >= 2: print "entering parse_term..."
    left = parse_atom(input)
    op = input.lex_token()
    if op.type not in _termops:
        input.push_token(op)
        if compstate.debug >= 2: print "parse_term returns singleton", left
        return left
    right = parse_term(input)
    expr = (op.type, left, right)
    if compstate.debug >= 2: print "parse_term returns", expr
    return expr

def parse_relational(input):
    if compstate.debug >= 2: print "entering parse_relational..."
    left = parse_term(input)
    op = input.lex_token()
    if op.type not in _relops:
        input.push_token(op)
        if compstate.debug >= 2: print "parse_relational returns singleton", left
        return left
    right = parse_term(input)
    expr = (op.type, left, right)
    if compstate.debug >= 2: print "parse_relational returns", expr
    return expr

def parse_assertion(input):
    if compstate.debug >= 2: print "entering parse_assertion..."
    negate = input.lex_token()
    if negate.type == 'not':
        return ('not', parse_relational(input))
    input.push_token(negate)
    return parse_relational(input)

def parse_conjunct(input):
    if compstate.debug >= 2: print "entering parse_conjunct..."
    left = parse_assertion(input)
    op = input.lex_token()
    if op.type != 'and':
        input.push_token(op)
        if compstate.debug >= 2: print "parse_conjunct returns singleton", left
        return left
    else:
        expr = ('and', left, parse_conjunct(input))
        if compstate.debug >= 2: print "parse_conjunct returns", expr
        return expr

def parse_disjunct(input):
    if compstate.debug >= 2: print "entering parse_disjunct..."
    left = parse_conjunct(input)
    op = input.lex_token()
    if op.type != 'or':
        input.push_token(op)
        if compstate.debug >= 2: print "parse_disjunct returns singleton", left
        return left
    else:
        expr = ('or', left, parse_disjunct(input))
        if compstate.debug >= 2: print "parse_disjunct returns", expr
        return expr

def parse_factor(input):
    if compstate.debug >= 2:
        print "entering parse_factor..."
    left = parse_disjunct(input)
    op = input.lex_token()
    if op.type != 'implies':
        input.push_token(op)
        if compstate.debug >= 2: print "parse_factor returns singleton", left
        return left
    else:
        expr = ('implies', left, parse_disjunct(input))
        if compstate.debug >= 2: print "parse_factor returns", expr
        return expr

def parse_summand(input):
    if compstate.debug >= 2: print "entering parse_summand..."
    left = parse_factor(input)
    op = input.lex_token()
    if op.type != '*':
        input.push_token(op)
        if compstate.debug >= 2: print "parse_summand returns singleton", left
        return left
    else:
        expr = ('*', left, parse_expr_inner(input))
        if compstate.debug >= 2: print "parse_summand returns", expr
        return expr

def parse_ternary(input):
    if compstate.debug >= 2: print "entering parse_ternary..."
    guard = parse_summand(input)
    op = input.lex_token()
    if op.type != '?':
        input.push_token(op)
        if compstate.debug >= 2: print "parse_ternary returns singleton", guard
        return guard
    else:
        trueval = parse_summand(input)
        op = input.lex_token()
        if op.type != ':':
            raise ExpressionError("while expecting : in ternary")
        falseval = parse_summand(input)
        expr = ('?', guard, trueval, falseval)
        if compstate.debug >= 2: print "parse_ternary returns", expr
        return expr

def parse_expr_inner(input):
    if compstate.debug >= 2: print "entering parse_inner_expr..."
    left = parse_ternary(input)
    op = input.lex_token()
    if op.type not in ('+', '-'):
        input.push_token(op)
        if compstate.debug >= 2: print "parse_expr_inner returns singleton", left
        return left
    else:
        expr = (op.type, left, parse_expr_inner(input))
        if compstate.debug >= 2: print "parse_expr_inner returns", expr
        return expr

def parse_expr(input):
    "Parse an expression."
    try:
        exp = parse_expr_inner(input)
        return exp
    except ExpressionError, exp:
        input.croak(exp.args[0])
        return None

def make_dependent(guard, symbol):
    "Create a dependency link, indirecting properly through menus."
    if compstate.debug > 0:
        print "Making %s dependent on %s" % (symbol.name, guard.name)
    # If symbol is a menu, we'd really like to create a dependency link
    # for each of its children.  But they won't be defined at this point
    # if the reference is forward.
    if guard not in symbol.ancestors:
        symbol.ancestors.append(guard)
    if symbol not in guard.dependents:
        guard.dependents.append(symbol)

def intern_symbol(input, name=None, oktypes=None, record=0):
    "Attempt to read and intern a symbol."
    if name is None:
        tok = input.lex_token()
        if tok.type == "word":
            name = tok.attr
        else:
            input.push_token(tok)
            return None
    # If symbol is a constant just pass it through.
    if name == "y":
        return cml.y
    elif name == "m":
        return cml.m
    elif name == "n":
        return cml.n
    # If we have not seen the symbol before, create an entry for it.
    if not rulebase.dictionary.has_key(name):
        ref = rulebase.dictionary[name] = cml.ConfigSymbol(name,
                                                           None, None, None,
                                                           input.infile,
                                                           input.lineno)
        compstate.explicit_ancestors[ref] = []
    else:
        ref = rulebase.dictionary[name]
        if ref.type and oktypes is not None and ref.type not in oktypes:
            input.complain('incompatible previous declaration of %s as %s (see "%s", %d)' % (name, ref.type, ref.file, ref.lineno))
        if record:
            if not ref.prompt:
                ref.file = input.infile
                ref.lineno = input.lineno
            else:
                input.complain('duplicate symbol %s (see "%s", line %d)'
                               % (name, ref.file, ref.lineno))
    return ref

def intern_symbol_list(input, record=0):
    "Get a list of symbols (terminate on keyword)."
    list = []
    while 1:
        symbol = intern_symbol(input, None, None, record)
        if symbol == None:
            break
        else:
            list.append(symbol)
    if not list:
        input.complain("syntax error, expected a nonempty word list")
    return list
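
# parse() consumes one top-level CML2 statement per pass through its main
# loop.  The statement keywords dispatched on below are: start, symbols,
# unless/when ... suppress [dependent] / save, menu, choices, choicegroup,
# derive ... from, require/prohibit ... [explanation], default ... from
# [range|enum], give, debug, prefix, banner, icon, condition ... on,
# warndepend, and property [alias].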
def parse(input, baton):
    # Parse an entire CML program
    input.source = "source"
    if compstate.debug > 2:
        print "Calling parse()"
        input.debug = 1
    while 1:
        if not compstate.debug and not compstate.errors:
            baton.twirl()
        leader = input.lex_token()
        if compstate.debug > 1: print "Parsing declaration beginning with %s..." % (leader,)
        # Language constructs begin here
        if leader.type == "EOF":
            break
        elif leader.type == "start":
            rulebase.start = input.lex_token().attr
        elif leader.type in ("menus", "explanations"):
            input.complain("menus and explanations declarations are "
                           "obsolete, replace these keywords with `symbols'")
        elif leader.type == "symbols":
            while 1:
                ref = intern_symbol(input, None, None, record=1)
                if ref == None:
                    break
                ref.prompt = input.demand("string")

                # These symbols may be followed by optional help text
                tok = input.lex_token()
                if tok.type == "text":
                    rulebase.dictionary[ref.name].helptext = tok.attr
                elif tok.type == "like":
                    target = input.lex_token()
                    if not rulebase.dictionary.has_key(target.attr):
                        input.complain("unknown 'like' symbol %s" % target.attr)
                    elif not rulebase.dictionary[target.attr].help():
                        input.complain("'like' symbol %s has no help" % target.attr)
                    else:
                        rulebase.dictionary[ref.name].helptext = rulebase.dictionary[target.attr].help()
                else:
                    input.push_token(tok)
            if compstate.debug:
                print "%d symbols read" % (len(rulebase.dictionary),)
        elif leader.type in ("unless", "when"):
            guard = parse_expr(input)
            maybe = input.lex_token()
            if maybe == "suppress":
                if leader.type == "when":
                    guard = ("==", guard, cml.n)
                dependent = input.lex_token()
                make_dep = 0
                if dependent.type == "dependent":
                    make_dep = 1
                else:
                    input.push_token(dependent)
                list = intern_symbol_list(input)
                list.reverse()
                for symbol in list:
                    if make_dep:
                        traverse_make_dep(symbol, guard, input)
                    # Add it to ordinary visibility constraints
                    if symbol.visibility:
                        symbol.visibility = ('and', guard, symbol.visibility)
                    else:
                        symbol.visibility = guard
            elif maybe == "save":
                if leader.type == "unless":
                    guard = ("==", guard, cml.n)
                list = intern_symbol_list(input)
                list.reverse()
                for symbol in list:
                    if symbol.saveability:
                        symbol.saveability = ('and', guard, symbol.saveability)
                    else:
                        symbol.saveability = guard
                    # This is a kluge.  It relies on the fact that symbols
                    # explicitly set are always saved.
                    while symbol.menu:
                        symbol.menu.setcount = 1
                        symbol = symbol.menu
            else:
                input.complain("expected `suppress' or `save'")
            compstate.bool_tests.append((guard, input.infile, input.lineno))
        elif leader.type == "menu":
            menusym = intern_symbol(input, None, ('bool', 'menu', 'choices'), record=1)
            menusym.type = "menu"
            list = parse_symbol_tree(input)
            #print "Adding %s to %s" % (list, menusym.name)
            # Add and validate items
            menusym.items += list
            for symbol in list:
                if symbol.menu:
                    input.complain("symbol %s in %s occurs in another menu (%s)"
                                   % (symbol.name, menusym.name, symbol.menu.name))
                else:
                    symbol.menu = menusym
        elif leader.type == "choices":
            menusym = intern_symbol(input, None, ('bool', 'menu', 'choices'), record=1)
            menusym.type = "choices"
            list = parse_symbol_tree(input)
            for symbol in list:
                symbol.type = "bool"
                symbol.choicegroup = filter(lambda x, s=symbol: x != s, list)
            dflt = input.lex_token()
            if dflt.type != 'default':
                default = list[0].name
                input.push_token(dflt)
            else:
                default = intern_symbol(input, None, None, record=1)
                if default not in list:
                    input.complain("default %s must be in the menu" % (`default`,))
                else:
                    menusym.default = default
            menusym.items = list
            for symbol in list:
                if symbol.menu:
                    input.complain("symbol %s occurs in another menu (%s)"
                                   % (symbol.name, symbol.menu.name))
                else:
                    symbol.menu = menusym
        elif leader.type == "choicegroup":
            group = intern_symbol_list(input)
            for symbol in group:
                symbol.choicegroup = filter(lambda x, s=symbol: x != s, group)
        elif leader.type == "derive":
            symbol = intern_symbol(input)
            input.demand("word", "from")
            symbol.default = parse_expr(input)
            compstate.derivations[symbol] = 1
        elif leader.type in ("require", "prohibit"):
            expr = parse_expr(input)
            if leader.type == "prohibit":
                expr = ('==', expr, cml.n)
            next = input.lex_token()
            if next.type != 'explanation':
                input.push_token(next)
                msg = None
            else:
                expl = input.lex_token()
                if expl.type != 'word':
                    input.complain("while expecting a word of explanation, I see %s" % (`expl`,))
                    continue
                entry = intern_symbol(input, expl.attr)
                if entry.type:
                    input.complain("expecting an explanation symbol here")
                else:
                    entry.type = "explanation"
                msg = entry.prompt
            rulebase.constraints.append(cml.Requirement(expr, msg, input.infile, input.lineno))
            compstate.bool_tests.append((expr, input.infile, input.lineno))
        elif leader.type == "default":
            symbol = input.demand("word")
            input.demand("word", "from")
            expr = parse_expr(input)
            entry = intern_symbol(input, symbol)
            if entry.default:
                input.complain("%s already has a default" % (symbol,))
            else:
                entry.default = expr
            next = input.lex_token()
            if next.type == "range":
                entry.range = []
                while 1:
                    low = input.lex_token()
                    if low.type in _keywords:
                        input.push_token(low)
                        break
                    elif low.type in ("decimal", "hexadecimal"):
                        low = low.attr
                    else:
                        input.complain("bad token %s where range literal expected" % (low.attr))
                    rangesep = input.lex_token()
                    if rangesep.type in _keywords:
                        entry.range.append(low)
                        input.push_token(rangesep)
                        break
                    elif rangesep.type in ("decimal", "hexadecimal"):
                        entry.range.append(low)
                        input.push_token(rangesep)
                        continue
                    elif rangesep.type == '-':
                        high = input.lex_token()
                        if high.type in ("decimal", "hexadecimal"):
                            high = high.attr
                            entry.range.append((low, high))
                            continue
                        else:
                            input.croak("malformed range")
                            break
            elif next.type == "enum":
                entry.range = []
                entry.enum = 1
                while 1:
                    name = input.lex_token()
                    if name.type in _keywords:
                        input.push_token(name)
                        break
                    elif name.type != 'word':
                        input.complain("bad token %s where enum name expected" % (name.attr))
                        continue
                    ename = intern_symbol(input, name.attr, None, record=1)
                    ename.type = "message"
                    input.demand('=')
                    value = input.lex_token()
                    if value.type in ("decimal", "hexadecimal"):
                        value = value.attr
                        entry.range.append((ename.name, value))
                        continue
                    else:
                        input.croak("malformed enum")
            else:
                input.push_token(next)
                continue
        elif leader.type == 'give':
            list = intern_symbol_list(input)
            input.demand('property')
            label = input.lex_token()
            for symbol in list:
                symbol.setprop(label.attr)
        elif leader.type == 'debug':
            compstate.debug = input.lex_token().attr
        elif leader.type == 'prefix':
            rulebase.prefix = input.lex_token().attr
        elif leader.type == 'banner':
            entry = intern_symbol(input, None, record=1)
            entry.type = "message"
            rulebase.banner = entry
        elif leader.type == 'icon':
            if rulebase.icon:
                input.complain("multiple icon declarations")
            rulebase.icon = input.lex_token().attr
        elif leader.type == 'condition':
            flag = input.lex_token()
            input.demand("on")
            val = None
            switch = input.lex_token()
            if switch.type in ("decimal", "hexadecimal"):
                val = int(switch.attr)
            elif switch.type == "string":
                val = switch.attr
            elif switch.type == "trit":
                val = resolve(switch)   # No flag is module-valued yet
            entry = intern_symbol(input, switch.attr)
            # Someday is today
            if flag == "trits":
                if val is not None:
                    rulebase.trit_tie = val
                else:
                    rulebase.trit_tie = entry
            elif flag == "nohelp":
                if val is not None:
                    rulebase.help_tie = val
                else:
                    rulebase.help_tie = entry
            elif flag == "expert":
                if val is not None:
                    rulebase.expert_tie = val
                else:
                    rulebase.expert_tie = entry
            else:
                input.complain("unknown flag %s in condition statement" % (flag,))
        elif leader.type == 'warndepend':
            iffy = intern_symbol_list(input)
            for symbol in iffy:
                compstate.warndepend.append(symbol)
        elif leader.type == 'property':
            propname = input.lex_token()
            if propname.type in _keywords:
                input.croak("malformed property declaration")
            compstate.propnames[propname.attr] = propname.attr
            maybe = input.lex_token()
            if maybe.type == 'alias':
                while 1:
                    alias = input.lex_token()
                    if alias.type != 'word':
                        input.push_token(alias)
                        break
                    compstate.propnames[alias.attr] = propname.attr
        else:
            input.croak("syntax error, unknown statement %s" % (leader,))

# Menu list parsing
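#
# A menu body is a list of symbol declarations.  Each declaration may be
# prefixed by property switches (a bare property name asserts the property,
# '~' before it removes it, ':' switches back to asserting), may carry a
# type suffix ('?' trit, '%' decimal, '@' hexadecimal, '$' string; no
# suffix means bool), and may be followed by a {}-bracketed subtree whose
# members become dependent on the symbol that precedes the '{'.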

def get_symbol_declaration(input):
    # First grab a properties prefix
    global compstate
    if compstate.debug >= 2: print "entering get_symbol_declaration..."
    props = []
    propflag = 1
    while 1:
        symbol = input.lex_token()
        if symbol.attr == '~':
            propflag = 0
        elif symbol.type == ':':
            propflag = 1
        elif symbol.type == 'property':
            props.append((propflag, compstate.propnames[symbol.attr]))
        else:
            input.push_token(symbol)
            break
    compstate.property_stack.append(props)
    #if compstate.debug >= 2: print "label list is %s" % props
    # Now, we get either a subtree or a single declaration
    symbol = input.lex_token()
    if symbol.attr == '{':          # Symbol subtree
        if compstate.symbol_list and compstate.symbol_list[0].type == "string":
            input.complain("string symbol is not a legal submenu guard")
        if compstate.symbol_list:
            compstate.condition_stack.append(compstate.symbol_list[-1])
        else:
            compstate.condition_stack.append(None)
        inner_symbol_tree(input)
        compstate.property_stack.pop()
        return 1
    elif symbol.attr == '}':
        if not compstate.condition_stack:
            input.complain("extra }")
        else:
            compstate.condition_stack.pop()
        compstate.property_stack.pop()
        return 0
    elif symbol.type == 'word':     # Declaration
        if compstate.debug >= 2: print "interning %s" % symbol.attr
        entry = intern_symbol(input, symbol.attr, record=1)
        compstate.symbol_list.append(entry)
        entry.depth = len(compstate.condition_stack)
        if compstate.condition_stack and compstate.condition_stack[-1] is not None:
            make_dependent(compstate.condition_stack[-1], entry)
        # Apply properties
        propdict = {}
        for level in compstate.property_stack:
            for (flag, property) in level:
                if flag:
                    propdict[property] = 1
                else:
                    if not propdict.has_key(property):
                        input.complain("property %s can't be removed when it's not present" % property)
                    else:
                        propdict[property] = 0
        for (prop, val) in propdict.items():
            if val == 1 and not entry.hasprop(prop):
                entry.setprop(prop)
            elif val == 0 and entry.hasprop(prop):
                entry.delprop(prop)
        # Read a type suffix if present
        if entry.type not in ("menu", "choices", "explanation", "message"):
            entry.type = "bool"
            symbol = input.lex_token()
            if symbol.type == '?':      # This is also an operator
                entry.type = 'trit'
            elif symbol.attr == '%':
                entry.type = 'decimal'
            elif symbol.attr == '@':
                entry.type = 'hexadecimal'
            elif symbol.type == '$':    # This is also an operator
                entry.type = 'string'
            else:
                input.push_token(symbol)
        compstate.property_stack.pop()
        return 1
    elif symbol.type in _keywords + ("EOF",):
        input.push_token(symbol)
        compstate.property_stack.pop()
        return 0
    else:
        input.complain("unexpected token %s" % symbol)
        compstate.property_stack.pop()
        return 0
    return 1
def inner_symbol_tree(input):
    while get_symbol_declaration(input):
        pass

def parse_symbol_tree(input):
    global compstate
    if compstate.debug >= 2: print "entering parse_symbol_tree..."
    # Get a nonempty list of config symbols and menu ids.
    # Interpret the {} shorthand if second argument is nonempty
    compstate.condition_stack = []  # Stack of active conditions for {} shorthand
    compstate.property_stack = []
    compstate.symbol_list = []
    inner_symbol_tree(input)
    if not compstate.symbol_list:
        input.complain("syntax error, expected a nonempty symbol declaration list")
    if compstate.symbol_list[0].depth == 1:
        for symbol in compstate.symbol_list:
            symbol.depth -= 1
    return compstate.symbol_list

def traverse_make_dep(symbol, guard, input):
    "Create the dependency relations implied by a 'suppress depend' guard."
    #print "traverse_make_dep(%s, %s)" % (symbol.name, guard)
    if compstate.derivations.has_key(symbol):
        return
    elif isinstance(guard, cml.trit) or (isinstance(guard, Token) and guard.attr in ("n", "m", "y")):
        return
    elif isinstance(guard, Token):
        if guard in compstate.explicit_ancestors[symbol]:
            input.complain("%s is already an ancestor of %s" % (guard.attr, symbol.name))
        else:
            compstate.explicit_ancestors[symbol].append(guard)
    elif isinstance(guard, cml.ConfigSymbol):
        if guard in compstate.explicit_ancestors[symbol]:
            input.complain("%s is already an ancestor of %s" % (guard.name, symbol.name))
        else:
            compstate.explicit_ancestors[symbol].append(guard)
    elif guard[0] == 'and' or guard[0] in _relops:
        traverse_make_dep(symbol, guard[1], input)
        traverse_make_dep(symbol, guard[2], input)
    elif guard[0] in _boolops:
        return      # Don't descend into disjunctions
    else:
        input.complain("unexpected operation %s in visibility guard" % guard[0])

# Functions for validating the parse tree

def simple_error(file, line, errmsg):
    if not compstate.debug and not compstate.errors:
        sys.stderr.write('\n')
    sys.stderr.write(error_leader(file, line) + errmsg)
    compstate.errors = compstate.errors + 1

def validate_boolean(expr, file, line, ok=0):
    # Check for ambiguous boolean expr terms.
    #print "validate_boolean(%s, %s, %s, %s)" % (expr, file, line, ok)
    if isinstance(expr, cml.ConfigSymbol):
        if expr.type in ("trit", "decimal", "hexadecimal") and not ok:
            simple_error(file, line, "test of %s is ambiguous\n" % (expr.name,))
    elif type(expr) is type(()):
        validate_boolean(expr[1], file, line, expr[0] in _relops + _termops)
        validate_boolean(expr[2], file, line, expr[0] in _relops + _termops)

def validate_expr(expr, file, line):
    # Check for bad type combinations in expressions
    # Return a leaf node type, inaccurate but good enough for
    # consistency checking.
    if isinstance(expr, cml.ConfigSymbol):
        if expr.is_numeric():
            return "integer"
        elif expr.is_logical():
            return "trit"
        else:
            return expr.type
    elif isinstance(expr, cml.trit):
        return "trit"
    elif type(expr) in (type(0), type(0L)):
        return "integer"
    elif type(expr) == type(""):
        return "string"
    elif expr[0] == '?':
        left = validate_expr(expr[2], file, line)
        right = validate_expr(expr[3], file, line)
        if left != right:
            simple_error(file, line, "types %s and %s don't match in ternary expression\n" % (left, right))
        return left
    elif expr[0] in _arithops:
        left = validate_expr(expr[1], file, line)
        if left not in ("integer", "trit"):
            simple_error(file, line, "bad %s left operand for arithmetic operator %s\n" % (left, expr[0]))
        right = validate_expr(expr[2], file, line)
        if right not in ("integer", "trit"):
            simple_error(file, line, "bad %s right operand for arithmetic operator %s\n" % (right, expr[0]))
        return "integer"
    elif expr[0] in _boolops or expr[0] in _termops:
        left = validate_expr(expr[1], file, line)
        if left != "trit":
            simple_error(file, line, "bad %s left operand for trit operator %s\n" % (left, expr[0]))
        right = validate_expr(expr[2], file, line)
        if right != "trit":
            simple_error(file, line, "bad %s right operand for trit operator %s\n" % (right, expr[0]))
        return "trit"
    elif expr[0] in _relops:
        left = validate_expr(expr[1], file, line)
        right = validate_expr(expr[2], file, line)
        if left != right:
            simple_error(file, line, "types %s and %s don't match in %s expression\n" % (left, right, expr[0]))
        return "trit"
    else:
        if not compstate.debug and not compstate.errors:
            sys.stderr.write('\n')
        sys.stderr.write(error_leader(file, line) + \
                         "internal error: unexpected node %s in expression\n" % expr[0])
        compstate.errors = compstate.errors + 1

def symbols_by_preorder(node):
    # Get a list of config symbols in natural traverse order
    if node.items:
        sublists = map(symbols_by_preorder, node.items)
        flattened = []
        for m in sublists:
            flattened = flattened + m
        return flattened
    else:
        return [node.name]

def resolve(exp):
    # Replace symbols in an expr with resolved versions
    if type(exp) is type(()):
        if exp[0] == 'not':
            return ('not', resolve(exp[1]))
        elif exp[0] == '?':
            return ('?', resolve(exp[1]), resolve(exp[2]), resolve(exp[3]))
        else:
            return (exp[0], resolve(exp[1]), resolve(exp[2]))
    elif isinstance(exp, cml.ConfigSymbol):     # Symbol, already resolved
        return exp
    elif isinstance(exp, cml.trit):             # Trit, already resolved
        return exp
    elif type(exp) in (type(0), type("")):      # Constant, already resolved
        return exp
    elif not hasattr(exp, "type"):
        sys.stderr.write("Symbol %s has no type.\n" % (exp,))
        compstate.errors = compstate.errors + 1
        return None
    elif exp.type == 'trit':
        if exp.attr == 'y':
            return cml.y
        elif exp.attr == 'm':
            return cml.m
        elif exp.attr == 'n':
            return cml.n
    elif exp.type in _atoms:
        return exp.attr
    elif rulebase.dictionary.has_key(exp.attr):
        return rulebase.dictionary[exp.attr]
    else:
        compstate.bad_symbols[exp.attr] = 1
        return None

def ancestry_check(symbol, counts):
    # Check for circular ancestry chains
    # print "Checking ancestry of %s: %s" % (symbol, symbol.ancestors)
    counts[symbol.name] = 1
    for ancestor in symbol.ancestors:
        if counts.has_key(ancestor.name):
            raise NameError, symbol.name + " through " + ancestor.name
        else:
            map(lambda symbol, counts=counts: ancestry_check(symbol, counts), symbol.ancestors)

def circularity_check(name, exp, counts):
    # Recursive circularity check...
    # print "Expression check of %s against %s" % (name, exp)
    if type(exp) is type(()):
        if exp[0] == '?':
            circularity_check(name, exp[1], counts)
            circularity_check(name, exp[2], counts)
            circularity_check(name, exp[3], counts)
        else:
            circularity_check(name, exp[1], counts)
            circularity_check(name, exp[2], counts)
    elif isinstance(exp, cml.ConfigSymbol) and name == exp.name:
        raise NameError, name
    elif hasattr(exp, "default"):
        vars = cml.flatten_expr(exp.default)
        # print "Components of %s default: %s" % (exp.name, vars)
        for v in vars:
            if v.name == name:
                raise NameError, name
            elif counts.has_key(v.name):
                pass    # Already checked this branch
            else:
                counts[v.name] = 1
                circularity_check(name, v.name, counts)

def error_leader(file, line):
    return '"%s", line %d:' % (file, line)

def postcomplain(msg):
    if not compstate.debug and not compstate.errors:
        sys.stderr.write('\n')
    sys.stderr.write("cmlcompile: " + msg)
    compstate.errors += 1

# This is the entry point to use if we want the compiler as a function
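#
# A minimal embedding sketch (the rules file name here is hypothetical):
#
#     rb = compile(0, ["rules.cml"], 0)
#     if rb:
#         cPickle.dump(rb, open("rules.out", "wb"), 1)
#
# This is essentially what main() below does for the command-line case.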

def compile(debug, arguments, profile, endtok=None):
    "Sequence a compilation"
    global rulebase, compstate

    rulebase = cml.CMLRulebase()
    compstate = CompilationState()
    compstate.debug = debug

    if not debug:
        baton = cml.Baton("Compiling rules, please wait")
    else:
        baton = None

    if profile:
        import time
        now = zerotime = basetime = time.time()

    # Parse everything
    try:
        if not arguments:
            parse(lexwrapper(sys.stdin, endtok), baton)
        else:
            for file in arguments:
                parse(lexwrapper(open(file), endtok), baton)
    except IOError, details:
        sys.stderr.write("cmlcompile: I/O error, %s\n" % (details,))
        return None

    if profile:
        now = time.time()
        print "Rule parsing:", now - basetime
        basetime = now
    if not debug and not compstate.errors:
        baton.twirl()

    # Sanity and consistency checks:

    # We need a main menu declaration
    if not rulebase.start:
        postcomplain("missing a start declaration.\n")
        return None
    elif not rulebase.dictionary.has_key(rulebase.start):
        postcomplain("declared start menu '%s' does not exist.\n" % (rulebase.start,))
        return None
    if not debug and not compstate.errors:
        baton.twirl()

    # Check for symbols that have been forward-referenced but not declared
    for ref in rulebase.dictionary.values():
        if not ref.prompt and not compstate.derivations.has_key(ref):
            postcomplain('"%s", line %d: %s in menu %s has no prompt\n' % (ref.file, ref.lineno, ref.name, `ref.menu`))

    # Check that all symbols other than those on the right side of
    # derives are either known or themselves derived.
    for entry in rulebase.dictionary.values():
        if entry.visibility:
            entry.visibility = resolve(entry.visibility)
        if entry.saveability:
            entry.saveability = resolve(entry.saveability)
        if entry.default:
            entry.default = resolve(entry.default)
    rulebase.constraints = map(lambda x: cml.Requirement(resolve(x.predicate), x.message, x.file, x.line), rulebase.constraints)
    if compstate.bad_symbols:
        postcomplain("%d symbols could not be resolved:\n" % (len(compstate.bad_symbols),))
        sys.stderr.write(`compstate.bad_symbols.keys()` + "\n")
    if not debug and not compstate.errors:
        baton.twirl()

    # Now associate a type with all derived symbols.  Since such symbols
    # are never queried, the only place this is used is in formatting
    # the symbol's appearance in the final configuration file.
    #
    # (The outer loop forces this to keep spinning until it has done all
    # possible deductions, even in the presence of forward declarations.)
    while 1:
        deducecount = 0
        for entry in rulebase.dictionary.values():
            if compstate.derivations.has_key(entry) and not entry.type:
                derived_type = None
                if entry.default == cml.m:
                    derived_type = "trit"
                elif entry.default == cml.n or entry.default == cml.y:
                    derived_type = "bool"
                elif type(entry.default) is type(()):
                    if entry.default[0] in _boolops + _relops:
                        derived_type = "bool"
                    elif entry.default[0] in _termops:
                        derived_type = "trit"
                    elif entry.default[0] in _arithops:
                        derived_type = "decimal"
                    elif entry.default[0] == '?':
                        if isinstance(entry.default[2], cml.ConfigSymbol):
                            derived_type = entry.default[2].type
                        elif isinstance(entry.default[2], cml.trit):
                            derived_type = "trit"
                        elif type(entry.default[2]) in (type(0), type(0L)):
                            derived_type = "decimal"
                        elif type(entry.default[2]) is type(""):
                            derived_type = "string"
                elif type(entry.default) is type(0):
                    derived_type = "decimal"    # Could be hex
                elif type(entry.default) is type(""):
                    derived_type = "string"
                elif isinstance(entry.default, cml.ConfigSymbol):
                    derived_type = entry.default.type
                if derived_type:
                    entry.type = derived_type
                    deducecount = 1
        if not deducecount:
            break

    for entry in rulebase.dictionary.values():
        if compstate.derivations.has_key(entry) and not entry.type:
            postcomplain(error_leader(entry.file, entry.lineno) + \
                         'can\'t deduce type for derived symbol %s from %s\n' % (entry.name, entry.default))
    if not debug and not compstate.errors:
        baton.twirl()

    # Now run our ambiguity check on all unless expressions.
    for (guard, file, line) in compstate.bool_tests:
        validate_boolean(resolve(guard), file, line)
    if not debug and not compstate.errors:
        baton.twirl()

    # Handle explicit dependencies
    for symbol in compstate.explicit_ancestors.keys():
        for guard in compstate.explicit_ancestors[symbol]:
            for guardsymbol in cml.flatten_expr(resolve(guard)):
                make_dependent(guardsymbol, symbol)
    if not debug and not compstate.errors:
        baton.twirl()

    # Check that every symbol in the table (that isn't an unresolved forward
    # reference, we've already detected those) is referenced from a menu
    # exactly once (except explanations).  We checked for multiple
    # inclusions at parse-tree generation time.  Now...
    compstate.bad_symbols = {}
    for entry in rulebase.dictionary.values():
        if entry.prompt and not entry.type:
            compstate.bad_symbols[entry.name] = 1
    if compstate.bad_symbols:
        postcomplain("%d symbols have no references" % (len(compstate.bad_symbols),))
        sys.stderr.write("\n" + `compstate.bad_symbols.keys()` + "\n")
    if not debug and not compstate.errors:
        baton.twirl()

    # Check for forward references in visibility constraints.
    # Note: this is *not* a fatal error.
    preorder = symbols_by_preorder(rulebase.dictionary[rulebase.start])
    for i in range(len(preorder)):
        key = preorder[i]
        forwards = []
        for guards in cml.flatten_expr(rulebase.dictionary[key].visibility):
            if guards.name in preorder[i+1:]:
                forwards.append(guards.name)
        if forwards:
            sym = rulebase.dictionary[key]
            postcomplain('"%s", line %d: %s in %s requires %s forward\n' % (sym.file, sym.lineno, key, sym.menu.name, forwards))
            compstate.errors -= 1
    if not debug and not compstate.errors:
        baton.twirl()

    # Check for circularities in derives and defaults.
    try:
        for entry in rulebase.dictionary.values():
            if entry.default:
                expr_counts = {}
                circularity_check(entry.name, entry.default, expr_counts)
            if entry.visibility:
                expr_counts = {}
                circularity_check(entry.name, entry.visibility, expr_counts)
            if entry.ancestors:
                ancestor_counts = {}
                ancestry_check(entry, ancestor_counts)
    except NameError:
        postcomplain("%s depends on itself\n" % (sys.exc_value,))
    if not debug and not compstate.errors:
        baton.twirl()

    # Various small hacks combined here to save traversal overhead.
    bitch_once = {}
    for entry in rulebase.dictionary.values():
        # Validate choice groups
        for symbol in entry.choicegroup:
            if not symbol.is_logical() and not bitch_once.has_key(symbol):
                postcomplain("Symbol %s in a choicegroup is not logical" % symbol.name)
                bitch_once[symbol] = 1
        # Validate the formulas for boolean derived symbols.
        if compstate.derivations.has_key(entry):
            if entry.menu:
                postcomplain("menu %s contains derived symbol %s\n" % (entry.menu.name, `entry`))
            if entry.type == "bool":
                validate_boolean(entry.default, entry.file, entry.lineno)
            else:
                validate_expr(entry.default, entry.file, entry.lineno)
            continue
        #if not entry.default is None:
        #    validate_expr(entry.default, entry.file, entry.lineno)
        # Give childless menus the `message' type.  This will make
        # it easier for the front end to do special things with these objects.
        if entry.type == 'menu':
            if not entry.items:
                entry.type = 'message'
            continue
        # Check for type mismatches between symbols and their defaults.
        if entry.is_symbol() and not entry.default is None:
            if type(entry.default) in (type(0L), type(0)) and not entry.is_numeric():
                postcomplain("%s is not of numeric type but has numeric constant default\n" % entry.name)
            elif type(entry.default) == type("") and not entry.type == "string":
                postcomplain("%s is not of string type but has string constant default\n" % entry.name)
        # Symbols with decimal/hexadecimal/string type must have a default.
        if entry.type in ("decimal", "hexadecimal", "string"):
            if entry.default is None:
                postcomplain("%s needs a default\n" % (`entry`,))
            elif entry.range:
                # This member can be used by front ends to determine whether the
                # entry's value should be queried with a pulldown of its values.
                entry.discrete = not filter(lambda x: type(x) is type(()), entry.range)
                # This member can be used by front ends to determine whether the
                # entry's value should be queried with a pulldown of enums.
                entry.enum = type(entry.range[0]) is type(()) \
                             and type(entry.range[0][0]) is type("")
        # Now hack the prompts of anything dependent on a warndepend symbol
        for guard in compstate.warndepend:
            if entry.prompt and guard.ancestor_of(entry):
                entry.warnings.append(guard)
    if not debug and not compstate.errors:
        baton.twirl()

    # Check for constraint violations.  If the defaults set up by the
    # rule file are not consistent, it's not likely the user will make
    # a consistent one.  Don't try this if we've seen syntax
    # compstate.errors, as they tend to produce Nones in expressions
    # that this will barf on.
    if not compstate.errors:
        for wff in rulebase.constraints:
            if not cml.evaluate(wff.predicate, debug):
                postcomplain(error_leader(wff.file, wff.line) + " constraint violation: %s\n" % `wff`)
    if not debug and not compstate.errors:
        baton.twirl()

    # Now integrate the help references
    help_dict = {}
    for key in rulebase.dictionary.keys():
        if help_dict.has_key(key):
            rulebase.dictionary[key].helptext = help_dict[key]
            del help_dict[key]
    if debug:
        missing = []
        for entry in rulebase.dictionary.values():
            if not entry.type in ("message", "menu", "choices", "explanation") and entry.prompt and not entry.help():
                missing.append(entry.name)
        if missing:
            postcomplain("The following symbols lack help entries: %s\n" % missing)
        orphans = help_dict.keys()
        if orphans:
            postcomplain("The following help entries do not correspond to symbols: %s\n" % orphans)
    if not debug and not compstate.errors:
        baton.end("Done")

    if profile:
        now = time.time()
        print "Sanity checks:", now - basetime
        basetime = now

    # We only need the banner string, not the banner symbol
    if rulebase.banner:
        rulebase.banner = rulebase.banner.prompt

    # Package everything up for pickling
    if compstate.errors:
        postcomplain("rulebase write suppressed due to errors.\n")
        return None
    else:
        rulebase.start = rulebase.dictionary[rulebase.start]
        # Precomputation to speed up the configurator's load time
        rulebase.reduced = map(lambda x: x.predicate, rulebase.constraints)
        rulebase.optimize_constraint_access()
        if debug:
            cc = dc = tc = 0
            for symbol in rulebase.dictionary.values():
                if not compstate.derivations.has_key(symbol):
                    tc = tc + 1
                    if symbol.dependents:
                        dc = dc + 1
                    if symbol.constraints:
                        cc = cc + 1
            print "%d total symbols; %d symbols are involved in constraints; %d in dependencies." % (tc, cc, dc)

    if profile:
        now = time.time()
        print "Total compilation time:", now - zerotime

    # We have a rulebase object.
    return rulebase

if __name__ == '__main__':
    def main(debug, outfile, arguments, profile):
        "Compile and write out a rulebase."
        rulebase = compile(debug, arguments, profile)
        if not rulebase:
            raise SystemExit, 1
        else:
            try:
                if debug: print "cmlcompile: output directed to %s" % (outfile)
                out = open(outfile, "wb")
                cPickle.dump(rulebase, out, 1)
                out.close()
            except:
                postcomplain('couldn\'t open output file "%s"\n' % (outfile,))
                raise SystemExit, 1

    outfile = "rules.out"
    profile = debug = 0
    (options, arguments) = getopt.getopt(sys.argv[1:], "o:Pv", "help")
    for (switch, val) in options:
        if switch == '-o':
            outfile = val
        elif switch == '-P':
            profile = 1
        elif switch == '-v':
            debug = debug + 1
        elif switch == '--help':
            sys.stdout.write(lang["CLIHELP"])
            raise SystemExit

    if profile:
        import profile
        profile.run("main(debug, outfile, arguments, profile)")
    else:
        main(debug, outfile, arguments, profile)

# That's all, folks!