
This was the view outside the bathroom window at my office, in Liège.
import sys
import re

# alphanumeric characters, escaped
# for regular expressions
ALPHANUMS = 'a-zA-Z0-9_'

# python operators, escaped
# for regular expressions
OPERATORS = r'\.\(\)\[\]\{\}\:\'\"\!=,%\+\-\*\/\^\&<>'

# python keywords
KEYWORDS = ['if', 'for', 'while', 'do', 'def', 'class', 'None',
            'True', 'False', 'and', 'or', 'not', 'import', 'else',
            'elif', 'raise', 'except', 'break', 'continue',
            'lambda', 'return', 'yield', 'global']

COLORS = {
    'bg': '#030303',
    'kw': 'blue',
    'tk': 'silver',
    'op': 'teal',
    'cm': 'green',
    'txt': 'red',
    'qu': 'darkred',
    '??': 'white'}

# how many spaces should a tab character be:
TAB_LENGTH = 4

escape = lambda source, replacements: \
    ''.join([c if c not in replacements
             else replacements[c]
             for c in source])

# escape a string so it will be parsable
# by the re module
def rxEscaped(source):
    replacements = {
        '[': r'\[', ']': r'\]',
        '{': r'\{', '}': r'\}'}
    return escape(source, replacements)

SPACES = ' \t'
SPLITTERS = rxEscaped(OPERATORS + SPACES)


class TokensList(list):
    # HTML-escape the characters that are special in the
    # generated markup (these entities were rendered away
    # when the post was scraped, so they are restored here)
    replacements = {
        '<': '&lt;',
        '>': '&gt;',
        '&': '&amp;',
        ' ': '&nbsp;',
        '\t': '&nbsp;' * TAB_LENGTH}

    def __init__(self):
        list.__init__(self)

    def add(self, key, value):
        self.append((key,
                     escape(value, TokensList.replacements)))


class Tokenizer(object):
    '''Transforms a line into a group of tokens,
    where each token is represented by a pair:

        line -> [ (key, token), (key, token), ... ]

    The key represents the type of the token:
        kw  = keyword
        tk  = word
        sp  = spacing
        op  = operator
        cm  = comment
        qu  = quotes
        txt = string contents
        ??  = unidentified (for any errors)
    '''
    rxToken = re.compile(r'''
        ^(?P<tk>[%s]+)      # string token
        |(?P<sp>[%s]{1})    # or space
        |(?P<op>[%s]{1})    # or operators
        |(?P<cm>\#.*$)      # or comment
        ''' % (ALPHANUMS, SPACES, OPERATORS),
        re.VERBOSE)

    quotes = None

    def __init__(self):
        self._init()

    def _init(self, line=''):
        self.line, self.parsed = line, line
        self.tokens = TokensList()

    def _parseRx(self, rx):
        '''Search for rx against line and return
        the found match and the groups dict.
        '''
        found = re.search(rx, self.parsed)
        if found:
            return (found, found.groupdict())
        else:
            return (None, None)

    def _parseStringStart(self):
        quotes = \
            self._parseRx(r'^(?P<qu>[\']{3})')[1] or \
            self._parseRx(r'^(?P<qu>[\"]{3})')[1] or \
            self._parseRx(r'^(?P<qu>\')')[1] or \
            self._parseRx(r'^(?P<qu>\")')[1]
        if quotes:
            quotes = quotes['qu']
            self.parsed = self.parsed[len(quotes):]
            Tokenizer.quotes = quotes
            return True
        return False

    def _getTokens(self):
        while self.parsed:
            ## are we in a string?
            if Tokenizer.quotes:
                if len(Tokenizer.quotes) == 3:
                    # rx for multiline string
                    rx = '(?P<qu>%s)' % \
                        (('\\' + Tokenizer.quotes[0]) * 3)
                else:
                    # rx for single line string
                    rx = r'(?P<qu>%s)' % \
                        Tokenizer.quotes
                token, gd = self._parseRx(rx)
                if gd:
                    start, end = token.span('qu')
                    if start > 2 and \
                            self.parsed[start-1] == '\\' and \
                            self.parsed[start-2] != '\\':
                        # escaped quote: still inside the string
                        self.tokens.add('txt', self.parsed[:end])
                        self.parsed = self.parsed[end:]
                    else:
                        self.tokens.add('txt', self.parsed[:start])
                        self.tokens.add('qu', gd['qu'])
                        Tokenizer.quotes = None
                        self.parsed = self.parsed[end:]
                    continue
                elif len(Tokenizer.quotes) == 3:
                    # a multiline string is legal:
                    # add everything as text
                    self.tokens.add('txt', self.parsed)
                else:
                    # singleline string not closed:
                    # pass everything as '??'
                    self.tokens.add('??', self.parsed)
                    # process next line correctly
                    Tokenizer.quotes = None
                break

            # are we opening a string now?
            if self._parseStringStart():
                self.tokens.add('qu', Tokenizer.quotes)
                continue

            # we're not parsing a string
            token, gd = self._parseRx(Tokenizer.rxToken)
            if not gd:
                self.tokens.add('??', self.parsed)
                break
            for key in gd:
                value = gd[key]
                if not value:
                    continue
                elif value in KEYWORDS:
                    self.tokens.add('kw', value)
                else:
                    self.tokens.add(key, value)
            self.parsed = self.parsed[token.end():]

    def parse(self, line):
        # reset the object
        self._init(line)
        # get list of tokens
        self._getTokens()
        # merge like tokens
        retval, currentKey, currentList = [], '', []
        for key, value in self.tokens:
            if key == currentKey:
                currentList.append(value)
            else:
                retval.append((currentKey, currentList))
                currentKey, currentList = key, [value]
        if len(currentList):
            retval.append((currentKey, currentList))
        self.tokens = retval
        return self.tokens


def generateFormattedFile(source, destination):
    '''Generate a formatted HTML block
    from the source code.
    '''
    dest = open(destination, 'wt')
    dest.write('''<div style="background-color:%s;">
<pre>''' % COLORS['bg'])
    tokenizer = Tokenizer()
    NEWLINE = '\n'
    for line in open(source).readlines():
        line = line.rstrip()
        if not line:
            dest.write(NEWLINE)
            continue
        formattedLine = ''
        for name, values in tokenizer.parse(line):
            if name in COLORS:
                formattedLine += \
                    '<span style="color:%s;">' % \
                    COLORS[name] + \
                    ''.join(values) + '</span>'
            else:
                formattedLine += ''.join(values)
        dest.write(formattedLine + NEWLINE)
    dest.write('''
</pre>
</div>''')
    dest.close()


if __name__ == '__main__':
    if len(sys.argv) < 2:
        print('Syntax: %s <sourcefile>' % sys.argv[0])
    else:
        generateFormattedFile(sys.argv[1], sys.argv[1] + '.html')
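For reference, here is a minimal sketch of driving the tokenizer directly, rather than through generateFormattedFile. It assumes the listing above has been saved as highlight.py (a hypothetical filename). Two things to keep in mind: parse() returns tokens already HTML-escaped by TokensList.add, so spaces come back as &nbsp; entities, and the merge step in parse() emits an empty leading pair, which the loop below skips.

from highlight import Tokenizer  # hypothetical module name for the listing above

tokenizer = Tokenizer()
# parse() returns [(key, [token, ...]), ...], with adjacent
# tokens of the same type already merged into one list
for key, parts in tokenizer.parse('def greet(name): # say hello'):
    if not key:
        continue  # skip the empty placeholder pair from the merge step
    print(key, ''.join(parts))

Because Tokenizer.quotes is a class attribute rather than an instance attribute, the open/closed state of a triple-quoted string carries over between successive parse() calls; that is what lets a multiline string span several lines of the input file.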
Mal (a figure within Cobb's subconscious) tells Cobb that maybe his reality is the dream, because it is implausible for nameless corporations to be chasing him all over the globe; it sounds too much like a dream. This idea, striking to Cobb at the time, seems to support one of the possible interpretations of the movie's ending.
Cobb: I know how to find secrets from your mind, I know all the tricks! (see? :-D)
from deoxy.org:
Hakuin used to tell his pupils about an old woman who had a teashop, praising her understanding of Zen. The pupils refused to believe what he told them and would go to the teashop to find out for themselves.
Whenever the woman saw them coming, she could tell at once whether they had come for tea or to look into her grasp of Zen. In the former case, she would serve them graciously. In the latter, she would beckon to the pupils to come behind her screen. The instant they obeyed, she would strike them with a fire-poker.
Nine out of ten of them could not escape her beating.
utnapistim's comment: