[Top][All Lists]
[Date Prev][Date Next][Thread Prev][Thread Next][Date Index][Thread Index]
[gnue] r7267 - trunk/gnue-common/src/formatting/masks
From: |
jamest |
Subject: |
[gnue] r7267 - trunk/gnue-common/src/formatting/masks |
Date: |
Sun, 27 Mar 2005 18:27:08 -0600 (CST) |
Author: jamest
Date: 2005-03-27 18:27:06 -0600 (Sun, 27 Mar 2005)
New Revision: 7267
Modified:
trunk/gnue-common/src/formatting/masks/InputMask.py
trunk/gnue-common/src/formatting/masks/MaskParser.py
trunk/gnue-common/src/formatting/masks/Tokens.py
trunk/gnue-common/src/formatting/masks/test.py
Log:
more comments
Modified: trunk/gnue-common/src/formatting/masks/InputMask.py
===================================================================
--- trunk/gnue-common/src/formatting/masks/InputMask.py 2005-03-27 02:35:17 UTC
(rev 7266)
+++ trunk/gnue-common/src/formatting/masks/InputMask.py 2005-03-28 00:27:06 UTC
(rev 7267)
@@ -119,6 +119,9 @@
"""
Parses an input string into its components
and sets the resultant display
+
+ @type newtext: string
+ @param newtext: The text to add via the input mask
"""
cursor = self.cursor
@@ -136,12 +139,11 @@
try:
while True:
parsed, extra = scanner.read()
-## print parsed, extra
+ print parsed, extra
if parsed is None:
last_state = self.eof_nextstate[0]
break
else:
-## print "Parsed", parsed
state, char = parsed
mstate = state[0]
inputted_states[mstate].append(state)
@@ -382,12 +384,21 @@
self.emptyDisplay.append(self.placeholder*token.maxchars)
i += 1
- print self.tokens
+
# -------------------------------------------------------------------------
# Next, we will build the actual lexicon. We start
# at the end of the mask and work backwards, as
# any optional mask tokens will need to reference the
# next token's initial grammar elements.
+ #
+ # Each position in the input mask gets its own lexicon state
+ #
+ # First, each rule is created with a state name format of
+ # (position #, path #, rule #)
+ # Then they are merged into a single entry per position
+ #
+ # Each path represents one text string which would
+ # pass the lexicon's test.
# -------------------------------------------------------------------------
i = len(tokens)
lexicon = [
@@ -401,60 +412,91 @@
leadin = []
while i > 0:
- # Iterate backward thru the tokens in the input mask
+ # Iterate backward through the tokens in the input mask
i -= 1
token = tokens[i]
if not token.optional:
leadin = []
- j = 0
+ # Iterate forward through each token's path lists (ruleset)
+ j = 0 # j is used in the naming of the next valid lexicon state
for ruleset in token.paths:
- ks = 0
- possibly_completed = False
+ ks = 0
+ possibly_completed = False # Once a ruleset encounters an object
+ # of class forcible then it is possible
+ # that the ruleset is complete
+
+ # Iterate forward through the ruleset and define a (pattern, action)
+ # tuple (rule) for the lexicon we are constructing
for k in range(len(ruleset)):
path = ruleset[k]
lexi = []
+
+ # See if the current rule may be the last one needed
+ # to complete the ruleset
try:
possibly_completed = possibly_completed or \
ruleset[k+1]==Tokens.forcible
except IndexError:
pass
+ # Add the rule, skipping any class forcible items
+ # as they are not actually tokens
if not path == Tokens.forcible:
if (k < len(ruleset) - 1):
+ # There are additional items in this ruleset so
+ # set the next state to point to the next rule in the
+ # set
next_state = (i, j, ks+1)
else:
+ # There are no additional rules in this ruleset so
+ # set the next state to the next character's tokens
next_state = (i+1,0,0)
- print "tokenFound(%s,%s,%s,%s)" % (0,0,(i,j,ks),next_state)
+
+ # Construct the lexicon pattern (rule) and store with a
+ # lambda based action function. Note that the lambda
+ # isn't executed at this time so p and t arguments below
+ # are the scanner(parser), text arguments that plex will pass to
+ # the action function
rule = (path,
lambda p, t, c=self._tokenFound, st=(i, j, ks), ns=next_state:
c(p, t, st, ns))
- print "Rule", rule
-
+ # Store the first rule of each path list
if k == 0:
leadin.append(rule)
+
+ # Add the rule to the list of rules to be inserted into
+ # our generated lexicon
lexi.append(rule)
+
+ # If no more characters are required then
+ # add in the start points from the previous token's
+ # paths (rulesets)
if possibly_completed:
lexi += last_leadin
+
+ # I
if j or ks:
- print "jks"
- self.pp.pprint(lexi)
lexicon.append((State((i, j, ks), lexi)))
ks += 1
-
j += 1
- print "lexicon"
- self.pp.pprint(leadin[:])
+## print "lexicon"
+## self.pp.pprint(leadin[:])
+ # Append the created state to the main lexicon
+ # we are creating
lexicon.append(State((i,0,0), leadin[:]))
+ print "Leadin", leadin
+ last_leadin = leadin # Assign the current leadin to previous leadin
+ # this will be used in the next iteration as
+ # the start points of this token's paths
- last_leadin = leadin
+ Tokens.printLexiconTree(lexicon)
-## Tokens.printLexiconTree(lexicon)
-
# Create a consolidated validation rule so we
- # can test if inputted string is "complete".
+ # can test if inputted string is "complete". This
+ # creates the single rule for each position.
self.validationRule = Tokens.buildValidationRule(tokens)
# Pre-compile the lexicon for this mask
Modified: trunk/gnue-common/src/formatting/masks/MaskParser.py
===================================================================
--- trunk/gnue-common/src/formatting/masks/MaskParser.py 2005-03-27
02:35:17 UTC (rev 7266)
+++ trunk/gnue-common/src/formatting/masks/MaskParser.py 2005-03-28
00:27:06 UTC (rev 7267)
@@ -175,10 +175,20 @@
"""
return self.tokens[:]
- #
+ # ===========================================================================
# Private stuff
- #
+ # ===========================================================================
+
+ # ---------------------------------------------------------------------------
+ # Lexicon action functions
+ # ---------------------------------------------------------------------------
def _check_single(self, text):
+ """
+ Function to add single instance tokens to the input mask.
+
+ A single instance token is something that can appear only once
+ in an input mask.
+ """
if text in self.__singles:
raise Errors.UnrecognizedInput(self, 'Mask can only have one "%s" token'
%text)
self.__singles.append(text)
@@ -190,16 +200,28 @@
self.produce(TextToken(text))
def _literal(self, text):
+ """
+ A text literal that should appear as is in the mask
+ """
+ print "literal %s" % text
self.produce(Literal(text))
def _literal_2nd(self, text):
+ """
+ Closes the literal string
+ """
+ print "literal 2nd %s" % text
return self.literal(text[1:])
def _escape(self, text):
+ """
+ An escaped character such as \$ to display a $
+ """
+ print "escape %s" % text
self.begin('')
self.produce(Literal(text))
- def _repeater(self, text):
+ def _repeater(self, text):
self.produce(Repeater(int(text)))
def _begin_set(self, text):
Modified: trunk/gnue-common/src/formatting/masks/Tokens.py
===================================================================
--- trunk/gnue-common/src/formatting/masks/Tokens.py 2005-03-27 02:35:17 UTC
(rev 7266)
+++ trunk/gnue-common/src/formatting/masks/Tokens.py 2005-03-28 00:27:06 UTC
(rev 7267)
@@ -23,7 +23,9 @@
#
# DESCRIPTION:
"""
-Tokens used to define the components that make up an input mask
+Tokens used to define the components that make up an input mask.
+These tokens are used to define the final lexicon used by the
+mask.
"""
# NOTES:
#
@@ -108,6 +110,10 @@
"""
optional = True
def __repr__(self):
+ """
+ Force the class to return a custom string representation of itself.
+ Not sure why yet.
+ """
return "%s(%s)" % (self.__class__.__name__, self.autochar)
@@ -207,9 +213,9 @@
symbol='M'
maxchars = 2
zero_pad = True
- paths=[ [ Str('1'), forcible, Any('012') ],
- [ Str('0'), Range('19') ],
- [ Range('29') ] ]
+ paths=[ [ Str('1'), forcible, Any('012') ], # months 1, 10 - 12
+ [ Str('0'), Range('19') ], # months 01 - 09
+ [ Range('29') ] ] # months 2 - 9
class tm(tM):
"""
@@ -224,10 +230,10 @@
symbol='D'
zero_pad = True
maxchars = 2
- paths=[ [ Str('3'), forcible, Any('01') ],
- [ Any('12'), forcible, digit ],
- [ Str('0'), Range('19')],
- [ Range('49') ] ]
+ paths=[ [ Str('3'), forcible, Any('01') ], # days 3, 30 - 31
+ [ Any('12'), forcible, digit ], # days 1,2,10 - 29
+ [ Str('0'), Range('19')], # days 01 - 09
+ [ Range('49') ] ] # days 4 - 9
class td(tD):
"""
@@ -257,9 +263,9 @@
"""
symbol='H'
maxchars = 2
- paths = [ [ Str('2'),forcible, Any('0123') ],
- [ Any('01'), forcible, digit ],
- [ Range('39') ] ]
+ paths = [ [ Str('2'),forcible, Any('0123') ], # Hour 2, 20-23
+ [ Any('01'), forcible, digit ], # Hour 00 - 19
+ [ Range('39') ] ] # Hour 3 - 9
class tI(DateTok):
"""
@@ -357,8 +363,43 @@
self.paths = [[Any(chars)]]
self.symbol = '[%s]' %
chars.replace('\\','\\\\').replace(']','\\]').replace('-','\\-')
+# ---------------------------------------------------------------------------
+# Map of tokens to classes
+# ---------------------------------------------------------------------------
+tokenMap = {
+ # Input/output tokens
+ '_': tChar, # Any character, required
+ '?': tCharOpt, # Any character, optional
+ 'A': tA, # Any alphanumeric, required
+ 'a': ta, # Any alphanumeric, optional
+ 'L': tL, # Any letter, required
+ 'l': tl, # Any letter, optional
+ 'C': tC, # Any character (alphanum) or space, required
+ 'c': tc, # Any character (alphanum) or space, optional
+ '+': tsign, # Positive or negative sign (one per mask)
+ '0': tDigit, # Any digit, required
+ '#': tDigitOpt, # Any digit, optional
+ 'M': tM, # Month, zero padding
+ 'D': tD, # Day, zero padding
+ 'Y': tY, # Year - 4 digits
+ 'y': ty, # Year - 2 digits
+ 'H': tH, # Hour
+ 'I': tI, # Minute
+ 'S': tS, # Seconds
+ 'P': tP, # PM AM token
+ 'p': tp, # pm am token
+ '.': tDecSep, # Decimal separator
+ ',': tThouSep, # Thousands separator
+ ':': tTimeSep, # Time Separator
+ '/': tDateSep, # Date Separator
+ # Output-only
+ 'm': tm, # Month, no zero padding
+ 'd': td, # Day, no zero padding
+}
-
+# =============================================================================
+# Module level functions
+# =============================================================================
def buildSingleValidationRule(token, honorOptional=True):
"""
Build a validation rule for a specific token
@@ -392,7 +433,6 @@
else:
return val
-
def buildValidationRule(tokens):
"""
Take a list of tokens and combine all their rule paths
@@ -407,40 +447,13 @@
return val + Eol
-tokenMap = {
- # Input/output tokens
- '_': tChar, # Any character, required
- '?': tCharOpt, # Any character, optional
- 'A': tA, # Any alphanumeric, required
- 'a': ta, # Any alphanumeric, optional
- 'L': tL, # Any letter, required
- 'l': tl, # Any letter, optional
- 'C': tC, # Any character (alphanum) or space, required
- 'c': tc, # Any character (alphanum) or space, optional
- '+': tsign, # Positive or negative sign (one per mask)
- '0': tDigit, # Any digit, required
- '#': tDigitOpt, # Any digit, optional
- 'M': tM, # Month, zero padding
- 'D': tD, # Day, zero padding
- 'Y': tY, # Year - 4 digits
- 'y': ty, # Year - 2 digits
- 'H': tH, # Hour
- 'I': tI, # Minute
- 'S': tS, # Seconds
- 'P': tP, # PM AM token
- 'p': tp, # pm am token
- '.': tDecSep, # Decimal separator
- ',': tThouSep, # Thousands separator
- ':': tTimeSep, # Time Separator
- '/': tDateSep, # Date Separator
- # Output-only
- 'm': tm, # Month, no zero padding
- 'd': td, # Day, no zero padding
-}
-
-
-###########
+# =============================================================================
+# Debugging functions
+# =============================================================================
def printLexiconTree(lexicon, indent=0):
+ """
+ Function useful for debugging.
+ """
for foo in lexicon:
if isinstance(foo, State):
print (" "*indent) + ("State: %s" % str((foo.name)))
Modified: trunk/gnue-common/src/formatting/masks/test.py
===================================================================
--- trunk/gnue-common/src/formatting/masks/test.py 2005-03-27 02:35:17 UTC
(rev 7266)
+++ trunk/gnue-common/src/formatting/masks/test.py 2005-03-28 00:27:06 UTC
(rev 7267)
@@ -34,8 +34,9 @@
m='\\$###,##0!.00'
mask = InputMask(m)
print "Mask: %s" % m
-mask.begin()
+
for f in ('','1','12','123','1234','12345','9999'):
+ mask.begin()
print string.ljust("Input: '%s'" % f, 18),
output, cursor = mask._parseInput(newtext='%s'%f)
print "Output: " + formatOutput(output, cursor)
[Prev in Thread] |
Current Thread |
[Next in Thread] |
- [gnue] r7267 - trunk/gnue-common/src/formatting/masks,
jamest <=