[Top][All Lists]
[Date Prev][Date Next][Thread Prev][Thread Next][Date Index][Thread Index]
r5114 - in trunk/gnue-common/src/schema/scripter: . processors
From: |
johannes |
Subject: |
r5114 - in trunk/gnue-common/src/schema/scripter: . processors |
Date: |
Tue, 17 Feb 2004 10:23:29 -0600 (CST) |
Author: johannes
Date: 2004-02-17 10:23:28 -0600 (Tue, 17 Feb 2004)
New Revision: 5114
Modified:
trunk/gnue-common/src/schema/scripter/Definition.py
trunk/gnue-common/src/schema/scripter/Scripter.py
trunk/gnue-common/src/schema/scripter/processors/Base.py
trunk/gnue-common/src/schema/scripter/processors/SQL.py
trunk/gnue-common/src/schema/scripter/processors/interbase.py
trunk/gnue-common/src/schema/scripter/processors/mssql.py
trunk/gnue-common/src/schema/scripter/processors/mysql.py
trunk/gnue-common/src/schema/scripter/processors/oracle.py
trunk/gnue-common/src/schema/scripter/processors/postgresql.py
Log:
Improved Scripter/Processor concept. Added data-transformation-stuff, and
successfully tested the SQL code on PostgreSQL, MySQL, Interbase and
MS SQL Server.
Modified: trunk/gnue-common/src/schema/scripter/Definition.py
===================================================================
--- trunk/gnue-common/src/schema/scripter/Definition.py 2004-02-17 16:17:42 UTC
(rev 5113)
+++ trunk/gnue-common/src/schema/scripter/Definition.py 2004-02-17 16:23:28 UTC
(rev 5114)
@@ -18,40 +18,83 @@
#
# Copyright 2001-2004 Free Software Foundation
#
-# $Id: $
+# $Id$
#
# =============================================================================
# The basic definition class
# =============================================================================
class Definition:
+ """
+ This class implements a basic definition. Each definition has a name, and a
+ list of sequences: a prologue, a header, a body, a footer and finally an
+ epilogue. A dump of a definition is done by concatenating all these sequences
+ line by line.
+ """
# ---------------------------------------------------------------------------
# Constructor
# ---------------------------------------------------------------------------
def __init__ (self, name = None):
- self.name = name
+ self.name = name
+ self.prologue = []
+ self.header = []
+ self.body = []
+ self.footer = []
+ self.epilogue = []
+
+ # ---------------------------------------------------------------------------
+ # Return all sequences as one single list
+ # ---------------------------------------------------------------------------
+ def merge (self):
+ """
+ This function merges all sequences into a single list
+ """
+ return self.prologue + self.header + self.body + self.footer + \
+ self.epilogue
+
+ # ---------------------------------------------------------------------------
+ # Dump a definition to a given destination
+ # ---------------------------------------------------------------------------
+ def writeDefinition (self, destination, encoding = "UTF-8"):
+ """
+ This method writes all sequences to the given destination using 'encoding',
+ which defaults to 'UTF-8'. The definition is written in the following
+ order: prologue, header, body, footer, epilogue
+ """
+ for line in self.merge ():
+ destination.write (line.encode (encoding) + "\n")
+
+
+
# =============================================================================
# Basic class for schema definitions
# =============================================================================
class SchemaDefinition (Definition):
+ """
+ This class introduces another sequence "fields" to the definition. This list
+ holds GSField/GSIndexField instances.
+ """
# ---------------------------------------------------------------------------
# Constructor
# ---------------------------------------------------------------------------
def __init__ (self, name = None):
Definition.__init__ (self, name)
- self.fields = []
- self.prologue = []
- self.epilogue = []
+ self.fields = []
+
# =============================================================================
# IndexDefinition is just basic at the moment
# =============================================================================
class IndexDefinition (SchemaDefinition):
+ """
+ This class has another public property 'unique', which describes whether a
+ given index has the unique-flag set or not.
+ """
# ---------------------------------------------------------------------------
# Constructor
@@ -60,10 +103,19 @@
SchemaDefinition.__init__ (self, name)
self.unique = unique
+
+
# =============================================================================
# TableDefinition adds postfield- and index- sequences
# =============================================================================
class TableDefinition (SchemaDefinition):
+ """
+ A TableDefinition introduces another property primaryKey which is an
+ IndexDefinition, and two dictionaries indices and constraints. The first one
+ 'indices' holds all IndexDefinitions for the table (except the primary key),
+ and 'constraints' is always empty, since constraints are NOT implemented
+ yet.
+ """
# ---------------------------------------------------------------------------
# Constructor
@@ -71,39 +123,95 @@
def __init__ (self, name = None):
SchemaDefinition.__init__ (self, name)
- self.postfields = []
- self.pk_fields = []
- self.indices = []
- self.constraints= []
+ self.primaryKey = None
+ self.indices = {}
+ self.constraints= {}
+
# ---------------------------------------------------------------------------
# Create a new IndexDefinition and add it to our index-sequence
# ---------------------------------------------------------------------------
+ """
+ This function creates a new IndexDefinition instance, adds it into the
+ table definition's index-dictionary and returns it as a function result.
+ """
def newIndex (self, name, unique = False):
index = IndexDefinition (name, unique)
- self.indices.append (index)
+ self.indices [index.name] = index
return index
+ # ---------------------------------------------------------------------------
+ # Create a new primary key definition
+ # ---------------------------------------------------------------------------
+
+ def addPrimaryKey (self, name):
+ """
+ This function returns a new primary key definition.
+ """
+ self.primaryKey = IndexDefinition (name, True)
+ return self.primaryKey
+
+
+ # ---------------------------------------------------------------------------
+ # Returns a field by name from the fields collection
+ # ---------------------------------------------------------------------------
+ def getField (self, fieldName):
+ """
+ This function searches a field with the name 'fieldName' in its fields
+ list. None is returned if no field with the given name was found.
+ """
+ for field in self.fields:
+ if field.name == fieldName:
+ return field
+
+ return None
+
+
# =============================================================================
# Definition class for data rows
# =============================================================================
class DataDefinition (Definition):
+ """
+ This class encapsulates data to be inserted into the table 'name'. The
+ collection rows is a list of RowDefinition objects. These objects hold the
+ actual insertion data.
+ """
# ---------------------------------------------------------------------------
# Constructor
# ---------------------------------------------------------------------------
def __init__ (self, name = None):
Definition.__init__ (self, name)
- self.clear ()
+ self.rows = []
+
# ---------------------------------------------------------------------------
- # Reset all members to initial state (on start of a new row)
+ # Add a new row to the collection
# ---------------------------------------------------------------------------
- def clear (self):
- self.columns = []
- self.values = []
+ """
+ This method extends the rows collection and returns a new RowDefinition
+ instance.
+ """
+ def addRow (self):
+ row = RowDefinition ()
+ self.rows.append (row)
+ return row
- self.prologue = []
- self.lines = []
- self.epilogue = []
+
+# =============================================================================
+# Definition of a single data row
+# =============================================================================
+class RowDefinition:
+ """
+ This class encapsulates a single data row by providing two sequences: columns
+ and values, where columns is a list with column-names and values a list of
+ corresponding values.
+ """
+
+ # ---------------------------------------------------------------------------
+ # Constructor
+ # ---------------------------------------------------------------------------
+ def __init__ (self):
+ self.columns = []
+ self.values = []
Modified: trunk/gnue-common/src/schema/scripter/Scripter.py
===================================================================
--- trunk/gnue-common/src/schema/scripter/Scripter.py 2004-02-17 16:17:42 UTC
(rev 5113)
+++ trunk/gnue-common/src/schema/scripter/Scripter.py 2004-02-17 16:23:28 UTC
(rev 5114)
@@ -27,6 +27,7 @@
from gnue.common.apps.GClientApp import GClientApp
from processors import vendors
from gnue.common.schema.scripter.Definition import *
+from time import strftime
import sys
import os
@@ -43,36 +44,39 @@
USAGE = "[options] file [old-schema]"
COMMAND_OPTIONS = [
[ 'drop_tables',None,'drop-tables', 0, None, None,
- 'Generate commands to drop relevant tables. * NOT IMPLEMENTED'],
+ _("Generate commands to drop relevant tables. * NOT IMPLEMENTED")],
[ 'ignore_schema','S','no-schema', 0, None, None,
- 'Do not generate schema creation code. * NOT IMPLEMENTED'],
+ _("Do not generate schema creation code. * NOT IMPLEMENTED")],
[ 'ignore_data','D','no-data', 0, None, None,
- 'Do not generate data insertion code. * NOT IMPLEMENTED'],
+ _("Do not generate data insertion code. * NOT IMPLEMENTED")],
+ [ 'encoding', 'e', 'encoding', True, 'UTF-8', 'encoding',
+ _("The generated SQL script will be encoded using <encoding>. ") + \
+ _("Default encoding is UTF-8")],
[ 'upgrade_schema','u','upgrade-schema', 0, None, None,
- 'Generate code to upgrade an older version of a schema to '
- 'the recent version. You must specify a previous schema with on the '
- 'command line. * NOT IMPLEMENTED'],
+ _("Generate code to upgrade an older version of a schema to ") + \
+ _("the recent version. You must specify a previous schema with ") + \
+ _("on the command line. * NOT IMPLEMENTED")],
[ 'upgrade_data','U','upgrade-data', 0, None, None,
- 'Generate code to upgrade an older version of schema data to '
- 'the recent version. You must specify a previous schema with on the '
- 'command line. * NOT IMPLEMENTED'],
+ _("Generate code to upgrade an older version of schema data to ") + \
+ _("the recent version. You must specify a previous schema with ") + \
+ _("on the command line. * NOT IMPLEMENTED")],
[ 'list_vendors','l','list-vendors', 0, None, None,
- 'List all supported vendors.'],
+ _("List all supported vendors.")],
[ 'output','o','output', 1, None, 'dest',
- 'The destination for the created schemas. This can be in several '
- 'formats. If <dest> is a file name, then output is written to this '
- 'file. If <dest> is a directory, then <dest>/<Vendor>.sql is
created.'
- ' The default is to create <Vendor>.sql in the current directory. '
- 'NOTE: the first form (<dest> as a filename) is not supported for '
- '--vendors all.' ],
+ _("The destination for the created schemas. This can be in several ")+
\
+ _("formats. If <dest> is a file name, then output is written to ") +
\
+ _("this file. If <dest> is a directory, then <dest>/<Vendor>.sql ") +
\
+ _("is created. The default is to create <Vendor>.sql in the ") +
\
+ _("current directory. NOTE: the first form (<dest> as a filename) ") +
\
+ _("is not supported for --vendors all.") ],
[ 'vendor','v','vendor', 1, 'all', 'vendor',
- 'The vendor to create a script for. If <vendor> is "all", then '
- 'scripts for all supported vendors will be created. <vendor> can '
- 'also be a comma-separated list.'],
+ _("The vendor to create a script for. If <vendor> is 'all', then ") + \
+ _("scripts for all supported vendors will be created. <vendor> can ")+\
+ _("also be a comma-separated list.")],
]
- SUMMARY = \
- "GNUe Schema Scripter creates SQL files based on GNUe Schema Definitions."
+ SUMMARY = _("GNUe Schema Scripter creates SQL files based on GNUe ") + \
+ _("Schema Definitions.")
_PROC_PATH = "gnue.common.schema.scripter.processors.%s"
@@ -192,20 +196,23 @@
sys.stderr.write (_("Unable to create output file %s.") % filename)
sys.exit (1)
+
# Instantiate the given processor and iterate over all schema objects
aModule = self._PROC_PATH % vendor
self.processor = dyn_import (aModule).Processor (self.destination)
print _("Writing schema to %s ...") % filename
+ self.tables = {}
for line in self.processor.comment ( \
_("\nThis file was generated by %s\n") % self.NAME +
- _("from %s\n\n") % self.ARGUMENTS [0] +
+ _("from %s on %s\n\n") % (self.ARGUMENTS [0], strftime ('%c')) +
_("Do not edit manually!\n")):
self.destination.write (line.encode ('utf-8') + "\n")
self.destination.write ("\n")
self.processor.startDump ()
+ self.processor.client_encoding (self.OPTIONS ['encoding'])
self.schema.walk (self.__iterate_objects)
@@ -241,6 +248,7 @@
# ---------------------------------------------------------------------------
def __schema_table (self, sObject):
aTable = TableDefinition (sObject.name)
+ self.tables [aTable.name] = aTable
sObject.walk (self.__schema_fields, tableDef = aTable)
self.processor.writeTable (aTable)
@@ -253,11 +261,11 @@
# process a regular field of a table
if sObject._type == "GSField":
- self.processor.addField (tableDef, sObject)
+ tableDef.fields.append (sObject)
- # process a primary key field
- elif sObject._type == "GSPKField":
- tableDef.pk_fields.append (sObject.name)
+ elif sObject._type == "GSPrimaryKey":
+ pkdef = tableDef.addPrimaryKey (sObject.name)
+ sObject.walk (self.__schema_primarykey, tableDef = tableDef, pDef =
pkdef)
# start an index definition and process it's fields
elif sObject._type == "GSIndex":
@@ -271,11 +279,19 @@
# ---------------------------------------------------------------------------
+ # Iterate over all fields of a primary key
+ # ---------------------------------------------------------------------------
+ def __schema_primarykey (self, sObject, tableDef, pDef):
+ if sObject._type == "GSPKField":
+ pDef.fields.append (sObject)
+
+
+ # ---------------------------------------------------------------------------
# Iterate over all fields of an index
# ---------------------------------------------------------------------------
def __schema_index (self, sObject, tableDef, indexDef):
if sObject._type == "GSIndexField":
- indexDef.fields.append (sObject.name)
+ indexDef.fields.append (sObject)
# ---------------------------------------------------------------------------
@@ -284,36 +300,38 @@
def __data_table (self, sObject):
data = DataDefinition (sObject.tablename)
- self.destination.write ("\n")
- for line in self.processor.comment (_("Data for %s") % data.name):
- self.destination.write (line.encode ('utf-8') + "\n")
-
sObject.walk (self.__data_rows, dataDef = data)
- if len (data.values):
- self.processor.writeData (data)
+ if self.tables.has_key (data.name):
+ tableDef = self.tables [data.name]
+ else:
+ tableDef = None
+ self.processor.writeData (data, tableDef)
+
+
# ---------------------------------------------------------------------------
# Iterate over all rows of a tabledata definition
# ---------------------------------------------------------------------------
def __data_rows (self, sObject, dataDef):
-
if sObject._type == "GSRow":
- if len (dataDef.values):
- self.processor.writeData (dataDef)
+ row = dataDef.addRow ()
+ sObject.walk (self.__data_values, rowDef = row)
- dataDef.clear ()
-
- elif sObject._type == "GSValue":
+ # ---------------------------------------------------------------------------
+ # Iterate over all values of a row definition
+ # ---------------------------------------------------------------------------
+ def __data_values (self, sObject, rowDef):
+ if sObject._type == "GSValue":
if hasattr (sObject, "field"):
- dataDef.columns.append (sObject.field)
+ rowDef.columns.append (sObject.field)
value = ""
for line in sObject._children:
value += line.getContent ()
- dataDef.values.append (value)
+ rowDef.values.append (value)
Modified: trunk/gnue-common/src/schema/scripter/processors/Base.py
===================================================================
--- trunk/gnue-common/src/schema/scripter/processors/Base.py 2004-02-17
16:17:42 UTC (rev 5113)
+++ trunk/gnue-common/src/schema/scripter/processors/Base.py 2004-02-17
16:23:28 UTC (rev 5114)
@@ -21,8 +21,35 @@
# $Id$
from string import join
+from types import ListType
# =============================================================================
+# Exceptions
+# =============================================================================
+
+class EProcessorError (Exception):
+ """
+ This is the base exception class for all processor related exceptions.
+ """
+
+ # ---------------------------------------------------------------------------
+ # Constructor
+ # ---------------------------------------------------------------------------
+
+ def __init__ (self, text):
+ self._text = text
+ Exception.__init__ (self, self._text)
+
+
+ # ---------------------------------------------------------------------------
+ # Instance representation
+ # ---------------------------------------------------------------------------
+
+ def __repr__ (self):
+ return self._text
+
+
+# =============================================================================
# Base class for GNUe Schema Definition processors
# =============================================================================
class BaseProcessor:
@@ -41,11 +68,16 @@
COMMENT_END = ""
COMMENT_SINGLELINE = 1
+ END_COMMAND = "" # Symbol used to terminate a command
+ END_BATCH = "" # Symbol used to terminate a command-sequence
+
+
# ---------------------------------------------------------------------------
# Constructor
# ---------------------------------------------------------------------------
def __init__ (self, destination):
self.destination = destination
+ self.encoding = "UTF-8"
# ---------------------------------------------------------------------------
@@ -64,17 +96,11 @@
# ---------------------------------------------------------------------------
- # Write a text to the destination, and make sure it's utf-8 encoded
- # ---------------------------------------------------------------------------
- def _dumpText (self, text):
- self.destination.write (text.encode ('utf-8'))
-
- # ---------------------------------------------------------------------------
# Create an identifier for a sequence-like thing
# ---------------------------------------------------------------------------
def _getSequenceName (self, tablename, gsObject):
"""
- Create a name for a sequence-like objects using 'tablename' and 'gsObject'.
+ Create a name for a sequence-like object using 'tablename' and 'gsObject'.
"""
res = ""
@@ -95,8 +121,29 @@
# ---------------------------------------------------------------------------
+ # Write a string to the destination using the specified encoding
+ # ---------------------------------------------------------------------------
+
+ def _writeText (self, text):
+ """
+ This method writes the 'text' (string/list) to the destination using
+ encoding. If text is a sequence it will be joined with newlines first.
+ """
+ if isinstance (text, ListType):
+ astr = join (text, "\n") + "\n"
+ else:
+ astr = text
+ self.destination.write (astr.encode (self.encoding))
+
+
+ # ===========================================================================
+ # Schema support methods
+ # ===========================================================================
+
+ # ---------------------------------------------------------------------------
# fully qualify a field with name and datatype
# ---------------------------------------------------------------------------
+
def _qualify (self, gsField):
"""
This method qualifies 'gsField' by concatenating the fieldname and its
@@ -108,6 +155,7 @@
# ---------------------------------------------------------------------------
# get an appropriate representation for gsField's datatype
# ---------------------------------------------------------------------------
+
def _translateType (self, gsField):
"""
Find a method for datatype translation of gsField.type in the current
@@ -122,122 +170,238 @@
# ---------------------------------------------------------------------------
- # Comment all lines from text
+ # Process the fields-sequence of a table definition
# ---------------------------------------------------------------------------
- def comment (self, text):
+
+ def _processFields (self, tableDef):
"""
- Create a sequence of 'commented' lines given in the sequence 'text'. Use
- the COMMENT_* constants to control this functions behaviour.
+ This function iterates over all fields in table definition and calls the
+ _processField () function on it. A processor has to override this last
+ function to do the actual work on fields.
"""
- body = []
- result = []
-
- ruler = "=" * (self.MAX_LINE_LENGTH - len (self.COMMENT_BEGIN) - \
- len (self.COMMENT_END))
- body.append (ruler)
- body.extend (text.split ("\n"))
- body.append (ruler)
+ for field in tableDef.fields:
+ self._processField (tableDef, field, field == tableDef.fields [-1])
- if self.COMMENT_SINGLELINE:
- for line in body:
- result.append ("%s%s%s" % (self.COMMENT_BEGIN, line, self.COMMENT_END))
- else:
- space = " " * len (self.COMMENT_BEGIN)
- first = True
- for line in body:
- if first:
- line = "%s%s" % (self.COMMENT_BEGIN, line)
- first = False
- else:
- if len (line):
- line = "%s%s" % (space, line)
+ # ---------------------------------------------------------------------------
+ # Virtual: Process a single field of a table definition
+ # ---------------------------------------------------------------------------
- result.append (line)
+ def _processField (self, tableDef, gsField, isLast):
+ """
+ A processor can override this method to translate a single field. The
+ argument 'isLast' is set to True if gsField is the last field in the
+ collection.
+ """
+ pass
- if len (result):
- result [-1] += " %s" % self.COMMENT_END
- return result
+ # ---------------------------------------------------------------------------
+ # Virtual: Process the primary key of a table definition
+ # ---------------------------------------------------------------------------
+ def _processPrimaryKey (self, tableDef):
+ """
+ A processor can override this method to translate a primary key definition.
+ """
+ pass
+
# ---------------------------------------------------------------------------
- # Treat 'datetime' as 'timestamp'
+ # Process the indices of a table definition
# ---------------------------------------------------------------------------
- def datetime (self, gsField):
+
+ def _processIndices (self, tableDef):
"""
- The datatype 'datetime' will call 'timestamp' implicitly, so it doesn't
- matter wether GSD files use datetime or timestamp.
+ A processor can override this method to translate all index definitions of
+ a table definition.
"""
- return self.timestamp (gsField)
+ for index in tableDef.indices.values ():
+ self._processIndex (tableDef, index)
# ---------------------------------------------------------------------------
+ # Virtual: Process a index definition
+ # ---------------------------------------------------------------------------
+
+ def _processIndex (self, tableDef, indexDef):
+ """
+ A processor can override this method to translate a single index
+ definition.
+ """
+ pass
+
+
+ # ---------------------------------------------------------------------------
+ # Virtual: Process the constraints of a table definition
+ # ---------------------------------------------------------------------------
+
+ def _processConstraints (self, tableDef):
+ """
+ A processor can override this method to translate all constraints of a
+ table definition.
+ """
+ pass
+
+
+ # ---------------------------------------------------------------------------
# Write a table definition to the destination
# ---------------------------------------------------------------------------
+
def writeTable (self, tableDef):
"""
- This function writes a table definition to the destination. If the table
- definition has primary key fields, _processPrimaryKey () will be called
- first. If there are indices defined, _processIndices () will be called. If
- the table definition has constraints, _processConstraints () will be
- called.
+ This function calls all _process*-functions on the given table definition
+ and finally writes the definition to the destination.
+ """
- NOTE: gnue-schema doesn't support constraints at the moment.
- """
- # Integrate referenced definitions first
- if len (tableDef.pk_fields):
+ # Process all parts of the table definition
+ self._processFields (tableDef)
+
+ if tableDef.primaryKey is not None:
self._processPrimaryKey (tableDef)
- if len (tableDef.indices):
+ if len (tableDef.indices.keys ()):
self._processIndices (tableDef)
- if len (tableDef.constraints):
+ if len (tableDef.constraints.keys ()):
self._processConstraints (tableDef)
- # Now dump the table definition
- for line in self.comment (_("Create table '%s'") % tableDef.name):
- self._dumpText (line + "\n")
+ if len (self.END_BATCH):
+ tableDef.epilogue.append (self.END_BATCH)
- # Create the prologue
- if len (tableDef.prologue):
- self._dumpText (join (tableDef.prologue, ";\n") + ";\n")
+ # and finally write the definition to the destination
+ tableDef.writeDefinition (self.destination, self.encoding)
- # Add the fields and postfields
- self._dumpText ("CREATE TABLE %s\n (" % tableDef.name)
- self._dumpText (join (tableDef.fields + tableDef.postfields,
- ",\n ") + ");\n")
- # Create the epilogue
- if len (tableDef.epilogue):
- self._dumpText (join (tableDef.epilogue, "\n") + "\n")
- self._dumpText ("\n");
+ # ===========================================================================
+ # Data support methods
+ # ===========================================================================
+
# ---------------------------------------------------------------------------
+ # Find a Data transformation service handler for a fields data-type
+ # ---------------------------------------------------------------------------
+
+ def _dts_type (self, gsField, value):
+ """
+ This function looks for a data transformation handler function for the
+ gsField's data type. The function first looks in the current class, and if
+ not successful, asks all superclasses for such a method. On success the
+ method will be called with gsField and value returning it's result. If no
+ handler was found the value will be returned.
+
+ NOTE: data transformation service functions must have a name of
+ 'dts_<type>' where <type> stands for the actual datatype, e.g. "dts_date"
+ is a transformation handler for date-values.
+ """
+ aMethod = self.__findMethod (self.__class__, "dts_%s" % gsField.type)
+ if aMethod is not None:
+ return aMethod (self, gsField, value)
+ else:
+ return value
+
+
+ # ---------------------------------------------------------------------------
+ # Virtual: Process all rows of a data definition
+ # ---------------------------------------------------------------------------
+
+ def _processDataRows (self, dataDef, tableDef):
+ """
+ A processor can override this method to translate all data rows held
+ by the data definition object.
+ """
+ pass
+
+
+ # ---------------------------------------------------------------------------
# Write a data definition to the destination
# ---------------------------------------------------------------------------
- def writeData (self, dataDef):
+ def writeData (self, dataDef, tableDef = None):
"""
- Writes a data definition to destination. Before dumping the data
- definition's sequences, _processData () get's called to prepare the
- definition.
+ Process all data rows in the data definition and writes it to the
+ destination.
"""
- self._processData (dataDef)
+ self._processDataRows (dataDef, tableDef)
- if len (dataDef.prologue):
- self._dumpText (join (dataDef.prologue, "\n"))
+ if len (self.END_BATCH):
+ dataDef.epilogue.append (self.END_BATCH)
- if len (dataDef.lines):
- self._dumpText (join (dataDef.lines, "\n") + "\n")
+ dataDef.writeDefinition (self.destination, self.encoding)
- if len (dataDef.epilogue):
- self._dumpText (join (dataDef.epilogue, "\n") + "\n")
+
+ # ===========================================================================
+ # Miscellaneous public methods
+ # ===========================================================================
+
# ---------------------------------------------------------------------------
+ # Set the client encoding
+ # ---------------------------------------------------------------------------
+
+ def client_encoding (self, encoding = None):
+ """
+ This function creates a comment describing the current encoding of the SQL
+ script generated by the processor. A processor would like to override this
+ function for changing the client encoding. This function will be called by
+ the scripter.
+ """
+ if encoding is not None:
+ self.encoding = encoding
+
+ self._writeText (self.comment (_("Client encoding set to '%s'" % \
+ self.encoding)))
+
+
+ # ---------------------------------------------------------------------------
+ # Comment all lines from text
+ # ---------------------------------------------------------------------------
+
+ def comment (self, text):
+ """
+ Create a sequence of 'commented' lines given in the sequence 'text'. Use
+ the COMMENT_* constants to control this functions behaviour.
+ """
+ body = []
+ result = []
+
+ ruler = "=" * (self.MAX_LINE_LENGTH - len (self.COMMENT_BEGIN) - \
+ len (self.COMMENT_END))
+ body.append (ruler)
+ if isinstance (text, ListType):
+ body.extend (text)
+ else:
+ body.extend (text.split ("\n"))
+ body.append (ruler)
+
+ if self.COMMENT_SINGLELINE:
+ for line in body:
+ result.append ("%s%s%s" % (self.COMMENT_BEGIN, line, self.COMMENT_END))
+
+ else:
+ space = " " * len (self.COMMENT_BEGIN)
+ first = True
+ for line in body:
+ if first:
+ line = "%s%s" % (self.COMMENT_BEGIN, line)
+ first = False
+ else:
+ if len (line):
+ line = "%s%s" % (space, line)
+
+ result.append (line)
+
+ if len (result):
+ result [-1] += " %s" % self.COMMENT_END
+
+ return result
+
+
+ # ---------------------------------------------------------------------------
# Virtual: called on start of a dump
# ---------------------------------------------------------------------------
+
def startDump (self):
"""
This method is called by the scripter on start of a dump. Use it to do
@@ -245,9 +409,11 @@
"""
pass
+
# ---------------------------------------------------------------------------
# Virtual: called on start of a schema dump
# ---------------------------------------------------------------------------
+
def startSchema (self):
"""
This method is called by the scripter on start of a schema dump. Use it to
@@ -259,9 +425,37 @@
# ---------------------------------------------------------------------------
# Virtual: called on start of a data dump
# ---------------------------------------------------------------------------
+
def startData (self):
"""
This method is called by the scripter on start of a data dump. Use it to
take initial actions.
"""
pass
+
+
+ # ---------------------------------------------------------------------------
+ # Deprecated 'timestamp': we won't use timestamp any longer
+ # ---------------------------------------------------------------------------
+
+ def timestamp (self, gsField):
+ """
+ Deprecated - use datatype 'datetime' instead
+ """
+ print _("WARNING: datatype 'timestamp' is depreciated. ") + \
+ _("Use datetime instead.")
+ return self.datetime (gsField)
+
+
+ # ---------------------------------------------------------------------------
+ # Deprecated 'text': we won't use text any longer
+ # ---------------------------------------------------------------------------
+
+ def text (self, gsField):
+ """
+ Deprecated - use datatype 'string' without a length attribute instead
+ """
+ print _("WARNING: datatype 'text' is depreciated. ") + \
+ _("Use 'string' without length instead.")
+ gsField.type = "string"
+ return self.string (gsField)
Modified: trunk/gnue-common/src/schema/scripter/processors/SQL.py
===================================================================
--- trunk/gnue-common/src/schema/scripter/processors/SQL.py 2004-02-17
16:17:42 UTC (rev 5113)
+++ trunk/gnue-common/src/schema/scripter/processors/SQL.py 2004-02-17
16:23:28 UTC (rev 5114)
@@ -33,40 +33,108 @@
backends.
"""
+ END_COMMAND = ";" # Symbol used to terminate a command
+ END_BATCH = "" # Symbol used to terminate a command-sequence
+
# ---------------------------------------------------------------------------
+ # Process the fields collection of a table definition
+ # ---------------------------------------------------------------------------
+
+ def _processFields (self, tableDef):
+ """
+ Populate a table definition's sequences.
+ """
+ tableDef.prologue.append ("")
+ tableDef.prologue.extend (self.comment ("Create table '%s'" % \
+ tableDef.name))
+
+ tableDef.body.append ("CREATE TABLE %s (" % tableDef.name)
+ tableDef.footer.append (")%s" % self.END_COMMAND)
+
+ BaseProcessor._processFields (self, tableDef)
+
+
+
+
+ # ---------------------------------------------------------------------------
+ # A single field is usually added to the definitions body
+ # ---------------------------------------------------------------------------
+
+ def _processField (self, tableDef, gsField, isLast):
+ """
+ Default implementation: Add the qualified field to the table definition's
+ body.
+ """
+ field = " %s" % self._qualify (gsField)
+
+ if not isLast:
+ field += ", "
+
+ tableDef.body.append (field)
+
+
+ # ---------------------------------------------------------------------------
# Primary key definition comes after the last field of a table
# ---------------------------------------------------------------------------
+
def _processPrimaryKey (self, tableDef):
"""
- Primary keys are specified after the last field of the table.
+ The primary key extends the table definition's body by a constraint
+ definition.
"""
- tableDef.postfields.append ("PRIMARY KEY (%s)" %
- join (tableDef.pk_fields, ", "))
+ pkDef = tableDef.primaryKey
+ flist = join ([pkf.name for pkf in pkDef.fields], ", ")
+ if len (tableDef.body):
+ tableDef.body [-1] += ","
+ tableDef.body.append (" CONSTRAINT %s PRIMARY KEY (%s)" % \
+ (pkDef.name, flist))
+
+
# ---------------------------------------------------------------------------
# Integrate index definitions into tableDef
# ---------------------------------------------------------------------------
+
def _processIndices (self, tableDef):
"""
- This function iterates over all specified index definitions and adds a
- CREATE INDEX statement to the epilogue of the table definition.
+ After processing all indices this function integrates these index
+ definitions into the table definition's epilogue.
"""
- # index definition comes after table definition, so we use the epilogue
- epi = tableDef.epilogue
+ BaseProcessor._processIndices (self, tableDef)
- for index in tableDef.indices:
- uni = ""
- if index.unique:
- uni = "UNIQUE "
+ for index in tableDef.indices.values ():
+ tableDef.epilogue.extend (index.merge ())
- epi.append ("")
- epi.extend (self.comment (_("Create index '%s'") % index.name))
- epi.append ("CREATE %sINDEX %s ON %s" % (uni, index.name, tableDef.name))
- epi.append (" (%s);" % join (index.fields, ", "))
+ # ---------------------------------------------------------------------------
+ # Process a single index definition
+ # ---------------------------------------------------------------------------
+ def _processIndex (self, tableDef, indexDef):
+ """
+ This function translates an index definition object into SQL code.
+ """
+ indexDef.prologue.append ("")
+ indexDef.prologue.extend (self.comment ("Create index '%s'" % \
+ indexDef.name))
+
+ if indexDef.unique:
+ uniq = "UNIQUE "
+ else:
+ uniq = ""
+
+ indexDef.header.append ("CREATE %sINDEX %s ON %s" % \
+ (uniq, indexDef.name, tableDef.name))
+
+ indexDef.body.append (" (%s)%s" % \
+ (join ([fld.name for fld in indexDef.fields], ", "), self.END_COMMAND))
+
+ indexDef.epilogue.append ("")
+
+
+
# ---------------------------------------------------------------------------
# Integrate constraints into table definition
# ---------------------------------------------------------------------------
@@ -74,26 +142,76 @@
"""
Constraints are NOT implemented at the moment
"""
- for constraint in tableDef.constraints:
+ for constraint in tableDef.constraints.values ():
pass
+
# ---------------------------------------------------------------------------
# Translate a data definition
# ---------------------------------------------------------------------------
- def _processData (self, dataDef):
+
+ def _processDataRows (self, dataDef, tableDef):
"""
- This function creates an INSERT statement to populate a table with data.
- If the columns-sequence of the data definition holds values, a columnlist
- will be added to the INSERT statement.
+ This function iterates over all rows of the data definition and calls
+ _processDataRow () on them.
"""
- if len (dataDef.columns):
- collist = " (%s)" % join (dataDef.columns, ", ")
+ dataDef.prologue.append ("")
+ dataDef.prologue.extend (self.comment ("Data for '%s'" % dataDef.name))
+
+ for row in dataDef.rows:
+ self._processDataRow (row, dataDef, tableDef)
+
+
+
+ # ---------------------------------------------------------------------------
+ # Process a single data row
+ # ---------------------------------------------------------------------------
+
+ def _processDataRow (self, row, dataDef, tableDef):
+ """
+ This function creates an INSERT statement for the given row definition. If
+ a table definition is available and the row definition has a column list,
+ all appropriate dts_* () functions are called.
+ """
+ # First, replace all empty values by NULL
+ index = 0
+ for val in row.values:
+ if not len (val) or val in ["''", '""']:
+ row.values [index] = "NULL"
+ index += 1
+
+ # if a column list is available we might use some data transformation
+ # services (if a table definition is available too)
+ if len (row.columns):
+ cols = " (%s)" % join (row.columns, ", ")
+
+ if tableDef is not None:
+ vlist = []
+ index = 0
+
+ for column in row.columns:
+ field = tableDef.getField (column)
+ if field is None:
+ raise AttributeError, _("Table '%s' has no field '%s'") % \
+ (tableDef.name, column)
+
+ vlist.append (self._dts_type (field, row.values [index]))
+ index += 1
+
+ else:
+ vlist = row.values
+
+ values = join (vlist, ", ")
+
+ # no column list available, so just concatenate all values
else:
- collist = ""
+ cols = ""
+ values = join (row.values, ", ")
- dataDef.lines.append ("INSERT INTO %s%s VALUES (%s);" % \
- (dataDef.name, collist, join (dataDef.values, ", ")))
+ # and create an insert statement
+ dataDef.body.append ("INSERT INTO %s%s VALUES (%s)%s" % \
+ (dataDef.name, cols, values, self.END_COMMAND))
@@ -104,45 +222,48 @@
# ---------------------------------------------------------------------------
# String usually becomes a 'varchar'
# ---------------------------------------------------------------------------
+
def string (self, gsField):
"""
Returns a 'varchar' or 'varchar (length)' if gsField has a length property.
"""
- res = "varchar"
if hasattr (gsField, "length"):
- res += " (%s)" % gsField.length
+ res = "varchar (%s)" % gsField.length
+ else:
+ res = "text"
return res
+
# ---------------------------------------------------------------------------
# Keep date as 'date'
# ---------------------------------------------------------------------------
+
def date (self, gsField):
"""
Keep date as 'date'
"""
return "date"
+
# ---------------------------------------------------------------------------
# Keep time as 'time'
# ---------------------------------------------------------------------------
+
def time (self, gsField):
"""
Keep time as 'time'
"""
return "time"
+
# ---------------------------------------------------------------------------
- # Keep timestamp as 'timestamp'
+ # Keep datetime as 'datetime'
# ---------------------------------------------------------------------------
- def timestamp (self, gsField):
+
+ def datetime (self, gsField):
"""
- Keep timestamp as 'timestamp'
+ Keep datetime as 'datetime'
"""
- return "timestamp"
+ return "datetime"
- # ---------------------------------------------------------------------------
- # Keep text as 'text'
- # ---------------------------------------------------------------------------
- def text (self, gsField):
- return "text"
Modified: trunk/gnue-common/src/schema/scripter/processors/interbase.py
===================================================================
--- trunk/gnue-common/src/schema/scripter/processors/interbase.py
2004-02-17 16:17:42 UTC (rev 5113)
+++ trunk/gnue-common/src/schema/scripter/processors/interbase.py
2004-02-17 16:23:28 UTC (rev 5114)
@@ -41,12 +41,12 @@
# ---------------------------------------------------------------------------
# Add a GSField instance to the table definition
# ---------------------------------------------------------------------------
- def addField (self, tableDef, gsField):
- field = self._qualify (gsField)
+ def _processField (self, tableDef, gsField, isLast):
+ field = " %s" % self._qualify (gsField)
if gsField.defaultwith == "serial":
gen = self._getSequenceName (tableDef.name, gsField)
- tableDef.prologue.append ("CREATE GENERATOR %s" % gen)
+ tableDef.header.append ("CREATE GENERATOR %s%s" % (gen,
self.END_COMMAND))
self.__addTrigger (tableDef, gsField, gen)
@@ -59,9 +59,12 @@
if not gsField.nullable:
field += " NOT NULL"
- tableDef.fields.append (field)
+ if not isLast:
+ field += ", "
+ tableDef.body.append (field)
+
# ---------------------------------------------------------------------------
# Add a generator trigger to the table definition
# ---------------------------------------------------------------------------
@@ -69,7 +72,7 @@
epi = tableDef.epilogue
epi.append ("")
epi.append ("SET TERM ^ ;")
- epi.append ("CREATE TRIGGER trg_%s FOR %s" % (gsField.name, gen))
+ epi.append ("CREATE TRIGGER trg_%s FOR %s" % (gsField.name, tableDef.name))
epi.append (" ACTIVE BEFORE INSERT POSITION 0 AS")
epi.append (" BEGIN")
epi.append (" NEW.%s = GEN_ID (%s,1);" % (gsField.name, gen))
@@ -84,22 +87,39 @@
# ---------------------------------------------------------------------------
# Keys are always 'integer'
# ---------------------------------------------------------------------------
+
def key (self, gsField):
+ """
+ A key is of type 'integer'.
+ """
return "integer"
+
# ---------------------------------------------------------------------------
- # Text becomes either 'string' or 'blob'
+ # String becomes a 'varchar'
# ---------------------------------------------------------------------------
- def text (self, gsField):
+
+ def string (self, gsField):
+ """
+ If the length of the string is given and at most 2000, 'varchar (length)'
+ is returned, otherwise 'varchar (32765)'.
+ """
if hasattr (gsField, "length") and gsField.length <= 2000:
- return string (gsField)
+ return "varchar (%s)" % gsField.length
+
else:
- return "blob"
+ return "varchar (32765)"
+
# ---------------------------------------------------------------------------
# translate a number according to its precision and length
# ---------------------------------------------------------------------------
+
def number (self, gsField):
+ """
+ A number is translated into a 'smallint', 'integer' or 'numeric' according
+ to its precision and length.
+ """
if gsField.precision == 0:
if gsField.length <= 4:
return "smallint"
@@ -114,9 +134,26 @@
return "numeric (%s,%s)" % (gsField.length + gsField.precision,
gsField.precision)
+
# ---------------------------------------------------------------------------
- # boolean becomes a number; TODO: add some check-constraints
+ # boolean becomes a smallint
# ---------------------------------------------------------------------------
+
def boolean (self, gsField):
+ """
+ Interbase doesn't support booleans, so we use a 'smallint' with a CHECK
+ constraint, which allows only 0, 1 and NULLs.
+ """
return "smallint CHECK (%s IS NULL OR %s IN (0,1))" % \
(gsField.name, gsField.name)
+
+
+ # ---------------------------------------------------------------------------
+ # Datetime maps to date
+ # ---------------------------------------------------------------------------
+
+ def datetime (self, gsField):
+ """
+ Interbase has no datetime, but uses 'timestamp' instead.
+ """
+ return "timestamp"
Modified: trunk/gnue-common/src/schema/scripter/processors/mssql.py
===================================================================
--- trunk/gnue-common/src/schema/scripter/processors/mssql.py 2004-02-17
16:17:42 UTC (rev 5113)
+++ trunk/gnue-common/src/schema/scripter/processors/mssql.py 2004-02-17
16:23:28 UTC (rev 5114)
@@ -25,7 +25,7 @@
from string import join
name = "MSSQL"
-description = "MS SQL Server"
+description = "MS SQL Server (7.x/2000)"
# =============================================================================
# GSD processor for MS SQL Server
@@ -33,12 +33,14 @@
class Processor (SQLProcessor):
MAX_NAME_LENGTH = 31
+ END_BATCH = "GO"
+
# ---------------------------------------------------------------------------
# Process a GSField instance
# ---------------------------------------------------------------------------
- def addField (self, tableDef, gsField):
- field = self._qualify (gsField)
+ def _processField (self, tableDef, gsField, isLast):
+ field = " %s" % self._qualify (gsField)
# build a default value for this field
if gsField.defaultwith == "serial":
@@ -53,8 +55,12 @@
if not gsField.nullable:
field += " NOT NULL"
- tableDef.fields.append (field)
+ if not isLast:
+ field += ","
+ tableDef.body.append (field)
+
+
# ===========================================================================
# Datatype translation
# ===========================================================================
@@ -62,13 +68,22 @@
# ---------------------------------------------------------------------------
# A key field is of type 'int'
# ---------------------------------------------------------------------------
+
def key (self, gsField):
+ """
+ A key is always an 'int'
+ """
return "int"
+
# ---------------------------------------------------------------------------
# A number needs special treatment
# ---------------------------------------------------------------------------
def number (self, gsField):
+ """
+ A number translates to 'smallint', 'int', 'bigint' or 'decimal' according
+ to its precision and length.
+ """
if gsField.precision == 0:
if gsField.length <= 4:
return "smallint"
@@ -85,54 +100,93 @@
return "decimal (%s,%s)" % (gsField.length + gsField.precision,
gsField.precision)
+
# ---------------------------------------------------------------------------
# Keep boolean as 'boolean'
# ---------------------------------------------------------------------------
+
def boolean (self, gsField):
+ """
+ A boolean is translated to 'bit'.
+ """
return "bit"
+
# ---------------------------------------------------------------------------
# datetime is not equivalent to timestamp
# ---------------------------------------------------------------------------
+
def datetime (self, gsField):
+ """
+ 'datetime' is the one and only date-time type of SQL Server.
+ """
return "datetime"
+
# ---------------------------------------------------------------------------
# date becomes datetime
# ---------------------------------------------------------------------------
+
def date (self, gsField):
- return "smalldatetime"
+ """
+ date is mapped to 'datetime'
+ """
+ return self.datetime (gsField)
+
# ---------------------------------------------------------------------------
# time becomes datetime
# ---------------------------------------------------------------------------
+
def time (self, gsField):
- return "datetime"
+ """
+ time is mapped to 'datetime'
+ """
+ return self.datetime (gsField)
+
+ # ===========================================================================
+ # Data transformation services
+ # ===========================================================================
+
# ---------------------------------------------------------------------------
- # timestamp becomes datetime
+ # convert datetime values using the convert method
# ---------------------------------------------------------------------------
- def timestamp (self, gsField):
- return "timestamp"
+ def dts_datetime (self, gsField, value):
+ """
+ Datetime values will be converted using SQL Server's CONVERT function.
+ """
+ return "CONVERT (DATETIME, %s, 102)" % value
+
+
# ---------------------------------------------------------------------------
- # Before starting a dump, set encoding to UTF-8
+ # map date to datetime
# ---------------------------------------------------------------------------
- def startDump (self):
- # self._dumpText ("\\encoding utf8\n")
- pass
+ def dts_date (self, gsField, value):
+ """
+ Call dts_datetime ().
+ """
+ return self.dts_datetime (gsField, value)
+
# ---------------------------------------------------------------------------
- # Extend the writeTable () function with a batch-closing 'GO'
+ # call dts_datetime ()
# ---------------------------------------------------------------------------
- def writeTable (self, tableDef):
- SQLProcessor.writeTable (self, tableDef)
- self._dumpText ("GO\n\n")
+ def dts_time (self, gsField, value):
+ """
+ Call dts_datetime ().
+ """
+ return self.dts_datetime (gsField, value)
+
+
# ---------------------------------------------------------------------------
- # Extend the writeTable () function with a batch-closing 'GO'
+ # call dts_datetime ()
# ---------------------------------------------------------------------------
- def writeData (self, dataDef):
- SQLProcessor.writeData (self, dataDef)
- self._dumpText ("GO\n\n")
+ def dts_timestamp (self, gsField, value):
+ """
+ Call dts_datetime ()
+ """
+ return self.dts_datetime (gsField, value)
Modified: trunk/gnue-common/src/schema/scripter/processors/mysql.py
===================================================================
--- trunk/gnue-common/src/schema/scripter/processors/mysql.py 2004-02-17
16:17:42 UTC (rev 5113)
+++ trunk/gnue-common/src/schema/scripter/processors/mysql.py 2004-02-17
16:23:28 UTC (rev 5114)
@@ -22,6 +22,7 @@
#
from gnue.common.schema.scripter.processors.SQL import SQLProcessor
+from gnue.common.schema.scripter.processors.Base import EProcessorError
name = "MySQL"
description = "MySQL (3.x/4.x)"
@@ -34,27 +35,38 @@
MAX_NAME_LENGTH = 64
+
# ---------------------------------------------------------------------------
- # Add a GSField instance to the table definition
+ # Process a GSField instance
# ---------------------------------------------------------------------------
- def addField (self, tableDef, gsField):
- field = self._qualify (gsField)
+
+ def _processField (self, tableDef, gsField, isLast):
+ field = " %s" % self._qualify (gsField)
if gsField.defaultwith == "serial":
field += " auto_increment"
elif gsField.defaultwith == "timestamp":
- field += " DEFAULT `current_timestamp()`"
+ if gsField.type != "timestamp":
+ gsField.type = "timestamp"
+ field = " %s" % self._qualify (gsField)
+ print _("WARNING: changing column type of '%s.%s' to 'timestamp'" % \
+ (tableDef.name, gsField.name))
+
elif hasattr (gsField, "default") and gsField.default is not None:
field += " DEFAULT %s" % gsField.default
if not gsField.nullable:
field += " NOT NULL"
- tableDef.fields.append (field)
+ if not isLast:
+ field += ","
+ tableDef.body.append (field)
+
+
# ===========================================================================
# Datatype translation
# ===========================================================================
@@ -62,28 +74,38 @@
# ---------------------------------------------------------------------------
# Keys are unsigned integers
# ---------------------------------------------------------------------------
+
def key (self, gsField):
+ """
+ A key is an unsigned integer.
+ """
return "int unsigned"
+
# ---------------------------------------------------------------------------
# String
# ---------------------------------------------------------------------------
+
def string (self, gsField):
+ """
+ If the string has no length or exceeds 255 characters the datatype 'text'
+ will be used, otherwise the result is 'varchar'.
+ """
if hasattr (gsField, "length") and gsField.length <= 255:
return "varchar (%s)" % gsField.length
else:
return "text"
- # ---------------------------------------------------------------------------
- # text becomes either a 'string' or 'text'
- # ---------------------------------------------------------------------------
- def text (self, gsField):
- return self.string (gsField)
# ---------------------------------------------------------------------------
# Translate a number according to its precision and length
# ---------------------------------------------------------------------------
+
def number (self, gsField):
+ """
+ Number translates to 'smallint', 'int', 'bigint' or 'decimal' according to
+ its precision and length.
+ """
if gsField.precision == 0:
if gsField.length <= 4:
return "smallint"
@@ -99,8 +121,28 @@
else:
return "decimal (%s,%s)" % (gsField.length, gsField.precision)
+
# ---------------------------------------------------------------------------
# MySQL has no native boolean data type
# ---------------------------------------------------------------------------
+
def boolean (self, gsField):
+ """
+ In MySQL a boolean maps to a tinyint (1)
+ """
return "tinyint (1) unsigned"
+
+
+ # ---------------------------------------------------------------------------
+ # MySQL has a timestamp, which is needed for 'defaultwith timestamp' columns
+ # ---------------------------------------------------------------------------
+
+ def timestamp (self, gsField):
+ """
+ MySQL is allowed to use timestamp for fields with a default-timestamp
+ value.
+ """
+ if gsField.defaultwith == 'timestamp':
+ return "timestamp"
+ else:
+ return SQLProcessor.timestamp (self, gsField)
Modified: trunk/gnue-common/src/schema/scripter/processors/oracle.py
===================================================================
--- trunk/gnue-common/src/schema/scripter/processors/oracle.py 2004-02-17
16:17:42 UTC (rev 5113)
+++ trunk/gnue-common/src/schema/scripter/processors/oracle.py 2004-02-17
16:23:28 UTC (rev 5114)
@@ -18,7 +18,7 @@
#
# Copyright 2001-2004 Free Software Foundation
#
-# $Id: $
+# $Id$
#
from gnue.common.schema.scripter.processors.SQL import SQLProcessor
@@ -39,15 +39,15 @@
# ---------------------------------------------------------------------------
# add a GSField instance to the table definition
# ---------------------------------------------------------------------------
- def addField (self, tableDef, gsField):
- field = self._qualify (gsField)
+ def _processField (self, tableDef, gsField, isLast):
+ field = " %s" % self._qualify (gsField)
# Add a 'serial' as default value
if gsField.defaultwith == "serial":
seq = self._getSequenceName (tableDef.name, gsField)
- tableDef.prologue.append ("CREATE SEQUENCE %s MAXVALUE %s NOCYCLE" % \
- (seq, "9" * self._PK_PRECISION))
+ tableDef.prologue.append ("CREATE SEQUENCE %s MAXVALUE %s NOCYCLE%s" % \
+ (seq, "9" * self._PK_PRECISION, self.END_COMMAND))
trig = []
trig.append (" IF :new.%s IS NULL THEN" % gsField.name)
trig.append (" SELECT %s.nextval INTO :new.%s FROM dual;" % \
@@ -77,9 +77,12 @@
if not gsField.nullable:
field += " NOT NULL"
- tableDef.fields.append (field)
+ if not isLast:
+ field += ","
+ tableDef.body.append (field)
+
# ---------------------------------------------------------------------------
# Add a trigger for defaults to the table definition
# ---------------------------------------------------------------------------
@@ -87,15 +90,16 @@
epi = tableDef.epilogue
epi.append ("")
- epi.append ("CREATE OR REPLACE TRIGGER t__%s__pre" % tableDef.name)
+ epi.append ("CREATE OR REPLACE TRIGGER t__%s_%s_pre" % (tableDef.name,
+ gsField.name))
epi.append (" BEFORE INSERT ON %s" % tableDef.name)
- epi.append (" FOR EACH ROW")
- epi.append (" WHEN ((new.%s IS NULL))" % gsField.name)
+ epi.append (" FOR EACH ROW WHEN (:new.%s IS NULL)" % gsField.name)
epi.append (" BEGIN")
epi.extend (body)
epi.append (" END;")
epi.append ("/")
+
# ===========================================================================
# Datatype translation
# ===========================================================================
@@ -103,47 +107,61 @@
# ---------------------------------------------------------------------------
# String becomes 'varchar2'
# ---------------------------------------------------------------------------
+
def string (self, gsField):
- return "varchar2 (%s)" % gsField.length
+ """
+ If the string has no length or exceeds 2000 characters, we use the
+ datatype 'long', otherwise 'varchar2' will be used.
+ """
+ if hasattr (gsField, 'length') and gsField.length <= 2000:
+ return "varchar2 (%s)" % gsField.length
+ else:
+ return "long"
+
# ---------------------------------------------------------------------------
# time becomes 'date'
# ---------------------------------------------------------------------------
+
def time (self, gsField):
+ """
+ Time translates to 'date'
+ """
return "date"
- # ---------------------------------------------------------------------------
- # so does timestamp
- # ---------------------------------------------------------------------------
- def timestamp (self, gsField):
- return "date"
- # ---------------------------------------------------------------------------
- # text becomes either 'string' or 'long'
- # ---------------------------------------------------------------------------
- def text (self, gsField):
- if hasattr (gsField, 'length') and gsField.length <= 2000:
- return self.string (gsField)
- else:
- return "long"
# ---------------------------------------------------------------------------
# A key is a number of _PK_PRECISION
# ---------------------------------------------------------------------------
+
def key (self, gsField):
+ """
+ A key is a number of a given precision (see _PK_PRECISION)
+ """
return "number (%s)" % self._PK_PRECISION
+
# ---------------------------------------------------------------------------
# Oracle doesn't seem to have booleans, so we're using number (1)
# ---------------------------------------------------------------------------
+
def boolean (self, gsField):
+ """
+ Oracle has no native boolean type. So a boolean translates to a number (1)
+ with a CHECK constraint, which allows only 0, 1 or NULLs.
+ """
return "number (1) CHECK (%s IS NULL OR %s IN (0,1))" % \
(gsField.name, gsField.name)
# ---------------------------------------------------------------------------
# Translate a number according to its precision and length
# ---------------------------------------------------------------------------
+
def number (self, gsField):
+ """
+ A number will be kept as 'number' with the given precision.
+ """
if gsField.precision == 0:
return "number (%s)" % gsField.length
else:
Modified: trunk/gnue-common/src/schema/scripter/processors/postgresql.py
===================================================================
--- trunk/gnue-common/src/schema/scripter/processors/postgresql.py
2004-02-17 16:17:42 UTC (rev 5113)
+++ trunk/gnue-common/src/schema/scripter/processors/postgresql.py
2004-02-17 16:23:28 UTC (rev 5114)
@@ -30,21 +30,25 @@
# =============================================================================
# GSD processor for PostgreSQL
# =============================================================================
+
class Processor (SQLProcessor):
+
MAX_NAME_LENGTH = 31
+
# ---------------------------------------------------------------------------
# Process a GSField instance
# ---------------------------------------------------------------------------
- def addField (self, tableDef, gsField):
- field = self._qualify (gsField)
+ def _processField (self, tableDef, gsField, isLast):
+ field = " %s" % self._qualify (gsField)
+
# build a default value for this field
if gsField.defaultwith == "serial":
seq = self._getSequenceName (tableDef.name, gsField)
- tableDef.prologue.append ("CREATE SEQUENCE %s" % seq)
+ tableDef.header.append ("CREATE SEQUENCE %s;" % seq)
field += " DEFAULT nextval ('%s')" % seq
elif gsField.defaultwith == "timestamp":
@@ -56,8 +60,12 @@
if not gsField.nullable:
field += " NOT NULL"
- tableDef.fields.append (field)
+ if not isLast:
+ field += ", "
+ tableDef.body.append (field)
+
+
# ===========================================================================
# Datatype translation
# ===========================================================================
@@ -68,10 +76,15 @@
def key (self, gsField):
return "int8"
+
# ---------------------------------------------------------------------------
# A number needs special treatment
# ---------------------------------------------------------------------------
def number (self, gsField):
+ """
+ Number translates to smallint, integer, bigint or numeric according to its
+ precision and length.
+ """
if gsField.precision == 0:
if gsField.length <= 4:
return "smallint"
@@ -91,12 +104,23 @@
# ---------------------------------------------------------------------------
# Keep boolean as 'boolean'
# ---------------------------------------------------------------------------
+
def boolean (self, gsField):
+ """
+ PostgreSQL has a native type boolean.
+ """
return "boolean"
+
# ---------------------------------------------------------------------------
- # Before starting a dump, set encoding to UTF-8
+ # Set the client encoding as requested by the scripter
# ---------------------------------------------------------------------------
- def startDump (self):
- self._dumpText ("\\encoding utf8\n")
+ def client_encoding (self, encoding):
+ """
+ PostgreSQL sets the client encoding with a '\encoding' command.
+ """
+ SQLProcessor.client_encoding (self, encoding)
+ self._writeText ("\\encoding %s\n" % self.encoding)
+
+
[Prev in Thread] |
Current Thread |
[Next in Thread] |
- r5114 - in trunk/gnue-common/src/schema/scripter: . processors,
johannes <=