#-*- mode: python; mode: fold -*-
# (C) Copyright 2005 AixtraWare <http://aixtraware.de>
# Author: Joachim Schmitz <js@aixtraware.de>
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License version 2 as published
# by the Free Software Foundation.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software
# Foundation, Inc., 59 Temple Place - Suite 330, Boston, MA
# 02111-1307, USA.
#
# $Id: WAeUPTables.py 2192 2007-09-11 12:48:10Z henrik $

from zope.interface import implements
from Globals import InitializeClass
from Products.ZCatalog.ZCatalog import ZCatalog
from Products.ZCatalog.ProgressHandler import ZLogHandler
from AccessControl import ClassSecurityInfo
from Products.CMFCore.permissions import ModifyPortalContent
from Products.CMFCore.utils import getToolByName
from Products.CMFCore.CatalogTool import CatalogTool
from Products.CPSSchemas.StorageAdapter import MappingStorageAdapter
from Products.CPSSchemas.DataStructure import DataStructure
from Products.CPSSchemas.DataModel import DataModel
from Products.AdvancedQuery import Eq, Between, Le,In
import urllib
import DateTime,time
import csv,re
import logging
import Globals
# Filesystem locations: the product package directory and the Zope instance home.
p_home = Globals.package_home(globals())
i_home = Globals.INSTANCE_HOME

# Object lifecycle marker strings.  NOTE(review): "sheduled" is a typo, but the
# string value may be persisted/compared elsewhere, so it must not be changed here.
ADDING_SHEDULED = "adding_sheduled"
OBJECT_CREATED = "object_created"

from interfaces import IWAeUPTable

class AttributeHolder(object):
    """Bare object used as an attribute namespace for catalog record data."""
    pass

def dict2ob(dict):
    """Return an AttributeHolder with one attribute per key of the mapping."""
    ob = AttributeHolder()
    ob.__dict__.update(dict)
    return ob

class WAeUPTable(ZCatalog): ###(
    """Base class for the WAeUP catalog tables.

    A thin ZCatalog wrapper; subclasses define `name` (the catalog id)
    and `key` (the field that uniquely identifies a record).
    """

    implements(IWAeUPTable)
    security = ClassSecurityInfo()
    meta_type = None

    def __init__(self,name=None):
        # Default to the class-level catalog name when none is supplied.
        # ('is None' instead of '== None': identity test is the correct idiom.)
        if name is None:
            name = self.name
        ZCatalog.__init__(self,name)

    def refreshCatalog(self, clear=0, pghandler=None): ###(
        """ don't refresh for a normal table """

        if self.REQUEST and self.REQUEST.RESPONSE:
            self.REQUEST.RESPONSE.redirect(
              URL1 +
              '/manage_catalogAdvanced?manage_tabs_message=Catalog%20refresh%20not%20implemented')

###)

    def manage_catalogClear(self, REQUEST=None, RESPONSE=None, URL1=None): ###(
        """ clears the whole enchilada """

        #if REQUEST and RESPONSE:
        #    RESPONSE.redirect(
        #      URL1 +
        #      '/manage_catalogAdvanced?manage_tabs_message=Catalog%20Clearing%20disabled')

        self._catalog.clear()
        if REQUEST and RESPONSE:
            RESPONSE.redirect(
              URL1 +
              '/manage_catalogAdvanced?manage_tabs_message=Catalog%20cleared')

###)

    def record2dict(self,fields,record):
        """Map the requested catalog record fields into a plain dict.

        'sex' is rendered as 'F' (truthy) / 'M' (falsy), 'lga' values are
        resolved through the local_gov_areas vocabulary, and any missing or
        falsy value becomes ''.
        """
        result = {}
        for field in fields:
            value = getattr(record, field, None)
            if field == 'sex':
                # sex is stored as a boolean-like flag
                if value:
                    result[field] = 'F'
                else:
                    result[field] = 'M'
            elif not value:
                result[field] = ''
            elif field == 'lga':
                result[field] = self.portal_vocabularies.local_gov_areas.get(value)
            else:
                result[field] = value
        return result

    def addRecord(self, **data): ###(
        # The uid is the same as "bed".
        uid = data[self.key]
        res = self.searchResults({"%s" % self.key : uid})
        if len(res) > 0:
            raise ValueError("More than one record with uid %s" % uid)
        self.catalog_object(dict2ob(data), uid=uid)
        return uid

###)

    def deleteRecord(self, uid):
        """Remove the record cataloged under uid from this table."""
        self.uncatalog_object(uid)

    def searchAndSetRecord(self, **data):
        """Abstract hook; subclasses (e.g. PinTable) provide the real logic."""
        # BUG FIX: `raise NotImplemented` raises a TypeError in Python 2
        # because NotImplemented is not an exception; use the proper
        # NotImplementedError instead.
        raise NotImplementedError("searchAndSetRecord must be overridden by subclasses")

    def modifyRecord(self, record=None, **data): ###(
        #records = self.searchResults(uid=uid)
        uid = data[self.key]
        if record is None:
            records = self.searchResults({"%s" % self.key : uid})
            if len(records) > 1:
                # Can not happen, but anyway...
                raise ValueError("More than one record with uid %s" % uid)
            if len(records) == 0:
                raise KeyError("No record for uid %s" % uid)
            record = records[0]
        record_data = {}
        for field in self.schema() + self.indexes():
            record_data[field] = getattr(record, field)
        # Add the updated data:
        record_data.update(data)
        self.catalog_object(dict2ob(record_data), uid)

###)

    def reindexIndex(self, name, REQUEST,pghandler=None): ###(
        if isinstance(name, str):
            name =  (name,)
        paths = self._catalog.uids.items()
        i = 0
        #import pdb;pdb.set_trace()
        for p,rid in paths:
            i += 1
            metadata = self.getMetadataForRID(rid)
            record_data = {}
            for field in name:
                record_data[field] = metadata.get(field)
            uid = metadata.get(self.key)
            self.catalog_object(dict2ob(record_data), uid, idxs=name,
                                update_metadata=0)

###)

    security.declareProtected(ModifyPortalContent,"exportAllRecords") ###(
    def exportAllRecords(self):
        "export a WAeUPTable"
        #import pdb;pdb.set_trace()
        fields = [field for field in self.schema()]
        format = ','.join(['"%%(%s)s"' % fn for fn in fields])
        csv = []
        csv.append(','.join(['"%s"' % fn for fn in fields]))
        for uid in self._catalog.uids:
            records = self.searchResults({"%s" % self.key : uid})
            if len(records) > 1:
                # Can not happen, but anyway...
                raise ValueError("More than one record with uid %s" % uid)
            if len(records) == 0:
                raise KeyError("No record for uid %s" % uid)
            rec = records[0]
            csv.append(format % rec)
        current = DateTime.DateTime().strftime("%d-%m-%y_%H_%M_%S")
        open("%s/import/%s-%s.csv" % (i_home,self.getId(),current),"w+").write('\n'.join(csv))

###)

    security.declareProtected(ModifyPortalContent,"dumpAll")###(
    def dumpAll(self):
        """dump all data in the table to a csv"""
        # Writes <instance>/export/<table>_<timestamp>.csv in chunks, gzips
        # the result and redirects the browser back to the calling page.
        member = self.portal_membership.getAuthenticatedMember()
        logger = logging.getLogger('WAeUPTables.dump_%s' % self.__name__)
        current = DateTime.DateTime().strftime("%d-%m-%y_%H_%M_%S")
        export_file = "%s/export/%s_%s.csv" % (i_home,self.__name__,current,)
        print export_file
        res_list = []   # NOTE(review): apparently unused
        lines = []
        # Prefer an explicit export_keys list; fall back to the full schema.
        if hasattr(self,"export_keys"):
            fields = self.export_keys
        else:
            fields = []
            for f in self.schema():
                fields.append(f)
        headline = ','.join(fields)
        #open(export_file,"a").write(headline +'\n')
        out = open(export_file,"wb")
        out.write(headline +'\n')
        out.close()
        # NOTE(review): this append handle is never explicitly closed.
        out = open(export_file,"a")
        csv_writer = csv.DictWriter(out,fields,)
        # NOTE(review): `format` is unused since the csv module took over.
        format = '"%(' + ')s","%('.join(fields) + ')s"'
        records = self()
        nr2export = len(records)
        logger.info('%s starts dumping, %s records to export' % (member,nr2export))
        chunk = 2000
        total = 0
        start = DateTime.DateTime().timeTime()
        start_chunk = DateTime.DateTime().timeTime()
        for record in records:
            not_all = False   # NOTE(review): apparently unused
            d = self.record2dict(fields,record)
            #d['state'],d['lga'] = formatLGA(d['lga'],voc = self.portal_vocabularies.local_gov_areas)
            #lines.append(format % d)
            lines.append(d)
            total += 1
            # Flush buffered rows every `chunk` records and once at the end.
            if total and not total % chunk or total == len(records):
                #open(export_file,"a").write('\n'.join(lines) +'\n')
                csv_writer.writerows(lines)
                anz = len(lines)
                logger.info("wrote %(anz)d  total written %(total)d" % vars())
                end_chunk = DateTime.DateTime().timeTime()
                duration = end_chunk-start_chunk
                per_record = duration/anz
                till_now = end_chunk - start
                avarage_per_record = till_now/total
                # Project the finish time from the average time per record.
                estimated_end = DateTime.DateTime(start + avarage_per_record * nr2export)
                estimated_end = estimated_end.strftime("%H:%M:%S")
                logger.info('%(duration)4.1f, %(per_record)4.3f,end %(estimated_end)s' % vars())
                start_chunk = DateTime.DateTime().timeTime()
                lines = []
        end = DateTime.DateTime().timeTime()
        logger.info('total time %6.2f m' % ((end-start)/60))
        import os
        filename, extension = os.path.splitext(export_file)
        from subprocess import call
        msg = "wrote %(total)d records to %(export_file)s" % vars()
        # Compress the export in place; on success report the .gz name.
        try:
            retcode = call('gzip %s' % (export_file),shell=True)
            if retcode == 0:
                msg = "wrote %(total)d records to %(export_file)s.gz" % vars()
        except OSError, e:
            retcode = -99
            logger.info("zip failed with %s" % e)
        logger.info(msg)
        args = {'portal_status_message': msg}
        #url = self.REQUEST.get('URL1') + '?' + urlencode(args)
        url = self.REQUEST.get('URL2')
        return self.REQUEST.RESPONSE.redirect(url)
    ###)

    security.declarePrivate("_import_old") ###(
    def _import_old(self,filename,schema,layout, mode,logger):
        """Validate <instance>/import/<filename>.csv against a CPS layout.

        Every row is validated by the layout's widget validators; valid rows
        become DataModel objects, invalid rows are written (with an Error
        column) to a *_not_imported*.csv audit file.  Returns a dict with
        counters, the valid/invalid record lists and the audit file names.
        """
        # NOTE(review): transaction/random appear unused in this method.
        import transaction
        import random
        pm = self.portal_membership
        member = pm.getAuthenticatedMember()
        current = DateTime.DateTime().strftime("%d-%m-%y_%H_%M_%S")
        import_fn = "%s/import/%s.csv" % (i_home,filename)
        imported_fn = "%s/import/%s_imported%s.csv" % (i_home,filename,current)
        not_imported_fn = "%s/import/%s_not_imported%s.csv" % (i_home,filename,current)
        start = True
        tr_count = 1
        total_imported = 0
        total_not_imported = 0
        total = 0
        iname =  "%s" % filename
        not_imported = []
        imported = []
        valid_records = []
        invalid_records = []
        # Result skeleton; the lists are shared with the locals above, so
        # later appends are visible through d as well.
        d = {}
        d['mode'] = mode
        d['imported'] = total_imported
        d['not_imported'] = total_not_imported
        d['valid_records'] = valid_records
        d['invalid_records'] = invalid_records
        d['import_fn'] = import_fn
        d['imported_fn'] = imported_fn
        d['not_imported_fn'] = not_imported_fn
        if schema is None:
            em = 'No schema specified'
            logger.error(em)
            return d
        if layout is None:
            em = 'No layout specified'
            logger.error(em)
            return d
        # One validator callable per layout widget.
        validators = {}
        for widget in layout.keys():
            try:
                validators[widget] = layout[widget].validate
            except AttributeError:
                logger.info('%s has no validate attribute' % widget)
                return d
        # if mode == 'edit':
        #     importer = self.importEdit
        # elif mode == 'add':
        #     importer = self.importAdd
        # else:
        #     importer = None
        # NOTE(review): bare except hides the real error (missing file,
        # permissions, ...); a narrower IOError would be preferable.
        try:
            items = csv.DictReader(open(import_fn,"rb"),
                                   dialect="excel",
                                   skipinitialspace=True)
        except:
            em = 'Error reading %s.csv' % filename
            logger.error(em)
            return d
        #import pdb;pdb.set_trace()
        for item in items:
            if start:
                # First row: read the header to determine the importable
                # columns (ignoring 'ignore*' and ALL-CAPS names) and write
                # the headers of both audit files.
                start = False
                logger.info('%s starts import from %s.csv' % (member,filename))
                #import_keys = [k for k in item.keys() if not k.startswith('ignore')]
                attrs = csv.reader(open("%s/import/%s.csv" % (i_home,filename),"rb"),
                                   dialect="excel",
                                   skipinitialspace=True).next()
                import_keys = [k for k in attrs if not (k.startswith('ignore') or k.isupper())]
                diff2schema = set(import_keys).difference(set(schema.keys()))
                diff2layout = set(import_keys).difference(set(layout.keys()))
                if diff2layout:
                    em = "not ignorable key(s) %s found in heading" % diff2layout
                    logger.info(em)
                    return d
                s = ','.join(['"%s"' % fn for fn in import_keys])
                open(not_imported_fn,"a").write(s + ',"Error"'+ '\n')
                #s = '"id",' + s
                open(imported_fn,"a").write(s + '\n')
                format = ','.join(['"%%(%s)s"' % fn for fn in import_keys])
                format_error = format + ',"%(Error)s"'
                #format = '"%(id)s",'+ format
                adapters = [MappingStorageAdapter(schema, item)]
            # Wrap the row in CPS DataModel/DataStructure and run every
            # widget validator; collected error text marks the row invalid.
            dm = DataModel(item, adapters,context=self)
            ds = DataStructure(data=item,datamodel=dm)
            error_string = ""
            for k in import_keys:
                if not validators[k](ds,mode=mode):
                    error_string += " %s : %s" % (k,ds.getError(k))
            # if not error_string and importer:
            #     item.update(dm)
            #     item['id'],error = importer(item)
            #     if error:
            #         error_string += error
            if error_string:
                item['Error'] = error_string
                invalid_records.append(dm)
                not_imported.append(format_error % item)
                total_not_imported += 1
            else:
                em = format % item
                valid_records.append(dm)
                imported.append(em)
                #logger.info("%(total_imported)d of %(total)d %(em)s" % vars())
                tr_count += 1
                total_imported += 1
            total += 1
        if len(imported) > 0:
            open(imported_fn,"a").write('\n'.join(imported))
        if len(not_imported) > 0:
            open(not_imported_fn,"a").write('\n'.join(not_imported))
        #em = "Imported: %d, not imported: %d of total %d" % (total_imported,total_not_imported,total)
        d['imported'] = total_imported
        d['not_imported'] = total_not_imported
        d['valid_records'] = valid_records
        d['invalid_records'] = invalid_records
        d['imported_fn'] = imported_fn
        d['not_imported_fn'] = not_imported_fn
        #logger.info(em)
        return d
    ###)

    security.declarePrivate("_import") ###(
    def _import_new(self,csv_items,schema, layout, mode,logger):
        """Validate rows from a csv.DictReader instance.

        Returns a dict with 'valid_records' (DataModel objects ready to be
        added/edited), 'invalid_records' (raw rows carrying an 'Error'
        column), 'duplicate_records' and imported/not_imported counters.
        """
        start = True
        total_imported = 0
        total_not_imported = 0
        total = 0
        valid_records = []
        invalid_records = []
        duplicate_records = []
        d = {}
        d['mode'] = mode
        d['imported'] = total_imported
        d['not_imported'] = total_not_imported
        d['valid_records'] = valid_records
        d['invalid_records'] = invalid_records
        # BUG FIX: duplicate_records used to be stored under the
        # 'invalid_records' key, clobbering the real invalid list.
        d['duplicate_records'] = duplicate_records
        # One validator callable per layout widget.
        validators = {}
        for widget in layout.keys():
            try:
                validators[widget] = layout[widget].validate
            except AttributeError:
                logger.info('%s has no validate attribute' % widget)
                return d
        import_keys = []
        adapters = []
        for item in csv_items:
            if start:
                start = False
                # BUG FIX: this branch referenced the undefined names
                # 'member' and 'filename' and re-opened the csv file by
                # name; derive the importable keys from the first row
                # instead (ignoring 'ignore*' and ALL-CAPS columns).
                logger.info('import from csv.DictReader starts')
                import_keys = [k for k in item.keys() if not (k.startswith('ignore') or k.isupper())]
                diff2layout = set(import_keys).difference(set(layout.keys()))
                if diff2layout:
                    em = "not ignorable key(s) %s found in heading" % diff2layout
                    logger.info(em)
                    return d
                adapters = [MappingStorageAdapter(schema, item)]
            # Wrap the row in CPS DataModel/DataStructure and run every
            # widget validator; collected error text marks the row invalid.
            dm = DataModel(item, adapters,context=self)
            ds = DataStructure(data=item,datamodel=dm)
            error_string = ""
            for k in import_keys:
                if not validators[k](ds,mode=mode):
                    error_string += " %s : %s" % (k,ds.getError(k))
            if error_string:
                item['Error'] = error_string
                invalid_records.append(item)
                total_not_imported += 1
            else:
                # BUG FIX: dropped "em = format % item", which used an
                # undefined 'format' variable and raised NameError here.
                valid_records.append(dm)
                total_imported += 1
            total += 1
        d['imported'] = total_imported
        d['not_imported'] = total_not_imported
        return d
    ###)

###)

class AccommodationTable(WAeUPTable): ###(
    """Catalog of bed-space records, keyed by bed id."""

    meta_type = 'WAeUP Accommodation Tool'
    name = "portal_accommodation"
    key = "bed"

    def __init__(self,name=None):
        # 'is None' instead of '== None': identity test is the correct idiom.
        if name is None:
            name = self.name
        WAeUPTable.__init__(self, name)

    def searchAndReserveBed(self, student_id,bed_type):
        """Reserve a free bed of bed_type for student_id.

        Returns (1, bed) on success, (-1, msg) when the student already
        holds a bed and (-2, msg) when no bed of this type is free.
        """
        records = self.searchResults({'student' : student_id})
        if len(records) > 0:
            return -1,"Student with Id %s already booked bed %s." % (student_id,records[0].bed)

        # Pick the first unoccupied bed of the requested type.
        # NOTE(review): not concurrency-safe if two requests race for the
        # same bed — confirm whether ZODB conflict handling covers this.
        records = [r for r in self.searchResults({'bed_type' : bed_type}) if not r.student]
        if len(records) == 0:
            return -2,"No bed available"
        rec = records[0]
        self.modifyRecord(bed=rec.bed,student=student_id)
        s_logger = logging.getLogger('WAeUPTables.AccommodationTable.searchAndReserveBed')
        s_logger.info('%s reserved bed %s' % (student_id,rec.bed))
        return 1,rec.bed


InitializeClass(AccommodationTable)

###)

class PinTable(WAeUPTable): ###(
    """Catalog of PIN records; a pin can be bound to at most one student."""
    # Imported at class level, so it is reachable from methods only as a
    # class attribute (self.ConflictError), not as a bare name.
    from ZODB.POSException import ConflictError
    meta_type = 'WAeUP Pin Tool'
    name = "portal_pins"
    key = 'pin'

    def __init__(self,name=None):
        # 'is None' instead of '== None': identity test is the correct idiom.
        if name is None:
            name = self.name
        WAeUPTable.__init__(self, name)


    def searchAndSetRecord(self, uid, student_id,prefix):
        """Bind pin uid to student_id.

        Return codes: 1 bound now, 2 already bound to this student (or
        ZODB write conflict), 0 bound to another student, -1 unknown pin,
        -2 the student already used another pin with this prefix, -3
        defensive fallback.
        """

        # The following line must be activated after resetting the
        # the portal_pins table. This is to avoid duplicate entries
        # and disable duplicate payments.

        #student_id = student_id.upper()

        records = self.searchResults(student = student_id)
        if len(records) > 0 and prefix in ('CLR','APP'):
            for r in records:
                if r.pin != uid and r.prefix_batch.startswith(prefix):
                    return -2
        records = self.searchResults({"%s" % self.key : uid})
        if len(records) > 1:
            # Can not happen, but anyway...
            raise ValueError("More than one record with uid %s" % uid)
        if len(records) == 0:
            return -1
        record = records[0]
        if record.student == "":
            record_data = {}
            for field in self.schema() + self.indexes():
                record_data[field] = getattr(record, field)
            # Add the updated data:
            record_data['student'] = student_id
            try:
                self.catalog_object(dict2ob(record_data), uid)
                return 1
            # BUG FIX: the bare name ConflictError is a class attribute and
            # is NOT visible in method scope, so the original line raised
            # NameError while handling a conflict; go through self instead.
            except self.ConflictError:
                return 2
        if record.student.upper() != student_id.upper():
            return 0
        if record.student.upper() == student_id.upper():
            return 2
        return -3

InitializeClass(PinTable)

###)

class PumeResultsTable(WAeUPTable): ###(
    """Catalog of PUME screening results, keyed by JAMB registration number."""

    meta_type = 'WAeUP PumeResults Tool'
    name = "portal_pumeresults"
    key = "jamb_reg_no"

    def __init__(self,name=None):
        # Fall back to the class-level catalog name.
        if name is None:
            name = self.name
        WAeUPTable.__init__(self, name)


InitializeClass(PumeResultsTable)

###)

class ApplicantsCatalog(WAeUPTable): ###(
    """Catalog of application records, keyed by JAMB registration number."""

    meta_type = 'WAeUP Applicants Catalog'
    name = "applicants_catalog"
    key = "reg_no"
    security = ClassSecurityInfo()
    # Columns (and their order) used when dumping the table to csv.
    export_keys = (
                   "reg_no",
                   "status",
                   "lastname",
                   "sex",
                   "date_of_birth",
                   "lga",
                   "email",
                   "phone",
                   "passport",
                   "entry_mode",
                   "pin",
                   "screening_type",
                   "registration_date",
                   "testdate",
                   "application_date",
                   "screening_date",
                   "faculty",
                   "department",
                   "course1",
                   "course2",
                   "course3",
                   "eng_score",
                   "subj1",
                   "subj1score",
                   "subj2",
                   "subj2score",
                   "subj3",
                   "subj3score",
                   "aggregate",
                   "course_admitted",
                   )
    def __init__(self,name=None):
        if name ==  None:
            name = self.name
        WAeUPTable.__init__(self, name)



    security.declareProtected(ModifyPortalContent,"new_importCSV")###(
    def new_importCSV(self,filename="JAMB_data",
                  schema_id="application",
                  layout_id="application_import",
                  mode='add'):
        """ import JAMB data """
        current = DateTime.DateTime().strftime("%d-%m-%y_%H_%M_%S")
        pm = self.portal_membership
        member = pm.getAuthenticatedMember()
        logger = logging.getLogger('WAeUPTables.ApplicantsCatalog.importCSV')
        lock_fn = "%s/import/%s_import_lock" % (i_home,filename)
        import_fn = "%s/import/%s.csv" % (i_home,filename)
        if mode not in ('add','edit'):
            logger.info("invalid mode: %s" % mode)
        if os.path.exists(lock_fn):
            logger.info("import of %(import_fn)s already in progress" % vars())
            return
        lock_file = open(lock_fn,"w")
        lock_file.write("%(current)s \n" % vars())
        lock_file.close()
        invalid_fn = "%s/import/%s_not_imported%s.csv" % (i_home,filename,current)
        duplicate_fn = "%s/import/%s_not_imported%s.csv" % (i_home,filename,current)
        stool = getToolByName(self, 'portal_schemas')
        ltool = getToolByName(self, 'portal_layouts')
        schema = stool._getOb(schema_id)
        if schema is None:
            em = 'No such schema %s' % schema_id
            logger.error(em)
            return
        for postfix in ('_import',''):
            layout_name = "%(layout_id)s%(postfix)s" % vars()
            if hasattr(ltool,layout_name):
                break
        layout = ltool._getOb(layout_name)
        if layout is None:
            em = 'No such layout %s' % layout_id
            logger.error(em)
            return
        try:
            csv_file = csv.DictReader(open(import_fn,"rb"))
        except:
            em = 'Error reading %s.csv' % filename
            logger.error(em)
            return
        d = self._import_new(csv_items,schema,layout,mode,logger)
        imported = []
        edited = []
        duplicates = []
        not_found = []
        if len(d['valid_records']) > 0:
            for record in d['valid_records']:
                #import pdb;pdb.set_trace()
                if mode == "add":
                    try:
                        self.addRecord(**dict(record.items()))
                        imported.append(**dict(record.items()))
                        logger.info("added %s" % record.items())
                    except ValueError:
                        dupplicate.append(**dict(record.items()))
                        logger.info("duplicate %s" % record.items())
                elif mode == "edit":
                    try:
                        self.modifyRecord(**dict(record.items()))
                        edited.append(**dict(record.items()))
                        logger.info("edited %s" % record.items())
                    except KeyError:
                        not_found.append(**dict(record.items()))
                        logger.info("not found %s" % record.items())
        invalid = d['invalid_records']
        for itype in ("imported","edited","not_found","duplicate","invalid"):
            outlist = locals[itype]
            if len(outlist):
                d = {}
                for k in outlist[0].keys():
                    d[k] = k
                outlist[0] = d
                outfile = open("file_name_%s" % itype,'w')
                csv.DictWriter(outfile,outlist[0].keys()).writerows(outlist)
                logger.info("wrote %(itype)s records to %(, written to %(not_imported_fn)s" % d)
###)

    security.declareProtected(ModifyPortalContent,"importCSV")###(
    def importCSV(self,filename="JAMB_data",
                  schema_id="application",
                  layout_id="application",
                  mode='add'):
        """ import JAMB data """
        stool = getToolByName(self, 'portal_schemas')
        ltool = getToolByName(self, 'portal_layouts')
        schema = stool._getOb(schema_id)
        if schema is None:
            em = 'No such schema %s' % schema_id
            logger.error(em)
            return
        layout = ltool._getOb(layout_id)
        if layout is None:
            em = 'No such layout %s' % layout_id
            logger.error(em)
            return
        logger = logging.getLogger('WAeUPTables.ApplicantsCatalog.importCSV')
        d = self._import_old(filename,schema,layout,mode,logger)
        if len(d['valid_records']) > 0:
            for record in d['valid_records']:
                #import pdb;pdb.set_trace()
                if mode == "add":
                    self.addRecord(**dict(record.items()))
                    logger.info("added %s" % record.items())
                elif mode == "edit":
                    self.modifyRecord(**dict(record.items()))
                    logger.info("edited %s" % record.items())
                else:
                    logger.info("invalid mode: %s" % mode)
        logger.info("%(mode)sed %(imported)d records, invalid written to %(not_imported_fn)s" % d)
###)

# Register the class's Zope security declarations.
InitializeClass(ApplicantsCatalog)

###)

class StudentsCatalog(WAeUPTable): ###(
    """Flat catalog with one record per student, denormalised from the
    student's CPS sub-documents (application, clearance, personal,
    study_course).  Kept in sync by the bulk re-index methods and by
    notify_event_listener, which CPS calls on content events."""

    security = ClassSecurityInfo()

    meta_type = 'WAeUP Students Catalog'
    # Catalog id inside the portal.
    name = "students_catalog"
    # Unique record key: the student folder id.
    key = "id"
    # Maps a sub-document portal_type to the id of the sub-object inside
    # the student folder and the catalog fields sourced from its content.
    affected_types = {   ###(
                      'StudentApplication':
                      {'id': 'application',
                       'fields':
                       ('jamb_reg_no',
                        'entry_mode',
                        #'entry_level',
                        'entry_session',
                       )
                      },
                      'StudentClearance':
                      {'id': 'clearance',
                       'fields':
                       ('matric_no',
                        'lga',
                       )
                      },
                      'StudentPersonal':
                      {'id': 'personal',
                       'fields':
                       ('name',
                        'sex',
                        'perm_address',
                        'email',
                        'phone',
                       )
                      },
                      'StudentStudyCourse':
                      {'id': 'study_course',
                       'fields':
                       ('course', # study_course
                        'faculty', # from certificate
                        'department', # from certificate
                        'end_level', # from certificate
                        'level', # current_level
                        'mode',  # current_mode
                        'session', # current_session
                        'verdict', # current_verdict
                       )
                      },
                     }
    ###)

    def __init__(self,name=None):
        """Initialise the underlying ZCatalog under *name*
        (defaults to the class-level catalog id)."""
        if name ==  None:
            name = self.name
        WAeUPTable.__init__(self, name)
        return

    def manage_catalogClear(self, REQUEST=None, RESPONSE=None, URL1=None):
        """ clears the whole enchilada """
        self._catalog.clear()

        if REQUEST and RESPONSE:
            RESPONSE.redirect(
              URL1 +
              '/manage_catalogAdvanced?manage_tabs_message=Catalog%20Cleared')

    def manage_catalogReindex(self, REQUEST, RESPONSE, URL1): ###(
        """ clear the catalog, then re-index everything """

        elapse = time.time()
        c_elapse = time.clock()

        # Only report progress when the admin-configured threshold is set.
        pgthreshold = self._getProgressThreshold()
        handler = (pgthreshold > 0) and ZLogHandler(pgthreshold) or None
        self.refreshCatalog(clear=1, pghandler=handler)

        elapse = time.time() - elapse
        c_elapse = time.clock() - c_elapse

        # Backticks are Python 2 repr() syntax.
        RESPONSE.redirect(
            URL1 +
            '/manage_catalogAdvanced?manage_tabs_message=' +
            urllib.quote('Catalog Updated \n'
                         'Total time: %s\n'
                         'Total CPU time: %s' % (`elapse`, `c_elapse`)))
    ###)

    def fill_certificates_dict(self): ###(
        """Return a dict keyed by certificate id with the faculty,
        department and end_level derived from each certificate's
        path and content; used as a lookup cache by the
        get_from_doc_* accessors below."""
        certificates_brains = self.portal_catalog(portal_type ='Certificate')
        d = {}
        for cb in certificates_brains:
            certificate_doc = cb.getObject().getContent()
            # Path layout assumed: .../<faculty>/<department>/<certificate>/...
            cb_path = cb.getPath().split('/')
            ld = {}
            ld['faculty'] = cb_path[-4]
            ld['department'] = cb_path[-3]
            ld['end_level'] = getattr(certificate_doc,'end_level','999')
            # NOTE: cb.getId is a brain attribute (a string), not a call.
            d[cb.getId] = ld
        return d
    ###)

    # NOTE(review): the get_from_doc_* methods below use a mutable default
    # argument (cached_data={}).  The dict is only read, never mutated, so
    # the shared-default pitfall is benign here, but callers should pass
    # the dict from fill_certificates_dict explicitly.

    def get_from_doc_department(self,doc,cached_data={}): ###(
        "return the students department"
        if doc is None:
            return None
        if cached_data.has_key(doc.study_course):
            return cached_data[doc.study_course]['department']
        # Cache miss: resolve the certificate via the portal catalog.
        certificate_res = self.portal_catalog(id = doc.study_course)
        if len(certificate_res) != 1:
            return None
        return certificate_res[0].getPath().split('/')[-3]

    def get_from_doc_faculty(self,doc,cached_data={}):
        "return the students faculty"
        if doc is None:
            return None
        if cached_data.has_key(doc.study_course):
            return cached_data[doc.study_course]['faculty']
        certificate_res = self.portal_catalog(id = doc.study_course)
        if len(certificate_res) != 1:
            return None
        return certificate_res[0].getPath().split('/')[-4]

    def get_from_doc_end_level(self,doc,cached_data={}):
        "return the students end_level"
        if doc is None:
            return None
        if cached_data.has_key(doc.study_course):
            return cached_data[doc.study_course]['end_level']
        certificate_res = self.portal_catalog(id = doc.study_course)
        if len(certificate_res) != 1:
            return None
        return getattr(certificate_res[0].getObject().getContent(),'end_level','unknown')

    def get_from_doc_level(self,doc,cached_data={}):
        "return the students level"
        if doc is None:
            return None
        return getattr(doc,'current_level',None)

    def get_from_doc_mode(self,doc,cached_data={}):
        "return the students mode"
        if doc is None:
            return None
        cm = getattr(doc,'current_mode',None)
        return cm


    def get_from_doc_session(self,doc,cached_data={}):
        "return the students current_session"
        if doc is None:
            return None
        return getattr(doc,'current_session',None)

    def get_from_doc_entry_session(self,doc,cached_data={}):
        """Return the students entry_session; when the document has no
        usable two-character value, derive it from the first digit of
        the JAMB registration number (digits below 8 map to sessions
        200x, 8 and 9 to 199x); '-1' signals an unparsable reg number."""
        if doc is None:
            return None
        es = getattr(doc,'entry_session',None)
        if es is not None and len(es) == 2:
            return es
        try:
            digit = int(doc.jamb_reg_no[0])
        except:
            return "-1"
        if digit < 8:
            return "0%c" % doc.jamb_reg_no[0]
        return "9%c" % doc.jamb_reg_no[0]

    def get_from_doc_course(self,doc,cached_data={}):
        "return the students study_course"
        if doc is None:
            return None
        return getattr(doc,'study_course',None)

    def get_from_doc_name(self,doc,cached_data={}):
        "return the students name from the personal"
        if doc is None:
            return None
        return "%s %s %s" % (doc.firstname,doc.middlename,doc.lastname)

    def get_from_doc_verdict(self,doc,cached_data={}):
        "return the students current_verdict"
        if doc is None:
            return None
        return getattr(doc,'current_verdict',None)
    ###)

    def reindexIndex(self, name, REQUEST,pghandler=None): ###(
        """Re-index only the given index name(s) for every Student:
        map each requested field back to the sub-document type it is
        sourced from, fetch that document per student, and modifyRecord
        with the recomputed values."""
        if isinstance(name, str):
            name = (name,)
        # reindextypes: portal_type -> list of requested fields of that type.
        # reindex_special: fields not sourced from a sub-document.
        reindextypes = {}
        reindex_special = []
        for n in name:
            if n in ("review_state","registered_courses"):
                reindex_special.append(n)
            else:
                for pt in self.affected_types.keys():
                    if n in self.affected_types[pt]['fields']:
                        if reindextypes.has_key(pt):
                            reindextypes[pt].append(n)
                        else:
                            reindextypes[pt]= [n]
                        break
        # Certificate-derived fields need the certificates lookup cache.
        cached_data = {}
        if set(name).intersection(set(('faculty','department','end_level'))):
            cached_data = self.fill_certificates_dict()
        students = self.portal_catalog(portal_type="Student")
        if hasattr(self,'portal_catalog_real'):
            aq_portal = self.portal_catalog_real.evalAdvancedQuery
        else:
            aq_portal = self.portal_catalog.evalAdvancedQuery
        num_objects = len(students)
        if pghandler:
            pghandler.init('Refreshing catalog: %s' % self.absolute_url(1), num_objects)
        # Types whose absence on a student means the data must be taken
        # from the legacy returning_import table instead.
        noattr = set(('StudentClearance','StudentPersonal')) & set(reindextypes.keys())
        #import pdb;pdb.set_trace()
        for i in xrange(num_objects):
            if pghandler: pghandler.report(i)
            student_brain = students[i]
            student_object = student_brain.getObject()
            # query = Eq('path',student_brain.getPath())
            # sub_brains_list = aq_portal(query)
            # sub_brains = {}
            # for sub_brain in sub_brains_list:
            #     sub_brains[sub_brain.portal_type] = sub_brain
            # student_path = student_brain.getPath()
            data = {}
            modified = False
            sid = data['id'] = student_brain.getId
            if reindex_special and 'review_state' in reindex_special:
                modified = True
                data['review_state'] = student_brain.review_state
            sub_objects = False
            for pt in reindextypes.keys():
                modified = True
                try:
                    doc = getattr(student_object,self.affected_types[pt]['id']).getContent()
                    #doc = sub_brains[pt].getObject().getContent()
                    # path = "%s/%s" % (student_path,self.affected_types[pt]['id'])
                    # doc = self.unrestrictedTraverse(path).getContent()
                    sub_objects = True
                except:
                    # Student has no such sub-object; skip this type.
                    continue
                for field in set(name).intersection(self.affected_types[pt]['fields']):
                    # Prefer a dedicated accessor when one exists.
                    if hasattr(self,'get_from_doc_%s' % field):
                        data[field] = getattr(self,'get_from_doc_%s' % field)(doc,
                                                                              cached_data=cached_data)
                    else:
                        data[field] = getattr(doc,field)
            if not sub_objects and noattr:
                # Legacy student: fall back to the returning_import table.
                import_res = self.returning_import(id = sid)
                if not import_res:
                    continue
                import_record = import_res[0]
                data['matric_no'] = import_record.matric_no
                # 'sex' is stored as a boolean: True for female.
                data['sex'] = import_record.Sex == 'F'
                data['name'] = "%s %s %s" % (import_record.Firstname,
                                             import_record.Middlename,
                                             import_record.Lastname)
                data['jamb_reg_no'] = import_record.Entryregno
            if reindex_special and 'registered_courses' in reindex_special:
                try:
                    study_course = getattr(student_object,"study_course")
                    level_ids = study_course.objectIds()
                except:
                    continue
                if not level_ids:
                    continue
                modified = True
                # Use the highest (lexicographically last) level's courses;
                # strip the '_co' carry-over suffix from course ids.
                level_ids.sort()
                course_ids = getattr(study_course,level_ids[-1]).objectIds()
                courses = []
                for c in course_ids:
                    if c.endswith('_co'):
                        courses.append(c[:-3])
                    else:
                        courses.append(c)
                data['registered_courses'] = courses
            if modified:
                self.modifyRecord(**data)
        if pghandler: pghandler.finish()
    ###)

    def refreshCatalog(self, clear=0, pghandler=None): ###(
        """ re-index everything we can find """
        # NOTE(review): students_folder is unused here; looks like leftover.
        students_folder = self.portal_url.getPortalObject().campus.students
        if clear:
            self._catalog.clear()
        students = self.portal_catalog(portal_type="Student")
        num_objects = len(students)
        cached_data = self.fill_certificates_dict()
        if pghandler:
            pghandler.init('Refreshing catalog: %s' % self.absolute_url(1), num_objects)
        for i in xrange(num_objects):
            if pghandler: pghandler.report(i)
            student_brain = students[i]
            spath = student_brain.getPath()
            student_object = student_brain.getObject()
            data = {}
            sid = data['id'] = student_brain.getId
            data['review_state'] = student_brain.review_state
            sub_objects = False
            for pt in self.affected_types.keys():
                # NOTE(review): 'modified' is assigned but never read in
                # this method (unlike reindexIndex); leftover code.
                modified = True
                try:
                    doc = getattr(student_object,self.affected_types[pt]['id']).getContent()
                    sub_objects = True
                except:
                    #from pdb import set_trace;set_trace()
                    continue
                for field in self.affected_types[pt]['fields']:
                    if hasattr(self,'get_from_doc_%s' % field):
                        data[field] = getattr(self,'get_from_doc_%s' % field)(doc,
                                                                              cached_data=cached_data)
                    else:
                        data[field] = getattr(doc,field,None)
            if not sub_objects:
                # No sub-documents at all: legacy student, use import table.
                import_res = self.returning_import(id = sid)
                if not import_res:
                    continue
                import_record = import_res[0]
                data['matric_no'] = import_record.matric_no
                data['sex'] = import_record.Sex == 'F'
                data['name'] = "%s %s %s" % (import_record.Firstname,
                                             import_record.Middlename,
                                             import_record.Lastname)
                data['jamb_reg_no'] = import_record.Entryregno
            else:
                study_course = getattr(student_object,'study_course',None)
                # 'level' was filled above from StudentStudyCourse fields.
                current_level = data.get('level',None)
                data['registered_courses'] = []
                if study_course and current_level and current_level in study_course.objectIds():
                    level_obj = getattr(study_course,current_level)
                    courses = []
                    for c in level_obj.objectIds():
                        # Strip the '_co' carry-over suffix.
                        if c.endswith('_co'):
                            courses.append(c[:-3])
                        else:
                            courses.append(c)
                    data['registered_courses'] = courses
            self.addRecord(**data)
        if pghandler: pghandler.finish()
    ###)

    security.declarePrivate('notify_event_listener') ###(
    def notify_event_listener(self,event_type,object,infos):
        """Keep the catalog in sync with CPS content events: student
        creation, workflow transitions, course-result add/delete, and
        sub-document modification."""
        if not infos.has_key('rpath'):
            return
        pt = getattr(object,'portal_type',None)
        mt = getattr(object,'meta_type',None)
        students_catalog = self
        data = {}
        # Workflow transition on a student proxy: update review_state only.
        if pt == 'Student' and\
           mt == 'CPS Proxy Folder' and\
           event_type.startswith('workflow'):
            data['id'] = object.getId()
            data['review_state'] = self.portal_workflow.getInfoFor(object,'review_state',None)
            students_catalog.modifyRecord(**data)
            return
        rpl = infos['rpath'].split('/')
        # New student proxy: create an (almost empty) record for it.
        if pt == 'Student' and mt == 'CPS Proxy Folder'\
           and event_type == "sys_add_object":
            student_id = object.id
            try:
                self.addRecord(id = student_id)
            except ValueError:
                # Record already exists.
                pass
            return
        elif pt == 'StudentCourseResult' and mt == 'CPS Proxy Folder':
            # A course result was added/removed: maintain the student's
            # registered_courses list.
            if event_type not in ("sys_add_object","sys_del_object"):
                return
            # Only track the current session (and the hard-coded 2006/2007
            # session -- NOTE(review): looks like a migration leftover).
            level_session = getattr(object.aq_parent.getContent(),'session','unknown')
            if level_session not in (self.getSessionId()[-2:],'2006/2007'):
                return
            course_id = object.getId()
            if course_id.endswith('_co'):
                course_id = course_id[:-3]
            student_id = object.absolute_url_path().split('/')[-4]
            res = students_catalog(id = student_id)
            if not res:
                return
            student_rec = res[0]
            registered_courses = getattr(student_rec,'registered_courses',None)
            if not registered_courses:
                registered_courses = []
            #import pdb;pdb.set_trace()
            if event_type == "sys_add_object":
                if course_id not in registered_courses:
                    registered_courses.append(course_id)
                else:
                    return
            elif registered_courses and event_type == "sys_del_object":
                removed = False
                while course_id in registered_courses:
                    removed = True
                    registered_courses.remove(course_id)
                if not removed:
                    return
            data['id'] = student_id
            data['registered_courses'] = registered_courses
            self.modifyRecord(record = student_rec, **data)
            return
        if pt not in self.affected_types.keys():
            return
        # NOTE(review): ('sys_modify_object') is a plain string, not a
        # tuple, so this is a substring test; it works for the exact event
        # name but would also accept substrings like 'modify'.  Probably
        # meant ('sys_modify_object',).
        if event_type not in ('sys_modify_object'):
            return
        # Only react to the real document, not its proxy.
        if mt == 'CPS Proxy Folder':
            return
        for field in self.affected_types[pt]['fields']:
            if hasattr(self,'get_from_doc_%s' % field):
                data[field] = getattr(self,'get_from_doc_%s' % field)(object)
            else:
                data[field] = getattr(object,field)
        # rpath layout assumed: campus/students/<student_id>/...
        data['id'] = rpl[2]
        self.modifyRecord(**data)
    ###)


# Register the Zope 2 security declarations made on the class above.
InitializeClass(StudentsCatalog)

###)

class CoursesCatalog(WAeUPTable): ###(
    """Flat catalog of one record per Course, keyed by course code,
    denormalised from the course document plus faculty/department
    taken from the object's path."""

    security = ClassSecurityInfo()

    meta_type = 'WAeUP Courses Catalog'
    name =  "courses_catalog"
    key = "code"
    def __init__(self,name=None):
        """Initialise the underlying ZCatalog under *name*
        (defaults to the class-level catalog id)."""
        if name ==  None:
            name =  self.name
        WAeUPTable.__init__(self, name)

    def manage_catalogReindex(self, REQUEST, RESPONSE, URL1): ###(
        """ clear the catalog, then re-index everything """

        elapse = time.time()
        c_elapse = time.clock()

        # Only report progress when the admin-configured threshold is set.
        pgthreshold = self._getProgressThreshold()
        handler = (pgthreshold > 0) and ZLogHandler(pgthreshold) or None
        self.refreshCatalog(clear=1, pghandler=handler)

        elapse = time.time() - elapse
        c_elapse = time.clock() - c_elapse

        # Backticks are Python 2 repr() syntax.
        RESPONSE.redirect(
            URL1 +
            '/manage_catalogAdvanced?manage_tabs_message=' +
            urllib.quote('Catalog Updated \n'
                         'Total time: %s\n'
                         'Total CPU time: %s' % (`elapse`, `c_elapse`)))
    ###)

    def reindexIndex(self, name, REQUEST,pghandler=None): ###(
        """Re-index only the given index name(s) for every Course,
        reading the values from each course document."""
        if isinstance(name, str):
            name = (name,)
        courses = self.portal_catalog(portal_type="Course")
        num_objects = len(courses)
        if pghandler:
            pghandler.init('Refreshing catalog: %s' % self.absolute_url(1), num_objects)
        for i in xrange(num_objects):
            if pghandler: pghandler.report(i)
            course_brain = courses[i]
            course_object = course_brain.getObject()
            # Path layout assumed: .../<faculty>/<department>/<course>/...
            pl = course_brain.getPath().split('/')
            data = {}
            cid = data[self.key] = course_brain.getId
            data['faculty'] = pl[-4]
            data['department'] = pl[-3]
            doc = course_object.getContent()
            for field in name:
                # key/faculty/department are path-derived, already set above.
                if field not in (self.key,'faculty','department'):
                    data[field] = getattr(doc,field)
            self.modifyRecord(**data)
        if pghandler: pghandler.finish()
    ###)

    def refreshCatalog(self, clear=0, pghandler=None): ###(
        """ re-index everything we can find """
        if clear:
            self._catalog.clear()
        courses = self.portal_catalog(portal_type="Course")
        num_objects = len(courses)
        if pghandler:
            pghandler.init('Refreshing catalog: %s' % self.absolute_url(1), num_objects)
        #from pdb import set_trace;set_trace()
        for i in xrange(num_objects):
            if pghandler: pghandler.report(i)
            course_brain = courses[i]
            course_doc = course_brain.getObject().getContent()
            pl = course_brain.getPath().split('/')
            data = {}
            for field in self.schema():
                data[field] = getattr(course_doc,field,None)
            data[self.key] = course_brain.getId
            # faculty/department are the two path segments right below
            # the 'academics' folder.
            ai = pl.index('academics')
            data['faculty'] = pl[ai +1]
            data['department'] = pl[ai +2]
            if clear:
                self.addRecord(**data)
            else:
                self.modifyRecord(**data)
        if pghandler: pghandler.finish()
    ###)

    security.declarePrivate('notify_event_listener') ###(
    def notify_event_listener(self,event_type,object,infos):
        """Keep the catalog in sync with CPS content events on Course
        objects: add/modify/delete."""
        if not infos.has_key('rpath'):
            return
        pt = getattr(object,'portal_type',None)
        mt = getattr(object,'meta_type',None)
        if pt != 'Course':
            return
        data = {}
        rpl = infos['rpath'].split('/')
        if event_type not in ("sys_add_object","sys_modify_object","sys_del_object"):
            return
        course_id = object.getId()
        data[self.key] = course_id
        # New course proxy: create the record, then fill it from the doc.
        if event_type == "sys_add_object" and mt == 'CPS Proxy Folder':
            try:
                self.addRecord(**data)
            except ValueError:
                # Record already exists.
                return
            course_id = object.getId()
            doc = object.getContent()
            if doc is None:
                return
            for field in self.schema():
                data[field] = getattr(doc,field,None)
            data[self.key] = course_id
            ai = rpl.index('academics')
            data['faculty'] = rpl[ai +1]
            data['department'] = rpl[ai +2]
            self.modifyRecord(**data)
            return
        if event_type == "sys_del_object":
            self.deleteRecord(course_id)
            return
        # Modification arrives on the real document (meta_type 'Course'),
        # whose parent proxy id is the course code.
        if event_type == "sys_modify_object" and mt == 'Course':
            #from pdb import set_trace;set_trace()
            for field in self.schema():
                data[field] = getattr(object,field,None)
            course_id = object.aq_parent.getId()
            data[self.key] = course_id
            ai = rpl.index('academics')
            data['faculty'] = rpl[ai +1]
            data['department'] = rpl[ai +2]
            self.modifyRecord(**data)
    ###)


# Register the Zope 2 security declarations made on the class above.
InitializeClass(CoursesCatalog)
###)

class CourseResults(WAeUPTable): ###(
    """Catalog of student course results, keyed by the composite string
    '<student_id>|<level_id>|<course_id>'.  Records may be created in
    the catalog first (queue_status ADDING_SHEDULED) and the matching
    CPS objects materialised later by process_queue; self._queue holds
    the keys of records still awaiting object creation."""

    security = ClassSecurityInfo()

    meta_type = 'WAeUP Results Catalog'
    name = "course_results"
    key = "key" #student_id + level + course_id
    def __init__(self,name=None):
        """Initialise the underlying ZCatalog and the pending-keys queue."""
        if name ==  None:
            name = self.name
        WAeUPTable.__init__(self, name)
        self._queue = []

    def addRecord(self, **data): ###(
        """add one record"""

        # Composite uid built from the mandatory data keys.
        uid = key = "%(student_id)s|%(level_id)s|%(course_id)s" % data
        if key in self._queue:
            # Already scheduled; nothing to do.
            return uid
        data['queue_status'] = ADDING_SHEDULED
        data['%s' % self.key] = uid
        res = self.searchResults({"%s" % self.key : uid})
        if len(res) > 0:
            raise ValueError("More than one record with uid %s" % uid)
        self.catalog_object(dict2ob(data), uid=uid)
        # _queue may be missing on instances created before it was added.
        if not hasattr(self,'_queue'):
            self._queue = []
        self._queue.append(key)
        # Plain list mutation: tell ZODB the instance changed.
        self._p_changed = 1
        return uid
    ###)

    # def _p_resolveConflict(self, oldstate, committed, newstate):
    #     # Apply the changes made in going from old to newstate to
    #     # committed

    #     # Note that in the case of undo, the olddata is the data for
    #     # the transaction being undone and newdata is the data for the
    #     # transaction previous to the undone transaction.

    #     # Find the conflict policy on the new state to make sure changes
    #     # to it will be applied

    #     # Committed is always the currently committed data.
    #     import pdb;pdb.set_trace()
    #     oldstate_data  =  oldstate['_queue']
    #     committed_data = committed['_queue']
    #     newstate_data  =  newstate['_queue']

    #     # Merge newstate changes into committed
    #     for uid, new in newstate_data.items():

    #         # Decide if this is a change
    #         old = oldstate_data.get(uid)
    #         current = committed_data.get(uid)


    def addMultipleRecords(self, records): ###(
        """add many records"""
        added_keys = []
        for data in records:
            uid = key = "%(student_id)s|%(level_id)s|%(course_id)s" % data
            added_keys.append(key)
            # NOTE(review): this aborts the whole batch on the first
            # already-queued key; 'continue' was probably intended.
            if key in self._queue:
                return uid
            data['queue_status'] = ADDING_SHEDULED
            data['%s' % self.key] = uid
            res = self.searchResults({"%s" % self.key : uid})
            if len(res) > 0:
                raise ValueError("More than one record with uid %s" % uid)
            self.catalog_object(dict2ob(data), uid=uid)
        # NOTE(review): when _queue was just created it already equals
        # added_keys, so the extend below duplicates every key.
        if not hasattr(self,'_queue'):
            self._queue = added_keys
        self._queue.extend(added_keys)
        self._p_changed = 1
        return uid
    ###)

    def deleteRecord(self, uid): ###(
        """Remove the record and drop its key from the pending queue."""
        self.uncatalog_object(uid)
        if uid in self._queue:
            self._queue.remove(uid)
    ###)

    def updateCourseResults(self,student_id,level_id,portal_catalog_results,course_results): ###(
        """Catalog a record (queue_status OBJECT_CREATED) for every
        StudentCourseResult object of this student/level that has no
        record yet, then return the refreshed result set."""
        # query = Eq('path',"%s/campus/students/%s/study_course/%s" %
        #            (self.portal_url.getPortalPath(),
        #             student_id,
        #             level_id)) &\
        #             Eq('portal_type', "StudentCourseResult")
        # level_results = self.portal_catalog_real.evalAdvancedQuery(query)
        # level_results = [r for r in course_results
        #                  if level_id in r.relative_path.split('/')]
        course_results_ids = [cr.getId for cr in course_results]
        for r in portal_catalog_results:
            if r.getId in course_results_ids:
                continue
            course_result_doc = r.getObject().getContent()
            data = {}
            course_id = r.getId
            for field in self.schema():
                data[field] = getattr(course_result_doc,field,'')
            # vars() picks up student_id/level_id/course_id from locals.
            data['key'] = key = "%(student_id)s|%(level_id)s|%(course_id)s" % vars()
            data['student_id'] = student_id
            data['level_id'] = level_id
            data['queue_status'] = OBJECT_CREATED
            data['code'] = course_id
            self.catalog_object(dict2ob(data), uid=key)
        query = Eq('student_id',student_id) & Eq('level_id', level_id)
        return self.course_results.evalAdvancedQuery(query)
    ###)

    def getCourses(self,student_id,level_id): ###(
        """Return (credits, carry_overs, normal) for one student level:
        total credits, the list of carry-over course dicts and the list
        of normal course dicts, each sorted by semester."""
        level_path = "%s/campus/students/%s/study_course/%s" % (self.portal_url.getPortalPath(),
                                                                student_id,
                                                                level_id)
        # portal_catalog_query = Eq('path',level_path) &\
        #                        Eq('portal_type', "StudentCourseResult")
        # portal_catalog_results = self.portal_catalog_real.evalAdvancedQuery(portal_catalog_query)
        portal_catalog_results = self.portal_catalog(path = level_path,
                                                     portal_type = "StudentCourseResult")
        query = Eq('student_id',student_id) & Eq('level_id', level_id)
        course_results = self.course_results.evalAdvancedQuery(query)
        # A count mismatch means records are missing; backfill them.
        if len(course_results) != len(portal_catalog_results):
            course_results = self.updateCourseResults(student_id,
                                                      level_id,
                                                      portal_catalog_results,
                                                      course_results)
        carry_overs = []
        normal = []
        credits = 0
        for brain in course_results:
            d = {}
            credits += int(brain.credits)
            for field in self.schema():
                d[field] = getattr(brain,field,'')
            # True while the backing object is still awaiting creation.
            d['sheduled'] = brain.queue_status == ADDING_SHEDULED
            d['coe'] = 'Elective'
            if brain.core_or_elective:
                d['coe'] = 'Core'
            id = code = d['id'] = brain.code
            is_carry_over = False
            # '_co' suffix marks a carry-over course.
            if code.endswith('_co'):
                is_carry_over = True
                code = code[:-3]
            d['code'] = code
            d['title'] = self.courses_catalog.evalAdvancedQuery(Eq('code',code))[0].title
            if is_carry_over:
                # Carry-overs are always treated as core.
                d['coe'] = 'Core'
                carry_overs.append(d)
            else:
                normal.append(d)
        normal.sort(cmp=lambda x,y: cmp(x['semester'], y['semester']))
        carry_overs.sort(cmp=lambda x,y: cmp(x['semester'], y['semester']))
        return credits,carry_overs,normal
    ###)

    def addObject(self,record): ###(
        """Materialise the StudentCourseResult object described by a
        scheduled catalog record: create it under the student's level,
        open its workflow and copy the record fields into its content."""
        key = record.key
        student_id,level_id,course_id = key.split('|')
        level = getattr(getattr(self.portal_url.getPortalObject().campus.students,student_id).study_course,level_id)
        cr_id = level.invokeFactory('StudentCourseResult', course_id)
        course_result = getattr(level,cr_id)
        self.portal_workflow.doActionFor(course_result,'open')
        d = {}
        for field in self.schema():
            d[field] = getattr(record,field,'')
        course_result.getContent().edit(mapping=d)
    ###)

    security.declareProtected(ModifyPortalContent,"process_queue") ###(
    def process_queue(self,limit=None):
        """adds objects and removes them from the queue.
        If limit is specified, at most (limit) events are removed.
        Returns (processed_count, remaining_queue_length).
        """
        if not hasattr(self,'_queue'):
            return 0
        queue= self._queue
        if not limit or len(queue) <= limit:
            keys = self._queue[:]
        else:
            keys = queue[:limit]
        if not keys:
            # Empty queue: sweep for any record still marked as scheduled.
            records = self.evalAdvancedQuery(Eq('queue_status',ADDING_SHEDULED))
        else:
            records = self.evalAdvancedQuery(In("%s" % self.key,keys))
        for record in records:
            if record.queue_status == OBJECT_CREATED:
                continue
            self.addObject(record)
            data = {}
            data['key'] = record.key
            data['queue_status'] = OBJECT_CREATED
            self.modifyRecord(**data)
        count = 0
        for key in keys:
            count +=1
            if key in self._queue:
                self._queue.remove(key)
        self._p_changed = 1
        return count,len(self._queue)
    ###)

    def manage_catalogClear(self, REQUEST=None, RESPONSE=None, URL1=None): ###(
        """ clears the whole enchilada """
        self._catalog.clear()

        if REQUEST and RESPONSE:
            RESPONSE.redirect(
              URL1 +
              '/manage_catalogAdvanced?manage_tabs_message=Catalog%20Cleared')
    ###)

    def manage_catalogReindex(self, REQUEST, RESPONSE, URL1): ###(
        """ clear the catalog, then re-index everything """

        elapse = time.time()
        c_elapse = time.clock()

        # Only report progress when the admin-configured threshold is set.
        pgthreshold = self._getProgressThreshold()
        handler = (pgthreshold > 0) and ZLogHandler(pgthreshold) or None
        self.refreshCatalog(clear=1, pghandler=handler)

        elapse = time.time() - elapse
        c_elapse = time.clock() - c_elapse

        # Backticks are Python 2 repr() syntax.
        RESPONSE.redirect(
            URL1 +
            '/manage_catalogAdvanced?manage_tabs_message=' +
            urllib.quote('Catalog Updated \n'
                         'Total time: %s\n'
                         'Total CPU time: %s' % (`elapse`, `c_elapse`)))
    ###)

    def refreshCatalog(self, clear=1, pghandler=None): ###(
        """ re-index everything we can find """
        # NOTE(review): students_folder is unused here; looks like leftover.
        students_folder = self.portal_url.getPortalObject().campus.students
        if clear:
            self._catalog.clear()
        course_results = self.portal_catalog(portal_type="StudentCourseResult")
        num_objects = len(course_results)
        if pghandler:
            pghandler.init('Refreshing catalog: %s' % self.absolute_url(1), num_objects)
        #import pdb;pdb.set_trace()
        for i in xrange(num_objects):
            if pghandler:
                pghandler.report(i)
            course_result_brain = course_results[i]
            # Path layout assumed:
            # .../students/<student_id>/study_course/<level_id>/<course_id>
            path_list = course_result_brain.getPath().split('/')
            course_result_doc = course_result_brain.getObject().getContent()
            data = {}
            level_id = path_list[-2]
            course_id = path_list[-1]
            student_id = path_list[-4]
            for field in self.schema():
                data[field] = getattr(course_result_doc,field,'')
            data['key'] = key = "%(student_id)s|%(level_id)s|%(course_id)s" % vars()
            data['student_id'] = student_id
            data['level_id'] = level_id
            data['queue_status'] = OBJECT_CREATED
            self.catalog_object(dict2ob(data), uid=key)
        if pghandler: pghandler.finish()
    ###)

    security.declarePrivate('notify_event_listener') ###(
    def notify_event_listener(self,event_type,object,infos):
        """Keep the catalog in sync with CPS content events: course
        result modifications and study-level deletions."""
        if not infos.has_key('rpath'):
            return
        pt = getattr(object,'portal_type',None)
        mt = getattr(object,'meta_type',None)
        data = {}
        rpl = infos['rpath'].split('/')
        # Only react to real documents, not their proxies.
        if mt == 'CPS Proxy Folder':
            return
        if pt == 'StudentCourseResult' and event_type == "sys_modify_object":
            # rpath layout: .../<student_id>/study_course/<level_id>/<course_id>/<doc>
            data["%s" % self.key] = uid = "%s|%s|%s" % (rpl[-5],rpl[-3],rpl[-2])
            records = self.searchResults({"%s" % self.key : uid})
            if len(records) > 1:
                # Can not happen, but anyway...
                raise ValueError("More than one record with uid %s" % uid)
            if len(records) == 0:
                raise KeyError("No record for uid %s" % uid)
            record = records[0]
            for field in ('core_or_elective','score'):
                value = getattr(object,field,None)
                data[field] = value
            # NOTE(review): record is passed positionally here, unlike the
            # keyword form modifyRecord(record=...) used elsewhere; confirm
            # against the base-class signature (not visible in this chunk).
            try:
                self.modifyRecord(record,**data)
            except KeyError:
                pass
        if pt == 'StudentStudyLevel' and event_type == "sys_del_object":
            #import pdb;pdb.set_trace()
            # A whole level was removed: drop all its course records.
            student_id = rpl[-3]
            level_id = rpl[-1]
            res = self.searchResults(student_id = student_id,
                                     level_id = level_id)
            for cr in res:
                self.deleteRecord(cr.key)
    ###)

# Register the Zope 2 security declarations made on the class above.
InitializeClass(CourseResults)
###)

class OnlinePaymentsImport(WAeUPTable): ###(
    """Catalog table holding imported online payment transactions,
    one record per transaction, keyed by order_id."""

    meta_type = 'WAeUP Online Payment Transactions'
    name = "online_payments_import"
    key = "order_id"

    def __init__(self, name=None):
        """Initialise the underlying ZCatalog under *name*
        (defaults to the class-level catalog id)."""
        # Idiom fix: compare to None with 'is', not '=='.
        if name is None:
            name = self.name
        WAeUPTable.__init__(self, name)


# Register the Zope 2 security declarations made on the class above.
InitializeClass(OnlinePaymentsImport)
###)

class ReturningImport(WAeUPTable): ###(
    """Catalog table of imported legacy (returning) student data,
    one record per student, keyed by matriculation number."""

    meta_type = 'Returning Import Table'
    name = "returning_import"
    key = "matric_no"

    def __init__(self, name=None):
        """Initialise the underlying ZCatalog under *name*
        (defaults to the class-level catalog id)."""
        # Idiom fix: compare to None with 'is', not '=='.
        if name is None:
            name = self.name
        WAeUPTable.__init__(self, name)


# Register the Zope 2 security declarations made on the class above.
InitializeClass(ReturningImport)
###)

class ResultsImport(WAeUPTable): ###(
    """Catalog table of imported result data, keyed by a composite
    'key' field."""

    meta_type = 'Results Import Table'
    name = "results_import"
    key = "key"

    def __init__(self, name=None):
        """Initialise the underlying ZCatalog under *name*
        (defaults to the class-level catalog id)."""
        # Idiom fix: compare to None with 'is', not '=='.
        if name is None:
            name = self.name
        WAeUPTable.__init__(self, name)


# Register the Zope 2 security declarations made on the class above.
InitializeClass(ResultsImport)

###)

class PaymentsCatalog(WAeUPTable): ###(
    """Catalog table for payment records, keyed by id.

    NOTE(review): name is "students_catalog", which is identical to
    StudentsCatalog.name and looks like a copy-paste slip (expected
    something like "payments_catalog").  Left unchanged because the
    catalog id is part of the external interface (it determines the
    ZODB object id this table is registered under) -- confirm before
    renaming.
    """

    meta_type = 'WAeUP Payments Catalog'
    name = "students_catalog"
    key = "id"

    def __init__(self, name=None):
        """Initialise the underlying ZCatalog under *name*
        (defaults to the class-level catalog id)."""
        # Idiom fix: compare to None with 'is', not '=='.
        if name is None:
            name = self.name
        WAeUPTable.__init__(self, name)


# Register the Zope 2 security declarations made on the class above.
InitializeClass(PaymentsCatalog)

###)

# BBB: backward-compatibility alias -- old code and persisted ZODB objects
# refer to the misspelled name 'AccomodationTable'; keep it pointing at
# AccommodationTable (defined earlier in this file, outside this chunk).
AccomodationTable = AccommodationTable
