#-*- mode: python; mode: fold -*-
# (C) Copyright 2005 AixtraWare <http://aixtraware.de>
# Author: Joachim Schmitz <js@aixtraware.de>
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License version 2 as published
# by the Free Software Foundation.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software
# Foundation, Inc., 59 Temple Place - Suite 330, Boston, MA
# 02111-1307, USA.
#
# $Id: WAeUPTables.py 4693 2010-01-06 06:49:14Z henrik $

from zope.interface import implements
from Globals import InitializeClass
from Products.ZCatalog.ZCatalog import ZCatalog
from Products.ZCatalog.ProgressHandler import ZLogHandler
from AccessControl import ClassSecurityInfo
from Products.CMFCore.permissions import ModifyPortalContent
from Products.CMFCore.utils import getToolByName
from Products.CMFCore.CatalogTool import CatalogTool
from Products.CPSSchemas.StorageAdapter import MappingStorageAdapter
from Products.CPSSchemas.DataStructure import DataStructure
from Products.CPSSchemas.DataModel import DataModel
from Products.AdvancedQuery import Eq, Between, Le,In
import urllib
import DateTime,time
import csv,re,os
import logging
import Globals
# Filesystem roots: p_home is this product's package directory, i_home the
# Zope instance home; i_home is used throughout to build import/ and
# export/ CSV paths.
p_home = Globals.package_home(globals())
i_home = Globals.INSTANCE_HOME

# State marker strings stored in catalog records.
# NOTE(review): "sheduled" is a historic misspelling of "scheduled"; the
# string value is persisted data, so it must not be corrected here.
ADDING_SHEDULED = "adding_sheduled"
OBJECT_CREATED = "object_created"
NOT_OCCUPIED = 'not_occupied'

from interfaces import IWAeUPTable

class AttributeHolder(object):
    """Bare attribute container; dict2ob copies mapping entries onto it."""
    pass

def dict2ob(dict):
    """Return an AttributeHolder carrying the mapping's pairs as attributes."""
    holder = AttributeHolder()
    for key in dict.keys():
        setattr(holder, key, dict[key])
    return holder

class WAeUPTable(ZCatalog): ###(
    """Base class for WAeUP catalog tables.

    A thin ZCatalog wrapper storing flat records keyed by a single unique
    field (``self.key``, supplied by subclasses) with helpers for CSV
    import/export and record CRUD.
    """

    implements(IWAeUPTable)
    security = ClassSecurityInfo()
    meta_type = None

    def __init__(self,name=None):
        # Subclasses provide a class-level default name.
        if name ==  None:
            name = self.name
        ZCatalog.__init__(self,name)

    def refreshCatalog(self, clear=0, pghandler=None): ###(
        """ don't refresh for a normal table """
        # Refreshing is deliberately disabled; just bounce the ZMI user
        # back with a status message.  The original referenced an
        # undefined URL1 here (NameError); use the request's URL1.
        if self.REQUEST and self.REQUEST.RESPONSE:
            self.REQUEST.RESPONSE.redirect(
              self.REQUEST['URL1'] +
              '/manage_catalogAdvanced?manage_tabs_message=Catalog%20refresh%20not%20implemented')

###)

    def manage_catalogClear(self, REQUEST=None, RESPONSE=None, URL1=None): ###(
        """ clears the whole enchilada """
        self._catalog.clear()
        if REQUEST and RESPONSE:
            RESPONSE.redirect(
              URL1 +
              '/manage_catalogAdvanced?manage_tabs_message=Catalog%20cleared')

###)

    def record2dict(self,fields,record,index): ###(
        """Copy the given fields of a catalog record into a plain dict.

        'sex' is rendered as 'F'/'M'; when index == 'translate' the 'lga'
        and 'aos' codes are resolved via portal_vocabularies (falling back
        to the raw 'lga' code when unknown).  Missing/false values become ''.
        """
        d = {}
        for key in fields:
            v = getattr(record, key, None)
            v_dump = v
            if key == 'sex':
                if v == True:
                    v_dump = 'F'
                elif v == False:
                    v_dump = 'M'
                d[key] = v_dump
            elif v:
                if index == 'translate':
                    if key == 'lga':
                        v_dump = self.portal_vocabularies.local_gov_areas.get(v)
                        if not v_dump:
                            v_dump = v
                    elif key == 'aos':
                        v_dump = self.portal_vocabularies.aos.get(v)
                d[key] = v_dump
            else:
                d[key] = ''
        return d

###)

    def addRecord(self, **data): ###(
        """Catalog a new record; data must contain self.key.

        Returns the uid.  Raises ValueError when a record with this uid
        already exists (callers such as importCSV rely on the type).
        """
        uid = data[self.key]
        res = self.searchResults({"%s" % self.key : uid})
        if len(res) > 0:
            # This fires for any existing record; the old message claimed
            # "more than one" even when exactly one existed.
            raise ValueError("A record with uid %s already exists" % uid)
        self.catalog_object(dict2ob(data), uid=uid)
        return uid

###)

    def deleteRecord(self, uid):
        """Remove the record cataloged under uid."""
        self.uncatalog_object(uid)

    def getRecordByKey(self,key):
        """Return the first record whose key field equals key, or None."""
        if not key:
            return None
        res = self.evalAdvancedQuery(Eq(self.key,key))
        if res:
            return res[0]
        return None

    def searchAndSetRecord(self, **data):
        # Subclasses (e.g. PinTable) supply the real implementation.
        # `raise NotImplemented` raised the singleton, not the exception.
        raise NotImplementedError

    def modifyRecord(self, record=None, **data): ###(
        """Update an existing record in place.

        data must contain self.key; fields absent from data keep their
        current values.  Raises KeyError when no record exists and
        ValueError when the uid is ambiguous.
        """
        uid = data[self.key]
        if record is None:
            records = self.searchResults({"%s" % self.key : uid})
            if len(records) > 1:
                # Can not happen, but anyway...
                raise ValueError("More than one record with uid %s" % uid)
            if len(records) == 0:
                raise KeyError("No record for uid %s" % uid)
            record = records[0]
        record_data = {}
        for field in self.schema() + self.indexes():
            record_data[field] = getattr(record, field)
        # Add the updated data:
        record_data.update(data)
        self.catalog_object(dict2ob(record_data), uid)

###)

    def reindexIndex(self, name, REQUEST,pghandler=None): ###(
        """Reindex the named index (or tuple of indexes) for all records."""
        if isinstance(name, str):
            name =  (name,)
        paths = self._catalog.uids.items()
        for p,rid in paths:
            metadata = self.getMetadataForRID(rid)
            record_data = {}
            for field in name:
                record_data[field] = metadata.get(field)
            uid = metadata.get(self.key)
            # Touch only the requested indexes; metadata stays untouched.
            self.catalog_object(dict2ob(record_data), uid, idxs=name,
                                update_metadata=0)

###)

    security.declareProtected(ModifyPortalContent,"exportAllRecords") ###(
    def exportAllRecords(self):
        "export a WAeUPTable"
        # Dump every record as quoted CSV to <instance>/import/<id>-<ts>.csv.
        # The local list was previously named `csv`, shadowing the module.
        fields = [field for field in self.schema()]
        format = ','.join(['"%%(%s)s"' % fn for fn in fields])
        lines = []
        lines.append(','.join(['"%s"' % fn for fn in fields]))
        for uid in self._catalog.uids:
            records = self.searchResults({"%s" % self.key : uid})
            if len(records) > 1:
                # Can not happen, but anyway...
                raise ValueError("More than one record with uid %s" % uid)
            if len(records) == 0:
                raise KeyError("No record for uid %s" % uid)
            rec = records[0]
            lines.append(format % rec)
        current = DateTime.DateTime().strftime("%d-%m-%y_%H_%M_%S")
        out = open("%s/import/%s-%s.csv" % (i_home,self.getId(),current),"w+")
        out.write('\n'.join(lines))
        out.close()

###)

    security.declareProtected(ModifyPortalContent,"dumpAll")###(
    def dumpAll(self,index=None,value=None):
        """dump all data in the table to a csv"""
        # Writes <instance>/export/<name>_<ts>.csv in chunks of 2000
        # records, logging throughput and an estimated completion time.
        member = self.portal_membership.getAuthenticatedMember()
        logger = logging.getLogger('WAeUPTables.WAeUPTable.dumpAll')
        current = DateTime.DateTime().strftime("%d-%m-%y_%H_%M_%S")
        export_file = "%s/export/%s_%s.csv" % (i_home,self.__name__,current,)
        lines = []
        if hasattr(self,"export_keys"):
            fields = self.export_keys
        else:
            fields = []
            for f in self.schema():
                fields.append(f)
        headline = ','.join(fields)
        out = open(export_file,"wb")
        out.write(headline +'\n')
        out.close()
        out = open(export_file,"a")
        csv_writer = csv.DictWriter(out,fields,)
        # Optionally restrict the dump to records where index == value.
        if index is not None and value is not None:
            records = self.evalAdvancedQuery(Eq(index,value))
        else:
            records = self()
        nr2export = len(records)
        logger.info('%s starts dumping, %s records to export' % (member,nr2export))
        chunk = 2000
        total = 0
        start = DateTime.DateTime().timeTime()
        start_chunk = DateTime.DateTime().timeTime()
        for record in records:
            d = self.record2dict(fields,record,index)
            lines.append(d)
            total += 1
            # Flush per chunk so memory stays bounded and progress is logged.
            if total and not total % chunk or total == len(records):
                csv_writer.writerows(lines)
                anz = len(lines)
                logger.info("wrote %(anz)d  total written %(total)d" % vars())
                end_chunk = DateTime.DateTime().timeTime()
                duration = end_chunk-start_chunk
                per_record = duration/anz
                till_now = end_chunk - start
                avarage_per_record = till_now/total
                estimated_end = DateTime.DateTime(start + avarage_per_record * nr2export)
                estimated_end = estimated_end.strftime("%H:%M:%S")
                logger.info('%(duration)4.1f, %(per_record)4.3f,end %(estimated_end)s' % vars())
                start_chunk = DateTime.DateTime().timeTime()
                lines = []
        out.close()
        end = DateTime.DateTime().timeTime()
        logger.info('total time %6.2f m' % ((end-start)/60))
        msg = "wrote %(total)d records to %(export_file)s" % vars()
        logger.info(msg)
        return 'ready'
    ###)

    security.declarePrivate("_import_old") ###(
    def _import_old(self,filename,schema,layout, mode,logger):
        "import data from csv"
        # Validate rows of <instance>/import/<filename>.csv against the
        # CPS layout's widget validators; valid rows are echoed to
        # *_imported<ts>.csv, invalid ones (plus an Error column) to
        # *_not_imported<ts>.csv.  Returns a summary dict with counts,
        # the DataModels and the result file names.
        pm = self.portal_membership
        member = pm.getAuthenticatedMember()
        current = DateTime.DateTime().strftime("%d-%m-%y_%H_%M_%S")
        import_fn = "%s/import/%s.csv" % (i_home,filename)
        imported_fn = "%s/import/%s_imported%s.csv" % (i_home,filename,current)
        not_imported_fn = "%s/import/%s_not_imported%s.csv" % (i_home,filename,current)
        start = True
        tr_count = 1
        total_imported = 0
        total_not_imported = 0
        total = 0
        not_imported = []
        imported = []
        valid_records = []
        invalid_records = []
        d = {}
        d['mode'] = mode
        d['imported'] = total_imported
        d['not_imported'] = total_not_imported
        d['valid_records'] = valid_records
        d['invalid_records'] = invalid_records
        d['import_fn'] = import_fn
        d['imported_fn'] = imported_fn
        d['not_imported_fn'] = not_imported_fn
        if schema is None:
            em = 'No schema specified'
            logger.error(em)
            return d
        if layout is None:
            em = 'No layout specified'
            logger.error(em)
            return d
        validators = {}
        for widget in layout.keys():
            try:
                validators[widget] = layout[widget].validate
            except AttributeError:
                logger.info('%s has no validate attribute' % widget)
                return d
        try:
            items = csv.DictReader(open(import_fn,"rb"),
                                   dialect="excel",
                                   skipinitialspace=True)
        except:
            em = 'Error reading %s.csv' % filename
            logger.error(em)
            return d
        for item in items:
            if start:
                # First row: derive the importable column set from the CSV
                # header; columns named ignore* or in ALL-CAPS are skipped.
                start = False
                logger.info('%s starts import from %s.csv' % (member,filename))
                attrs = csv.reader(open("%s/import/%s.csv" % (i_home,filename),"rb"),
                                   dialect="excel",
                                   skipinitialspace=True).next()
                import_keys = [k for k in attrs if not (k.startswith('ignore') or k.isupper())]
                diff2schema = set(import_keys).difference(set(schema.keys()))
                diff2layout = set(import_keys).difference(set(layout.keys()))
                if diff2layout:
                    em = "not ignorable key(s) %s found in heading" % diff2layout
                    logger.info(em)
                    return d
                s = ','.join(['"%s"' % fn for fn in import_keys])
                open(not_imported_fn,"a").write(s + ',"Error"'+ '\n')
                open(imported_fn,"a").write(s + '\n')
                format = ','.join(['"%%(%s)s"' % fn for fn in import_keys])
                format_error = format + ',"%(Error)s"'
                adapters = [MappingStorageAdapter(schema, item)]
            dm = DataModel(item, adapters,context=self)
            ds = DataStructure(data=item,datamodel=dm)
            error_string = ""
            for k in import_keys:
                if not validators[k](ds,mode=mode):
                    error_string += " %s : %s" % (k,ds.getError(k))
            if error_string:
                item['Error'] = error_string
                invalid_records.append(dm)
                not_imported.append(format_error % item)
                total_not_imported += 1
            else:
                em = format % item
                valid_records.append(dm)
                imported.append(em)
                tr_count += 1
                total_imported += 1
            total += 1
        if len(imported) > 0:
            open(imported_fn,"a").write('\n'.join(imported))
        if len(not_imported) > 0:
            open(not_imported_fn,"a").write('\n'.join(not_imported))
        d['imported'] = total_imported
        d['not_imported'] = total_not_imported
        d['valid_records'] = valid_records
        d['invalid_records'] = invalid_records
        d['imported_fn'] = imported_fn
        d['not_imported_fn'] = not_imported_fn
        return d
    ###)

    security.declarePrivate("_import") ###(
    def _import_new(self,csv_items,schema, layout, mode,logger):
        "import data from csv.Dictreader Instance"
        # Like _import_old, but consumes an already-open DictReader and
        # returns validated records in memory instead of writing result
        # files.  The previous version referenced undefined names
        # (filename, member, format) and overwrote d['invalid_records']
        # with the duplicate list -- both fixed here.
        start = True
        tr_count = 1
        total_imported = 0
        total_not_imported = 0
        total = 0
        not_imported = []
        valid_records = []
        invalid_records = []
        duplicate_records = []
        d = {}
        d['mode'] = mode
        d['valid_records'] = valid_records
        d['invalid_records'] = invalid_records
        d['duplicate_records'] = duplicate_records
        validators = {}
        for widget in layout.keys():
            try:
                validators[widget] = layout[widget].validate
            except AttributeError:
                logger.info('%s has no validate attribute' % widget)
                return d
        import_keys = []
        for item in csv_items:
            if start:
                start = False
                logger.info('start import')
                # Derive the importable columns from the reader's header
                # (falling back to the first row's keys); ignore* and
                # ALL-CAPS columns are skipped.
                attrs = getattr(csv_items, 'fieldnames', None) or item.keys()
                import_keys = [k for k in attrs if not (k.startswith('ignore') or k.isupper())]
                diff2schema = set(import_keys).difference(set(schema.keys()))
                diff2layout = set(import_keys).difference(set(layout.keys()))
                if diff2layout:
                    em = "not ignorable key(s) %s found in heading" % diff2layout
                    logger.info(em)
                    return d
                adapters = [MappingStorageAdapter(schema, item)]
            dm = DataModel(item, adapters,context=self)
            ds = DataStructure(data=item,datamodel=dm)
            error_string = ""
            for k in import_keys:
                if not validators[k](ds,mode=mode):
                    error_string += " %s : %s" % (k,ds.getError(k))
            if error_string:
                item['Error'] = error_string
                invalid_records.append(item)
                total_not_imported += 1
            else:
                valid_records.append(dm)
                tr_count += 1
                total_imported += 1
            total += 1
        d['imported'] = total_imported
        d['not_imported'] = total_not_imported
        d['valid_records'] = valid_records
        d['invalid_records'] = invalid_records
        return d
    ###)

    security.declarePublic("missingValue")###(
    def missingValue(self):
        """Return the Zope Missing.MV singleton used for absent values."""
        from Missing import MV
        return MV
    ###)
###)

class AccommodationTable(WAeUPTable): ###(
    """Catalog of hostel beds; records are keyed by the bed identifier."""

    meta_type = 'WAeUP Accommodation Tool'
    name = "portal_accommodation"
    key = "bed"
    not_occupied = NOT_OCCUPIED

    def __init__(self,name=None):
        if name is None:
            name = self.name
        WAeUPTable.__init__(self, name)

    def searchAndReserveBed(self, student_id,bed_type,random_order=False): ###(
        """Reserve a free bed of bed_type for student_id.

        Returns (status, payload): (1, bed) on success, (-1, bed) when the
        student already holds a bed, (-2, "no bed") when none is free and
        (-3, ...) when multiple reservations exist for the student.
        """
        log = logging.getLogger('WAeUPTables.AccommodationTable.searchAndReserveBed')
        booked = self.evalAdvancedQuery(Eq('student',student_id))
        if len(booked) > 1:
            log.info('%s found more than one (reserved) bed' % (student_id))
            return -3,'more than one bed'
        if len(booked) == 1:
            log.info('%s found (reserved) bed %s' % (student_id,booked[0].bed))
            return -1,booked[0].bed
        # Free beds carry the NOT_OCCUPIED marker in their student field.
        free = self.evalAdvancedQuery(
            Eq('bed_type',bed_type) & Eq('student',NOT_OCCUPIED),
            sortSpecs=('sort_id','bed'))
        if not free:
            log.info('no bed %s available for %s' % (bed_type,student_id))
            return -2,"no bed"
        if random_order:
            import random
            pick = random.randint(0,len(free)-1)
        else:
            pick = 0
        chosen = free[pick]
        self.modifyRecord(bed=chosen.bed,student=student_id)
        log.info('%s booked bed %s' % (student_id,chosen.bed))
        return 1,chosen.bed
    ###)


InitializeClass(AccommodationTable)

###)

class PinTable(WAeUPTable): ###(
    """Catalog of access PINs; each record tracks which student used it."""
    # Class attribute so methods can reach it as self.ConflictError --
    # a bare `except ConflictError` inside a method would be a NameError,
    # since class-level names are not in method scope.
    from ZODB.POSException import ConflictError
    security = ClassSecurityInfo()
    meta_type = 'WAeUP Pin Tool'
    name = "portal_pins"
    key = 'pin'

    def __init__(self,name=None):
        if name ==  None:
            name = self.name
        WAeUPTable.__init__(self, name)

    security.declareProtected(ModifyPortalContent,"dumpAll")###(
    def dumpAll(self,include_unused=None,index=None):
        """dump all data in the table to a csv"""
        member = self.portal_membership.getAuthenticatedMember()
        logger = logging.getLogger('WAeUPTables.PinTable.dumpAll')
        # Only selected admins may export unused pins; check this before
        # any export file is created (previously a rejected call still
        # left an empty CSV behind).
        if include_unused is not None and str(member) not in ('admin','joachim'):
            logger.info('%s tries to dump pintable with unused pins' % (member))
            return
        current = DateTime.DateTime().strftime("%d-%m-%y_%H_%M_%S")
        export_file = "%s/export/%s_%s.csv" % (i_home,self.__name__,current,)
        lines = []
        if hasattr(self,"export_keys"):
            fields = self.export_keys
        else:
            fields = []
            for f in self.schema():
                fields.append(f)
        headline = ','.join(fields)
        out = open(export_file,"wb")
        out.write(headline +'\n')
        out.close()
        out = open(export_file,"a")
        csv_writer = csv.DictWriter(out,fields,)
        if include_unused is not None:
            records = self()
        else:
            # Default: only pins that have been used (student field set).
            records = self.evalAdvancedQuery(~Eq('student',''))
        nr2export = len(records)
        logger.info('%s starts dumping, %s records to export' % (member,nr2export))
        chunk = 2000
        total = 0
        start = DateTime.DateTime().timeTime()
        start_chunk = DateTime.DateTime().timeTime()
        for record in records:
            d = self.record2dict(fields,record,index)
            lines.append(d)
            total += 1
            # Flush per chunk so memory stays bounded and progress is logged.
            if total and not total % chunk or total == len(records):
                csv_writer.writerows(lines)
                anz = len(lines)
                logger.info("wrote %(anz)d  total written %(total)d" % vars())
                end_chunk = DateTime.DateTime().timeTime()
                duration = end_chunk-start_chunk
                per_record = duration/anz
                till_now = end_chunk - start
                avarage_per_record = till_now/total
                estimated_end = DateTime.DateTime(start + avarage_per_record * nr2export)
                estimated_end = estimated_end.strftime("%H:%M:%S")
                logger.info('%(duration)4.1f, %(per_record)4.3f,end %(estimated_end)s' % vars())
                start_chunk = DateTime.DateTime().timeTime()
                lines = []
        out.close()
        end = DateTime.DateTime().timeTime()
        logger.info('total time %6.2f m' % ((end-start)/60))
        msg = "wrote %(total)d records to %(export_file)s" % vars()
        logger.info(msg)
        url = self.REQUEST.get('URL2')
        return self.REQUEST.RESPONSE.redirect(url)
    ###)

    def searchAndSetRecord(self, uid, student_id,prefix):
        """Mark pin uid as used by student_id.

        Returns (status, record):
           1  pin was free and is now assigned to student_id
           2  pin already used by this student, or a ZODB conflict occurred
           0  pin already used by a different student
          -1  no such pin (record is None)
          -3  unexpected state
        Raises ValueError if the uid matches more than one record.
        """

        # The following block must be activated after resetting the
        # the portal_pins table. This is to avoid duplicate entries
        # and disable duplicate payments.

        #student_id = student_id.upper()
        #records = self.searchResults(student = student_id)
        #if len(records) > 0 and prefix in ('CLR','APP'):
        #    for r in records:
        #        if r.pin != uid and r.prefix_batch.startswith(prefix):
        #            return -2
        records = self.searchResults({"%s" % self.key : uid})
        if len(records) > 1:
            # Can not happen, but anyway...
            raise ValueError("More than one record with uid %s" % uid)
        if len(records) == 0:
            return -1,None
        record = records[0]
        if record.student == "":
            record_data = {}
            for field in self.schema() + self.indexes():
                record_data[field] = getattr(record, field)
            # Add the updated data:
            record_data['student'] = student_id
            try:
                self.catalog_object(dict2ob(record_data), uid)
                return 1,record
            except self.ConflictError:
                return 2,record
        # Comparison is case-insensitive: ids may differ only in case.
        if record.student.upper() != student_id.upper():
            return 0,record
        if record.student.upper() == student_id.upper():
            return 2,record
        return -3,record

InitializeClass(PinTable)
###)

class PumeResultsTable(WAeUPTable): ###(
    """Catalog holding PUME screening results, keyed by JAMB reg number."""

    meta_type = 'WAeUP PumeResults Tool'
    name = "portal_pumeresults"
    key = "jamb_reg_no"

    def __init__(self,name=None):
        # Fall back to the class-level default name when none is given.
        if name is None:
            name = self.name
        WAeUPTable.__init__(self, name)


InitializeClass(PumeResultsTable)

###)

class ApplicantsCatalog(WAeUPTable): ###(
    """Catalog of applicants, keyed by JAMB registration number."""

    meta_type = 'WAeUP Applicants Catalog'
    name = "applicants_catalog"
    key = "reg_no"
    security = ClassSecurityInfo()

    def __init__(self,name=None):
        if name ==  None:
            name = self.name
        WAeUPTable.__init__(self, name)

    security.declareProtected(ModifyPortalContent,"new_importCSV")###(
    def new_importCSV(self,filename="JAMB_data",
                  schema_id="application",
                  layout_id="import_application",
                  mode='add'):
        """ import JAMB data """
        # Guarded by a lock file so only one import per source file runs
        # at a time.  The actual work is done in _new_importCSV_locked;
        # the lock is always released afterwards (previously it was never
        # removed, blocking every subsequent import of the same file).
        current = DateTime.DateTime().strftime("%d-%m-%y_%H_%M_%S")
        pm = self.portal_membership
        member = pm.getAuthenticatedMember()
        logger = logging.getLogger('WAeUPTables.ApplicantsCatalog.importCSV')
        lock_fn = "%s/import/%s_import_lock" % (i_home,filename)
        import_fn = "%s/import/%s.csv" % (i_home,filename)
        if mode not in ('add','edit'):
            # Abort instead of silently continuing with a no-op mode.
            logger.info("invalid mode: %s" % mode)
            return
        if os.path.exists(lock_fn):
            logger.info("import of %(import_fn)s already in progress" % vars())
            return
        lock_file = open(lock_fn,"w")
        lock_file.write("%(current)s \n" % vars())
        lock_file.close()
        logger.info('%s starts import from %s.csv' % (member,filename))
        try:
            self._new_importCSV_locked(filename,schema_id,layout_id,mode,current,logger)
        finally:
            # Release the lock even when the import fails.
            os.remove(lock_fn)
    ###)

    security.declarePrivate("_new_importCSV_locked")###(
    def _new_importCSV_locked(self,filename,schema_id,layout_id,mode,current,logger):
        "run one locked new_importCSV pass and write per-outcome CSV files"
        import_fn = "%s/import/%s.csv" % (i_home,filename)
        stool = getToolByName(self, 'portal_schemas')
        ltool = getToolByName(self, 'portal_layouts')
        schema = stool._getOb(schema_id)
        if schema is None:
            logger.error('No such schema %s' % schema_id)
            return
        # Prefer a dedicated "<layout_id>_import" layout when one exists.
        for postfix in ('_import',''):
            layout_name = "%(layout_id)s%(postfix)s" % vars()
            if hasattr(ltool,layout_name):
                break
        layout = ltool._getOb(layout_name)
        if layout is None:
            logger.error('No such layout %s' % layout_id)
            return
        try:
            csv_file = csv.DictReader(open(import_fn,"rb"))
        except:
            logger.error('Error reading %s.csv' % filename)
            return
        # The old code passed an undefined name (csv_items) here.
        d = self._import_new(csv_file,schema,layout,mode,logger)
        imported = []
        edited = []
        duplicates = []
        not_found = []
        if len(d['valid_records']) > 0:
            for record in d['valid_records']:
                record_dict = dict(record.items())
                if mode == "add":
                    try:
                        # addRecord raises ValueError on an existing uid.
                        self.addRecord(**record_dict)
                        imported.append(record_dict)
                        logger.info("added %s" % record.items())
                    except ValueError:
                        duplicates.append(record_dict)
                        logger.info("duplicate %s" % record.items())
                elif mode == "edit":
                    try:
                        # modifyRecord raises KeyError on a missing uid.
                        self.modifyRecord(**record_dict)
                        edited.append(record_dict)
                        logger.info("edited %s" % record.items())
                    except KeyError:
                        not_found.append(record_dict)
                        logger.info("not found %s" % record.items())
        invalid = d['invalid_records']
        outlists = {"imported": imported,
                    "edited": edited,
                    "not_found": not_found,
                    "duplicate": duplicates,
                    "invalid": invalid,
                    }
        for itype in ("imported","edited","not_found","duplicate","invalid"):
            outlist = outlists[itype]
            if len(outlist):
                keys = outlist[0].keys()
                out_fn = "%s/import/%s_%s%s.csv" % (i_home,filename,itype,current)
                outfile = open(out_fn,'w')
                writer = csv.DictWriter(outfile,keys)
                # This csv.DictWriter has no writeheader(); emit the
                # header as a row mapping each column name to itself
                # (the old code overwrote the first data record instead).
                header = {}
                for k in keys:
                    header[k] = k
                writer.writerow(header)
                writer.writerows(outlist)
                outfile.close()
                logger.info("wrote %d %s records to %s" % (len(outlist),itype,out_fn))
    ###)

    security.declareProtected(ModifyPortalContent,"importCSV")###(
    def importCSV(self,filename="JAMB_data",
                  schema_id="application",
                  layout_id="application_pce",
                  mode='add'):
        """ import JAMB data """
        # logger must exist before the schema/layout error branches use it
        # (previously those branches raised NameError instead of logging).
        logger = logging.getLogger('WAeUPTables.ApplicantsCatalog.importCSV')
        stool = getToolByName(self, 'portal_schemas')
        ltool = getToolByName(self, 'portal_layouts')
        schema = stool._getOb(schema_id)
        if schema is None:
            em = 'No such schema %s' % schema_id
            logger.error(em)
            return
        layout = ltool._getOb(layout_id)
        if layout is None:
            em = 'No such layout %s' % layout_id
            logger.error(em)
            return
        d = self._import_old(filename,schema,layout,mode,logger)
        if len(d['valid_records']) > 0:
            for record in d['valid_records']:
                if mode == "add":
                    self.addRecord(**dict(record.items()))
                    logger.info("added %s" % record.items())
                elif mode == "edit":
                    self.modifyRecord(**dict(record.items()))
                    logger.info("edited %s" % record.items())
                else:
                    logger.info("invalid mode: %s" % mode)
        logger.info("%(mode)sed %(imported)d records, invalid written to %(not_imported_fn)s" % d)
    ###)

InitializeClass(ApplicantsCatalog)

###)

class StudentsCatalog(WAeUPTable): ###(
    security = ClassSecurityInfo()

    meta_type = 'WAeUP Students Catalog'
    name = "students_catalog"
    key = "id"
    affected_types = {   ###(
                      'StudentApplication':
                      {'id': 'application',
                       'fields':
                       ('jamb_reg_no',
                        'entry_mode',
                        #'entry_level',
                        'entry_session',
                       )
                      },
                      'StudentClearance':
                      {'id': 'clearance',
                       'fields':
                       ('matric_no',
                        'lga',
                       )
                      },
                      'StudentPersonal':
                      {'id': 'personal',
                       'fields':
                       ('name',
                        'sex',
                        'perm_address',
                        'email',
                        'phone',
                       )
                      },
                      'StudentStudyCourse':
                      {'id': 'study_course',
                       'fields':
                       ('course', # study_course
                        'faculty', # from certificate
                        'department', # from certificate
                        'end_level', # from certificate
                        'level', # current_level
                        'mode',  # from certificate
                        'session', # current_session
                        'verdict', # current_verdict
                       )
                      },
                     }
    ###)

    def __init__(self,name=None):
        if name ==  None:
            name = self.name
        WAeUPTable.__init__(self, name)
        return

    def manage_catalogClear(self, REQUEST=None, RESPONSE=None, URL1=None):
        """ clears the whole enchilada """
        self._catalog.clear()

        if REQUEST and RESPONSE:
            RESPONSE.redirect(
              URL1 +
              '/manage_catalogAdvanced?manage_tabs_message=Catalog%20Cleared')

    def manage_catalogReindex(self, REQUEST, RESPONSE, URL1): ###(
        """ clear the catalog, then re-index everything """

        elapse = time.time()
        c_elapse = time.clock()

        pgthreshold = self._getProgressThreshold()
        handler = (pgthreshold > 0) and ZLogHandler(pgthreshold) or None
        self.refreshCatalog(clear=1, pghandler=handler)

        elapse = time.time() - elapse
        c_elapse = time.clock() - c_elapse

        RESPONSE.redirect(
            URL1 +
            '/manage_catalogAdvanced?manage_tabs_message=' +
            urllib.quote('Catalog Updated \n'
                         'Total time: %s\n'
                         'Total CPU time: %s' % (`elapse`, `c_elapse`)))
    ###)

    def fill_certificates_dict(self): ###(
        "return certificate data in  dict"
        certificates_brains = self.portal_catalog(portal_type ='Certificate')
        d = {}
        for cb in certificates_brains:
            certificate_doc = cb.getObject().getContent()
            cb_path = cb.getPath().split('/')
            ld = {}
            ld['faculty'] = cb_path[-4]
            ld['department'] = cb_path[-3]
            ld['end_level'] = getattr(certificate_doc,'end_level','999')
            ld['study_mode'] = getattr(certificate_doc,'study_mode','')
            d[cb.getId] = ld
        return d
    ###)

    def get_from_doc_department(self,doc,cached_data={}): ###(
        "return the students department"
        if doc is None:
            return None
        if hasattr(self,"_v_certificates") and self._v_certificates.has_key(doc.study_course):
            return self._v_certificates[doc.study_course]['department']
        certificate_res = self.portal_catalog(id = doc.study_course)
        if len(certificate_res) != 1:
            return None
        return certificate_res[0].getPath().split('/')[-3]

    def get_from_doc_faculty(self,doc,cached_data={}):
        "return the students faculty"
        if doc is None:
            return None
        if hasattr(self,"_v_certificates") and self._v_certificates.has_key(doc.study_course):
            return self._v_certificates[doc.study_course]['faculty']
        certificate_res = self.portal_catalog(id = doc.study_course)
        if len(certificate_res) != 1:
            return None
        return certificate_res[0].getPath().split('/')[-4]

    def get_from_doc_end_level(self,doc,cached_data={}):
        "return the students end_level"
        if doc is None:
            return None
        if hasattr(self,"_v_certificates") and self._v_certificates.has_key(doc.study_course):
            return self._v_certificates[doc.study_course]['end_level']
        certificate_res = self.portal_catalog(id = doc.study_course)
        if len(certificate_res) != 1:
            return None
        return getattr(certificate_res[0].getObject().getContent(),'end_level','unknown')

    def get_from_doc_level(self,doc,cached_data={}):
        "return the students level"
        if doc is None:
            return None
        return getattr(doc,'current_level',None)

    #def get_from_doc_mode(self,doc,cached_data={}):
    #    "return the students mode"
    #    if doc is None:
    #        return None
    #    cm = getattr(doc,'current_mode',None)
    #    return cm
    
    def get_from_doc_mode(self,doc,cached_data={}):
        "return the students mode"
        if doc is None:
            return None
        if hasattr(self,"_v_certificates") and self._v_certificates.has_key(doc.study_course):
            return self._v_certificates[doc.study_course]['study_mode']
        certificate_res = self.portal_catalog(id = doc.study_course)
        if len(certificate_res) != 1:
            return None
        return getattr(certificate_res[0].getObject().getContent(),'study_mode','unknown')    


    def get_from_doc_session(self,doc,cached_data={}):
        "return the students current_session"
        if doc is None:
            return None
        return getattr(doc,'current_session',None)

    def get_from_doc_entry_session(self,doc,cached_data={}):
        "return the students entry_session"
        if doc is None:
            return None
        es = getattr(doc,'entry_session',None)
        if es is not None and len(es) < 3:
            return es
        elif len(es) == 9:
            return es[2:4]    
        try:
            digit = int(doc.jamb_reg_no[0])
        except:
            return "-1"
        if digit < 9:
            return "0%c" % doc.jamb_reg_no[0]
        return "9%c" % doc.jamb_reg_no[0]

    def get_from_doc_course(self,doc,cached_data={}):
        "return the students study_course"
        if doc is None:
            return None
        return getattr(doc,'study_course',None)

    def get_from_doc_name(self,doc,cached_data={}):
        "return the students name from the personal"
        if doc is None:
            return None
        return "%s %s %s" % (doc.firstname,doc.middlename,doc.lastname)

    def get_from_doc_verdict(self,doc,cached_data={}):
        "return the students study_course"
        if doc is None:
            return None
        return getattr(doc,'current_verdict',None)
    ###)

    def reindexIndex(self, name, REQUEST,pghandler=None): ###(
        if not hasattr(self,'_v_certificates'):
            self._v_certificates = self.fill_certificates_dict()
        if isinstance(name, str):
            name = (name,)
        reindextypes = {}
        reindex_special = []
        for n in name:
            if n in ("review_state"):
                reindex_special.append(n)
            else:
                for pt in self.affected_types.keys():
                    if n in self.affected_types[pt]['fields']:
                        if reindextypes.has_key(pt):
                            reindextypes[pt].append(n)
                        else:
                            reindextypes[pt]= [n]
                        break
        #cached_data = {}
        #if set(name).intersection(set(('faculty','department','end_level','mode'))):
        #    cached_data = self.fill_certificates_dict()
        students = self.portal_catalog(portal_type="Student")
        if hasattr(self,'portal_catalog_real'):
            aq_portal = self.portal_catalog_real.evalAdvancedQuery
        else:
            aq_portal = self.portal_catalog.evalAdvancedQuery
        num_objects = len(students)
        if pghandler:
            pghandler.init('Refreshing catalog: %s' % self.absolute_url(1), num_objects)
        noattr = set(('StudentClearance','StudentPersonal')) & set(reindextypes.keys())
        #import pdb;pdb.set_trace()
        for i in xrange(num_objects):
            if pghandler: pghandler.report(i)
            student_brain = students[i]
            student_object = student_brain.getObject()
            data = {}
            modified = False
            sid = data['id'] = student_brain.getId
            if reindex_special and 'review_state' in reindex_special:
                modified = True
                data['review_state'] = student_object.portal_workflow.getInfoFor(student_object,'review_state',None)
            sub_objects = False
            for pt in reindextypes.keys():
                modified = True
                try:
                    doc = getattr(student_object,self.affected_types[pt]['id']).getContent()
                    sub_objects = True
                except:
                    continue
                for field in set(name).intersection(self.affected_types[pt]['fields']):
                    if hasattr(self,'get_from_doc_%s' % field):
                        data[field] = getattr(self,'get_from_doc_%s' % field)(doc)
                    else:
                        data[field] = getattr(doc,field)
            if not sub_objects and noattr:
                import_res = self.returning_import(id = sid)
                if not import_res:
                    continue
                import_record = import_res[0]
                data['matric_no'] = import_record.matric_no
                data['sex'] = import_record.Sex == 'F'
                data['name'] = "%s %s %s" % (import_record.Firstname,
                                             import_record.Middlename,
                                             import_record.Lastname)
                data['jamb_reg_no'] = import_record.Entryregno
            if modified:
                self.modifyRecord(**data)
        if pghandler: pghandler.finish()
    ###)

    def refreshCatalog(self, clear=0, pghandler=None): ###(
        """ re-index everything we can find """
        students_folder = self.portal_url.getPortalObject().campus.students
        if clear:
            self._catalog.clear()
        students = self.portal_catalog(portal_type="Student")
        num_objects = len(students)
        #cached_data = self.fill_certificates_dict()
        if not hasattr(self,'_v_certificates'):
            self._v_certificates = self.fill_certificates_dict()
        if pghandler:
            pghandler.init('Refreshing catalog: %s' % self.absolute_url(1), num_objects)
        for i in xrange(num_objects):
            if pghandler: pghandler.report(i)
            student_brain = students[i]
            spath = student_brain.getPath()
            student_object = student_brain.getObject()
            data = {}
            sid = data['id'] = student_brain.getId
            #data['review_state'] = student_brain.review_state
            data['review_state'] = student_object.portal_workflow.getInfoFor(student_object,'review_state',None)
            sub_objects = False
            for pt in self.affected_types.keys():
                modified = True
                try:
                    doc = getattr(student_object,self.affected_types[pt]['id']).getContent()
                    sub_objects = True
                except:
                    #from pdb import set_trace;set_trace()
                    continue
                for field in self.affected_types[pt]['fields']:
                    if hasattr(self,'get_from_doc_%s' % field):
                        data[field] = getattr(self,'get_from_doc_%s' % field)(doc,
                                                                              cached_data=cached_data)
                    else:
                        data[field] = getattr(doc,field,None)
            if not sub_objects:
                import_res = self.returning_import(id = sid)
                if not import_res:
                    continue
                import_record = import_res[0]
                data['matric_no'] = import_record.matric_no
                data['sex'] = import_record.Sex == 'F'
                data['name'] = "%s %s %s" % (import_record.Firstname,
                                             import_record.Middlename,
                                             import_record.Lastname)
                data['jamb_reg_no'] = import_record.Entryregno
            self.addRecord(**data)
        if pghandler: pghandler.finish()
    ###)

    security.declarePrivate('notify_event_listener') ###(
    def notify_event_listener(self,event_type,object,infos):
        "listen for events"
        if not infos.has_key('rpath'):
            return
        pt = getattr(object,'portal_type',None)
        mt = getattr(object,'meta_type',None)
        students_catalog = self
        data = {}
        if pt == 'Student' and\
           mt == 'CPS Proxy Folder' and\
           event_type.startswith('workflow'):
            data['id'] = object.getId()
            data['review_state'] = self.portal_workflow.getInfoFor(object,'review_state',None)
            students_catalog.modifyRecord(**data)
            return
        rpl = infos['rpath'].split('/')
        if pt == 'Student' and mt == 'CPS Proxy Folder':
            student_id = object.id
            if event_type == "sys_add_object":
                try:
                    self.addRecord(id = student_id)
                except ValueError:
                    pass
                return
            elif event_type == 'sys_del_object':
                self.deleteRecord(student_id)
        if pt not in self.affected_types.keys():
            return
        if event_type not in ('sys_modify_object'):
            return
        if mt == 'CPS Proxy Folder':
            return
        if not hasattr(self,'_v_certificates'):
            self._v_certificates = self.fill_certificates_dict()
        for field in self.affected_types[pt]['fields']:
            if hasattr(self,'get_from_doc_%s' % field):
                data[field] = getattr(self,'get_from_doc_%s' % field)(object)
            else:
                data[field] = getattr(object,field)
        data['id'] = rpl[2]
        self.modifyRecord(**data)
    ###)


# Register the class's security declarations with Zope.
InitializeClass(StudentsCatalog)

###)

class CertificatesCatalog(WAeUPTable): ###(
    """Flat catalog of certificate data, one record per certificate,
    keyed by the certificate code.  faculty/department are derived from
    the certificate's position in the 'academics' hierarchy."""
    security = ClassSecurityInfo()

    meta_type = 'WAeUP Certificates Catalog'
    name =  "certificates_catalog"
    key = "code"
    def __init__(self,name=None):
        # Fall back to the class-level catalog id when none is given.
        if name ==  None:
            name =  self.name
        WAeUPTable.__init__(self, name)

    def manage_catalogReindex(self, REQUEST, RESPONSE, URL1): ###(
        """ clear the catalog, then re-index everything """

        elapse = time.time()
        c_elapse = time.clock()

        pgthreshold = self._getProgressThreshold()
        # Attach a progress handler only when a threshold is configured.
        handler = (pgthreshold > 0) and ZLogHandler(pgthreshold) or None
        self.refreshCatalog(clear=1, pghandler=handler)

        elapse = time.time() - elapse
        c_elapse = time.clock() - c_elapse

        RESPONSE.redirect(
            URL1 +
            '/manage_catalogAdvanced?manage_tabs_message=' +
            urllib.quote('Catalog Updated \n'
                         'Total time: %s\n'
                         'Total CPU time: %s' % (`elapse`, `c_elapse`)))
    ###)

    def reindexIndex(self, name, REQUEST,pghandler=None): ###(
        # Re-index only the given field name(s) for every certificate.
        # Accept a single index name as well as a sequence of names.
        if isinstance(name, str):
            name = (name,)
        certificates = self.portal_catalog(portal_type="Certificate")
        num_objects = len(certificates)
        if pghandler:
            pghandler.init('Refreshing catalog: %s' % self.absolute_url(1), num_objects)
        for i in xrange(num_objects):
            if pghandler: pghandler.report(i)
            certificate_brain = certificates[i]
            certificate_object = certificate_brain.getObject()
            pl = certificate_brain.getPath().split('/')
            data = {}
            cid = data[self.key] = certificate_brain.getId
            # faculty/department from fixed positions in the physical path
            # (counted from the end, unlike refreshCatalog which anchors
            # on 'academics' — equivalent for .../academics/fac/dept/...).
            data['faculty'] = pl[-4]
            data['department'] = pl[-3]
            doc = certificate_object.getContent()
            for field in name:
                # key/faculty/department are already set above.
                if field not in (self.key,'faculty','department'):
                    data[field] = getattr(doc,field)
            self.modifyRecord(**data)
        if pghandler: pghandler.finish()
    ###)

    def refreshCatalog(self, clear=0, pghandler=None): ###(
        """ re-index everything we can find """
        if clear:
            self._catalog.clear()
        certificates = self.portal_catalog(portal_type="Certificate")
        num_objects = len(certificates)
        if pghandler:
            pghandler.init('Refreshing catalog: %s' % self.absolute_url(1), num_objects)
        for i in xrange(num_objects):
            if pghandler: pghandler.report(i)
            certificate_brain = certificates[i]
            certificate_doc = certificate_brain.getObject().getContent()
            pl = certificate_brain.getPath().split('/')
            data = {}
            # Copy every schema field from the certificate document.
            for field in self.schema():
                data[field] = getattr(certificate_doc,field,None)
            data[self.key] = certificate_brain.getId
            # faculty/department are the two path segments directly below
            # the 'academics' folder.
            ai = pl.index('academics')
            data['faculty'] = pl[ai +1]
            data['department'] = pl[ai +2]
            if clear:
                self.addRecord(**data)
            else:
                self.modifyRecord(**data)
        if pghandler: pghandler.finish()
    ###)

    security.declarePrivate('notify_event_listener') ###(
    def notify_event_listener(self,event_type,object,infos):
        "listen for events"
        if not infos.has_key('rpath'):
            return
        pt = getattr(object,'portal_type',None)
        mt = getattr(object,'meta_type',None)
        if pt != 'Certificate':
            return
        data = {}
        rpl = infos['rpath'].split('/')
        if event_type not in ("sys_add_object","sys_modify_object","sys_del_object"):
            return
        certificate_id = object.getId()
        data[self.key] = certificate_id
        # Creation: add a stub record, then fill it from the content doc.
        if event_type == "sys_add_object" and mt == 'CPS Proxy Folder':
            try:
                self.addRecord(**data)
            except ValueError:
                # Record already exists.
                return
            certificate_id = object.getId()
            doc = object.getContent()
            if doc is None:
                return
            for field in self.schema():
                data[field] = getattr(doc,field,None)
            data[self.key] = certificate_id
            ai = rpl.index('academics')
            data['faculty'] = rpl[ai +1]
            data['department'] = rpl[ai +2]
            self.modifyRecord(**data)
            return
        if event_type == "sys_del_object":
            self.deleteRecord(certificate_id)
            return
        # Modification events arrive on the content object itself.
        if event_type == "sys_modify_object" and mt == 'Certificate':
            for field in self.schema():
                data[field] = getattr(object,field,None)
            # The record id is the proxy folder's id, not the doc's.
            certificate_id = object.aq_parent.getId()
            data[self.key] = certificate_id
            ai = rpl.index('academics')
            data['faculty'] = rpl[ai +1]
            data['department'] = rpl[ai +2]
            self.modifyRecord(**data)
    ###)


# Register the class's security declarations with Zope.
InitializeClass(CertificatesCatalog)
###)

class CoursesCatalog(WAeUPTable): ###(
    """Flat catalog of course data, one record per course, keyed by the
    course code.  Structurally a clone of CertificatesCatalog with
    'Course' substituted for 'Certificate'."""
    security = ClassSecurityInfo()

    meta_type = 'WAeUP Courses Catalog'
    name =  "courses_catalog"
    key = "code"
    def __init__(self,name=None):
        # Fall back to the class-level catalog id when none is given.
        if name ==  None:
            name =  self.name
        WAeUPTable.__init__(self, name)

    def manage_catalogReindex(self, REQUEST, RESPONSE, URL1): ###(
        """ clear the catalog, then re-index everything """

        elapse = time.time()
        c_elapse = time.clock()

        pgthreshold = self._getProgressThreshold()
        # Attach a progress handler only when a threshold is configured.
        handler = (pgthreshold > 0) and ZLogHandler(pgthreshold) or None
        self.refreshCatalog(clear=1, pghandler=handler)

        elapse = time.time() - elapse
        c_elapse = time.clock() - c_elapse

        RESPONSE.redirect(
            URL1 +
            '/manage_catalogAdvanced?manage_tabs_message=' +
            urllib.quote('Catalog Updated \n'
                         'Total time: %s\n'
                         'Total CPU time: %s' % (`elapse`, `c_elapse`)))
    ###)

    def reindexIndex(self, name, REQUEST,pghandler=None): ###(
        # Re-index only the given field name(s) for every course.
        # Accept a single index name as well as a sequence of names.
        if isinstance(name, str):
            name = (name,)
        courses = self.portal_catalog(portal_type="Course")
        num_objects = len(courses)
        if pghandler:
            pghandler.init('Refreshing catalog: %s' % self.absolute_url(1), num_objects)
        for i in xrange(num_objects):
            if pghandler: pghandler.report(i)
            course_brain = courses[i]
            course_object = course_brain.getObject()
            pl = course_brain.getPath().split('/')
            data = {}
            cid = data[self.key] = course_brain.getId
            # faculty/department from fixed positions in the physical path.
            data['faculty'] = pl[-4]
            data['department'] = pl[-3]
            doc = course_object.getContent()
            for field in name:
                # key/faculty/department are already set above.
                if field not in (self.key,'faculty','department'):
                    data[field] = getattr(doc,field)
            self.modifyRecord(**data)
        if pghandler: pghandler.finish()
    ###)

    def refreshCatalog(self, clear=0, pghandler=None): ###(
        """ re-index everything we can find """
        if clear:
            self._catalog.clear()
        courses = self.portal_catalog(portal_type="Course")
        num_objects = len(courses)
        if pghandler:
            pghandler.init('Refreshing catalog: %s' % self.absolute_url(1), num_objects)
        for i in xrange(num_objects):
            if pghandler: pghandler.report(i)
            course_brain = courses[i]
            course_doc = course_brain.getObject().getContent()
            pl = course_brain.getPath().split('/')
            data = {}
            # Copy every schema field from the course document.
            for field in self.schema():
                data[field] = getattr(course_doc,field,None)
            data[self.key] = course_brain.getId
            # faculty/department are the two path segments directly below
            # the 'academics' folder.
            ai = pl.index('academics')
            data['faculty'] = pl[ai +1]
            data['department'] = pl[ai +2]
            if clear:
                self.addRecord(**data)
            else:
                self.modifyRecord(**data)
        if pghandler: pghandler.finish()
    ###)

    security.declarePrivate('notify_event_listener') ###(
    def notify_event_listener(self,event_type,object,infos):
        "listen for events"
        if not infos.has_key('rpath'):
            return
        pt = getattr(object,'portal_type',None)
        mt = getattr(object,'meta_type',None)
        if pt != 'Course':
            return
        data = {}
        rpl = infos['rpath'].split('/')
        if event_type not in ("sys_add_object","sys_modify_object","sys_del_object"):
            return
        course_id = object.getId()
        data[self.key] = course_id
        # Creation: add a stub record, then fill it from the content doc.
        if event_type == "sys_add_object" and mt == 'CPS Proxy Folder':
            try:
                self.addRecord(**data)
            except ValueError:
                # Record already exists.
                return
            course_id = object.getId()
            doc = object.getContent()
            if doc is None:
                return
            for field in self.schema():
                data[field] = getattr(doc,field,None)
            data[self.key] = course_id
            ai = rpl.index('academics')
            data['faculty'] = rpl[ai +1]
            data['department'] = rpl[ai +2]
            self.modifyRecord(**data)
            return
        if event_type == "sys_del_object":
            self.deleteRecord(course_id)
            return
        # Modification events arrive on the content object itself.
        if event_type == "sys_modify_object" and mt == 'Course':
            for field in self.schema():
                data[field] = getattr(object,field,None)
            # The record id is the proxy folder's id, not the doc's.
            course_id = object.aq_parent.getId()
            data[self.key] = course_id
            ai = rpl.index('academics')
            data['faculty'] = rpl[ai +1]
            data['department'] = rpl[ai +2]
            self.modifyRecord(**data)
    ###)


# Register the class's security declarations with Zope.
InitializeClass(CoursesCatalog)
###)

class CourseResults(WAeUPTable): ###(
    """Catalog of course results: one record per (student, level, course),
    keyed by ``key`` = "student_id|level_id|course_id"."""
    security = ClassSecurityInfo()

    meta_type = 'WAeUP Results Catalog'
    name = "course_results"
    key = "key" #student_id + level + course_id
    def __init__(self,name=None):
        # Fall back to the class-level catalog id when none is given.
        if name ==  None:
            name = self.name
        WAeUPTable.__init__(self, name)
        self._queue = []

    def addMultipleRecords(self, records): ###(
        """Catalog a batch of course-result records.

        A record whose uid is already cataloged with identical
        student/level/course data is skipped; the uids of all skipped
        records are returned.
        """
        skipped = []
        for data in records:
            uid = "%(student_id)s|%(level_id)s|%(course_id)s" % data
            data[self.key] = uid
            hits = self.course_results.evalAdvancedQuery(Eq(self.key, uid))
            if len(hits) > 0:
                found = hits[0]
                mismatches = [attr for attr in ('student_id','level_id','course_id')
                              if getattr(found,attr,'') != data[attr]]
                if not mismatches:
                    skipped.append(uid)
                    continue
            self.catalog_object(dict2ob(data), uid=uid)
        return skipped
    ###)

    def deleteResultsHere(self,level_id,student_id): ###(
        """Remove every course result of one student at one level."""
        hits = self.course_results.evalAdvancedQuery(
            Eq('student_id',student_id) & Eq('level_id', level_id))
        for hit in hits:
            self.deleteRecord(hit.key)
    ###)

    def moveResultsHere(self,level,student_id): ###(
        """Move the course-result sub-objects of a level folder into this
        catalog, then delete every processed sub-object from the folder."""
        level_id = level.getId()
        query = Eq('student_id',student_id) & Eq('level_id', level_id)
        course_results = self.course_results.evalAdvancedQuery(query)
        existing_courses = [cr.code for cr in course_results]
        to_delete = []
        for code,obj in level.objectItems():
            to_delete.append(code)
            carry_over = False
            # A trailing '_co' on the object id marks a carry-over course.
            if code.endswith('_co'):
                carry_over = True
                code  = code[:-3]
            if code in existing_courses:
                # Already cataloged; the object is still deleted below.
                continue
            course_result_doc = obj.getContent()
            data = {}
            course_id = code
            for field in self.schema():
                data[field] = getattr(course_result_doc,field,'')
            data['key'] = key = "%(student_id)s|%(level_id)s|%(course_id)s" % vars()
            data['student_id'] = student_id
            data['level_id'] = level_id
            session_id = self.getLevelSession(level.getContent(),student_id,level_id)
            data['session_id'] = session_id
            #data['queue_status'] = OBJECT_CREATED
            data['code'] = course_id
            data['carry_over'] = carry_over
            self.catalog_object(dict2ob(data), uid=key)
        level.manage_delObjects(to_delete)
    ###)

    def getCourses(self,student_id,level_id): ###(
        """Return (total_credits, gpa, carry_overs, normal1, normal2, normal3)
        for one student level.  gpa is the accumulated sum of weight*credits
        (not yet divided by total_credits); normal1/2/3 split non-carry-over
        courses by semester ('1', '2', other)."""
        query = Eq('student_id',student_id) & Eq('level_id', level_id)
        course_results = self.course_results.evalAdvancedQuery(query)
        carry_overs = []
        normal1 = []
        normal2 = []
        normal3 = []
        total_credits = 0
        gpa = 0
        for brain in course_results:
            d = {}

            # Copy all schema fields; normalise ZCatalog Missing.Value to ''.
            for field in self.schema():
                d[field] = getattr(brain,field,None)
                if repr(d[field]) == 'Missing.Value':
                    d[field] = ''
            d['weight'] = ''
            d['grade'] = ''
            d['score'] = ''

            if str(brain.credits).isdigit():
                credits = int(brain.credits)
                total_credits += credits
                score = getattr(brain,'score',0)
                if score and str(score).isdigit() and int(score) > 0:
                    score = int(score)
                    grade,weight = self.getGradesFromScore(score,'')
                    gpa += weight * credits
                    d['weight'] = weight
                    d['grade'] = grade
                    d['score'] = score

            #if str(brain.ca1).isdigit() and str(brain.ca2).isdigit() and str(brain.exam).isdigit():
            #    d['score_calc'] = int(brain.ca1) + int(brain.ca2) + int(brain.exam)
            #else:
            #    d['score_calc'] = ''
            try:
                d['score_calc'] = float(brain.ca1) + float(brain.ca2) + float(brain.exam)
            except:
                d['score_calc'] = ''

            if d['score_calc']:
                # NOTE(review): above, getGradesFromScore returns a
                # (grade, weight) pair that is unpacked; here its raw return
                # value is stored as the grade without unpacking — confirm
                # whether ``grade,weight = ...`` was intended.
                grade = self.getGradesFromScore(d['score_calc'],level_id)
                d['grade'] = grade

            d['coe'] = ''
            if brain.core_or_elective:
                d['coe'] = 'Core'
            elif brain.core_or_elective == False:
                d['coe'] = 'Elective'
            id = code = d['id'] = brain.code
            d['code'] = code
            res = self.courses_catalog.evalAdvancedQuery(Eq('code',code))
            if res:
                course = res[0]
                d['title'] = course.title
                # The courses_catalog contains strings and integers in its semester field.
                # Maybe this can be fixed by reindexing the catalog. The schema of course says 'CPS Int Field'.
                d['semester'] = str(course.semester)
            else:
                d['title'] = "Course has been removed from course list"
                d['semester'] = ''
            if brain.carry_over:
                d['coe'] = 'CO'
                carry_overs.append(d)
            else:
                if d['semester'] == '1':
                    normal1.append(d)

                elif d['semester'] == '2':
                    normal2.append(d)
                else:
                    normal3.append(d)
        #normal.sort(cmp=lambda x,y: cmp("%(semester)s%(code)s" % x,
        #                                "%(semester)s%(code)s" % y))
        carry_overs.sort(cmp=lambda x,y: cmp("%(semester)s%(code)s" % x,
                                             "%(semester)s%(code)s" % y))
        return total_credits,gpa,carry_overs,normal1,normal2,normal3
    ###)

    
    # for transcript only
    def getAllCourses(self,student_id): ###(
        """Return every course result of a student as a list of dicts
        (used for transcripts only)."""
        brains = self.course_results.evalAdvancedQuery(Eq('student_id',student_id))
        courses = []
        for brain in brains:
            # Start from all schema fields, defaulting missing ones to ''.
            row = dict([(field, getattr(brain,field,'')) for field in self.schema()])
            row['weight'] = ''
            row['grade'] = ''
            row['score'] = ''

            if str(brain.credits).isdigit():
                credits = int(brain.credits)
                score = getattr(brain,'score',0)
                if score and str(score).isdigit() and int(score) > 0:
                    score = int(score)
                    grade,weight = self.getGradesFromScore(score,'')
                    row['weight'] = weight
                    row['grade'] = grade
                    row['score'] = score

            if brain.core_or_elective:
                row['coe'] = 'Core'
            elif brain.core_or_elective == False:
                row['coe'] = 'Elective'
            else:
                row['coe'] = ''
            row['id'] = row['code'] = brain.code

            hits = self.courses_catalog.evalAdvancedQuery(Eq('code',brain.code))
            if hits:
                course = hits[0]
                row['title'] = course.title
                # The courses_catalog contains strings and integers in its semester field.
                # Maybe this can be fixed by reindexing the catalog. The schema of course says 'CPS Int Field'.
                row['semester'] = str(course.semester)
            else:
                row['title'] = "Course has been removed from course list"
                row['semester'] = ''
            if brain.carry_over:
                row['coe'] = 'CO'
            courses.append(row)
        return courses
    ###)
    
    def getYearGroupAverage(self,session_id,level_id): ###(
        # Compute the year-group average (mean overall mark) per semester
        # for all course results of a session/level. Returns, per semester
        # 1..3: the average ('%.2f' string, or 0 if no results), the number
        # of results, and the list of overall marks.
        # No docstring on purpose: keeps the method unpublishable in Zope.
        query = Eq('session_id',session_id) & Eq('level_id',level_id)
        course_results = self.course_results.evalAdvancedQuery(query)
        # per semester: running sum, list of marks, counter
        yga1 = 0
        yg1 = []
        counter1 = 0
        yga2 = 0
        yg2 = []
        counter2 = 0
        yga3 = 0
        yg3 = []
        counter3 = 0
        for brain in course_results:
            try:
                # overall mark = both continuous assessments plus exam;
                # skip empty results
                om = float(brain.ca1) + float(brain.ca2) + float(brain.exam)
                if not om > 0:
                    continue
                code = brain.code
                res = self.courses_catalog.evalAdvancedQuery(Eq('code',code))
                if res:
                    course = res[0]
                    # The courses_catalog contains strings and integers in
                    # its semester field (schema says 'CPS Int Field'), so
                    # normalize to a string before comparing.
                    semester = str(course.semester)
                else:
                    semester = ''
                if semester == '1':
                    counter1 += 1
                    yga1 += om
                    yg1.append(om)
                elif semester == '2':
                    counter2 += 1
                    yga2 += om
                    yg2.append(om)
                elif semester == '3':
                    # was a separate 'if' in the original; semantically
                    # identical, but 'elif' makes the chain explicit
                    counter3 += 1
                    yga3 += om
                    yg3.append(om)
            except Exception:
                # best effort: skip results with missing/non-numeric scores
                continue
        if counter1:
            yga1 /= counter1
            yga1 = '%.2f' % yga1
        if counter2:
            yga2 /= counter2
            yga2 = '%.2f' % yga2
        if counter3:
            yga3 /= counter3
            yga3 = '%.2f' % yga3
        return yga1, yga2, yga3, counter1, counter2, counter3, yg1, yg2, yg3
    ###)
    
    
    #security.declarePublic("calculateCoursePosition")
    def calculateCoursePosition(self,session_id,level_id,code,score,semester=None):
        #"""calculate Course Position"""
        # Docstring deliberately commented out so the method cannot be
        # published through the web. Ranks 'score' within all overall marks
        # of the course in the given session/level (optionally restricted
        # to one semester). Returns {'pos': 'i of n', 'ygc': [...]} or the
        # string 'no result' when no marks were found.
        query = Eq('session_id',session_id) & Eq('level_id',level_id) & Eq('code',code)
        course_results = self.course_results.evalAdvancedQuery(query)
        ygc = []
        for brain in course_results:
            try:
                # skip empty results (no positive overall mark)
                if not float(brain.ca1) + float(brain.ca2) + float(brain.exam) > 0:
                    continue
                if semester:
                    res = self.courses_catalog.evalAdvancedQuery(Eq('code',code))
                    if res:
                        course = res[0]
                        # courses_catalog mixes strings and integers in its
                        # semester field; normalize before comparing
                        semester_from_course = str(course.semester)
                    else:
                        continue
                    if semester != semester_from_course:
                        continue
                ygc.append(float(brain.ca1) + float(brain.ca2) + float(brain.exam))
            except Exception:
                # best effort: skip results with missing/non-numeric scores
                continue
        ygc.sort(reverse=True)
        if not len(ygc):
            return 'no result'
        # first index (in descending order) whose mark is <= score; when
        # score is below every mark, pos ends at the last index (original
        # behavior preserved)
        for pos in range(len(ygc)):
            if ygc[pos] <= float(score):
                break
        output = {}
        output['pos'] =  '%d of %d' % (pos+1,len(ygc))
        output['ygc'] = ygc
        return output
        
    security.declareProtected(ModifyPortalContent,"calculateAllCoursePositions")
    def calculateAllCoursePositions(self,session_id=None):
        """calculate All Course Positions"""
        # Recalculate the 'pic' (position in course) field of every course
        # result of the given session. Results without a positive overall
        # mark or without a matching course in courses_catalog are skipped.
        logger = logging.getLogger('WAeUPTables.CourseResults.calculateAllCoursePositions')
        member = self.portal_membership.getAuthenticatedMember()
        logger.info('%s starts recalculation of positions in session %s' % (member,session_id))
        if session_id:
            query = Eq('session_id',session_id)
        else:
            return 'no session_id provided'
        course_results = self.course_results.evalAdvancedQuery(query)
        for brain in course_results:
            try:
                if not float(brain.ca1) + float(brain.ca2) + float(brain.exam) > 0:
                    continue
                res = self.courses_catalog.evalAdvancedQuery(Eq('code',brain.code))
                if res:
                    course = res[0]
                    # courses_catalog mixes strings and integers in its
                    # semester field; normalize before comparing
                    semester_from_course = str(course.semester)
                else:
                    continue
                score = float(brain.ca1) + float(brain.ca2) + float(brain.exam)
                # NOTE(review): the session was hardcoded as '08' here, so
                # positions were always ranked against session '08' data no
                # matter which session was being recalculated; rank within
                # the requested session instead.
                pic = self.calculateCoursePosition(session_id,brain.level_id,brain.code,score,semester_from_course)['pos']
                data = {}
                data[self.key] = brain.key
                data['pic'] = pic
                self.modifyRecord(**data)
            except Exception:
                # best effort: skip records that cannot be processed
                continue
        logger.info('recalculation finished')
        return 'ready'
    
    def exportRemoveAllCourses(self,student_id,export=False,remove=False): ###(
        ""
        # Export to CSV and/or delete all course results of a student.
        # Returns the CSV-formatted lines; the export file is appended to,
        # with a header line written only when the file is created.
        query = Eq('student_id',student_id)
        cr_catalog = self.course_results
        course_results = cr_catalog.evalAdvancedQuery(query)
        courses = []
        fields = self.schema()
        # build '"%(f1)s","%(f2)s",...' once for all rows
        format = '"%(' + ')s","%('.join(fields) + ')s"'
        for brain in course_results:
            d = {}
            for field in fields:
                d[field] = getattr(brain,field,'')
            courses.append(format % d)

        if export:
            export_file = "%s/export/course_results_removed.csv" % (i_home)
            # header only for a freshly created file
            write_headline = not os.path.exists(export_file)
            file_handler = open(export_file,"a")
            try:
                if write_headline:
                    file_handler.write(','.join(fields) +'\n')
                for line in courses:
                    file_handler.write(line +'\n')
            finally:
                # the original leaked the file handle; always close it
                file_handler.close()

        if remove:
            for brain in course_results:
                key = getattr(brain,'key','')
                cr_catalog.deleteRecord(key)

        return courses
    ###)
    
   

# register the ClassSecurityInfo declarations of CourseResults with Zope
InitializeClass(CourseResults)
###)

class OnlinePaymentsImport(WAeUPTable): ###(
    # Catalog of online payment transactions, keyed by order_id.

    meta_type = 'WAeUP Online Payment Transactions'
    name = "online_payments_import"
    key = "order_id"

    def __init__(self,name=None):
        # allow instantiation under a different catalog id; fall back to
        # the class default ('is None' instead of '== None', PEP 8)
        if name is None:
            name = self.name
        WAeUPTable.__init__(self, name)


InitializeClass(OnlinePaymentsImport)
###)

class ReturningImport(WAeUPTable): ###(
    # Import table for returning students, keyed by matriculation number.

    meta_type = 'Returning Import Table'
    name = "returning_import"
    key = "matric_no"

    def __init__(self,name=None):
        # allow instantiation under a different catalog id; fall back to
        # the class default ('is None' instead of '== None', PEP 8)
        if name is None:
            name = self.name
        WAeUPTable.__init__(self, name)


InitializeClass(ReturningImport)
###)

class ResultsImport(WAeUPTable): ###(
    # Import table for examination results, keyed by 'key'.

    meta_type = 'Results Import Table'
    name = "results_import"
    key = "key"

    def __init__(self,name=None):
        # allow instantiation under a different catalog id; fall back to
        # the class default ('is None' instead of '== None', PEP 8)
        if name is None:
            name = self.name
        WAeUPTable.__init__(self, name)


InitializeClass(ResultsImport)

###)

class PaymentsCatalog(WAeUPTable): ###(
    # Catalog of payment records, keyed by order_id. Kept in sync with
    # 'Payment' content objects via CPS event notifications.
    security = ClassSecurityInfo()

    meta_type = 'WAeUP Payments Catalog'
    name = "payments_catalog"
    key = "order_id"

    def __init__(self,name=None):
        # allow instantiation under a different catalog id; fall back to
        # the class default ('is None' instead of '== None', PEP 8)
        if name is None:
            name = self.name
        WAeUPTable.__init__(self, name)


    security.declarePrivate('notify_event_listener') ###(
    def notify_event_listener(self,event_type,object,infos):
        "listen for events"
        # Keep the catalog in sync with 'Payment' documents: delete the
        # record when the proxy is removed, add/update it when the real
        # document is modified.
        if 'rpath' not in infos:
            return
        pt = getattr(object,'portal_type',None)
        mt = getattr(object,'meta_type',None)
        data = {}
        if pt != 'Payment':
            return
        if event_type == 'sys_del_object' and mt == 'CPS Proxy Folder':
            self.deleteRecord(object.getContent().order_id)
        if mt == 'CPS Proxy Folder':
            return # is handled only for the real object
        # NOTE(review): the original tested "not in ('sys_modify_object')"
        # - a *substring* test against a plain string (missing tuple
        # comma), so e.g. 'modify_object' also slipped through. The
        # one-element tuple restores the written intent.
        if event_type not in ('sys_modify_object',):
            return
        for field in self.schema():
            data[field] = getattr(object,field,'')
        rpl = infos['rpath'].split('/')
        # the student id is the fourth path element from the end of the
        # payment object's rpath
        student_id = rpl[-4]
        data['student_id'] = student_id
        modified = False
        try:
            self.modifyRecord(**data)
            modified = True
        except KeyError:
            # record does not exist yet; fall through to addRecord below
            pass
        if not modified:
            try:
                self.addRecord(**data)
            except Exception:
                logger = logging.getLogger('WAeUPTables.PaymentsCatalog.notify_event_listener')
                logger.info("could not add or modify entry for %(student_id)s with %(order_id)s" % data)
        ###)


    def exportRemoveAllPayments(self,student_id,export=False,remove=False): ###(
        ""
        # Export to CSV and/or delete all payment records of a student.
        # Returns the CSV-formatted lines; the export file is appended to,
        # with a header line written only when the file is created.
        query = Eq('student_id',student_id)
        pm_catalog = self.payments_catalog
        payments = pm_catalog.evalAdvancedQuery(query)
        payments_dic = []
        fields = self.schema()
        # build '"%(f1)s","%(f2)s",...' once for all rows
        format = '"%(' + ')s","%('.join(fields) + ')s"'
        for brain in payments:
            d = {}
            for field in fields:
                d[field] = getattr(brain,field,'')
            payments_dic.append(format % d)

        if export:
            export_file = "%s/export/payments_removed.csv" % (i_home)
            # header only for a freshly created file
            write_headline = not os.path.exists(export_file)
            file_handler = open(export_file,"a")
            try:
                if write_headline:
                    file_handler.write(','.join(fields) +'\n')
                for line in payments_dic:
                    file_handler.write(line +'\n')
            finally:
                # the original leaked the file handle; always close it
                file_handler.close()

        if remove:
            for brain in payments:
                order_id = getattr(brain,'order_id','')
                pm_catalog.deleteRecord(order_id)

        return payments_dic
    ###)

InitializeClass(PaymentsCatalog)

###)

class RemovedStudentIds(WAeUPTable): ###(
    # Catalog of ids of removed students, keyed by id.

    meta_type = 'WAeUP Removed StudentIds'
    name = "removed_student_ids"
    key = "id"

    def __init__(self,name=None):
        # allow instantiation under a different catalog id; fall back to
        # the class default ('is None' instead of '== None', PEP 8)
        if name is None:
            name = self.name
        WAeUPTable.__init__(self, name)


InitializeClass(RemovedStudentIds)

###)

# BBB: backwards-compatibility alias for the historically misspelled
# class name ('Accomodation' with a single 'm')
AccomodationTable = AccommodationTable
