source: WAeUP_SRP/base/WAeUPTables.py @ 3350

Last change on this file since 3350 was 3348, checked in by joachim, 17 years ago

increase speed of student_studycourse editing, should speed up admitStudent about 15 - 20 %

  • Property svn:keywords set to Id
File size: 58.5 KB
Line 
1#-*- mode: python; mode: fold -*-
2# (C) Copyright 2005 AixtraWare <http://aixtraware.de>
3# Author: Joachim Schmitz <js@aixtraware.de>
4#
5# This program is free software; you can redistribute it and/or modify
6# it under the terms of the GNU General Public License version 2 as published
7# by the Free Software Foundation.
8#
9# This program is distributed in the hope that it will be useful,
10# but WITHOUT ANY WARRANTY; without even the implied warranty of
11# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
12# GNU General Public License for more details.
13#
14# You should have received a copy of the GNU General Public License
15# along with this program; if not, write to the Free Software
16# Foundation, Inc., 59 Temple Place - Suite 330, Boston, MA
17# 02111-1307, USA.
18#
19# $Id: WAeUPTables.py 3348 2008-03-18 17:49:35Z joachim $
20
21from zope.interface import implements
22from Globals import InitializeClass
23from Products.ZCatalog.ZCatalog import ZCatalog
24from Products.ZCatalog.ProgressHandler import ZLogHandler
25from AccessControl import ClassSecurityInfo
26from Products.CMFCore.permissions import ModifyPortalContent
27from Products.CMFCore.utils import getToolByName
28from Products.CMFCore.CatalogTool import CatalogTool
29from Products.CPSSchemas.StorageAdapter import MappingStorageAdapter
30from Products.CPSSchemas.DataStructure import DataStructure
31from Products.CPSSchemas.DataModel import DataModel
32from Products.AdvancedQuery import Eq, Between, Le,In
33import urllib
34import DateTime,time
35import csv,re
36import logging
37import Globals
p_home = Globals.package_home(globals())  # filesystem path of this product package
i_home = Globals.INSTANCE_HOME            # Zope instance home; import/export csv files live below it

# state marker strings stored in table records
ADDING_SHEDULED = "adding_sheduled"       # NOTE: historic misspelling of "scheduled", kept for data compatibility
OBJECT_CREATED = "object_created"
NOT_OCCUPIED = 'not_occupied'             # value of the 'student' column for a free bed (see AccommodationTable)
44
45from interfaces import IWAeUPTable
46
class AttributeHolder(object):
    """Bare attribute bag: catalog records are read back via getattr,
    so plain dicts are converted to one of these before cataloging."""
    pass

def dict2ob(mapping):
    """Return an AttributeHolder carrying one attribute per item of
    *mapping*.

    NOTE(fix): the parameter was originally named ``dict`` which
    shadowed the builtin; all callers in this module pass it
    positionally, so the rename is safe.
    """
    ob = AttributeHolder()
    for key, value in mapping.items():
        setattr(ob, key, value)
    return ob
55
class WAeUPTable(ZCatalog): ###(
    """Base class for the WAeUP catalog tables.

    A WAeUPTable is a ZCatalog whose records are addressed by one
    unique key column, named by the class attribute ``key``.
    Subclasses define ``meta_type``, ``name`` and ``key`` and may add
    convenience queries.
    """

    implements(IWAeUPTable)
    security = ClassSecurityInfo()
    meta_type = None

    def __init__(self,name=None):
        # Default to the class level catalog name.
        if name is None:
            name = self.name
        ZCatalog.__init__(self,name)

    def refreshCatalog(self, clear=0, pghandler=None): ###(
        """Refresh is deliberately not implemented for a normal table;
        bounce the ZMI user back with an explanatory message."""
        if self.REQUEST and self.REQUEST.RESPONSE:
            # NOTE(fix): the original referenced an undefined global
            # ``URL1`` here (NameError when called through the ZMI);
            # take the base url from the request instead.
            url1 = self.REQUEST.get('URL1','')
            self.REQUEST.RESPONSE.redirect(
              url1 +
              '/manage_catalogAdvanced?manage_tabs_message=Catalog%20refresh%20not%20implemented')
    ###)

    def manage_catalogClear(self, REQUEST=None, RESPONSE=None, URL1=None): ###(
        """ clears the whole enchilada """
        self._catalog.clear()
        if REQUEST and RESPONSE:
            RESPONSE.redirect(
              URL1 +
              '/manage_catalogAdvanced?manage_tabs_message=Catalog%20cleared')
    ###)

    def record2dict(self,fields,record): ###(
        """Map a catalog brain to a plain dict suitable for csv export.

        The boolean ``sex`` column is rendered as 'F' (True) / 'M'
        (False); ``lga`` and ``aos`` are translated through the portal
        vocabularies (falling back to the raw value for ``lga``);
        other empty values become ''.
        """
        d = {}
        for key in fields:
            v = getattr(record, key, None)
            v_dump = v
            if key == 'sex':
                if v == True:
                    v_dump = 'F'
                elif v == False:
                    v_dump = 'M'
                d[key] = v_dump
            elif v:
                if key == 'lga':
                    v_dump = self.portal_vocabularies.local_gov_areas.get(v)
                    if not v_dump:
                        v_dump = v
                elif key == 'aos':
                    v_dump = self.portal_vocabularies.aos.get(v)
                d[key] = v_dump
            else:
                d[key] = ''
        return d
    ###)

    def addRecord(self, **data): ###(
        """Catalog a new record and return its uid (the value of the
        key column, e.g. "bed" for the accommodation table).

        Raises ValueError when a record with that key already exists.
        """
        uid = data[self.key]
        res = self.searchResults({"%s" % self.key : uid})
        if len(res) > 0:
            raise ValueError("More than one record with uid %s" % uid)
        self.catalog_object(dict2ob(data), uid=uid)
        return uid
    ###)

    def deleteRecord(self, uid):
        """Remove the record addressed by ``uid`` from the catalog."""
        self.uncatalog_object(uid)

    def getRecordByKey(self,key):
        """Return the first record whose key column equals ``key``,
        or None when ``key`` is empty or nothing matches."""
        if not key:
            return None
        res = self.evalAdvancedQuery(Eq(self.key,key))
        if res:
            return res[0]
        return None

    def searchAndSetRecord(self, **data):
        # NOTE(fix): the original did ``raise NotImplemented`` --
        # NotImplemented is a value, not an exception type.
        raise NotImplementedError('searchAndSetRecord')

    def modifyRecord(self, record=None, **data): ###(
        """Update an existing record in place.

        ``data`` must contain the key column; the remaining items
        overwrite the stored values.  Raises KeyError when no record
        matches and ValueError on (theoretically impossible)
        duplicates.
        """
        uid = data[self.key]
        if record is None:
            records = self.searchResults({"%s" % self.key : uid})
            if len(records) > 1:
                # Can not happen, but anyway...
                raise ValueError("More than one record with uid %s" % uid)
            if len(records) == 0:
                raise KeyError("No record for uid %s" % uid)
            record = records[0]
        record_data = {}
        for field in self.schema() + self.indexes():
            record_data[field] = getattr(record, field)
        # Add the updated data:
        record_data.update(data)
        self.catalog_object(dict2ob(record_data), uid)
    ###)

    def reindexIndex(self, name, REQUEST,pghandler=None): ###(
        """Rebuild the named index(es) from every record's stored
        metadata without touching the other metadata columns."""
        if isinstance(name, str):
            name = (name,)
        for p,rid in self._catalog.uids.items():
            metadata = self.getMetadataForRID(rid)
            record_data = {}
            for field in name:
                record_data[field] = metadata.get(field)
            uid = metadata.get(self.key)
            self.catalog_object(dict2ob(record_data), uid, idxs=name,
                                update_metadata=0)
    ###)

    security.declareProtected(ModifyPortalContent,"exportAllRecords") ###(
    def exportAllRecords(self):
        "export a WAeUPTable to <i_home>/import/<id>-<timestamp>.csv"
        fields = [field for field in self.schema()]
        row_format = ','.join(['"%%(%s)s"' % fn for fn in fields])
        # NOTE(fix): the local list was named ``csv`` which shadowed
        # the csv module.
        lines = []
        lines.append(','.join(['"%s"' % fn for fn in fields]))
        for uid in self._catalog.uids:
            records = self.searchResults({"%s" % self.key : uid})
            if len(records) > 1:
                # Can not happen, but anyway...
                raise ValueError("More than one record with uid %s" % uid)
            if len(records) == 0:
                raise KeyError("No record for uid %s" % uid)
            rec = records[0]
            lines.append(row_format % rec)
        current = DateTime.DateTime().strftime("%d-%m-%y_%H_%M_%S")
        # NOTE(fix): close the handle instead of leaking it.
        out = open("%s/import/%s-%s.csv" % (i_home,self.getId(),current),"w+")
        out.write('\n'.join(lines))
        out.close()
    ###)

    security.declareProtected(ModifyPortalContent,"dumpAll")###(
    def dumpAll(self):
        """dump all data in the table to a csv

        Rows are written in chunks of 2000 with per-chunk progress
        logging and an estimated completion time.
        """
        member = self.portal_membership.getAuthenticatedMember()
        logger = logging.getLogger('WAeUPTables.WAeUPTable.dumpAll')
        current = DateTime.DateTime().strftime("%d-%m-%y_%H_%M_%S")
        export_file = "%s/export/%s_%s.csv" % (i_home,self.__name__,current,)
        lines = []
        if hasattr(self,"export_keys"):
            fields = self.export_keys
        else:
            fields = []
            for f in self.schema():
                fields.append(f)
        headline = ','.join(fields)
        out = open(export_file,"wb")
        out.write(headline +'\n')
        out.close()
        out = open(export_file,"a")
        csv_writer = csv.DictWriter(out,fields,)
        records = self()
        nr2export = len(records)
        logger.info('%s starts dumping, %s records to export' % (member,nr2export))
        chunk = 2000
        total = 0
        start = DateTime.DateTime().timeTime()
        start_chunk = DateTime.DateTime().timeTime()
        for record in records:
            d = self.record2dict(fields,record)
            lines.append(d)
            total += 1
            if total and not total % chunk or total == len(records):
                csv_writer.writerows(lines)
                anz = len(lines)
                logger.info("wrote %(anz)d  total written %(total)d" % vars())
                end_chunk = DateTime.DateTime().timeTime()
                duration = end_chunk-start_chunk
                per_record = duration/anz
                till_now = end_chunk - start
                avarage_per_record = till_now/total
                estimated_end = DateTime.DateTime(start + avarage_per_record * nr2export)
                estimated_end = estimated_end.strftime("%H:%M:%S")
                logger.info('%(duration)4.1f, %(per_record)4.3f,end %(estimated_end)s' % vars())
                start_chunk = DateTime.DateTime().timeTime()
                lines = []
        # NOTE(fix): flush/close the export file; the original leaked
        # the handle, which could lose the last buffered chunk.
        out.close()
        end = DateTime.DateTime().timeTime()
        logger.info('total time %6.2f m' % ((end-start)/60))
        msg = "wrote %(total)d records to %(export_file)s" % vars()
        logger.info(msg)
        url = self.REQUEST.get('URL2')
        return self.REQUEST.RESPONSE.redirect(url)
    ###)

    security.declarePrivate("_import_old") ###(
    def _import_old(self,filename,schema,layout, mode,logger):
        """import data from csv

        Validates <i_home>/import/<filename>.csv row by row against
        the layout widget validators and splits the rows into
        <filename>_imported<ts>.csv / <filename>_not_imported<ts>.csv.
        Returns a result dict with counts, the validated DataModels
        and the involved file names.
        """
        pm = self.portal_membership
        member = pm.getAuthenticatedMember()
        current = DateTime.DateTime().strftime("%d-%m-%y_%H_%M_%S")
        import_fn = "%s/import/%s.csv" % (i_home,filename)
        imported_fn = "%s/import/%s_imported%s.csv" % (i_home,filename,current)
        not_imported_fn = "%s/import/%s_not_imported%s.csv" % (i_home,filename,current)
        start = True
        tr_count = 1
        total_imported = 0
        total_not_imported = 0
        total = 0
        not_imported = []
        imported = []
        valid_records = []
        invalid_records = []
        d = {}
        d['mode'] = mode
        d['imported'] = total_imported
        d['not_imported'] = total_not_imported
        d['valid_records'] = valid_records
        d['invalid_records'] = invalid_records
        d['import_fn'] = import_fn
        d['imported_fn'] = imported_fn
        d['not_imported_fn'] = not_imported_fn
        if schema is None:
            em = 'No schema specified'
            logger.error(em)
            return d
        if layout is None:
            em = 'No layout specified'
            logger.error(em)
            return d
        validators = {}
        for widget in layout.keys():
            try:
                validators[widget] = layout[widget].validate
            except AttributeError:
                logger.info('%s has no validate attribute' % widget)
                return d
        try:
            items = csv.DictReader(open(import_fn,"rb"),
                                   dialect="excel",
                                   skipinitialspace=True)
        except:
            em = 'Error reading %s.csv' % filename
            logger.error(em)
            return d
        for item in items:
            if start:
                start = False
                logger.info('%s starts import from %s.csv' % (member,filename))
                # re-read the raw heading; keys starting with 'ignore'
                # or written entirely in upper case are not imported
                attrs = csv.reader(open("%s/import/%s.csv" % (i_home,filename),"rb"),
                                   dialect="excel",
                                   skipinitialspace=True).next()
                import_keys = [k for k in attrs if not (k.startswith('ignore') or k.isupper())]
                diff2schema = set(import_keys).difference(set(schema.keys()))
                diff2layout = set(import_keys).difference(set(layout.keys()))
                if diff2layout:
                    em = "not ignorable key(s) %s found in heading" % diff2layout
                    logger.info(em)
                    return d
                s = ','.join(['"%s"' % fn for fn in import_keys])
                open(not_imported_fn,"a").write(s + ',"Error"'+ '\n')
                open(imported_fn,"a").write(s + '\n')
                format = ','.join(['"%%(%s)s"' % fn for fn in import_keys])
                format_error = format + ',"%(Error)s"'
                adapters = [MappingStorageAdapter(schema, item)]
            dm = DataModel(item, adapters,context=self)
            ds = DataStructure(data=item,datamodel=dm)
            error_string = ""
            for k in import_keys:
                if not validators[k](ds,mode=mode):
                    error_string += " %s : %s" % (k,ds.getError(k))
            if error_string:
                item['Error'] = error_string
                invalid_records.append(dm)
                not_imported.append(format_error % item)
                total_not_imported += 1
            else:
                em = format % item
                valid_records.append(dm)
                imported.append(em)
                tr_count += 1
                total_imported += 1
            total += 1
        if len(imported) > 0:
            open(imported_fn,"a").write('\n'.join(imported))
        if len(not_imported) > 0:
            open(not_imported_fn,"a").write('\n'.join(not_imported))
        d['imported'] = total_imported
        d['not_imported'] = total_not_imported
        d['valid_records'] = valid_records
        d['invalid_records'] = invalid_records
        d['imported_fn'] = imported_fn
        d['not_imported_fn'] = not_imported_fn
        return d
    ###)

    security.declarePrivate("_import") ###(
    def _import_new(self,csv_items,schema, layout, mode,logger):
        """import data from an already opened csv.DictReader instance

        Validates each row against the layout widget validators and
        returns a result dict with the valid DataModels, the invalid
        raw rows (with an 'Error' column) and the counters.
        """
        start = True
        tr_count = 1
        total_imported = 0
        total_not_imported = 0
        total = 0
        not_imported = []
        valid_records = []
        invalid_records = []
        duplicate_records = []
        d = {}
        d['mode'] = mode
        d['valid_records'] = valid_records
        d['invalid_records'] = invalid_records
        # NOTE(fix): the original stored the duplicates under the
        # 'invalid_records' key, silently clobbering the invalid list.
        d['duplicate_records'] = duplicate_records
        validators = {}
        for widget in layout.keys():
            try:
                validators[widget] = layout[widget].validate
            except AttributeError:
                logger.info('%s has no validate attribute' % widget)
                return d
        for item in csv_items:
            if start:
                start = False
                logger.info('import (new style) starts')
                # NOTE(fix): the original re-opened the csv file via the
                # undefined name ``filename`` (NameError); the reader we
                # were handed already knows its heading row.
                attrs = csv_items.fieldnames
                import_keys = [k for k in attrs if not (k.startswith('ignore') or k.isupper())]
                diff2layout = set(import_keys).difference(set(layout.keys()))
                if diff2layout:
                    em = "not ignorable key(s) %s found in heading" % diff2layout
                    logger.info(em)
                    return d
                adapters = [MappingStorageAdapter(schema, item)]
            dm = DataModel(item, adapters,context=self)
            ds = DataStructure(data=item,datamodel=dm)
            error_string = ""
            for k in import_keys:
                if not validators[k](ds,mode=mode):
                    error_string += " %s : %s" % (k,ds.getError(k))
            if error_string:
                item['Error'] = error_string
                invalid_records.append(item)
                total_not_imported += 1
            else:
                valid_records.append(dm)
                tr_count += 1
                total_imported += 1
            total += 1
        d['imported'] = total_imported
        d['not_imported'] = total_not_imported
        d['valid_records'] = valid_records
        d['invalid_records'] = invalid_records
        return d
    ###)

    security.declarePublic("missingValue")###(
    def missingValue(self):
        """Return the Zope 'Missing.MV' singleton, used to blank out
        metadata columns."""
        from Missing import MV
        return MV
    ###)
###)
478
class AccommodationTable(WAeUPTable): ###(
    """Catalog of hostel beds, keyed by the bed id.

    The 'student' column holds the id of the occupant, or
    NOT_OCCUPIED for a free bed.
    """

    meta_type = 'WAeUP Accommodation Tool'
    name = "portal_accommodation"
    key = "bed"
    not_occupied = NOT_OCCUPIED

    # NOTE(fix): the original overrode __init__ with a byte-identical
    # copy of WAeUPTable.__init__; the base implementation suffices.

    def searchAndReserveBed(self, student_id,bed_type): ###(
        """Reserve a free bed of ``bed_type`` for ``student_id``.

        Returns (1, bed) on success, (-1, message) when the student
        already holds a bed, (-2, message) when no bed of this type
        is free.
        """
        records = self.evalAdvancedQuery(Eq('student',student_id))
        if len(records) > 0:
            return -1,"Student with Id %s already booked bed %s." % (student_id,records[0].bed)
        # free beds of the requested type, lowest sort_id/bed first
        query = Eq('bed_type',bed_type) & Eq('student',NOT_OCCUPIED)
        records = self.evalAdvancedQuery(query,sortSpecs=('sort_id','bed'))
        if len(records) == 0:
            return -2,"No bed available"
        rec = records[0]
        self.modifyRecord(bed=rec.bed,student=student_id)
        s_logger = logging.getLogger('WAeUPTables.AccommodationTable.searchAndReserveBed')
        s_logger.info('%s reserved bed %s' % (student_id,rec.bed))
        return 1,rec.bed
    ###)


InitializeClass(AccommodationTable)
511
512###)
513
class PinTable(WAeUPTable): ###(
    """Catalog of access/scratch pins, keyed by the pin itself.

    A pin record carries the owning student id in its 'student'
    column; the empty string marks an unused pin.
    """
    from ZODB.POSException import ConflictError
    security = ClassSecurityInfo()
    meta_type = 'WAeUP Pin Tool'
    name = "portal_pins"
    key = 'pin'

    # NOTE(fix): the original overrode __init__ with a byte-identical
    # copy of WAeUPTable.__init__; the base implementation suffices.

    security.declareProtected(ModifyPortalContent,"dumpAll")###(
    def dumpAll(self,include_unused=None):
        """dump all data in the table to a csv

        Only used pins (student != '') are exported unless
        ``include_unused`` is given, which is restricted to the
        'admin' and 'joachim' accounts.
        """
        member = self.portal_membership.getAuthenticatedMember()
        logger = logging.getLogger('WAeUPTables.PinTable.dumpAll')
        # NOTE(fix): refuse the unused-pins dump *before* creating the
        # export file, so a refused request leaves no stray file.
        if include_unused is not None and str(member) not in ('admin','joachim'):
            logger.info('%s tries to dump pintable with unused pins' % (member))
            return
        current = DateTime.DateTime().strftime("%d-%m-%y_%H_%M_%S")
        export_file = "%s/export/%s_%s.csv" % (i_home,self.__name__,current,)
        lines = []
        if hasattr(self,"export_keys"):
            fields = self.export_keys
        else:
            fields = []
            for f in self.schema():
                fields.append(f)
        headline = ','.join(fields)
        out = open(export_file,"wb")
        out.write(headline +'\n')
        out.close()
        out = open(export_file,"a")
        csv_writer = csv.DictWriter(out,fields,)
        if include_unused is not None:
            records = self()
        else:
            records = self.evalAdvancedQuery(~Eq('student',''))
        nr2export = len(records)
        logger.info('%s starts dumping, %s records to export' % (member,nr2export))
        chunk = 2000
        total = 0
        start = DateTime.DateTime().timeTime()
        start_chunk = DateTime.DateTime().timeTime()
        for record in records:
            d = self.record2dict(fields,record)
            lines.append(d)
            total += 1
            if total and not total % chunk or total == len(records):
                csv_writer.writerows(lines)
                anz = len(lines)
                logger.info("wrote %(anz)d  total written %(total)d" % vars())
                end_chunk = DateTime.DateTime().timeTime()
                duration = end_chunk-start_chunk
                per_record = duration/anz
                till_now = end_chunk - start
                avarage_per_record = till_now/total
                estimated_end = DateTime.DateTime(start + avarage_per_record * nr2export)
                estimated_end = estimated_end.strftime("%H:%M:%S")
                logger.info('%(duration)4.1f, %(per_record)4.3f,end %(estimated_end)s' % vars())
                start_chunk = DateTime.DateTime().timeTime()
                lines = []
        # NOTE(fix): flush/close the export file; the original leaked
        # the handle, which could lose the last buffered chunk.
        out.close()
        end = DateTime.DateTime().timeTime()
        logger.info('total time %6.2f m' % ((end-start)/60))
        msg = "wrote %(total)d records to %(export_file)s" % vars()
        logger.info(msg)
        url = self.REQUEST.get('URL2')
        return self.REQUEST.RESPONSE.redirect(url)
    ###)

    def searchAndSetRecord(self, uid, student_id,prefix):
        """Mark pin ``uid`` as used by ``student_id``.

        Returns: (-1, None) unknown pin; (1, record) newly claimed;
        (2, record) already claimed by the same student (or a write
        conflict occurred); (0, record) claimed by somebody else.
        """
        # NOTE(fix): a class level import is not visible inside method
        # bodies (class namespaces are not part of the name lookup
        # chain), so the original ``except ConflictError:`` raised
        # NameError whenever a conflict actually happened.
        from ZODB.POSException import ConflictError
        records = self.searchResults({"%s" % self.key : uid})
        if len(records) > 1:
            # Can not happen, but anyway...
            raise ValueError("More than one record with uid %s" % uid)
        if len(records) == 0:
            return -1,None
        record = records[0]
        if record.student == "":
            record_data = {}
            for field in self.schema() + self.indexes():
                record_data[field] = getattr(record, field)
            # Add the updated data:
            record_data['student'] = student_id
            try:
                self.catalog_object(dict2ob(record_data), uid)
                return 1,record
            except ConflictError:
                return 2,record
        if record.student.upper() != student_id.upper():
            return 0,record
        if record.student.upper() == student_id.upper():
            return 2,record
        # defensive fall-through, unreachable with the checks above
        return -3,record
InitializeClass(PinTable)
638###)
639
class PumeResultsTable(WAeUPTable): ###(
    """Catalog of PUME (post-UME screening) results, keyed by the
    JAMB registration number.

    NOTE(fix): the original overrode __init__ with a byte-identical
    copy of WAeUPTable.__init__; the base implementation suffices.
    """

    meta_type = 'WAeUP PumeResults Tool'
    name = "portal_pumeresults"
    key = "jamb_reg_no"


InitializeClass(PumeResultsTable)
652
653###)
654
655class ApplicantsCatalog(WAeUPTable): ###(
656
657    meta_type = 'WAeUP Applicants Catalog'
658    name = "applicants_catalog"
659    key = "reg_no"
660    security = ClassSecurityInfo()
661    #export_keys = (
662    #               "reg_no",
663    #               "status",
664    #               "lastname",
665    #               "sex",
666    #               "date_of_birth",
667    #               "lga",
668    #               "email",
669    #               "phone",
670    #               "passport",
671    #               "entry_mode",
672    #               "pin",
673    #               "screening_type",
674    #               "registration_date",
675    #               "testdate",
676    #               "application_date",
677    #               "screening_date",
678    #               "faculty",
679    #               "department",
680    #               "course1",
681    #               "course2",
682    #               "course3",
683    #               "eng_score",
684    #               "subj1",
685    #               "subj1score",
686    #               "subj2",
687    #               "subj2score",
688    #               "subj3",
689    #               "subj3score",
690    #               "aggregate",
691    #               "course_admitted",
692    #               )
693
694    def __init__(self,name=None):
695        if name ==  None:
696            name = self.name
697        WAeUPTable.__init__(self, name)
698
699    security.declareProtected(ModifyPortalContent,"new_importCSV")###(
700    def new_importCSV(self,filename="JAMB_data",
701                  schema_id="application",
702                  layout_id="import_application",
703                  mode='add'):
704        """ import JAMB data """
705        current = DateTime.DateTime().strftime("%d-%m-%y_%H_%M_%S")
706        pm = self.portal_membership
707        member = pm.getAuthenticatedMember()
708        logger = logging.getLogger('WAeUPTables.ApplicantsCatalog.importCSV')
709        lock_fn = "%s/import/%s_import_lock" % (i_home,filename)
710        import_fn = "%s/import/%s.csv" % (i_home,filename)
711        if mode not in ('add','edit'):
712            logger.info("invalid mode: %s" % mode)
713        if os.path.exists(lock_fn):
714            logger.info("import of %(import_fn)s already in progress" % vars())
715            return
716        lock_file = open(lock_fn,"w")
717        lock_file.write("%(current)s \n" % vars())
718        lock_file.close()
719        invalid_fn = "%s/import/%s_not_imported%s.csv" % (i_home,filename,current)
720        duplicate_fn = "%s/import/%s_not_imported%s.csv" % (i_home,filename,current)
721        stool = getToolByName(self, 'portal_schemas')
722        ltool = getToolByName(self, 'portal_layouts')
723        schema = stool._getOb(schema_id)
724        if schema is None:
725            em = 'No such schema %s' % schema_id
726            logger.error(em)
727            return
728        for postfix in ('_import',''):
729            layout_name = "%(layout_id)s%(postfix)s" % vars()
730            if hasattr(ltool,layout_name):
731                break
732        layout = ltool._getOb(layout_name)
733        if layout is None:
734            em = 'No such layout %s' % layout_id
735            logger.error(em)
736            return
737        try:
738            csv_file = csv.DictReader(open(import_fn,"rb"))
739        except:
740            em = 'Error reading %s.csv' % filename
741            logger.error(em)
742            return
743        d = self._import_new(csv_items,schema,layout,mode,logger)
744        imported = []
745        edited = []
746        duplicates = []
747        not_found = []
748        if len(d['valid_records']) > 0:
749            for record in d['valid_records']:
750                #import pdb;pdb.set_trace()
751                if mode == "add":
752                    try:
753                        self.addRecord(**dict(record.items()))
754                        imported.append(**dict(record.items()))
755                        logger.info("added %s" % record.items())
756                    except ValueError:
757                        dupplicate.append(**dict(record.items()))
758                        logger.info("duplicate %s" % record.items())
759                elif mode == "edit":
760                    try:
761                        self.modifyRecord(**dict(record.items()))
762                        edited.append(**dict(record.items()))
763                        logger.info("edited %s" % record.items())
764                    except KeyError:
765                        not_found.append(**dict(record.items()))
766                        logger.info("not found %s" % record.items())
767        invalid = d['invalid_records']
768        for itype in ("imported","edited","not_found","duplicate","invalid"):
769            outlist = locals[itype]
770            if len(outlist):
771                d = {}
772                for k in outlist[0].keys():
773                    d[k] = k
774                outlist[0] = d
775                outfile = open("file_name_%s" % itype,'w')
776                csv.DictWriter(outfile,outlist[0].keys()).writerows(outlist)
777                logger.info("wrote %(itype)s records to %(, written to %(not_imported_fn)s" % d)
778###)
779
780    security.declareProtected(ModifyPortalContent,"importCSV")###(
781    def importCSV(self,filename="JAMB_data",
782                  schema_id="application",
783                  layout_id="application_pce",
784                  mode='add'):
785        """ import JAMB data """
786        stool = getToolByName(self, 'portal_schemas')
787        ltool = getToolByName(self, 'portal_layouts')
788        schema = stool._getOb(schema_id)
789        if schema is None:
790            em = 'No such schema %s' % schema_id
791            logger.error(em)
792            return
793        layout = ltool._getOb(layout_id)
794        if layout is None:
795            em = 'No such layout %s' % layout_id
796            logger.error(em)
797            return
798        logger = logging.getLogger('WAeUPTables.ApplicantsCatalog.importCSV')
799        d = self._import_old(filename,schema,layout,mode,logger)
800        if len(d['valid_records']) > 0:
801            for record in d['valid_records']:
802                #import pdb;pdb.set_trace()
803                if mode == "add":
804                    self.addRecord(**dict(record.items()))
805                    logger.info("added %s" % record.items())
806                elif mode == "edit":
807                    self.modifyRecord(**dict(record.items()))
808                    logger.info("edited %s" % record.items())
809                else:
810                    logger.info("invalid mode: %s" % mode)
811        logger.info("%(mode)sed %(imported)d records, invalid written to %(not_imported_fn)s" % d)
812    ###)
813
# Apply the ClassSecurityInfo declarations made above.
InitializeClass(ApplicantsCatalog)

###)
817
818class StudentsCatalog(WAeUPTable): ###(
819    security = ClassSecurityInfo()
820
821    meta_type = 'WAeUP Students Catalog'
822    name = "students_catalog"
823    key = "id"
824    affected_types = {   ###(
825                      'StudentApplication':
826                      {'id': 'application',
827                       'fields':
828                       ('jamb_reg_no',
829                        'entry_mode',
830                        #'entry_level',
831                        'entry_session',
832                       )
833                      },
834                      'StudentClearance':
835                      {'id': 'clearance',
836                       'fields':
837                       ('matric_no',
838                        'lga',
839                       )
840                      },
841                      'StudentPersonal':
842                      {'id': 'personal',
843                       'fields':
844                       ('name',
845                        'sex',
846                        'perm_address',
847                        'email',
848                        'phone',
849                       )
850                      },
851                      'StudentStudyCourse':
852                      {'id': 'study_course',
853                       'fields':
854                       ('course', # study_course
855                        'faculty', # from certificate
856                        'department', # from certificate
857                        'end_level', # from certificate
858                        'level', # current_level
859                        'mode',  # current_mode
860                        'session', # current_session
861                        'verdict', # current_verdict
862                       )
863                      },
864                     }
865    ###)
866
867    def __init__(self,name=None):
868        if name ==  None:
869            name = self.name
870        WAeUPTable.__init__(self, name)
871        return
872
873    def manage_catalogClear(self, REQUEST=None, RESPONSE=None, URL1=None):
874        """ clears the whole enchilada """
875        self._catalog.clear()
876
877        if REQUEST and RESPONSE:
878            RESPONSE.redirect(
879              URL1 +
880              '/manage_catalogAdvanced?manage_tabs_message=Catalog%20Cleared')
881
882    def manage_catalogReindex(self, REQUEST, RESPONSE, URL1): ###(
883        """ clear the catalog, then re-index everything """
884
885        elapse = time.time()
886        c_elapse = time.clock()
887
888        pgthreshold = self._getProgressThreshold()
889        handler = (pgthreshold > 0) and ZLogHandler(pgthreshold) or None
890        self.refreshCatalog(clear=1, pghandler=handler)
891
892        elapse = time.time() - elapse
893        c_elapse = time.clock() - c_elapse
894
895        RESPONSE.redirect(
896            URL1 +
897            '/manage_catalogAdvanced?manage_tabs_message=' +
898            urllib.quote('Catalog Updated \n'
899                         'Total time: %s\n'
900                         'Total CPU time: %s' % (`elapse`, `c_elapse`)))
901    ###)
902
903    def fill_certificates_dict(self): ###(
904        "return certificate data in  dict"
905        certificates_brains = self.portal_catalog(portal_type ='Certificate')
906        d = {}
907        for cb in certificates_brains:
908            certificate_doc = cb.getObject().getContent()
909            cb_path = cb.getPath().split('/')
910            ld = {}
911            ld['faculty'] = cb_path[-4]
912            ld['department'] = cb_path[-3]
913            ld['end_level'] = getattr(certificate_doc,'end_level','999')
914            ld['study_mode'] = getattr(certificate_doc,'study_mode','')
915            d[cb.getId] = ld
916        return d
917    ###)
918
919    def get_from_doc_department(self,doc,cached_data={}): ###(
920        "return the students department"
921        if doc is None:
922            return None
923        if self._v_certificates.has_key(doc.study_course):
924            return self._v_certificates[doc.study_course]['department']
925        certificate_res = self.portal_catalog(id = doc.study_course)
926        if len(certificate_res) != 1:
927            return None
928        return certificate_res[0].getPath().split('/')[-3]
929
930    def get_from_doc_faculty(self,doc,cached_data={}):
931        "return the students faculty"
932        if doc is None:
933            return None
934        if self._v_certificates.has_key(doc.study_course):
935            return self._v_certificates[doc.study_course]['faculty']
936        certificate_res = self.portal_catalog(id = doc.study_course)
937        if len(certificate_res) != 1:
938            return None
939        return certificate_res[0].getPath().split('/')[-4]
940
941    def get_from_doc_end_level(self,doc,cached_data={}):
942        "return the students end_level"
943        if doc is None:
944            return None
945        if self._v_certificates.has_key(doc.study_course):
946            return self._v_certificates[doc.study_course]['end_level']
947        certificate_res = self.portal_catalog(id = doc.study_course)
948        if len(certificate_res) != 1:
949            return None
950        return getattr(certificate_res[0].getObject().getContent(),'end_level','unknown')
951
952    def get_from_doc_level(self,doc,cached_data={}):
953        "return the students level"
954        if doc is None:
955            return None
956        return getattr(doc,'current_level',None)
957
958    def get_from_doc_mode(self,doc,cached_data={}):
959        "return the students mode"
960        if doc is None:
961            return None
962        cm = getattr(doc,'current_mode',None)
963        return cm
964
965
966    def get_from_doc_session(self,doc,cached_data={}):
967        "return the students current_session"
968        if doc is None:
969            return None
970        return getattr(doc,'current_session',None)
971
972    def get_from_doc_entry_session(self,doc,cached_data={}):
973        "return the students entry_session"
974        if doc is None:
975            return None
976        es = getattr(doc,'entry_session',None)
977        if es is not None and len(es) == 2:
978            return es
979        try:
980            digit = int(doc.jamb_reg_no[0])
981        except:
982            return "-1"
983        if digit < 8:
984            return "0%c" % doc.jamb_reg_no[0]
985        return "9%c" % doc.jamb_reg_no[0]
986
987    def get_from_doc_course(self,doc,cached_data={}):
988        "return the students study_course"
989        if doc is None:
990            return None
991        return getattr(doc,'study_course',None)
992
993    def get_from_doc_name(self,doc,cached_data={}):
994        "return the students name from the personal"
995        if doc is None:
996            return None
997        return "%s %s %s" % (doc.firstname,doc.middlename,doc.lastname)
998
999    def get_from_doc_verdict(self,doc,cached_data={}):
1000        "return the students study_course"
1001        if doc is None:
1002            return None
1003        return getattr(doc,'current_verdict',None)
1004    ###)
1005
1006    def reindexIndex(self, name, REQUEST,pghandler=None): ###(
1007        if isinstance(name, str):
1008            name = (name,)
1009        reindextypes = {}
1010        reindex_special = []
1011        for n in name:
1012            if n in ("review_state","registered_courses"):
1013                reindex_special.append(n)
1014            else:
1015                for pt in self.affected_types.keys():
1016                    if n in self.affected_types[pt]['fields']:
1017                        if reindextypes.has_key(pt):
1018                            reindextypes[pt].append(n)
1019                        else:
1020                            reindextypes[pt]= [n]
1021                        break
1022        cached_data = {}
1023        if set(name).intersection(set(('faculty','department','end_level'))):
1024            cached_data = self.fill_certificates_dict()
1025        students = self.portal_catalog(portal_type="Student")
1026        if hasattr(self,'portal_catalog_real'):
1027            aq_portal = self.portal_catalog_real.evalAdvancedQuery
1028        else:
1029            aq_portal = self.portal_catalog.evalAdvancedQuery
1030        num_objects = len(students)
1031        if pghandler:
1032            pghandler.init('Refreshing catalog: %s' % self.absolute_url(1), num_objects)
1033        noattr = set(('StudentClearance','StudentPersonal')) & set(reindextypes.keys())
1034        #import pdb;pdb.set_trace()
1035        for i in xrange(num_objects):
1036            if pghandler: pghandler.report(i)
1037            student_brain = students[i]
1038            student_object = student_brain.getObject()
1039            # query = Eq('path',student_brain.getPath())
1040            # sub_brains_list = aq_portal(query)
1041            # sub_brains = {}
1042            # for sub_brain in sub_brains_list:
1043            #     sub_brains[sub_brain.portal_type] = sub_brain
1044            # student_path = student_brain.getPath()
1045            data = {}
1046            modified = False
1047            sid = data['id'] = student_brain.getId
1048            if reindex_special and 'review_state' in reindex_special:
1049                modified = True
1050                data['review_state'] = student_brain.review_state
1051            sub_objects = False
1052            for pt in reindextypes.keys():
1053                modified = True
1054                try:
1055                    doc = getattr(student_object,self.affected_types[pt]['id']).getContent()
1056                    #doc = sub_brains[pt].getObject().getContent()
1057                    # path = "%s/%s" % (student_path,self.affected_types[pt]['id'])
1058                    # doc = self.unrestrictedTraverse(path).getContent()
1059                    sub_objects = True
1060                except:
1061                    continue
1062                for field in set(name).intersection(self.affected_types[pt]['fields']):
1063                    if hasattr(self,'get_from_doc_%s' % field):
1064                        data[field] = getattr(self,'get_from_doc_%s' % field)(doc,
1065                                                                              cached_data=cached_data)
1066                    else:
1067                        data[field] = getattr(doc,field)
1068            if not sub_objects and noattr:
1069                import_res = self.returning_import(id = sid)
1070                if not import_res:
1071                    continue
1072                import_record = import_res[0]
1073                data['matric_no'] = import_record.matric_no
1074                data['sex'] = import_record.Sex == 'F'
1075                data['name'] = "%s %s %s" % (import_record.Firstname,
1076                                             import_record.Middlename,
1077                                             import_record.Lastname)
1078                data['jamb_reg_no'] = import_record.Entryregno
1079            #if reindex_special and 'registered_courses' in reindex_special:
1080            #    try:
1081            #        study_course = getattr(student_object,"study_course")
1082            #        level_ids = study_course.objectIds()
1083            #    except:
1084            #        continue
1085            #    if not level_ids:
1086            #        continue
1087            #    modified = True
1088            #    level_ids.sort()
1089            #    course_ids = getattr(study_course,level_ids[-1]).objectIds()
1090            #    courses = []
1091            #    for c in course_ids:
1092            #        if c.endswith('_co'):
1093            #            courses.append(c[:-3])
1094            #        else:
1095            #            courses.append(c)
1096            #    data['registered_courses'] = courses
1097            if modified:
1098                self.modifyRecord(**data)
1099        if pghandler: pghandler.finish()
1100    ###)
1101
1102    def refreshCatalog(self, clear=0, pghandler=None): ###(
1103        """ re-index everything we can find """
1104        students_folder = self.portal_url.getPortalObject().campus.students
1105        if clear:
1106            self._catalog.clear()
1107        students = self.portal_catalog(portal_type="Student")
1108        num_objects = len(students)
1109        cached_data = self.fill_certificates_dict()
1110        if pghandler:
1111            pghandler.init('Refreshing catalog: %s' % self.absolute_url(1), num_objects)
1112        for i in xrange(num_objects):
1113            if pghandler: pghandler.report(i)
1114            student_brain = students[i]
1115            spath = student_brain.getPath()
1116            student_object = student_brain.getObject()
1117            data = {}
1118            sid = data['id'] = student_brain.getId
1119            data['review_state'] = student_brain.review_state
1120            sub_objects = False
1121            for pt in self.affected_types.keys():
1122                modified = True
1123                try:
1124                    doc = getattr(student_object,self.affected_types[pt]['id']).getContent()
1125                    sub_objects = True
1126                except:
1127                    #from pdb import set_trace;set_trace()
1128                    continue
1129                for field in self.affected_types[pt]['fields']:
1130                    if hasattr(self,'get_from_doc_%s' % field):
1131                        data[field] = getattr(self,'get_from_doc_%s' % field)(doc,
1132                                                                              cached_data=cached_data)
1133                    else:
1134                        data[field] = getattr(doc,field,None)
1135            if not sub_objects:
1136                import_res = self.returning_import(id = sid)
1137                if not import_res:
1138                    continue
1139                import_record = import_res[0]
1140                data['matric_no'] = import_record.matric_no
1141                data['sex'] = import_record.Sex == 'F'
1142                data['name'] = "%s %s %s" % (import_record.Firstname,
1143                                             import_record.Middlename,
1144                                             import_record.Lastname)
1145                data['jamb_reg_no'] = import_record.Entryregno
1146            self.addRecord(**data)
1147        if pghandler: pghandler.finish()
1148    ###)
1149
1150    security.declarePrivate('notify_event_listener') ###(
1151    def notify_event_listener(self,event_type,object,infos):
1152        "listen for events"
1153        if not infos.has_key('rpath'):
1154            return
1155        pt = getattr(object,'portal_type',None)
1156        mt = getattr(object,'meta_type',None)
1157        students_catalog = self
1158        data = {}
1159        if pt == 'Student' and\
1160           mt == 'CPS Proxy Folder' and\
1161           event_type.startswith('workflow'):
1162            data['id'] = object.getId()
1163            data['review_state'] = self.portal_workflow.getInfoFor(object,'review_state',None)
1164            students_catalog.modifyRecord(**data)
1165            return
1166        rpl = infos['rpath'].split('/')
1167        if pt == 'Student' and mt == 'CPS Proxy Folder':
1168            student_id = object.id
1169            if event_type == "sys_add_object":
1170                try:
1171                    self.addRecord(id = student_id)
1172                except ValueError:
1173                    pass
1174                return
1175            elif event_type == 'sys_del_object':
1176                self.deleteRecord(student_id)
1177        if pt not in self.affected_types.keys():
1178            return
1179        if event_type not in ('sys_modify_object'):
1180            return
1181        if pt == 'StudentStudyCourse' and mt == 'CPS Proxy Folder':
1182            if not hasattr(self,'_v_certificates'):
1183                self._v_certificates = self.fill_certificates_dict()
1184        if mt == 'CPS Proxy Folder':
1185            return
1186        for field in self.affected_types[pt]['fields']:
1187            if hasattr(self,'get_from_doc_%s' % field):
1188                data[field] = getattr(self,'get_from_doc_%s' % field)(object)
1189            else:
1190                data[field] = getattr(object,field)
1191        data['id'] = rpl[2]
1192        self.modifyRecord(**data)
1193    ###)
1194
1195
1196InitializeClass(StudentsCatalog)
1197
1198###)
1199
class CoursesCatalog(WAeUPTable): ###(
    """Flat catalog of courses, keyed by the course code.

    Besides the fields of the course schema, each record carries the
    faculty and department ids derived from the course's path below
    'academics'.  Kept current by notify_event_listener.
    """
    security = ClassSecurityInfo()

    meta_type = 'WAeUP Courses Catalog'
    name =  "courses_catalog"
    key = "code"
    def __init__(self,name=None):
        # fall back to the class default table name
        if name ==  None:
            name =  self.name
        WAeUPTable.__init__(self, name)

    def manage_catalogReindex(self, REQUEST, RESPONSE, URL1): ###(
        """ clear the catalog, then re-index everything """

        elapse = time.time()
        c_elapse = time.clock()

        pgthreshold = self._getProgressThreshold()
        handler = (pgthreshold > 0) and ZLogHandler(pgthreshold) or None
        self.refreshCatalog(clear=1, pghandler=handler)

        elapse = time.time() - elapse
        c_elapse = time.clock() - c_elapse

        RESPONSE.redirect(
            URL1 +
            '/manage_catalogAdvanced?manage_tabs_message=' +
            urllib.quote('Catalog Updated \n'
                         'Total time: %s\n'
                         'Total CPU time: %s' % (`elapse`, `c_elapse`)))
    ###)

    def reindexIndex(self, name, REQUEST,pghandler=None): ###(
        """Re-index the given field name(s) for every Course object.

        name may be a single field name or a sequence of names; the key,
        faculty and department fields are always refreshed from the path.
        """
        if isinstance(name, str):
            name = (name,)
        courses = self.portal_catalog(portal_type="Course")
        num_objects = len(courses)
        if pghandler:
            pghandler.init('Refreshing catalog: %s' % self.absolute_url(1), num_objects)
        for i in xrange(num_objects):
            if pghandler: pghandler.report(i)
            course_brain = courses[i]
            course_object = course_brain.getObject()
            pl = course_brain.getPath().split('/')
            data = {}
            cid = data[self.key] = course_brain.getId
            # path layout: .../academics/<faculty>/<department>/<course>
            data['faculty'] = pl[-4]
            data['department'] = pl[-3]
            doc = course_object.getContent()
            for field in name:
                if field not in (self.key,'faculty','department'):
                    data[field] = getattr(doc,field)
            self.modifyRecord(**data)
        if pghandler: pghandler.finish()
    ###)

    def refreshCatalog(self, clear=0, pghandler=None): ###(
        """ re-index everything we can find """
        if clear:
            self._catalog.clear()
        courses = self.portal_catalog(portal_type="Course")
        num_objects = len(courses)
        if pghandler:
            pghandler.init('Refreshing catalog: %s' % self.absolute_url(1), num_objects)
        #from pdb import set_trace;set_trace()
        for i in xrange(num_objects):
            if pghandler: pghandler.report(i)
            course_brain = courses[i]
            course_doc = course_brain.getObject().getContent()
            pl = course_brain.getPath().split('/')
            data = {}
            for field in self.schema():
                data[field] = getattr(course_doc,field,None)
            data[self.key] = course_brain.getId
            # faculty/department are the two path segments below 'academics'
            ai = pl.index('academics')
            data['faculty'] = pl[ai +1]
            data['department'] = pl[ai +2]
            if clear:
                self.addRecord(**data)
            else:
                self.modifyRecord(**data)
        if pghandler: pghandler.finish()
    ###)

    security.declarePrivate('notify_event_listener') ###(
    def notify_event_listener(self,event_type,object,infos):
        "listen for events"
        if not infos.has_key('rpath'):
            return
        pt = getattr(object,'portal_type',None)
        mt = getattr(object,'meta_type',None)
        if pt != 'Course':
            return
        data = {}
        rpl = infos['rpath'].split('/')
        if event_type not in ("sys_add_object","sys_modify_object","sys_del_object"):
            return
        course_id = object.getId()
        data[self.key] = course_id
        # additions arrive on the proxy folder: register the bare record,
        # then fill it from the content document if one is available
        if event_type == "sys_add_object" and mt == 'CPS Proxy Folder':
            try:
                self.addRecord(**data)
            except ValueError:
                # record already exists
                return
            course_id = object.getId()
            doc = object.getContent()
            if doc is None:
                return
            for field in self.schema():
                data[field] = getattr(doc,field,None)
            data[self.key] = course_id
            ai = rpl.index('academics')
            data['faculty'] = rpl[ai +1]
            data['department'] = rpl[ai +2]
            self.modifyRecord(**data)
            return
        if event_type == "sys_del_object":
            self.deleteRecord(course_id)
            return
        # modifications are indexed from the real Course object; its id is
        # taken from the parent proxy via acquisition
        if event_type == "sys_modify_object" and mt == 'Course':
            #from pdb import set_trace;set_trace()
            for field in self.schema():
                data[field] = getattr(object,field,None)
            course_id = object.aq_parent.getId()
            data[self.key] = course_id
            ai = rpl.index('academics')
            data['faculty'] = rpl[ai +1]
            data['department'] = rpl[ai +2]
            self.modifyRecord(**data)
    ###)


# Apply the ClassSecurityInfo declarations made above.
InitializeClass(CoursesCatalog)
###)
1334
class CourseResults(WAeUPTable): ###(
    """Catalog of per-student course results.

    Records are keyed by 'student_id|level_id|course_id'.
    """
    security = ClassSecurityInfo()

    meta_type = 'WAeUP Results Catalog'
    name = "course_results"
    key = "key" #student_id + level + course_id

    def __init__(self,name=None):
        if name is None:
            name = self.name
        WAeUPTable.__init__(self, name)
        self._queue = []

    def addMultipleRecords(self, records): ###(
        """Add many result records at once.

        Each record must contain student_id, level_id and course_id, from
        which the unique key is built.  Raises ValueError as soon as a
        record's key is already cataloged.  Returns the key of the last
        record added, or None when records is empty (the original raised
        UnboundLocalError in that case).
        """
        uid = None
        for data in records:
            uid = "%(student_id)s|%(level_id)s|%(course_id)s" % data
            data['%s' % self.key] = uid
            res = self.searchResults({"%s" % self.key : uid})
            if len(res) > 0:
                # len(res) > 0 means the key is already taken, not that
                # there is "more than one" record
                raise ValueError("A record with uid %s already exists" % uid)
            self.catalog_object(dict2ob(data), uid=uid)
        return uid
    ###)

    def deleteResultsHere(self,level_id,student_id): ###(
        """Delete all result records of student_id at level_id."""
        query = Eq('student_id',student_id) & Eq('level_id', level_id)
        course_results = self.course_results.evalAdvancedQuery(query)
        for result in course_results:
            self.deleteRecord(result.key)
    ###)

    def moveResultsHere(self,level,student_id): ###(
        """Move course objects from a study-level folder into this catalog.

        Each course sub-object of level that is not already cataloged for
        the student becomes a result record; a trailing '_co' in the id
        marks a carry-over course.  All sub-objects are deleted from the
        level folder afterwards.
        """
        level_id = level.getId()
        query = Eq('student_id',student_id) & Eq('level_id', level_id)
        course_results = self.course_results.evalAdvancedQuery(query)
        existing_courses = [cr.code for cr in course_results]
        to_delete = []
        for code,obj in level.objectItems():
            to_delete.append(code)
            carry_over = False
            if code.endswith('_co'):
                carry_over = True
                code  = code[:-3]
            if code in existing_courses:
                continue
            course_result_doc = obj.getContent()
            data = {}
            course_id = code
            for field in self.schema():
                data[field] = getattr(course_result_doc,field,'')
            data['key'] = key = "%(student_id)s|%(level_id)s|%(course_id)s" % vars()
            data['student_id'] = student_id
            data['level_id'] = level_id
            session_id = self.getLevelSession(level.getContent(),student_id,level_id)
            data['session_id'] = session_id
            data['code'] = course_id
            data['carry_over'] = carry_over
            self.catalog_object(dict2ob(data), uid=key)
        level.manage_delObjects(to_delete)
    ###)

    def getCourses(self,student_id,level_id): ###(
        """Return result data of student_id at level_id.

        Returns a tuple (total_credits, gpa, carry_overs, normal1,
        normal2, normal3) where gpa is the credit-weighted sum of grade
        weights (not yet divided by total_credits) and normal1/2/3 hold
        first-semester, second-semester and other courses respectively.
        """
        query = Eq('student_id',student_id) & Eq('level_id', level_id)
        course_results = self.course_results.evalAdvancedQuery(query)
        carry_overs = []
        normal1 = []
        normal2 = []
        normal3 = []
        total_credits = 0
        gpa = 0
        for brain in course_results:
            d = {}

            for field in self.schema():
                d[field] = getattr(brain,field,'')

            d['weight'] = ''
            d['grade'] = ''
            d['score'] = ''

            if str(brain.credits).isdigit():
                credits = int(brain.credits)
                total_credits += credits
                score = getattr(brain,'score',0)
                if score and str(score).isdigit() and int(score) > 0:
                    score = int(score)
                    grade,weight = self.getGradesFromScore(score)
                    gpa += weight * credits
                    d['weight'] = weight
                    d['grade'] = grade
                    d['score'] = score
            # core_or_elective: True -> Core, False -> Elective, None -> ''
            d['coe'] = ''
            if brain.core_or_elective:
                d['coe'] = 'Core'
            elif brain.core_or_elective == False:
                d['coe'] = 'Elective'
            code = d['id'] = brain.code
            d['code'] = code
            res = self.courses_catalog.evalAdvancedQuery(Eq('code',code))
            if res:
                course = res[0]
                d['title'] = course.title
                # The courses_catalog contains strings and integers in its semester field.
                # Maybe this can be fixed by reindexing the catalog. The schema of course says 'CPS Int Field'.
                d['semester'] = str(course.semester)
            else:
                d['title'] = "Course has been removed from course list"
                d['semester'] = ''
            if brain.carry_over:
                d['coe'] = 'CO'
                carry_overs.append(d)
            else:
                if d['semester'] == '1':
                    normal1.append(d)
                elif d['semester'] == '2':
                    normal2.append(d)
                else:
                    normal3.append(d)
        carry_overs.sort(cmp=lambda x,y: cmp("%(semester)s%(code)s" % x,
                                             "%(semester)s%(code)s" % y))
        return total_credits,gpa,carry_overs,normal1,normal2,normal3
    ###)

# Apply the ClassSecurityInfo declarations made above.
InitializeClass(CourseResults)
###)
1467
class OnlinePaymentsImport(WAeUPTable): ###(
    """Import table for online payment transactions, keyed by order id."""

    meta_type = 'WAeUP Online Payment Transactions'
    name = "online_payments_import"
    key = "order_id"

    def __init__(self, name=None):
        # Default to the class-level table name when none is supplied.
        if name is None:
            name = self.name
        WAeUPTable.__init__(self, name)


InitializeClass(OnlinePaymentsImport)
###)
1481
class ReturningImport(WAeUPTable): ###(
    """Import table for returning students, keyed by matriculation number."""

    meta_type = 'Returning Import Table'
    name = "returning_import"
    key = "matric_no"

    def __init__(self, name=None):
        # Default to the class-level table name when none is supplied.
        if name is None:
            name = self.name
        WAeUPTable.__init__(self, name)


InitializeClass(ReturningImport)
###)
1495
class ResultsImport(WAeUPTable): ###(
    """Import table for examination results, keyed by a composite key."""

    meta_type = 'Results Import Table'
    name = "results_import"
    key = "key"

    def __init__(self, name=None):
        # Default to the class-level table name when none is supplied.
        if name is None:
            name = self.name
        WAeUPTable.__init__(self, name)


InitializeClass(ResultsImport)

###)
1510
class PaymentsCatalog(WAeUPTable): ###(
    """Flat catalog of payment objects, keyed by order_id and kept in
    sync with Payment content objects via notify_event_listener."""
    security = ClassSecurityInfo()

    meta_type = 'WAeUP Payments Catalog'
    name = "payments_catalog"
    key = "order_id"

    def __init__(self,name=None):
        if name is None:
            name = self.name
        WAeUPTable.__init__(self, name)

    security.declarePrivate('notify_event_listener') ###(
    def notify_event_listener(self,event_type,object,infos):
        "listen for events"
        if not infos.has_key('rpath'):
            return
        pt = getattr(object,'portal_type',None)
        mt = getattr(object,'meta_type',None)
        if pt != 'Payment':
            return
        if mt == 'CPS Proxy Folder':
            return # is handled only for the real object
        if event_type == 'sys_del_object':
            self.deleteRecord(object.order_id)
            return
        # NOTE: the original tested "not in ('sys_modify_object')" -- a
        # substring test on a plain string, not tuple membership.
        if event_type != 'sys_modify_object':
            return
        data = {}
        for field in self.schema():
            data[field] = getattr(object,field,'')
        rpl = infos['rpath'].split('/')
        # presumably .../students/<student_id>/payments/<payment>/<doc>,
        # so the student id is the fourth segment from the end -- TODO confirm
        student_id = rpl[-4]
        data['student_id'] = student_id
        try:
            self.modifyRecord(**data)
        except KeyError:
            # no record for this order_id yet -- fall back to adding one
            try:
                self.addRecord(**data)
            except:
                # best effort: log and carry on, never break the event chain
                logger = logging.getLogger('WAeUPTables.PaymentsCatalog.notify_event_listener')
                logger.info("could not add or modify entry for %(student_id)s with %(order_id)s" % data)
    ###)


# Apply the ClassSecurityInfo declarations made above.
InitializeClass(PaymentsCatalog)

###)
1565
# BBB:
# Backwards-compatibility alias for the historical misspelling of
# AccommodationTable; old pickles/imports may still reference it.
AccomodationTable = AccommodationTable
# Note: See TracBrowser for help on using the repository browser.