source: WAeUP_SRP/trunk/WAeUPTables.py @ 3743

Last change on this file since 3743 was 3743, checked in by Henrik Bettermann, 16 years ago

improve reindex entry_session

  • Property svn:keywords set to Id
File size: 66.6 KB
1#-*- mode: python; mode: fold -*-
2# (C) Copyright 2005 AixtraWare <http://aixtraware.de>
3# Author: Joachim Schmitz <js@aixtraware.de>
4#
5# This program is free software; you can redistribute it and/or modify
6# it under the terms of the GNU General Public License version 2 as published
7# by the Free Software Foundation.
8#
9# This program is distributed in the hope that it will be useful,
10# but WITHOUT ANY WARRANTY; without even the implied warranty of
11# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
12# GNU General Public License for more details.
13#
14# You should have received a copy of the GNU General Public License
15# along with this program; if not, write to the Free Software
16# Foundation, Inc., 59 Temple Place - Suite 330, Boston, MA
17# 02111-1307, USA.
18#
19# $Id: WAeUPTables.py 3743 2008-10-30 07:16:07Z henrik $
20
21from zope.interface import implements
22from Globals import InitializeClass
23from Products.ZCatalog.ZCatalog import ZCatalog
24from Products.ZCatalog.ProgressHandler import ZLogHandler
25from AccessControl import ClassSecurityInfo
26from Products.CMFCore.permissions import ModifyPortalContent
27from Products.CMFCore.utils import getToolByName
28from Products.CMFCore.CatalogTool import CatalogTool
29from Products.CPSSchemas.StorageAdapter import MappingStorageAdapter
30from Products.CPSSchemas.DataStructure import DataStructure
31from Products.CPSSchemas.DataModel import DataModel
32from Products.AdvancedQuery import Eq, Between, Le,In
33import urllib
34import DateTime,time
35import csv,re,os # 'os' added: it is needed by ApplicantsCatalog.new_importCSV below
36import logging
37import Globals
38p_home = Globals.package_home(globals())
39i_home = Globals.INSTANCE_HOME
40
41ADDING_SHEDULED = "adding_sheduled"
42OBJECT_CREATED = "object_created"
43NOT_OCCUPIED = 'not_occupied'
44
45from interfaces import IWAeUPTable
46
47class AttributeHolder(object):
48    pass
49
50def dict2ob(dict):
51    ob = AttributeHolder()
52    for key, value in dict.items():
53        setattr(ob, key, value)
54    return ob
55
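# Illustrative note (added, not original code): dict2ob simply exposes a dict's
# keys as attributes on a throw-away object, which is the shape that
# ZCatalog.catalog_object expects for the records below. Sample data invented:
#
#   rec = dict2ob({'bed': 'A1', 'student': 'X123456'})
#   rec.bed      # -> 'A1'
#   rec.student  # -> 'X123456'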
56class WAeUPTable(ZCatalog): ###(
57
58    implements(IWAeUPTable)
59    security = ClassSecurityInfo()
60    meta_type = None
61
62    def __init__(self,name=None):
63        if name ==  None:
64            name = self.name
65        ZCatalog.__init__(self,name)
66
67    def refreshCatalog(self, clear=0, pghandler=None): ###(
68        """ don't refresh for a normal table """
69
70        if self.REQUEST and self.REQUEST.RESPONSE:
71            self.REQUEST.RESPONSE.redirect(
72              self.REQUEST.URL1 +
73              '/manage_catalogAdvanced?manage_tabs_message=Catalog%20refresh%20not%20implemented')
74
75###)
76
77    def manage_catalogClear(self, REQUEST=None, RESPONSE=None, URL1=None): ###(
78        """ clears the whole enchilada """
79
80        #if REQUEST and RESPONSE:
81        #    RESPONSE.redirect(
82        #      URL1 +
83        #      '/manage_catalogAdvanced?manage_tabs_message=Catalog%20Clearing%20disabled')
84
85        self._catalog.clear()
86        if REQUEST and RESPONSE:
87            RESPONSE.redirect(
88              URL1 +
89              '/manage_catalogAdvanced?manage_tabs_message=Catalog%20cleared')
90
91###)
92
93    def record2dict(self,fields,record): ###(
94        d = {}
95        for key in fields:
96            v = getattr(record, key, None)
97            v_dump = v
98            if key == 'sex':
99                if v == True:
100                    v_dump = 'F'
101                elif v == False:
102                    v_dump = 'M'
103                d[key] = v_dump
104            elif v:
105                if key == 'lga':
106                    v_dump = self.portal_vocabularies.local_gov_areas.get(v)
107                    if not v_dump:
108                        v_dump = v
109                elif key == 'aos':
110                    v_dump = self.portal_vocabularies.aos.get(v)
111                d[key] = v_dump
112            else:
113                d[key] = ''
114        return d
115
116###)
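    # Illustration (added, not original code): record2dict is used by dumpAll to
    # turn a catalog brain into a csv row; the boolean 'sex' index is mapped back
    # to 'F'/'M' and 'lga'/'aos' codes are resolved through the portal
    # vocabularies where possible. Field names and values here are invented:
    #
    #   row = self.record2dict(('id','sex','lga'), brain)
    #   # -> {'id': 'X123456', 'sex': 'F', 'lga': '<vocabulary title or raw code>'}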
117
118    def addRecord(self, **data): ###(
119        # The uid is the value of the table's key field (e.g. "bed" for the accommodation table).
120        uid = data[self.key]
121        res = self.searchResults({"%s" % self.key : uid})
122        if len(res) > 0:
123            raise ValueError("A record with uid %s already exists" % uid)
124        self.catalog_object(dict2ob(data), uid=uid)
125        return uid
126
127###)
128
129    def deleteRecord(self, uid):
130        self.uncatalog_object(uid)
131
132    def getRecordByKey(self,key):
133        if not key:
134            return None
135        res = self.evalAdvancedQuery(Eq(self.key,key))
136        if res:
137            return res[0]
138        return None
139
140    def searchAndSetRecord(self, **data):
141        raise NotImplementedError
142
143    def modifyRecord(self, record=None, **data): ###(
144        #records = self.searchResults(uid=uid)
145        uid = data[self.key]
146        if record is None:
147            records = self.searchResults({"%s" % self.key : uid})
148            if len(records) > 1:
149                # Can not happen, but anyway...
150                raise ValueError("More than one record with uid %s" % uid)
151            if len(records) == 0:
152                raise KeyError("No record for uid %s" % uid)
153            record = records[0]
154        record_data = {}
155        for field in self.schema() + self.indexes():
156            record_data[field] = getattr(record, field)
157        # Add the updated data:
158        record_data.update(data)
159        self.catalog_object(dict2ob(record_data), uid)
160
161###)
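    # Usage sketch (added, not original code): the basic record lifecycle of a
    # WAeUPTable subclass, assuming a table whose key is 'pin'; the values are
    # invented examples.
    #
    #   table.addRecord(pin='APP-1-12345678', student='')
    #   rec = table.getRecordByKey('APP-1-12345678')
    #   table.modifyRecord(pin='APP-1-12345678', student='X123456')
    #   table.deleteRecord('APP-1-12345678')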
162
163    def reindexIndex(self, name, REQUEST,pghandler=None): ###(
164        if isinstance(name, str):
165            name =  (name,)
166        paths = self._catalog.uids.items()
167        i = 0
168        #import pdb;pdb.set_trace()
169        for p,rid in paths:
170            i += 1
171            metadata = self.getMetadataForRID(rid)
172            record_data = {}
173            for field in name:
174                record_data[field] = metadata.get(field)
175            uid = metadata.get(self.key)
176            self.catalog_object(dict2ob(record_data), uid, idxs=name,
177                                update_metadata=0)
178
179###)
180
181    security.declareProtected(ModifyPortalContent,"exportAllRecords") ###(
182    def exportAllRecords(self):
183        "export a WAeUPTable"
184        #import pdb;pdb.set_trace()
185        fields = [field for field in self.schema()]
186        format = ','.join(['"%%(%s)s"' % fn for fn in fields])
187        csv = []
188        csv.append(','.join(['"%s"' % fn for fn in fields]))
189        for uid in self._catalog.uids:
190            records = self.searchResults({"%s" % self.key : uid})
191            if len(records) > 1:
192                # Can not happen, but anyway...
193                raise ValueError("More than one record with uid %s" % uid)
194            if len(records) == 0:
195                raise KeyError("No record for uid %s" % uid)
196            rec = records[0]
197            csv.append(format % rec)
198        current = DateTime.DateTime().strftime("%d-%m-%y_%H_%M_%S")
199        open("%s/import/%s-%s.csv" % (i_home,self.getId(),current),"w+").write('\n'.join(csv))
200
201###)
202
203    security.declareProtected(ModifyPortalContent,"dumpAll")###(
204    def dumpAll(self):
205        """dump all data in the table to a csv"""
206        member = self.portal_membership.getAuthenticatedMember()
207        logger = logging.getLogger('WAeUPTables.WAeUPTable.dumpAll')
208        current = DateTime.DateTime().strftime("%d-%m-%y_%H_%M_%S")
209        export_file = "%s/export/%s_%s.csv" % (i_home,self.__name__,current,)
210        res_list = []
211        lines = []
212        if hasattr(self,"export_keys"):
213            fields = self.export_keys
214        else:
215            fields = []
216            for f in self.schema():
217                fields.append(f)
218        headline = ','.join(fields)
219        out = open(export_file,"wb")
220        out.write(headline +'\n')
221        out.close()
222        out = open(export_file,"a")
223        csv_writer = csv.DictWriter(out,fields,)
224        records = self()
225        nr2export = len(records)
226        logger.info('%s starts dumping, %s records to export' % (member,nr2export))
227        chunk = 2000
228        total = 0
229        start = DateTime.DateTime().timeTime()
230        start_chunk = DateTime.DateTime().timeTime()
231        for record in records:
232            not_all = False
233            d = self.record2dict(fields,record)
234            lines.append(d)
235            total += 1
236            if total and not total % chunk or total == len(records):
237                csv_writer.writerows(lines)
238                anz = len(lines)
239                logger.info("wrote %(anz)d  total written %(total)d" % vars())
240                end_chunk = DateTime.DateTime().timeTime()
241                duration = end_chunk-start_chunk
242                per_record = duration/anz
243                till_now = end_chunk - start
244                average_per_record = till_now/total
245                estimated_end = DateTime.DateTime(start + average_per_record * nr2export)
246                estimated_end = estimated_end.strftime("%H:%M:%S")
247                logger.info('%(duration)4.1f, %(per_record)4.3f,end %(estimated_end)s' % vars())
248                start_chunk = DateTime.DateTime().timeTime()
249                lines = []
250        end = DateTime.DateTime().timeTime()
251        logger.info('total time %6.2f m' % ((end-start)/60))
252        import os
253        filename, extension = os.path.splitext(export_file)
254        from subprocess import call
255        msg = "wrote %(total)d records to %(export_file)s" % vars()
256        #try:
257        #    retcode = call('gzip %s' % (export_file),shell=True)
258        #    if retcode == 0:
259        #        msg = "wrote %(total)d records to %(export_file)s.gz" % vars()
260        #except OSError, e:
261        #    retcode = -99
262        #    logger.info("zip failed with %s" % e)
263        logger.info(msg)
264        args = {'portal_status_message': msg}
265        #url = self.REQUEST.get('URL1') + '?' + urlencode(args)
266        url = self.REQUEST.get('URL2')
267        return self.REQUEST.RESPONSE.redirect(url)
268    ###)
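    # Note on the progress estimate in dumpAll (illustrative numbers, added for
    # clarity): after each chunk the average time per record so far is projected
    # onto the full record count, e.g. 4000 of 20000 records written in 20
    # seconds gives an estimated end of start + (20/4000.0) * 20000
    # = start + 100 seconds.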
269
270    security.declarePrivate("_import_old") ###(
271    def _import_old(self,filename,schema,layout, mode,logger):
272        "import data from csv"
273        import transaction
274        import random
275        pm = self.portal_membership
276        member = pm.getAuthenticatedMember()
277        current = DateTime.DateTime().strftime("%d-%m-%y_%H_%M_%S")
278        import_fn = "%s/import/%s.csv" % (i_home,filename)
279        imported_fn = "%s/import/%s_imported%s.csv" % (i_home,filename,current)
280        not_imported_fn = "%s/import/%s_not_imported%s.csv" % (i_home,filename,current)
281        start = True
282        tr_count = 1
283        total_imported = 0
284        total_not_imported = 0
285        total = 0
286        iname =  "%s" % filename
287        not_imported = []
288        imported = []
289        valid_records = []
290        invalid_records = []
291        d = {}
292        d['mode'] = mode
293        d['imported'] = total_imported
294        d['not_imported'] = total_not_imported
295        d['valid_records'] = valid_records
296        d['invalid_records'] = invalid_records
297        d['import_fn'] = import_fn
298        d['imported_fn'] = imported_fn
299        d['not_imported_fn'] = not_imported_fn
300        if schema is None:
301            em = 'No schema specified'
302            logger.error(em)
303            return d
304        if layout is None:
305            em = 'No layout specified'
306            logger.error(em)
307            return d
308        validators = {}
309        for widget in layout.keys():
310            try:
311                validators[widget] = layout[widget].validate
312            except AttributeError:
313                logger.info('%s has no validate attribute' % widget)
314                return d
315        # if mode == 'edit':
316        #     importer = self.importEdit
317        # elif mode == 'add':
318        #     importer = self.importAdd
319        # else:
320        #     importer = None
321        try:
322            items = csv.DictReader(open(import_fn,"rb"),
323                                   dialect="excel",
324                                   skipinitialspace=True)
325        except:
326            em = 'Error reading %s.csv' % filename
327            logger.error(em)
328            return d
329        #import pdb;pdb.set_trace()
330        for item in items:
331            if start:
332                start = False
333                logger.info('%s starts import from %s.csv' % (member,filename))
334                #import_keys = [k for k in item.keys() if not k.startswith('ignore')]
335                attrs = csv.reader(open("%s/import/%s.csv" % (i_home,filename),"rb"),
336                                   dialect="excel",
337                                   skipinitialspace=True).next()
338                import_keys = [k for k in attrs if not (k.startswith('ignore') or k.isupper())]
339                diff2schema = set(import_keys).difference(set(schema.keys()))
340                diff2layout = set(import_keys).difference(set(layout.keys()))
341                if diff2layout:
342                    em = "not ignorable key(s) %s found in heading" % diff2layout
343                    logger.info(em)
344                    return d
345                s = ','.join(['"%s"' % fn for fn in import_keys])
346                open(not_imported_fn,"a").write(s + ',"Error"'+ '\n')
347                #s = '"id",' + s
348                open(imported_fn,"a").write(s + '\n')
349                format = ','.join(['"%%(%s)s"' % fn for fn in import_keys])
350                format_error = format + ',"%(Error)s"'
351                #format = '"%(id)s",'+ format
352                adapters = [MappingStorageAdapter(schema, item)]
353            dm = DataModel(item, adapters,context=self)
354            ds = DataStructure(data=item,datamodel=dm)
355            error_string = ""
356            #import pdb;pdb.set_trace()
357            for k in import_keys:
358                if not validators[k](ds,mode=mode):
359                    error_string += " %s : %s" % (k,ds.getError(k))
360            # if not error_string and importer:
361            #     item.update(dm)
362            #     item['id'],error = importer(item)
363            #     if error:
364            #         error_string += error
365            if error_string:
366                item['Error'] = error_string
367                invalid_records.append(dm)
368                not_imported.append(format_error % item)
369                total_not_imported += 1
370            else:
371                em = format % item
372                valid_records.append(dm)
373                imported.append(em)
374                #logger.info("%(total_imported)d of %(total)d %(em)s" % vars())
375                tr_count += 1
376                total_imported += 1
377            total += 1
378        if len(imported) > 0:
379            open(imported_fn,"a").write('\n'.join(imported))
380        if len(not_imported) > 0:
381            open(not_imported_fn,"a").write('\n'.join(not_imported))
382        #em = "Imported: %d, not imported: %d of total %d" % (total_imported,total_not_imported,total)
383        d['imported'] = total_imported
384        d['not_imported'] = total_not_imported
385        d['valid_records'] = valid_records
386        d['invalid_records'] = invalid_records
387        d['imported_fn'] = imported_fn
388        d['not_imported_fn'] = not_imported_fn
389        #logger.info(em)
390        return d
391    ###)
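    # Shape of the dict returned by _import_old (summarised from the code above;
    # the counts and paths are invented examples):
    #
    #   {'mode': 'add', 'imported': 120, 'not_imported': 3,
    #    'valid_records': [<DataModel>, ...], 'invalid_records': [<DataModel>, ...],
    #    'import_fn': '<i_home>/import/foo.csv',
    #    'imported_fn': '<i_home>/import/foo_imported<timestamp>.csv',
    #    'not_imported_fn': '<i_home>/import/foo_not_imported<timestamp>.csv'}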
392
393    security.declarePrivate("_import_new") ###(
394    def _import_new(self,csv_items,schema, layout, mode,logger):
395        "import data from a csv.DictReader instance"
396        start = True
397        tr_count = 1
398        total_imported = 0
399        total_not_imported = 0
400        total = 0
401        #iname =  "%s" % filename # disabled: 'filename' is not passed to this method
402        not_imported = []
403        valid_records = []
404        invalid_records = []
405        duplicate_records = []
406        d = {}
407        d['mode'] = mode
408        d['valid_records'] = valid_records
409        d['invalid_records'] = invalid_records
410        d['duplicate_records'] = duplicate_records
411        # d['import_fn'] = import_fn
412        # d['imported_fn'] = imported_fn
413        # d['not_imported_fn'] = not_imported_fn
414        validators = {}
415        for widget in layout.keys():
416            try:
417                validators[widget] = layout[widget].validate
418            except AttributeError:
419                logger.info('%s has no validate attribute' % widget)
420                return d
421        for item in csv_items:
422            if start:
423                start = False
424                logger.info('start of import')
425                #import_keys = [k for k in item.keys() if not k.startswith('ignore')]
426                attrs = csv_items.fieldnames # read the header from the DictReader itself
427                import_keys = [k for k in attrs if not (k.startswith('ignore') or k.isupper())]
428                diff2schema = set(import_keys).difference(set(schema.keys()))
429                diff2layout = set(import_keys).difference(set(layout.keys()))
430                if diff2layout:
431                    em = "not ignorable key(s) %s found in heading" % diff2layout
432                    logger.info(em)
433                    return d
434                # s = ','.join(['"%s"' % fn for fn in import_keys])
435                # open(not_imported_fn,"a").write(s + ',"Error"'+ '\n')
436                # #s = '"id",' + s
437                # open(imported_fn,"a").write(s + '\n')
438                # format = ','.join(['"%%(%s)s"' % fn for fn in import_keys])
439                # format_error = format + ',"%(Error)s"'
440                # #format = '"%(id)s",'+ format
441                adapters = [MappingStorageAdapter(schema, item)]
442            dm = DataModel(item, adapters,context=self)
443            ds = DataStructure(data=item,datamodel=dm)
444            error_string = ""
445            for k in import_keys:
446                if not validators[k](ds,mode=mode):
447                    error_string += " %s : %s" % (k,ds.getError(k))
448            if error_string:
449                item['Error'] = error_string
450                #invalid_records.append(dm)
451                invalid_records.append(item)
452                total_not_imported += 1
453            else:
454                #em = format % item # disabled: 'format' is not built in this method
455                valid_records.append(dm)
456                #logger.info("%(total_imported)d of %(total)d %(em)s" % vars())
457                tr_count += 1
458                total_imported += 1
459            total += 1
460        # if len(imported) > 0:
461        #     open(imported_fn,"a").write('\n'.join(imported))
462        # if len(not_imported) > 0:
463        #     open(not_imported_fn,"a").write('\n'.join(not_imported))
464        #em = "Imported: %d, not imported: %d of total %d" % (total_imported,total_not_imported,total)
465        d['imported'] = total_imported
466        d['not_imported'] = total_not_imported
467        d['valid_records'] = valid_records
468        d['invalid_records'] = invalid_records
469        return d
470    ###)
471
472    security.declarePublic("missingValue")###(
473    def missingValue(self):
474        from Missing import MV
475        return MV
476    ###)
477###)
478
479class AccommodationTable(WAeUPTable): ###(
480
481    meta_type = 'WAeUP Accommodation Tool'
482    name = "portal_accommodation"
483    key = "bed"
484    not_occupied = NOT_OCCUPIED
485    def __init__(self,name=None):
486        if name ==  None:
487            name = self.name
488        WAeUPTable.__init__(self, name)
489
490    def searchAndReserveBed(self, student_id,bed_type): ###(
491        logger = logging.getLogger('WAeUPTables.AccommodationTable.searchAndReserveBed')
492        records = self.evalAdvancedQuery(Eq('student',student_id))
493        if len(records) == 1:
494            #return -1,"Student with Id %s already booked bed %s." % (student_id,records[0].bed)
495            logger.info('%s found (reserved) bed %s' % (student_id,records[0].bed))
496            return -1,records[0].bed
497        elif len(records) > 1:
498            logger.info('%s found more than one (reserved) bed' % (student_id))
499            return -3,'more than one bed'
500        query = Eq('bed_type',bed_type) & Eq('student',NOT_OCCUPIED)
501        records = self.evalAdvancedQuery(query,sortSpecs=('sort_id','bed'))
502        if len(records) == 0:
503            logger.info('no bed %s available for %s' % (bed_type,student_id))
504            return -2,"no bed"
505        rec = records[0]
506        self.modifyRecord(bed=rec.bed,student=student_id)
507        logger.info('%s booked bed %s' % (student_id,rec.bed))
508        return 1,rec.bed
509    ###)
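    # Usage sketch (added, not original code): how searchAndReserveBed reports
    # its outcome; the student id and bed type are invented examples.
    #
    #   status, bed = context.portal_accommodation.searchAndReserveBed(
    #       'X123456', 're_male_normal')
    #   # status  1: a free bed was found and booked, 'bed' is its id
    #   # status -1: the student already holds a bed, 'bed' is that bed
    #   # status -2: no free bed of the requested type ('no bed')
    #   # status -3: more than one bed is already reserved for the student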
510
511
512InitializeClass(AccommodationTable)
513
514###)
515
516class PinTable(WAeUPTable): ###(
517    from ZODB.POSException import ConflictError
518    security = ClassSecurityInfo()
519    meta_type = 'WAeUP Pin Tool'
520    name = "portal_pins"
521    key = 'pin'
522
523    def __init__(self,name=None):
524        if name ==  None:
525            name = self.name
526        WAeUPTable.__init__(self, name)
527
528    security.declareProtected(ModifyPortalContent,"dumpAll")###(
529    def dumpAll(self,include_unused=None):
530        """dump all data in the table to a csv"""
531        member = self.portal_membership.getAuthenticatedMember()
532        logger = logging.getLogger('WAeUPTables.PinTable.dumpAll')
533        current = DateTime.DateTime().strftime("%d-%m-%y_%H_%M_%S")
534        export_file = "%s/export/%s_%s.csv" % (i_home,self.__name__,current,)
535        res_list = []
536        lines = []
537        if hasattr(self,"export_keys"):
538            fields = self.export_keys
539        else:
540            fields = []
541            for f in self.schema():
542                fields.append(f)
543        headline = ','.join(fields)
544        out = open(export_file,"wb")
545        out.write(headline +'\n')
546        out.close()
547        out = open(export_file,"a")
548        csv_writer = csv.DictWriter(out,fields,)
549        if include_unused is not None and str(member) not in ('admin','joachim'):
550            logger.info('%s tries to dump pintable with unused pins' % (member))
551            return
552        if include_unused is not None:
553            records = self()
554        else:
555            records = self.evalAdvancedQuery(~Eq('student',''))
556        nr2export = len(records)
557        logger.info('%s starts dumping, %s records to export' % (member,nr2export))
558        chunk = 2000
559        total = 0
560        start = DateTime.DateTime().timeTime()
561        start_chunk = DateTime.DateTime().timeTime()
562        for record in records:
563            not_all = False
564            d = self.record2dict(fields,record)
565            lines.append(d)
566            total += 1
567            if total and not total % chunk or total == len(records):
568                csv_writer.writerows(lines)
569                anz = len(lines)
570                logger.info("wrote %(anz)d  total written %(total)d" % vars())
571                end_chunk = DateTime.DateTime().timeTime()
572                duration = end_chunk-start_chunk
573                per_record = duration/anz
574                till_now = end_chunk - start
575                average_per_record = till_now/total
576                estimated_end = DateTime.DateTime(start + average_per_record * nr2export)
577                estimated_end = estimated_end.strftime("%H:%M:%S")
578                logger.info('%(duration)4.1f, %(per_record)4.3f,end %(estimated_end)s' % vars())
579                start_chunk = DateTime.DateTime().timeTime()
580                lines = []
581        end = DateTime.DateTime().timeTime()
582        logger.info('total time %6.2f m' % ((end-start)/60))
583        import os
584        filename, extension = os.path.splitext(export_file)
585        from subprocess import call
586        msg = "wrote %(total)d records to %(export_file)s" % vars()
587        #try:
588        #    retcode = call('gzip %s' % (export_file),shell=True)
589        #    if retcode == 0:
590        #        msg = "wrote %(total)d records to %(export_file)s.gz" % vars()
591        #except OSError, e:
592        #    retcode = -99
593        #    logger.info("zip failed with %s" % e)
594        logger.info(msg)
595        args = {'portal_status_message': msg}
596        #url = self.REQUEST.get('URL1') + '?' + urlencode(args)
597        url = self.REQUEST.get('URL2')
598        return self.REQUEST.RESPONSE.redirect(url)
599    ###)
600
601
602
603    def searchAndSetRecord(self, uid, student_id,prefix):
604
605        # The following line must be activated after resetting the
606        # the portal_pins table. This is to avoid duplicate entries
607        # and disable duplicate payments.
608
609        #student_id = student_id.upper()
610
611        #records = self.searchResults(student = student_id)
612        #if len(records) > 0 and prefix in ('CLR','APP'):
613        #    for r in records:
614        #        if r.pin != uid and r.prefix_batch.startswith(prefix):
615        #            return -2
616        records = self.searchResults({"%s" % self.key : uid})
617        if len(records) > 1:
618            # Can not happen, but anyway...
619            raise ValueError("More than one record with uid %s" % uid)
620        if len(records) == 0:
621            return -1,None
622        record = records[0]
623        if record.student == "":
624            record_data = {}
625            for field in self.schema() + self.indexes():
626                record_data[field] = getattr(record, field)
627            # Add the updated data:
628            record_data['student'] = student_id
629            try:
630                self.catalog_object(dict2ob(record_data), uid)
631                return 1,record
632            except ConflictError:
633                return 2,record
634        if record.student.upper() != student_id.upper():
635            return 0,record
636        if record.student.upper() == student_id.upper():
637            return 2,record
638        return -3,record
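    # Usage sketch (added, not original code): return codes of
    # searchAndSetRecord; pin and student id are invented examples.
    #
    #   status, record = context.portal_pins.searchAndSetRecord(
    #       'APP-1-12345678', 'X123456', 'APP')
    #   # status -1: no record for this pin
    #   # status  0: pin is already bound to a different student
    #   # status  1: pin was unused and is now bound to the student
    #   # status  2: pin was already bound to this student (or a ZODB
    #   #            ConflictError occurred while writing)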
639InitializeClass(PinTable)
640###)
641
642class PumeResultsTable(WAeUPTable): ###(
643
644    meta_type = 'WAeUP PumeResults Tool'
645    name = "portal_pumeresults"
646    key = "jamb_reg_no"
647    def __init__(self,name=None):
648        if name ==  None:
649            name = self.name
650        WAeUPTable.__init__(self, name)
651
652
653InitializeClass(PumeResultsTable)
654
655###)
656
657class ApplicantsCatalog(WAeUPTable): ###(
658
659    meta_type = 'WAeUP Applicants Catalog'
660    name = "applicants_catalog"
661    key = "reg_no"
662    security = ClassSecurityInfo()
663    #export_keys = (
664    #               "reg_no",
665    #               "status",
666    #               "lastname",
667    #               "sex",
668    #               "date_of_birth",
669    #               "lga",
670    #               "email",
671    #               "phone",
672    #               "passport",
673    #               "entry_mode",
674    #               "pin",
675    #               "screening_type",
676    #               "registration_date",
677    #               "testdate",
678    #               "application_date",
679    #               "screening_date",
680    #               "faculty",
681    #               "department",
682    #               "course1",
683    #               "course2",
684    #               "course3",
685    #               "eng_score",
686    #               "subj1",
687    #               "subj1score",
688    #               "subj2",
689    #               "subj2score",
690    #               "subj3",
691    #               "subj3score",
692    #               "aggregate",
693    #               "course_admitted",
694    #               )
695
696    def __init__(self,name=None):
697        if name ==  None:
698            name = self.name
699        WAeUPTable.__init__(self, name)
700
701    security.declareProtected(ModifyPortalContent,"new_importCSV")###(
702    def new_importCSV(self,filename="JAMB_data",
703                  schema_id="application",
704                  layout_id="import_application",
705                  mode='add'):
706        """ import JAMB data """
707        current = DateTime.DateTime().strftime("%d-%m-%y_%H_%M_%S")
708        pm = self.portal_membership
709        member = pm.getAuthenticatedMember()
710        logger = logging.getLogger('WAeUPTables.ApplicantsCatalog.importCSV')
711        lock_fn = "%s/import/%s_import_lock" % (i_home,filename)
712        import_fn = "%s/import/%s.csv" % (i_home,filename)
713        if mode not in ('add','edit'):
714            logger.info("invalid mode: %s" % mode)
715        if os.path.exists(lock_fn):
716            logger.info("import of %(import_fn)s already in progress" % vars())
717            return
718        lock_file = open(lock_fn,"w")
719        lock_file.write("%(current)s \n" % vars())
720        lock_file.close()
721        invalid_fn = "%s/import/%s_not_imported%s.csv" % (i_home,filename,current)
722        duplicate_fn = "%s/import/%s_duplicate%s.csv" % (i_home,filename,current)
723        stool = getToolByName(self, 'portal_schemas')
724        ltool = getToolByName(self, 'portal_layouts')
725        schema = stool._getOb(schema_id)
726        if schema is None:
727            em = 'No such schema %s' % schema_id
728            logger.error(em)
729            return
730        for postfix in ('_import',''):
731            layout_name = "%(layout_id)s%(postfix)s" % vars()
732            if hasattr(ltool,layout_name):
733                break
734        layout = ltool._getOb(layout_name)
735        if layout is None:
736            em = 'No such layout %s' % layout_id
737            logger.error(em)
738            return
739        try:
740            csv_file = csv.DictReader(open(import_fn,"rb"))
741        except:
742            em = 'Error reading %s.csv' % filename
743            logger.error(em)
744            return
745        d = self._import_new(csv_file,schema,layout,mode,logger)
746        imported = []
747        edited = []
748        duplicates = []
749        not_found = []
750        if len(d['valid_records']) > 0:
751            for record in d['valid_records']:
752                #import pdb;pdb.set_trace()
753                if mode == "add":
754                    try:
755                        self.addRecord(**dict(record.items()))
756                        imported.append(dict(record.items()))
757                        logger.info("added %s" % record.items())
758                    except ValueError:
759                        duplicates.append(dict(record.items()))
760                        logger.info("duplicate %s" % record.items())
761                elif mode == "edit":
762                    try:
763                        self.modifyRecord(**dict(record.items()))
764                        edited.append(dict(record.items()))
765                        logger.info("edited %s" % record.items())
766                    except KeyError:
767                        not_found.append(dict(record.items()))
768                        logger.info("not found %s" % record.items())
769        invalid = d['invalid_records']
770        for itype in ("imported","edited","not_found","duplicates","invalid"):
771            outlist = locals()[itype]
772            if len(outlist):
773                header = {}
774                for k in outlist[0].keys():
775                    header[k] = k
776                outlist.insert(0,header) # prepend a header row instead of overwriting the first record
777                out_fn = "%s/import/%s_%s%s.csv" % (i_home,filename,itype,current) # path is a guess; the original had a "file_name_%s" placeholder
778                csv.DictWriter(open(out_fn,'w'),outlist[0].keys()).writerows(outlist)
779                logger.info("wrote %d %s records to %s" % (len(outlist)-1,itype,out_fn))
780###)
781
782    security.declareProtected(ModifyPortalContent,"importCSV")###(
783    def importCSV(self,filename="JAMB_data",
784                  schema_id="application",
785                  layout_id="application_pce",
786                  mode='add'):
787        """ import JAMB data """
788        logger = logging.getLogger('WAeUPTables.ApplicantsCatalog.importCSV') # define the logger before it is first used
789        stool = getToolByName(self, 'portal_schemas')
790        ltool = getToolByName(self, 'portal_layouts')
791        schema = stool._getOb(schema_id)
792        if schema is None:
793            em = 'No such schema %s' % schema_id
794            logger.error(em)
795            return
796        layout = ltool._getOb(layout_id)
797        if layout is None:
798            em = 'No such layout %s' % layout_id
799            logger.error(em)
800            return
801        d = self._import_old(filename,schema,layout,mode,logger)
802        if len(d['valid_records']) > 0:
803            for record in d['valid_records']:
804                #import pdb;pdb.set_trace()
805                if mode == "add":
806                    self.addRecord(**dict(record.items()))
807                    logger.info("added %s" % record.items())
808                elif mode == "edit":
809                    self.modifyRecord(**dict(record.items()))
810                    logger.info("edited %s" % record.items())
811                else:
812                    logger.info("invalid mode: %s" % mode)
813        logger.info("%(mode)sed %(imported)d records, invalid written to %(not_imported_fn)s" % d)
814    ###)
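    # Usage sketch (added, not original code): importCSV expects the csv file in
    # <instance home>/import/ and is typically called like this (the argument
    # values shown are the method defaults):
    #
    #   context.applicants_catalog.importCSV(filename="JAMB_data",
    #                                        schema_id="application",
    #                                        layout_id="application_pce",
    #                                        mode="add")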
815
816InitializeClass(ApplicantsCatalog)
817
818###)
819
820class StudentsCatalog(WAeUPTable): ###(
821    security = ClassSecurityInfo()
822
823    meta_type = 'WAeUP Students Catalog'
824    name = "students_catalog"
825    key = "id"
826    affected_types = {   ###(
827                      'StudentApplication':
828                      {'id': 'application',
829                       'fields':
830                       ('jamb_reg_no',
831                        'entry_mode',
832                        #'entry_level',
833                        'entry_session',
834                       )
835                      },
836                      'StudentClearance':
837                      {'id': 'clearance',
838                       'fields':
839                       ('matric_no',
840                        'lga',
841                       )
842                      },
843                      'StudentPersonal':
844                      {'id': 'personal',
845                       'fields':
846                       ('name',
847                        'sex',
848                        'perm_address',
849                        'email',
850                        'phone',
851                       )
852                      },
853                      'StudentStudyCourse':
854                      {'id': 'study_course',
855                       'fields':
856                       ('course', # study_course
857                        'faculty', # from certificate
858                        'department', # from certificate
859                        'end_level', # from certificate
860                        'level', # current_level
861                        'mode',  # current_mode
862                        'session', # current_session
863                        'verdict', # current_verdict
864                       )
865                      },
866                     }
867    ###)
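    # Added note: the mapping above drives reindexIndex, refreshCatalog and
    # notify_event_listener below; e.g. reindexing 'entry_session' only needs
    # the StudentApplication sub-object, while 'faculty', 'department' and
    # 'end_level' are derived from the certificate referenced by
    # StudentStudyCourse (see fill_certificates_dict and the get_from_doc_*
    # helpers).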
868
869    def __init__(self,name=None):
870        if name ==  None:
871            name = self.name
872        WAeUPTable.__init__(self, name)
873        return
874
875    def manage_catalogClear(self, REQUEST=None, RESPONSE=None, URL1=None):
876        """ clears the whole enchilada """
877        self._catalog.clear()
878
879        if REQUEST and RESPONSE:
880            RESPONSE.redirect(
881              URL1 +
882              '/manage_catalogAdvanced?manage_tabs_message=Catalog%20Cleared')
883
884    def manage_catalogReindex(self, REQUEST, RESPONSE, URL1): ###(
885        """ clear the catalog, then re-index everything """
886
887        elapse = time.time()
888        c_elapse = time.clock()
889
890        pgthreshold = self._getProgressThreshold()
891        handler = (pgthreshold > 0) and ZLogHandler(pgthreshold) or None
892        self.refreshCatalog(clear=1, pghandler=handler)
893
894        elapse = time.time() - elapse
895        c_elapse = time.clock() - c_elapse
896
897        RESPONSE.redirect(
898            URL1 +
899            '/manage_catalogAdvanced?manage_tabs_message=' +
900            urllib.quote('Catalog Updated \n'
901                         'Total time: %s\n'
902                         'Total CPU time: %s' % (`elapse`, `c_elapse`)))
903    ###)
904
905    def fill_certificates_dict(self): ###(
906        "return certificate data in  dict"
907        certificates_brains = self.portal_catalog(portal_type ='Certificate')
908        d = {}
909        for cb in certificates_brains:
910            certificate_doc = cb.getObject().getContent()
911            cb_path = cb.getPath().split('/')
912            ld = {}
913            ld['faculty'] = cb_path[-4]
914            ld['department'] = cb_path[-3]
915            ld['end_level'] = getattr(certificate_doc,'end_level','999')
916            ld['study_mode'] = getattr(certificate_doc,'study_mode','')
917            d[cb.getId] = ld
918        return d
919    ###)
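    # Shape of the dict returned by fill_certificates_dict (certificate code and
    # values are invented examples):
    #
    #   {'BSCCS': {'faculty': 'science', 'department': 'computer_science',
    #              'end_level': '500', 'study_mode': 'ug_ft'}, ...}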
920
921    def get_from_doc_department(self,doc,cached_data={}): ###(
922        "return the students department"
923        if doc is None:
924            return None
925        if hasattr(self,"_v_certificates") and self._v_certificates.has_key(doc.study_course):
926            return self._v_certificates[doc.study_course]['department']
927        certificate_res = self.portal_catalog(id = doc.study_course)
928        if len(certificate_res) != 1:
929            return None
930        return certificate_res[0].getPath().split('/')[-3]
931
932    def get_from_doc_faculty(self,doc,cached_data={}):
933        "return the students faculty"
934        if doc is None:
935            return None
936        if hasattr(self,"_v_certificates") and self._v_certificates.has_key(doc.study_course):
937            return self._v_certificates[doc.study_course]['faculty']
938        certificate_res = self.portal_catalog(id = doc.study_course)
939        if len(certificate_res) != 1:
940            return None
941        return certificate_res[0].getPath().split('/')[-4]
942
943    def get_from_doc_end_level(self,doc,cached_data={}):
944        "return the students end_level"
945        if doc is None:
946            return None
947        if hasattr(self,"_v_certificates") and self._v_certificates.has_key(doc.study_course):
948            return self._v_certificates[doc.study_course]['end_level']
949        certificate_res = self.portal_catalog(id = doc.study_course)
950        if len(certificate_res) != 1:
951            return None
952        return getattr(certificate_res[0].getObject().getContent(),'end_level','unknown')
953
954    def get_from_doc_level(self,doc,cached_data={}):
955        "return the students level"
956        if doc is None:
957            return None
958        return getattr(doc,'current_level',None)
959
960    def get_from_doc_mode(self,doc,cached_data={}):
961        "return the students mode"
962        if doc is None:
963            return None
964        cm = getattr(doc,'current_mode',None)
965        return cm
966
967
968    def get_from_doc_session(self,doc,cached_data={}):
969        "return the students current_session"
970        if doc is None:
971            return None
972        return getattr(doc,'current_session',None)
973
974    def get_from_doc_entry_session(self,doc,cached_data={}):
975        "return the students entry_session"
976        if doc is None:
977            return None
978        es = getattr(doc,'entry_session',None)
979        if es is not None and len(es) == 2:
980            return es
981        elif es is not None and len(es) == 9:
982            return es[2:4]
983        try:
984            digit = int(doc.jamb_reg_no[0])
985        except:
986            return "-1"
987        if digit < 8:
988            return "0%c" % doc.jamb_reg_no[0]
989        return "9%c" % doc.jamb_reg_no[0]
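        # Added worked example of the fallback above (reg numbers invented): a
        # jamb_reg_no starting with '6' yields entry_session '06', one starting
        # with '9' yields '99'; first digits 0-7 are read as 200x entry
        # sessions, 8 and 9 as 199x.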
990
991    def get_from_doc_course(self,doc,cached_data={}):
992        "return the students study_course"
993        if doc is None:
994            return None
995        return getattr(doc,'study_course',None)
996
997    def get_from_doc_name(self,doc,cached_data={}):
998        "return the students name from the personal"
999        if doc is None:
1000            return None
1001        return "%s %s %s" % (doc.firstname,doc.middlename,doc.lastname)
1002
1003    def get_from_doc_verdict(self,doc,cached_data={}):
1004        "return the students study_course"
1005        if doc is None:
1006            return None
1007        return getattr(doc,'current_verdict',None)
1008    ###)
1009
1010    def reindexIndex(self, name, REQUEST,pghandler=None): ###(
1011        if isinstance(name, str):
1012            name = (name,)
1013        reindextypes = {}
1014        reindex_special = []
1015        for n in name:
1016            if n in ("review_state","registered_courses"):
1017                reindex_special.append(n)
1018            else:
1019                for pt in self.affected_types.keys():
1020                    if n in self.affected_types[pt]['fields']:
1021                        if reindextypes.has_key(pt):
1022                            reindextypes[pt].append(n)
1023                        else:
1024                            reindextypes[pt]= [n]
1025                        break
1026        cached_data = {}
1027        if set(name).intersection(set(('faculty','department','end_level'))):
1028            cached_data = self.fill_certificates_dict()
1029        students = self.portal_catalog(portal_type="Student")
1030        if hasattr(self,'portal_catalog_real'):
1031            aq_portal = self.portal_catalog_real.evalAdvancedQuery
1032        else:
1033            aq_portal = self.portal_catalog.evalAdvancedQuery
1034        num_objects = len(students)
1035        if pghandler:
1036            pghandler.init('Refreshing catalog: %s' % self.absolute_url(1), num_objects)
1037        noattr = set(('StudentClearance','StudentPersonal')) & set(reindextypes.keys())
1038        #import pdb;pdb.set_trace()
1039        for i in xrange(num_objects):
1040            if pghandler: pghandler.report(i)
1041            student_brain = students[i]
1042            student_object = student_brain.getObject()
1043            # query = Eq('path',student_brain.getPath())
1044            # sub_brains_list = aq_portal(query)
1045            # sub_brains = {}
1046            # for sub_brain in sub_brains_list:
1047            #     sub_brains[sub_brain.portal_type] = sub_brain
1048            # student_path = student_brain.getPath()
1049            data = {}
1050            modified = False
1051            sid = data['id'] = student_brain.getId
1052            if reindex_special and 'review_state' in reindex_special:
1053                modified = True
1054                data['review_state'] = student_object.portal_workflow.getInfoFor(student_object,'review_state',None)
1055            sub_objects = False
1056            for pt in reindextypes.keys():
1057                modified = True
1058                try:
1059                    doc = getattr(student_object,self.affected_types[pt]['id']).getContent()
1060                    #doc = sub_brains[pt].getObject().getContent()
1061                    # path = "%s/%s" % (student_path,self.affected_types[pt]['id'])
1062                    # doc = self.unrestrictedTraverse(path).getContent()
1063                    sub_objects = True
1064                except:
1065                    continue
1066                for field in set(name).intersection(self.affected_types[pt]['fields']):
1067                    if hasattr(self,'get_from_doc_%s' % field):
1068                        data[field] = getattr(self,'get_from_doc_%s' % field)(doc,
1069                                                                              cached_data=cached_data)
1070                    else:
1071                        data[field] = getattr(doc,field)
1072            if not sub_objects and noattr:
1073                import_res = self.returning_import(id = sid)
1074                if not import_res:
1075                    continue
1076                import_record = import_res[0]
1077                data['matric_no'] = import_record.matric_no
1078                data['sex'] = import_record.Sex == 'F'
1079                data['name'] = "%s %s %s" % (import_record.Firstname,
1080                                             import_record.Middlename,
1081                                             import_record.Lastname)
1082                data['jamb_reg_no'] = import_record.Entryregno
1083            #if reindex_special and 'registered_courses' in reindex_special:
1084            #    try:
1085            #        study_course = getattr(student_object,"study_course")
1086            #        level_ids = study_course.objectIds()
1087            #    except:
1088            #        continue
1089            #    if not level_ids:
1090            #        continue
1091            #    modified = True
1092            #    level_ids.sort()
1093            #    course_ids = getattr(study_course,level_ids[-1]).objectIds()
1094            #    courses = []
1095            #    for c in course_ids:
1096            #        if c.endswith('_co'):
1097            #            courses.append(c[:-3])
1098            #        else:
1099            #            courses.append(c)
1100            #    data['registered_courses'] = courses
1101            if modified:
1102                self.modifyRecord(**data)
1103        if pghandler: pghandler.finish()
1104    ###)
1105
1106    def refreshCatalog(self, clear=0, pghandler=None): ###(
1107        """ re-index everything we can find """
1108        students_folder = self.portal_url.getPortalObject().campus.students
1109        if clear:
1110            self._catalog.clear()
1111        students = self.portal_catalog(portal_type="Student")
1112        num_objects = len(students)
1113        cached_data = self.fill_certificates_dict()
1114        if pghandler:
1115            pghandler.init('Refreshing catalog: %s' % self.absolute_url(1), num_objects)
1116        for i in xrange(num_objects):
1117            if pghandler: pghandler.report(i)
1118            student_brain = students[i]
1119            spath = student_brain.getPath()
1120            student_object = student_brain.getObject()
1121            data = {}
1122            sid = data['id'] = student_brain.getId
1123            #data['review_state'] = student_brain.review_state
1124            data['review_state'] = student_object.portal_workflow.getInfoFor(student_object,'review_state',None)
1125            sub_objects = False
1126            for pt in self.affected_types.keys():
1127                modified = True
1128                try:
1129                    doc = getattr(student_object,self.affected_types[pt]['id']).getContent()
1130                    sub_objects = True
1131                except:
1132                    #from pdb import set_trace;set_trace()
1133                    continue
1134                for field in self.affected_types[pt]['fields']:
1135                    if hasattr(self,'get_from_doc_%s' % field):
1136                        data[field] = getattr(self,'get_from_doc_%s' % field)(doc,
1137                                                                              cached_data=cached_data)
1138                    else:
1139                        data[field] = getattr(doc,field,None)
1140            if not sub_objects:
1141                import_res = self.returning_import(id = sid)
1142                if not import_res:
1143                    continue
1144                import_record = import_res[0]
1145                data['matric_no'] = import_record.matric_no
1146                data['sex'] = import_record.Sex == 'F'
1147                data['name'] = "%s %s %s" % (import_record.Firstname,
1148                                             import_record.Middlename,
1149                                             import_record.Lastname)
1150                data['jamb_reg_no'] = import_record.Entryregno
1151            self.addRecord(**data)
1152        if pghandler: pghandler.finish()
1153    ###)
1154
1155    security.declarePrivate('notify_event_listener') ###(
1156    def notify_event_listener(self,event_type,object,infos):
1157        "listen for events"
1158        if not infos.has_key('rpath'):
1159            return
1160        pt = getattr(object,'portal_type',None)
1161        mt = getattr(object,'meta_type',None)
1162        students_catalog = self
1163        data = {}
1164        if pt == 'Student' and\
1165           mt == 'CPS Proxy Folder' and\
1166           event_type.startswith('workflow'):
1167            data['id'] = object.getId()
1168            data['review_state'] = self.portal_workflow.getInfoFor(object,'review_state',None)
1169            students_catalog.modifyRecord(**data)
1170            return
1171        rpl = infos['rpath'].split('/')
1172        if pt == 'Student' and mt == 'CPS Proxy Folder':
1173            student_id = object.id
1174            if event_type == "sys_add_object":
1175                try:
1176                    self.addRecord(id = student_id)
1177                except ValueError:
1178                    pass
1179                return
1180            elif event_type == 'sys_del_object':
1181                self.deleteRecord(student_id)
1182        if pt not in self.affected_types.keys():
1183            return
1185        if event_type not in ('sys_modify_object',): # one-element tuple, not a plain string
1185            return
1186        if mt == 'CPS Proxy Folder':
1187            return
1188        if not hasattr(self,'_v_certificates'):
1189            self._v_certificates = self.fill_certificates_dict()
1190        for field in self.affected_types[pt]['fields']:
1191            if hasattr(self,'get_from_doc_%s' % field):
1192                data[field] = getattr(self,'get_from_doc_%s' % field)(object)
1193            else:
1194                data[field] = getattr(object,field)
1195        data['id'] = rpl[2]
1196        self.modifyRecord(**data)
1197    ###)
1198
1199
1200InitializeClass(StudentsCatalog)
1201
1202###)
1203
1204class CertificatesCatalog(WAeUPTable): ###(
1205    security = ClassSecurityInfo()
1206
1207    meta_type = 'WAeUP Certificates Catalog'
1208    name =  "certificates_catalog"
1209    key = "code"
1210    def __init__(self,name=None):
1211        if name ==  None:
1212            name =  self.name
1213        WAeUPTable.__init__(self, name)
1214
1215    def manage_catalogReindex(self, REQUEST, RESPONSE, URL1): ###(
1216        """ clear the catalog, then re-index everything """
1217
1218        elapse = time.time()
1219        c_elapse = time.clock()
1220
1221        pgthreshold = self._getProgressThreshold()
1222        handler = (pgthreshold > 0) and ZLogHandler(pgthreshold) or None
1223        self.refreshCatalog(clear=1, pghandler=handler)
1224
1225        elapse = time.time() - elapse
1226        c_elapse = time.clock() - c_elapse
1227
1228        RESPONSE.redirect(
1229            URL1 +
1230            '/manage_catalogAdvanced?manage_tabs_message=' +
1231            urllib.quote('Catalog Updated \n'
1232                         'Total time: %s\n'
1233                         'Total CPU time: %s' % (`elapse`, `c_elapse`)))
1234    ###)
1235
1236    def reindexIndex(self, name, REQUEST,pghandler=None): ###(
1237        if isinstance(name, str):
1238            name = (name,)
1239        certificates = self.portal_catalog(portal_type="Certificate")
1240        num_objects = len(certificates)
1241        if pghandler:
1242            pghandler.init('Refreshing catalog: %s' % self.absolute_url(1), num_objects)
1243        for i in xrange(num_objects):
1244            if pghandler: pghandler.report(i)
1245            certificate_brain = certificates[i]
1246            certificate_object = certificate_brain.getObject()
1247            pl = certificate_brain.getPath().split('/')
1248            data = {}
1249            cid = data[self.key] = certificate_brain.getId
1250            data['faculty'] = pl[-4]
1251            data['department'] = pl[-3]
1252            doc = certificate_object.getContent()
1253            for field in name:
1254                if field not in (self.key,'faculty','department'):
1255                    data[field] = getattr(doc,field)
1256            self.modifyRecord(**data)
1257        if pghandler: pghandler.finish()
1258    ###)
1259
1260    def refreshCatalog(self, clear=0, pghandler=None): ###(
1261        """ re-index everything we can find """
1262        if clear:
1263            self._catalog.clear()
1264        certificates = self.portal_catalog(portal_type="Certificate")
1265        num_objects = len(certificates)
1266        if pghandler:
1267            pghandler.init('Refreshing catalog: %s' % self.absolute_url(1), num_objects)
1268        #from pdb import set_trace;set_trace()
1269        for i in xrange(num_objects):
1270            if pghandler: pghandler.report(i)
1271            certificate_brain = certificates[i]
1272            certificate_doc = certificate_brain.getObject().getContent()
1273            pl = certificate_brain.getPath().split('/')
1274            data = {}
1275            for field in self.schema():
1276                data[field] = getattr(certificate_doc,field,None)
1277            data[self.key] = certificate_brain.getId
1278            ai = pl.index('academics')
1279            data['faculty'] = pl[ai +1]
1280            data['department'] = pl[ai +2]
1281            if clear:
1282                self.addRecord(**data)
1283            else:
1284                self.modifyRecord(**data)
1285        if pghandler: pghandler.finish()
1286    ###)
1287
1288    security.declarePrivate('notify_event_listener') ###(
1289    def notify_event_listener(self,event_type,object,infos):
1290        "listen for events"
1291        if not infos.has_key('rpath'):
1292            return
1293        pt = getattr(object,'portal_type',None)
1294        mt = getattr(object,'meta_type',None)
1295        if pt != 'Certificate':
1296            return
1297        data = {}
1298        rpl = infos['rpath'].split('/')
1299        if event_type not in ("sys_add_object","sys_modify_object","sys_del_object"):
1300            return
1301        certificate_id = object.getId()
1302        data[self.key] = certificate_id
1303        if event_type == "sys_add_object" and mt == 'CPS Proxy Folder':
1304            try:
1305                self.addRecord(**data)
1306            except ValueError:
1307                return
1308            certificate_id = object.getId()
1309            doc = object.getContent()
1310            if doc is None:
1311                return
1312            for field in self.schema():
1313                data[field] = getattr(doc,field,None)
1314            data[self.key] = certificate_id
1315            ai = rpl.index('academics')
1316            data['faculty'] = rpl[ai +1]
1317            data['department'] = rpl[ai +2]
1318            self.modifyRecord(**data)
1319            return
1320        if event_type == "sys_del_object":
1321            self.deleteRecord(certificate_id)
1322            return
1323        if event_type == "sys_modify_object" and mt == 'Certificate':
1324            #from pdb import set_trace;set_trace()
1325            for field in self.schema():
1326                data[field] = getattr(object,field,None)
1327            certificate_id = object.aq_parent.getId()
1328            data[self.key] = certificate_id
1329            ai = rpl.index('academics')
1330            data['faculty'] = rpl[ai +1]
1331            data['department'] = rpl[ai +2]
1332            self.modifyRecord(**data)
1333    ###)
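    # Synchronisation logic of the listener above:
    #  - sys_add_object on the 'CPS Proxy Folder' first adds a stub record with
    #    only the certificate id, then fills the schema fields plus faculty and
    #    department (path segments after 'academics') from the content object;
    #  - sys_modify_object on the 'Certificate' document re-reads all schema fields;
    #  - sys_del_object deletes the record.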
1334
1335
1336InitializeClass(CertificatesCatalog)
1337###)
1338
1339class CoursesCatalog(WAeUPTable): ###(
1340    security = ClassSecurityInfo()
1341
1342    meta_type = 'WAeUP Courses Catalog'
1343    name =  "courses_catalog"
1344    key = "code"
1345    def __init__(self,name=None):
1346        if name ==  None:
1347            name =  self.name
1348        WAeUPTable.__init__(self, name)
1349
1350    def manage_catalogReindex(self, REQUEST, RESPONSE, URL1): ###(
1351        """ clear the catalog, then re-index everything """
1352
1353        elapse = time.time()
1354        c_elapse = time.clock()
1355
1356        pgthreshold = self._getProgressThreshold()
1357        handler = (pgthreshold > 0) and ZLogHandler(pgthreshold) or None
1358        self.refreshCatalog(clear=1, pghandler=handler)
1359
1360        elapse = time.time() - elapse
1361        c_elapse = time.clock() - c_elapse
1362
1363        RESPONSE.redirect(
1364            URL1 +
1365            '/manage_catalogAdvanced?manage_tabs_message=' +
1366            urllib.quote('Catalog Updated \n'
1367                         'Total time: %s\n'
1368                         'Total CPU time: %s' % (`elapse`, `c_elapse`)))
1369    ###)
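    # A ZLogHandler is only attached when the catalog's progress threshold is
    # greater than zero; otherwise the refresh runs without progress reporting.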
1370
1371    def reindexIndex(self, name, REQUEST,pghandler=None): ###(
1372        if isinstance(name, str):
1373            name = (name,)
1374        courses = self.portal_catalog(portal_type="Course")
1375        num_objects = len(courses)
1376        if pghandler:
1377            pghandler.init('Refreshing catalog: %s' % self.absolute_url(1), num_objects)
1378        for i in xrange(num_objects):
1379            if pghandler: pghandler.report(i)
1380            course_brain = courses[i]
1381            course_object = course_brain.getObject()
1382            pl = course_brain.getPath().split('/')
1383            data = {}
1384            cid = data[self.key] = course_brain.getId
1385            data['faculty'] = pl[-4]
1386            data['department'] = pl[-3]
1387            doc = course_object.getContent()
1388            for field in name:
1389                if field not in (self.key,'faculty','department'):
1390                    data[field] = getattr(doc,field)
1391            self.modifyRecord(**data)
1392        if pghandler: pghandler.finish()
1393    ###)
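    # Here faculty and department are taken from fixed positions at the end of
    # the path (pl[-4] and pl[-3]), which assumes a layout like
    #   .../academics/<faculty>/<department>/<container>/<course_id>
    # (inferred from the index arithmetic, not verified); refreshCatalog below
    # locates them relative to the 'academics' segment instead.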
1394
1395    def refreshCatalog(self, clear=0, pghandler=None): ###(
1396        """ re-index everything we can find """
1397        if clear:
1398            self._catalog.clear()
1399        courses = self.portal_catalog(portal_type="Course")
1400        num_objects = len(courses)
1401        if pghandler:
1402            pghandler.init('Refreshing catalog: %s' % self.absolute_url(1), num_objects)
1403        #from pdb import set_trace;set_trace()
1404        for i in xrange(num_objects):
1405            if pghandler: pghandler.report(i)
1406            course_brain = courses[i]
1407            course_doc = course_brain.getObject().getContent()
1408            pl = course_brain.getPath().split('/')
1409            data = {}
1410            for field in self.schema():
1411                data[field] = getattr(course_doc,field,None)
1412            data[self.key] = course_brain.getId
1413            ai = pl.index('academics')
1414            data['faculty'] = pl[ai +1]
1415            data['department'] = pl[ai +2]
1416            if clear:
1417                self.addRecord(**data)
1418            else:
1419                self.modifyRecord(**data)
1420        if pghandler: pghandler.finish()
1421    ###)
1422
1423    security.declarePrivate('notify_event_listener') ###(
1424    def notify_event_listener(self,event_type,object,infos):
1425        "listen for events"
1426        if not infos.has_key('rpath'):
1427            return
1428        pt = getattr(object,'portal_type',None)
1429        mt = getattr(object,'meta_type',None)
1430        if pt != 'Course':
1431            return
1432        data = {}
1433        rpl = infos['rpath'].split('/')
1434        if event_type not in ("sys_add_object","sys_modify_object","sys_del_object"):
1435            return
1436        course_id = object.getId()
1437        data[self.key] = course_id
1438        if event_type == "sys_add_object" and mt == 'CPS Proxy Folder':
1439            try:
1440                self.addRecord(**data)
1441            except ValueError:
1442                return
1443            course_id = object.getId()
1444            doc = object.getContent()
1445            if doc is None:
1446                return
1447            for field in self.schema():
1448                data[field] = getattr(doc,field,None)
1449            data[self.key] = course_id
1450            ai = rpl.index('academics')
1451            data['faculty'] = rpl[ai +1]
1452            data['department'] = rpl[ai +2]
1453            self.modifyRecord(**data)
1454            return
1455        if event_type == "sys_del_object":
1456            self.deleteRecord(course_id)
1457            return
1458        if event_type == "sys_modify_object" and mt == 'Course':
1459            #from pdb import set_trace;set_trace()
1460            for field in self.schema():
1461                data[field] = getattr(object,field,None)
1462            course_id = object.aq_parent.getId()
1463            data[self.key] = course_id
1464            ai = rpl.index('academics')
1465            data['faculty'] = rpl[ai +1]
1466            data['department'] = rpl[ai +2]
1467            self.modifyRecord(**data)
1468    ###)
1469
1470
1471InitializeClass(CoursesCatalog)
1472###)
1473
1474class CourseResults(WAeUPTable): ###(
1475    security = ClassSecurityInfo()
1476
1477    meta_type = 'WAeUP Results Catalog'
1478    name = "course_results"
    key = "key" # composite uid: "<student_id>|<level_id>|<course_id>"
1480    def __init__(self,name=None):
1481        if name ==  None:
1482            name = self.name
1483        WAeUPTable.__init__(self, name)
1484        self._queue = []
1485
1486    def addMultipleRecords(self, records): ###(
1487        """add many records"""
1488        existing_uids = []
1489        for data in records:
1490            uid = "%(student_id)s|%(level_id)s|%(course_id)s" % data
1491            data['%s' % self.key] = uid
1492            query = Eq(self.key, uid)
1493            res = self.course_results.evalAdvancedQuery(query)
1494            if len(res) > 0:
1495                rec = res[0]
1496                equal = True
1497                for attr in ('student_id','level_id','course_id'):
1498                    if getattr(rec,attr,'') != data[attr]:
1499                        equal = False
1500                        break
1501                if equal:
1502                    existing_uids += uid,
1503                    continue
1504            self.catalog_object(dict2ob(data), uid=uid)
1505        return existing_uids
1506    ###)
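    # Usage sketch with purely hypothetical values; the uid is built as
    # "<student_id>|<level_id>|<course_id>", and records whose uid already
    # exists for the same student/level/course are skipped and returned:
    #   already_there = self.course_results.addMultipleRecords([
    #       {'student_id': 'X123456', 'level_id': '100', 'course_id': 'MTH101'},
    #       {'student_id': 'X123456', 'level_id': '100', 'course_id': 'PHY102'},
    #   ])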
1507
1508    def deleteResultsHere(self,level_id,student_id): ###(
1509        query = Eq('student_id',student_id) & Eq('level_id', level_id)
1510        course_results = self.course_results.evalAdvancedQuery(query)
1511        #import pdb;pdb.set_trace()
1512        for result in course_results:
1513            self.deleteRecord(result.key)
1514    ###)
1515
1516    def moveResultsHere(self,level,student_id): ###(
1517        #import pdb;pdb.set_trace()
1518        level_id = level.getId()
1519        query = Eq('student_id',student_id) & Eq('level_id', level_id)
1520        course_results = self.course_results.evalAdvancedQuery(query)
1521        existing_courses = [cr.code for cr in course_results]
1522        to_delete = []
1523        for code,obj in level.objectItems():
1524            to_delete.append(code)
1525            carry_over = False
1526            if code.endswith('_co'):
1527                carry_over = True
1528                code  = code[:-3]
1529            if code in existing_courses:
1530                continue
1531            course_result_doc = obj.getContent()
1532            data = {}
1533            course_id = code
1534            for field in self.schema():
1535                data[field] = getattr(course_result_doc,field,'')
1536            data['key'] = key = "%(student_id)s|%(level_id)s|%(course_id)s" % vars()
1537            data['student_id'] = student_id
1538            data['level_id'] = level_id
1539            session_id = self.getLevelSession(level.getContent(),student_id,level_id)
1540            data['session_id'] = session_id
1541            #data['queue_status'] = OBJECT_CREATED
1542            data['code'] = course_id
1543            data['carry_over'] = carry_over
1544            self.catalog_object(dict2ob(data), uid=key)
1545        level.manage_delObjects(to_delete)
1546    ###)
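    # moveResultsHere copies every course-result object of a level folder into
    # this flat catalog and then deletes the objects from the level. An id
    # ending in '_co' marks a carry-over: the suffix is stripped from the code
    # and carry_over is set to True. Results already catalogued for that
    # student and level are not re-added but are still removed from the level.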
1547
1548    def getCourses(self,student_id,level_id): ###(
1549        query = Eq('student_id',student_id) & Eq('level_id', level_id)
1550        course_results = self.course_results.evalAdvancedQuery(query)
1551        carry_overs = []
1552        normal1 = []
1553        normal2 = []
1554        normal3 = []
1555        total_credits = 0
1556        gpa = 0
1557        for brain in course_results:
1558            d = {}
1559
            for field in self.schema():
                d[field] = getattr(brain,field,None)
                # Missing.Value has no useful string form; display it as empty
                if repr(d[field]) == 'Missing.Value':
                    d[field] = ''
1564            d['weight'] = ''
1565            d['grade'] = ''
1566            d['score'] = ''
1567
1568            if str(brain.credits).isdigit():
1569                credits = int(brain.credits)
1570                total_credits += credits
1571                score = getattr(brain,'score',0)
1572                if score and str(score).isdigit() and int(score) > 0:
1573                    score = int(score)
1574                    grade,weight = self.getGradesFromScore(score,'')
1575                    gpa += weight * credits
1576                    d['weight'] = weight
1577                    d['grade'] = grade
1578                    d['score'] = score
1579
1580            #if str(brain.ca1).isdigit() and str(brain.ca2).isdigit() and str(brain.exam).isdigit():
1581            #    d['score_calc'] = int(brain.ca1) + int(brain.ca2) + int(brain.exam)
1582            #else:
1583            #    d['score_calc'] = ''
            try:
                d['score_calc'] = float(brain.ca1) + float(brain.ca2) + float(brain.exam)
            except:
                # ca1, ca2 or exam may be empty or missing; leave the calculated score blank
                d['score_calc'] = ''
1588
            if d['score_calc']:
                # getGradesFromScore returns a (grade,weight) tuple, see the call above
                grade,weight = self.getGradesFromScore(d['score_calc'],level_id)
                d['grade'] = grade
1592
1593            d['coe'] = ''
1594            if brain.core_or_elective:
1595                d['coe'] = 'Core'
1596            elif brain.core_or_elective == False:
1597                d['coe'] = 'Elective'
1598            id = code = d['id'] = brain.code
1599            d['code'] = code
1600            res = self.courses_catalog.evalAdvancedQuery(Eq('code',code))
1601            if res:
1602                course = res[0]
1603                d['title'] = course.title
                # The courses_catalog stores a mix of strings and integers in its semester
                # field, although the course schema declares it as 'CPS Int Field'; maybe
                # reindexing the catalog fixes this. Cast to str so the comparison below works.
1606                d['semester'] = str(course.semester)
1607            else:
1608                d['title'] = "Course has been removed from course list"
1609                d['semester'] = ''
1610            if brain.carry_over:
1611                d['coe'] = 'CO'
1612                carry_overs.append(d)
1613            else:
1614                if d['semester'] == '1':
1615                    normal1.append(d)
1616
1617                elif d['semester'] == '2':
1618                    normal2.append(d)
1619                else:
1620                    normal3.append(d)
1621        #normal.sort(cmp=lambda x,y: cmp("%(semester)s%(code)s" % x,
1622        #                                "%(semester)s%(code)s" % y))
1623        carry_overs.sort(cmp=lambda x,y: cmp("%(semester)s%(code)s" % x,
1624                                             "%(semester)s%(code)s" % y))
1625        return total_credits,gpa,carry_overs,normal1,normal2,normal3
1626    ###)
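    # getCourses returns (total_credits, gpa, carry_overs, normal1, normal2, normal3):
    # carry-over results plus first-semester, second-semester and "other" results
    # as separate lists of dicts. Note that gpa is the credit-weighted sum of
    # grade weights and is not yet divided by total_credits here.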
1627
1628
1629    def getAllCourses(self,student_id): ###(
1630        query = Eq('student_id',student_id)
1631        course_results = self.course_results.evalAdvancedQuery(query)
1632        courses = []
1633        for brain in course_results:
1634            d = {}
1635
1636            for field in self.schema():
1637                d[field] = getattr(brain,field,'')
1638
1639            d['weight'] = ''
1640            d['grade'] = ''
1641            d['score'] = ''
1642
1643            if str(brain.credits).isdigit():
1644                credits = int(brain.credits)
1645                score = getattr(brain,'score',0)
1646                if score and str(score).isdigit() and int(score) > 0:
1647                    score = int(score)
                    # other call sites pass a level_id (or '') as second argument; do the same here
                    grade,weight = self.getGradesFromScore(score,'')
1649                    d['weight'] = weight
1650                    d['grade'] = grade
1651                    d['score'] = score
1652            d['coe'] = ''
1653            if brain.core_or_elective:
1654                d['coe'] = 'Core'
1655            elif brain.core_or_elective == False:
1656                d['coe'] = 'Elective'
1657            id = code = d['id'] = brain.code
1658            d['code'] = code
1659            res = self.courses_catalog.evalAdvancedQuery(Eq('code',code))
1660            if res:
1661                course = res[0]
1662                d['title'] = course.title
                # The courses_catalog stores a mix of strings and integers in its semester
                # field, although the course schema declares it as 'CPS Int Field'; maybe
                # reindexing the catalog fixes this. Cast to str for consistent output.
1665                d['semester'] = str(course.semester)
1666            else:
1667                d['title'] = "Course has been removed from course list"
1668                d['semester'] = ''
1669            if brain.carry_over:
1670                d['coe'] = 'CO'
1671            courses.append(d)
1672        return courses
1673    ###)
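    # Unlike getCourses, getAllCourses returns a single flat list covering all
    # levels of the student, without semester grouping or credit/GPA totals.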
1674
1675InitializeClass(CourseResults)
1676###)
1677
1678class OnlinePaymentsImport(WAeUPTable): ###(
1679
1680    meta_type = 'WAeUP Online Payment Transactions'
1681    name = "online_payments_import"
1682    key = "order_id"
1683    def __init__(self,name=None):
1684        if name ==  None:
1685            name = self.name
1686        WAeUPTable.__init__(self, name)
1687
1688
1689InitializeClass(OnlinePaymentsImport)
1690###)
1691
1692class ReturningImport(WAeUPTable): ###(
1693
1694    meta_type = 'Returning Import Table'
1695    name = "returning_import"
1696    key = "matric_no"
1697    def __init__(self,name=None):
1698        if name ==  None:
1699            name = self.name
1700        WAeUPTable.__init__(self, name)
1701
1702
1703InitializeClass(ReturningImport)
1704###)
1705
1706class ResultsImport(WAeUPTable): ###(
1707
1708    meta_type = 'Results Import Table'
1709    name = "results_import"
1710    key = "key"
1711    def __init__(self,name=None):
1712        if name ==  None:
1713            name = self.name
1714        WAeUPTable.__init__(self, name)
1715
1716
1717InitializeClass(ResultsImport)
1718
1719###)
1720
1721class PaymentsCatalog(WAeUPTable): ###(
1722    security = ClassSecurityInfo()
1723
1724    meta_type = 'WAeUP Payments Catalog'
1725    name = "payments_catalog"
1726    key = "order_id"
1727    def __init__(self,name=None):
1728        if name ==  None:
1729            name = self.name
1730        WAeUPTable.__init__(self, name)
1731
1732
1733    security.declarePrivate('notify_event_listener') ###(
1734    def notify_event_listener(self,event_type,object,infos):
1735        "listen for events"
1736        if not infos.has_key('rpath'):
1737            return
1738        pt = getattr(object,'portal_type',None)
1739        mt = getattr(object,'meta_type',None)
1740        data = {}
1741        if pt != 'Payment':
1742            return
1743        if event_type == 'sys_del_object' and mt == 'CPS Proxy Folder':
1744            self.deleteRecord(object.getContent().order_id)
1745        if mt == 'CPS Proxy Folder':
1746            return # is handled only for the real object
        # only modifications of the payment document itself are handled below
        if event_type != 'sys_modify_object':
1748            return
1749        for field in self.schema():
1750            data[field] = getattr(object,field,'')
1751        rpl = infos['rpath'].split('/')
1752        #import pdb;pdb.set_trace()
1753        student_id = rpl[-4]
1754        data['student_id'] = student_id
1755        modified = False
1756        try:
1757            self.modifyRecord(**data)
1758            modified = True
1759        except KeyError:
1760            #logger = logging.getLogger('WAeUPTables.PaymentsCatalog.%s' % self.__name__)
1761            #logger.info("could not modify entry for %(student_id)s with %(order_id)s" % data)
1762            pass
1763        if not modified:
1764            try:
1765                self.addRecord(**data)
1766            except:
1767                logger = logging.getLogger('WAeUPTables.PaymentsCatalog.notify_event_listener')
1768                logger.info("could not add or modify entry for %(student_id)s with %(order_id)s" % data)
1769        ###)
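    # Notes on the listener above: deletions are handled on the proxy folder
    # (the order_id is read from its content object before it disappears);
    # modifications are handled on the Payment document itself and re-read all
    # schema fields. student_id is taken from rpl[-4], the fourth path segment
    # from the end of the rpath, which assumes a fixed folder depth between the
    # student folder and the payment document.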
1770
1771
1772InitializeClass(PaymentsCatalog)
1773
1774###)
1775
# BBB: backwards-compatibility alias for the old misspelled class name
1777AccomodationTable = AccommodationTable