source: WAeUP_SRP/trunk/WAeUPTables.py @ 3780

Last change on this file since 3780 was 3780, checked in by Henrik Bettermann, 16 years ago
  • get study_mode from certificates and ignore study_mode attribute of study_course objects
  • customize school_fee payment for FUTMinna
  • Property svn:keywords set to Id
File size: 67.4 KB
Line 
1#-*- mode: python; mode: fold -*-
2# (C) Copyright 2005 AixtraWare <http://aixtraware.de>
3# Author: Joachim Schmitz <js@aixtraware.de>
4#
5# This program is free software; you can redistribute it and/or modify
6# it under the terms of the GNU General Public License version 2 as published
7# by the Free Software Foundation.
8#
9# This program is distributed in the hope that it will be useful,
10# but WITHOUT ANY WARRANTY; without even the implied warranty of
11# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
12# GNU General Public License for more details.
13#
14# You should have received a copy of the GNU General Public License
15# along with this program; if not, write to the Free Software
16# Foundation, Inc., 59 Temple Place - Suite 330, Boston, MA
17# 02111-1307, USA.
18#
19# $Id: WAeUPTables.py 3780 2008-11-21 09:26:51Z henrik $
20
21from zope.interface import implements
22from Globals import InitializeClass
23from Products.ZCatalog.ZCatalog import ZCatalog
24from Products.ZCatalog.ProgressHandler import ZLogHandler
25from AccessControl import ClassSecurityInfo
26from Products.CMFCore.permissions import ModifyPortalContent
27from Products.CMFCore.utils import getToolByName
28from Products.CMFCore.CatalogTool import CatalogTool
29from Products.CPSSchemas.StorageAdapter import MappingStorageAdapter
30from Products.CPSSchemas.DataStructure import DataStructure
31from Products.CPSSchemas.DataModel import DataModel
32from Products.AdvancedQuery import Eq, Between, Le,In
33import urllib
34import DateTime,time
35import csv,re
36import logging
37import Globals
38p_home = Globals.package_home(globals())
39i_home = Globals.INSTANCE_HOME
40
41ADDING_SHEDULED = "adding_sheduled"
42OBJECT_CREATED = "object_created"
43NOT_OCCUPIED = 'not_occupied'
44
45from interfaces import IWAeUPTable
46
class AttributeHolder(object):
    """Blank object used as a simple attribute bag (see dict2ob)."""
    pass

def dict2ob(mapping):
    """Return an AttributeHolder exposing each key of *mapping* as an attribute.

    The parameter was renamed from ``dict`` to avoid shadowing the builtin.
    All callers in this module pass the argument positionally.
    """
    ob = AttributeHolder()
    for key, value in mapping.items():
        setattr(ob, key, value)
    return ob
55
class WAeUPTable(ZCatalog): ###(
    """Base class for the WAeUP catalog tables.

    A WAeUPTable is a ZCatalog used as a flat record store: every record
    is uniquely identified by the value of the index named by ``self.key``.
    Subclasses must provide ``meta_type``, ``name`` and ``key``.
    """

    implements(IWAeUPTable)
    security = ClassSecurityInfo()
    meta_type = None

    def __init__(self,name=None):
        # fall back to the class-level default name
        if name ==  None:
            name = self.name
        ZCatalog.__init__(self,name)

    def refreshCatalog(self, clear=0, pghandler=None): ###(
        """ don't refresh for a normal table """

        if self.REQUEST and self.REQUEST.RESPONSE:
            # BUGFIX: URL1 was referenced as a bare (undefined) name, which
            # raised a NameError; take it from the request instead.
            self.REQUEST.RESPONSE.redirect(
              self.REQUEST['URL1'] +
              '/manage_catalogAdvanced?manage_tabs_message=Catalog%20refresh%20not%20implemented')

###)

    def manage_catalogClear(self, REQUEST=None, RESPONSE=None, URL1=None): ###(
        """ clears the whole enchilada """

        self._catalog.clear()
        if REQUEST and RESPONSE:
            RESPONSE.redirect(
              URL1 +
              '/manage_catalogAdvanced?manage_tabs_message=Catalog%20cleared')

###)

    def record2dict(self,fields,record): ###(
        """Return a dict mapping each name in *fields* to an export-friendly value.

        'sex' booleans are rendered as 'F'/'M'; 'lga' and 'aos' codes are
        resolved through the portal vocabularies; missing/false values
        become the empty string.
        """
        d = {}
        for key in fields:
            v = getattr(record, key, None)
            v_dump = v
            if key == 'sex':
                if v == True:
                    v_dump = 'F'
                elif v == False:
                    v_dump = 'M'
                d[key] = v_dump
            elif v:
                if key == 'lga':
                    v_dump = self.portal_vocabularies.local_gov_areas.get(v)
                    if not v_dump:
                        # unknown code: keep the raw value
                        v_dump = v
                elif key == 'aos':
                    v_dump = self.portal_vocabularies.aos.get(v)
                d[key] = v_dump
            else:
                d[key] = ''
        return d

###)

    def addRecord(self, **data): ###(
        """Catalog a new record; data[self.key] is the unique id.

        Raises ValueError if a record with that uid already exists,
        returns the uid on success.
        """
        uid = data[self.key]
        res = self.searchResults({"%s" % self.key : uid})
        if len(res) > 0:
            raise ValueError("More than one record with uid %s" % uid)
        self.catalog_object(dict2ob(data), uid=uid)
        return uid

###)

    def deleteRecord(self, uid):
        # remove the record identified by uid from the catalog
        self.uncatalog_object(uid)

    def getRecordByKey(self,key):
        """Return the single record whose self.key index equals *key*, or None."""
        if not key:
            return None
        res = self.evalAdvancedQuery(Eq(self.key,key))
        if res:
            return res[0]
        return None

    def searchAndSetRecord(self, **data):
        # BUGFIX: 'raise NotImplemented' raised a TypeError, because
        # NotImplemented is not an exception class.
        raise NotImplementedError('searchAndSetRecord must be implemented by subclasses')

    def modifyRecord(self, record=None, **data): ###(
        """Update an existing record in place.

        If *record* is not supplied it is looked up via data[self.key];
        raises KeyError when no such record exists.
        """
        uid = data[self.key]
        if record is None:
            records = self.searchResults({"%s" % self.key : uid})
            if len(records) > 1:
                # Can not happen, but anyway...
                raise ValueError("More than one record with uid %s" % uid)
            if len(records) == 0:
                raise KeyError("No record for uid %s" % uid)
            record = records[0]
        record_data = {}
        for field in self.schema() + self.indexes():
            record_data[field] = getattr(record, field)
        # Add the updated data:
        record_data.update(data)
        self.catalog_object(dict2ob(record_data), uid)

###)

    def reindexIndex(self, name, REQUEST,pghandler=None): ###(
        """Reindex the index (or tuple of indexes) *name* for every record."""
        if isinstance(name, str):
            name =  (name,)
        paths = self._catalog.uids.items()
        for p,rid in paths:
            metadata = self.getMetadataForRID(rid)
            record_data = {}
            for field in name:
                record_data[field] = metadata.get(field)
            uid = metadata.get(self.key)
            self.catalog_object(dict2ob(record_data), uid, idxs=name,
                                update_metadata=0)

###)

    security.declareProtected(ModifyPortalContent,"exportAllRecords") ###(
    def exportAllRecords(self):
        "export a WAeUPTable to <instance>/import/<id>-<timestamp>.csv"
        fields = [field for field in self.schema()]
        format = ','.join(['"%%(%s)s"' % fn for fn in fields])
        # NOTE: this local list was previously named 'csv', shadowing the
        # csv module; renamed to avoid confusion.
        lines = []
        lines.append(','.join(['"%s"' % fn for fn in fields]))
        for uid in self._catalog.uids:
            records = self.searchResults({"%s" % self.key : uid})
            if len(records) > 1:
                # Can not happen, but anyway...
                raise ValueError("More than one record with uid %s" % uid)
            if len(records) == 0:
                raise KeyError("No record for uid %s" % uid)
            rec = records[0]
            lines.append(format % rec)
        current = DateTime.DateTime().strftime("%d-%m-%y_%H_%M_%S")
        # BUGFIX: close the file handle instead of leaking it
        out = open("%s/import/%s-%s.csv" % (i_home,self.getId(),current),"w+")
        out.write('\n'.join(lines))
        out.close()

###)

    security.declareProtected(ModifyPortalContent,"dumpAll")###(
    def dumpAll(self,index=None,value=None):
        """dump all data in the table to a csv

        If *index* and *value* are given, only records matching that
        equality query are exported; otherwise the whole table is dumped.
        """
        member = self.portal_membership.getAuthenticatedMember()
        logger = logging.getLogger('WAeUPTables.WAeUPTable.dumpAll')
        current = DateTime.DateTime().strftime("%d-%m-%y_%H_%M_%S")
        export_file = "%s/export/%s_%s.csv" % (i_home,self.__name__,current,)
        lines = []
        if hasattr(self,"export_keys"):
            fields = self.export_keys
        else:
            fields = []
            for f in self.schema():
                fields.append(f)
        headline = ','.join(fields)
        out = open(export_file,"wb")
        out.write(headline +'\n')
        out.close()
        out = open(export_file,"a")
        csv_writer = csv.DictWriter(out,fields,)
        if index is not None and value is not None:
            records = self.evalAdvancedQuery(Eq(index,value))
        else:
            # no filter: dump the whole catalog
            records = self()
        nr2export = len(records)
        logger.info('%s starts dumping, %s records to export' % (member,nr2export))
        chunk = 2000
        total = 0
        start = DateTime.DateTime().timeTime()
        start_chunk = DateTime.DateTime().timeTime()
        for record in records:
            d = self.record2dict(fields,record)
            lines.append(d)
            total += 1
            # flush a chunk of rows to disk and log progress plus an ETA
            if total and not total % chunk or total == len(records):
                csv_writer.writerows(lines)
                anz = len(lines)
                logger.info("wrote %(anz)d  total written %(total)d" % vars())
                end_chunk = DateTime.DateTime().timeTime()
                duration = end_chunk-start_chunk
                per_record = duration/anz
                till_now = end_chunk - start
                avarage_per_record = till_now/total
                estimated_end = DateTime.DateTime(start + avarage_per_record * nr2export)
                estimated_end = estimated_end.strftime("%H:%M:%S")
                logger.info('%(duration)4.1f, %(per_record)4.3f,end %(estimated_end)s' % vars())
                start_chunk = DateTime.DateTime().timeTime()
                lines = []
        # BUGFIX: close the export file handle instead of leaking it
        out.close()
        end = DateTime.DateTime().timeTime()
        logger.info('total time %6.2f m' % ((end-start)/60))
        msg = "wrote %(total)d records to %(export_file)s" % vars()
        logger.info(msg)
        url = self.REQUEST.get('URL2')
        return self.REQUEST.RESPONSE.redirect(url)
    ###)

    security.declarePrivate("_import_old") ###(
    def _import_old(self,filename,schema,layout, mode,logger):
        """import data from <instance>/import/<filename>.csv

        Each row is validated against the layout's widget validators;
        valid rows are returned as DataModels in d['valid_records'],
        invalid rows go to d['invalid_records'] and are also appended
        (with an Error column) to a *_not_imported*.csv file.
        """
        pm = self.portal_membership
        member = pm.getAuthenticatedMember()
        current = DateTime.DateTime().strftime("%d-%m-%y_%H_%M_%S")
        import_fn = "%s/import/%s.csv" % (i_home,filename)
        imported_fn = "%s/import/%s_imported%s.csv" % (i_home,filename,current)
        not_imported_fn = "%s/import/%s_not_imported%s.csv" % (i_home,filename,current)
        start = True
        tr_count = 1
        total_imported = 0
        total_not_imported = 0
        total = 0
        not_imported = []
        imported = []
        valid_records = []
        invalid_records = []
        d = {}
        d['mode'] = mode
        d['imported'] = total_imported
        d['not_imported'] = total_not_imported
        d['valid_records'] = valid_records
        d['invalid_records'] = invalid_records
        d['import_fn'] = import_fn
        d['imported_fn'] = imported_fn
        d['not_imported_fn'] = not_imported_fn
        if schema is None:
            em = 'No schema specified'
            logger.error(em)
            return d
        if layout is None:
            em = 'No layout specified'
            logger.error(em)
            return d
        validators = {}
        for widget in layout.keys():
            try:
                validators[widget] = layout[widget].validate
            except AttributeError:
                logger.info('%s has no validate attribute' % widget)
                return d
        try:
            items = csv.DictReader(open(import_fn,"rb"),
                                   dialect="excel",
                                   skipinitialspace=True)
        except:
            # deliberately broad: any failure to open the csv aborts the import
            em = 'Error reading %s.csv' % filename
            logger.error(em)
            return d
        for item in items:
            if start:
                # first row: derive the import keys from the csv heading;
                # uppercase and 'ignore*' columns are skipped by convention
                start = False
                logger.info('%s starts import from %s.csv' % (member,filename))
                attrs = csv.reader(open("%s/import/%s.csv" % (i_home,filename),"rb"),
                                   dialect="excel",
                                   skipinitialspace=True).next()
                import_keys = [k for k in attrs if not (k.startswith('ignore') or k.isupper())]
                diff2layout = set(import_keys).difference(set(layout.keys()))
                if diff2layout:
                    em = "not ignorable key(s) %s found in heading" % diff2layout
                    logger.info(em)
                    return d
                s = ','.join(['"%s"' % fn for fn in import_keys])
                open(not_imported_fn,"a").write(s + ',"Error"'+ '\n')
                open(imported_fn,"a").write(s + '\n')
                format = ','.join(['"%%(%s)s"' % fn for fn in import_keys])
                format_error = format + ',"%(Error)s"'
                adapters = [MappingStorageAdapter(schema, item)]
            dm = DataModel(item, adapters,context=self)
            ds = DataStructure(data=item,datamodel=dm)
            error_string = ""
            for k in import_keys:
                if not validators[k](ds,mode=mode):
                    error_string += " %s : %s" % (k,ds.getError(k))
            if error_string:
                item['Error'] = error_string
                invalid_records.append(dm)
                not_imported.append(format_error % item)
                total_not_imported += 1
            else:
                em = format % item
                valid_records.append(dm)
                imported.append(em)
                tr_count += 1
                total_imported += 1
            total += 1
        if len(imported) > 0:
            open(imported_fn,"a").write('\n'.join(imported))
        if len(not_imported) > 0:
            open(not_imported_fn,"a").write('\n'.join(not_imported))
        d['imported'] = total_imported
        d['not_imported'] = total_not_imported
        d['valid_records'] = valid_records
        d['invalid_records'] = invalid_records
        d['imported_fn'] = imported_fn
        d['not_imported_fn'] = not_imported_fn
        return d
    ###)

    security.declarePrivate("_import") ###(
    def _import_new(self,csv_items,schema, layout, mode,logger):
        """import data from a csv.DictReader instance

        Returns a result dict with counters plus the valid, invalid and
        duplicate record lists.
        """
        start = True
        total_imported = 0
        total_not_imported = 0
        total = 0
        valid_records = []
        invalid_records = []
        duplicate_records = []
        d = {}
        d['mode'] = mode
        d['valid_records'] = valid_records
        d['invalid_records'] = invalid_records
        # BUGFIX: duplicate_records used to be stored under the
        # 'invalid_records' key, clobbering the invalid list.
        d['duplicate_records'] = duplicate_records
        validators = {}
        for widget in layout.keys():
            try:
                validators[widget] = layout[widget].validate
            except AttributeError:
                logger.info('%s has no validate attribute' % widget)
                return d
        import_keys = []
        for item in csv_items:
            if start:
                start = False
                # BUGFIX: this method used to reference the undefined names
                # 'member' and 'filename' and re-read an unknown file; the
                # import keys are now derived from the first row itself.
                logger.info('start import (mode=%s)' % mode)
                import_keys = [k for k in item.keys() if not (k.startswith('ignore') or k.isupper())]
                diff2layout = set(import_keys).difference(set(layout.keys()))
                if diff2layout:
                    em = "not ignorable key(s) %s found in heading" % diff2layout
                    logger.info(em)
                    return d
                adapters = [MappingStorageAdapter(schema, item)]
            dm = DataModel(item, adapters,context=self)
            ds = DataStructure(data=item,datamodel=dm)
            error_string = ""
            for k in import_keys:
                if not validators[k](ds,mode=mode):
                    error_string += " %s : %s" % (k,ds.getError(k))
            if error_string:
                item['Error'] = error_string
                invalid_records.append(item)
                total_not_imported += 1
            else:
                # BUGFIX: dropped 'em = format % item' which referenced the
                # undefined name 'format' and crashed on every valid row
                valid_records.append(dm)
                total_imported += 1
            total += 1
        d['imported'] = total_imported
        d['not_imported'] = total_not_imported
        return d
    ###)

    security.declarePublic("missingValue")###(
    def missingValue(self):
        """Return the ZCatalog 'missing value' singleton."""
        from Missing import MV
        return MV
    ###)
###)
481
class AccommodationTable(WAeUPTable): ###(
    """Bed-space catalog: one record per bed, keyed by the 'bed' field."""

    meta_type = 'WAeUP Accommodation Tool'
    name = "portal_accommodation"
    key = "bed"
    not_occupied = NOT_OCCUPIED

    def __init__(self, name=None):
        if name is None:
            name = self.name
        WAeUPTable.__init__(self, name)

    def searchAndReserveBed(self, student_id, bed_type, random_order=False): ###(
        """Reserve a free bed of *bed_type* for *student_id*.

        Returns a (status, bed) tuple: 1 on success, -1 if the student
        already holds a bed, -2 if no bed is free, -3 if the student
        holds more than one bed.
        """
        logger = logging.getLogger('WAeUPTables.AccommodationTable.searchAndReserveBed')
        existing = self.evalAdvancedQuery(Eq('student', student_id))
        booked = len(existing)
        if booked == 1:
            # student already has a reservation; report it back
            logger.info('%s found (reserved) bed %s' % (student_id, existing[0].bed))
            return -1, existing[0].bed
        if booked > 1:
            logger.info('%s found more than one (reserved) bed' % (student_id))
            return -3, 'more than one bed'
        free_query = Eq('bed_type', bed_type) & Eq('student', NOT_OCCUPIED)
        free_beds = self.evalAdvancedQuery(free_query, sortSpecs=('sort_id', 'bed'))
        if not free_beds:
            logger.info('no bed %s available for %s' % (bed_type, student_id))
            return -2, "no bed"
        if random_order:
            import random
            chosen = free_beds[random.randint(0, len(free_beds) - 1)]
        else:
            chosen = free_beds[0]
        self.modifyRecord(bed=chosen.bed, student=student_id)
        logger.info('%s booked bed %s' % (student_id, chosen.bed))
        return 1, chosen.bed
    ###)


InitializeClass(AccommodationTable)

###)
523
class PinTable(WAeUPTable): ###(
    """Catalog of scratch-card PINs, recording which student used each pin."""
    from ZODB.POSException import ConflictError
    security = ClassSecurityInfo()
    meta_type = 'WAeUP Pin Tool'
    name = "portal_pins"
    key = 'pin'

    def __init__(self,name=None):
        if name ==  None:
            name = self.name
        WAeUPTable.__init__(self, name)

    security.declareProtected(ModifyPortalContent,"dumpAll")###(
    def dumpAll(self,include_unused=None):
        """dump all data in the table to a csv

        By default only used pins (non-empty 'student') are exported;
        passing include_unused dumps everything, but is restricted to
        the admin accounts.
        """
        member = self.portal_membership.getAuthenticatedMember()
        logger = logging.getLogger('WAeUPTables.PinTable.dumpAll')
        # BUGFIX: the permission check used to run after the export file
        # was created; a denied request no longer leaves an empty file.
        if include_unused is not None and str(member) not in ('admin','joachim'):
            logger.info('%s tries to dump pintable with unused pins' % (member))
            return
        current = DateTime.DateTime().strftime("%d-%m-%y_%H_%M_%S")
        export_file = "%s/export/%s_%s.csv" % (i_home,self.__name__,current,)
        lines = []
        if hasattr(self,"export_keys"):
            fields = self.export_keys
        else:
            fields = []
            for f in self.schema():
                fields.append(f)
        headline = ','.join(fields)
        out = open(export_file,"wb")
        out.write(headline +'\n')
        out.close()
        out = open(export_file,"a")
        csv_writer = csv.DictWriter(out,fields,)
        if include_unused is not None:
            records = self()
        else:
            # only pins that have been used (non-empty student field)
            records = self.evalAdvancedQuery(~Eq('student',''))
        nr2export = len(records)
        logger.info('%s starts dumping, %s records to export' % (member,nr2export))
        chunk = 2000
        total = 0
        start = DateTime.DateTime().timeTime()
        start_chunk = DateTime.DateTime().timeTime()
        for record in records:
            d = self.record2dict(fields,record)
            lines.append(d)
            total += 1
            # flush a chunk of rows to disk and log progress plus an ETA
            if total and not total % chunk or total == len(records):
                csv_writer.writerows(lines)
                anz = len(lines)
                logger.info("wrote %(anz)d  total written %(total)d" % vars())
                end_chunk = DateTime.DateTime().timeTime()
                duration = end_chunk-start_chunk
                per_record = duration/anz
                till_now = end_chunk - start
                avarage_per_record = till_now/total
                estimated_end = DateTime.DateTime(start + avarage_per_record * nr2export)
                estimated_end = estimated_end.strftime("%H:%M:%S")
                logger.info('%(duration)4.1f, %(per_record)4.3f,end %(estimated_end)s' % vars())
                start_chunk = DateTime.DateTime().timeTime()
                lines = []
        # BUGFIX: close the export file handle instead of leaking it
        out.close()
        end = DateTime.DateTime().timeTime()
        logger.info('total time %6.2f m' % ((end-start)/60))
        msg = "wrote %(total)d records to %(export_file)s" % vars()
        logger.info(msg)
        url = self.REQUEST.get('URL2')
        return self.REQUEST.RESPONSE.redirect(url)
    ###)

    def searchAndSetRecord(self, uid, student_id,prefix):
        """Mark pin *uid* as used by *student_id*.

        Return codes (status, record):
          1  pin was free and is now assigned to the student
          2  pin already used by this student (or a write conflict occurred)
          0  pin already used by a different student
         -1  no such pin (record is None)
        """
        # BUGFIX: ConflictError was only imported in the class body, which
        # is NOT visible from method scope, so the except clause below
        # raised a NameError whenever a conflict actually occurred.
        from ZODB.POSException import ConflictError

        # The following line must be activated after resetting the
        # the portal_pins table. This is to avoid duplicate entries
        # and disable duplicate payments.

        #student_id = student_id.upper()

        #records = self.searchResults(student = student_id)
        #if len(records) > 0 and prefix in ('CLR','APP'):
        #    for r in records:
        #        if r.pin != uid and r.prefix_batch.startswith(prefix):
        #            return -2
        records = self.searchResults({"%s" % self.key : uid})
        if len(records) > 1:
            # Can not happen, but anyway...
            raise ValueError("More than one record with uid %s" % uid)
        if len(records) == 0:
            return -1,None
        record = records[0]
        if record.student == "":
            record_data = {}
            for field in self.schema() + self.indexes():
                record_data[field] = getattr(record, field)
            # Add the updated data:
            record_data['student'] = student_id
            try:
                self.catalog_object(dict2ob(record_data), uid)
                return 1,record
            except ConflictError:
                return 2,record
        if record.student.upper() != student_id.upper():
            return 0,record
        if record.student.upper() == student_id.upper():
            return 2,record
        # unreachable (the two comparisons above are exhaustive); kept as a
        # defensive fallback
        return -3,record
InitializeClass(PinTable)
###)
649
class PumeResultsTable(WAeUPTable): ###(
    """Catalog of PUME screening results, keyed by JAMB registration number."""

    meta_type = 'WAeUP PumeResults Tool'
    name = "portal_pumeresults"
    key = "jamb_reg_no"

    def __init__(self, name=None):
        # default to the class-level catalog name
        if name is None:
            name = self.name
        WAeUPTable.__init__(self, name)


InitializeClass(PumeResultsTable)

###)
664
665class ApplicantsCatalog(WAeUPTable): ###(
666
667    meta_type = 'WAeUP Applicants Catalog'
668    name = "applicants_catalog"
669    key = "reg_no"
670    security = ClassSecurityInfo()
671    #export_keys = (
672    #               "reg_no",
673    #               "status",
674    #               "lastname",
675    #               "sex",
676    #               "date_of_birth",
677    #               "lga",
678    #               "email",
679    #               "phone",
680    #               "passport",
681    #               "entry_mode",
682    #               "pin",
683    #               "screening_type",
684    #               "registration_date",
685    #               "testdate",
686    #               "application_date",
687    #               "screening_date",
688    #               "faculty",
689    #               "department",
690    #               "course1",
691    #               "course2",
692    #               "course3",
693    #               "eng_score",
694    #               "subj1",
695    #               "subj1score",
696    #               "subj2",
697    #               "subj2score",
698    #               "subj3",
699    #               "subj3score",
700    #               "aggregate",
701    #               "course_admitted",
702    #               )
703
704    def __init__(self,name=None):
705        if name ==  None:
706            name = self.name
707        WAeUPTable.__init__(self, name)
708
709    security.declareProtected(ModifyPortalContent,"new_importCSV")###(
710    def new_importCSV(self,filename="JAMB_data",
711                  schema_id="application",
712                  layout_id="import_application",
713                  mode='add'):
714        """ import JAMB data """
715        current = DateTime.DateTime().strftime("%d-%m-%y_%H_%M_%S")
716        pm = self.portal_membership
717        member = pm.getAuthenticatedMember()
718        logger = logging.getLogger('WAeUPTables.ApplicantsCatalog.importCSV')
719        lock_fn = "%s/import/%s_import_lock" % (i_home,filename)
720        import_fn = "%s/import/%s.csv" % (i_home,filename)
721        if mode not in ('add','edit'):
722            logger.info("invalid mode: %s" % mode)
723        if os.path.exists(lock_fn):
724            logger.info("import of %(import_fn)s already in progress" % vars())
725            return
726        lock_file = open(lock_fn,"w")
727        lock_file.write("%(current)s \n" % vars())
728        lock_file.close()
729        invalid_fn = "%s/import/%s_not_imported%s.csv" % (i_home,filename,current)
730        duplicate_fn = "%s/import/%s_not_imported%s.csv" % (i_home,filename,current)
731        stool = getToolByName(self, 'portal_schemas')
732        ltool = getToolByName(self, 'portal_layouts')
733        schema = stool._getOb(schema_id)
734        if schema is None:
735            em = 'No such schema %s' % schema_id
736            logger.error(em)
737            return
738        for postfix in ('_import',''):
739            layout_name = "%(layout_id)s%(postfix)s" % vars()
740            if hasattr(ltool,layout_name):
741                break
742        layout = ltool._getOb(layout_name)
743        if layout is None:
744            em = 'No such layout %s' % layout_id
745            logger.error(em)
746            return
747        try:
748            csv_file = csv.DictReader(open(import_fn,"rb"))
749        except:
750            em = 'Error reading %s.csv' % filename
751            logger.error(em)
752            return
753        d = self._import_new(csv_items,schema,layout,mode,logger)
754        imported = []
755        edited = []
756        duplicates = []
757        not_found = []
758        if len(d['valid_records']) > 0:
759            for record in d['valid_records']:
760                #import pdb;pdb.set_trace()
761                if mode == "add":
762                    try:
763                        self.addRecord(**dict(record.items()))
764                        imported.append(**dict(record.items()))
765                        logger.info("added %s" % record.items())
766                    except ValueError:
767                        dupplicate.append(**dict(record.items()))
768                        logger.info("duplicate %s" % record.items())
769                elif mode == "edit":
770                    try:
771                        self.modifyRecord(**dict(record.items()))
772                        edited.append(**dict(record.items()))
773                        logger.info("edited %s" % record.items())
774                    except KeyError:
775                        not_found.append(**dict(record.items()))
776                        logger.info("not found %s" % record.items())
777        invalid = d['invalid_records']
778        for itype in ("imported","edited","not_found","duplicate","invalid"):
779            outlist = locals[itype]
780            if len(outlist):
781                d = {}
782                for k in outlist[0].keys():
783                    d[k] = k
784                outlist[0] = d
785                outfile = open("file_name_%s" % itype,'w')
786                csv.DictWriter(outfile,outlist[0].keys()).writerows(outlist)
787                logger.info("wrote %(itype)s records to %(, written to %(not_imported_fn)s" % d)
788###)
789
790    security.declareProtected(ModifyPortalContent,"importCSV")###(
791    def importCSV(self,filename="JAMB_data",
792                  schema_id="application",
793                  layout_id="application_pce",
794                  mode='add'):
795        """ import JAMB data """
796        stool = getToolByName(self, 'portal_schemas')
797        ltool = getToolByName(self, 'portal_layouts')
798        schema = stool._getOb(schema_id)
799        if schema is None:
800            em = 'No such schema %s' % schema_id
801            logger.error(em)
802            return
803        layout = ltool._getOb(layout_id)
804        if layout is None:
805            em = 'No such layout %s' % layout_id
806            logger.error(em)
807            return
808        logger = logging.getLogger('WAeUPTables.ApplicantsCatalog.importCSV')
809        d = self._import_old(filename,schema,layout,mode,logger)
810        if len(d['valid_records']) > 0:
811            for record in d['valid_records']:
812                #import pdb;pdb.set_trace()
813                if mode == "add":
814                    self.addRecord(**dict(record.items()))
815                    logger.info("added %s" % record.items())
816                elif mode == "edit":
817                    self.modifyRecord(**dict(record.items()))
818                    logger.info("edited %s" % record.items())
819                else:
820                    logger.info("invalid mode: %s" % mode)
821        logger.info("%(mode)sed %(imported)d records, invalid written to %(not_imported_fn)s" % d)
822    ###)
823
# Apply the ClassSecurityInfo declarations made on the class to Zope.
InitializeClass(ApplicantsCatalog)

###)
827
828class StudentsCatalog(WAeUPTable): ###(
829    security = ClassSecurityInfo()
830
831    meta_type = 'WAeUP Students Catalog'
832    name = "students_catalog"
833    key = "id"
834    affected_types = {   ###(
835                      'StudentApplication':
836                      {'id': 'application',
837                       'fields':
838                       ('jamb_reg_no',
839                        'entry_mode',
840                        #'entry_level',
841                        'entry_session',
842                       )
843                      },
844                      'StudentClearance':
845                      {'id': 'clearance',
846                       'fields':
847                       ('matric_no',
848                        'lga',
849                       )
850                      },
851                      'StudentPersonal':
852                      {'id': 'personal',
853                       'fields':
854                       ('name',
855                        'sex',
856                        'perm_address',
857                        'email',
858                        'phone',
859                       )
860                      },
861                      'StudentStudyCourse':
862                      {'id': 'study_course',
863                       'fields':
864                       ('course', # study_course
865                        'faculty', # from certificate
866                        'department', # from certificate
867                        'end_level', # from certificate
868                        'level', # current_level
869                        'mode',  # from certificate
870                        'session', # current_session
871                        'verdict', # current_verdict
872                       )
873                      },
874                     }
875    ###)
876
877    def __init__(self,name=None):
878        if name ==  None:
879            name = self.name
880        WAeUPTable.__init__(self, name)
881        return
882
883    def manage_catalogClear(self, REQUEST=None, RESPONSE=None, URL1=None):
884        """ clears the whole enchilada """
885        self._catalog.clear()
886
887        if REQUEST and RESPONSE:
888            RESPONSE.redirect(
889              URL1 +
890              '/manage_catalogAdvanced?manage_tabs_message=Catalog%20Cleared')
891
892    def manage_catalogReindex(self, REQUEST, RESPONSE, URL1): ###(
893        """ clear the catalog, then re-index everything """
894
895        elapse = time.time()
896        c_elapse = time.clock()
897
898        pgthreshold = self._getProgressThreshold()
899        handler = (pgthreshold > 0) and ZLogHandler(pgthreshold) or None
900        self.refreshCatalog(clear=1, pghandler=handler)
901
902        elapse = time.time() - elapse
903        c_elapse = time.clock() - c_elapse
904
905        RESPONSE.redirect(
906            URL1 +
907            '/manage_catalogAdvanced?manage_tabs_message=' +
908            urllib.quote('Catalog Updated \n'
909                         'Total time: %s\n'
910                         'Total CPU time: %s' % (`elapse`, `c_elapse`)))
911    ###)
912
913    def fill_certificates_dict(self): ###(
914        "return certificate data in  dict"
915        certificates_brains = self.portal_catalog(portal_type ='Certificate')
916        d = {}
917        for cb in certificates_brains:
918            certificate_doc = cb.getObject().getContent()
919            cb_path = cb.getPath().split('/')
920            ld = {}
921            ld['faculty'] = cb_path[-4]
922            ld['department'] = cb_path[-3]
923            ld['end_level'] = getattr(certificate_doc,'end_level','999')
924            ld['study_mode'] = getattr(certificate_doc,'study_mode','')
925            d[cb.getId] = ld
926        return d
927    ###)
928
929    def get_from_doc_department(self,doc,cached_data={}): ###(
930        "return the students department"
931        if doc is None:
932            return None
933        if hasattr(self,"_v_certificates") and self._v_certificates.has_key(doc.study_course):
934            return self._v_certificates[doc.study_course]['department']
935        certificate_res = self.portal_catalog(id = doc.study_course)
936        if len(certificate_res) != 1:
937            return None
938        return certificate_res[0].getPath().split('/')[-3]
939
940    def get_from_doc_faculty(self,doc,cached_data={}):
941        "return the students faculty"
942        if doc is None:
943            return None
944        if hasattr(self,"_v_certificates") and self._v_certificates.has_key(doc.study_course):
945            return self._v_certificates[doc.study_course]['faculty']
946        certificate_res = self.portal_catalog(id = doc.study_course)
947        if len(certificate_res) != 1:
948            return None
949        return certificate_res[0].getPath().split('/')[-4]
950
951    def get_from_doc_end_level(self,doc,cached_data={}):
952        "return the students end_level"
953        if doc is None:
954            return None
955        if hasattr(self,"_v_certificates") and self._v_certificates.has_key(doc.study_course):
956            return self._v_certificates[doc.study_course]['end_level']
957        certificate_res = self.portal_catalog(id = doc.study_course)
958        if len(certificate_res) != 1:
959            return None
960        return getattr(certificate_res[0].getObject().getContent(),'end_level','unknown')
961
962    def get_from_doc_level(self,doc,cached_data={}):
963        "return the students level"
964        if doc is None:
965            return None
966        return getattr(doc,'current_level',None)
967
968    #def get_from_doc_mode(self,doc,cached_data={}):
969    #    "return the students mode"
970    #    if doc is None:
971    #        return None
972    #    cm = getattr(doc,'current_mode',None)
973    #    return cm
974   
975    def get_from_doc_mode(self,doc,cached_data={}):
976        "return the students mode"
977        if doc is None:
978            return None
979        if hasattr(self,"_v_certificates") and self._v_certificates.has_key(doc.study_course):
980            return self._v_certificates[doc.study_course]['study_mode']
981        certificate_res = self.portal_catalog(id = doc.study_course)
982        if len(certificate_res) != 1:
983            return None
984        return getattr(certificate_res[0].getObject().getContent(),'study_mode','unknown')   
985
986
987    def get_from_doc_session(self,doc,cached_data={}):
988        "return the students current_session"
989        if doc is None:
990            return None
991        return getattr(doc,'current_session',None)
992
993    def get_from_doc_entry_session(self,doc,cached_data={}):
994        "return the students entry_session"
995        if doc is None:
996            return None
997        es = getattr(doc,'entry_session',None)
998        if es is not None and len(es) == 2:
999            return es
1000        elif len(es) == 9:
1001            return es[2:4]   
1002        try:
1003            digit = int(doc.jamb_reg_no[0])
1004        except:
1005            return "-1"
1006        if digit < 8:
1007            return "0%c" % doc.jamb_reg_no[0]
1008        return "9%c" % doc.jamb_reg_no[0]
1009
1010    def get_from_doc_course(self,doc,cached_data={}):
1011        "return the students study_course"
1012        if doc is None:
1013            return None
1014        return getattr(doc,'study_course',None)
1015
1016    def get_from_doc_name(self,doc,cached_data={}):
1017        "return the students name from the personal"
1018        if doc is None:
1019            return None
1020        return "%s %s %s" % (doc.firstname,doc.middlename,doc.lastname)
1021
1022    def get_from_doc_verdict(self,doc,cached_data={}):
1023        "return the students study_course"
1024        if doc is None:
1025            return None
1026        return getattr(doc,'current_verdict',None)
1027    ###)
1028
1029    def reindexIndex(self, name, REQUEST,pghandler=None): ###(
1030        if isinstance(name, str):
1031            name = (name,)
1032        reindextypes = {}
1033        reindex_special = []
1034        for n in name:
1035            if n in ("review_state","registered_courses"):
1036                reindex_special.append(n)
1037            else:
1038                for pt in self.affected_types.keys():
1039                    if n in self.affected_types[pt]['fields']:
1040                        if reindextypes.has_key(pt):
1041                            reindextypes[pt].append(n)
1042                        else:
1043                            reindextypes[pt]= [n]
1044                        break
1045        cached_data = {}
1046        if set(name).intersection(set(('faculty','department','end_level'))):
1047            cached_data = self.fill_certificates_dict()
1048        students = self.portal_catalog(portal_type="Student")
1049        if hasattr(self,'portal_catalog_real'):
1050            aq_portal = self.portal_catalog_real.evalAdvancedQuery
1051        else:
1052            aq_portal = self.portal_catalog.evalAdvancedQuery
1053        num_objects = len(students)
1054        if pghandler:
1055            pghandler.init('Refreshing catalog: %s' % self.absolute_url(1), num_objects)
1056        noattr = set(('StudentClearance','StudentPersonal')) & set(reindextypes.keys())
1057        #import pdb;pdb.set_trace()
1058        for i in xrange(num_objects):
1059            if pghandler: pghandler.report(i)
1060            student_brain = students[i]
1061            student_object = student_brain.getObject()
1062            # query = Eq('path',student_brain.getPath())
1063            # sub_brains_list = aq_portal(query)
1064            # sub_brains = {}
1065            # for sub_brain in sub_brains_list:
1066            #     sub_brains[sub_brain.portal_type] = sub_brain
1067            # student_path = student_brain.getPath()
1068            data = {}
1069            modified = False
1070            sid = data['id'] = student_brain.getId
1071            if reindex_special and 'review_state' in reindex_special:
1072                modified = True
1073                data['review_state'] = student_object.portal_workflow.getInfoFor(student_object,'review_state',None)
1074            sub_objects = False
1075            for pt in reindextypes.keys():
1076                modified = True
1077                try:
1078                    doc = getattr(student_object,self.affected_types[pt]['id']).getContent()
1079                    #doc = sub_brains[pt].getObject().getContent()
1080                    # path = "%s/%s" % (student_path,self.affected_types[pt]['id'])
1081                    # doc = self.unrestrictedTraverse(path).getContent()
1082                    sub_objects = True
1083                except:
1084                    continue
1085                for field in set(name).intersection(self.affected_types[pt]['fields']):
1086                    if hasattr(self,'get_from_doc_%s' % field):
1087                        data[field] = getattr(self,'get_from_doc_%s' % field)(doc,
1088                                                                              cached_data=cached_data)
1089                    else:
1090                        data[field] = getattr(doc,field)
1091            if not sub_objects and noattr:
1092                import_res = self.returning_import(id = sid)
1093                if not import_res:
1094                    continue
1095                import_record = import_res[0]
1096                data['matric_no'] = import_record.matric_no
1097                data['sex'] = import_record.Sex == 'F'
1098                data['name'] = "%s %s %s" % (import_record.Firstname,
1099                                             import_record.Middlename,
1100                                             import_record.Lastname)
1101                data['jamb_reg_no'] = import_record.Entryregno
1102            #if reindex_special and 'registered_courses' in reindex_special:
1103            #    try:
1104            #        study_course = getattr(student_object,"study_course")
1105            #        level_ids = study_course.objectIds()
1106            #    except:
1107            #        continue
1108            #    if not level_ids:
1109            #        continue
1110            #    modified = True
1111            #    level_ids.sort()
1112            #    course_ids = getattr(study_course,level_ids[-1]).objectIds()
1113            #    courses = []
1114            #    for c in course_ids:
1115            #        if c.endswith('_co'):
1116            #            courses.append(c[:-3])
1117            #        else:
1118            #            courses.append(c)
1119            #    data['registered_courses'] = courses
1120            if modified:
1121                self.modifyRecord(**data)
1122        if pghandler: pghandler.finish()
1123    ###)
1124
1125    def refreshCatalog(self, clear=0, pghandler=None): ###(
1126        """ re-index everything we can find """
1127        students_folder = self.portal_url.getPortalObject().campus.students
1128        if clear:
1129            self._catalog.clear()
1130        students = self.portal_catalog(portal_type="Student")
1131        num_objects = len(students)
1132        cached_data = self.fill_certificates_dict()
1133        if pghandler:
1134            pghandler.init('Refreshing catalog: %s' % self.absolute_url(1), num_objects)
1135        for i in xrange(num_objects):
1136            if pghandler: pghandler.report(i)
1137            student_brain = students[i]
1138            spath = student_brain.getPath()
1139            student_object = student_brain.getObject()
1140            data = {}
1141            sid = data['id'] = student_brain.getId
1142            #data['review_state'] = student_brain.review_state
1143            data['review_state'] = student_object.portal_workflow.getInfoFor(student_object,'review_state',None)
1144            sub_objects = False
1145            for pt in self.affected_types.keys():
1146                modified = True
1147                try:
1148                    doc = getattr(student_object,self.affected_types[pt]['id']).getContent()
1149                    sub_objects = True
1150                except:
1151                    #from pdb import set_trace;set_trace()
1152                    continue
1153                for field in self.affected_types[pt]['fields']:
1154                    if hasattr(self,'get_from_doc_%s' % field):
1155                        data[field] = getattr(self,'get_from_doc_%s' % field)(doc,
1156                                                                              cached_data=cached_data)
1157                    else:
1158                        data[field] = getattr(doc,field,None)
1159            if not sub_objects:
1160                import_res = self.returning_import(id = sid)
1161                if not import_res:
1162                    continue
1163                import_record = import_res[0]
1164                data['matric_no'] = import_record.matric_no
1165                data['sex'] = import_record.Sex == 'F'
1166                data['name'] = "%s %s %s" % (import_record.Firstname,
1167                                             import_record.Middlename,
1168                                             import_record.Lastname)
1169                data['jamb_reg_no'] = import_record.Entryregno
1170            self.addRecord(**data)
1171        if pghandler: pghandler.finish()
1172    ###)
1173
1174    security.declarePrivate('notify_event_listener') ###(
1175    def notify_event_listener(self,event_type,object,infos):
1176        "listen for events"
1177        if not infos.has_key('rpath'):
1178            return
1179        pt = getattr(object,'portal_type',None)
1180        mt = getattr(object,'meta_type',None)
1181        students_catalog = self
1182        data = {}
1183        if pt == 'Student' and\
1184           mt == 'CPS Proxy Folder' and\
1185           event_type.startswith('workflow'):
1186            data['id'] = object.getId()
1187            data['review_state'] = self.portal_workflow.getInfoFor(object,'review_state',None)
1188            students_catalog.modifyRecord(**data)
1189            return
1190        rpl = infos['rpath'].split('/')
1191        if pt == 'Student' and mt == 'CPS Proxy Folder':
1192            student_id = object.id
1193            if event_type == "sys_add_object":
1194                try:
1195                    self.addRecord(id = student_id)
1196                except ValueError:
1197                    pass
1198                return
1199            elif event_type == 'sys_del_object':
1200                self.deleteRecord(student_id)
1201        if pt not in self.affected_types.keys():
1202            return
1203        if event_type not in ('sys_modify_object'):
1204            return
1205        if mt == 'CPS Proxy Folder':
1206            return
1207        if not hasattr(self,'_v_certificates'):
1208            self._v_certificates = self.fill_certificates_dict()
1209        for field in self.affected_types[pt]['fields']:
1210            if hasattr(self,'get_from_doc_%s' % field):
1211                data[field] = getattr(self,'get_from_doc_%s' % field)(object)
1212            else:
1213                data[field] = getattr(object,field)
1214        data['id'] = rpl[2]
1215        self.modifyRecord(**data)
1216    ###)
1217
1218
# Apply the ClassSecurityInfo declarations made on the class to Zope.
InitializeClass(StudentsCatalog)

###)
1222
class CertificatesCatalog(WAeUPTable): ###(
    """Flat catalog of certificate data, keyed by the certificate code.

    faculty and department are derived from the certificate's position
    below 'academics' in the portal path; all other fields come from the
    certificate document itself.
    """
    security = ClassSecurityInfo()

    meta_type = 'WAeUP Certificates Catalog'
    name =  "certificates_catalog"
    key = "code"
    def __init__(self,name=None):
        # Default to the class-level catalog name.
        if name ==  None:
            name =  self.name
        WAeUPTable.__init__(self, name)

    def manage_catalogReindex(self, REQUEST, RESPONSE, URL1): ###(
        """ clear the catalog, then re-index everything """

        elapse = time.time()
        c_elapse = time.clock()

        pgthreshold = self._getProgressThreshold()
        handler = (pgthreshold > 0) and ZLogHandler(pgthreshold) or None
        self.refreshCatalog(clear=1, pghandler=handler)

        elapse = time.time() - elapse
        c_elapse = time.clock() - c_elapse

        RESPONSE.redirect(
            URL1 +
            '/manage_catalogAdvanced?manage_tabs_message=' +
            urllib.quote('Catalog Updated \n'
                         'Total time: %s\n'
                         'Total CPU time: %s' % (`elapse`, `c_elapse`)))
    ###)

    def reindexIndex(self, name, REQUEST,pghandler=None): ###(
        """Re-index only the given index name(s) for every certificate."""
        if isinstance(name, str):
            name = (name,)
        certificates = self.portal_catalog(portal_type="Certificate")
        num_objects = len(certificates)
        if pghandler:
            pghandler.init('Refreshing catalog: %s' % self.absolute_url(1), num_objects)
        for i in xrange(num_objects):
            if pghandler: pghandler.report(i)
            certificate_brain = certificates[i]
            certificate_object = certificate_brain.getObject()
            pl = certificate_brain.getPath().split('/')
            data = {}
            cid = data[self.key] = certificate_brain.getId
            # faculty/department are positional components of the path:
            # .../<faculty>/<department>/<certificate>/<...>
            data['faculty'] = pl[-4]
            data['department'] = pl[-3]
            doc = certificate_object.getContent()
            for field in name:
                if field not in (self.key,'faculty','department'):
                    data[field] = getattr(doc,field)
            self.modifyRecord(**data)
        if pghandler: pghandler.finish()
    ###)

    def refreshCatalog(self, clear=0, pghandler=None): ###(
        """ re-index everything we can find """
        if clear:
            self._catalog.clear()
        certificates = self.portal_catalog(portal_type="Certificate")
        num_objects = len(certificates)
        if pghandler:
            pghandler.init('Refreshing catalog: %s' % self.absolute_url(1), num_objects)
        #from pdb import set_trace;set_trace()
        for i in xrange(num_objects):
            if pghandler: pghandler.report(i)
            certificate_brain = certificates[i]
            certificate_doc = certificate_brain.getObject().getContent()
            pl = certificate_brain.getPath().split('/')
            data = {}
            for field in self.schema():
                data[field] = getattr(certificate_doc,field,None)
            data[self.key] = certificate_brain.getId
            # faculty/department sit directly below 'academics' in the path
            ai = pl.index('academics')
            data['faculty'] = pl[ai +1]
            data['department'] = pl[ai +2]
            if clear:
                self.addRecord(**data)
            else:
                self.modifyRecord(**data)
        if pghandler: pghandler.finish()
    ###)

    security.declarePrivate('notify_event_listener') ###(
    def notify_event_listener(self,event_type,object,infos):
        "listen for events and keep the catalog record in sync"
        if not infos.has_key('rpath'):
            return
        pt = getattr(object,'portal_type',None)
        mt = getattr(object,'meta_type',None)
        if pt != 'Certificate':
            return
        data = {}
        rpl = infos['rpath'].split('/')
        if event_type not in ("sys_add_object","sys_modify_object","sys_del_object"):
            return
        certificate_id = object.getId()
        data[self.key] = certificate_id
        if event_type == "sys_add_object" and mt == 'CPS Proxy Folder':
            # New proxy: create the record, then (if the document already
            # exists) fill in its fields.
            try:
                self.addRecord(**data)
            except ValueError:
                # record already exists
                return
            certificate_id = object.getId()
            doc = object.getContent()
            if doc is None:
                return
            for field in self.schema():
                data[field] = getattr(doc,field,None)
            data[self.key] = certificate_id
            ai = rpl.index('academics')
            data['faculty'] = rpl[ai +1]
            data['department'] = rpl[ai +2]
            self.modifyRecord(**data)
            return
        if event_type == "sys_del_object":
            self.deleteRecord(certificate_id)
            return
        if event_type == "sys_modify_object" and mt == 'Certificate':
            #from pdb import set_trace;set_trace()
            for field in self.schema():
                data[field] = getattr(object,field,None)
            # on modify the event object is the document; its aq_parent is
            # the proxy whose id is the certificate code
            certificate_id = object.aq_parent.getId()
            data[self.key] = certificate_id
            ai = rpl.index('academics')
            data['faculty'] = rpl[ai +1]
            data['department'] = rpl[ai +2]
            self.modifyRecord(**data)
    ###)
1353
1354
# Apply the ClassSecurityInfo declarations made on the class to Zope.
InitializeClass(CertificatesCatalog)
###)
1357
class CoursesCatalog(WAeUPTable): ###(
    """Flat catalog of course data, keyed by the course code.

    Mirrors CertificatesCatalog: faculty and department are derived from
    the course's position below 'academics' in the portal path; all other
    fields come from the course document itself.
    """
    security = ClassSecurityInfo()

    meta_type = 'WAeUP Courses Catalog'
    name =  "courses_catalog"
    key = "code"
    def __init__(self,name=None):
        # Default to the class-level catalog name.
        if name ==  None:
            name =  self.name
        WAeUPTable.__init__(self, name)

    def manage_catalogReindex(self, REQUEST, RESPONSE, URL1): ###(
        """ clear the catalog, then re-index everything """

        elapse = time.time()
        c_elapse = time.clock()

        pgthreshold = self._getProgressThreshold()
        handler = (pgthreshold > 0) and ZLogHandler(pgthreshold) or None
        self.refreshCatalog(clear=1, pghandler=handler)

        elapse = time.time() - elapse
        c_elapse = time.clock() - c_elapse

        RESPONSE.redirect(
            URL1 +
            '/manage_catalogAdvanced?manage_tabs_message=' +
            urllib.quote('Catalog Updated \n'
                         'Total time: %s\n'
                         'Total CPU time: %s' % (`elapse`, `c_elapse`)))
    ###)

    def reindexIndex(self, name, REQUEST,pghandler=None): ###(
        """Re-index only the given index name(s) for every course."""
        if isinstance(name, str):
            name = (name,)
        courses = self.portal_catalog(portal_type="Course")
        num_objects = len(courses)
        if pghandler:
            pghandler.init('Refreshing catalog: %s' % self.absolute_url(1), num_objects)
        for i in xrange(num_objects):
            if pghandler: pghandler.report(i)
            course_brain = courses[i]
            course_object = course_brain.getObject()
            pl = course_brain.getPath().split('/')
            data = {}
            cid = data[self.key] = course_brain.getId
            # faculty/department are positional components of the path:
            # .../<faculty>/<department>/<course>/<...>
            data['faculty'] = pl[-4]
            data['department'] = pl[-3]
            doc = course_object.getContent()
            for field in name:
                if field not in (self.key,'faculty','department'):
                    data[field] = getattr(doc,field)
            self.modifyRecord(**data)
        if pghandler: pghandler.finish()
    ###)

    def refreshCatalog(self, clear=0, pghandler=None): ###(
        """ re-index everything we can find """
        if clear:
            self._catalog.clear()
        courses = self.portal_catalog(portal_type="Course")
        num_objects = len(courses)
        if pghandler:
            pghandler.init('Refreshing catalog: %s' % self.absolute_url(1), num_objects)
        #from pdb import set_trace;set_trace()
        for i in xrange(num_objects):
            if pghandler: pghandler.report(i)
            course_brain = courses[i]
            course_doc = course_brain.getObject().getContent()
            pl = course_brain.getPath().split('/')
            data = {}
            for field in self.schema():
                data[field] = getattr(course_doc,field,None)
            data[self.key] = course_brain.getId
            # faculty/department sit directly below 'academics' in the path
            ai = pl.index('academics')
            data['faculty'] = pl[ai +1]
            data['department'] = pl[ai +2]
            if clear:
                self.addRecord(**data)
            else:
                self.modifyRecord(**data)
        if pghandler: pghandler.finish()
    ###)

    security.declarePrivate('notify_event_listener') ###(
    def notify_event_listener(self,event_type,object,infos):
        "listen for events and keep the catalog record in sync"
        if not infos.has_key('rpath'):
            return
        pt = getattr(object,'portal_type',None)
        mt = getattr(object,'meta_type',None)
        if pt != 'Course':
            return
        data = {}
        rpl = infos['rpath'].split('/')
        if event_type not in ("sys_add_object","sys_modify_object","sys_del_object"):
            return
        course_id = object.getId()
        data[self.key] = course_id
        if event_type == "sys_add_object" and mt == 'CPS Proxy Folder':
            # New proxy: create the record, then (if the document already
            # exists) fill in its fields.
            try:
                self.addRecord(**data)
            except ValueError:
                # record already exists
                return
            course_id = object.getId()
            doc = object.getContent()
            if doc is None:
                return
            for field in self.schema():
                data[field] = getattr(doc,field,None)
            data[self.key] = course_id
            ai = rpl.index('academics')
            data['faculty'] = rpl[ai +1]
            data['department'] = rpl[ai +2]
            self.modifyRecord(**data)
            return
        if event_type == "sys_del_object":
            self.deleteRecord(course_id)
            return
        if event_type == "sys_modify_object" and mt == 'Course':
            #from pdb import set_trace;set_trace()
            for field in self.schema():
                data[field] = getattr(object,field,None)
            # on modify the event object is the document; its aq_parent is
            # the proxy whose id is the course code
            course_id = object.aq_parent.getId()
            data[self.key] = course_id
            ai = rpl.index('academics')
            data['faculty'] = rpl[ai +1]
            data['department'] = rpl[ai +2]
            self.modifyRecord(**data)
    ###)
1488
1489
# Apply the ClassSecurityInfo declarations made on the class to Zope.
InitializeClass(CoursesCatalog)
###)
1492
class CourseResults(WAeUPTable): ###(
    """Catalog of per-student course results.

    Each record is keyed by the composite string
    "<student_id>|<level_id>|<course_id>" and mirrors the course-result
    documents stored below a student's study-level folder.
    """
    security = ClassSecurityInfo()

    meta_type = 'WAeUP Results Catalog'
    name = "course_results"
    key = "key" #student_id + level + course_id
    def __init__(self,name=None):
        # Default the catalog id to the class-level name.
        if name ==  None:
            name = self.name
        WAeUPTable.__init__(self, name)
        self._queue = []

    def addMultipleRecords(self, records): ###(
        """add many records

        Catalogs every record dict in `records` under its composite uid.
        Records whose student_id/level_id/course_id already match an
        existing entry are skipped; their uids are returned so the caller
        can report duplicates.
        """
        existing_uids = []
        for data in records:
            uid = "%(student_id)s|%(level_id)s|%(course_id)s" % data
            data['%s' % self.key] = uid
            query = Eq(self.key, uid)
            res = self.course_results.evalAdvancedQuery(query)
            if len(res) > 0:
                rec = res[0]
                equal = True
                # Only skip when all three key components really match.
                for attr in ('student_id','level_id','course_id'):
                    if getattr(rec,attr,'') != data[attr]:
                        equal = False
                        break
                if equal:
                    existing_uids += uid,
                    continue
            # (Re-)catalog under uid; replaces any record with the same uid.
            self.catalog_object(dict2ob(data), uid=uid)
        return existing_uids
    ###)

    def deleteResultsHere(self,level_id,student_id): ###(
        """Remove all result records of one student at one study level."""
        query = Eq('student_id',student_id) & Eq('level_id', level_id)
        course_results = self.course_results.evalAdvancedQuery(query)
        #import pdb;pdb.set_trace()
        for result in course_results:
            self.deleteRecord(result.key)
    ###)

    def moveResultsHere(self,level,student_id): ###(
        """Move course-result objects from a level folder into this catalog.

        Every child object of `level` becomes a catalog record (unless a
        record for that course already exists); afterwards ALL children are
        deleted from the folder, including the skipped ones.
        """
        #import pdb;pdb.set_trace()
        level_id = level.getId()
        query = Eq('student_id',student_id) & Eq('level_id', level_id)
        course_results = self.course_results.evalAdvancedQuery(query)
        existing_courses = [cr.code for cr in course_results]
        to_delete = []
        for code,obj in level.objectItems():
            to_delete.append(code)
            carry_over = False
            # A '_co' id suffix marks a carry-over course.
            if code.endswith('_co'):
                carry_over = True
                code  = code[:-3]
            if code in existing_courses:
                continue
            course_result_doc = obj.getContent()
            data = {}
            course_id = code
            for field in self.schema():
                data[field] = getattr(course_result_doc,field,'')
            data['key'] = key = "%(student_id)s|%(level_id)s|%(course_id)s" % vars()
            data['student_id'] = student_id
            data['level_id'] = level_id
            session_id = self.getLevelSession(level.getContent(),student_id,level_id)
            data['session_id'] = session_id
            #data['queue_status'] = OBJECT_CREATED
            data['code'] = course_id
            data['carry_over'] = carry_over
            self.catalog_object(dict2ob(data), uid=key)
        level.manage_delObjects(to_delete)
    ###)

    def getCourses(self,student_id,level_id): ###(
        """Collect a student's results at one level, grouped for display.

        Returns (total_credits, gpa, carry_overs, normal1, normal2, normal3)
        where normal1/2/3 are first-semester, second-semester and
        unclassified courses.
        NOTE(review): 'gpa' here is the weighted grade-point *sum*
        (weight * credits accumulated); presumably the caller divides by
        total_credits -- TODO confirm.
        """
        query = Eq('student_id',student_id) & Eq('level_id', level_id)
        course_results = self.course_results.evalAdvancedQuery(query)
        carry_overs = []
        normal1 = []
        normal2 = []
        normal3 = []
        total_credits = 0
        gpa = 0
        for brain in course_results:
            d = {}

            # Copy all indexed fields, normalising catalog Missing.Value
            # placeholders to empty strings.
            for field in self.schema():
                d[field] = getattr(brain,field,None)
                if repr(d[field]) == 'Missing.Value':
                    d[field] = ''
            d['weight'] = ''
            d['grade'] = ''
            d['score'] = ''

            if str(brain.credits).isdigit():
                credits = int(brain.credits)
                total_credits += credits
                score = getattr(brain,'score',0)
                if score and str(score).isdigit() and int(score) > 0:
                    score = int(score)
                    grade,weight = self.getGradesFromScore(score,'')
                    gpa += weight * credits
                    d['weight'] = weight
                    d['grade'] = grade
                    d['score'] = score

            #if str(brain.ca1).isdigit() and str(brain.ca2).isdigit() and str(brain.exam).isdigit():
            #    d['score_calc'] = int(brain.ca1) + int(brain.ca2) + int(brain.exam)
            #else:
            #    d['score_calc'] = ''
            # Recompute the score from its parts; the bare except maps any
            # non-numeric ca1/ca2/exam value to ''.
            try:
                d['score_calc'] = float(brain.ca1) + float(brain.ca2) + float(brain.exam)
            except:
                d['score_calc'] = ''

            if d['score_calc']:
                # NOTE(review): at the other call sites getGradesFromScore
                # is unpacked as (grade, weight); here the whole return
                # value is stored in d['grade'] without unpacking -- confirm
                # the intended return shape.
                grade = self.getGradesFromScore(d['score_calc'],level_id)
                d['grade'] = grade

            d['coe'] = ''
            if brain.core_or_elective:
                d['coe'] = 'Core'
            elif brain.core_or_elective == False:
                d['coe'] = 'Elective'
            id = code = d['id'] = brain.code
            d['code'] = code
            res = self.courses_catalog.evalAdvancedQuery(Eq('code',code))
            if res:
                course = res[0]
                d['title'] = course.title
                # The courses_catalog contains strings and integers in its semester field.
                # Maybe this can be fixed by reindexing the catalog. The schema of course says 'CPS Int Field'.
                d['semester'] = str(course.semester)
            else:
                d['title'] = "Course has been removed from course list"
                d['semester'] = ''
            if brain.carry_over:
                d['coe'] = 'CO'
                carry_overs.append(d)
            else:
                if d['semester'] == '1':
                    normal1.append(d)

                elif d['semester'] == '2':
                    normal2.append(d)
                else:
                    normal3.append(d)
        #normal.sort(cmp=lambda x,y: cmp("%(semester)s%(code)s" % x,
        #                                "%(semester)s%(code)s" % y))
        # Sort carry-overs by semester then course code (Python 2 cmp sort).
        carry_overs.sort(cmp=lambda x,y: cmp("%(semester)s%(code)s" % x,
                                             "%(semester)s%(code)s" % y))
        return total_credits,gpa,carry_overs,normal1,normal2,normal3
    ###)


    def getAllCourses(self,student_id): ###(
        """Return a list of dicts for ALL results of a student, any level.

        NOTE(review): getGradesFromScore is called with one argument here
        but with two in getCourses -- confirm the second parameter has a
        default.
        """
        query = Eq('student_id',student_id)
        course_results = self.course_results.evalAdvancedQuery(query)
        courses = []
        for brain in course_results:
            d = {}

            for field in self.schema():
                d[field] = getattr(brain,field,'')

            d['weight'] = ''
            d['grade'] = ''
            d['score'] = ''

            if str(brain.credits).isdigit():
                credits = int(brain.credits)
                score = getattr(brain,'score',0)
                if score and str(score).isdigit() and int(score) > 0:
                    score = int(score)
                    grade,weight = self.getGradesFromScore(score)
                    d['weight'] = weight
                    d['grade'] = grade
                    d['score'] = score
            d['coe'] = ''
            if brain.core_or_elective:
                d['coe'] = 'Core'
            elif brain.core_or_elective == False:
                d['coe'] = 'Elective'
            id = code = d['id'] = brain.code
            d['code'] = code
            res = self.courses_catalog.evalAdvancedQuery(Eq('code',code))
            if res:
                course = res[0]
                d['title'] = course.title
                # The courses_catalog contains strings and integers in its semester field.
                # Maybe this can be fixed by reindexing the catalog. The schema of course says 'CPS Int Field'.
                d['semester'] = str(course.semester)
            else:
                d['title'] = "Course has been removed from course list"
                d['semester'] = ''
            if brain.carry_over:
                d['coe'] = 'CO'
            courses.append(d)
        return courses
    ###)

InitializeClass(CourseResults)
###)
1696
class OnlinePaymentsImport(WAeUPTable): ###(
    """Import table for online payment transactions, keyed by order_id."""

    meta_type = 'WAeUP Online Payment Transactions'
    name = "online_payments_import"
    key = "order_id"

    def __init__(self, name=None):
        """Initialise the underlying table; fall back to the class name."""
        if name is None:
            name = self.name
        WAeUPTable.__init__(self, name)


InitializeClass(OnlinePaymentsImport)
###)
1710
class ReturningImport(WAeUPTable): ###(
    """Import table for returning students, keyed by matriculation number."""

    meta_type = 'Returning Import Table'
    name = "returning_import"
    key = "matric_no"

    def __init__(self, name=None):
        """Initialise the underlying table; fall back to the class name."""
        if name is None:
            name = self.name
        WAeUPTable.__init__(self, name)


InitializeClass(ReturningImport)
###)
1724
class ResultsImport(WAeUPTable): ###(
    """Import table for examination results, keyed by the 'key' field."""

    meta_type = 'Results Import Table'
    name = "results_import"
    key = "key"

    def __init__(self, name=None):
        """Initialise the underlying table; fall back to the class name."""
        if name is None:
            name = self.name
        WAeUPTable.__init__(self, name)


InitializeClass(ResultsImport)

###)
1739
class PaymentsCatalog(WAeUPTable): ###(
    """Catalog of student payment records, keyed by order_id.

    Kept in sync with 'Payment' content objects through the CPS event
    service: modifications update (or, if missing, create) the catalog
    record; deletion of the proxy folder removes it.
    """
    security = ClassSecurityInfo()

    meta_type = 'WAeUP Payments Catalog'
    name = "payments_catalog"
    key = "order_id"
    def __init__(self,name=None):
        # Default the catalog id to the class-level name.
        if name is None:
            name = self.name
        WAeUPTable.__init__(self, name)


    security.declarePrivate('notify_event_listener') ###(
    def notify_event_listener(self,event_type,object,infos):
        "listen for events"
        if not infos.has_key('rpath'):
            return
        pt = getattr(object,'portal_type',None)
        mt = getattr(object,'meta_type',None)
        data = {}
        if pt != 'Payment':
            return
        if event_type == 'sys_del_object' and mt == 'CPS Proxy Folder':
            # Deleting the proxy removes the catalog record.
            self.deleteRecord(object.getContent().order_id)
        if mt == 'CPS Proxy Folder':
            return # is handled only for the real object
        # BUGFIX: the original test read "not in ('sys_modify_object')";
        # without a trailing comma that is a *substring* test against the
        # string 'sys_modify_object', so e.g. a 'modify_object' event also
        # slipped through. The one-element tuple makes it a real membership
        # test, matching the == 'sys_modify_object' convention of the other
        # listeners in this module.
        if event_type not in ('sys_modify_object',):
            return
        for field in self.schema():
            data[field] = getattr(object,field,'')
        rpl = infos['rpath'].split('/')
        # rpath looks like .../students/<student_id>/.../<payment>; the
        # student id sits four segments from the end -- TODO confirm this
        # layout holds for every payment location.
        student_id = rpl[-4]
        data['student_id'] = student_id
        modified = False
        try:
            self.modifyRecord(**data)
            modified = True
        except KeyError:
            # No record for this order_id yet; fall through to addRecord.
            #logger = logging.getLogger('WAeUPTables.PaymentsCatalog.%s' % self.__name__)
            #logger.info("could not modify entry for %(student_id)s with %(order_id)s" % data)
            pass
        if not modified:
            try:
                self.addRecord(**data)
            except:
                # Best effort only: an event listener must not break the
                # triggering transaction, so log and carry on.
                logger = logging.getLogger('WAeUPTables.PaymentsCatalog.notify_event_listener')
                logger.info("could not add or modify entry for %(student_id)s with %(order_id)s" % data)
        ###)


InitializeClass(PaymentsCatalog)

###)
1794
# BBB: backward-compatibility alias -- keeps the historical misspelling
# 'AccomodationTable' working for old imports and persisted objects.
AccomodationTable = AccommodationTable
# Note: See TracBrowser for help on using the repository browser.