source: WAeUP_SRP/trunk/WAeUPTables.py @ 3730

Last change on this file since 3730 was 3707, checked in by Henrik Bettermann, 16 years ago
  • OIS customizations according to Issoufou's wish list of Oct 2 (2, 4, 5, 6, 7, 8)
  • Property svn:keywords set to Id
File size: 66.5 KB
1#-*- mode: python; mode: fold -*-
2# (C) Copyright 2005 AixtraWare <http://aixtraware.de>
3# Author: Joachim Schmitz <js@aixtraware.de>
4#
5# This program is free software; you can redistribute it and/or modify
6# it under the terms of the GNU General Public License version 2 as published
7# by the Free Software Foundation.
8#
9# This program is distributed in the hope that it will be useful,
10# but WITHOUT ANY WARRANTY; without even the implied warranty of
11# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
12# GNU General Public License for more details.
13#
14# You should have received a copy of the GNU General Public License
15# along with this program; if not, write to the Free Software
16# Foundation, Inc., 59 Temple Place - Suite 330, Boston, MA
17# 02111-1307, USA.
18#
19# $Id: WAeUPTables.py 3707 2008-10-04 05:41:27Z henrik $
20
21from zope.interface import implements
22from Globals import InitializeClass
23from Products.ZCatalog.ZCatalog import ZCatalog
24from Products.ZCatalog.ProgressHandler import ZLogHandler
25from AccessControl import ClassSecurityInfo
26from Products.CMFCore.permissions import ModifyPortalContent
27from Products.CMFCore.utils import getToolByName
28from Products.CMFCore.CatalogTool import CatalogTool
29from Products.CPSSchemas.StorageAdapter import MappingStorageAdapter
30from Products.CPSSchemas.DataStructure import DataStructure
31from Products.CPSSchemas.DataModel import DataModel
32from Products.AdvancedQuery import Eq, Between, Le,In
33import urllib
34import DateTime,time
35import csv,re
36import logging
37import Globals
38p_home = Globals.package_home(globals())
39i_home = Globals.INSTANCE_HOME
40
41ADDING_SHEDULED = "adding_sheduled"
42OBJECT_CREATED = "object_created"
43NOT_OCCUPIED = 'not_occupied'
44
45from interfaces import IWAeUPTable
46
47class AttributeHolder(object):
48    pass
49
def dict2ob(d):
    ob = AttributeHolder()
    for key, value in d.items():
        setattr(ob, key, value)
    return ob
55
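# Usage sketch (illustrative, with made-up values): dict2ob wraps a plain
# dictionary in an AttributeHolder so that catalog_object() can read the
# values as attributes, the way it would from a real content object.
#
#   rec = dict2ob({'id': 'A123456', 'name': 'Jane Doe'})
#   rec.id    # -> 'A123456'
#   rec.name  # -> 'Jane Doe'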
56class WAeUPTable(ZCatalog): ###(
57
58    implements(IWAeUPTable)
59    security = ClassSecurityInfo()
60    meta_type = None
61
62    def __init__(self,name=None):
63        if name ==  None:
64            name = self.name
65        ZCatalog.__init__(self,name)
66
67    def refreshCatalog(self, clear=0, pghandler=None): ###(
68        """ don't refresh for a normal table """
69
        if self.REQUEST and self.REQUEST.RESPONSE:
            self.REQUEST.RESPONSE.redirect(
              self.REQUEST.get('URL1') +
              '/manage_catalogAdvanced?manage_tabs_message=Catalog%20refresh%20not%20implemented')
74
75###)
76
77    def manage_catalogClear(self, REQUEST=None, RESPONSE=None, URL1=None): ###(
78        """ clears the whole enchilada """
79
80        #if REQUEST and RESPONSE:
81        #    RESPONSE.redirect(
82        #      URL1 +
83        #      '/manage_catalogAdvanced?manage_tabs_message=Catalog%20Clearing%20disabled')
84
85        self._catalog.clear()
86        if REQUEST and RESPONSE:
87            RESPONSE.redirect(
88              URL1 +
89              '/manage_catalogAdvanced?manage_tabs_message=Catalog%20cleared')
90
91###)
92
93    def record2dict(self,fields,record): ###(
94        d = {}
95        for key in fields:
96            v = getattr(record, key, None)
97            v_dump = v
98            if key == 'sex':
99                if v == True:
100                    v_dump = 'F'
101                elif v == False:
102                    v_dump = 'M'
103                d[key] = v_dump
104            elif v:
105                if key == 'lga':
106                    v_dump = self.portal_vocabularies.local_gov_areas.get(v)
107                    if not v_dump:
108                        v_dump = v
109                elif key == 'aos':
110                    v_dump = self.portal_vocabularies.aos.get(v)
111                d[key] = v_dump
112            else:
113                d[key] = ''
114        return d
115
116###)
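    # Usage sketch (illustrative, with made-up values): record2dict flattens a
    # catalog brain into a dict of strings for CSV export. The boolean 'sex'
    # field is rendered as 'F'/'M', and 'lga'/'aos' codes are resolved through
    # portal_vocabularies where possible:
    #
    #   fields = ('id', 'sex', 'lga')
    #   self.record2dict(fields, brain)
    #   # -> {'id': 'A123456', 'sex': 'F', 'lga': '<vocabulary title for the stored lga code>'}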
117
118    def addRecord(self, **data): ###(
        # The uid is the value of the table's key field (e.g. "bed" for the accommodation table).
120        uid = data[self.key]
121        res = self.searchResults({"%s" % self.key : uid})
122        if len(res) > 0:
123            raise ValueError("More than one record with uid %s" % uid)
124        self.catalog_object(dict2ob(data), uid=uid)
125        return uid
126
127###)
128
129    def deleteRecord(self, uid):
130        self.uncatalog_object(uid)
131
132    def getRecordByKey(self,key):
133        if not key:
134            return None
135        res = self.evalAdvancedQuery(Eq(self.key,key))
136        if res:
137            return res[0]
138        return None
139
140    def searchAndSetRecord(self, **data):
        raise NotImplementedError
142
143    def modifyRecord(self, record=None, **data): ###(
144        #records = self.searchResults(uid=uid)
145        uid = data[self.key]
146        if record is None:
147            records = self.searchResults({"%s" % self.key : uid})
148            if len(records) > 1:
149                # Can not happen, but anyway...
150                raise ValueError("More than one record with uid %s" % uid)
151            if len(records) == 0:
152                raise KeyError("No record for uid %s" % uid)
153            record = records[0]
154        record_data = {}
155        for field in self.schema() + self.indexes():
156            record_data[field] = getattr(record, field)
157        # Add the updated data:
158        record_data.update(data)
159        self.catalog_object(dict2ob(record_data), uid)
160
161###)
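    # Usage sketch (illustrative): for a configured subclass whose key field is
    # 'id', the basic record API behaves like this:
    #
    #   table.addRecord(id='A123456', name='Jane Doe')    # ValueError if the uid already exists
    #   brain = table.getRecordByKey('A123456')           # catalog brain, or None
    #   table.modifyRecord(id='A123456', name='Jane D.')  # KeyError if no such record
    #   table.deleteRecord('A123456')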
162
163    def reindexIndex(self, name, REQUEST,pghandler=None): ###(
164        if isinstance(name, str):
165            name =  (name,)
166        paths = self._catalog.uids.items()
167        i = 0
168        #import pdb;pdb.set_trace()
169        for p,rid in paths:
170            i += 1
171            metadata = self.getMetadataForRID(rid)
172            record_data = {}
173            for field in name:
174                record_data[field] = metadata.get(field)
175            uid = metadata.get(self.key)
176            self.catalog_object(dict2ob(record_data), uid, idxs=name,
177                                update_metadata=0)
178
179###)
180
181    security.declareProtected(ModifyPortalContent,"exportAllRecords") ###(
182    def exportAllRecords(self):
183        "export a WAeUPTable"
184        #import pdb;pdb.set_trace()
185        fields = [field for field in self.schema()]
186        format = ','.join(['"%%(%s)s"' % fn for fn in fields])
187        csv = []
188        csv.append(','.join(['"%s"' % fn for fn in fields]))
189        for uid in self._catalog.uids:
190            records = self.searchResults({"%s" % self.key : uid})
191            if len(records) > 1:
192                # Can not happen, but anyway...
193                raise ValueError("More than one record with uid %s" % uid)
194            if len(records) == 0:
195                raise KeyError("No record for uid %s" % uid)
196            rec = records[0]
197            csv.append(format % rec)
198        current = DateTime.DateTime().strftime("%d-%m-%y_%H_%M_%S")
199        open("%s/import/%s-%s.csv" % (i_home,self.getId(),current),"w+").write('\n'.join(csv))
200
201###)
202
203    security.declareProtected(ModifyPortalContent,"dumpAll")###(
204    def dumpAll(self):
205        """dump all data in the table to a csv"""
206        member = self.portal_membership.getAuthenticatedMember()
207        logger = logging.getLogger('WAeUPTables.WAeUPTable.dumpAll')
208        current = DateTime.DateTime().strftime("%d-%m-%y_%H_%M_%S")
209        export_file = "%s/export/%s_%s.csv" % (i_home,self.__name__,current,)
210        res_list = []
211        lines = []
212        if hasattr(self,"export_keys"):
213            fields = self.export_keys
214        else:
215            fields = []
216            for f in self.schema():
217                fields.append(f)
218        headline = ','.join(fields)
219        out = open(export_file,"wb")
220        out.write(headline +'\n')
221        out.close()
222        out = open(export_file,"a")
223        csv_writer = csv.DictWriter(out,fields,)
224        records = self()
225        nr2export = len(records)
226        logger.info('%s starts dumping, %s records to export' % (member,nr2export))
227        chunk = 2000
228        total = 0
229        start = DateTime.DateTime().timeTime()
230        start_chunk = DateTime.DateTime().timeTime()
231        for record in records:
232            not_all = False
233            d = self.record2dict(fields,record)
234            lines.append(d)
235            total += 1
236            if total and not total % chunk or total == len(records):
237                csv_writer.writerows(lines)
238                anz = len(lines)
239                logger.info("wrote %(anz)d  total written %(total)d" % vars())
240                end_chunk = DateTime.DateTime().timeTime()
241                duration = end_chunk-start_chunk
242                per_record = duration/anz
243                till_now = end_chunk - start
                average_per_record = till_now/total
                estimated_end = DateTime.DateTime(start + average_per_record * nr2export)
246                estimated_end = estimated_end.strftime("%H:%M:%S")
247                logger.info('%(duration)4.1f, %(per_record)4.3f,end %(estimated_end)s' % vars())
248                start_chunk = DateTime.DateTime().timeTime()
249                lines = []
250        end = DateTime.DateTime().timeTime()
251        logger.info('total time %6.2f m' % ((end-start)/60))
252        import os
253        filename, extension = os.path.splitext(export_file)
254        from subprocess import call
255        msg = "wrote %(total)d records to %(export_file)s" % vars()
256        #try:
257        #    retcode = call('gzip %s' % (export_file),shell=True)
258        #    if retcode == 0:
259        #        msg = "wrote %(total)d records to %(export_file)s.gz" % vars()
260        #except OSError, e:
261        #    retcode = -99
262        #    logger.info("zip failed with %s" % e)
263        logger.info(msg)
264        args = {'portal_status_message': msg}
265        #url = self.REQUEST.get('URL1') + '?' + urlencode(args)
266        url = self.REQUEST.get('URL2')
267        return self.REQUEST.RESPONSE.redirect(url)
268    ###)
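    # Note (illustrative): dumpAll is meant to be called through the web, e.g.
    #
    #   http://<site>/<table id>/dumpAll
    #
    # It writes $INSTANCE_HOME/export/<table name>_<dd-mm-yy_HH_MM_SS>.csv in
    # chunks of 2000 rows, logging throughput and an estimated finish time per
    # chunk, and finally redirects the browser back to the calling page (URL2).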
269
270    security.declarePrivate("_import_old") ###(
271    def _import_old(self,filename,schema,layout, mode,logger):
272        "import data from csv"
273        import transaction
274        import random
275        pm = self.portal_membership
276        member = pm.getAuthenticatedMember()
277        current = DateTime.DateTime().strftime("%d-%m-%y_%H_%M_%S")
278        import_fn = "%s/import/%s.csv" % (i_home,filename)
279        imported_fn = "%s/import/%s_imported%s.csv" % (i_home,filename,current)
280        not_imported_fn = "%s/import/%s_not_imported%s.csv" % (i_home,filename,current)
281        start = True
282        tr_count = 1
283        total_imported = 0
284        total_not_imported = 0
285        total = 0
286        iname =  "%s" % filename
287        not_imported = []
288        imported = []
289        valid_records = []
290        invalid_records = []
291        d = {}
292        d['mode'] = mode
293        d['imported'] = total_imported
294        d['not_imported'] = total_not_imported
295        d['valid_records'] = valid_records
296        d['invalid_records'] = invalid_records
297        d['import_fn'] = import_fn
298        d['imported_fn'] = imported_fn
299        d['not_imported_fn'] = not_imported_fn
300        if schema is None:
301            em = 'No schema specified'
302            logger.error(em)
303            return d
304        if layout is None:
305            em = 'No layout specified'
306            logger.error(em)
307            return d
308        validators = {}
309        for widget in layout.keys():
310            try:
311                validators[widget] = layout[widget].validate
312            except AttributeError:
313                logger.info('%s has no validate attribute' % widget)
314                return d
315        # if mode == 'edit':
316        #     importer = self.importEdit
317        # elif mode == 'add':
318        #     importer = self.importAdd
319        # else:
320        #     importer = None
321        try:
322            items = csv.DictReader(open(import_fn,"rb"),
323                                   dialect="excel",
324                                   skipinitialspace=True)
325        except:
326            em = 'Error reading %s.csv' % filename
327            logger.error(em)
328            return d
329        #import pdb;pdb.set_trace()
330        for item in items:
331            if start:
332                start = False
333                logger.info('%s starts import from %s.csv' % (member,filename))
334                #import_keys = [k for k in item.keys() if not k.startswith('ignore')]
335                attrs = csv.reader(open("%s/import/%s.csv" % (i_home,filename),"rb"),
336                                   dialect="excel",
337                                   skipinitialspace=True).next()
338                import_keys = [k for k in attrs if not (k.startswith('ignore') or k.isupper())]
339                diff2schema = set(import_keys).difference(set(schema.keys()))
340                diff2layout = set(import_keys).difference(set(layout.keys()))
341                if diff2layout:
                    em = "non-ignorable key(s) %s found in heading" % diff2layout
343                    logger.info(em)
344                    return d
345                s = ','.join(['"%s"' % fn for fn in import_keys])
346                open(not_imported_fn,"a").write(s + ',"Error"'+ '\n')
347                #s = '"id",' + s
348                open(imported_fn,"a").write(s + '\n')
349                format = ','.join(['"%%(%s)s"' % fn for fn in import_keys])
350                format_error = format + ',"%(Error)s"'
351                #format = '"%(id)s",'+ format
352                adapters = [MappingStorageAdapter(schema, item)]
353            dm = DataModel(item, adapters,context=self)
354            ds = DataStructure(data=item,datamodel=dm)
355            error_string = ""
356            #import pdb;pdb.set_trace()
357            for k in import_keys:
358                if not validators[k](ds,mode=mode):
359                    error_string += " %s : %s" % (k,ds.getError(k))
360            # if not error_string and importer:
361            #     item.update(dm)
362            #     item['id'],error = importer(item)
363            #     if error:
364            #         error_string += error
365            if error_string:
366                item['Error'] = error_string
367                invalid_records.append(dm)
368                not_imported.append(format_error % item)
369                total_not_imported += 1
370            else:
371                em = format % item
372                valid_records.append(dm)
373                imported.append(em)
374                #logger.info("%(total_imported)d of %(total)d %(em)s" % vars())
375                tr_count += 1
376                total_imported += 1
377            total += 1
378        if len(imported) > 0:
379            open(imported_fn,"a").write('\n'.join(imported))
380        if len(not_imported) > 0:
381            open(not_imported_fn,"a").write('\n'.join(not_imported))
382        #em = "Imported: %d, not imported: %d of total %d" % (total_imported,total_not_imported,total)
383        d['imported'] = total_imported
384        d['not_imported'] = total_not_imported
385        d['valid_records'] = valid_records
386        d['invalid_records'] = invalid_records
387        d['imported_fn'] = imported_fn
388        d['not_imported_fn'] = not_imported_fn
389        #logger.info(em)
390        return d
391    ###)
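    # Illustration: _import_old returns a summary dict which importCSV inspects,
    # roughly of this shape (counts and paths are made up):
    #
    #   {'mode': 'add',
    #    'imported': 120, 'not_imported': 3,
    #    'valid_records': [<DataModel>, ...], 'invalid_records': [<DataModel>, ...],
    #    'import_fn': '<i_home>/import/JAMB_data.csv',
    #    'imported_fn': '<i_home>/import/JAMB_data_imported<timestamp>.csv',
    #    'not_imported_fn': '<i_home>/import/JAMB_data_not_imported<timestamp>.csv'}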
392
    security.declarePrivate("_import_new") ###(
394    def _import_new(self,csv_items,schema, layout, mode,logger):
        "import data from a csv.DictReader instance"
396        start = True
397        tr_count = 1
398        total_imported = 0
399        total_not_imported = 0
400        total = 0
402        not_imported = []
403        valid_records = []
404        invalid_records = []
405        duplicate_records = []
406        d = {}
407        d['mode'] = mode
408        d['valid_records'] = valid_records
409        d['invalid_records'] = invalid_records
        d['duplicate_records'] = duplicate_records
411        # d['import_fn'] = import_fn
412        # d['imported_fn'] = imported_fn
413        # d['not_imported_fn'] = not_imported_fn
414        validators = {}
415        for widget in layout.keys():
416            try:
417                validators[widget] = layout[widget].validate
418            except AttributeError:
419                logger.info('%s has no validate attribute' % widget)
420                return d
        for item in csv_items:
            if start:
                start = False
                member = self.portal_membership.getAuthenticatedMember()
                logger.info('%s starts import' % member)
                #import_keys = [k for k in item.keys() if not k.startswith('ignore')]
                # the column order is taken from the DictReader itself
                attrs = csv_items.fieldnames
                import_keys = [k for k in attrs if not (k.startswith('ignore') or k.isupper())]
428                diff2schema = set(import_keys).difference(set(schema.keys()))
429                diff2layout = set(import_keys).difference(set(layout.keys()))
430                if diff2layout:
                    em = "non-ignorable key(s) %s found in heading" % diff2layout
432                    logger.info(em)
433                    return d
434                # s = ','.join(['"%s"' % fn for fn in import_keys])
435                # open(not_imported_fn,"a").write(s + ',"Error"'+ '\n')
436                # #s = '"id",' + s
437                # open(imported_fn,"a").write(s + '\n')
438                # format = ','.join(['"%%(%s)s"' % fn for fn in import_keys])
439                # format_error = format + ',"%(Error)s"'
440                # #format = '"%(id)s",'+ format
441                adapters = [MappingStorageAdapter(schema, item)]
442            dm = DataModel(item, adapters,context=self)
443            ds = DataStructure(data=item,datamodel=dm)
444            error_string = ""
445            for k in import_keys:
446                if not validators[k](ds,mode=mode):
447                    error_string += " %s : %s" % (k,ds.getError(k))
448            if error_string:
449                item['Error'] = error_string
450                #invalid_records.append(dm)
451                invalid_records.append(item)
452                total_not_imported += 1
            else:
                valid_records.append(dm)
456                #logger.info("%(total_imported)d of %(total)d %(em)s" % vars())
457                tr_count += 1
458                total_imported += 1
459            total += 1
460        # if len(imported) > 0:
461        #     open(imported_fn,"a").write('\n'.join(imported))
462        # if len(not_imported) > 0:
463        #     open(not_imported_fn,"a").write('\n'.join(not_imported))
464        #em = "Imported: %d, not imported: %d of total %d" % (total_imported,total_not_imported,total)
465        d['imported'] = total_imported
466        d['not_imported'] = total_not_imported
467        d['valid_records'] = valid_records
468        d['invalid_records'] = invalid_records
469        return d
470    ###)
471
472    security.declarePublic("missingValue")###(
473    def missingValue(self):
474        from Missing import MV
475        return MV
476    ###)
477###)
478
479class AccommodationTable(WAeUPTable): ###(
480
481    meta_type = 'WAeUP Accommodation Tool'
482    name = "portal_accommodation"
483    key = "bed"
484    not_occupied = NOT_OCCUPIED
485    def __init__(self,name=None):
486        if name ==  None:
487            name = self.name
488        WAeUPTable.__init__(self, name)
489
490    def searchAndReserveBed(self, student_id,bed_type): ###(
491        logger = logging.getLogger('WAeUPTables.AccommodationTable.searchAndReserveBed')
492        records = self.evalAdvancedQuery(Eq('student',student_id))
493        if len(records) == 1:
494            #return -1,"Student with Id %s already booked bed %s." % (student_id,records[0].bed)
495            logger.info('%s found (reserved) bed %s' % (student_id,records[0].bed))
496            return -1,records[0].bed
497        elif len(records) > 1:
498            logger.info('%s found more than one (reserved) bed' % (student_id))
499            return -3,'more than one bed'
500        query = Eq('bed_type',bed_type) & Eq('student',NOT_OCCUPIED)
501        records = self.evalAdvancedQuery(query,sortSpecs=('sort_id','bed'))
502        if len(records) == 0:
503            logger.info('no bed %s available for %s' % (bed_type,student_id))
504            return -2,"no bed"
505        rec = records[0]
506        self.modifyRecord(bed=rec.bed,student=student_id)
507        logger.info('%s booked bed %s' % (student_id,rec.bed))
508        return 1,rec.bed
509    ###)
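    # Usage sketch (illustrative; the bed_type string is made up): the method
    # returns a (code, value) pair:
    #
    #   code, bed = self.portal_accommodation.searchAndReserveBed(student_id, 're_male_all')
    #   # code ==  1: a free bed was found and has just been booked for the student
    #   # code == -1: the student already has a bed (value is that bed id)
    #   # code == -2: no free bed of the requested type ("no bed")
    #   # code == -3: data error, more than one bed is already reserved for the student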
510
511
512InitializeClass(AccommodationTable)
513
514###)
515
516class PinTable(WAeUPTable): ###(
517    from ZODB.POSException import ConflictError
518    security = ClassSecurityInfo()
519    meta_type = 'WAeUP Pin Tool'
520    name = "portal_pins"
521    key = 'pin'
522
523    def __init__(self,name=None):
524        if name ==  None:
525            name = self.name
526        WAeUPTable.__init__(self, name)
527
528    security.declareProtected(ModifyPortalContent,"dumpAll")###(
529    def dumpAll(self,include_unused=None):
530        """dump all data in the table to a csv"""
531        member = self.portal_membership.getAuthenticatedMember()
532        logger = logging.getLogger('WAeUPTables.PinTable.dumpAll')
533        current = DateTime.DateTime().strftime("%d-%m-%y_%H_%M_%S")
534        export_file = "%s/export/%s_%s.csv" % (i_home,self.__name__,current,)
535        res_list = []
536        lines = []
537        if hasattr(self,"export_keys"):
538            fields = self.export_keys
539        else:
540            fields = []
541            for f in self.schema():
542                fields.append(f)
543        headline = ','.join(fields)
544        out = open(export_file,"wb")
545        out.write(headline +'\n')
546        out.close()
547        out = open(export_file,"a")
548        csv_writer = csv.DictWriter(out,fields,)
549        if include_unused is not None and str(member) not in ('admin','joachim'):
550            logger.info('%s tries to dump pintable with unused pins' % (member))
551            return
552        if include_unused is not None:
553            records = self()
554        else:
555            records = self.evalAdvancedQuery(~Eq('student',''))
556        nr2export = len(records)
557        logger.info('%s starts dumping, %s records to export' % (member,nr2export))
558        chunk = 2000
559        total = 0
560        start = DateTime.DateTime().timeTime()
561        start_chunk = DateTime.DateTime().timeTime()
562        for record in records:
563            not_all = False
564            d = self.record2dict(fields,record)
565            lines.append(d)
566            total += 1
567            if total and not total % chunk or total == len(records):
568                csv_writer.writerows(lines)
569                anz = len(lines)
570                logger.info("wrote %(anz)d  total written %(total)d" % vars())
571                end_chunk = DateTime.DateTime().timeTime()
572                duration = end_chunk-start_chunk
573                per_record = duration/anz
574                till_now = end_chunk - start
                average_per_record = till_now/total
                estimated_end = DateTime.DateTime(start + average_per_record * nr2export)
577                estimated_end = estimated_end.strftime("%H:%M:%S")
578                logger.info('%(duration)4.1f, %(per_record)4.3f,end %(estimated_end)s' % vars())
579                start_chunk = DateTime.DateTime().timeTime()
580                lines = []
581        end = DateTime.DateTime().timeTime()
582        logger.info('total time %6.2f m' % ((end-start)/60))
583        import os
584        filename, extension = os.path.splitext(export_file)
585        from subprocess import call
586        msg = "wrote %(total)d records to %(export_file)s" % vars()
587        #try:
588        #    retcode = call('gzip %s' % (export_file),shell=True)
589        #    if retcode == 0:
590        #        msg = "wrote %(total)d records to %(export_file)s.gz" % vars()
591        #except OSError, e:
592        #    retcode = -99
593        #    logger.info("zip failed with %s" % e)
594        logger.info(msg)
595        args = {'portal_status_message': msg}
596        #url = self.REQUEST.get('URL1') + '?' + urlencode(args)
597        url = self.REQUEST.get('URL2')
598        return self.REQUEST.RESPONSE.redirect(url)
599    ###)
600
601
602
603    def searchAndSetRecord(self, uid, student_id,prefix):
604
        # The following line must be activated after resetting
        # the portal_pins table. This is to avoid duplicate entries
607        # and disable duplicate payments.
608
609        #student_id = student_id.upper()
610
611        #records = self.searchResults(student = student_id)
612        #if len(records) > 0 and prefix in ('CLR','APP'):
613        #    for r in records:
614        #        if r.pin != uid and r.prefix_batch.startswith(prefix):
615        #            return -2
616        records = self.searchResults({"%s" % self.key : uid})
617        if len(records) > 1:
618            # Can not happen, but anyway...
619            raise ValueError("More than one record with uid %s" % uid)
620        if len(records) == 0:
621            return -1,None
622        record = records[0]
623        if record.student == "":
624            record_data = {}
625            for field in self.schema() + self.indexes():
626                record_data[field] = getattr(record, field)
627            # Add the updated data:
628            record_data['student'] = student_id
629            try:
630                self.catalog_object(dict2ob(record_data), uid)
631                return 1,record
            # ConflictError is imported at class level, so it has to be looked up via self
            except self.ConflictError:
633                return 2,record
634        if record.student.upper() != student_id.upper():
635            return 0,record
636        if record.student.upper() == student_id.upper():
637            return 2,record
638        return -3,record
639InitializeClass(PinTable)
640###)
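# Usage sketch (illustrative): PinTable.searchAndSetRecord marks a pin as used
# by a student and reports what it found:
#
#   code, record = self.portal_pins.searchAndSetRecord(pin, student_id, prefix)
#   # code == -1: no such pin
#   # code ==  1: pin was unused and has just been assigned to student_id
#   # code ==  2: pin already belongs to this student (or a write conflict occurred)
#   # code ==  0: pin already used by a different student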
641
642class PumeResultsTable(WAeUPTable): ###(
643
644    meta_type = 'WAeUP PumeResults Tool'
645    name = "portal_pumeresults"
646    key = "jamb_reg_no"
647    def __init__(self,name=None):
648        if name ==  None:
649            name = self.name
650        WAeUPTable.__init__(self, name)
651
652
653InitializeClass(PumeResultsTable)
654
655###)
656
657class ApplicantsCatalog(WAeUPTable): ###(
658
659    meta_type = 'WAeUP Applicants Catalog'
660    name = "applicants_catalog"
661    key = "reg_no"
662    security = ClassSecurityInfo()
663    #export_keys = (
664    #               "reg_no",
665    #               "status",
666    #               "lastname",
667    #               "sex",
668    #               "date_of_birth",
669    #               "lga",
670    #               "email",
671    #               "phone",
672    #               "passport",
673    #               "entry_mode",
674    #               "pin",
675    #               "screening_type",
676    #               "registration_date",
677    #               "testdate",
678    #               "application_date",
679    #               "screening_date",
680    #               "faculty",
681    #               "department",
682    #               "course1",
683    #               "course2",
684    #               "course3",
685    #               "eng_score",
686    #               "subj1",
687    #               "subj1score",
688    #               "subj2",
689    #               "subj2score",
690    #               "subj3",
691    #               "subj3score",
692    #               "aggregate",
693    #               "course_admitted",
694    #               )
695
696    def __init__(self,name=None):
697        if name ==  None:
698            name = self.name
699        WAeUPTable.__init__(self, name)
700
701    security.declareProtected(ModifyPortalContent,"new_importCSV")###(
702    def new_importCSV(self,filename="JAMB_data",
703                  schema_id="application",
704                  layout_id="import_application",
705                  mode='add'):
706        """ import JAMB data """
707        current = DateTime.DateTime().strftime("%d-%m-%y_%H_%M_%S")
708        pm = self.portal_membership
709        member = pm.getAuthenticatedMember()
710        logger = logging.getLogger('WAeUPTables.ApplicantsCatalog.importCSV')
711        lock_fn = "%s/import/%s_import_lock" % (i_home,filename)
712        import_fn = "%s/import/%s.csv" % (i_home,filename)
713        if mode not in ('add','edit'):
            logger.info("invalid mode: %s" % mode)
            return
        import os
        if os.path.exists(lock_fn):
716            logger.info("import of %(import_fn)s already in progress" % vars())
717            return
718        lock_file = open(lock_fn,"w")
719        lock_file.write("%(current)s \n" % vars())
720        lock_file.close()
        invalid_fn = "%s/import/%s_not_imported%s.csv" % (i_home,filename,current)
        duplicate_fn = "%s/import/%s_duplicates%s.csv" % (i_home,filename,current)
723        stool = getToolByName(self, 'portal_schemas')
724        ltool = getToolByName(self, 'portal_layouts')
725        schema = stool._getOb(schema_id)
726        if schema is None:
727            em = 'No such schema %s' % schema_id
728            logger.error(em)
729            return
730        for postfix in ('_import',''):
731            layout_name = "%(layout_id)s%(postfix)s" % vars()
732            if hasattr(ltool,layout_name):
733                break
734        layout = ltool._getOb(layout_name)
735        if layout is None:
736            em = 'No such layout %s' % layout_id
737            logger.error(em)
738            return
739        try:
            csv_items = csv.DictReader(open(import_fn,"rb"))
741        except:
742            em = 'Error reading %s.csv' % filename
743            logger.error(em)
744            return
745        d = self._import_new(csv_items,schema,layout,mode,logger)
746        imported = []
747        edited = []
748        duplicates = []
749        not_found = []
750        if len(d['valid_records']) > 0:
751            for record in d['valid_records']:
752                #import pdb;pdb.set_trace()
                if mode == "add":
                    try:
                        self.addRecord(**dict(record.items()))
                        imported.append(dict(record.items()))
                        logger.info("added %s" % record.items())
                    except ValueError:
                        duplicates.append(dict(record.items()))
                        logger.info("duplicate %s" % record.items())
                elif mode == "edit":
                    try:
                        self.modifyRecord(**dict(record.items()))
                        edited.append(dict(record.items()))
                        logger.info("edited %s" % record.items())
                    except KeyError:
                        not_found.append(dict(record.items()))
                        logger.info("not found %s" % record.items())
        invalid = d['invalid_records']
        for itype in ("imported","edited","not_found","duplicates","invalid"):
            outlist = locals()[itype]
            if len(outlist):
                # prepend a heading row so the DictWriter output starts with the column names
                headers = {}
                for k in outlist[0].keys():
                    headers[k] = k
                outlist.insert(0,headers)
                # file naming follows the other import helpers in this module
                out_fn = "%s/import/%s_%s%s.csv" % (i_home,filename,itype,current)
                outfile = open(out_fn,'w')
                csv.DictWriter(outfile,headers.keys()).writerows(outlist)
                logger.info("wrote %d %s records to %s" % (len(outlist) - 1,itype,out_fn))
780###)
781
782    security.declareProtected(ModifyPortalContent,"importCSV")###(
783    def importCSV(self,filename="JAMB_data",
784                  schema_id="application",
785                  layout_id="application_pce",
786                  mode='add'):
        """ import JAMB data """
        logger = logging.getLogger('WAeUPTables.ApplicantsCatalog.importCSV')
        stool = getToolByName(self, 'portal_schemas')
        ltool = getToolByName(self, 'portal_layouts')
        schema = stool._getOb(schema_id)
        if schema is None:
            em = 'No such schema %s' % schema_id
            logger.error(em)
            return
        layout = ltool._getOb(layout_id)
        if layout is None:
            em = 'No such layout %s' % layout_id
            logger.error(em)
            return
801        d = self._import_old(filename,schema,layout,mode,logger)
802        if len(d['valid_records']) > 0:
803            for record in d['valid_records']:
804                #import pdb;pdb.set_trace()
805                if mode == "add":
806                    self.addRecord(**dict(record.items()))
807                    logger.info("added %s" % record.items())
808                elif mode == "edit":
809                    self.modifyRecord(**dict(record.items()))
810                    logger.info("edited %s" % record.items())
811                else:
812                    logger.info("invalid mode: %s" % mode)
813        logger.info("%(mode)sed %(imported)d records, invalid written to %(not_imported_fn)s" % d)
814    ###)
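    # Usage sketch (illustrative): importCSV expects the data file under
    # $INSTANCE_HOME/import/, e.g. import/JAMB_data.csv, with a heading row whose
    # column names match the chosen schema and layout; columns whose names start
    # with 'ignore' or are all upper case are skipped. A call such as
    #
    #   self.applicants_catalog.importCSV(filename='JAMB_data', mode='add')
    #
    # validates each row against the layout widgets, adds the valid rows to the
    # catalog and appends the rejected rows (plus an Error column) to
    # import/JAMB_data_not_imported<timestamp>.csv.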
815
816InitializeClass(ApplicantsCatalog)
817
818###)
819
820class StudentsCatalog(WAeUPTable): ###(
821    security = ClassSecurityInfo()
822
823    meta_type = 'WAeUP Students Catalog'
824    name = "students_catalog"
825    key = "id"
826    affected_types = {   ###(
827                      'StudentApplication':
828                      {'id': 'application',
829                       'fields':
830                       ('jamb_reg_no',
831                        'entry_mode',
832                        #'entry_level',
833                        'entry_session',
834                       )
835                      },
836                      'StudentClearance':
837                      {'id': 'clearance',
838                       'fields':
839                       ('matric_no',
840                        'lga',
841                       )
842                      },
843                      'StudentPersonal':
844                      {'id': 'personal',
845                       'fields':
846                       ('name',
847                        'sex',
848                        'perm_address',
849                        'email',
850                        'phone',
851                       )
852                      },
853                      'StudentStudyCourse':
854                      {'id': 'study_course',
855                       'fields':
856                       ('course', # study_course
857                        'faculty', # from certificate
858                        'department', # from certificate
859                        'end_level', # from certificate
860                        'level', # current_level
861                        'mode',  # current_mode
862                        'session', # current_session
863                        'verdict', # current_verdict
864                       )
865                      },
866                     }
867    ###)
868
869    def __init__(self,name=None):
870        if name ==  None:
871            name = self.name
872        WAeUPTable.__init__(self, name)
873        return
874
875    def manage_catalogClear(self, REQUEST=None, RESPONSE=None, URL1=None):
876        """ clears the whole enchilada """
877        self._catalog.clear()
878
879        if REQUEST and RESPONSE:
880            RESPONSE.redirect(
881              URL1 +
882              '/manage_catalogAdvanced?manage_tabs_message=Catalog%20Cleared')
883
884    def manage_catalogReindex(self, REQUEST, RESPONSE, URL1): ###(
885        """ clear the catalog, then re-index everything """
886
887        elapse = time.time()
888        c_elapse = time.clock()
889
890        pgthreshold = self._getProgressThreshold()
891        handler = (pgthreshold > 0) and ZLogHandler(pgthreshold) or None
892        self.refreshCatalog(clear=1, pghandler=handler)
893
894        elapse = time.time() - elapse
895        c_elapse = time.clock() - c_elapse
896
897        RESPONSE.redirect(
898            URL1 +
899            '/manage_catalogAdvanced?manage_tabs_message=' +
900            urllib.quote('Catalog Updated \n'
901                         'Total time: %s\n'
902                         'Total CPU time: %s' % (`elapse`, `c_elapse`)))
903    ###)
904
905    def fill_certificates_dict(self): ###(
        "return certificate data in a dict"
907        certificates_brains = self.portal_catalog(portal_type ='Certificate')
908        d = {}
909        for cb in certificates_brains:
910            certificate_doc = cb.getObject().getContent()
911            cb_path = cb.getPath().split('/')
912            ld = {}
913            ld['faculty'] = cb_path[-4]
914            ld['department'] = cb_path[-3]
915            ld['end_level'] = getattr(certificate_doc,'end_level','999')
916            ld['study_mode'] = getattr(certificate_doc,'study_mode','')
917            d[cb.getId] = ld
918        return d
919    ###)
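    # Illustration (ids and values are made up): fill_certificates_dict returns a
    # mapping keyed by certificate id, with faculty and department taken from the
    # certificate's location below academics/<faculty>/<department>/:
    #
    #   {'BSCANA': {'faculty': 'BMS', 'department': 'ANA',
    #               'end_level': '500', 'study_mode': 'ug_ft'}}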
920
921    def get_from_doc_department(self,doc,cached_data={}): ###(
        "return the student's department"
923        if doc is None:
924            return None
925        if hasattr(self,"_v_certificates") and self._v_certificates.has_key(doc.study_course):
926            return self._v_certificates[doc.study_course]['department']
927        certificate_res = self.portal_catalog(id = doc.study_course)
928        if len(certificate_res) != 1:
929            return None
930        return certificate_res[0].getPath().split('/')[-3]
931
932    def get_from_doc_faculty(self,doc,cached_data={}):
        "return the student's faculty"
934        if doc is None:
935            return None
936        if hasattr(self,"_v_certificates") and self._v_certificates.has_key(doc.study_course):
937            return self._v_certificates[doc.study_course]['faculty']
938        certificate_res = self.portal_catalog(id = doc.study_course)
939        if len(certificate_res) != 1:
940            return None
941        return certificate_res[0].getPath().split('/')[-4]
942
943    def get_from_doc_end_level(self,doc,cached_data={}):
        "return the student's end_level"
945        if doc is None:
946            return None
947        if hasattr(self,"_v_certificates") and self._v_certificates.has_key(doc.study_course):
948            return self._v_certificates[doc.study_course]['end_level']
949        certificate_res = self.portal_catalog(id = doc.study_course)
950        if len(certificate_res) != 1:
951            return None
952        return getattr(certificate_res[0].getObject().getContent(),'end_level','unknown')
953
954    def get_from_doc_level(self,doc,cached_data={}):
        "return the student's level"
956        if doc is None:
957            return None
958        return getattr(doc,'current_level',None)
959
960    def get_from_doc_mode(self,doc,cached_data={}):
        "return the student's mode"
962        if doc is None:
963            return None
964        cm = getattr(doc,'current_mode',None)
965        return cm
966
967
968    def get_from_doc_session(self,doc,cached_data={}):
        "return the student's current_session"
970        if doc is None:
971            return None
972        return getattr(doc,'current_session',None)
973
974    def get_from_doc_entry_session(self,doc,cached_data={}):
        "return the student's entry_session"
976        if doc is None:
977            return None
978        es = getattr(doc,'entry_session',None)
979        if es is not None and len(es) == 2:
980            return es
981        try:
982            digit = int(doc.jamb_reg_no[0])
983        except:
984            return "-1"
985        if digit < 8:
986            return "0%c" % doc.jamb_reg_no[0]
987        return "9%c" % doc.jamb_reg_no[0]
988
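    # Illustration: when entry_session is not set on the application document it
    # is guessed from the first digit of the JAMB registration number; digits
    # 0-7 are taken as 200x sessions and 8-9 as 199x sessions, so a jamb_reg_no
    # starting with '6' yields '06' and one starting with '9' yields '99'.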
989    def get_from_doc_course(self,doc,cached_data={}):
        "return the student's study_course"
991        if doc is None:
992            return None
993        return getattr(doc,'study_course',None)
994
995    def get_from_doc_name(self,doc,cached_data={}):
        "return the student's name from the personal data"
997        if doc is None:
998            return None
999        return "%s %s %s" % (doc.firstname,doc.middlename,doc.lastname)
1000
1001    def get_from_doc_verdict(self,doc,cached_data={}):
        "return the student's current verdict"
1003        if doc is None:
1004            return None
1005        return getattr(doc,'current_verdict',None)
1006    ###)
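    # Illustration: reindexIndex and refreshCatalog resolve each catalog field to
    # one of the get_from_doc_* helpers above when one exists, and fall back to a
    # plain attribute read otherwise, roughly like this:
    #
    #   if hasattr(self, 'get_from_doc_%s' % field):
    #       data[field] = getattr(self, 'get_from_doc_%s' % field)(doc, cached_data=cached_data)
    #   else:
    #       data[field] = getattr(doc, field)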
1007
1008    def reindexIndex(self, name, REQUEST,pghandler=None): ###(
1009        if isinstance(name, str):
1010            name = (name,)
1011        reindextypes = {}
1012        reindex_special = []
1013        for n in name:
1014            if n in ("review_state","registered_courses"):
1015                reindex_special.append(n)
1016            else:
1017                for pt in self.affected_types.keys():
1018                    if n in self.affected_types[pt]['fields']:
1019                        if reindextypes.has_key(pt):
1020                            reindextypes[pt].append(n)
1021                        else:
1022                            reindextypes[pt]= [n]
1023                        break
1024        cached_data = {}
1025        if set(name).intersection(set(('faculty','department','end_level'))):
1026            cached_data = self.fill_certificates_dict()
1027        students = self.portal_catalog(portal_type="Student")
1028        if hasattr(self,'portal_catalog_real'):
1029            aq_portal = self.portal_catalog_real.evalAdvancedQuery
1030        else:
1031            aq_portal = self.portal_catalog.evalAdvancedQuery
1032        num_objects = len(students)
1033        if pghandler:
1034            pghandler.init('Refreshing catalog: %s' % self.absolute_url(1), num_objects)
1035        noattr = set(('StudentClearance','StudentPersonal')) & set(reindextypes.keys())
1036        #import pdb;pdb.set_trace()
1037        for i in xrange(num_objects):
1038            if pghandler: pghandler.report(i)
1039            student_brain = students[i]
1040            student_object = student_brain.getObject()
1041            # query = Eq('path',student_brain.getPath())
1042            # sub_brains_list = aq_portal(query)
1043            # sub_brains = {}
1044            # for sub_brain in sub_brains_list:
1045            #     sub_brains[sub_brain.portal_type] = sub_brain
1046            # student_path = student_brain.getPath()
1047            data = {}
1048            modified = False
1049            sid = data['id'] = student_brain.getId
1050            if reindex_special and 'review_state' in reindex_special:
1051                modified = True
1052                data['review_state'] = student_object.portal_workflow.getInfoFor(student_object,'review_state',None)
1053            sub_objects = False
1054            for pt in reindextypes.keys():
1055                modified = True
1056                try:
1057                    doc = getattr(student_object,self.affected_types[pt]['id']).getContent()
1058                    #doc = sub_brains[pt].getObject().getContent()
1059                    # path = "%s/%s" % (student_path,self.affected_types[pt]['id'])
1060                    # doc = self.unrestrictedTraverse(path).getContent()
1061                    sub_objects = True
1062                except:
1063                    continue
1064                for field in set(name).intersection(self.affected_types[pt]['fields']):
1065                    if hasattr(self,'get_from_doc_%s' % field):
1066                        data[field] = getattr(self,'get_from_doc_%s' % field)(doc,
1067                                                                              cached_data=cached_data)
1068                    else:
1069                        data[field] = getattr(doc,field)
1070            if not sub_objects and noattr:
1071                import_res = self.returning_import(id = sid)
1072                if not import_res:
1073                    continue
1074                import_record = import_res[0]
1075                data['matric_no'] = import_record.matric_no
1076                data['sex'] = import_record.Sex == 'F'
1077                data['name'] = "%s %s %s" % (import_record.Firstname,
1078                                             import_record.Middlename,
1079                                             import_record.Lastname)
1080                data['jamb_reg_no'] = import_record.Entryregno
1081            #if reindex_special and 'registered_courses' in reindex_special:
1082            #    try:
1083            #        study_course = getattr(student_object,"study_course")
1084            #        level_ids = study_course.objectIds()
1085            #    except:
1086            #        continue
1087            #    if not level_ids:
1088            #        continue
1089            #    modified = True
1090            #    level_ids.sort()
1091            #    course_ids = getattr(study_course,level_ids[-1]).objectIds()
1092            #    courses = []
1093            #    for c in course_ids:
1094            #        if c.endswith('_co'):
1095            #            courses.append(c[:-3])
1096            #        else:
1097            #            courses.append(c)
1098            #    data['registered_courses'] = courses
1099            if modified:
1100                self.modifyRecord(**data)
1101        if pghandler: pghandler.finish()
1102    ###)
1103
1104    def refreshCatalog(self, clear=0, pghandler=None): ###(
1105        """ re-index everything we can find """
1106        students_folder = self.portal_url.getPortalObject().campus.students
1107        if clear:
1108            self._catalog.clear()
1109        students = self.portal_catalog(portal_type="Student")
1110        num_objects = len(students)
1111        cached_data = self.fill_certificates_dict()
1112        if pghandler:
1113            pghandler.init('Refreshing catalog: %s' % self.absolute_url(1), num_objects)
1114        for i in xrange(num_objects):
1115            if pghandler: pghandler.report(i)
1116            student_brain = students[i]
1117            spath = student_brain.getPath()
1118            student_object = student_brain.getObject()
1119            data = {}
1120            sid = data['id'] = student_brain.getId
1121            #data['review_state'] = student_brain.review_state
1122            data['review_state'] = student_object.portal_workflow.getInfoFor(student_object,'review_state',None)
1123            sub_objects = False
1124            for pt in self.affected_types.keys():
1125                modified = True
1126                try:
1127                    doc = getattr(student_object,self.affected_types[pt]['id']).getContent()
1128                    sub_objects = True
1129                except:
1130                    #from pdb import set_trace;set_trace()
1131                    continue
1132                for field in self.affected_types[pt]['fields']:
1133                    if hasattr(self,'get_from_doc_%s' % field):
1134                        data[field] = getattr(self,'get_from_doc_%s' % field)(doc,
1135                                                                              cached_data=cached_data)
1136                    else:
1137                        data[field] = getattr(doc,field,None)
1138            if not sub_objects:
1139                import_res = self.returning_import(id = sid)
1140                if not import_res:
1141                    continue
1142                import_record = import_res[0]
1143                data['matric_no'] = import_record.matric_no
1144                data['sex'] = import_record.Sex == 'F'
1145                data['name'] = "%s %s %s" % (import_record.Firstname,
1146                                             import_record.Middlename,
1147                                             import_record.Lastname)
1148                data['jamb_reg_no'] = import_record.Entryregno
1149            self.addRecord(**data)
1150        if pghandler: pghandler.finish()
1151    ###)
1152
1153    security.declarePrivate('notify_event_listener') ###(
1154    def notify_event_listener(self,event_type,object,infos):
1155        "listen for events"
1156        if not infos.has_key('rpath'):
1157            return
1158        pt = getattr(object,'portal_type',None)
1159        mt = getattr(object,'meta_type',None)
1160        students_catalog = self
1161        data = {}
1162        if pt == 'Student' and\
1163           mt == 'CPS Proxy Folder' and\
1164           event_type.startswith('workflow'):
1165            data['id'] = object.getId()
1166            data['review_state'] = self.portal_workflow.getInfoFor(object,'review_state',None)
1167            students_catalog.modifyRecord(**data)
1168            return
1169        rpl = infos['rpath'].split('/')
1170        if pt == 'Student' and mt == 'CPS Proxy Folder':
1171            student_id = object.id
1172            if event_type == "sys_add_object":
1173                try:
1174                    self.addRecord(id = student_id)
1175                except ValueError:
1176                    pass
1177                return
1178            elif event_type == 'sys_del_object':
1179                self.deleteRecord(student_id)
1180        if pt not in self.affected_types.keys():
1181            return
        if event_type not in ('sys_modify_object',):
1183            return
1184        if mt == 'CPS Proxy Folder':
1185            return
1186        if not hasattr(self,'_v_certificates'):
1187            self._v_certificates = self.fill_certificates_dict()
1188        for field in self.affected_types[pt]['fields']:
1189            if hasattr(self,'get_from_doc_%s' % field):
1190                data[field] = getattr(self,'get_from_doc_%s' % field)(object)
1191            else:
1192                data[field] = getattr(object,field)
1193        data['id'] = rpl[2]
1194        self.modifyRecord(**data)
1195    ###)
1196
1197
1198InitializeClass(StudentsCatalog)
1199
1200###)
1201
1202class CertificatesCatalog(WAeUPTable): ###(
1203    security = ClassSecurityInfo()
1204
1205    meta_type = 'WAeUP Certificates Catalog'
1206    name =  "certificates_catalog"
1207    key = "code"
1208    def __init__(self,name=None):
1209        if name ==  None:
1210            name =  self.name
1211        WAeUPTable.__init__(self, name)
1212
1213    def manage_catalogReindex(self, REQUEST, RESPONSE, URL1): ###(
1214        """ clear the catalog, then re-index everything """
1215
1216        elapse = time.time()
1217        c_elapse = time.clock()
1218
1219        pgthreshold = self._getProgressThreshold()
1220        handler = (pgthreshold > 0) and ZLogHandler(pgthreshold) or None
1221        self.refreshCatalog(clear=1, pghandler=handler)
1222
1223        elapse = time.time() - elapse
1224        c_elapse = time.clock() - c_elapse
1225
1226        RESPONSE.redirect(
1227            URL1 +
1228            '/manage_catalogAdvanced?manage_tabs_message=' +
1229            urllib.quote('Catalog Updated \n'
1230                         'Total time: %s\n'
1231                         'Total CPU time: %s' % (`elapse`, `c_elapse`)))
1232    ###)
1233
1234    def reindexIndex(self, name, REQUEST,pghandler=None): ###(
1235        if isinstance(name, str):
1236            name = (name,)
1237        certificates = self.portal_catalog(portal_type="Certificate")
1238        num_objects = len(certificates)
1239        if pghandler:
1240            pghandler.init('Refreshing catalog: %s' % self.absolute_url(1), num_objects)
1241        for i in xrange(num_objects):
1242            if pghandler: pghandler.report(i)
1243            certificate_brain = certificates[i]
1244            certificate_object = certificate_brain.getObject()
1245            pl = certificate_brain.getPath().split('/')
1246            data = {}
1247            cid = data[self.key] = certificate_brain.getId
1248            data['faculty'] = pl[-4]
1249            data['department'] = pl[-3]
1250            doc = certificate_object.getContent()
1251            for field in name:
1252                if field not in (self.key,'faculty','department'):
1253                    data[field] = getattr(doc,field)
1254            self.modifyRecord(**data)
1255        if pghandler: pghandler.finish()
1256    ###)
1257
1258    def refreshCatalog(self, clear=0, pghandler=None): ###(
1259        """ re-index everything we can find """
1260        if clear:
1261            self._catalog.clear()
1262        certificates = self.portal_catalog(portal_type="Certificate")
1263        num_objects = len(certificates)
1264        if pghandler:
1265            pghandler.init('Refreshing catalog: %s' % self.absolute_url(1), num_objects)
1266        #from pdb import set_trace;set_trace()
1267        for i in xrange(num_objects):
1268            if pghandler: pghandler.report(i)
1269            certificate_brain = certificates[i]
1270            certificate_doc = certificate_brain.getObject().getContent()
1271            pl = certificate_brain.getPath().split('/')
1272            data = {}
1273            for field in self.schema():
1274                data[field] = getattr(certificate_doc,field,None)
1275            data[self.key] = certificate_brain.getId
1276            ai = pl.index('academics')
1277            data['faculty'] = pl[ai +1]
1278            data['department'] = pl[ai +2]
1279            if clear:
1280                self.addRecord(**data)
1281            else:
1282                self.modifyRecord(**data)
1283        if pghandler: pghandler.finish()
1284    ###)
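    # Illustration (the ids are made up): faculty and department are not stored on
    # the certificate document itself but derived from its position in the
    # academics hierarchy. For a brain path ending in
    #
    #   .../academics/BMS/ANA/.../<certificate id>
    #
    # pl.index('academics') locates the hierarchy root, pl[ai+1] is the faculty
    # ('BMS') and pl[ai+2] the department ('ANA').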
1285
1286    security.declarePrivate('notify_event_listener') ###(
1287    def notify_event_listener(self,event_type,object,infos):
1288        "listen for events"
1289        if not infos.has_key('rpath'):
1290            return
1291        pt = getattr(object,'portal_type',None)
1292        mt = getattr(object,'meta_type',None)
1293        if pt != 'Certificate':
1294            return
1295        data = {}
1296        rpl = infos['rpath'].split('/')
1297        if event_type not in ("sys_add_object","sys_modify_object","sys_del_object"):
1298            return
1299        certificate_id = object.getId()
1300        data[self.key] = certificate_id
1301        if event_type == "sys_add_object" and mt == 'CPS Proxy Folder':
1302            try:
1303                self.addRecord(**data)
1304            except ValueError:
1305                return
1306            certificate_id = object.getId()
1307            doc = object.getContent()
1308            if doc is None:
1309                return
1310            for field in self.schema():
1311                data[field] = getattr(doc,field,None)
1312            data[self.key] = certificate_id
1313            ai = rpl.index('academics')
1314            data['faculty'] = rpl[ai +1]
1315            data['department'] = rpl[ai +2]
1316            self.modifyRecord(**data)
1317            return
1318        if event_type == "sys_del_object":
1319            self.deleteRecord(certificate_id)
1320            return
1321        if event_type == "sys_modify_object" and mt == 'Certificate':
1322            #from pdb import set_trace;set_trace()
1323            for field in self.schema():
1324                data[field] = getattr(object,field,None)
1325            certificate_id = object.aq_parent.getId()
1326            data[self.key] = certificate_id
1327            ai = rpl.index('academics')
1328            data['faculty'] = rpl[ai +1]
1329            data['department'] = rpl[ai +2]
1330            self.modifyRecord(**data)
1331    ###)
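    # Overview of the listener above: it keeps the flat catalog in sync with
    # the CPS object tree, roughly as follows:
    #
    #   sys_add_object (proxy folder)   -> addRecord with the id only, then
    #                                      modifyRecord once the content doc
    #                                      and path are available
    #   sys_modify_object (Certificate) -> modifyRecord with the doc fields
    #   sys_del_object                  -> deleteRecord(certificate_id)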
1332
1333
1334InitializeClass(CertificatesCatalog)
1335###)
1336
1337class CoursesCatalog(WAeUPTable): ###(
1338    security = ClassSecurityInfo()
1339
1340    meta_type = 'WAeUP Courses Catalog'
1341    name =  "courses_catalog"
1342    key = "code"
1343    def __init__(self,name=None):
1344        if name ==  None:
1345            name =  self.name
1346        WAeUPTable.__init__(self, name)
1347
1348    def manage_catalogReindex(self, REQUEST, RESPONSE, URL1): ###(
1349        """ clear the catalog, then re-index everything """
1350
1351        elapse = time.time()
1352        c_elapse = time.clock()
1353
1354        pgthreshold = self._getProgressThreshold()
1355        handler = (pgthreshold > 0) and ZLogHandler(pgthreshold) or None
1356        self.refreshCatalog(clear=1, pghandler=handler)
1357
1358        elapse = time.time() - elapse
1359        c_elapse = time.clock() - c_elapse
1360
1361        RESPONSE.redirect(
1362            URL1 +
1363            '/manage_catalogAdvanced?manage_tabs_message=' +
1364            urllib.quote('Catalog Updated \n'
1365                         'Total time: %s\n'
1366                         'Total CPU time: %s' % (`elapse`, `c_elapse`)))
1367    ###)
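    # The ZLogHandler above is only created when the catalog's progress
    # threshold property is positive; refreshCatalog then reports its
    # progress to the event log periodically while it walks the courses.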
1368
1369    def reindexIndex(self, name, REQUEST,pghandler=None): ###(
1370        if isinstance(name, str):
1371            name = (name,)
1372        courses = self.portal_catalog(portal_type="Course")
1373        num_objects = len(courses)
1374        if pghandler:
1375            pghandler.init('Refreshing catalog: %s' % self.absolute_url(1), num_objects)
1376        for i in xrange(num_objects):
1377            if pghandler: pghandler.report(i)
1378            course_brain = courses[i]
1379            course_object = course_brain.getObject()
1380            pl = course_brain.getPath().split('/')
1381            data = {}
1382            cid = data[self.key] = course_brain.getId
1383            data['faculty'] = pl[-4]
1384            data['department'] = pl[-3]
1385            doc = course_object.getContent()
1386            for field in name:
1387                if field not in (self.key,'faculty','department'):
1388                    data[field] = getattr(doc,field)
1389            self.modifyRecord(**data)
1390        if pghandler: pghandler.finish()
1391    ###)
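    # reindexIndex only rewrites the named columns (key, faculty and
    # department are always refreshed from the brain and path). A typical
    # call from a skin script might look like the following sketch; the
    # column name 'title' is only an example:
    #
    #   context.courses_catalog.reindexIndex('title', REQUEST)
    #   context.courses_catalog.reindexIndex(('title', 'credits'), REQUEST)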
1392
1393    def refreshCatalog(self, clear=0, pghandler=None): ###(
1394        """ re-index everything we can find """
1395        if clear:
1396            self._catalog.clear()
1397        courses = self.portal_catalog(portal_type="Course")
1398        num_objects = len(courses)
1399        if pghandler:
1400            pghandler.init('Refreshing catalog: %s' % self.absolute_url(1), num_objects)
1401        #from pdb import set_trace;set_trace()
1402        for i in xrange(num_objects):
1403            if pghandler: pghandler.report(i)
1404            course_brain = courses[i]
1405            course_doc = course_brain.getObject().getContent()
1406            pl = course_brain.getPath().split('/')
1407            data = {}
1408            for field in self.schema():
1409                data[field] = getattr(course_doc,field,None)
1410            data[self.key] = course_brain.getId
1411            ai = pl.index('academics')
1412            data['faculty'] = pl[ai +1]
1413            data['department'] = pl[ai +2]
1414            if clear:
1415                self.addRecord(**data)
1416            else:
1417                self.modifyRecord(**data)
1418        if pghandler: pghandler.finish()
1419    ###)
1420
1421    security.declarePrivate('notify_event_listener') ###(
1422    def notify_event_listener(self,event_type,object,infos):
1423        "listen for events"
1424        if not infos.has_key('rpath'):
1425            return
1426        pt = getattr(object,'portal_type',None)
1427        mt = getattr(object,'meta_type',None)
1428        if pt != 'Course':
1429            return
1430        data = {}
1431        rpl = infos['rpath'].split('/')
1432        if event_type not in ("sys_add_object","sys_modify_object","sys_del_object"):
1433            return
1434        course_id = object.getId()
1435        data[self.key] = course_id
1436        if event_type == "sys_add_object" and mt == 'CPS Proxy Folder':
1437            try:
1438                self.addRecord(**data)
1439            except ValueError:
1440                return
1441            course_id = object.getId()
1442            doc = object.getContent()
1443            if doc is None:
1444                return
1445            for field in self.schema():
1446                data[field] = getattr(doc,field,None)
1447            data[self.key] = course_id
1448            ai = rpl.index('academics')
1449            data['faculty'] = rpl[ai +1]
1450            data['department'] = rpl[ai +2]
1451            self.modifyRecord(**data)
1452            return
1453        if event_type == "sys_del_object":
1454            self.deleteRecord(course_id)
1455            return
1456        if event_type == "sys_modify_object" and mt == 'Course':
1457            #from pdb import set_trace;set_trace()
1458            for field in self.schema():
1459                data[field] = getattr(object,field,None)
1460            course_id = object.aq_parent.getId()
1461            data[self.key] = course_id
1462            ai = rpl.index('academics')
1463            data['faculty'] = rpl[ai +1]
1464            data['department'] = rpl[ai +2]
1465            self.modifyRecord(**data)
1466    ###)
1467
1468
1469InitializeClass(CoursesCatalog)
1470###)
1471
1472class CourseResults(WAeUPTable): ###(
1473    security = ClassSecurityInfo()
1474
1475    meta_type = 'WAeUP Results Catalog'
1476    name = "course_results"
1477    key = "key" # composite key: "<student_id>|<level_id>|<course_id>"
1478    def __init__(self,name=None):
1479        if name ==  None:
1480            name = self.name
1481        WAeUPTable.__init__(self, name)
1482        self._queue = []
1483
1484    def addMultipleRecords(self, records): ###(
1485        """add many records"""
1486        existing_uids = []
1487        for data in records:
1488            uid = "%(student_id)s|%(level_id)s|%(course_id)s" % data
1489            data['%s' % self.key] = uid
1490            query = Eq(self.key, uid)
1491            res = self.course_results.evalAdvancedQuery(query)
1492            if len(res) > 0:
1493                rec = res[0]
1494                equal = True
1495                for attr in ('student_id','level_id','course_id'):
1496                    if getattr(rec,attr,'') != data[attr]:
1497                        equal = False
1498                        break
1499                if equal:
1500                    existing_uids.append(uid)
1501                    continue
1502            self.catalog_object(dict2ob(data), uid=uid)
1503        return existing_uids
1504    ###)
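    # addMultipleRecords expects plain dicts that carry at least student_id,
    # level_id and course_id; the composite uid is built from exactly those
    # three values. A small sketch with made-up ids:
    #
    #   records = [{'student_id': 'X123456', 'level_id': '200',
    #               'course_id': 'MTH101', 'score': 55}]
    #   skipped = context.course_results.addMultipleRecords(records)
    #
    # Each record is catalogued under the uid 'X123456|200|MTH101'; uids for
    # which a matching record already exists are skipped and returned.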
1505
1506    def deleteResultsHere(self,level_id,student_id): ###(
1507        query = Eq('student_id',student_id) & Eq('level_id', level_id)
1508        course_results = self.course_results.evalAdvancedQuery(query)
1509        #import pdb;pdb.set_trace()
1510        for result in course_results:
1511            self.deleteRecord(result.key)
1512    ###)
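    # The queries above use Products.AdvancedQuery: individual Eq() terms
    # can be combined with & (and) or | (or) before being evaluated against
    # the catalog. A minimal sketch with hypothetical values:
    #
    #   query = Eq('student_id', 'X123456') & Eq('level_id', '200')
    #   results = context.course_results.evalAdvancedQuery(query)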
1513
1514    def moveResultsHere(self,level,student_id): ###(
1515        #import pdb;pdb.set_trace()
1516        level_id = level.getId()
1517        query = Eq('student_id',student_id) & Eq('level_id', level_id)
1518        course_results = self.course_results.evalAdvancedQuery(query)
1519        existing_courses = [cr.code for cr in course_results]
1520        to_delete = []
1521        for code,obj in level.objectItems():
1522            to_delete.append(code)
1523            carry_over = False
1524            if code.endswith('_co'):
1525                carry_over = True
1526                code  = code[:-3]
1527            if code in existing_courses:
1528                continue
1529            course_result_doc = obj.getContent()
1530            data = {}
1531            course_id = code
1532            for field in self.schema():
1533                data[field] = getattr(course_result_doc,field,'')
1534            data['key'] = key = "%(student_id)s|%(level_id)s|%(course_id)s" % vars()
1535            data['student_id'] = student_id
1536            data['level_id'] = level_id
1537            session_id = self.getLevelSession(level.getContent(),student_id,level_id)
1538            data['session_id'] = session_id
1539            #data['queue_status'] = OBJECT_CREATED
1540            data['code'] = course_id
1541            data['carry_over'] = carry_over
1542            self.catalog_object(dict2ob(data), uid=key)
1543        level.manage_delObjects(to_delete)
1544    ###)
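    # moveResultsHere flattens the course-result objects stored inside a
    # level into catalog records and then deletes the objects from the
    # level. Ids ending in '_co' mark carry-over courses; the suffix is
    # stripped before the record is written, e.g. (hypothetical id):
    #
    #   'MTH101_co'  ->  code 'MTH101', carry_over True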
1545
1546    def getCourses(self,student_id,level_id): ###(
1547        query = Eq('student_id',student_id) & Eq('level_id', level_id)
1548        course_results = self.course_results.evalAdvancedQuery(query)
1549        carry_overs = []
1550        normal1 = []
1551        normal2 = []
1552        normal3 = []
1553        total_credits = 0
1554        gpa = 0
1555        for brain in course_results:
1556            d = {}
1557
1558            for field in self.schema():
1559                d[field] = getattr(brain,field,None)
1560                if repr(d[field]) == 'Missing.Value':
1561                    d[field] = ''
1562            d['weight'] = ''
1563            d['grade'] = ''
1564            d['score'] = ''
1565
1566            if str(brain.credits).isdigit():
1567                credits = int(brain.credits)
1568                total_credits += credits
1569                score = getattr(brain,'score',0)
1570                if score and str(score).isdigit() and int(score) > 0:
1571                    score = int(score)
1572                    grade,weight = self.getGradesFromScore(score,'')
1573                    gpa += weight * credits
1574                    d['weight'] = weight
1575                    d['grade'] = grade
1576                    d['score'] = score
1577
1578            #if str(brain.ca1).isdigit() and str(brain.ca2).isdigit() and str(brain.exam).isdigit():
1579            #    d['score_calc'] = int(brain.ca1) + int(brain.ca2) + int(brain.exam)
1580            #else:
1581            #    d['score_calc'] = ''
1582            try:
1583                d['score_calc'] = float(brain.ca1) + float(brain.ca2) + float(brain.exam)
1584            except:
1585                d['score_calc'] = ''
1586
1587            if d['score_calc']:
1588                grade,weight = self.getGradesFromScore(d['score_calc'],level_id)
1589                d['grade'] = grade
1590
1591            d['coe'] = ''
1592            if brain.core_or_elective:
1593                d['coe'] = 'Core'
1594            elif brain.core_or_elective == False:
1595                d['coe'] = 'Elective'
1596            id = code = d['id'] = brain.code
1597            d['code'] = code
1598            res = self.courses_catalog.evalAdvancedQuery(Eq('code',code))
1599            if res:
1600                course = res[0]
1601                d['title'] = course.title
1602                # The courses_catalog stores a mix of strings and integers in its semester field,
1603                # although the course schema declares it as a 'CPS Int Field'. Reindexing the catalog might fix this.
1604                d['semester'] = str(course.semester)
1605            else:
1606                d['title'] = "Course has been removed from course list"
1607                d['semester'] = ''
1608            if brain.carry_over:
1609                d['coe'] = 'CO'
1610                carry_overs.append(d)
1611            else:
1612                if d['semester'] == '1':
1613                    normal1.append(d)
1614
1615                elif d['semester'] == '2':
1616                    normal2.append(d)
1617                else:
1618                    normal3.append(d)
1619        #normal.sort(cmp=lambda x,y: cmp("%(semester)s%(code)s" % x,
1620        #                                "%(semester)s%(code)s" % y))
1621        carry_overs.sort(cmp=lambda x,y: cmp("%(semester)s%(code)s" % x,
1622                                             "%(semester)s%(code)s" % y))
1623        return total_credits,gpa,carry_overs,normal1,normal2,normal3
1624    ###)
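    # getCourses returns raw accumulators rather than a final average:
    # `gpa` is the sum of weight * credits over all scored courses and is
    # presumably divided by total_credits by the caller. Worked example
    # with assumed grade weights (the real mapping lives in
    # getGradesFromScore):
    #
    #   course A: 3 credits, weight 4  ->  contributes 12
    #   course B: 2 credits, weight 3  ->  contributes  6
    #   total_credits = 5, accumulated gpa = 18  ->  mean = 18 / 5 = 3.6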
1625
1626
1627    def getAllCourses(self,student_id): ###(
1628        query = Eq('student_id',student_id)
1629        course_results = self.course_results.evalAdvancedQuery(query)
1630        courses = []
1631        for brain in course_results:
1632            d = {}
1633
1634            for field in self.schema():
1635                d[field] = getattr(brain,field,'')
1636
1637            d['weight'] = ''
1638            d['grade'] = ''
1639            d['score'] = ''
1640
1641            if str(brain.credits).isdigit():
1642                credits = int(brain.credits)
1643                score = getattr(brain,'score',0)
1644                if score and str(score).isdigit() and int(score) > 0:
1645                    score = int(score)
1646                    grade,weight = self.getGradesFromScore(score)
1647                    d['weight'] = weight
1648                    d['grade'] = grade
1649                    d['score'] = score
1650            d['coe'] = ''
1651            if brain.core_or_elective:
1652                d['coe'] = 'Core'
1653            elif brain.core_or_elective == False:
1654                d['coe'] = 'Elective'
1655            id = code = d['id'] = brain.code
1656            d['code'] = code
1657            res = self.courses_catalog.evalAdvancedQuery(Eq('code',code))
1658            if res:
1659                course = res[0]
1660                d['title'] = course.title
1661                # The courses_catalog stores a mix of strings and integers in its semester field,
1662                # although the course schema declares it as a 'CPS Int Field'. Reindexing the catalog might fix this.
1663                d['semester'] = str(course.semester)
1664            else:
1665                d['title'] = "Course has been removed from course list"
1666                d['semester'] = ''
1667            if brain.carry_over:
1668                d['coe'] = 'CO'
1669            courses.append(d)
1670        return courses
1671    ###)
1672
1673InitializeClass(CourseResults)
1674###)
1675
1676class OnlinePaymentsImport(WAeUPTable): ###(
1677
1678    meta_type = 'WAeUP Online Payment Transactions'
1679    name = "online_payments_import"
1680    key = "order_id"
1681    def __init__(self,name=None):
1682        if name ==  None:
1683            name = self.name
1684        WAeUPTable.__init__(self, name)
1685
1686
1687InitializeClass(OnlinePaymentsImport)
1688###)
1689
1690class ReturningImport(WAeUPTable): ###(
1691
1692    meta_type = 'Returning Import Table'
1693    name = "returning_import"
1694    key = "matric_no"
1695    def __init__(self,name=None):
1696        if name ==  None:
1697            name = self.name
1698        WAeUPTable.__init__(self, name)
1699
1700
1701InitializeClass(ReturningImport)
1702###)
1703
1704class ResultsImport(WAeUPTable): ###(
1705
1706    meta_type = 'Results Import Table'
1707    name = "results_import"
1708    key = "key"
1709    def __init__(self,name=None):
1710        if name ==  None:
1711            name = self.name
1712        WAeUPTable.__init__(self, name)
1713
1714
1715InitializeClass(ResultsImport)
1716
1717###)
1718
1719class PaymentsCatalog(WAeUPTable): ###(
1720    security = ClassSecurityInfo()
1721
1722    meta_type = 'WAeUP Payments Catalog'
1723    name = "payments_catalog"
1724    key = "order_id"
1725    def __init__(self,name=None):
1726        if name ==  None:
1727            name = self.name
1728        WAeUPTable.__init__(self, name)
1729
1730
1731    security.declarePrivate('notify_event_listener') ###(
1732    def notify_event_listener(self,event_type,object,infos):
1733        "listen for events"
1734        if not infos.has_key('rpath'):
1735            return
1736        pt = getattr(object,'portal_type',None)
1737        mt = getattr(object,'meta_type',None)
1738        data = {}
1739        if pt != 'Payment':
1740            return
1741        if event_type == 'sys_del_object' and mt == 'CPS Proxy Folder':
1742            self.deleteRecord(object.getContent().order_id)
1743        if mt == 'CPS Proxy Folder':
1744            return # is handled only for the real object
1745        if event_type not in ('sys_modify_object',):
1746            return
1747        for field in self.schema():
1748            data[field] = getattr(object,field,'')
1749        rpl = infos['rpath'].split('/')
1750        #import pdb;pdb.set_trace()
1751        student_id = rpl[-4]
1752        data['student_id'] = student_id
1753        modified = False
1754        try:
1755            self.modifyRecord(**data)
1756            modified = True
1757        except KeyError:
1758            #logger = logging.getLogger('WAeUPTables.PaymentsCatalog.%s' % self.__name__)
1759            #logger.info("could not modify entry for %(student_id)s with %(order_id)s" % data)
1760            pass
1761        if not modified:
1762            try:
1763                self.addRecord(**data)
1764            except:
1765                logger = logging.getLogger('WAeUPTables.PaymentsCatalog.notify_event_listener')
1766                logger.info("could not add or modify entry for %(student_id)s with %(order_id)s" % data)
1767        ###)
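    # The listener above is effectively an upsert: modifyRecord is tried
    # first and addRecord is used as a fallback when modifyRecord raises
    # KeyError (typically because no record with that order_id exists yet).
    # The student_id is read from the fourth path segment from the right of
    # the event's rpath; with a purely hypothetical rpath such as
    #
    #   'campus/students/X123456/payments/p_1234567890/payment'
    #
    # rpl[-4] yields 'X123456'.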
1768
1769
1770InitializeClass(PaymentsCatalog)
1771
1772###)
1773
1774# BBB:
1775AccomodationTable = AccommodationTable