source: WAeUP_SRP/base/WAeUPTables.py @ 3600

Last change on this file since 3600 was 3497, checked in by Henrik Bettermann on 2008-05-14

Completion of Joachim's last revision (r3494).

Joachim, we are missing you!

  • Property svn:keywords set to Id
File size: 64.0 KB
1#-*- mode: python; mode: fold -*-
2# (C) Copyright 2005 AixtraWare <http://aixtraware.de>
3# Author: Joachim Schmitz <js@aixtraware.de>
4#
5# This program is free software; you can redistribute it and/or modify
6# it under the terms of the GNU General Public License version 2 as published
7# by the Free Software Foundation.
8#
9# This program is distributed in the hope that it will be useful,
10# but WITHOUT ANY WARRANTY; without even the implied warranty of
11# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
12# GNU General Public License for more details.
13#
14# You should have received a copy of the GNU General Public License
15# along with this program; if not, write to the Free Software
16# Foundation, Inc., 59 Temple Place - Suite 330, Boston, MA
17# 02111-1307, USA.
18#
19# $Id: WAeUPTables.py 3497 2008-05-14 10:48:32Z henrik $
20
21from zope.interface import implements
22from Globals import InitializeClass
23from Products.ZCatalog.ZCatalog import ZCatalog
24from Products.ZCatalog.ProgressHandler import ZLogHandler
25from AccessControl import ClassSecurityInfo
26from Products.CMFCore.permissions import ModifyPortalContent
27from Products.CMFCore.utils import getToolByName
28from Products.CMFCore.CatalogTool import CatalogTool
29from Products.CPSSchemas.StorageAdapter import MappingStorageAdapter
30from Products.CPSSchemas.DataStructure import DataStructure
31from Products.CPSSchemas.DataModel import DataModel
32from Products.AdvancedQuery import Eq, Between, Le,In
33import urllib,os
34import DateTime,time
35import csv,re
36import logging
37import Globals
38p_home = Globals.package_home(globals())
39i_home = Globals.INSTANCE_HOME
40
41ADDING_SHEDULED = "adding_sheduled"
42OBJECT_CREATED = "object_created"
43NOT_OCCUPIED = 'not_occupied'
44
45from interfaces import IWAeUPTable
46
47class AttributeHolder(object):
48    pass
49
50def dict2ob(data_dict):
51    ob = AttributeHolder()
52    for key, value in data_dict.items():
53        setattr(ob, key, value)
54    return ob
55
56class WAeUPTable(ZCatalog): ###(
57
58    implements(IWAeUPTable)
59    security = ClassSecurityInfo()
60    meta_type = None
61
62    def __init__(self,name=None):
63        if name ==  None:
64            name = self.name
65        ZCatalog.__init__(self,name)
66
67    def refreshCatalog(self, clear=0, pghandler=None): ###(
68        """ don't refresh for a normal table """
69
70        if self.REQUEST and self.REQUEST.RESPONSE:
71            self.REQUEST.RESPONSE.redirect(
72              self.REQUEST['URL1'] +
73              '/manage_catalogAdvanced?manage_tabs_message=Catalog%20refresh%20not%20implemented')
74
75###)
76
77    def manage_catalogClear(self, REQUEST=None, RESPONSE=None, URL1=None): ###(
78        """ clears the whole enchilada """
79
80        #if REQUEST and RESPONSE:
81        #    RESPONSE.redirect(
82        #      URL1 +
83        #      '/manage_catalogAdvanced?manage_tabs_message=Catalog%20Clearing%20disabled')
84
85        self._catalog.clear()
86        if REQUEST and RESPONSE:
87            RESPONSE.redirect(
88              URL1 +
89              '/manage_catalogAdvanced?manage_tabs_message=Catalog%20cleared')
90
91###)
92
93    def record2dict(self,fields,record): ###(
94        d = {}
95        for key in fields:
96            v = getattr(record, key, None)
97            v_dump = v
98            if key == 'sex':
99                if v == True:
100                    v_dump = 'F'
101                elif v == False:
102                    v_dump = 'M'
103                d[key] = v_dump
104            elif v:
105                if key == 'lga':
106                    v_dump = self.portal_vocabularies.local_gov_areas.get(v)
107                    if not v_dump:
108                        v_dump = v
109                elif key == 'aos':
110                    v_dump = self.portal_vocabularies.aos.get(v)
111                d[key] = v_dump
112            else:
113                d[key] = ''
114        return d
115
116###)
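
record2dict flattens a catalog record into a plain dictionary for the CSV writers: the boolean sex index is rendered as 'F'/'M', lga and aos codes are resolved through portal_vocabularies where possible, and empty values become ''. A hedged sketch of the expected mapping, assuming a table acquired from the portal; the record and vocabulary values shown are invented:

# illustration only -- 'table' stands for any WAeUPTable instance
fields = ('id', 'sex', 'lga', 'email')
brain = table.getRecordByKey('A123456')
print table.record2dict(fields, brain)
# {'id': 'A123456',
#  'sex': 'F',                       # True -> 'F', False -> 'M'
#  'lga': 'Edo / Ovia North-East',   # code resolved via portal_vocabularies
#  'email': ''}                      # missing values dump as ''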
117
118    def addRecord(self, **data): ###(
119        # The uid is the value of the key column (e.g. "bed" for portal_accommodation).
120        uid = data[self.key]
121        res = self.searchResults({"%s" % self.key : uid})
122        if len(res) > 0:
123            raise ValueError("More than one record with uid %s" % uid)
124        self.catalog_object(dict2ob(data), uid=uid)
125        return uid
126
127###)
128
129    def deleteRecord(self, uid):
130        self.uncatalog_object(uid)
131
132    def getRecordByKey(self,key):
133        if not key:
134            return None
135        res = self.evalAdvancedQuery(Eq(self.key,key))
136        if res:
137            return res[0]
138        return None
139
140    def searchAndSetRecord(self, **data):
141        raise NotImplementedError
142
143    def modifyRecord(self, record=None, **data): ###(
144        #records = self.searchResults(uid=uid)
145        uid = data[self.key]
146        if record is None:
147            records = self.searchResults({"%s" % self.key : uid})
148            if len(records) > 1:
149                # Can not happen, but anyway...
150                raise ValueError("More than one record with uid %s" % uid)
151            if len(records) == 0:
152                raise KeyError("No record for uid %s" % uid)
153            record = records[0]
154        record_data = {}
155        for field in self.schema() + self.indexes():
156            record_data[field] = getattr(record, field)
157        # Add the updated data:
158        record_data.update(data)
159        self.catalog_object(dict2ob(record_data), uid)
160
161###)
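
Taken together, addRecord, getRecordByKey, modifyRecord and deleteRecord give every table a small keyed CRUD API: the column named by self.key serves as the uid, addRecord raises ValueError for a duplicate uid, and modifyRecord re-catalogs the merged old and new field values. A hedged usage sketch, assuming portal code where portal_pins can be acquired; the pin value and the prefix_batch column are illustrative assumptions:

pins = context.portal_pins                    # key column is 'pin'
pins.addRecord(pin='APP-1-0000000001', student='', prefix_batch='APP-1')
rec = pins.getRecordByKey('APP-1-0000000001')
print rec.student                             # '' -> not used yet
pins.modifyRecord(pin='APP-1-0000000001', student='A123456')
pins.deleteRecord('APP-1-0000000001')         # uncatalog by uid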
162
163    def reindexIndex(self, name, REQUEST,pghandler=None): ###(
164        if isinstance(name, str):
165            name =  (name,)
166        paths = self._catalog.uids.items()
167        i = 0
168        #import pdb;pdb.set_trace()
169        for p,rid in paths:
170            i += 1
171            metadata = self.getMetadataForRID(rid)
172            record_data = {}
173            for field in name:
174                record_data[field] = metadata.get(field)
175            uid = metadata.get(self.key)
176            self.catalog_object(dict2ob(record_data), uid, idxs=name,
177                                update_metadata=0)
178
179###)
180
181    security.declareProtected(ModifyPortalContent,"exportAllRecords") ###(
182    def exportAllRecords(self):
183        "export a WAeUPTable"
184        #import pdb;pdb.set_trace()
185        fields = [field for field in self.schema()]
186        format = ','.join(['"%%(%s)s"' % fn for fn in fields])
187        csv = []
188        csv.append(','.join(['"%s"' % fn for fn in fields]))
189        for uid in self._catalog.uids:
190            records = self.searchResults({"%s" % self.key : uid})
191            if len(records) > 1:
192                # Can not happen, but anyway...
193                raise ValueError("More than one record with uid %s" % uid)
194            if len(records) == 0:
195                raise KeyError("No record for uid %s" % uid)
196            rec = records[0]
197            csv.append(format % rec)
198        current = DateTime.DateTime().strftime("%d-%m-%y_%H_%M_%S")
199        open("%s/import/%s-%s.csv" % (i_home,self.getId(),current),"w+").write('\n'.join(csv))
200
201###)
202
203    security.declareProtected(ModifyPortalContent,"dumpAll")###(
204    def dumpAll(self):
205        """dump all data in the table to a csv"""
206        member = self.portal_membership.getAuthenticatedMember()
207        logger = logging.getLogger('WAeUPTables.WAeUPTable.dumpAll')
208        current = DateTime.DateTime().strftime("%d-%m-%y_%H_%M_%S")
209        export_file = "%s/export/%s_%s.csv" % (i_home,self.__name__,current,)
210        res_list = []
211        lines = []
212        if hasattr(self,"export_keys"):
213            fields = self.export_keys
214        else:
215            fields = []
216            for f in self.schema():
217                fields.append(f)
218        headline = ','.join(fields)
219        out = open(export_file,"wb")
220        out.write(headline +'\n')
221        out.close()
222        out = open(export_file,"a")
223        csv_writer = csv.DictWriter(out,fields,)
224        records = self()
225        nr2export = len(records)
226        logger.info('%s starts dumping, %s records to export' % (member,nr2export))
227        chunk = 2000
228        total = 0
229        start = DateTime.DateTime().timeTime()
230        start_chunk = DateTime.DateTime().timeTime()
231        for record in records:
232            not_all = False
233            d = self.record2dict(fields,record)
234            lines.append(d)
235            total += 1
236            if total and not total % chunk or total == len(records):
237                csv_writer.writerows(lines)
238                anz = len(lines)
239                logger.info("wrote %(anz)d  total written %(total)d" % vars())
240                end_chunk = DateTime.DateTime().timeTime()
241                duration = end_chunk-start_chunk
242                per_record = duration/anz
243                till_now = end_chunk - start
244                average_per_record = till_now/total
245                estimated_end = DateTime.DateTime(start + average_per_record * nr2export)
246                estimated_end = estimated_end.strftime("%H:%M:%S")
247                logger.info('%(duration)4.1f, %(per_record)4.3f,end %(estimated_end)s' % vars())
248                start_chunk = DateTime.DateTime().timeTime()
249                lines = []
250        end = DateTime.DateTime().timeTime()
251        logger.info('total time %6.2f m' % ((end-start)/60))
252        import os
253        filename, extension = os.path.splitext(export_file)
254        from subprocess import call
255        msg = "wrote %(total)d records to %(export_file)s" % vars()
256        #try:
257        #    retcode = call('gzip %s' % (export_file),shell=True)
258        #    if retcode == 0:
259        #        msg = "wrote %(total)d records to %(export_file)s.gz" % vars()
260        #except OSError, e:
261        #    retcode = -99
262        #    logger.info("zip failed with %s" % e)
263        logger.info(msg)
264        args = {'portal_status_message': msg}
265        #url = self.REQUEST.get('URL1') + '?' + urlencode(args)
266        url = self.REQUEST.get('URL2')
267        return self.REQUEST.RESPONSE.redirect(url)
268    ###)
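
The progress logging above estimates a finishing time by averaging the seconds spent per record so far and extrapolating over the full number of records to export. A standalone sketch of that arithmetic using the stdlib time module instead of DateTime; the figures are invented:

import time

start = time.time()
total, nr2export = 4000, 10000        # pretend 4000 of 10000 records are written
now = start + 12.0                    # and that 12 seconds have elapsed
average_per_record = (now - start) / total
estimated_end = start + average_per_record * nr2export
print time.strftime("%H:%M:%S", time.localtime(estimated_end))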
269
270    security.declarePrivate("_import_old") ###(
271    def _import_old(self,filename,schema,layout, mode,logger):
272        "import data from csv"
273        import transaction
274        import random
275        pm = self.portal_membership
276        member = pm.getAuthenticatedMember()
277        current = DateTime.DateTime().strftime("%d-%m-%y_%H_%M_%S")
278        import_fn = "%s/import/%s.csv" % (i_home,filename)
279        imported_fn = "%s/import/%s_imported%s.csv" % (i_home,filename,current)
280        not_imported_fn = "%s/import/%s_not_imported%s.csv" % (i_home,filename,current)
281        start = True
282        tr_count = 1
283        total_imported = 0
284        total_not_imported = 0
285        total = 0
286        iname =  "%s" % filename
287        not_imported = []
288        imported = []
289        valid_records = []
290        invalid_records = []
291        d = {}
292        d['mode'] = mode
293        d['imported'] = total_imported
294        d['not_imported'] = total_not_imported
295        d['valid_records'] = valid_records
296        d['invalid_records'] = invalid_records
297        d['import_fn'] = import_fn
298        d['imported_fn'] = imported_fn
299        d['not_imported_fn'] = not_imported_fn
300        if schema is None:
301            em = 'No schema specified'
302            logger.error(em)
303            return d
304        if layout is None:
305            em = 'No layout specified'
306            logger.error(em)
307            return d
308        validators = {}
309        for widget in layout.keys():
310            try:
311                validators[widget] = layout[widget].validate
312            except AttributeError:
313                logger.info('%s has no validate attribute' % widget)
314                return d
315        # if mode == 'edit':
316        #     importer = self.importEdit
317        # elif mode == 'add':
318        #     importer = self.importAdd
319        # else:
320        #     importer = None
321        try:
322            items = csv.DictReader(open(import_fn,"rb"),
323                                   dialect="excel",
324                                   skipinitialspace=True)
325        except:
326            em = 'Error reading %s.csv' % filename
327            logger.error(em)
328            return d
329        #import pdb;pdb.set_trace()
330        for item in items:
331            if start:
332                start = False
333                logger.info('%s starts import from %s.csv' % (member,filename))
334                #import_keys = [k for k in item.keys() if not k.startswith('ignore')]
335                attrs = csv.reader(open("%s/import/%s.csv" % (i_home,filename),"rb"),
336                                   dialect="excel",
337                                   skipinitialspace=True).next()
338                import_keys = [k for k in attrs if not (k.startswith('ignore') or k.isupper())]
339                diff2schema = set(import_keys).difference(set(schema.keys()))
340                diff2layout = set(import_keys).difference(set(layout.keys()))
341                if diff2layout:
342                    em = "not ignorable key(s) %s found in heading" % diff2layout
343                    logger.info(em)
344                    return d
345                s = ','.join(['"%s"' % fn for fn in import_keys])
346                open(not_imported_fn,"a").write(s + ',"Error"'+ '\n')
347                #s = '"id",' + s
348                open(imported_fn,"a").write(s + '\n')
349                format = ','.join(['"%%(%s)s"' % fn for fn in import_keys])
350                format_error = format + ',"%(Error)s"'
351                #format = '"%(id)s",'+ format
352                adapters = [MappingStorageAdapter(schema, item)]
353            dm = DataModel(item, adapters,context=self)
354            ds = DataStructure(data=item,datamodel=dm)
355            error_string = ""
356            #import pdb;pdb.set_trace()
357            for k in import_keys:
358                if not validators[k](ds,mode=mode):
359                    error_string += " %s : %s" % (k,ds.getError(k))
360            # if not error_string and importer:
361            #     item.update(dm)
362            #     item['id'],error = importer(item)
363            #     if error:
364            #         error_string += error
365            if error_string:
366                item['Error'] = error_string
367                invalid_records.append(dm)
368                not_imported.append(format_error % item)
369                total_not_imported += 1
370            else:
371                em = format % item
372                valid_records.append(dm)
373                imported.append(em)
374                #logger.info("%(total_imported)d of %(total)d %(em)s" % vars())
375                tr_count += 1
376                total_imported += 1
377            total += 1
378        if len(imported) > 0:
379            open(imported_fn,"a").write('\n'.join(imported))
380        if len(not_imported) > 0:
381            open(not_imported_fn,"a").write('\n'.join(not_imported))
382        #em = "Imported: %d, not imported: %d of total %d" % (total_imported,total_not_imported,total)
383        d['imported'] = total_imported
384        d['not_imported'] = total_not_imported
385        d['valid_records'] = valid_records
386        d['invalid_records'] = invalid_records
387        d['imported_fn'] = imported_fn
388        d['not_imported_fn'] = not_imported_fn
389        #logger.info(em)
390        return d
391    ###)
392
393    security.declarePrivate("_import_new") ###(
394    def _import_new(self,csv_items,schema, layout, mode,logger):
395        "import data from csv.Dictreader Instance"
396        start = True
397        tr_count = 1
398        total_imported = 0
399        total_not_imported = 0
400        total = 0
401        #iname =  "%s" % filename # unused; 'filename' is not passed to this method
402        not_imported = []
403        valid_records = []
404        invalid_records = []
405        duplicate_records = []
406        d = {}
407        d['mode'] = mode
408        d['valid_records'] = valid_records
409        d['invalid_records'] = invalid_records
410        d['duplicate_records'] = duplicate_records
411        # d['import_fn'] = import_fn
412        # d['imported_fn'] = imported_fn
413        # d['not_imported_fn'] = not_imported_fn
414        validators = {}
415        for widget in layout.keys():
416            try:
417                validators[widget] = layout[widget].validate
418            except AttributeError:
419                logger.info('%s has no validate attribute' % widget)
420                return d
421        for item in csv_items:
422            if start:
423                start = False
424                logger.info('import from csv.DictReader instance starts')
425                # 'member' and 'filename' are not available in this method, so the
426                # column names are taken from the csv.DictReader item itself
427                import_keys = [k for k in item.keys() if not (k.startswith('ignore') or k.isupper())]
428                diff2schema = set(import_keys).difference(set(schema.keys()))
429                diff2layout = set(import_keys).difference(set(layout.keys()))
430                if diff2layout:
431                    em = "not ignorable key(s) %s found in heading" % diff2layout
432                    logger.info(em)
433                    return d
434                # s = ','.join(['"%s"' % fn for fn in import_keys])
435                # open(not_imported_fn,"a").write(s + ',"Error"'+ '\n')
436                # #s = '"id",' + s
437                # open(imported_fn,"a").write(s + '\n')
438                # format = ','.join(['"%%(%s)s"' % fn for fn in import_keys])
439                # format_error = format + ',"%(Error)s"'
440                # #format = '"%(id)s",'+ format
441                adapters = [MappingStorageAdapter(schema, item)]
442            dm = DataModel(item, adapters,context=self)
443            ds = DataStructure(data=item,datamodel=dm)
444            error_string = ""
445            for k in import_keys:
446                if not validators[k](ds,mode=mode):
447                    error_string += " %s : %s" % (k,ds.getError(k))
448            if error_string:
449                item['Error'] = error_string
450                #invalid_records.append(dm)
451                invalid_records.append(item)
452                total_not_imported += 1
453            else:
454                #em = format % item # 'format' is only defined in _import_old
455                valid_records.append(dm)
456                #logger.info("%(total_imported)d of %(total)d %(em)s" % vars())
457                tr_count += 1
458                total_imported += 1
459            total += 1
460        # if len(imported) > 0:
461        #     open(imported_fn,"a").write('\n'.join(imported))
462        # if len(not_imported) > 0:
463        #     open(not_imported_fn,"a").write('\n'.join(not_imported))
464        #em = "Imported: %d, not imported: %d of total %d" % (total_imported,total_not_imported,total)
465        d['imported'] = total_imported
466        d['not_imported'] = total_not_imported
467        d['valid_records'] = valid_records
468        d['invalid_records'] = invalid_records
469        return d
470    ###)
471
472    security.declarePublic("missingValue")###(
473    def missingValue(self):
474        from Missing import MV
475        return MV
476    ###)
477###)
478
479class AccommodationTable(WAeUPTable): ###(
480
481    meta_type = 'WAeUP Accommodation Tool'
482    name = "portal_accommodation"
483    key = "bed"
484    not_occupied = NOT_OCCUPIED
485    def __init__(self,name=None):
486        if name ==  None:
487            name = self.name
488        WAeUPTable.__init__(self, name)
489
490    def searchAndReserveBed(self, student_id,bed_type): ###(
491        logger = logging.getLogger('WAeUPTables.AccommodationTable.searchAndReserveBed')
492        records = self.evalAdvancedQuery(Eq('student',student_id))
493        if len(records) == 1:
494            #return -1,"Student with Id %s already booked bed %s." % (student_id,records[0].bed)
495            logger.info('%s found (reserved) bed %s' % (student_id,records[0].bed))
496            return -1,records[0].bed
497        elif len(records) > 1:
498            logger.info('%s found more than one (reserved) bed' % (student_id))
499            return -3,'more than one bed'
500        query = Eq('bed_type',bed_type) & Eq('student',NOT_OCCUPIED)
501        records = self.evalAdvancedQuery(query,sortSpecs=('sort_id','bed'))
502        if len(records) == 0:
503            logger.info('no bed %s available for %s' % (bed_type,student_id))
504            return -2,"no bed"
505        rec = records[0]
506        self.modifyRecord(bed=rec.bed,student=student_id)
507        logger.info('%s booked bed %s' % (student_id,rec.bed))
508        return 1,rec.bed
509    ###)
510
511
512InitializeClass(AccommodationTable)
513
514###)
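
searchAndReserveBed reports its outcome in the first element of the returned tuple: 1 means a bed was booked, -1 that the student already holds the returned bed, -2 that no bed of the requested type is free, and -3 that more than one bed is already recorded for the student. A hedged usage sketch; the student id and bed type are invented examples:

code, bed = context.portal_accommodation.searchAndReserveBed('A123456', 'male_re')
if code == 1:
    print "booked", bed
elif code == -1:
    print "already booked", bed
elif code == -2:
    print "no free bed of that type"
else:                                 # code == -3
    print "inconsistent booking data"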
515
516class PinTable(WAeUPTable): ###(
517    from ZODB.POSException import ConflictError
518    security = ClassSecurityInfo()
519    meta_type = 'WAeUP Pin Tool'
520    name = "portal_pins"
521    key = 'pin'
522
523    def __init__(self,name=None):
524        if name ==  None:
525            name = self.name
526        WAeUPTable.__init__(self, name)
527
528    security.declareProtected(ModifyPortalContent,"dumpAll")###(
529    def dumpAll(self,include_unused=None):
530        """dump all data in the table to a csv"""
531        member = self.portal_membership.getAuthenticatedMember()
532        logger = logging.getLogger('WAeUPTables.PinTable.dumpAll')
533        current = DateTime.DateTime().strftime("%d-%m-%y_%H_%M_%S")
534        export_file = "%s/export/%s_%s.csv" % (i_home,self.__name__,current,)
535        res_list = []
536        lines = []
537        if hasattr(self,"export_keys"):
538            fields = self.export_keys
539        else:
540            fields = []
541            for f in self.schema():
542                fields.append(f)
543        headline = ','.join(fields)
544        out = open(export_file,"wb")
545        out.write(headline +'\n')
546        out.close()
547        out = open(export_file,"a")
548        csv_writer = csv.DictWriter(out,fields,)
549        if include_unused is not None and str(member) not in ('admin','joachim'):
550            logger.info('%s tries to dump pintable with unused pins' % (member))
551            return
552        if include_unused is not None:
553            records = self()
554        else:
555            records = self.evalAdvancedQuery(~Eq('student',''))
556        nr2export = len(records)
557        logger.info('%s starts dumping, %s records to export' % (member,nr2export))
558        chunk = 2000
559        total = 0
560        start = DateTime.DateTime().timeTime()
561        start_chunk = DateTime.DateTime().timeTime()
562        for record in records:
563            not_all = False
564            d = self.record2dict(fields,record)
565            lines.append(d)
566            total += 1
567            if total and not total % chunk or total == len(records):
568                csv_writer.writerows(lines)
569                anz = len(lines)
570                logger.info("wrote %(anz)d  total written %(total)d" % vars())
571                end_chunk = DateTime.DateTime().timeTime()
572                duration = end_chunk-start_chunk
573                per_record = duration/anz
574                till_now = end_chunk - start
575                average_per_record = till_now/total
576                estimated_end = DateTime.DateTime(start + average_per_record * nr2export)
577                estimated_end = estimated_end.strftime("%H:%M:%S")
578                logger.info('%(duration)4.1f, %(per_record)4.3f,end %(estimated_end)s' % vars())
579                start_chunk = DateTime.DateTime().timeTime()
580                lines = []
581        end = DateTime.DateTime().timeTime()
582        logger.info('total time %6.2f m' % ((end-start)/60))
583        import os
584        filename, extension = os.path.splitext(export_file)
585        from subprocess import call
586        msg = "wrote %(total)d records to %(export_file)s" % vars()
587        #try:
588        #    retcode = call('gzip %s' % (export_file),shell=True)
589        #    if retcode == 0:
590        #        msg = "wrote %(total)d records to %(export_file)s.gz" % vars()
591        #except OSError, e:
592        #    retcode = -99
593        #    logger.info("zip failed with %s" % e)
594        logger.info(msg)
595        args = {'portal_status_message': msg}
596        #url = self.REQUEST.get('URL1') + '?' + urlencode(args)
597        url = self.REQUEST.get('URL2')
598        return self.REQUEST.RESPONSE.redirect(url)
599    ###)
600
601
602
603    def searchAndSetRecord(self, uid, student_id,prefix):
604
605        # The following line must be activated after resetting the
606        # the portal_pins table. This is to avoid duplicate entries
607        # and disable duplicate payments.
608
609        #student_id = student_id.upper()
610
611        #records = self.searchResults(student = student_id)
612        #if len(records) > 0 and prefix in ('CLR','APP'):
613        #    for r in records:
614        #        if r.pin != uid and r.prefix_batch.startswith(prefix):
615        #            return -2
616        records = self.searchResults({"%s" % self.key : uid})
617        if len(records) > 1:
618            # Can not happen, but anyway...
619            raise ValueError("More than one record with uid %s" % uid)
620        if len(records) == 0:
621            return -1,None
622        record = records[0]
623        if record.student == "":
624            record_data = {}
625            for field in self.schema() + self.indexes():
626                record_data[field] = getattr(record, field)
627            # Add the updated data:
628            record_data['student'] = student_id
629            try:
630                self.catalog_object(dict2ob(record_data), uid)
631                return 1,record
632            except self.ConflictError:
633                return 2,record
634        if record.student.upper() != student_id.upper():
635            return 0,record
636        if record.student.upper() == student_id.upper():
637            return 2,record
638        return -3,record
639InitializeClass(PinTable)
640###)
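
searchAndSetRecord ties a PIN to a student id and answers with (status, record): -1 when the PIN does not exist, 1 when it was free and has just been assigned, 2 when it is already assigned to the same student (or a ZODB ConflictError interrupted the write), and 0 when a different student has used it. A hedged sketch of how a caller might branch on those codes; the identifiers are invented:

status, record = context.portal_pins.searchAndSetRecord(
                     'APP-1-0000000001', 'A123456', 'APP')
if status == -1:
    print "unknown pin"
elif status in (1, 2):
    print "pin accepted for", record.student
else:                                 # status == 0
    print "pin already used by", record.student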
641
642class PumeResultsTable(WAeUPTable): ###(
643
644    meta_type = 'WAeUP PumeResults Tool'
645    name = "portal_pumeresults"
646    key = "jamb_reg_no"
647    def __init__(self,name=None):
648        if name ==  None:
649            name = self.name
650        WAeUPTable.__init__(self, name)
651
652
653InitializeClass(PumeResultsTable)
654
655###)
656
657class ApplicantsCatalog(WAeUPTable): ###(
658
659    meta_type = 'WAeUP Applicants Catalog'
660    name = "applicants_catalog"
661    key = "reg_no"
662    security = ClassSecurityInfo()
663    #export_keys = (
664    #               "reg_no",
665    #               "status",
666    #               "lastname",
667    #               "sex",
668    #               "date_of_birth",
669    #               "lga",
670    #               "email",
671    #               "phone",
672    #               "passport",
673    #               "entry_mode",
674    #               "pin",
675    #               "screening_type",
676    #               "registration_date",
677    #               "testdate",
678    #               "application_date",
679    #               "screening_date",
680    #               "faculty",
681    #               "department",
682    #               "course1",
683    #               "course2",
684    #               "course3",
685    #               "eng_score",
686    #               "subj1",
687    #               "subj1score",
688    #               "subj2",
689    #               "subj2score",
690    #               "subj3",
691    #               "subj3score",
692    #               "aggregate",
693    #               "course_admitted",
694    #               )
695
696    def __init__(self,name=None):
697        if name ==  None:
698            name = self.name
699        WAeUPTable.__init__(self, name)
700
701    security.declareProtected(ModifyPortalContent,"new_importCSV")###(
702    def new_importCSV(self,filename="JAMB_data",
703                  schema_id="application",
704                  layout_id="import_application",
705                  mode='add'):
706        """ import JAMB data """
707        current = DateTime.DateTime().strftime("%d-%m-%y_%H_%M_%S")
708        pm = self.portal_membership
709        member = pm.getAuthenticatedMember()
710        logger = logging.getLogger('WAeUPTables.ApplicantsCatalog.importCSV')
711        lock_fn = "%s/import/%s_import_lock" % (i_home,filename)
712        import_fn = "%s/import/%s.csv" % (i_home,filename)
713        if mode not in ('add','edit'):
714            logger.info("invalid mode: %s" % mode)
715        if os.path.exists(lock_fn):
716            logger.info("import of %(import_fn)s already in progress" % vars())
717            return
718        lock_file = open(lock_fn,"w")
719        lock_file.write("%(current)s \n" % vars())
720        lock_file.close()
721        invalid_fn = "%s/import/%s_not_imported%s.csv" % (i_home,filename,current)
722        duplicate_fn = "%s/import/%s_duplicates%s.csv" % (i_home,filename,current)
723        stool = getToolByName(self, 'portal_schemas')
724        ltool = getToolByName(self, 'portal_layouts')
725        schema = stool._getOb(schema_id)
726        if schema is None:
727            em = 'No such schema %s' % schema_id
728            logger.error(em)
729            return
730        for postfix in ('_import',''):
731            layout_name = "%(layout_id)s%(postfix)s" % vars()
732            if hasattr(ltool,layout_name):
733                break
734        layout = ltool._getOb(layout_name)
735        if layout is None:
736            em = 'No such layout %s' % layout_id
737            logger.error(em)
738            return
739        try:
740            csv_items = csv.DictReader(open(import_fn,"rb"))
741        except:
742            em = 'Error reading %s.csv' % filename
743            logger.error(em)
744            return
745        d = self._import_new(csv_items,schema,layout,mode,logger)
746        imported = []
747        edited = []
748        duplicates = []
749        not_found = []
750        if len(d['valid_records']) > 0:
751            for record in d['valid_records']:
752                #import pdb;pdb.set_trace()
753                if mode == "add":
754                    try:
755                        self.addRecord(**dict(record.items()))
756                        imported.append(dict(record.items()))
757                        logger.info("added %s" % record.items())
758                    except ValueError:
759                        duplicates.append(dict(record.items()))
760                        logger.info("duplicate %s" % record.items())
761                elif mode == "edit":
762                    try:
763                        self.modifyRecord(**dict(record.items()))
764                        edited.append(dict(record.items()))
765                        logger.info("edited %s" % record.items())
766                    except KeyError:
767                        not_found.append(dict(record.items()))
768                        logger.info("not found %s" % record.items())
769        invalid = d['invalid_records']
770        for itype in ("imported","edited","not_found","duplicates","invalid"):
771            outlist = locals()[itype]
772            if len(outlist):
773                # prepend a header row (key -> key) so DictWriter.writerows emits a heading
774                header = dict([(k,k) for k in outlist[0].keys()])
775                outlist.insert(0,header)
776                outfile = open("%s/import/%s_%s%s.csv" % (i_home,filename,itype,current),'w')
777                csv.DictWriter(outfile,outlist[0].keys()).writerows(outlist)
778                outfile.close()
779                logger.info("wrote %d %s records" % (len(outlist) - 1,itype))
780###)
781
782    security.declareProtected(ModifyPortalContent,"importCSV")###(
783    def importCSV(self,filename="JAMB_data",
784                  schema_id="application",
785                  layout_id="application_pce",
786                  mode='add'):
787        """ import JAMB data """
788        logger = logging.getLogger('WAeUPTables.ApplicantsCatalog.importCSV')
789        stool = getToolByName(self, 'portal_schemas')
790        ltool = getToolByName(self, 'portal_layouts')
791        schema = stool._getOb(schema_id)
792        if schema is None:
793            em = 'No such schema %s' % schema_id
794            logger.error(em)
795            return
796        layout = ltool._getOb(layout_id)
797        if layout is None:
798            em = 'No such layout %s' % layout_id
799            logger.error(em)
800            return
801        d = self._import_old(filename,schema,layout,mode,logger)
802        if len(d['valid_records']) > 0:
803            for record in d['valid_records']:
804                #import pdb;pdb.set_trace()
805                if mode == "add":
806                    self.addRecord(**dict(record.items()))
807                    logger.info("added %s" % record.items())
808                elif mode == "edit":
809                    self.modifyRecord(**dict(record.items()))
810                    logger.info("edited %s" % record.items())
811                else:
812                    logger.info("invalid mode: %s" % mode)
813        logger.info("%(mode)sed %(imported)d records, invalid written to %(not_imported_fn)s" % d)
814    ###)
815
816InitializeClass(ApplicantsCatalog)
817
818###)
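
importCSV expects its input below the instance home, i.e. <INSTANCE_HOME>/import/<filename>.csv, validates every row against the given schema and layout, and then adds or edits applicants_catalog records depending on mode. A hedged invocation sketch using the method's default file name:

context.applicants_catalog.importCSV(filename="JAMB_data",
                                     schema_id="application",
                                     layout_id="application_pce",
                                     mode="add")
# valid rows are appended to <INSTANCE_HOME>/import/JAMB_data_imported<timestamp>.csv,
# rejected rows (with an extra Error column) to .../JAMB_data_not_imported<timestamp>.csv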
819
820class StudentsCatalog(WAeUPTable): ###(
821    security = ClassSecurityInfo()
822
823    meta_type = 'WAeUP Students Catalog'
824    name = "students_catalog"
825    key = "id"
826    affected_types = {   ###(
827                      'StudentApplication':
828                      {'id': 'application',
829                       'fields':
830                       ('jamb_reg_no',
831                        'entry_mode',
832                        #'entry_level',
833                        'entry_session',
834                       )
835                      },
836                      'StudentClearance':
837                      {'id': 'clearance',
838                       'fields':
839                       ('matric_no',
840                        'lga',
841                       )
842                      },
843                      'StudentPersonal':
844                      {'id': 'personal',
845                       'fields':
846                       ('name',
847                        'sex',
848                        'perm_address',
849                        'email',
850                        'phone',
851                       )
852                      },
853                      'StudentStudyCourse':
854                      {'id': 'study_course',
855                       'fields':
856                       ('course', # study_course
857                        'faculty', # from certificate
858                        'department', # from certificate
859                        'end_level', # from certificate
860                        'level', # current_level
861                        'mode',  # current_mode
862                        'session', # current_session
863                        'verdict', # current_verdict
864                       )
865                      },
866                     }
867    ###)
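
affected_types is the routing table for incremental reindexing: given a changed field, reindexIndex and notify_event_listener look up which student sub-document (application, clearance, personal or study_course) carries it and read the value from that document. A standalone sketch of the lookup, with the dictionary abbreviated to two entries:

affected_types = {
    'StudentApplication': {'id': 'application',
                           'fields': ('jamb_reg_no', 'entry_mode', 'entry_session')},
    'StudentPersonal':    {'id': 'personal',
                           'fields': ('name', 'sex', 'email', 'phone')},
}

def find_subdoc_for(field):
    # return the sub-object id whose document holds the given catalog field
    for portal_type, info in affected_types.items():
        if field in info['fields']:
            return info['id']
    return None

print find_subdoc_for('email')          # 'personal'
print find_subdoc_for('jamb_reg_no')    # 'application'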
868
869    def __init__(self,name=None):
870        if name ==  None:
871            name = self.name
872        WAeUPTable.__init__(self, name)
873        return
874
875    def manage_catalogClear(self, REQUEST=None, RESPONSE=None, URL1=None):
876        """ clears the whole enchilada """
877        self._catalog.clear()
878
879        if REQUEST and RESPONSE:
880            RESPONSE.redirect(
881              URL1 +
882              '/manage_catalogAdvanced?manage_tabs_message=Catalog%20Cleared')
883
884    def manage_catalogReindex(self, REQUEST, RESPONSE, URL1): ###(
885        """ clear the catalog, then re-index everything """
886
887        elapse = time.time()
888        c_elapse = time.clock()
889
890        pgthreshold = self._getProgressThreshold()
891        handler = (pgthreshold > 0) and ZLogHandler(pgthreshold) or None
892        self.refreshCatalog(clear=1, pghandler=handler)
893
894        elapse = time.time() - elapse
895        c_elapse = time.clock() - c_elapse
896
897        RESPONSE.redirect(
898            URL1 +
899            '/manage_catalogAdvanced?manage_tabs_message=' +
900            urllib.quote('Catalog Updated \n'
901                         'Total time: %s\n'
902                         'Total CPU time: %s' % (`elapse`, `c_elapse`)))
903    ###)
904
905    def fill_certificates_dict(self): ###(
906        "return certificate data in  dict"
907        certificates_brains = self.portal_catalog(portal_type ='Certificate')
908        d = {}
909        for cb in certificates_brains:
910            certificate_doc = cb.getObject().getContent()
911            cb_path = cb.getPath().split('/')
912            ld = {}
913            ld['faculty'] = cb_path[-4]
914            ld['department'] = cb_path[-3]
915            ld['end_level'] = getattr(certificate_doc,'end_level','999')
916            ld['study_mode'] = getattr(certificate_doc,'study_mode','')
917            d[cb.getId] = ld
918        return d
919    ###)
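
fill_certificates_dict caches, per certificate id, the faculty and department taken from the catalog path together with end_level and study_mode from the certificate document, so the get_from_doc_* helpers can avoid one catalog query per student. A standalone sketch of the path slicing and the resulting dictionary shape; the path and the container names in it are invented assumptions:

path = '/portal/campus/academics/science/biochemistry/certificates/BSCBCH'
parts = path.split('/')
ld = {'faculty': parts[-4],          # 'science'
      'department': parts[-3],       # 'biochemistry'
      'end_level': '400',            # from the certificate document
      'study_mode': 'ug_ft'}         # from the certificate document
certificates = {parts[-1]: ld}       # keyed by certificate id, e.g. 'BSCBCH'
print certificates['BSCBCH']['department']   # 'biochemistry'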
920
921    def get_from_doc_department(self,doc,cached_data={}): ###(
922        "return the students department"
923        if doc is None:
924            return None
925        if hasattr(self,"_v_certificates") and self._v_certificates.has_key(doc.study_course):
926            return self._v_certificates[doc.study_course]['department']
927        certificate_res = self.portal_catalog(id = doc.study_course)
928        if len(certificate_res) != 1:
929            return None
930        return certificate_res[0].getPath().split('/')[-3]
931
932    def get_from_doc_faculty(self,doc,cached_data={}):
933        "return the students faculty"
934        if doc is None:
935            return None
936        if hasattr(self,"_v_certificates") and self._v_certificates.has_key(doc.study_course):
937            return self._v_certificates[doc.study_course]['faculty']
938        certificate_res = self.portal_catalog(id = doc.study_course)
939        if len(certificate_res) != 1:
940            return None
941        return certificate_res[0].getPath().split('/')[-4]
942
943    def get_from_doc_end_level(self,doc,cached_data={}):
944        "return the students end_level"
945        if doc is None:
946            return None
947        if hasattr(self,"_v_certificates") and self._v_certificates.has_key(doc.study_course):
948            return self._v_certificates[doc.study_course]['end_level']
949        certificate_res = self.portal_catalog(id = doc.study_course)
950        if len(certificate_res) != 1:
951            return None
952        return getattr(certificate_res[0].getObject().getContent(),'end_level','unknown')
953
954    def get_from_doc_level(self,doc,cached_data={}):
955        "return the students level"
956        if doc is None:
957            return None
958        return getattr(doc,'current_level',None)
959
960    def get_from_doc_mode(self,doc,cached_data={}):
961        "return the students mode"
962        if doc is None:
963            return None
964        cm = getattr(doc,'current_mode',None)
965        return cm
966
967
968    def get_from_doc_session(self,doc,cached_data={}):
969        "return the students current_session"
970        if doc is None:
971            return None
972        return getattr(doc,'current_session',None)
973
974    def get_from_doc_entry_session(self,doc,cached_data={}):
975        "return the students entry_session"
976        if doc is None:
977            return None
978        es = getattr(doc,'entry_session',None)
979        if es is not None and len(es) == 2:
980            return es
981        try:
982            digit = int(doc.jamb_reg_no[0])
983        except:
984            return "-1"
985        if digit < 8:
986            return "0%c" % doc.jamb_reg_no[0]
987        return "9%c" % doc.jamb_reg_no[0]
988
989    def get_from_doc_course(self,doc,cached_data={}):
990        "return the students study_course"
991        if doc is None:
992            return None
993        return getattr(doc,'study_course',None)
994
995    def get_from_doc_name(self,doc,cached_data={}):
996        "return the students name from the personal"
997        if doc is None:
998            return None
999        return "%s %s %s" % (doc.firstname,doc.middlename,doc.lastname)
1000
1001    def get_from_doc_verdict(self,doc,cached_data={}):
1002        "return the student's current_verdict"
1003        if doc is None:
1004            return None
1005        return getattr(doc,'current_verdict',None)
1006    ###)
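
When a study-course document carries no two-character entry_session, get_from_doc_entry_session falls back to the first digit of the JAMB registration number: digits below 8 produce a '0'-prefixed session ('05' for a number starting with 5), 8 and 9 a '9'-prefixed one ('98', '99'). A standalone sketch mirroring that fallback; the registration numbers are invented:

def entry_session_from_reg_no(jamb_reg_no):
    # mirrors the fallback in get_from_doc_entry_session
    try:
        digit = int(jamb_reg_no[0])
    except (ValueError, IndexError, TypeError):
        return "-1"
    if digit < 8:
        return "0%c" % jamb_reg_no[0]
    return "9%c" % jamb_reg_no[0]

print entry_session_from_reg_no("51234567")   # '05'
print entry_session_from_reg_no("91234567")   # '99'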
1007
1008    def reindexIndex(self, name, REQUEST,pghandler=None): ###(
1009        if isinstance(name, str):
1010            name = (name,)
1011        reindextypes = {}
1012        reindex_special = []
1013        for n in name:
1014            if n in ("review_state","registered_courses"):
1015                reindex_special.append(n)
1016            else:
1017                for pt in self.affected_types.keys():
1018                    if n in self.affected_types[pt]['fields']:
1019                        if reindextypes.has_key(pt):
1020                            reindextypes[pt].append(n)
1021                        else:
1022                            reindextypes[pt]= [n]
1023                        break
1024        cached_data = {}
1025        if set(name).intersection(set(('faculty','department','end_level'))):
1026            cached_data = self.fill_certificates_dict()
1027        students = self.portal_catalog(portal_type="Student")
1028        if hasattr(self,'portal_catalog_real'):
1029            aq_portal = self.portal_catalog_real.evalAdvancedQuery
1030        else:
1031            aq_portal = self.portal_catalog.evalAdvancedQuery
1032        num_objects = len(students)
1033        if pghandler:
1034            pghandler.init('Refreshing catalog: %s' % self.absolute_url(1), num_objects)
1035        noattr = set(('StudentClearance','StudentPersonal')) & set(reindextypes.keys())
1036        #import pdb;pdb.set_trace()
1037        for i in xrange(num_objects):
1038            if pghandler: pghandler.report(i)
1039            student_brain = students[i]
1040            student_object = student_brain.getObject()
1041            # query = Eq('path',student_brain.getPath())
1042            # sub_brains_list = aq_portal(query)
1043            # sub_brains = {}
1044            # for sub_brain in sub_brains_list:
1045            #     sub_brains[sub_brain.portal_type] = sub_brain
1046            # student_path = student_brain.getPath()
1047            data = {}
1048            modified = False
1049            sid = data['id'] = student_brain.getId
1050            if reindex_special and 'review_state' in reindex_special:
1051                modified = True
1052                data['review_state'] = student_brain.review_state
1053            sub_objects = False
1054            for pt in reindextypes.keys():
1055                modified = True
1056                try:
1057                    doc = getattr(student_object,self.affected_types[pt]['id']).getContent()
1058                    #doc = sub_brains[pt].getObject().getContent()
1059                    # path = "%s/%s" % (student_path,self.affected_types[pt]['id'])
1060                    # doc = self.unrestrictedTraverse(path).getContent()
1061                    sub_objects = True
1062                except:
1063                    continue
1064                for field in set(name).intersection(self.affected_types[pt]['fields']):
1065                    if hasattr(self,'get_from_doc_%s' % field):
1066                        data[field] = getattr(self,'get_from_doc_%s' % field)(doc,
1067                                                                              cached_data=cached_data)
1068                    else:
1069                        data[field] = getattr(doc,field)
1070            if not sub_objects and noattr:
1071                import_res = self.returning_import(id = sid)
1072                if not import_res:
1073                    continue
1074                import_record = import_res[0]
1075                data['matric_no'] = import_record.matric_no
1076                data['sex'] = import_record.Sex == 'F'
1077                data['name'] = "%s %s %s" % (import_record.Firstname,
1078                                             import_record.Middlename,
1079                                             import_record.Lastname)
1080                data['jamb_reg_no'] = import_record.Entryregno
1081            #if reindex_special and 'registered_courses' in reindex_special:
1082            #    try:
1083            #        study_course = getattr(student_object,"study_course")
1084            #        level_ids = study_course.objectIds()
1085            #    except:
1086            #        continue
1087            #    if not level_ids:
1088            #        continue
1089            #    modified = True
1090            #    level_ids.sort()
1091            #    course_ids = getattr(study_course,level_ids[-1]).objectIds()
1092            #    courses = []
1093            #    for c in course_ids:
1094            #        if c.endswith('_co'):
1095            #            courses.append(c[:-3])
1096            #        else:
1097            #            courses.append(c)
1098            #    data['registered_courses'] = courses
1099            if modified:
1100                self.modifyRecord(**data)
1101        if pghandler: pghandler.finish()
1102    ###)
1103
1104    def refreshCatalog(self, clear=0, pghandler=None): ###(
1105        """ re-index everything we can find """
1106        students_folder = self.portal_url.getPortalObject().campus.students
1107        if clear:
1108            self._catalog.clear()
1109        students = self.portal_catalog(portal_type="Student")
1110        num_objects = len(students)
1111        cached_data = self.fill_certificates_dict()
1112        if pghandler:
1113            pghandler.init('Refreshing catalog: %s' % self.absolute_url(1), num_objects)
1114        for i in xrange(num_objects):
1115            if pghandler: pghandler.report(i)
1116            student_brain = students[i]
1117            spath = student_brain.getPath()
1118            student_object = student_brain.getObject()
1119            data = {}
1120            sid = data['id'] = student_brain.getId
1121            data['review_state'] = student_brain.review_state
1122            sub_objects = False
1123            for pt in self.affected_types.keys():
1124                modified = True
1125                try:
1126                    doc = getattr(student_object,self.affected_types[pt]['id']).getContent()
1127                    sub_objects = True
1128                except:
1129                    #from pdb import set_trace;set_trace()
1130                    continue
1131                for field in self.affected_types[pt]['fields']:
1132                    if hasattr(self,'get_from_doc_%s' % field):
1133                        data[field] = getattr(self,'get_from_doc_%s' % field)(doc,
1134                                                                              cached_data=cached_data)
1135                    else:
1136                        data[field] = getattr(doc,field,None)
1137            if not sub_objects:
1138                import_res = self.returning_import(id = sid)
1139                if not import_res:
1140                    continue
1141                import_record = import_res[0]
1142                data['matric_no'] = import_record.matric_no
1143                data['sex'] = import_record.Sex == 'F'
1144                data['name'] = "%s %s %s" % (import_record.Firstname,
1145                                             import_record.Middlename,
1146                                             import_record.Lastname)
1147                data['jamb_reg_no'] = import_record.Entryregno
1148            self.addRecord(**data)
1149        if pghandler: pghandler.finish()
1150    ###)
1151
1152    security.declarePrivate('notify_event_listener') ###(
1153    def notify_event_listener(self,event_type,object,infos):
1154        "listen for events"
1155        if not infos.has_key('rpath'):
1156            return
1157        pt = getattr(object,'portal_type',None)
1158        mt = getattr(object,'meta_type',None)
1159        students_catalog = self
1160        data = {}
1161        if pt == 'Student' and\
1162           mt == 'CPS Proxy Folder' and\
1163           event_type.startswith('workflow'):
1164            data['id'] = object.getId()
1165            data['review_state'] = self.portal_workflow.getInfoFor(object,'review_state',None)
1166            students_catalog.modifyRecord(**data)
1167            return
1168        rpl = infos['rpath'].split('/')
1169        if pt == 'Student' and mt == 'CPS Proxy Folder':
1170            student_id = object.id
1171            if event_type == "sys_add_object":
1172                try:
1173                    self.addRecord(id = student_id)
1174                except ValueError:
1175                    pass
1176                return
1177            elif event_type == 'sys_del_object':
1178                self.deleteRecord(student_id)
1179        if pt not in self.affected_types.keys():
1180            return
1181        if event_type not in ('sys_modify_object',):
1182            return
1183        if mt == 'CPS Proxy Folder':
1184            return
1185        if not hasattr(self,'_v_certificates'):
1186            self._v_certificates = self.fill_certificates_dict()
1187        for field in self.affected_types[pt]['fields']:
1188            if hasattr(self,'get_from_doc_%s' % field):
1189                data[field] = getattr(self,'get_from_doc_%s' % field)(object)
1190            else:
1191                data[field] = getattr(object,field)
1192        data['id'] = rpl[2]
1193        self.modifyRecord(**data)
1194    ###)
1195
1196
1197InitializeClass(StudentsCatalog)
1198
1199###)
1200
1201class CertificatesCatalog(WAeUPTable): ###(
1202    security = ClassSecurityInfo()
1203
1204    meta_type = 'WAeUP Certificates Catalog'
1205    name =  "certificates_catalog"
1206    key = "code"
1207    def __init__(self,name=None):
1208        if name ==  None:
1209            name =  self.name
1210        WAeUPTable.__init__(self, name)
1211
1212    def manage_catalogReindex(self, REQUEST, RESPONSE, URL1): ###(
1213        """ clear the catalog, then re-index everything """
1214
1215        elapse = time.time()
1216        c_elapse = time.clock()
1217
1218        pgthreshold = self._getProgressThreshold()
1219        handler = (pgthreshold > 0) and ZLogHandler(pgthreshold) or None
1220        self.refreshCatalog(clear=1, pghandler=handler)
1221
1222        elapse = time.time() - elapse
1223        c_elapse = time.clock() - c_elapse
1224
1225        RESPONSE.redirect(
1226            URL1 +
1227            '/manage_catalogAdvanced?manage_tabs_message=' +
1228            urllib.quote('Catalog Updated \n'
1229                         'Total time: %s\n'
1230                         'Total CPU time: %s' % (`elapse`, `c_elapse`)))
1231    ###)
1232
1233    def reindexIndex(self, name, REQUEST,pghandler=None): ###(
1234        if isinstance(name, str):
1235            name = (name,)
1236        certificates = self.portal_catalog(portal_type="Certificate")
1237        num_objects = len(certificates)
1238        if pghandler:
1239            pghandler.init('Refreshing catalog: %s' % self.absolute_url(1), num_objects)
1240        for i in xrange(num_objects):
1241            if pghandler: pghandler.report(i)
1242            certificate_brain = certificates[i]
1243            certificate_object = certificate_brain.getObject()
1244            pl = certificate_brain.getPath().split('/')
1245            data = {}
1246            cid = data[self.key] = certificate_brain.getId
1247            data['faculty'] = pl[-4]
1248            data['department'] = pl[-3]
1249            doc = certificate_object.getContent()
1250            for field in name:
1251                if field not in (self.key,'faculty','department'):
1252                    data[field] = getattr(doc,field)
1253            self.modifyRecord(**data)
1254        if pghandler: pghandler.finish()
1255    ###)
1256
1257    def refreshCatalog(self, clear=0, pghandler=None): ###(
1258        """ re-index everything we can find """
1259        if clear:
1260            self._catalog.clear()
1261        certificates = self.portal_catalog(portal_type="Certificate")
1262        num_objects = len(certificates)
1263        if pghandler:
1264            pghandler.init('Refreshing catalog: %s' % self.absolute_url(1), num_objects)
1265        #from pdb import set_trace;set_trace()
1266        for i in xrange(num_objects):
1267            if pghandler: pghandler.report(i)
1268            certificate_brain = certificates[i]
1269            certificate_doc = certificate_brain.getObject().getContent()
1270            pl = certificate_brain.getPath().split('/')
1271            data = {}
1272            for field in self.schema():
1273                data[field] = getattr(certificate_doc,field,None)
1274            data[self.key] = certificate_brain.getId
1275            ai = pl.index('academics')
1276            data['faculty'] = pl[ai +1]
1277            data['department'] = pl[ai +2]
1278            if clear:
1279                self.addRecord(**data)
1280            else:
1281                self.modifyRecord(**data)
1282        if pghandler: pghandler.finish()
1283    ###)
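
refreshCatalog derives faculty and department from the brain path relative to the 'academics' folder instead of fixed negative offsets, which keeps the lookup stable however deep the certificate sits. A standalone sketch with the same invented example path as above:

pl = '/portal/campus/academics/science/biochemistry/certificates/BSCBCH'.split('/')
ai = pl.index('academics')
print pl[ai + 1]     # 'science'      -> faculty
print pl[ai + 2]     # 'biochemistry' -> department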
1284
1285    security.declarePrivate('notify_event_listener') ###(
1286    def notify_event_listener(self,event_type,object,infos):
1287        "listen for events"
1288        if not infos.has_key('rpath'):
1289            return
1290        pt = getattr(object,'portal_type',None)
1291        mt = getattr(object,'meta_type',None)
1292        if pt != 'Certificate':
1293            return
1294        data = {}
1295        rpl = infos['rpath'].split('/')
1296        if event_type not in ("sys_add_object","sys_modify_object","sys_del_object"):
1297            return
1298        certificate_id = object.getId()
1299        data[self.key] = certificate_id
1300        if event_type == "sys_add_object" and mt == 'CPS Proxy Folder':
1301            try:
1302                self.addRecord(**data)
1303            except ValueError:
1304                return
1305            certificate_id = object.getId()
1306            doc = object.getContent()
1307            if doc is None:
1308                return
1309            for field in self.schema():
1310                data[field] = getattr(doc,field,None)
1311            data[self.key] = certificate_id
1312            ai = rpl.index('academics')
1313            data['faculty'] = rpl[ai +1]
1314            data['department'] = rpl[ai +2]
1315            self.modifyRecord(**data)
1316            return
1317        if event_type == "sys_del_object":
1318            self.deleteRecord(certificate_id)
1319            return
1320        if event_type == "sys_modify_object" and mt == 'Certificate':
1321            #from pdb import set_trace;set_trace()
1322            for field in self.schema():
1323                data[field] = getattr(object,field,None)
1324            certificate_id = object.aq_parent.getId()
1325            data[self.key] = certificate_id
1326            ai = rpl.index('academics')
1327            data['faculty'] = rpl[ai +1]
1328            data['department'] = rpl[ai +2]
1329            self.modifyRecord(**data)
1330    ###)
1331
1332
1333InitializeClass(CertificatesCatalog)
1334###)
1335
1336class CoursesCatalog(WAeUPTable): ###(
1337    security = ClassSecurityInfo()
1338
1339    meta_type = 'WAeUP Courses Catalog'
1340    name =  "courses_catalog"
1341    key = "code"
1342    def __init__(self,name=None):
1343        if name ==  None:
1344            name =  self.name
1345        WAeUPTable.__init__(self, name)
1346
1347    def manage_catalogReindex(self, REQUEST, RESPONSE, URL1): ###(
1348        """ clear the catalog, then re-index everything """
1349
1350        elapse = time.time()
1351        c_elapse = time.clock()
1352
1353        pgthreshold = self._getProgressThreshold()
1354        handler = (pgthreshold > 0) and ZLogHandler(pgthreshold) or None
1355        self.refreshCatalog(clear=1, pghandler=handler)
1356
1357        elapse = time.time() - elapse
1358        c_elapse = time.clock() - c_elapse
1359
1360        RESPONSE.redirect(
1361            URL1 +
1362            '/manage_catalogAdvanced?manage_tabs_message=' +
1363            urllib.quote('Catalog Updated \n'
1364                         'Total time: %s\n'
1365                         'Total CPU time: %s' % (`elapse`, `c_elapse`)))
1366    ###)
1367
1368    def reindexIndex(self, name, REQUEST,pghandler=None): ###(
1369        if isinstance(name, str):
1370            name = (name,)
1371        courses = self.portal_catalog(portal_type="Course")
1372        num_objects = len(courses)
1373        if pghandler:
1374            pghandler.init('Refreshing catalog: %s' % self.absolute_url(1), num_objects)
1375        for i in xrange(num_objects):
1376            if pghandler: pghandler.report(i)
1377            course_brain = courses[i]
1378            course_object = course_brain.getObject()
1379            pl = course_brain.getPath().split('/')
1380            data = {}
1381            cid = data[self.key] = course_brain.getId
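            # the course path apparently has the form .../academics/<faculty>/<department>/courses/<course_id>,
            # so pl[-4] and pl[-3] are the faculty and department ids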
1382            data['faculty'] = pl[-4]
1383            data['department'] = pl[-3]
1384            doc = course_object.getContent()
1385            for field in name:
1386                if field not in (self.key,'faculty','department'):
1387                    data[field] = getattr(doc,field,None)
1388            self.modifyRecord(**data)
1389        if pghandler: pghandler.finish()
1390    ###)
1391
1392    def refreshCatalog(self, clear=0, pghandler=None): ###(
1393        """ re-index everything we can find """
1394        if clear:
1395            self._catalog.clear()
1396        courses = self.portal_catalog(portal_type="Course")
1397        num_objects = len(courses)
1398        if pghandler:
1399            pghandler.init('Refreshing catalog: %s' % self.absolute_url(1), num_objects)
1400        #from pdb import set_trace;set_trace()
1401        for i in xrange(num_objects):
1402            if pghandler: pghandler.report(i)
1403            course_brain = courses[i]
1404            course_doc = course_brain.getObject().getContent()
1405            pl = course_brain.getPath().split('/')
1406            data = {}
1407            for field in self.schema():
1408                data[field] = getattr(course_doc,field,None)
1409            data[self.key] = course_brain.getId
1410            ai = pl.index('academics')
1411            data['faculty'] = pl[ai +1]
1412            data['department'] = pl[ai +2]
1413            if clear:
1414                self.addRecord(**data)
1415            else:
1416                self.modifyRecord(**data)
1417        if pghandler: pghandler.finish()
1418    ###)
1419
1420    security.declarePrivate('notify_event_listener') ###(
1421    def notify_event_listener(self,event_type,object,infos):
1422        "listen for events"
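        # Same bookkeeping as CertificatesCatalog.notify_event_listener,
        # applied to Course objects instead of Certificates.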
1423        if not infos.has_key('rpath'):
1424            return
1425        pt = getattr(object,'portal_type',None)
1426        mt = getattr(object,'meta_type',None)
1427        if pt != 'Course':
1428            return
1429        data = {}
1430        rpl = infos['rpath'].split('/')
1431        if event_type not in ("sys_add_object","sys_modify_object","sys_del_object"):
1432            return
1433        course_id = object.getId()
1434        data[self.key] = course_id
1435        if event_type == "sys_add_object" and mt == 'CPS Proxy Folder':
1436            try:
1437                self.addRecord(**data)
1438            except ValueError:
1439                return
1440            course_id = object.getId()
1441            doc = object.getContent()
1442            if doc is None:
1443                return
1444            for field in self.schema():
1445                data[field] = getattr(doc,field,None)
1446            data[self.key] = course_id
1447            ai = rpl.index('academics')
1448            data['faculty'] = rpl[ai +1]
1449            data['department'] = rpl[ai +2]
1450            self.modifyRecord(**data)
1451            return
1452        if event_type == "sys_del_object":
1453            self.deleteRecord(course_id)
1454            return
1455        if event_type == "sys_modify_object" and mt == 'Course':
1456            #from pdb import set_trace;set_trace()
1457            for field in self.schema():
1458                data[field] = getattr(object,field,None)
1459            course_id = object.aq_parent.getId()
1460            data[self.key] = course_id
1461            ai = rpl.index('academics')
1462            data['faculty'] = rpl[ai +1]
1463            data['department'] = rpl[ai +2]
1464            self.modifyRecord(**data)
1465    ###)
1466
1467
1468InitializeClass(CoursesCatalog)
1469###)
1470
1471class CourseResults(WAeUPTable): ###(
1472    security = ClassSecurityInfo()
1473
1474    meta_type = 'WAeUP Results Catalog'
1475    name = "course_results"
1476    key = "key" # composite key: "<student_id>|<level_id>|<course_id>"
1477    def __init__(self,name=None):
1478        if name ==  None:
1479            name = self.name
1480        WAeUPTable.__init__(self, name)
1481        self._queue = []
1482
1483    def addMultipleRecords(self, records): ###(
1484        """add many records"""
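        # Each record dict is expected to provide at least student_id, level_id and
        # course_id; the catalog uid is the composite "<student_id>|<level_id>|<course_id>".
        # uids that already exist with matching ids are skipped and returned to the caller.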
1485        existing_uids = []
1486        for data in records:
1487            uid = "%(student_id)s|%(level_id)s|%(course_id)s" % data
1488            data[self.key] = uid
1489            query = Eq(self.key, uid)
1490            res = self.course_results.evalAdvancedQuery(query)
1491            if len(res) > 0:
1492                rec = res[0]
1493                equal = True
1494                for attr in ('student_id','level_id','course_id'):
1495                    if getattr(rec,attr,'') != data[attr]:
1496                        equal = False
1497                        break
1498                if equal:
1499                    existing_uids.append(uid)
1500                    continue
1501            self.catalog_object(dict2ob(data), uid=uid)
1502        return existing_uids
1503    ###)
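    # A minimal usage sketch for addMultipleRecords (hypothetical ids and values;
    # real import scripts supply further schema fields such as score or credits),
    # assuming a Script (Python) with acquisition access to the course_results tool:
    #
    #   records = [
    #       {'student_id': 'A123456', 'level_id': '200', 'course_id': 'MTH101'},
    #       {'student_id': 'A123456', 'level_id': '200', 'course_id': 'PHY101'},
    #   ]
    #   skipped_uids = context.course_results.addMultipleRecords(records)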
1504
1505    def deleteResultsHere(self,level_id,student_id): ###(
1506        query = Eq('student_id',student_id) & Eq('level_id', level_id)
1507        course_results = self.course_results.evalAdvancedQuery(query)
1508        #import pdb;pdb.set_trace()
1509        for result in course_results:
1510            self.deleteRecord(result.key)
1511    ###)
1512
1513    def moveResultsHere(self,level,student_id): ###(
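        # Copies the course result documents stored inside a student's level folder
        # into this flat catalog and then deletes them from the level. An id ending
        # in '_co' marks a carry-over course; the suffix is stripped from the course
        # code. Results whose code already exists in the catalog for this level are
        # not re-added.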
1514        #import pdb;pdb.set_trace()
1515        level_id = level.getId()
1516        query = Eq('student_id',student_id) & Eq('level_id', level_id)
1517        course_results = self.course_results.evalAdvancedQuery(query)
1518        existing_courses = [cr.code for cr in course_results]
1519        to_delete = []
1520        for code,obj in level.objectItems():
1521            to_delete.append(code)
1522            carry_over = False
1523            if code.endswith('_co'):
1524                carry_over = True
1525                code  = code[:-3]
1526            if code in existing_courses:
1527                continue
1528            course_result_doc = obj.getContent()
1529            data = {}
1530            course_id = code
1531            for field in self.schema():
1532                data[field] = getattr(course_result_doc,field,'')
1533            data['key'] = key = "%(student_id)s|%(level_id)s|%(course_id)s" % vars()
1534            data['student_id'] = student_id
1535            data['level_id'] = level_id
1536            session_id = self.getLevelSession(level.getContent(),student_id,level_id)
1537            data['session_id'] = session_id
1538            #data['queue_status'] = OBJECT_CREATED
1539            data['code'] = course_id
1540            data['carry_over'] = carry_over
1541            self.catalog_object(dict2ob(data), uid=key)
1542        level.manage_delObjects(to_delete)
1543    ###)
1544
1545    def getCourses(self,student_id,level_id): ###(
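        # Returns (total_credits, gpa, carry_overs, first_semester, second_semester, other)
        # for one student level. 'gpa' as accumulated here is the sum of weight * credits;
        # dividing by total_credits (presumably done by the caller) yields the actual GPA.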
1546        query = Eq('student_id',student_id) & Eq('level_id', level_id)
1547        course_results = self.course_results.evalAdvancedQuery(query)
1548        carry_overs = []
1549        normal1 = []
1550        normal2 = []
1551        normal3 = []
1552        total_credits = 0
1553        gpa = 0
1554        for brain in course_results:
1555            d = {}
1556
1557            for field in self.schema():
1558                d[field] = getattr(brain,field,'')
1559
1560            d['weight'] = ''
1561            d['grade'] = ''
1562            d['score'] = ''
1563
1564            if str(brain.credits).isdigit():
1565                credits = int(brain.credits)
1566                total_credits += credits
1567                score = getattr(brain,'score',0)
1568                if score and str(score).isdigit() and int(score) > 0:
1569                    score = int(score)
1570                    grade,weight = self.getGradesFromScore(score)
1571                    gpa += weight * credits
1572                    d['weight'] = weight
1573                    d['grade'] = grade
1574                    d['score'] = score
1575            d['coe'] = ''
1576            if brain.core_or_elective:
1577                d['coe'] = 'Core'
1578            elif brain.core_or_elective == False:
1579                d['coe'] = 'Elective'
1580            code = d['id'] = brain.code
1581            d['code'] = code
1582            res = self.courses_catalog.evalAdvancedQuery(Eq('code',code))
1583            if res:
1584                course = res[0]
1585                d['title'] = course.title
1586                # The courses_catalog contains a mix of strings and integers in its semester field,
1587                # although the Course schema declares it as 'CPS Int Field'; reindexing the catalog might fix this.
1588                d['semester'] = str(course.semester)
1589            else:
1590                d['title'] = "Course has been removed from course list"
1591                d['semester'] = ''
1592            if brain.carry_over:
1593                d['coe'] = 'CO'
1594                carry_overs.append(d)
1595            else:
1596                if d['semester'] == '1':
1597                    normal1.append(d)
1598
1599                elif d['semester'] == '2':
1600                    normal2.append(d)
1601                else:
1602                    normal3.append(d)
1603        #normal.sort(cmp=lambda x,y: cmp("%(semester)s%(code)s" % x,
1604        #                                "%(semester)s%(code)s" % y))
1605        carry_overs.sort(cmp=lambda x,y: cmp("%(semester)s%(code)s" % x,
1606                                             "%(semester)s%(code)s" % y))
1607        return total_credits,gpa,carry_overs,normal1,normal2,normal3
1608    ###)
1609
1610InitializeClass(CourseResults)
1611###)
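# Usage sketch for reading a student's results (hypothetical ids, assuming a
# Script (Python) with acquisition access to the course_results tool):
#
#   total_credits, gpa, carry_overs, sem1, sem2, other = \
#       context.course_results.getCourses('A123456', '200')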
1612
1613class OnlinePaymentsImport(WAeUPTable): ###(
1614
1615    meta_type = 'WAeUP Online Payment Transactions'
1616    name = "online_payments_import"
1617    key = "order_id"
1618    def __init__(self,name=None):
1619        if name ==  None:
1620            name = self.name
1621        WAeUPTable.__init__(self, name)
1622
1623
1624InitializeClass(OnlinePaymentsImport)
1625###)
1626
1627class ReturningImport(WAeUPTable): ###(
1628
1629    meta_type = 'Returning Import Table'
1630    name = "returning_import"
1631    key = "matric_no"
1632    def __init__(self,name=None):
1633        if name ==  None:
1634            name = self.name
1635        WAeUPTable.__init__(self, name)
1636
1637
1638InitializeClass(ReturningImport)
1639###)
1640
1641class ResultsImport(WAeUPTable): ###(
1642
1643    meta_type = 'Results Import Table'
1644    name = "results_import"
1645    key = "key"
1646    def __init__(self,name=None):
1647        if name ==  None:
1648            name = self.name
1649        WAeUPTable.__init__(self, name)
1650
1651
1652InitializeClass(ResultsImport)
1653
1654###)
1655
1656class PaymentsCatalog(WAeUPTable): ###(
1657    security = ClassSecurityInfo()
1658
1659    meta_type = 'WAeUP Payments Catalog'
1660    name = "payments_catalog"
1661    key = "order_id"
1662    def __init__(self,name=None):
1663        if name ==  None:
1664            name = self.name
1665        WAeUPTable.__init__(self, name)
1666
1667
1668    security.declarePrivate('notify_event_listener') ###(
1669    def notify_event_listener(self,event_type,object,infos):
1670        "listen for events"
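        # Mirrors Payment objects into this catalog, keyed by order_id:
        #  - sys_del_object on the CPS Proxy Folder removes the record
        #  - sys_modify_object on the Payment doc updates (or, failing that, adds) the record
        # student_id is derived from the payment's rpath.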
1671        if not infos.has_key('rpath'):
1672            return
1673        pt = getattr(object,'portal_type',None)
1674        mt = getattr(object,'meta_type',None)
1675        data = {}
1676        if pt != 'Payment':
1677            return
1678        if event_type == 'sys_del_object' and mt == 'CPS Proxy Folder':
1679            self.deleteRecord(object.getContent().order_id)
1680        if mt == 'CPS Proxy Folder':
1681            return # is handled only for the real object
1682        if event_type != 'sys_modify_object':
1683            return
1684        for field in self.schema():
1685            data[field] = getattr(object,field,'')
1686        rpl = infos['rpath'].split('/')
1687        #import pdb;pdb.set_trace()
1688        student_id = rpl[-4]
1689        data['student_id'] = student_id
1690        modified = False
1691        try:
1692            self.modifyRecord(**data)
1693            modified = True
1694        except KeyError:
1695            #logger = logging.getLogger('WAeUPTables.PaymentsCatalog.%s' % self.__name__)
1696            #logger.info("could not modify entry for %(student_id)s with %(order_id)s" % data)
1697            pass
1698        if not modified:
1699            try:
1700                self.addRecord(**data)
1701            except:
1702                logger = logging.getLogger('WAeUPTables.PaymentsCatalog.notify_event_listener')
1703                logger.info("could not add or modify entry for %(student_id)s with %(order_id)s" % data)
1704    ###)
1705
1706
1707InitializeClass(PaymentsCatalog)
1708
1709###)
1710
1711# BBB:
1712AccomodationTable = AccommodationTable