source: WAeUP_SRP/trunk/WAeUPTables.py @ 3777

Last change on this file since 3777 was 3772, checked in by Henrik Bettermann, 16 years ago

enable random bed booking order

  • Property svn:keywords set to Id
File size: 66.9 KB
1#-*- mode: python; mode: fold -*-
2# (C) Copyright 2005 AixtraWare <http://aixtraware.de>
3# Author: Joachim Schmitz <js@aixtraware.de>
4#
5# This program is free software; you can redistribute it and/or modify
6# it under the terms of the GNU General Public License version 2 as published
7# by the Free Software Foundation.
8#
9# This program is distributed in the hope that it will be useful,
10# but WITHOUT ANY WARRANTY; without even the implied warranty of
11# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
12# GNU General Public License for more details.
13#
14# You should have received a copy of the GNU General Public License
15# along with this program; if not, write to the Free Software
16# Foundation, Inc., 59 Temple Place - Suite 330, Boston, MA
17# 02111-1307, USA.
18#
19# $Id: WAeUPTables.py 3772 2008-11-17 16:55:29Z henrik $
20
21from zope.interface import implements
22from Globals import InitializeClass
23from Products.ZCatalog.ZCatalog import ZCatalog
24from Products.ZCatalog.ProgressHandler import ZLogHandler
25from AccessControl import ClassSecurityInfo
26from Products.CMFCore.permissions import ModifyPortalContent
27from Products.CMFCore.utils import getToolByName
28from Products.CMFCore.CatalogTool import CatalogTool
29from Products.CPSSchemas.StorageAdapter import MappingStorageAdapter
30from Products.CPSSchemas.DataStructure import DataStructure
31from Products.CPSSchemas.DataModel import DataModel
32from Products.AdvancedQuery import Eq, Between, Le,In
33import urllib
34import DateTime,time
35import csv,re,os
36import logging
37import Globals
38p_home = Globals.package_home(globals())
39i_home = Globals.INSTANCE_HOME
40
41ADDING_SHEDULED = "adding_sheduled"
42OBJECT_CREATED = "object_created"
43NOT_OCCUPIED = 'not_occupied'
44
45from interfaces import IWAeUPTable
46
47class AttributeHolder(object):
48    pass
49
50def dict2ob(d):
51    ob = AttributeHolder()
52    for key, value in d.items():
53        setattr(ob, key, value)
54    return ob
55
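# WAeUPTable wraps a ZCatalog and treats it as a flat record table: every
# record is catalogued under the value of the class attribute 'key' (the uid),
# and subclasses only need to provide 'meta_type', 'name' and 'key'.
# A minimal usage sketch, assuming a table instance is already set up in the
# portal (field names below are illustrative only):
#
#   table.addRecord(bed='A_101_A', bed_type='re_male_all', student=NOT_OCCUPIED)
#   rec = table.getRecordByKey('A_101_A')
#   table.modifyRecord(bed='A_101_A', student='X123456')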
56class WAeUPTable(ZCatalog): ###(
57
58    implements(IWAeUPTable)
59    security = ClassSecurityInfo()
60    meta_type = None
61
62    def __init__(self,name=None):
63        if name ==  None:
64            name = self.name
65        ZCatalog.__init__(self,name)
66
67    def refreshCatalog(self, clear=0, pghandler=None): ###(
68        """ don't refresh for a normal table """
69
70        if self.REQUEST and self.REQUEST.RESPONSE:
71            self.REQUEST.RESPONSE.redirect(
72              URL1 +
73              '/manage_catalogAdvanced?manage_tabs_message=Catalog%20refresh%20not%20implemented')
74
75###)
76
77    def manage_catalogClear(self, REQUEST=None, RESPONSE=None, URL1=None): ###(
78        """ clears the whole enchilada """
79
80        #if REQUEST and RESPONSE:
81        #    RESPONSE.redirect(
82        #      URL1 +
83        #      '/manage_catalogAdvanced?manage_tabs_message=Catalog%20Clearing%20disabled')
84
85        self._catalog.clear()
86        if REQUEST and RESPONSE:
87            RESPONSE.redirect(
88              URL1 +
89              '/manage_catalogAdvanced?manage_tabs_message=Catalog%20cleared')
90
91###)
92
93    def record2dict(self,fields,record): ###(
94        d = {}
95        for key in fields:
96            v = getattr(record, key, None)
97            v_dump = v
98            if key == 'sex':
99                if v == True:
100                    v_dump = 'F'
101                elif v == False:
102                    v_dump = 'M'
103                d[key] = v_dump
104            elif v:
105                if key == 'lga':
106                    v_dump = self.portal_vocabularies.local_gov_areas.get(v)
107                    if not v_dump:
108                        v_dump = v
109                elif key == 'aos':
110                    v_dump = self.portal_vocabularies.aos.get(v)
111                d[key] = v_dump
112            else:
113                d[key] = ''
114        return d
115
116###)
117
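    # Contract of the record methods below: addRecord raises ValueError if a
    # record with the same uid is already catalogued, modifyRecord raises
    # KeyError if no record exists for the uid, deleteRecord simply uncatalogs
    # the uid, and getRecordByKey returns the first matching brain or None.
    # searchAndSetRecord is left to subclasses (see PinTable).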
118    def addRecord(self, **data): ###(
119        # The uid is the value of the table's key field (e.g. "bed" for the accommodation table).
120        uid = data[self.key]
121        res = self.searchResults({"%s" % self.key : uid})
122        if len(res) > 0:
123            raise ValueError("More than one record with uid %s" % uid)
124        self.catalog_object(dict2ob(data), uid=uid)
125        return uid
126
127###)
128
129    def deleteRecord(self, uid):
130        self.uncatalog_object(uid)
131
132    def getRecordByKey(self,key):
133        if not key:
134            return None
135        res = self.evalAdvancedQuery(Eq(self.key,key))
136        if res:
137            return res[0]
138        return None
139
140    def searchAndSetRecord(self, **data):
141        raise NotImplementedError
142
143    def modifyRecord(self, record=None, **data): ###(
144        #records = self.searchResults(uid=uid)
145        uid = data[self.key]
146        if record is None:
147            records = self.searchResults({"%s" % self.key : uid})
148            if len(records) > 1:
149                # Can not happen, but anyway...
150                raise ValueError("More than one record with uid %s" % uid)
151            if len(records) == 0:
152                raise KeyError("No record for uid %s" % uid)
153            record = records[0]
154        record_data = {}
155        for field in self.schema() + self.indexes():
156            record_data[field] = getattr(record, field)
157        # Add the updated data:
158        record_data.update(data)
159        self.catalog_object(dict2ob(record_data), uid)
160
161###)
162
163    def reindexIndex(self, name, REQUEST,pghandler=None): ###(
164        if isinstance(name, str):
165            name =  (name,)
166        paths = self._catalog.uids.items()
167        i = 0
168        #import pdb;pdb.set_trace()
169        for p,rid in paths:
170            i += 1
171            metadata = self.getMetadataForRID(rid)
172            record_data = {}
173            for field in name:
174                record_data[field] = metadata.get(field)
175            uid = metadata.get(self.key)
176            self.catalog_object(dict2ob(record_data), uid, idxs=name,
177                                update_metadata=0)
178
179###)
180
181    security.declareProtected(ModifyPortalContent,"exportAllRecords") ###(
182    def exportAllRecords(self):
183        "export a WAeUPTable"
184        #import pdb;pdb.set_trace()
185        fields = [field for field in self.schema()]
186        format = ','.join(['"%%(%s)s"' % fn for fn in fields])
187        csv = []
188        csv.append(','.join(['"%s"' % fn for fn in fields]))
189        for uid in self._catalog.uids:
190            records = self.searchResults({"%s" % self.key : uid})
191            if len(records) > 1:
192                # Can not happen, but anyway...
193                raise ValueError("More than one record with uid %s" % uid)
194            if len(records) == 0:
195                raise KeyError("No record for uid %s" % uid)
196            rec = records[0]
197            csv.append(format % rec)
198        current = DateTime.DateTime().strftime("%d-%m-%y_%H_%M_%S")
199        open("%s/import/%s-%s.csv" % (i_home,self.getId(),current),"w+").write('\n'.join(csv))
200
201###)
202
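    # dumpAll streams the table to <instance>/export/<name>_<timestamp>.csv via
    # csv.DictWriter, writing in chunks of 2000 records and logging throughput
    # and an estimated finishing time after every chunk.  When both 'index' and
    # 'value' are given, only records matching Eq(index, value) are exported;
    # otherwise the whole catalog is dumped.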
203    security.declareProtected(ModifyPortalContent,"dumpAll")###(
204    def dumpAll(self,index=None,value=None):
205        """dump all data in the table to a csv"""
206        member = self.portal_membership.getAuthenticatedMember()
207        logger = logging.getLogger('WAeUPTables.WAeUPTable.dumpAll')
208        current = DateTime.DateTime().strftime("%d-%m-%y_%H_%M_%S")
209        export_file = "%s/export/%s_%s.csv" % (i_home,self.__name__,current,)
210        res_list = []
211        lines = []
212        if hasattr(self,"export_keys"):
213            fields = self.export_keys
214        else:
215            fields = []
216            for f in self.schema():
217                fields.append(f)
218        headline = ','.join(fields)
219        out = open(export_file,"wb")
220        out.write(headline +'\n')
221        out.close()
222        out = open(export_file,"a")
223        csv_writer = csv.DictWriter(out,fields,)
224        if index is not None and value is not None:
225            records = self.evalAdvancedQuery(Eq(index,value))
226        else:
227            records = self()
228        nr2export = len(records)
229        logger.info('%s starts dumping, %s records to export' % (member,nr2export))
230        chunk = 2000
231        total = 0
232        start = DateTime.DateTime().timeTime()
233        start_chunk = DateTime.DateTime().timeTime()
234        for record in records:
235            not_all = False
236            d = self.record2dict(fields,record)
237            lines.append(d)
238            total += 1
239            if total and not total % chunk or total == len(records):
240                csv_writer.writerows(lines)
241                anz = len(lines)
242                logger.info("wrote %(anz)d  total written %(total)d" % vars())
243                end_chunk = DateTime.DateTime().timeTime()
244                duration = end_chunk-start_chunk
245                per_record = duration/anz
246                till_now = end_chunk - start
247                average_per_record = till_now/total
248                estimated_end = DateTime.DateTime(start + average_per_record * nr2export)
249                estimated_end = estimated_end.strftime("%H:%M:%S")
250                logger.info('%(duration)4.1f, %(per_record)4.3f,end %(estimated_end)s' % vars())
251                start_chunk = DateTime.DateTime().timeTime()
252                lines = []
253        end = DateTime.DateTime().timeTime()
254        logger.info('total time %6.2f m' % ((end-start)/60))
255        import os
256        filename, extension = os.path.splitext(export_file)
257        from subprocess import call
258        msg = "wrote %(total)d records to %(export_file)s" % vars()
259        #try:
260        #    retcode = call('gzip %s' % (export_file),shell=True)
261        #    if retcode == 0:
262        #        msg = "wrote %(total)d records to %(export_file)s.gz" % vars()
263        #except OSError, e:
264        #    retcode = -99
265        #    logger.info("zip failed with %s" % e)
266        logger.info(msg)
267        args = {'portal_status_message': msg}
268        #url = self.REQUEST.get('URL1') + '?' + urlencode(args)
269        url = self.REQUEST.get('URL2')
270        return self.REQUEST.RESPONSE.redirect(url)
271    ###)
272
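    # _import_old reads <instance>/import/<filename>.csv, validates every row
    # against the CPS layout widgets of the given schema/layout pair and splits
    # the rows into valid_records (DataModel objects) and invalid_records.
    # Rejected rows are appended, together with the error string, to
    # <filename>_not_imported<timestamp>.csv; accepted rows go to
    # <filename>_imported<timestamp>.csv.  The returned dict carries the
    # counters and file names for the caller (see importCSV below).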
273    security.declarePrivate("_import_old") ###(
274    def _import_old(self,filename,schema,layout, mode,logger):
275        "import data from csv"
276        import transaction
277        import random
278        pm = self.portal_membership
279        member = pm.getAuthenticatedMember()
280        current = DateTime.DateTime().strftime("%d-%m-%y_%H_%M_%S")
281        import_fn = "%s/import/%s.csv" % (i_home,filename)
282        imported_fn = "%s/import/%s_imported%s.csv" % (i_home,filename,current)
283        not_imported_fn = "%s/import/%s_not_imported%s.csv" % (i_home,filename,current)
284        start = True
285        tr_count = 1
286        total_imported = 0
287        total_not_imported = 0
288        total = 0
289        iname =  "%s" % filename
290        not_imported = []
291        imported = []
292        valid_records = []
293        invalid_records = []
294        d = {}
295        d['mode'] = mode
296        d['imported'] = total_imported
297        d['not_imported'] = total_not_imported
298        d['valid_records'] = valid_records
299        d['invalid_records'] = invalid_records
300        d['import_fn'] = import_fn
301        d['imported_fn'] = imported_fn
302        d['not_imported_fn'] = not_imported_fn
303        if schema is None:
304            em = 'No schema specified'
305            logger.error(em)
306            return d
307        if layout is None:
308            em = 'No layout specified'
309            logger.error(em)
310            return d
311        validators = {}
312        for widget in layout.keys():
313            try:
314                validators[widget] = layout[widget].validate
315            except AttributeError:
316                logger.info('%s has no validate attribute' % widget)
317                return d
318        # if mode == 'edit':
319        #     importer = self.importEdit
320        # elif mode == 'add':
321        #     importer = self.importAdd
322        # else:
323        #     importer = None
324        try:
325            items = csv.DictReader(open(import_fn,"rb"),
326                                   dialect="excel",
327                                   skipinitialspace=True)
328        except:
329            em = 'Error reading %s.csv' % filename
330            logger.error(em)
331            return d
332        #import pdb;pdb.set_trace()
333        for item in items:
334            if start:
335                start = False
336                logger.info('%s starts import from %s.csv' % (member,filename))
337                #import_keys = [k for k in item.keys() if not k.startswith('ignore')]
338                attrs = csv.reader(open("%s/import/%s.csv" % (i_home,filename),"rb"),
339                                   dialect="excel",
340                                   skipinitialspace=True).next()
341                import_keys = [k for k in attrs if not (k.startswith('ignore') or k.isupper())]
342                diff2schema = set(import_keys).difference(set(schema.keys()))
343                diff2layout = set(import_keys).difference(set(layout.keys()))
344                if diff2layout:
345                    em = "not ignorable key(s) %s found in heading" % diff2layout
346                    logger.info(em)
347                    return d
348                s = ','.join(['"%s"' % fn for fn in import_keys])
349                open(not_imported_fn,"a").write(s + ',"Error"'+ '\n')
350                #s = '"id",' + s
351                open(imported_fn,"a").write(s + '\n')
352                format = ','.join(['"%%(%s)s"' % fn for fn in import_keys])
353                format_error = format + ',"%(Error)s"'
354                #format = '"%(id)s",'+ format
355                adapters = [MappingStorageAdapter(schema, item)]
356            dm = DataModel(item, adapters,context=self)
357            ds = DataStructure(data=item,datamodel=dm)
358            error_string = ""
359            #import pdb;pdb.set_trace()
360            for k in import_keys:
361                if not validators[k](ds,mode=mode):
362                    error_string += " %s : %s" % (k,ds.getError(k))
363            # if not error_string and importer:
364            #     item.update(dm)
365            #     item['id'],error = importer(item)
366            #     if error:
367            #         error_string += error
368            if error_string:
369                item['Error'] = error_string
370                invalid_records.append(dm)
371                not_imported.append(format_error % item)
372                total_not_imported += 1
373            else:
374                em = format % item
375                valid_records.append(dm)
376                imported.append(em)
377                #logger.info("%(total_imported)d of %(total)d %(em)s" % vars())
378                tr_count += 1
379                total_imported += 1
380            total += 1
381        if len(imported) > 0:
382            open(imported_fn,"a").write('\n'.join(imported))
383        if len(not_imported) > 0:
384            open(not_imported_fn,"a").write('\n'.join(not_imported))
385        #em = "Imported: %d, not imported: %d of total %d" % (total_imported,total_not_imported,total)
386        d['imported'] = total_imported
387        d['not_imported'] = total_not_imported
388        d['valid_records'] = valid_records
389        d['invalid_records'] = invalid_records
390        d['imported_fn'] = imported_fn
391        d['not_imported_fn'] = not_imported_fn
392        #logger.info(em)
393        return d
394    ###)
395
396    security.declarePrivate("_import") ###(
397    def _import_new(self,csv_items,schema, layout, mode,logger):
398        "import data from csv.Dictreader Instance"
399        start = True
400        tr_count = 1
401        total_imported = 0
402        total_not_imported = 0
403        total = 0
404        #iname =  "%s" % filename
405        not_imported = []
406        valid_records = []
407        invalid_records = []
408        duplicate_records = []
409        d = {}
410        d['mode'] = mode
411        d['valid_records'] = valid_records
412        d['invalid_records'] = invalid_records
413        d['duplicate_records'] = duplicate_records
414        # d['import_fn'] = import_fn
415        # d['imported_fn'] = imported_fn
416        # d['not_imported_fn'] = not_imported_fn
417        validators = {}
418        for widget in layout.keys():
419            try:
420                validators[widget] = layout[widget].validate
421            except AttributeError:
422                logger.info('%s has no validate attribute' % widget)
423                return d
424        for item in csv_items:
425            if start:
426                start = False
427                logger.info('%s starts import' % self.portal_membership.getAuthenticatedMember())
428                #import_keys = [k for k in item.keys() if not k.startswith('ignore')]
429                attrs = item.keys()
430                import_keys = [k for k in attrs if not (k.startswith('ignore') or k.isupper())]
431                diff2schema = set(import_keys).difference(set(schema.keys()))
432                diff2layout = set(import_keys).difference(set(layout.keys()))
433                if diff2layout:
434                    em = "not ignorable key(s) %s found in heading" % diff2layout
435                    logger.info(em)
436                    return d
437                # s = ','.join(['"%s"' % fn for fn in import_keys])
438                # open(not_imported_fn,"a").write(s + ',"Error"'+ '\n')
439                # #s = '"id",' + s
440                # open(imported_fn,"a").write(s + '\n')
441                # format = ','.join(['"%%(%s)s"' % fn for fn in import_keys])
442                # format_error = format + ',"%(Error)s"'
443                # #format = '"%(id)s",'+ format
444                adapters = [MappingStorageAdapter(schema, item)]
445            dm = DataModel(item, adapters,context=self)
446            ds = DataStructure(data=item,datamodel=dm)
447            error_string = ""
448            for k in import_keys:
449                if not validators[k](ds,mode=mode):
450                    error_string += " %s : %s" % (k,ds.getError(k))
451            if error_string:
452                item['Error'] = error_string
453                #invalid_records.append(dm)
454                invalid_records.append(item)
455                total_not_imported += 1
456            else:
457                #em = format % item
458                valid_records.append(dm)
459                #logger.info("%(total_imported)d of %(total)d %(em)s" % vars())
460                tr_count += 1
461                total_imported += 1
462            total += 1
463        # if len(imported) > 0:
464        #     open(imported_fn,"a").write('\n'.join(imported))
465        # if len(not_imported) > 0:
466        #     open(not_imported_fn,"a").write('\n'.join(not_imported))
467        #em = "Imported: %d, not imported: %d of total %d" % (total_imported,total_not_imported,total)
468        d['imported'] = total_imported
469        d['not_imported'] = total_not_imported
470        d['valid_records'] = valid_records
471        d['invalid_records'] = invalid_records
472        return d
473    ###)
474
475    security.declarePublic("missingValue")###(
476    def missingValue(self):
477        from Missing import MV
478        return MV
479    ###)
480###)
481
482class AccommodationTable(WAeUPTable): ###(
483
484    meta_type = 'WAeUP Accommodation Tool'
485    name = "portal_accommodation"
486    key = "bed"
487    not_occupied = NOT_OCCUPIED
488    def __init__(self,name=None):
489        if name ==  None:
490            name = self.name
491        WAeUPTable.__init__(self, name)
492
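    # searchAndReserveBed books one free bed of the requested bed_type for
    # student_id and returns a (status, bed) tuple:
    #    1, bed  -- bed successfully reserved
    #   -1, bed  -- the student already holds a reservation
    #   -2, ...  -- no free bed of this type
    #   -3, ...  -- more than one existing reservation (data error)
    # With random_order=True a free bed is picked at random instead of the
    # first one in sort_id/bed order (see r3772, "enable random bed booking
    # order").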
493    def searchAndReserveBed(self, student_id,bed_type,random_order=False): ###(
494        logger = logging.getLogger('WAeUPTables.AccommodationTable.searchAndReserveBed')
495        records = self.evalAdvancedQuery(Eq('student',student_id))
496        if len(records) == 1:
497            #return -1,"Student with Id %s already booked bed %s." % (student_id,records[0].bed)
498            logger.info('%s found (reserved) bed %s' % (student_id,records[0].bed))
499            return -1,records[0].bed
500        elif len(records) > 1:
501            logger.info('%s found more than one (reserved) bed' % (student_id))
502            return -3,'more than one bed'
503        query = Eq('bed_type',bed_type) & Eq('student',NOT_OCCUPIED)
504        records = self.evalAdvancedQuery(query,sortSpecs=('sort_id','bed'))
505        if len(records) == 0:
506            logger.info('no bed %s available for %s' % (bed_type,student_id))
507            return -2,"no bed"
508        if random_order:
509            import random
510            bed_no = random.randint(0,len(records)-1)
511        else:
512            bed_no = 0
513        rec = records[bed_no]
514        self.modifyRecord(bed=rec.bed,student=student_id)
515        logger.info('%s booked bed %s' % (student_id,rec.bed))
516        return 1,rec.bed
517    ###)
518
519
520InitializeClass(AccommodationTable)
521
522###)
523
524class PinTable(WAeUPTable): ###(
525    from ZODB.POSException import ConflictError
526    security = ClassSecurityInfo()
527    meta_type = 'WAeUP Pin Tool'
528    name = "portal_pins"
529    key = 'pin'
530
531    def __init__(self,name=None):
532        if name ==  None:
533            name = self.name
534        WAeUPTable.__init__(self, name)
535
536    security.declareProtected(ModifyPortalContent,"dumpAll")###(
537    def dumpAll(self,include_unused=None):
538        """dump all data in the table to a csv"""
539        member = self.portal_membership.getAuthenticatedMember()
540        logger = logging.getLogger('WAeUPTables.PinTable.dumpAll')
541        current = DateTime.DateTime().strftime("%d-%m-%y_%H_%M_%S")
542        export_file = "%s/export/%s_%s.csv" % (i_home,self.__name__,current,)
543        res_list = []
544        lines = []
545        if hasattr(self,"export_keys"):
546            fields = self.export_keys
547        else:
548            fields = []
549            for f in self.schema():
550                fields.append(f)
551        headline = ','.join(fields)
552        out = open(export_file,"wb")
553        out.write(headline +'\n')
554        out.close()
555        out = open(export_file,"a")
556        csv_writer = csv.DictWriter(out,fields,)
557        if include_unused is not None and str(member) not in ('admin','joachim'):
558            logger.info('%s tries to dump pintable with unused pins' % (member))
559            return
560        if include_unused is not None:
561            records = self()
562        else:
563            records = self.evalAdvancedQuery(~Eq('student',''))
564        nr2export = len(records)
565        logger.info('%s starts dumping, %s records to export' % (member,nr2export))
566        chunk = 2000
567        total = 0
568        start = DateTime.DateTime().timeTime()
569        start_chunk = DateTime.DateTime().timeTime()
570        for record in records:
571            not_all = False
572            d = self.record2dict(fields,record)
573            lines.append(d)
574            total += 1
575            if total and not total % chunk or total == len(records):
576                csv_writer.writerows(lines)
577                anz = len(lines)
578                logger.info("wrote %(anz)d  total written %(total)d" % vars())
579                end_chunk = DateTime.DateTime().timeTime()
580                duration = end_chunk-start_chunk
581                per_record = duration/anz
582                till_now = end_chunk - start
583                average_per_record = till_now/total
584                estimated_end = DateTime.DateTime(start + average_per_record * nr2export)
585                estimated_end = estimated_end.strftime("%H:%M:%S")
586                logger.info('%(duration)4.1f, %(per_record)4.3f,end %(estimated_end)s' % vars())
587                start_chunk = DateTime.DateTime().timeTime()
588                lines = []
589        end = DateTime.DateTime().timeTime()
590        logger.info('total time %6.2f m' % ((end-start)/60))
591        import os
592        filename, extension = os.path.splitext(export_file)
593        from subprocess import call
594        msg = "wrote %(total)d records to %(export_file)s" % vars()
595        #try:
596        #    retcode = call('gzip %s' % (export_file),shell=True)
597        #    if retcode == 0:
598        #        msg = "wrote %(total)d records to %(export_file)s.gz" % vars()
599        #except OSError, e:
600        #    retcode = -99
601        #    logger.info("zip failed with %s" % e)
602        logger.info(msg)
603        args = {'portal_status_message': msg}
604        #url = self.REQUEST.get('URL1') + '?' + urlencode(args)
605        url = self.REQUEST.get('URL2')
606        return self.REQUEST.RESPONSE.redirect(url)
607    ###)
608
609
610
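    # searchAndSetRecord marks the pin 'uid' as used by 'student_id' and
    # returns a (status, record) tuple:
    #   -1, None   -- no such pin
    #    1, record -- pin was free and has been assigned to the student
    #    2, record -- pin already belongs to this student (or a write
    #                 conflict occurred while assigning it)
    #    0, record -- pin is already used by a different student
    #   -3, record -- fall-through, not normally reached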
611    def searchAndSetRecord(self, uid, student_id,prefix):
612
613        # The following line must be activated after resetting the
614        # the portal_pins table. This is to avoid duplicate entries
615        # and disable duplicate payments.
616
617        #student_id = student_id.upper()
618
619        #records = self.searchResults(student = student_id)
620        #if len(records) > 0 and prefix in ('CLR','APP'):
621        #    for r in records:
622        #        if r.pin != uid and r.prefix_batch.startswith(prefix):
623        #            return -2
624        records = self.searchResults({"%s" % self.key : uid})
625        if len(records) > 1:
626            # Can not happen, but anyway...
627            raise ValueError("More than one record with uid %s" % uid)
628        if len(records) == 0:
629            return -1,None
630        record = records[0]
631        if record.student == "":
632            record_data = {}
633            for field in self.schema() + self.indexes():
634                record_data[field] = getattr(record, field)
635            # Add the updated data:
636            record_data['student'] = student_id
637            try:
638                self.catalog_object(dict2ob(record_data), uid)
639                return 1,record
640            except ConflictError:
641                return 2,record
642        if record.student.upper() != student_id.upper():
643            return 0,record
644        if record.student.upper() == student_id.upper():
645            return 2,record
646        return -3,record
647InitializeClass(PinTable)
648###)
649
650class PumeResultsTable(WAeUPTable): ###(
651
652    meta_type = 'WAeUP PumeResults Tool'
653    name = "portal_pumeresults"
654    key = "jamb_reg_no"
655    def __init__(self,name=None):
656        if name ==  None:
657            name = self.name
658        WAeUPTable.__init__(self, name)
659
660
661InitializeClass(PumeResultsTable)
662
663###)
664
665class ApplicantsCatalog(WAeUPTable): ###(
666
667    meta_type = 'WAeUP Applicants Catalog'
668    name = "applicants_catalog"
669    key = "reg_no"
670    security = ClassSecurityInfo()
671    #export_keys = (
672    #               "reg_no",
673    #               "status",
674    #               "lastname",
675    #               "sex",
676    #               "date_of_birth",
677    #               "lga",
678    #               "email",
679    #               "phone",
680    #               "passport",
681    #               "entry_mode",
682    #               "pin",
683    #               "screening_type",
684    #               "registration_date",
685    #               "testdate",
686    #               "application_date",
687    #               "screening_date",
688    #               "faculty",
689    #               "department",
690    #               "course1",
691    #               "course2",
692    #               "course3",
693    #               "eng_score",
694    #               "subj1",
695    #               "subj1score",
696    #               "subj2",
697    #               "subj2score",
698    #               "subj3",
699    #               "subj3score",
700    #               "aggregate",
701    #               "course_admitted",
702    #               )
703
704    def __init__(self,name=None):
705        if name ==  None:
706            name = self.name
707        WAeUPTable.__init__(self, name)
708
709    security.declareProtected(ModifyPortalContent,"new_importCSV")###(
710    def new_importCSV(self,filename="JAMB_data",
711                  schema_id="application",
712                  layout_id="import_application",
713                  mode='add'):
714        """ import JAMB data """
715        current = DateTime.DateTime().strftime("%d-%m-%y_%H_%M_%S")
716        pm = self.portal_membership
717        member = pm.getAuthenticatedMember()
718        logger = logging.getLogger('WAeUPTables.ApplicantsCatalog.importCSV')
719        lock_fn = "%s/import/%s_import_lock" % (i_home,filename)
720        import_fn = "%s/import/%s.csv" % (i_home,filename)
721        if mode not in ('add','edit'):
722            logger.info("invalid mode: %s" % mode)
723        if os.path.exists(lock_fn):
724            logger.info("import of %(import_fn)s already in progress" % vars())
725            return
726        lock_file = open(lock_fn,"w")
727        lock_file.write("%(current)s \n" % vars())
728        lock_file.close()
729        invalid_fn = "%s/import/%s_not_imported%s.csv" % (i_home,filename,current)
730        duplicate_fn = "%s/import/%s_duplicates%s.csv" % (i_home,filename,current)
731        stool = getToolByName(self, 'portal_schemas')
732        ltool = getToolByName(self, 'portal_layouts')
733        schema = stool._getOb(schema_id)
734        if schema is None:
735            em = 'No such schema %s' % schema_id
736            logger.error(em)
737            return
738        for postfix in ('_import',''):
739            layout_name = "%(layout_id)s%(postfix)s" % vars()
740            if hasattr(ltool,layout_name):
741                break
742        layout = ltool._getOb(layout_name)
743        if layout is None:
744            em = 'No such layout %s' % layout_id
745            logger.error(em)
746            return
747        try:
748            csv_file = csv.DictReader(open(import_fn,"rb"))
749        except:
750            em = 'Error reading %s.csv' % filename
751            logger.error(em)
752            return
753        d = self._import_new(csv_file,schema,layout,mode,logger)
754        imported = []
755        edited = []
756        duplicates = []
757        not_found = []
758        if len(d['valid_records']) > 0:
759            for record in d['valid_records']:
760                #import pdb;pdb.set_trace()
761                if mode == "add":
762                    try:
763                        self.addRecord(**dict(record.items()))
764                        imported.append(dict(record.items()))
765                        logger.info("added %s" % record.items())
766                    except ValueError:
767                        duplicates.append(dict(record.items()))
768                        logger.info("duplicate %s" % record.items())
769                elif mode == "edit":
770                    try:
771                        self.modifyRecord(**dict(record.items()))
772                        edited.append(dict(record.items()))
773                        logger.info("edited %s" % record.items())
774                    except KeyError:
775                        not_found.append(dict(record.items()))
776                        logger.info("not found %s" % record.items())
777        invalid = d['invalid_records']
778        for itype in ("imported","edited","not_found","duplicates","invalid"):
779            outlist = locals()[itype]
780            if len(outlist):
781                d = {}
782                for k in outlist[0].keys():
783                    d[k] = k
784                outlist.insert(0,d)
785                outfile = open("file_name_%s" % itype,'w')
786                csv.DictWriter(outfile,d.keys()).writerows(outlist)
787                logger.info("wrote %d %s records" % (len(outlist) - 1,itype))
788###)
789
790    security.declareProtected(ModifyPortalContent,"importCSV")###(
791    def importCSV(self,filename="JAMB_data",
792                  schema_id="application",
793                  layout_id="application_pce",
794                  mode='add'):
795        """ import JAMB data """
796        logger = logging.getLogger('WAeUPTables.ApplicantsCatalog.importCSV')
797        stool = getToolByName(self, 'portal_schemas')
798        ltool = getToolByName(self, 'portal_layouts')
799        schema = stool._getOb(schema_id)
800        if schema is None:
801            em = 'No such schema %s' % schema_id
802            logger.error(em)
803            return
804        layout = ltool._getOb(layout_id)
805        if layout is None:
806            em = 'No such layout %s' % layout_id
807            logger.error(em)
808            return
809        d = self._import_old(filename,schema,layout,mode,logger)
810        if len(d['valid_records']) > 0:
811            for record in d['valid_records']:
812                #import pdb;pdb.set_trace()
813                if mode == "add":
814                    self.addRecord(**dict(record.items()))
815                    logger.info("added %s" % record.items())
816                elif mode == "edit":
817                    self.modifyRecord(**dict(record.items()))
818                    logger.info("edited %s" % record.items())
819                else:
820                    logger.info("invalid mode: %s" % mode)
821        logger.info("%(mode)sed %(imported)d records, invalid written to %(not_imported_fn)s" % d)
822    ###)
823
824InitializeClass(ApplicantsCatalog)
825
826###)
827
828class StudentsCatalog(WAeUPTable): ###(
829    security = ClassSecurityInfo()
830
831    meta_type = 'WAeUP Students Catalog'
832    name = "students_catalog"
833    key = "id"
834    affected_types = {   ###(
835                      'StudentApplication':
836                      {'id': 'application',
837                       'fields':
838                       ('jamb_reg_no',
839                        'entry_mode',
840                        #'entry_level',
841                        'entry_session',
842                       )
843                      },
844                      'StudentClearance':
845                      {'id': 'clearance',
846                       'fields':
847                       ('matric_no',
848                        'lga',
849                       )
850                      },
851                      'StudentPersonal':
852                      {'id': 'personal',
853                       'fields':
854                       ('name',
855                        'sex',
856                        'perm_address',
857                        'email',
858                        'phone',
859                       )
860                      },
861                      'StudentStudyCourse':
862                      {'id': 'study_course',
863                       'fields':
864                       ('course', # study_course
865                        'faculty', # from certificate
866                        'department', # from certificate
867                        'end_level', # from certificate
868                        'level', # current_level
869                        'mode',  # current_mode
870                        'session', # current_session
871                        'verdict', # current_verdict
872                       )
873                      },
874                     }
875    ###)
876
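    # affected_types maps the portal_type of each student sub-object to the id
    # of that sub-object inside the student folder and to the students_catalog
    # fields derived from its content document.  A field is read either through
    # a get_from_doc_<field> accessor defined below or, if no such accessor
    # exists, directly as an attribute of the document.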
877    def __init__(self,name=None):
878        if name ==  None:
879            name = self.name
880        WAeUPTable.__init__(self, name)
881        return
882
883    def manage_catalogClear(self, REQUEST=None, RESPONSE=None, URL1=None):
884        """ clears the whole enchilada """
885        self._catalog.clear()
886
887        if REQUEST and RESPONSE:
888            RESPONSE.redirect(
889              URL1 +
890              '/manage_catalogAdvanced?manage_tabs_message=Catalog%20Cleared')
891
892    def manage_catalogReindex(self, REQUEST, RESPONSE, URL1): ###(
893        """ clear the catalog, then re-index everything """
894
895        elapse = time.time()
896        c_elapse = time.clock()
897
898        pgthreshold = self._getProgressThreshold()
899        handler = (pgthreshold > 0) and ZLogHandler(pgthreshold) or None
900        self.refreshCatalog(clear=1, pghandler=handler)
901
902        elapse = time.time() - elapse
903        c_elapse = time.clock() - c_elapse
904
905        RESPONSE.redirect(
906            URL1 +
907            '/manage_catalogAdvanced?manage_tabs_message=' +
908            urllib.quote('Catalog Updated \n'
909                         'Total time: %s\n'
910                         'Total CPU time: %s' % (`elapse`, `c_elapse`)))
911    ###)
912
913    def fill_certificates_dict(self): ###(
914        "return certificate data in  dict"
915        certificates_brains = self.portal_catalog(portal_type ='Certificate')
916        d = {}
917        for cb in certificates_brains:
918            certificate_doc = cb.getObject().getContent()
919            cb_path = cb.getPath().split('/')
920            ld = {}
921            ld['faculty'] = cb_path[-4]
922            ld['department'] = cb_path[-3]
923            ld['end_level'] = getattr(certificate_doc,'end_level','999')
924            ld['study_mode'] = getattr(certificate_doc,'study_mode','')
925            d[cb.getId] = ld
926        return d
927    ###)
928
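    # The department, faculty and end_level accessors below prefer the volatile
    # certificate cache (_v_certificates, filled by fill_certificates_dict and
    # keyed by certificate id) and fall back to a portal_catalog lookup of the
    # student's study_course.  The cached_data keyword is accepted by all
    # get_from_doc_* accessors for call compatibility but is not used here.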
929    def get_from_doc_department(self,doc,cached_data={}): ###(
930        "return the students department"
931        if doc is None:
932            return None
933        if hasattr(self,"_v_certificates") and self._v_certificates.has_key(doc.study_course):
934            return self._v_certificates[doc.study_course]['department']
935        certificate_res = self.portal_catalog(id = doc.study_course)
936        if len(certificate_res) != 1:
937            return None
938        return certificate_res[0].getPath().split('/')[-3]
939
940    def get_from_doc_faculty(self,doc,cached_data={}):
941        "return the students faculty"
942        if doc is None:
943            return None
944        if hasattr(self,"_v_certificates") and self._v_certificates.has_key(doc.study_course):
945            return self._v_certificates[doc.study_course]['faculty']
946        certificate_res = self.portal_catalog(id = doc.study_course)
947        if len(certificate_res) != 1:
948            return None
949        return certificate_res[0].getPath().split('/')[-4]
950
951    def get_from_doc_end_level(self,doc,cached_data={}):
952        "return the students end_level"
953        if doc is None:
954            return None
955        if hasattr(self,"_v_certificates") and self._v_certificates.has_key(doc.study_course):
956            return self._v_certificates[doc.study_course]['end_level']
957        certificate_res = self.portal_catalog(id = doc.study_course)
958        if len(certificate_res) != 1:
959            return None
960        return getattr(certificate_res[0].getObject().getContent(),'end_level','unknown')
961
962    def get_from_doc_level(self,doc,cached_data={}):
963        "return the students level"
964        if doc is None:
965            return None
966        return getattr(doc,'current_level',None)
967
968    def get_from_doc_mode(self,doc,cached_data={}):
969        "return the students mode"
970        if doc is None:
971            return None
972        cm = getattr(doc,'current_mode',None)
973        return cm
974
975
976    def get_from_doc_session(self,doc,cached_data={}):
977        "return the students current_session"
978        if doc is None:
979            return None
980        return getattr(doc,'current_session',None)
981
982    def get_from_doc_entry_session(self,doc,cached_data={}):
983        "return the students entry_session"
984        if doc is None:
985            return None
986        es = getattr(doc,'entry_session',None)
987        if es is not None and len(es) == 2:
988            return es
989        elif es is not None and len(es) == 9:
990            return es[2:4]
991        try:
992            digit = int(doc.jamb_reg_no[0])
993        except:
994            return "-1"
995        if digit < 8:
996            return "0%c" % doc.jamb_reg_no[0]
997        return "9%c" % doc.jamb_reg_no[0]
998
999    def get_from_doc_course(self,doc,cached_data={}):
1000        "return the students study_course"
1001        if doc is None:
1002            return None
1003        return getattr(doc,'study_course',None)
1004
1005    def get_from_doc_name(self,doc,cached_data={}):
1006        "return the students name from the personal"
1007        if doc is None:
1008            return None
1009        return "%s %s %s" % (doc.firstname,doc.middlename,doc.lastname)
1010
1011    def get_from_doc_verdict(self,doc,cached_data={}):
1012        "return the students study_course"
1013        if doc is None:
1014            return None
1015        return getattr(doc,'current_verdict',None)
1016    ###)
1017
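    # reindexIndex rebuilds only the named students_catalog fields: the special
    # field 'review_state' is read from the workflow, all other fields are
    # re-read from the student sub-objects listed in affected_types (the
    # 'registered_courses' branch is currently commented out).  When a student
    # has no sub-objects yet, matric_no, sex, name and jamb_reg_no are taken
    # from the returning_import table instead.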
1018    def reindexIndex(self, name, REQUEST,pghandler=None): ###(
1019        if isinstance(name, str):
1020            name = (name,)
1021        reindextypes = {}
1022        reindex_special = []
1023        for n in name:
1024            if n in ("review_state","registered_courses"):
1025                reindex_special.append(n)
1026            else:
1027                for pt in self.affected_types.keys():
1028                    if n in self.affected_types[pt]['fields']:
1029                        if reindextypes.has_key(pt):
1030                            reindextypes[pt].append(n)
1031                        else:
1032                            reindextypes[pt]= [n]
1033                        break
1034        cached_data = {}
1035        if set(name).intersection(set(('faculty','department','end_level'))):
1036            cached_data = self.fill_certificates_dict()
1037        students = self.portal_catalog(portal_type="Student")
1038        if hasattr(self,'portal_catalog_real'):
1039            aq_portal = self.portal_catalog_real.evalAdvancedQuery
1040        else:
1041            aq_portal = self.portal_catalog.evalAdvancedQuery
1042        num_objects = len(students)
1043        if pghandler:
1044            pghandler.init('Refreshing catalog: %s' % self.absolute_url(1), num_objects)
1045        noattr = set(('StudentClearance','StudentPersonal')) & set(reindextypes.keys())
1046        #import pdb;pdb.set_trace()
1047        for i in xrange(num_objects):
1048            if pghandler: pghandler.report(i)
1049            student_brain = students[i]
1050            student_object = student_brain.getObject()
1051            # query = Eq('path',student_brain.getPath())
1052            # sub_brains_list = aq_portal(query)
1053            # sub_brains = {}
1054            # for sub_brain in sub_brains_list:
1055            #     sub_brains[sub_brain.portal_type] = sub_brain
1056            # student_path = student_brain.getPath()
1057            data = {}
1058            modified = False
1059            sid = data['id'] = student_brain.getId
1060            if reindex_special and 'review_state' in reindex_special:
1061                modified = True
1062                data['review_state'] = student_object.portal_workflow.getInfoFor(student_object,'review_state',None)
1063            sub_objects = False
1064            for pt in reindextypes.keys():
1065                modified = True
1066                try:
1067                    doc = getattr(student_object,self.affected_types[pt]['id']).getContent()
1068                    #doc = sub_brains[pt].getObject().getContent()
1069                    # path = "%s/%s" % (student_path,self.affected_types[pt]['id'])
1070                    # doc = self.unrestrictedTraverse(path).getContent()
1071                    sub_objects = True
1072                except:
1073                    continue
1074                for field in set(name).intersection(self.affected_types[pt]['fields']):
1075                    if hasattr(self,'get_from_doc_%s' % field):
1076                        data[field] = getattr(self,'get_from_doc_%s' % field)(doc,
1077                                                                              cached_data=cached_data)
1078                    else:
1079                        data[field] = getattr(doc,field)
1080            if not sub_objects and noattr:
1081                import_res = self.returning_import(id = sid)
1082                if not import_res:
1083                    continue
1084                import_record = import_res[0]
1085                data['matric_no'] = import_record.matric_no
1086                data['sex'] = import_record.Sex == 'F'
1087                data['name'] = "%s %s %s" % (import_record.Firstname,
1088                                             import_record.Middlename,
1089                                             import_record.Lastname)
1090                data['jamb_reg_no'] = import_record.Entryregno
1091            #if reindex_special and 'registered_courses' in reindex_special:
1092            #    try:
1093            #        study_course = getattr(student_object,"study_course")
1094            #        level_ids = study_course.objectIds()
1095            #    except:
1096            #        continue
1097            #    if not level_ids:
1098            #        continue
1099            #    modified = True
1100            #    level_ids.sort()
1101            #    course_ids = getattr(study_course,level_ids[-1]).objectIds()
1102            #    courses = []
1103            #    for c in course_ids:
1104            #        if c.endswith('_co'):
1105            #            courses.append(c[:-3])
1106            #        else:
1107            #            courses.append(c)
1108            #    data['registered_courses'] = courses
1109            if modified:
1110                self.modifyRecord(**data)
1111        if pghandler: pghandler.finish()
1112    ###)
1113
1114    def refreshCatalog(self, clear=0, pghandler=None): ###(
1115        """ re-index everything we can find """
1116        students_folder = self.portal_url.getPortalObject().campus.students
1117        if clear:
1118            self._catalog.clear()
1119        students = self.portal_catalog(portal_type="Student")
1120        num_objects = len(students)
1121        cached_data = self.fill_certificates_dict()
1122        if pghandler:
1123            pghandler.init('Refreshing catalog: %s' % self.absolute_url(1), num_objects)
1124        for i in xrange(num_objects):
1125            if pghandler: pghandler.report(i)
1126            student_brain = students[i]
1127            spath = student_brain.getPath()
1128            student_object = student_brain.getObject()
1129            data = {}
1130            sid = data['id'] = student_brain.getId
1131            #data['review_state'] = student_brain.review_state
1132            data['review_state'] = student_object.portal_workflow.getInfoFor(student_object,'review_state',None)
1133            sub_objects = False
1134            for pt in self.affected_types.keys():
1135                modified = True
1136                try:
1137                    doc = getattr(student_object,self.affected_types[pt]['id']).getContent()
1138                    sub_objects = True
1139                except:
1140                    #from pdb import set_trace;set_trace()
1141                    continue
1142                for field in self.affected_types[pt]['fields']:
1143                    if hasattr(self,'get_from_doc_%s' % field):
1144                        data[field] = getattr(self,'get_from_doc_%s' % field)(doc,
1145                                                                              cached_data=cached_data)
1146                    else:
1147                        data[field] = getattr(doc,field,None)
1148            if not sub_objects:
1149                import_res = self.returning_import(id = sid)
1150                if not import_res:
1151                    continue
1152                import_record = import_res[0]
1153                data['matric_no'] = import_record.matric_no
1154                data['sex'] = import_record.Sex == 'F'
1155                data['name'] = "%s %s %s" % (import_record.Firstname,
1156                                             import_record.Middlename,
1157                                             import_record.Lastname)
1158                data['jamb_reg_no'] = import_record.Entryregno
1159            self.addRecord(**data)
1160        if pghandler: pghandler.finish()
1161    ###)
1162
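    # notify_event_listener keeps students_catalog in sync with CPS events:
    # workflow events on a student proxy update review_state, sys_add_object
    # and sys_del_object add or remove the student's record, and
    # sys_modify_object on one of the affected_types documents re-derives the
    # fields listed for that portal_type (filling _v_certificates on demand).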
1163    security.declarePrivate('notify_event_listener') ###(
1164    def notify_event_listener(self,event_type,object,infos):
1165        "listen for events"
1166        if not infos.has_key('rpath'):
1167            return
1168        pt = getattr(object,'portal_type',None)
1169        mt = getattr(object,'meta_type',None)
1170        students_catalog = self
1171        data = {}
1172        if pt == 'Student' and\
1173           mt == 'CPS Proxy Folder' and\
1174           event_type.startswith('workflow'):
1175            data['id'] = object.getId()
1176            data['review_state'] = self.portal_workflow.getInfoFor(object,'review_state',None)
1177            students_catalog.modifyRecord(**data)
1178            return
1179        rpl = infos['rpath'].split('/')
1180        if pt == 'Student' and mt == 'CPS Proxy Folder':
1181            student_id = object.id
1182            if event_type == "sys_add_object":
1183                try:
1184                    self.addRecord(id = student_id)
1185                except ValueError:
1186                    pass
1187                return
1188            elif event_type == 'sys_del_object':
1189                self.deleteRecord(student_id)
1190        if pt not in self.affected_types.keys():
1191            return
1192        if event_type not in ('sys_modify_object',):
1193            return
1194        if mt == 'CPS Proxy Folder':
1195            return
1196        if not hasattr(self,'_v_certificates'):
1197            self._v_certificates = self.fill_certificates_dict()
1198        for field in self.affected_types[pt]['fields']:
1199            if hasattr(self,'get_from_doc_%s' % field):
1200                data[field] = getattr(self,'get_from_doc_%s' % field)(object)
1201            else:
1202                data[field] = getattr(object,field)
1203        data['id'] = rpl[2]
1204        self.modifyRecord(**data)
1205    ###)
1206
1207
1208InitializeClass(StudentsCatalog)
1209
1210###)
1211
1212class CertificatesCatalog(WAeUPTable): ###(
1213    security = ClassSecurityInfo()
1214
1215    meta_type = 'WAeUP Certificates Catalog'
1216    name =  "certificates_catalog"
1217    key = "code"
1218    def __init__(self,name=None):
1219        if name ==  None:
1220            name =  self.name
1221        WAeUPTable.__init__(self, name)
1222
1223    def manage_catalogReindex(self, REQUEST, RESPONSE, URL1): ###(
1224        """ clear the catalog, then re-index everything """
1225
1226        elapse = time.time()
1227        c_elapse = time.clock()
1228
1229        pgthreshold = self._getProgressThreshold()
1230        handler = (pgthreshold > 0) and ZLogHandler(pgthreshold) or None
1231        self.refreshCatalog(clear=1, pghandler=handler)
1232
1233        elapse = time.time() - elapse
1234        c_elapse = time.clock() - c_elapse
1235
1236        RESPONSE.redirect(
1237            URL1 +
1238            '/manage_catalogAdvanced?manage_tabs_message=' +
1239            urllib.quote('Catalog Updated \n'
1240                         'Total time: %s\n'
1241                         'Total CPU time: %s' % (`elapse`, `c_elapse`)))
1242    ###)
1243
1244    def reindexIndex(self, name, REQUEST,pghandler=None): ###(
1245        if isinstance(name, str):
1246            name = (name,)
1247        certificates = self.portal_catalog(portal_type="Certificate")
1248        num_objects = len(certificates)
1249        if pghandler:
1250            pghandler.init('Refreshing catalog: %s' % self.absolute_url(1), num_objects)
1251        for i in xrange(num_objects):
1252            if pghandler: pghandler.report(i)
1253            certificate_brain = certificates[i]
1254            certificate_object = certificate_brain.getObject()
1255            pl = certificate_brain.getPath().split('/')
1256            data = {}
1257            cid = data[self.key] = certificate_brain.getId
1258            data['faculty'] = pl[-4]
1259            data['department'] = pl[-3]
1260            doc = certificate_object.getContent()
1261            for field in name:
1262                if field not in (self.key,'faculty','department'):
1263                    data[field] = getattr(doc,field)
1264            self.modifyRecord(**data)
1265        if pghandler: pghandler.finish()
1266    ###)
1267
1268    def refreshCatalog(self, clear=0, pghandler=None): ###(
1269        """ re-index everything we can find """
1270        if clear:
1271            self._catalog.clear()
1272        certificates = self.portal_catalog(portal_type="Certificate")
1273        num_objects = len(certificates)
1274        if pghandler:
1275            pghandler.init('Refreshing catalog: %s' % self.absolute_url(1), num_objects)
1276        #from pdb import set_trace;set_trace()
1277        for i in xrange(num_objects):
1278            if pghandler: pghandler.report(i)
1279            certificate_brain = certificates[i]
1280            certificate_doc = certificate_brain.getObject().getContent()
1281            pl = certificate_brain.getPath().split('/')
1282            data = {}
1283            for field in self.schema():
1284                data[field] = getattr(certificate_doc,field,None)
1285            data[self.key] = certificate_brain.getId
1286            ai = pl.index('academics')
1287            data['faculty'] = pl[ai +1]
1288            data['department'] = pl[ai +2]
1289            if clear:
1290                self.addRecord(**data)
1291            else:
1292                self.modifyRecord(**data)
1293        if pghandler: pghandler.finish()
1294    ###)
1295
1296    security.declarePrivate('notify_event_listener') ###(
1297    def notify_event_listener(self,event_type,object,infos):
1298        "listen for events"
1299        if not infos.has_key('rpath'):
1300            return
1301        pt = getattr(object,'portal_type',None)
1302        mt = getattr(object,'meta_type',None)
1303        if pt != 'Certificate':
1304            return
1305        data = {}
1306        rpl = infos['rpath'].split('/')
1307        if event_type not in ("sys_add_object","sys_modify_object","sys_del_object"):
1308            return
1309        certificate_id = object.getId()
1310        data[self.key] = certificate_id
1311        if event_type == "sys_add_object" and mt == 'CPS Proxy Folder':
1312            try:
1313                self.addRecord(**data)
1314            except ValueError:
1315                return
1316            certificate_id = object.getId()
1317            doc = object.getContent()
1318            if doc is None:
1319                return
1320            for field in self.schema():
1321                data[field] = getattr(doc,field,None)
1322            data[self.key] = certificate_id
1323            ai = rpl.index('academics')
1324            data['faculty'] = rpl[ai +1]
1325            data['department'] = rpl[ai +2]
1326            self.modifyRecord(**data)
1327            return
1328        if event_type == "sys_del_object":
1329            self.deleteRecord(certificate_id)
1330            return
1331        if event_type == "sys_modify_object" and mt == 'Certificate':
1332            #from pdb import set_trace;set_trace()
1333            for field in self.schema():
1334                data[field] = getattr(object,field,None)
1335            certificate_id = object.aq_parent.getId()
1336            data[self.key] = certificate_id
1337            ai = rpl.index('academics')
1338            data['faculty'] = rpl[ai +1]
1339            data['department'] = rpl[ai +2]
1340            self.modifyRecord(**data)
1341    ###)
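    # Event flow handled above: 'sys_add_object' on the CPS Proxy Folder catalogs a stub
    # record holding only the key and, if the content document is already available,
    # immediately fills it from the schema fields; 'sys_modify_object' on the Certificate
    # document updates the record; 'sys_del_object' removes it.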
1342
1343
1344InitializeClass(CertificatesCatalog)
1345###)
1346
1347class CoursesCatalog(WAeUPTable): ###(
1348    security = ClassSecurityInfo()
1349
1350    meta_type = 'WAeUP Courses Catalog'
1351    name =  "courses_catalog"
1352    key = "code"
1353    def __init__(self,name=None):
1354        if name ==  None:
1355            name =  self.name
1356        WAeUPTable.__init__(self, name)
1357
1358    def manage_catalogReindex(self, REQUEST, RESPONSE, URL1): ###(
1359        """ clear the catalog, then re-index everything """
1360
1361        elapse = time.time()
1362        c_elapse = time.clock()
1363
1364        pgthreshold = self._getProgressThreshold()
1365        handler = (pgthreshold > 0) and ZLogHandler(pgthreshold) or None
1366        self.refreshCatalog(clear=1, pghandler=handler)
1367
1368        elapse = time.time() - elapse
1369        c_elapse = time.clock() - c_elapse
1370
1371        RESPONSE.redirect(
1372            URL1 +
1373            '/manage_catalogAdvanced?manage_tabs_message=' +
1374            urllib.quote('Catalog Updated \n'
1375                         'Total time: %s\n'
1376                         'Total CPU time: %s' % (`elapse`, `c_elapse`)))
1377    ###)
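    # The ZLogHandler used above reports reindexing progress to the event log every
    # 'pgthreshold' objects; if the catalog's progress threshold is 0, no progress
    # handler is created and the rebuild runs silently.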
1378
1379    def reindexIndex(self, name, REQUEST,pghandler=None): ###(
1380        if isinstance(name, str):
1381            name = (name,)
1382        courses = self.portal_catalog(portal_type="Course")
1383        num_objects = len(courses)
1384        if pghandler:
1385            pghandler.init('Refreshing catalog: %s' % self.absolute_url(1), num_objects)
1386        for i in xrange(num_objects):
1387            if pghandler: pghandler.report(i)
1388            course_brain = courses[i]
1389            course_object = course_brain.getObject()
1390            pl = course_brain.getPath().split('/')
1391            data = {}
1392            cid = data[self.key] = course_brain.getId
1393            data['faculty'] = pl[-4]
1394            data['department'] = pl[-3]
1395            doc = course_object.getContent()
1396            for field in name:
1397                if field not in (self.key,'faculty','department'):
1398                    data[field] = getattr(doc,field)
1399            self.modifyRecord(**data)
1400        if pghandler: pghandler.finish()
1401    ###)
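    # A minimal usage sketch (illustrative only): refreshing a single column for all
    # courses from trusted code. 'title' is assumed to be one of this catalog's schema
    # fields; the REQUEST argument is not used by reindexIndex itself.
    #
    #   courses_catalog = getToolByName(portal, 'courses_catalog')
    #   courses_catalog.reindexIndex('title', None)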
1402
1403    def refreshCatalog(self, clear=0, pghandler=None): ###(
1404        """ re-index everything we can find """
1405        if clear:
1406            self._catalog.clear()
1407        courses = self.portal_catalog(portal_type="Course")
1408        num_objects = len(courses)
1409        if pghandler:
1410            pghandler.init('Refreshing catalog: %s' % self.absolute_url(1), num_objects)
1411        #from pdb import set_trace;set_trace()
1412        for i in xrange(num_objects):
1413            if pghandler: pghandler.report(i)
1414            course_brain = courses[i]
1415            course_doc = course_brain.getObject().getContent()
1416            pl = course_brain.getPath().split('/')
1417            data = {}
1418            for field in self.schema():
1419                data[field] = getattr(course_doc,field,None)
1420            data[self.key] = course_brain.getId
1421            ai = pl.index('academics')
1422            data['faculty'] = pl[ai +1]
1423            data['department'] = pl[ai +2]
1424            if clear:
1425                self.addRecord(**data)
1426            else:
1427                self.modifyRecord(**data)
1428        if pghandler: pghandler.finish()
1429    ###)
1430
1431    security.declarePrivate('notify_event_listener') ###(
1432    def notify_event_listener(self,event_type,object,infos):
1433        "listen for events"
1434        if not infos.has_key('rpath'):
1435            return
1436        pt = getattr(object,'portal_type',None)
1437        mt = getattr(object,'meta_type',None)
1438        if pt != 'Course':
1439            return
1440        data = {}
1441        rpl = infos['rpath'].split('/')
1442        if event_type not in ("sys_add_object","sys_modify_object","sys_del_object"):
1443            return
1444        course_id = object.getId()
1445        data[self.key] = course_id
1446        if event_type == "sys_add_object" and mt == 'CPS Proxy Folder':
1447            try:
1448                self.addRecord(**data)
1449            except ValueError:
1450                return
1451            course_id = object.getId()
1452            doc = object.getContent()
1453            if doc is None:
1454                return
1455            for field in self.schema():
1456                data[field] = getattr(doc,field,None)
1457            data[self.key] = course_id
1458            ai = rpl.index('academics')
1459            data['faculty'] = rpl[ai +1]
1460            data['department'] = rpl[ai +2]
1461            self.modifyRecord(**data)
1462            return
1463        if event_type == "sys_del_object":
1464            self.deleteRecord(course_id)
1465            return
1466        if event_type == "sys_modify_object" and mt == 'Course':
1467            #from pdb import set_trace;set_trace()
1468            for field in self.schema():
1469                data[field] = getattr(object,field,None)
1470            course_id = object.aq_parent.getId()
1471            data[self.key] = course_id
1472            ai = rpl.index('academics')
1473            data['faculty'] = rpl[ai +1]
1474            data['department'] = rpl[ai +2]
1475            self.modifyRecord(**data)
1476    ###)
1477
1478
1479InitializeClass(CoursesCatalog)
1480###)
1481
1482class CourseResults(WAeUPTable): ###(
1483    security = ClassSecurityInfo()
1484
1485    meta_type = 'WAeUP Results Catalog'
1486    name = "course_results"
1487    key = "key" # composite key: "<student_id>|<level_id>|<course_id>"
1488    def __init__(self,name=None):
1489        if name ==  None:
1490            name = self.name
1491        WAeUPTable.__init__(self, name)
1492        self._queue = []
1493
1494    def addMultipleRecords(self, records): ###(
1495        """add many records"""
1496        existing_uids = []
1497        for data in records:
1498            uid = "%(student_id)s|%(level_id)s|%(course_id)s" % data
1499            data[self.key] = uid
1500            query = Eq(self.key, uid)
1501            res = self.course_results.evalAdvancedQuery(query)
1502            if len(res) > 0:
1503                rec = res[0]
1504                equal = True
1505                for attr in ('student_id','level_id','course_id'):
1506                    if getattr(rec,attr,'') != data[attr]:
1507                        equal = False
1508                        break
1509                if equal:
1510                    existing_uids.append(uid)
1511                    continue
1512            self.catalog_object(dict2ob(data), uid=uid)
1513        return existing_uids
1514    ###)
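    # A minimal usage sketch (illustrative only; the ids shown are made-up examples):
    # every record must carry student_id, level_id and course_id, from which the
    # composite uid "<student_id>|<level_id>|<course_id>" is built. Uids that already
    # exist with the same identifying fields are returned instead of being re-catalogued.
    #
    #   records = [{'student_id': 'X123456', 'level_id': '100', 'course_id': 'MAT101'}]
    #   course_results = getToolByName(portal, 'course_results')
    #   existing = course_results.addMultipleRecords(records)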
1515
1516    def deleteResultsHere(self,level_id,student_id): ###(
1517        query = Eq('student_id',student_id) & Eq('level_id', level_id)
1518        course_results = self.course_results.evalAdvancedQuery(query)
1519        #import pdb;pdb.set_trace()
1520        for result in course_results:
1521            self.deleteRecord(result.key)
1522    ###)
1523
1524    def moveResultsHere(self,level,student_id): ###(
1525        #import pdb;pdb.set_trace()
1526        level_id = level.getId()
1527        query = Eq('student_id',student_id) & Eq('level_id', level_id)
1528        course_results = self.course_results.evalAdvancedQuery(query)
1529        existing_courses = [cr.code for cr in course_results]
1530        to_delete = []
1531        for code,obj in level.objectItems():
1532            to_delete.append(code)
1533            carry_over = False
1534            if code.endswith('_co'):
1535                carry_over = True
1536                code  = code[:-3]
1537            if code in existing_courses:
1538                continue
1539            course_result_doc = obj.getContent()
1540            data = {}
1541            course_id = code
1542            for field in self.schema():
1543                data[field] = getattr(course_result_doc,field,'')
1544            data['key'] = key = "%(student_id)s|%(level_id)s|%(course_id)s" % vars()
1545            data['student_id'] = student_id
1546            data['level_id'] = level_id
1547            session_id = self.getLevelSession(level.getContent(),student_id,level_id)
1548            data['session_id'] = session_id
1549            #data['queue_status'] = OBJECT_CREATED
1550            data['code'] = course_id
1551            data['carry_over'] = carry_over
1552            self.catalog_object(dict2ob(data), uid=key)
1553        level.manage_delObjects(to_delete)
1554    ###)
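    # moveResultsHere flattens the course result objects stored inside a student's level
    # folder into catalog records: ids ending in '_co' are flagged as carry-overs (the
    # suffix is stripped to recover the course code), each record is keyed as
    # "<student_id>|<level_id>|<course_id>", and the level's objects are deleted once
    # they have been catalogued.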
1555
1556    def getCourses(self,student_id,level_id): ###(
1557        query = Eq('student_id',student_id) & Eq('level_id', level_id)
1558        course_results = self.course_results.evalAdvancedQuery(query)
1559        carry_overs = []
1560        normal1 = []
1561        normal2 = []
1562        normal3 = []
1563        total_credits = 0
1564        gpa = 0
1565        for brain in course_results:
1566            d = {}
1567
1568            for field in self.schema():
1569                d[field] = getattr(brain,field,None)
1570                if repr(d[field]) == 'Missing.Value':
1571                    d[field] = ''
1572            d['weight'] = ''
1573            d['grade'] = ''
1574            d['score'] = ''
1575
1576            if str(brain.credits).isdigit():
1577                credits = int(brain.credits)
1578                total_credits += credits
1579                score = getattr(brain,'score',0)
1580                if score and str(score).isdigit() and int(score) > 0:
1581                    score = int(score)
1582                    grade,weight = self.getGradesFromScore(score,'')
1583                    gpa += weight * credits
1584                    d['weight'] = weight
1585                    d['grade'] = grade
1586                    d['score'] = score
1587
1588            #if str(brain.ca1).isdigit() and str(brain.ca2).isdigit() and str(brain.exam).isdigit():
1589            #    d['score_calc'] = int(brain.ca1) + int(brain.ca2) + int(brain.exam)
1590            #else:
1591            #    d['score_calc'] = ''
1592            try:
1593                d['score_calc'] = float(brain.ca1) + float(brain.ca2) + float(brain.exam)
1594            except:
1595                d['score_calc'] = ''
1596
1597            if d['score_calc']:
1598                grade,weight = self.getGradesFromScore(d['score_calc'],level_id)
1599                d['grade'] = grade
1600
1601            d['coe'] = ''
1602            if brain.core_or_elective:
1603                d['coe'] = 'Core'
1604            elif brain.core_or_elective == False:
1605                d['coe'] = 'Elective'
1606            code = d['id'] = brain.code
1607            d['code'] = code
1608            res = self.courses_catalog.evalAdvancedQuery(Eq('code',code))
1609            if res:
1610                course = res[0]
1611                d['title'] = course.title
1612            # The courses_catalog holds a mix of strings and integers in its semester field.
1613            # Reindexing the catalog would probably fix this; the course schema declares the field as 'CPS Int Field'.
1614                d['semester'] = str(course.semester)
1615            else:
1616                d['title'] = "Course has been removed from course list"
1617                d['semester'] = ''
1618            if brain.carry_over:
1619                d['coe'] = 'CO'
1620                carry_overs.append(d)
1621            else:
1622                if d['semester'] == '1':
1623                    normal1.append(d)
1624
1625                elif d['semester'] == '2':
1626                    normal2.append(d)
1627                else:
1628                    normal3.append(d)
1629        #normal.sort(cmp=lambda x,y: cmp("%(semester)s%(code)s" % x,
1630        #                                "%(semester)s%(code)s" % y))
1631        carry_overs.sort(cmp=lambda x,y: cmp("%(semester)s%(code)s" % x,
1632                                             "%(semester)s%(code)s" % y))
1633        return total_credits,gpa,carry_overs,normal1,normal2,normal3
1634    ###)
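    # A sketch of how a caller might consume the return value (illustrative only; the
    # actual templates may differ): the second item is the sum of weight * credits, so
    # a grade point average still has to be divided by the credit total.
    #
    #   total_credits,weighted,carry_overs,sem1,sem2,other = self.getCourses(student_id,level_id)
    #   gpa = total_credits and float(weighted) / total_credits or 0.0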
1635
1636
1637    def getAllCourses(self,student_id): ###(
1638        query = Eq('student_id',student_id)
1639        course_results = self.course_results.evalAdvancedQuery(query)
1640        courses = []
1641        for brain in course_results:
1642            d = {}
1643
1644            for field in self.schema():
1645                d[field] = getattr(brain,field,'')
1646
1647            d['weight'] = ''
1648            d['grade'] = ''
1649            d['score'] = ''
1650
1651            if str(brain.credits).isdigit():
1652                credits = int(brain.credits)
1653                score = getattr(brain,'score',0)
1654                if score and str(score).isdigit() and int(score) > 0:
1655                    score = int(score)
1656                    grade,weight = self.getGradesFromScore(score)
1657                    d['weight'] = weight
1658                    d['grade'] = grade
1659                    d['score'] = score
1660            d['coe'] = ''
1661            if brain.core_or_elective:
1662                d['coe'] = 'Core'
1663            elif brain.core_or_elective == False:
1664                d['coe'] = 'Elective'
1665            code = d['id'] = brain.code
1666            d['code'] = code
1667            res = self.courses_catalog.evalAdvancedQuery(Eq('code',code))
1668            if res:
1669                course = res[0]
1670                d['title'] = course.title
1671            # The courses_catalog holds a mix of strings and integers in its semester field.
1672            # Reindexing the catalog would probably fix this; the course schema declares the field as 'CPS Int Field'.
1673                d['semester'] = str(course.semester)
1674            else:
1675                d['title'] = "Course has been removed from course list"
1676                d['semester'] = ''
1677            if brain.carry_over:
1678                d['coe'] = 'CO'
1679            courses.append(d)
1680        return courses
1681    ###)
1682
1683InitializeClass(CourseResults)
1684###)
1685
1686class OnlinePaymentsImport(WAeUPTable): ###(
1687
1688    meta_type = 'WAeUP Online Payment Transactions'
1689    name = "online_payments_import"
1690    key = "order_id"
1691    def __init__(self,name=None):
1692        if name ==  None:
1693            name = self.name
1694        WAeUPTable.__init__(self, name)
1695
1696
1697InitializeClass(OnlinePaymentsImport)
1698###)
1699
1700class ReturningImport(WAeUPTable): ###(
1701
1702    meta_type = 'Returning Import Table'
1703    name = "returning_import"
1704    key = "matric_no"
1705    def __init__(self,name=None):
1706        if name ==  None:
1707            name = self.name
1708        WAeUPTable.__init__(self, name)
1709
1710
1711InitializeClass(ReturningImport)
1712###)
1713
1714class ResultsImport(WAeUPTable): ###(
1715
1716    meta_type = 'Results Import Table'
1717    name = "results_import"
1718    key = "key"
1719    def __init__(self,name=None):
1720        if name ==  None:
1721            name = self.name
1722        WAeUPTable.__init__(self, name)
1723
1724
1725InitializeClass(ResultsImport)
1726
1727###)
1728
1729class PaymentsCatalog(WAeUPTable): ###(
1730    security = ClassSecurityInfo()
1731
1732    meta_type = 'WAeUP Payments Catalog'
1733    name = "payments_catalog"
1734    key = "order_id"
1735    def __init__(self,name=None):
1736        if name ==  None:
1737            name = self.name
1738        WAeUPTable.__init__(self, name)
1739
1740
1741    security.declarePrivate('notify_event_listener') ###(
1742    def notify_event_listener(self,event_type,object,infos):
1743        "listen for events"
1744        if not infos.has_key('rpath'):
1745            return
1746        pt = getattr(object,'portal_type',None)
1747        mt = getattr(object,'meta_type',None)
1748        data = {}
1749        if pt != 'Payment':
1750            return
1751        if event_type == 'sys_del_object' and mt == 'CPS Proxy Folder':
1752            self.deleteRecord(object.getContent().order_id)
1753        if mt == 'CPS Proxy Folder':
1754            return # is handled only for the real object
1755        if event_type not in ('sys_modify_object',):
1756            return
1757        for field in self.schema():
1758            data[field] = getattr(object,field,'')
1759        rpl = infos['rpath'].split('/')
1760        #import pdb;pdb.set_trace()
1761        student_id = rpl[-4]
1762        data['student_id'] = student_id
1763        modified = False
1764        try:
1765            self.modifyRecord(**data)
1766            modified = True
1767        except KeyError:
1768            #logger = logging.getLogger('WAeUPTables.PaymentsCatalog.%s' % self.__name__)
1769            #logger.info("could not modify entry for %(student_id)s with %(order_id)s" % data)
1770            pass
1771        if not modified:
1772            try:
1773                self.addRecord(**data)
1774            except:
1775                logger = logging.getLogger('WAeUPTables.PaymentsCatalog.notify_event_listener')
1776                logger.info("could not add or modify entry for %(student_id)s with %(order_id)s" % data)
1777        ###)
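    # The student id is not part of the payment document; it is taken four path segments
    # from the end of the rpath (rpl[-4]). The listener first tries modifyRecord and only
    # falls back to addRecord when no entry with that order_id has been catalogued yet.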
1778
1779
1780InitializeClass(PaymentsCatalog)
1781
1782###)
1783
1784# BBB:
1785AccomodationTable = AccommodationTable