source: WAeUP_SRP/base/WAeUPTables.py @ 3281

Last change on this file since 3281 was 3043, checked in by joachim, 17 years ago

update bedlist now modifies beds, if they have been modified. Change beds
now sets the previous bed student to not_occupied.

  • Property svn:keywords set to Id
File size: 58.2 KB
Line 
1#-*- mode: python; mode: fold -*-
2# (C) Copyright 2005 AixtraWare <http://aixtraware.de>
3# Author: Joachim Schmitz <js@aixtraware.de>
4#
5# This program is free software; you can redistribute it and/or modify
6# it under the terms of the GNU General Public License version 2 as published
7# by the Free Software Foundation.
8#
9# This program is distributed in the hope that it will be useful,
10# but WITHOUT ANY WARRANTY; without even the implied warranty of
11# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
12# GNU General Public License for more details.
13#
14# You should have received a copy of the GNU General Public License
15# along with this program; if not, write to the Free Software
16# Foundation, Inc., 59 Temple Place - Suite 330, Boston, MA
17# 02111-1307, USA.
18#
19# $Id: WAeUPTables.py 3043 2008-01-24 18:02:36Z joachim $
20
21from zope.interface import implements
22from Globals import InitializeClass
23from Products.ZCatalog.ZCatalog import ZCatalog
24from Products.ZCatalog.ProgressHandler import ZLogHandler
25from AccessControl import ClassSecurityInfo
26from Products.CMFCore.permissions import ModifyPortalContent
27from Products.CMFCore.utils import getToolByName
28from Products.CMFCore.CatalogTool import CatalogTool
29from Products.CPSSchemas.StorageAdapter import MappingStorageAdapter
30from Products.CPSSchemas.DataStructure import DataStructure
31from Products.CPSSchemas.DataModel import DataModel
32from Products.AdvancedQuery import Eq, Between, Le,In
33import urllib
34import DateTime,time
35import csv,re
36import logging
37import Globals
# filesystem locations used for the import/export files handled below
p_home = Globals.package_home(globals())
i_home = Globals.INSTANCE_HOME

# marker strings stored in catalog records
# NOTE(review): 'sheduled' (sic) — the misspelled literal is persisted data,
# do not "fix" it without migrating existing records
ADDING_SHEDULED = "adding_sheduled"
OBJECT_CREATED = "object_created"
NOT_OCCUPIED = 'not_occupied'
44
45from interfaces import IWAeUPTable
46
class AttributeHolder(object):
    """Bare container object; dict2ob attaches arbitrary attributes to it."""
    pass


def dict2ob(dict):
    """Return an AttributeHolder exposing each key of *dict* as an attribute."""
    ob = AttributeHolder()
    ob.__dict__.update(dict)
    return ob
55
class WAeUPTable(ZCatalog): ###(
    """Base class for all WAeUP catalog tables (a thin ZCatalog wrapper).

    Subclasses must set ``meta_type``, ``name`` (the tool id) and ``key``
    (the metadata field used as the unique record id).
    """

    implements(IWAeUPTable)
    security = ClassSecurityInfo()
    meta_type = None  # set by each concrete subclass
61
62    def __init__(self,name=None):
63        if name ==  None:
64            name = self.name
65        ZCatalog.__init__(self,name)
66
67    def refreshCatalog(self, clear=0, pghandler=None): ###(
68        """ don't refresh for a normal table """
69
70        if self.REQUEST and self.REQUEST.RESPONSE:
71            self.REQUEST.RESPONSE.redirect(
72              URL1 +
73              '/manage_catalogAdvanced?manage_tabs_message=Catalog%20refresh%20not%20implemented')
74
75###)
76
77    def manage_catalogClear(self, REQUEST=None, RESPONSE=None, URL1=None): ###(
78        """ clears the whole enchilada """
79
80        #if REQUEST and RESPONSE:
81        #    RESPONSE.redirect(
82        #      URL1 +
83        #      '/manage_catalogAdvanced?manage_tabs_message=Catalog%20Clearing%20disabled')
84
85        self._catalog.clear()
86        if REQUEST and RESPONSE:
87            RESPONSE.redirect(
88              URL1 +
89              '/manage_catalogAdvanced?manage_tabs_message=Catalog%20cleared')
90
91###)
92
93    def record2dict(self,fields,record): ###(
94        d = {}
95        for key in fields:
96            v = getattr(record, key, None)
97            v_dump = v
98            if key == 'sex':
99                if v == True:
100                    v_dump = 'F'
101                elif v == False:
102                    v_dump = 'M'
103                d[key] = v_dump
104            elif v:
105                if key == 'lga':
106                    v_dump = self.portal_vocabularies.local_gov_areas.get(v)
107                    if not v_dump:
108                        v_dump = v
109                elif key == 'aos':
110                    v_dump = self.portal_vocabularies.aos.get(v)
111                d[key] = v_dump
112            else:
113                d[key] = ''
114        return d
115
116###)
117
118    def addRecord(self, **data): ###(
119        # The uid is the same as "bed".
120        uid = data[self.key]
121        res = self.searchResults({"%s" % self.key : uid})
122        if len(res) > 0:
123            raise ValueError("More than one record with uid %s" % uid)
124        self.catalog_object(dict2ob(data), uid=uid)
125        return uid
126
127###)
128
    def deleteRecord(self, uid):
        """Remove the record cataloged under *uid*."""
        self.uncatalog_object(uid)
131
132    def getRecordByKey(self,key):
133        if not key:
134            return None
135        res = self.evalAdvancedQuery(Eq(self.key,key))
136        if res:
137            return res[0]
138        return None
139
140    def searchAndSetRecord(self, **data):
141        raise NotImplemented
142
143    def modifyRecord(self, record=None, **data): ###(
144        #records = self.searchResults(uid=uid)
145        uid = data[self.key]
146        if record is None:
147            records = self.searchResults({"%s" % self.key : uid})
148            if len(records) > 1:
149                # Can not happen, but anyway...
150                raise ValueError("More than one record with uid %s" % uid)
151            if len(records) == 0:
152                raise KeyError("No record for uid %s" % uid)
153            record = records[0]
154        record_data = {}
155        for field in self.schema() + self.indexes():
156            record_data[field] = getattr(record, field)
157        # Add the updated data:
158        record_data.update(data)
159        self.catalog_object(dict2ob(record_data), uid)
160
161###)
162
163    def reindexIndex(self, name, REQUEST,pghandler=None): ###(
164        if isinstance(name, str):
165            name =  (name,)
166        paths = self._catalog.uids.items()
167        i = 0
168        #import pdb;pdb.set_trace()
169        for p,rid in paths:
170            i += 1
171            metadata = self.getMetadataForRID(rid)
172            record_data = {}
173            for field in name:
174                record_data[field] = metadata.get(field)
175            uid = metadata.get(self.key)
176            self.catalog_object(dict2ob(record_data), uid, idxs=name,
177                                update_metadata=0)
178
179###)
180
181    security.declareProtected(ModifyPortalContent,"exportAllRecords") ###(
182    def exportAllRecords(self):
183        "export a WAeUPTable"
184        #import pdb;pdb.set_trace()
185        fields = [field for field in self.schema()]
186        format = ','.join(['"%%(%s)s"' % fn for fn in fields])
187        csv = []
188        csv.append(','.join(['"%s"' % fn for fn in fields]))
189        for uid in self._catalog.uids:
190            records = self.searchResults({"%s" % self.key : uid})
191            if len(records) > 1:
192                # Can not happen, but anyway...
193                raise ValueError("More than one record with uid %s" % uid)
194            if len(records) == 0:
195                raise KeyError("No record for uid %s" % uid)
196            rec = records[0]
197            csv.append(format % rec)
198        current = DateTime.DateTime().strftime("%d-%m-%y_%H_%M_%S")
199        open("%s/import/%s-%s.csv" % (i_home,self.getId(),current),"w+").write('\n'.join(csv))
200
201###)
202
203    security.declareProtected(ModifyPortalContent,"dumpAll")###(
204    def dumpAll(self):
205        """dump all data in the table to a csv"""
206        member = self.portal_membership.getAuthenticatedMember()
207        logger = logging.getLogger('WAeUPTables.WAeUPTable.dumpAll')
208        current = DateTime.DateTime().strftime("%d-%m-%y_%H_%M_%S")
209        export_file = "%s/export/%s_%s.csv" % (i_home,self.__name__,current,)
210        res_list = []
211        lines = []
212        if hasattr(self,"export_keys"):
213            fields = self.export_keys
214        else:
215            fields = []
216            for f in self.schema():
217                fields.append(f)
218        headline = ','.join(fields)
219        out = open(export_file,"wb")
220        out.write(headline +'\n')
221        out.close()
222        out = open(export_file,"a")
223        csv_writer = csv.DictWriter(out,fields,)
224        records = self()
225        nr2export = len(records)
226        logger.info('%s starts dumping, %s records to export' % (member,nr2export))
227        chunk = 2000
228        total = 0
229        start = DateTime.DateTime().timeTime()
230        start_chunk = DateTime.DateTime().timeTime()
231        for record in records:
232            not_all = False
233            d = self.record2dict(fields,record)
234            lines.append(d)
235            total += 1
236            if total and not total % chunk or total == len(records):
237                csv_writer.writerows(lines)
238                anz = len(lines)
239                logger.info("wrote %(anz)d  total written %(total)d" % vars())
240                end_chunk = DateTime.DateTime().timeTime()
241                duration = end_chunk-start_chunk
242                per_record = duration/anz
243                till_now = end_chunk - start
244                avarage_per_record = till_now/total
245                estimated_end = DateTime.DateTime(start + avarage_per_record * nr2export)
246                estimated_end = estimated_end.strftime("%H:%M:%S")
247                logger.info('%(duration)4.1f, %(per_record)4.3f,end %(estimated_end)s' % vars())
248                start_chunk = DateTime.DateTime().timeTime()
249                lines = []
250        end = DateTime.DateTime().timeTime()
251        logger.info('total time %6.2f m' % ((end-start)/60))
252        import os
253        filename, extension = os.path.splitext(export_file)
254        from subprocess import call
255        msg = "wrote %(total)d records to %(export_file)s" % vars()
256        #try:
257        #    retcode = call('gzip %s' % (export_file),shell=True)
258        #    if retcode == 0:
259        #        msg = "wrote %(total)d records to %(export_file)s.gz" % vars()
260        #except OSError, e:
261        #    retcode = -99
262        #    logger.info("zip failed with %s" % e)
263        logger.info(msg)
264        args = {'portal_status_message': msg}
265        #url = self.REQUEST.get('URL1') + '?' + urlencode(args)
266        url = self.REQUEST.get('URL2')
267        return self.REQUEST.RESPONSE.redirect(url)
268    ###)
269
    security.declarePrivate("_import_old") ###(
    def _import_old(self,filename,schema,layout, mode,logger):
        """Validate <i_home>/import/<filename>.csv row by row against *layout*.

        Accepted rows are appended to ..._imported<timestamp>.csv, rejected
        rows (plus an Error column) to ..._not_imported<timestamp>.csv.
        Returns a result dict with counts, the validated DataModels and the
        involved file names.  No records are added here; the caller
        (importCSV) decides what to do with d['valid_records'].
        """
        import transaction
        import random
        pm = self.portal_membership
        member = pm.getAuthenticatedMember()
        current = DateTime.DateTime().strftime("%d-%m-%y_%H_%M_%S")
        import_fn = "%s/import/%s.csv" % (i_home,filename)
        imported_fn = "%s/import/%s_imported%s.csv" % (i_home,filename,current)
        not_imported_fn = "%s/import/%s_not_imported%s.csv" % (i_home,filename,current)
        start = True
        tr_count = 1
        total_imported = 0
        total_not_imported = 0
        total = 0
        iname =  "%s" % filename
        not_imported = []
        imported = []
        valid_records = []
        invalid_records = []
        # result dict handed back to the caller; the count/list slots are
        # refreshed again at the end once the totals are known
        d = {}
        d['mode'] = mode
        d['imported'] = total_imported
        d['not_imported'] = total_not_imported
        d['valid_records'] = valid_records
        d['invalid_records'] = invalid_records
        d['import_fn'] = import_fn
        d['imported_fn'] = imported_fn
        d['not_imported_fn'] = not_imported_fn
        if schema is None:
            em = 'No schema specified'
            logger.error(em)
            return d
        if layout is None:
            em = 'No layout specified'
            logger.error(em)
            return d
        # one validator callable per layout widget; abort when a widget
        # cannot validate at all
        validators = {}
        for widget in layout.keys():
            try:
                validators[widget] = layout[widget].validate
            except AttributeError:
                logger.info('%s has no validate attribute' % widget)
                return d
        # if mode == 'edit':
        #     importer = self.importEdit
        # elif mode == 'add':
        #     importer = self.importAdd
        # else:
        #     importer = None
        try:
            items = csv.DictReader(open(import_fn,"rb"),
                                   dialect="excel",
                                   skipinitialspace=True)
        except:
            em = 'Error reading %s.csv' % filename
            logger.error(em)
            return d
        for item in items:
            if start:
                # first data row: derive the import keys from the csv
                # heading and write the headers of both output files
                start = False
                logger.info('%s starts import from %s.csv' % (member,filename))
                attrs = csv.reader(open("%s/import/%s.csv" % (i_home,filename),"rb"),
                                   dialect="excel",
                                   skipinitialspace=True).next()
                # columns named 'ignore*' or written in ALL-CAPS are skipped
                import_keys = [k for k in attrs if not (k.startswith('ignore') or k.isupper())]
                diff2schema = set(import_keys).difference(set(schema.keys()))
                diff2layout = set(import_keys).difference(set(layout.keys()))
                if diff2layout:
                    em = "not ignorable key(s) %s found in heading" % diff2layout
                    logger.info(em)
                    return d
                s = ','.join(['"%s"' % fn for fn in import_keys])
                open(not_imported_fn,"a").write(s + ',"Error"'+ '\n')
                open(imported_fn,"a").write(s + '\n')
                format = ','.join(['"%%(%s)s"' % fn for fn in import_keys])
                format_error = format + ',"%(Error)s"'
                adapters = [MappingStorageAdapter(schema, item)]
            dm = DataModel(item, adapters,context=self)
            ds = DataStructure(data=item,datamodel=dm)
            error_string = ""
            # run every widget validator; collect all errors for this row
            for k in import_keys:
                if not validators[k](ds,mode=mode):
                    error_string += " %s : %s" % (k,ds.getError(k))
            # if not error_string and importer:
            #     item.update(dm)
            #     item['id'],error = importer(item)
            #     if error:
            #         error_string += error
            if error_string:
                item['Error'] = error_string
                invalid_records.append(dm)
                not_imported.append(format_error % item)
                total_not_imported += 1
            else:
                em = format % item
                valid_records.append(dm)
                imported.append(em)
                tr_count += 1
                total_imported += 1
            total += 1
        if len(imported) > 0:
            open(imported_fn,"a").write('\n'.join(imported))
        if len(not_imported) > 0:
            open(not_imported_fn,"a").write('\n'.join(not_imported))
        d['imported'] = total_imported
        d['not_imported'] = total_not_imported
        d['valid_records'] = valid_records
        d['invalid_records'] = invalid_records
        d['imported_fn'] = imported_fn
        d['not_imported_fn'] = not_imported_fn
        return d
    ###)
392
393    security.declarePrivate("_import") ###(
394    def _import_new(self,csv_items,schema, layout, mode,logger):
395        "import data from csv.Dictreader Instance"
396        start = True
397        tr_count = 1
398        total_imported = 0
399        total_not_imported = 0
400        total = 0
401        iname =  "%s" % filename
402        not_imported = []
403        valid_records = []
404        invalid_records = []
405        duplicate_records = []
406        d = {}
407        d['mode'] = mode
408        d['valid_records'] = valid_records
409        d['invalid_records'] = invalid_records
410        d['invalid_records'] = duplicate_records
411        # d['import_fn'] = import_fn
412        # d['imported_fn'] = imported_fn
413        # d['not_imported_fn'] = not_imported_fn
414        validators = {}
415        for widget in layout.keys():
416            try:
417                validators[widget] = layout[widget].validate
418            except AttributeError:
419                logger.info('%s has no validate attribute' % widget)
420                return d
421        for item in csv_items:
422            if start:
423                start = False
424                logger.info('%s starts import from %s.csv' % (member,filename))
425                #import_keys = [k for k in item.keys() if not k.startswith('ignore')]
426                attrs = csv.reader(open("%s/import/%s.csv" % (i_home,filename),"rb")).next()
427                import_keys = [k for k in attrs if not (k.startswith('ignore') or k.isupper())]
428                diff2schema = set(import_keys).difference(set(schema.keys()))
429                diff2layout = set(import_keys).difference(set(layout.keys()))
430                if diff2layout:
431                    em = "not ignorable key(s) %s found in heading" % diff2layout
432                    logger.info(em)
433                    return d
434                # s = ','.join(['"%s"' % fn for fn in import_keys])
435                # open(not_imported_fn,"a").write(s + ',"Error"'+ '\n')
436                # #s = '"id",' + s
437                # open(imported_fn,"a").write(s + '\n')
438                # format = ','.join(['"%%(%s)s"' % fn for fn in import_keys])
439                # format_error = format + ',"%(Error)s"'
440                # #format = '"%(id)s",'+ format
441                adapters = [MappingStorageAdapter(schema, item)]
442            dm = DataModel(item, adapters,context=self)
443            ds = DataStructure(data=item,datamodel=dm)
444            error_string = ""
445            for k in import_keys:
446                if not validators[k](ds,mode=mode):
447                    error_string += " %s : %s" % (k,ds.getError(k))
448            if error_string:
449                item['Error'] = error_string
450                #invalid_records.append(dm)
451                invalid_records.append(item)
452                total_not_imported += 1
453            else:
454                em = format % item
455                valid_records.append(dm)
456                #logger.info("%(total_imported)d of %(total)d %(em)s" % vars())
457                tr_count += 1
458                total_imported += 1
459            total += 1
460        # if len(imported) > 0:
461        #     open(imported_fn,"a").write('\n'.join(imported))
462        # if len(not_imported) > 0:
463        #     open(not_imported_fn,"a").write('\n'.join(not_imported))
464        #em = "Imported: %d, not imported: %d of total %d" % (total_imported,total_not_imported,total)
465        d['imported'] = total_imported
466        d['not_imported'] = total_not_imported
467        d['valid_records'] = valid_records
468        d['invalid_records'] = invalid_records
469        return d
470    ###)
471
    security.declarePublic("missingValue")###(
    def missingValue(self):
        """Return Zope's Missing.MV sentinel for absent metadata values."""
        from Missing import MV
        return MV
    ###)
###)
478
class AccommodationTable(WAeUPTable): ###(
    """Catalog of hostel beds; the record key is the 'bed' id."""

    meta_type = 'WAeUP Accommodation Tool'
    name = "portal_accommodation"
    key = "bed"
    not_occupied = NOT_OCCUPIED
    def __init__(self,name=None):
        if name ==  None:
            name = self.name
        WAeUPTable.__init__(self, name)

    def searchAndReserveBed(self, student_id,bed_type): ###(
        """Reserve a free bed of *bed_type* for *student_id*.

        Returns (status, payload):
          ( 1, bed id)  -- bed reserved
          (-1, message) -- student already booked a bed
          (-2, message) -- no free bed of that type
        """
        records = self.evalAdvancedQuery(Eq('student',student_id))
        if len(records) > 0:
            return -1,"Student with Id %s already booked bed %s." % (student_id,records[0].bed)

        # free beds carry the NOT_OCCUPIED marker in their 'student' field;
        # sort order picks the first bed by (sort_id, bed)
        query = Eq('bed_type',bed_type) & Eq('student',NOT_OCCUPIED)
        records = self.evalAdvancedQuery(query,sortSpecs=('sort_id','bed'))
        if len(records) == 0:
            return -2,"No bed available"
        rec = records[0]
        self.modifyRecord(bed=rec.bed,student=student_id)
        s_logger = logging.getLogger('WAeUPTables.AccommodationTable.searchAndReserveBed')
        s_logger.info('%s reserved bed %s' % (student_id,rec.bed))
        return 1,rec.bed
    ###)


InitializeClass(AccommodationTable)

###)
513
class PinTable(WAeUPTable): ###(
    """Catalog of access PINs; the record key is the 'pin' value."""
    # NOTE(review): a class-body import only binds ConflictError as a CLASS
    # attribute -- it is NOT in scope as a bare name inside methods
    from ZODB.POSException import ConflictError
    security = ClassSecurityInfo()
    meta_type = 'WAeUP Pin Tool'
    name = "portal_pins"
    key = 'pin'

    def __init__(self,name=None):
        if name ==  None:
            name = self.name
        WAeUPTable.__init__(self, name)
525
526    security.declareProtected(ModifyPortalContent,"dumpAll")###(
527    def dumpAll(self,include_unused=None):
528        """dump all data in the table to a csv"""
529        member = self.portal_membership.getAuthenticatedMember()
530        logger = logging.getLogger('WAeUPTables.PinTable.dumpAll')
531        current = DateTime.DateTime().strftime("%d-%m-%y_%H_%M_%S")
532        export_file = "%s/export/%s_%s.csv" % (i_home,self.__name__,current,)
533        res_list = []
534        lines = []
535        if hasattr(self,"export_keys"):
536            fields = self.export_keys
537        else:
538            fields = []
539            for f in self.schema():
540                fields.append(f)
541        headline = ','.join(fields)
542        out = open(export_file,"wb")
543        out.write(headline +'\n')
544        out.close()
545        out = open(export_file,"a")
546        csv_writer = csv.DictWriter(out,fields,)
547        if include_unused is not None and str(member) not in ('admin','joachim'):
548            logger.info('%s tries to dump pintable with unused pins' % (member))
549            return
550        if include_unused is not None:
551            records = self()
552        else:
553            records = self.evalAdvancedQuery(~Eq('student',''))
554        nr2export = len(records)
555        logger.info('%s starts dumping, %s records to export' % (member,nr2export))
556        chunk = 2000
557        total = 0
558        start = DateTime.DateTime().timeTime()
559        start_chunk = DateTime.DateTime().timeTime()
560        for record in records:
561            not_all = False
562            d = self.record2dict(fields,record)
563            lines.append(d)
564            total += 1
565            if total and not total % chunk or total == len(records):
566                csv_writer.writerows(lines)
567                anz = len(lines)
568                logger.info("wrote %(anz)d  total written %(total)d" % vars())
569                end_chunk = DateTime.DateTime().timeTime()
570                duration = end_chunk-start_chunk
571                per_record = duration/anz
572                till_now = end_chunk - start
573                avarage_per_record = till_now/total
574                estimated_end = DateTime.DateTime(start + avarage_per_record * nr2export)
575                estimated_end = estimated_end.strftime("%H:%M:%S")
576                logger.info('%(duration)4.1f, %(per_record)4.3f,end %(estimated_end)s' % vars())
577                start_chunk = DateTime.DateTime().timeTime()
578                lines = []
579        end = DateTime.DateTime().timeTime()
580        logger.info('total time %6.2f m' % ((end-start)/60))
581        import os
582        filename, extension = os.path.splitext(export_file)
583        from subprocess import call
584        msg = "wrote %(total)d records to %(export_file)s" % vars()
585        #try:
586        #    retcode = call('gzip %s' % (export_file),shell=True)
587        #    if retcode == 0:
588        #        msg = "wrote %(total)d records to %(export_file)s.gz" % vars()
589        #except OSError, e:
590        #    retcode = -99
591        #    logger.info("zip failed with %s" % e)
592        logger.info(msg)
593        args = {'portal_status_message': msg}
594        #url = self.REQUEST.get('URL1') + '?' + urlencode(args)
595        url = self.REQUEST.get('URL2')
596        return self.REQUEST.RESPONSE.redirect(url)
597    ###)
598
599
600
601    def searchAndSetRecord(self, uid, student_id,prefix):
602
603        # The following line must be activated after resetting the
604        # the portal_pins table. This is to avoid duplicate entries
605        # and disable duplicate payments.
606
607        #student_id = student_id.upper()
608
609        #records = self.searchResults(student = student_id)
610        #if len(records) > 0 and prefix in ('CLR','APP'):
611        #    for r in records:
612        #        if r.pin != uid and r.prefix_batch.startswith(prefix):
613        #            return -2
614        records = self.searchResults({"%s" % self.key : uid})
615        if len(records) > 1:
616            # Can not happen, but anyway...
617            raise ValueError("More than one record with uid %s" % uid)
618        if len(records) == 0:
619            return -1,None
620        record = records[0]
621        if record.student == "":
622            record_data = {}
623            for field in self.schema() + self.indexes():
624                record_data[field] = getattr(record, field)
625            # Add the updated data:
626            record_data['student'] = student_id
627            try:
628                self.catalog_object(dict2ob(record_data), uid)
629                return 1,record
630            except ConflictError:
631                return 2,record
632        if record.student.upper() != student_id.upper():
633            return 0,record
634        if record.student.upper() == student_id.upper():
635            return 2,record
636        return -3,record
InitializeClass(PinTable)  # activate the ClassSecurityInfo declarations
###)
639
class PumeResultsTable(WAeUPTable): ###(
    """Catalog of PUME results, keyed by the JAMB registration number."""

    meta_type = 'WAeUP PumeResults Tool'
    name = "portal_pumeresults"
    key = "jamb_reg_no"
    def __init__(self,name=None):
        # identity test for the None sentinel instead of '==' comparison
        if name is None:
            name = self.name
        WAeUPTable.__init__(self, name)


InitializeClass(PumeResultsTable)

###)
654
class ApplicantsCatalog(WAeUPTable): ###(
    """Catalog of applicants, keyed by the registration number ('reg_no')."""

    meta_type = 'WAeUP Applicants Catalog'
    name = "applicants_catalog"
    key = "reg_no"
    security = ClassSecurityInfo()
    # historical explicit export column list, superseded by schema()-driven
    # export; kept for reference
    #export_keys = (
    #               "reg_no",
    #               "status",
    #               "lastname",
    #               "sex",
    #               "date_of_birth",
    #               "lga",
    #               "email",
    #               "phone",
    #               "passport",
    #               "entry_mode",
    #               "pin",
    #               "screening_type",
    #               "registration_date",
    #               "testdate",
    #               "application_date",
    #               "screening_date",
    #               "faculty",
    #               "department",
    #               "course1",
    #               "course2",
    #               "course3",
    #               "eng_score",
    #               "subj1",
    #               "subj1score",
    #               "subj2",
    #               "subj2score",
    #               "subj3",
    #               "subj3score",
    #               "aggregate",
    #               "course_admitted",
    #               )

    def __init__(self,name=None):
        if name ==  None:
            name = self.name
        WAeUPTable.__init__(self, name)
698
699    security.declareProtected(ModifyPortalContent,"new_importCSV")###(
700    def new_importCSV(self,filename="JAMB_data",
701                  schema_id="application",
702                  layout_id="import_application",
703                  mode='add'):
704        """ import JAMB data """
705        current = DateTime.DateTime().strftime("%d-%m-%y_%H_%M_%S")
706        pm = self.portal_membership
707        member = pm.getAuthenticatedMember()
708        logger = logging.getLogger('WAeUPTables.ApplicantsCatalog.importCSV')
709        lock_fn = "%s/import/%s_import_lock" % (i_home,filename)
710        import_fn = "%s/import/%s.csv" % (i_home,filename)
711        if mode not in ('add','edit'):
712            logger.info("invalid mode: %s" % mode)
713        if os.path.exists(lock_fn):
714            logger.info("import of %(import_fn)s already in progress" % vars())
715            return
716        lock_file = open(lock_fn,"w")
717        lock_file.write("%(current)s \n" % vars())
718        lock_file.close()
719        invalid_fn = "%s/import/%s_not_imported%s.csv" % (i_home,filename,current)
720        duplicate_fn = "%s/import/%s_not_imported%s.csv" % (i_home,filename,current)
721        stool = getToolByName(self, 'portal_schemas')
722        ltool = getToolByName(self, 'portal_layouts')
723        schema = stool._getOb(schema_id)
724        if schema is None:
725            em = 'No such schema %s' % schema_id
726            logger.error(em)
727            return
728        for postfix in ('_import',''):
729            layout_name = "%(layout_id)s%(postfix)s" % vars()
730            if hasattr(ltool,layout_name):
731                break
732        layout = ltool._getOb(layout_name)
733        if layout is None:
734            em = 'No such layout %s' % layout_id
735            logger.error(em)
736            return
737        try:
738            csv_file = csv.DictReader(open(import_fn,"rb"))
739        except:
740            em = 'Error reading %s.csv' % filename
741            logger.error(em)
742            return
743        d = self._import_new(csv_items,schema,layout,mode,logger)
744        imported = []
745        edited = []
746        duplicates = []
747        not_found = []
748        if len(d['valid_records']) > 0:
749            for record in d['valid_records']:
750                #import pdb;pdb.set_trace()
751                if mode == "add":
752                    try:
753                        self.addRecord(**dict(record.items()))
754                        imported.append(**dict(record.items()))
755                        logger.info("added %s" % record.items())
756                    except ValueError:
757                        dupplicate.append(**dict(record.items()))
758                        logger.info("duplicate %s" % record.items())
759                elif mode == "edit":
760                    try:
761                        self.modifyRecord(**dict(record.items()))
762                        edited.append(**dict(record.items()))
763                        logger.info("edited %s" % record.items())
764                    except KeyError:
765                        not_found.append(**dict(record.items()))
766                        logger.info("not found %s" % record.items())
767        invalid = d['invalid_records']
768        for itype in ("imported","edited","not_found","duplicate","invalid"):
769            outlist = locals[itype]
770            if len(outlist):
771                d = {}
772                for k in outlist[0].keys():
773                    d[k] = k
774                outlist[0] = d
775                outfile = open("file_name_%s" % itype,'w')
776                csv.DictWriter(outfile,outlist[0].keys()).writerows(outlist)
777                logger.info("wrote %(itype)s records to %(, written to %(not_imported_fn)s" % d)
778###)
779
780    security.declareProtected(ModifyPortalContent,"importCSV")###(
781    def importCSV(self,filename="JAMB_data",
782                  schema_id="application",
783                  layout_id="application_pce",
784                  mode='add'):
785        """ import JAMB data """
786        stool = getToolByName(self, 'portal_schemas')
787        ltool = getToolByName(self, 'portal_layouts')
788        schema = stool._getOb(schema_id)
789        if schema is None:
790            em = 'No such schema %s' % schema_id
791            logger.error(em)
792            return
793        layout = ltool._getOb(layout_id)
794        if layout is None:
795            em = 'No such layout %s' % layout_id
796            logger.error(em)
797            return
798        logger = logging.getLogger('WAeUPTables.ApplicantsCatalog.importCSV')
799        d = self._import_old(filename,schema,layout,mode,logger)
800        if len(d['valid_records']) > 0:
801            for record in d['valid_records']:
802                #import pdb;pdb.set_trace()
803                if mode == "add":
804                    self.addRecord(**dict(record.items()))
805                    logger.info("added %s" % record.items())
806                elif mode == "edit":
807                    self.modifyRecord(**dict(record.items()))
808                    logger.info("edited %s" % record.items())
809                else:
810                    logger.info("invalid mode: %s" % mode)
811        logger.info("%(mode)sed %(imported)d records, invalid written to %(not_imported_fn)s" % d)
812    ###)
813
# Register the Zope 2 security declarations made on the class above.
InitializeClass(ApplicantsCatalog)

###)
817
818class StudentsCatalog(WAeUPTable): ###(
819    security = ClassSecurityInfo()
820
821    meta_type = 'WAeUP Students Catalog'
822    name = "students_catalog"
823    key = "id"
824    affected_types = {   ###(
825                      'StudentApplication':
826                      {'id': 'application',
827                       'fields':
828                       ('jamb_reg_no',
829                        'entry_mode',
830                        #'entry_level',
831                        'entry_session',
832                       )
833                      },
834                      'StudentClearance':
835                      {'id': 'clearance',
836                       'fields':
837                       ('matric_no',
838                        'lga',
839                       )
840                      },
841                      'StudentPersonal':
842                      {'id': 'personal',
843                       'fields':
844                       ('name',
845                        'sex',
846                        'perm_address',
847                        'email',
848                        'phone',
849                       )
850                      },
851                      'StudentStudyCourse':
852                      {'id': 'study_course',
853                       'fields':
854                       ('course', # study_course
855                        'faculty', # from certificate
856                        'department', # from certificate
857                        'end_level', # from certificate
858                        'level', # current_level
859                        'mode',  # current_mode
860                        'session', # current_session
861                        'verdict', # current_verdict
862                       )
863                      },
864                     }
865    ###)
866
867    def __init__(self,name=None):
868        if name ==  None:
869            name = self.name
870        WAeUPTable.__init__(self, name)
871        return
872
873    def manage_catalogClear(self, REQUEST=None, RESPONSE=None, URL1=None):
874        """ clears the whole enchilada """
875        self._catalog.clear()
876
877        if REQUEST and RESPONSE:
878            RESPONSE.redirect(
879              URL1 +
880              '/manage_catalogAdvanced?manage_tabs_message=Catalog%20Cleared')
881
882    def manage_catalogReindex(self, REQUEST, RESPONSE, URL1): ###(
883        """ clear the catalog, then re-index everything """
884
885        elapse = time.time()
886        c_elapse = time.clock()
887
888        pgthreshold = self._getProgressThreshold()
889        handler = (pgthreshold > 0) and ZLogHandler(pgthreshold) or None
890        self.refreshCatalog(clear=1, pghandler=handler)
891
892        elapse = time.time() - elapse
893        c_elapse = time.clock() - c_elapse
894
895        RESPONSE.redirect(
896            URL1 +
897            '/manage_catalogAdvanced?manage_tabs_message=' +
898            urllib.quote('Catalog Updated \n'
899                         'Total time: %s\n'
900                         'Total CPU time: %s' % (`elapse`, `c_elapse`)))
901    ###)
902
903    def fill_certificates_dict(self): ###(
904        "return certificate data in  dict"
905        certificates_brains = self.portal_catalog(portal_type ='Certificate')
906        d = {}
907        for cb in certificates_brains:
908            certificate_doc = cb.getObject().getContent()
909            cb_path = cb.getPath().split('/')
910            ld = {}
911            ld['faculty'] = cb_path[-4]
912            ld['department'] = cb_path[-3]
913            ld['end_level'] = getattr(certificate_doc,'end_level','999')
914            d[cb.getId] = ld
915        return d
916    ###)
917
918    def get_from_doc_department(self,doc,cached_data={}): ###(
919        "return the students department"
920        if doc is None:
921            return None
922        if cached_data.has_key(doc.study_course):
923            return cached_data[doc.study_course]['department']
924        certificate_res = self.portal_catalog(id = doc.study_course)
925        if len(certificate_res) != 1:
926            return None
927        return certificate_res[0].getPath().split('/')[-3]
928
929    def get_from_doc_faculty(self,doc,cached_data={}):
930        "return the students faculty"
931        if doc is None:
932            return None
933        if cached_data.has_key(doc.study_course):
934            return cached_data[doc.study_course]['faculty']
935        certificate_res = self.portal_catalog(id = doc.study_course)
936        if len(certificate_res) != 1:
937            return None
938        return certificate_res[0].getPath().split('/')[-4]
939
940    def get_from_doc_end_level(self,doc,cached_data={}):
941        "return the students end_level"
942        if doc is None:
943            return None
944        if cached_data.has_key(doc.study_course):
945            return cached_data[doc.study_course]['end_level']
946        certificate_res = self.portal_catalog(id = doc.study_course)
947        if len(certificate_res) != 1:
948            return None
949        return getattr(certificate_res[0].getObject().getContent(),'end_level','unknown')
950
951    def get_from_doc_level(self,doc,cached_data={}):
952        "return the students level"
953        if doc is None:
954            return None
955        return getattr(doc,'current_level',None)
956
957    def get_from_doc_mode(self,doc,cached_data={}):
958        "return the students mode"
959        if doc is None:
960            return None
961        cm = getattr(doc,'current_mode',None)
962        return cm
963
964
965    def get_from_doc_session(self,doc,cached_data={}):
966        "return the students current_session"
967        if doc is None:
968            return None
969        return getattr(doc,'current_session',None)
970
971    def get_from_doc_entry_session(self,doc,cached_data={}):
972        "return the students entry_session"
973        if doc is None:
974            return None
975        es = getattr(doc,'entry_session',None)
976        if es is not None and len(es) == 2:
977            return es
978        try:
979            digit = int(doc.jamb_reg_no[0])
980        except:
981            return "-1"
982        if digit < 8:
983            return "0%c" % doc.jamb_reg_no[0]
984        return "9%c" % doc.jamb_reg_no[0]
985
986    def get_from_doc_course(self,doc,cached_data={}):
987        "return the students study_course"
988        if doc is None:
989            return None
990        return getattr(doc,'study_course',None)
991
992    def get_from_doc_name(self,doc,cached_data={}):
993        "return the students name from the personal"
994        if doc is None:
995            return None
996        return "%s %s %s" % (doc.firstname,doc.middlename,doc.lastname)
997
998    def get_from_doc_verdict(self,doc,cached_data={}):
999        "return the students study_course"
1000        if doc is None:
1001            return None
1002        return getattr(doc,'current_verdict',None)
1003    ###)
1004
1005    def reindexIndex(self, name, REQUEST,pghandler=None): ###(
1006        if isinstance(name, str):
1007            name = (name,)
1008        reindextypes = {}
1009        reindex_special = []
1010        for n in name:
1011            if n in ("review_state","registered_courses"):
1012                reindex_special.append(n)
1013            else:
1014                for pt in self.affected_types.keys():
1015                    if n in self.affected_types[pt]['fields']:
1016                        if reindextypes.has_key(pt):
1017                            reindextypes[pt].append(n)
1018                        else:
1019                            reindextypes[pt]= [n]
1020                        break
1021        cached_data = {}
1022        if set(name).intersection(set(('faculty','department','end_level'))):
1023            cached_data = self.fill_certificates_dict()
1024        students = self.portal_catalog(portal_type="Student")
1025        if hasattr(self,'portal_catalog_real'):
1026            aq_portal = self.portal_catalog_real.evalAdvancedQuery
1027        else:
1028            aq_portal = self.portal_catalog.evalAdvancedQuery
1029        num_objects = len(students)
1030        if pghandler:
1031            pghandler.init('Refreshing catalog: %s' % self.absolute_url(1), num_objects)
1032        noattr = set(('StudentClearance','StudentPersonal')) & set(reindextypes.keys())
1033        #import pdb;pdb.set_trace()
1034        for i in xrange(num_objects):
1035            if pghandler: pghandler.report(i)
1036            student_brain = students[i]
1037            student_object = student_brain.getObject()
1038            # query = Eq('path',student_brain.getPath())
1039            # sub_brains_list = aq_portal(query)
1040            # sub_brains = {}
1041            # for sub_brain in sub_brains_list:
1042            #     sub_brains[sub_brain.portal_type] = sub_brain
1043            # student_path = student_brain.getPath()
1044            data = {}
1045            modified = False
1046            sid = data['id'] = student_brain.getId
1047            if reindex_special and 'review_state' in reindex_special:
1048                modified = True
1049                data['review_state'] = student_brain.review_state
1050            sub_objects = False
1051            for pt in reindextypes.keys():
1052                modified = True
1053                try:
1054                    doc = getattr(student_object,self.affected_types[pt]['id']).getContent()
1055                    #doc = sub_brains[pt].getObject().getContent()
1056                    # path = "%s/%s" % (student_path,self.affected_types[pt]['id'])
1057                    # doc = self.unrestrictedTraverse(path).getContent()
1058                    sub_objects = True
1059                except:
1060                    continue
1061                for field in set(name).intersection(self.affected_types[pt]['fields']):
1062                    if hasattr(self,'get_from_doc_%s' % field):
1063                        data[field] = getattr(self,'get_from_doc_%s' % field)(doc,
1064                                                                              cached_data=cached_data)
1065                    else:
1066                        data[field] = getattr(doc,field)
1067            if not sub_objects and noattr:
1068                import_res = self.returning_import(id = sid)
1069                if not import_res:
1070                    continue
1071                import_record = import_res[0]
1072                data['matric_no'] = import_record.matric_no
1073                data['sex'] = import_record.Sex == 'F'
1074                data['name'] = "%s %s %s" % (import_record.Firstname,
1075                                             import_record.Middlename,
1076                                             import_record.Lastname)
1077                data['jamb_reg_no'] = import_record.Entryregno
1078            #if reindex_special and 'registered_courses' in reindex_special:
1079            #    try:
1080            #        study_course = getattr(student_object,"study_course")
1081            #        level_ids = study_course.objectIds()
1082            #    except:
1083            #        continue
1084            #    if not level_ids:
1085            #        continue
1086            #    modified = True
1087            #    level_ids.sort()
1088            #    course_ids = getattr(study_course,level_ids[-1]).objectIds()
1089            #    courses = []
1090            #    for c in course_ids:
1091            #        if c.endswith('_co'):
1092            #            courses.append(c[:-3])
1093            #        else:
1094            #            courses.append(c)
1095            #    data['registered_courses'] = courses
1096            if modified:
1097                self.modifyRecord(**data)
1098        if pghandler: pghandler.finish()
1099    ###)
1100
1101    def refreshCatalog(self, clear=0, pghandler=None): ###(
1102        """ re-index everything we can find """
1103        students_folder = self.portal_url.getPortalObject().campus.students
1104        if clear:
1105            self._catalog.clear()
1106        students = self.portal_catalog(portal_type="Student")
1107        num_objects = len(students)
1108        cached_data = self.fill_certificates_dict()
1109        if pghandler:
1110            pghandler.init('Refreshing catalog: %s' % self.absolute_url(1), num_objects)
1111        for i in xrange(num_objects):
1112            if pghandler: pghandler.report(i)
1113            student_brain = students[i]
1114            spath = student_brain.getPath()
1115            student_object = student_brain.getObject()
1116            data = {}
1117            sid = data['id'] = student_brain.getId
1118            data['review_state'] = student_brain.review_state
1119            sub_objects = False
1120            for pt in self.affected_types.keys():
1121                modified = True
1122                try:
1123                    doc = getattr(student_object,self.affected_types[pt]['id']).getContent()
1124                    sub_objects = True
1125                except:
1126                    #from pdb import set_trace;set_trace()
1127                    continue
1128                for field in self.affected_types[pt]['fields']:
1129                    if hasattr(self,'get_from_doc_%s' % field):
1130                        data[field] = getattr(self,'get_from_doc_%s' % field)(doc,
1131                                                                              cached_data=cached_data)
1132                    else:
1133                        data[field] = getattr(doc,field,None)
1134            if not sub_objects:
1135                import_res = self.returning_import(id = sid)
1136                if not import_res:
1137                    continue
1138                import_record = import_res[0]
1139                data['matric_no'] = import_record.matric_no
1140                data['sex'] = import_record.Sex == 'F'
1141                data['name'] = "%s %s %s" % (import_record.Firstname,
1142                                             import_record.Middlename,
1143                                             import_record.Lastname)
1144                data['jamb_reg_no'] = import_record.Entryregno
1145            self.addRecord(**data)
1146        if pghandler: pghandler.finish()
1147    ###)
1148
1149    security.declarePrivate('notify_event_listener') ###(
1150    def notify_event_listener(self,event_type,object,infos):
1151        "listen for events"
1152        if not infos.has_key('rpath'):
1153            return
1154        pt = getattr(object,'portal_type',None)
1155        mt = getattr(object,'meta_type',None)
1156        students_catalog = self
1157        data = {}
1158        if pt == 'Student' and\
1159           mt == 'CPS Proxy Folder' and\
1160           event_type.startswith('workflow'):
1161            data['id'] = object.getId()
1162            data['review_state'] = self.portal_workflow.getInfoFor(object,'review_state',None)
1163            students_catalog.modifyRecord(**data)
1164            return
1165        rpl = infos['rpath'].split('/')
1166        if pt == 'Student' and mt == 'CPS Proxy Folder':
1167            student_id = object.id
1168            if event_type == "sys_add_object":
1169                try:
1170                    self.addRecord(id = student_id)
1171                except ValueError:
1172                    pass
1173                return
1174            elif event_type == 'sys_del_object':
1175                self.deleteRecord(student_id)
1176        if pt not in self.affected_types.keys():
1177            return
1178        if event_type not in ('sys_modify_object'):
1179            return
1180        if mt == 'CPS Proxy Folder':
1181            return
1182        for field in self.affected_types[pt]['fields']:
1183            if hasattr(self,'get_from_doc_%s' % field):
1184                data[field] = getattr(self,'get_from_doc_%s' % field)(object)
1185            else:
1186                data[field] = getattr(object,field)
1187        data['id'] = rpl[2]
1188        self.modifyRecord(**data)
1189    ###)
1190
1191
1192InitializeClass(StudentsCatalog)
1193
1194###)
1195
class CoursesCatalog(WAeUPTable): ###(
    """Catalog of courses, keyed by the course code.

    Each record also carries the faculty and department derived from the
    course object's position below the 'academics' folder in the portal.
    Kept in sync with Course content objects via notify_event_listener.
    """
    security = ClassSecurityInfo()

    meta_type = 'WAeUP Courses Catalog'
    name =  "courses_catalog"
    key = "code"
    def __init__(self,name=None):
        """Create the catalog under *name* (defaults to the class name)."""
        if name ==  None:
            name =  self.name
        WAeUPTable.__init__(self, name)

    def manage_catalogReindex(self, REQUEST, RESPONSE, URL1): ###(
        """ clear the catalog, then re-index everything """

        # Wall-clock and CPU timing, reported to the ZMI after the refresh.
        elapse = time.time()
        c_elapse = time.clock()

        pgthreshold = self._getProgressThreshold()
        handler = (pgthreshold > 0) and ZLogHandler(pgthreshold) or None
        self.refreshCatalog(clear=1, pghandler=handler)

        elapse = time.time() - elapse
        c_elapse = time.clock() - c_elapse

        RESPONSE.redirect(
            URL1 +
            '/manage_catalogAdvanced?manage_tabs_message=' +
            urllib.quote('Catalog Updated \n'
                         'Total time: %s\n'
                         'Total CPU time: %s' % (`elapse`, `c_elapse`)))
    ###)

    def reindexIndex(self, name, REQUEST,pghandler=None): ###(
        """Re-index the named field(s) for every Course in the portal."""
        if isinstance(name, str):
            name = (name,)
        courses = self.portal_catalog(portal_type="Course")
        num_objects = len(courses)
        if pghandler:
            pghandler.init('Refreshing catalog: %s' % self.absolute_url(1), num_objects)
        for i in xrange(num_objects):
            if pghandler: pghandler.report(i)
            course_brain = courses[i]
            course_object = course_brain.getObject()
            pl = course_brain.getPath().split('/')
            data = {}
            cid = data[self.key] = course_brain.getId
            # Path layout assumption: .../<faculty>/<department>/<courses>/<course>
            # -- TODO confirm; refreshCatalog below anchors on 'academics' instead.
            data['faculty'] = pl[-4]
            data['department'] = pl[-3]
            doc = course_object.getContent()
            for field in name:
                # key/faculty/department are computed above, not read
                # from the document.
                if field not in (self.key,'faculty','department'):
                    data[field] = getattr(doc,field)
            self.modifyRecord(**data)
        if pghandler: pghandler.finish()
    ###)

    def refreshCatalog(self, clear=0, pghandler=None): ###(
        """ re-index everything we can find """
        if clear:
            self._catalog.clear()
        courses = self.portal_catalog(portal_type="Course")
        num_objects = len(courses)
        if pghandler:
            pghandler.init('Refreshing catalog: %s' % self.absolute_url(1), num_objects)
        #from pdb import set_trace;set_trace()
        for i in xrange(num_objects):
            if pghandler: pghandler.report(i)
            course_brain = courses[i]
            course_doc = course_brain.getObject().getContent()
            pl = course_brain.getPath().split('/')
            data = {}
            # Copy every schema field from the course document.
            for field in self.schema():
                data[field] = getattr(course_doc,field,None)
            data[self.key] = course_brain.getId
            # Faculty and department are the first two path segments
            # below the 'academics' folder.
            ai = pl.index('academics')
            data['faculty'] = pl[ai +1]
            data['department'] = pl[ai +2]
            if clear:
                self.addRecord(**data)
            else:
                self.modifyRecord(**data)
        if pghandler: pghandler.finish()
    ###)

    security.declarePrivate('notify_event_listener') ###(
    def notify_event_listener(self,event_type,object,infos):
        "listen for events"
        if not infos.has_key('rpath'):
            return
        pt = getattr(object,'portal_type',None)
        mt = getattr(object,'meta_type',None)
        if pt != 'Course':
            return
        data = {}
        rpl = infos['rpath'].split('/')
        if event_type not in ("sys_add_object","sys_modify_object","sys_del_object"):
            return
        course_id = object.getId()
        data[self.key] = course_id
        # Creation is signalled on the proxy folder: add a stub record,
        # then fill it from the document content if available.
        if event_type == "sys_add_object" and mt == 'CPS Proxy Folder':
            try:
                self.addRecord(**data)
            except ValueError:
                # Record already exists.
                return
            course_id = object.getId()
            doc = object.getContent()
            if doc is None:
                return
            for field in self.schema():
                data[field] = getattr(doc,field,None)
            data[self.key] = course_id
            ai = rpl.index('academics')
            data['faculty'] = rpl[ai +1]
            data['department'] = rpl[ai +2]
            self.modifyRecord(**data)
            return
        if event_type == "sys_del_object":
            self.deleteRecord(course_id)
            return
        # Modification is signalled on the real object (meta_type 'Course');
        # its id is the id of the parent proxy folder.
        if event_type == "sys_modify_object" and mt == 'Course':
            #from pdb import set_trace;set_trace()
            for field in self.schema():
                data[field] = getattr(object,field,None)
            course_id = object.aq_parent.getId()
            data[self.key] = course_id
            ai = rpl.index('academics')
            data['faculty'] = rpl[ai +1]
            data['department'] = rpl[ai +2]
            self.modifyRecord(**data)
    ###)


InitializeClass(CoursesCatalog)
###)
1330
class CourseResults(WAeUPTable): ###(
    """Catalog of course results.

    One record per student/level/course combination; the record key is
    '<student_id>|<level_id>|<course_id>'.
    """
    security = ClassSecurityInfo()

    meta_type = 'WAeUP Results Catalog'
    name = "course_results"
    key = "key" #student_id + level + course_id

    def __init__(self,name=None):
        """Create the catalog under *name* (defaults to the class name)."""
        if name is None:
            name = self.name
        WAeUPTable.__init__(self, name)
        self._queue = []

    def addMultipleRecords(self, records): ###(
        """add many records

        Each record must contain student_id, level_id and course_id, from
        which the unique key is built.  Raises ValueError on the first
        record whose key already exists.  Returns the key of the last
        record added (None for empty input).
        """
        added_keys = []
        # BUGFIX: initialize uid so empty input returns None instead of
        # raising NameError at the final return.
        uid = None
        for data in records:
            uid = key = "%(student_id)s|%(level_id)s|%(course_id)s" % data
            data[self.key] = uid
            res = self.searchResults({self.key : uid})
            if len(res) > 0:
                # BUGFIX: the old message claimed 'More than one record',
                # but this fires as soon as a single duplicate exists.
                raise ValueError("Record with uid %s already exists" % uid)
            # BUGFIX: added_keys was created but never filled.
            added_keys.append(uid)
            self.catalog_object(dict2ob(data), uid=uid)
        return uid
    ###)

    def deleteResultsHere(self,level_id,student_id): ###(
        """Delete all result records of *student_id* at *level_id*."""
        query = Eq('student_id',student_id) & Eq('level_id', level_id)
        course_results = self.course_results.evalAdvancedQuery(query)
        for result in course_results:
            self.deleteRecord(result.key)
    ###)

    def moveResultsHere(self,level,student_id): ###(
        """Move course sub-objects of the *level* folder into this catalog.

        Each course object becomes a catalog record (unless a record for
        that course already exists); afterwards all course objects are
        deleted from the level folder.  A trailing '_co' in the object id
        marks a carry-over course.
        """
        level_id = level.getId()
        query = Eq('student_id',student_id) & Eq('level_id', level_id)
        course_results = self.course_results.evalAdvancedQuery(query)
        existing_courses = [cr.code for cr in course_results]
        to_delete = []
        for code,obj in level.objectItems():
            to_delete.append(code)
            carry_over = False
            if code.endswith('_co'):
                carry_over = True
                code  = code[:-3]
            if code in existing_courses:
                continue
            course_result_doc = obj.getContent()
            data = {}
            course_id = code
            for field in self.schema():
                data[field] = getattr(course_result_doc,field,'')
            # key is built from the locals student_id/level_id/course_id.
            data['key'] = key = "%(student_id)s|%(level_id)s|%(course_id)s" % vars()
            data['student_id'] = student_id
            data['level_id'] = level_id
            session_id = self.getLevelSession(level.getContent(),student_id,level_id)
            data['session_id'] = session_id
            data['code'] = course_id
            data['carry_over'] = carry_over
            self.catalog_object(dict2ob(data), uid=key)
        level.manage_delObjects(to_delete)
    ###)

    def getCourses(self,student_id,level_id): ###(
        """Return (total_credits, gpa, carry_overs, normal1, normal2, normal3).

        gpa is the credit-weighted sum of grade weights -- it is NOT yet
        divided by total_credits; presumably the caller does that.
        normal1/2/3 hold first-semester, second-semester and
        other-semester courses; carry-overs are listed separately.
        """
        query = Eq('student_id',student_id) & Eq('level_id', level_id)
        course_results = self.course_results.evalAdvancedQuery(query)
        carry_overs = []
        normal1 = []
        normal2 = []
        normal3 = []
        total_credits = 0
        gpa = 0
        for brain in course_results:
            d = {}

            for field in self.schema():
                d[field] = getattr(brain,field,'')

            d['weight'] = ''
            d['grade'] = ''
            d['score'] = ''

            if str(brain.credits).isdigit():
                credits = int(brain.credits)
                total_credits += credits
                score = getattr(brain,'score',0)
                if score and str(score).isdigit() and int(score) > 0:
                    score = int(score)
                    grade,weight = self.getGradesFromScore(score)
                    gpa += weight * credits
                    d['weight'] = weight
                    d['grade'] = grade
                    d['score'] = score
            # core_or_elective: True -> Core, False -> Elective,
            # anything else (None, '') -> left empty.
            d['coe'] = ''
            if brain.core_or_elective:
                d['coe'] = 'Core'
            elif brain.core_or_elective == False:
                d['coe'] = 'Elective'
            code = d['id'] = brain.code
            d['code'] = code
            res = self.courses_catalog.evalAdvancedQuery(Eq('code',code))
            if res:
                course = res[0]
                d['title'] = course.title
                # The courses_catalog contains strings and integers in its semester field.
                # Maybe this can be fixed by reindexing the catalog. The schema of course says 'CPS Int Field'.
                d['semester'] = str(course.semester)
            else:
                d['title'] = "Course has been removed from course list"
                d['semester'] = ''
            if brain.carry_over:
                d['coe'] = 'CO'
                carry_overs.append(d)
            elif d['semester'] == '1':
                normal1.append(d)
            elif d['semester'] == '2':
                normal2.append(d)
            else:
                normal3.append(d)
        carry_overs.sort(cmp=lambda x,y: cmp("%(semester)s%(code)s" % x,
                                             "%(semester)s%(code)s" % y))
        return total_credits,gpa,carry_overs,normal1,normal2,normal3
    ###)

InitializeClass(CourseResults)
###)
1463
class OnlinePaymentsImport(WAeUPTable): ###(
    """Table of imported online payment transactions, keyed by order_id."""

    meta_type = 'WAeUP Online Payment Transactions'
    name = "online_payments_import"
    key = "order_id"
    def __init__(self,name=None):
        """Create the table under *name* (defaults to the class name)."""
        # Idiom fix: compare against None with 'is', not '=='.
        if name is None:
            name = self.name
        WAeUPTable.__init__(self, name)


InitializeClass(OnlinePaymentsImport)
###)
1477
class ReturningImport(WAeUPTable): ###(
    """Import table for returning students, keyed by matric_no."""

    meta_type = 'Returning Import Table'
    name = "returning_import"
    key = "matric_no"
    def __init__(self,name=None):
        """Create the table under *name* (defaults to the class name)."""
        # Idiom fix: compare against None with 'is', not '=='.
        if name is None:
            name = self.name
        WAeUPTable.__init__(self, name)


InitializeClass(ReturningImport)
###)
1491
class ResultsImport(WAeUPTable): ###(
    """Import table for results, keyed by a composite 'key' field."""

    meta_type = 'Results Import Table'
    name = "results_import"
    key = "key"
    def __init__(self,name=None):
        """Create the table under *name* (defaults to the class name)."""
        # Idiom fix: compare against None with 'is', not '=='.
        if name is None:
            name = self.name
        WAeUPTable.__init__(self, name)


InitializeClass(ResultsImport)

###)
1506
class PaymentsCatalog(WAeUPTable): ###(
    """Catalog of payment records, keyed by order_id.

    Kept in sync with 'Payment' content objects through
    notify_event_listener.
    """
    security = ClassSecurityInfo()

    meta_type = 'WAeUP Payments Catalog'
    name = "payments_catalog"
    key = "order_id"
    def __init__(self,name=None):
        """Create the catalog under *name* (defaults to the class name)."""
        if name is None:
            name = self.name
        WAeUPTable.__init__(self, name)


    security.declarePrivate('notify_event_listener') ###(
    def notify_event_listener(self,event_type,object,infos):
        "listen for events"
        if not infos.has_key('rpath'):
            return
        pt = getattr(object,'portal_type',None)
        mt = getattr(object,'meta_type',None)
        if pt != 'Payment':
            return
        if mt == 'CPS Proxy Folder':
            return # is handled only for the real object
        if event_type == 'sys_del_object':
            self.deleteRecord(object.order_id)
            return
        # BUGFIX: the original wrote ('sys_modify_object') -- a plain string,
        # not a tuple -- which turned this membership test into a substring
        # check.  A one-element tuple restores the intended semantics.
        if event_type not in ('sys_modify_object',):
            return
        data = {}
        for field in self.schema():
            data[field] = getattr(object,field,'')
        rpl = infos['rpath'].split('/')
        # rpath layout assumption: the student id sits four segments from
        # the end of the payment's path -- TODO confirm against callers.
        student_id = rpl[-4]
        data['student_id'] = student_id
        # Update the existing record if there is one; otherwise add it.
        try:
            self.modifyRecord(**data)
        except KeyError:
            try:
                self.addRecord(**data)
            except:
                logger = logging.getLogger('WAeUPTables.PaymentsCatalog.notify_event_listener')
                logger.info("could not add or modify entry for %(student_id)s with %(order_id)s" % data)
    ###)


InitializeClass(PaymentsCatalog)

###)
1561
# BBB (backward compatibility): alias under the historical misspelled
# name so existing references to 'AccomodationTable' keep working.
AccomodationTable = AccommodationTable
# Note: See TracBrowser for help on using the repository browser.