source: WAeUP_SRP/base/WAeUPTables.py @ 3016

Last change on this file since 3016 was 2976, checked in by Henrik Bettermann, 17 years ago

dito

  • Property svn:keywords set to Id
File size: 58.0 KB
Line 
1#-*- mode: python; mode: fold -*-
2# (C) Copyright 2005 AixtraWare <http://aixtraware.de>
3# Author: Joachim Schmitz <js@aixtraware.de>
4#
5# This program is free software; you can redistribute it and/or modify
6# it under the terms of the GNU General Public License version 2 as published
7# by the Free Software Foundation.
8#
9# This program is distributed in the hope that it will be useful,
10# but WITHOUT ANY WARRANTY; without even the implied warranty of
11# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
12# GNU General Public License for more details.
13#
14# You should have received a copy of the GNU General Public License
15# along with this program; if not, write to the Free Software
16# Foundation, Inc., 59 Temple Place - Suite 330, Boston, MA
17# 02111-1307, USA.
18#
19# $Id: WAeUPTables.py 2976 2008-01-03 17:22:08Z henrik $
20
21from zope.interface import implements
22from Globals import InitializeClass
23from Products.ZCatalog.ZCatalog import ZCatalog
24from Products.ZCatalog.ProgressHandler import ZLogHandler
25from AccessControl import ClassSecurityInfo
26from Products.CMFCore.permissions import ModifyPortalContent
27from Products.CMFCore.utils import getToolByName
28from Products.CMFCore.CatalogTool import CatalogTool
29from Products.CPSSchemas.StorageAdapter import MappingStorageAdapter
30from Products.CPSSchemas.DataStructure import DataStructure
31from Products.CPSSchemas.DataModel import DataModel
32from Products.AdvancedQuery import Eq, Between, Le,In
33import urllib
34import DateTime,time
35import csv,re
36import logging
37import Globals
38p_home = Globals.package_home(globals())
39i_home = Globals.INSTANCE_HOME
40
41ADDING_SHEDULED = "adding_sheduled"
42OBJECT_CREATED = "object_created"
43NOT_OCCUPIED = 'not_occupied'
44
45from interfaces import IWAeUPTable
46
class AttributeHolder(object):
    """Bare container object; attributes are attached dynamically.

    Used by dict2ob() to turn a mapping into an attribute-bearing
    object, which is what ZCatalog.catalog_object expects.
    """
    pass
49
def dict2ob(mapping):
    """Return an AttributeHolder whose attributes mirror *mapping*'s items.

    ZCatalog.catalog_object indexes attribute values, not mapping keys,
    so record dicts are converted through this helper before cataloging.
    (The parameter was previously named ``dict``, shadowing the builtin;
    all call sites pass it positionally, so the rename is safe.)
    """
    ob = AttributeHolder()
    for key, value in mapping.items():
        setattr(ob, key, value)
    return ob
55
class WAeUPTable(ZCatalog): ###(
    """Base class for all WAeUP catalog tables.

    A WAeUPTable is a ZCatalog whose records are identified by a single
    unique key field; subclasses set the class attributes ``name``
    (portal id), ``key`` (the unique field) and ``meta_type``.
    """

    implements(IWAeUPTable)
    security = ClassSecurityInfo()
    meta_type = None

    def __init__(self, name=None):
        if name is None:
            name = self.name
        ZCatalog.__init__(self, name)

    def refreshCatalog(self, clear=0, pghandler=None): ###(
        """don't refresh for a normal table"""
        if self.REQUEST and self.REQUEST.RESPONSE:
            # BUGFIX: URL1 was referenced as a bare, undefined name here
            # (NameError); take it from the request instead.
            self.REQUEST.RESPONSE.redirect(
              self.REQUEST.get('URL1') +
              '/manage_catalogAdvanced?manage_tabs_message=Catalog%20refresh%20not%20implemented')
    ###)

    def manage_catalogClear(self, REQUEST=None, RESPONSE=None, URL1=None): ###(
        """clears the whole enchilada"""
        self._catalog.clear()
        if REQUEST and RESPONSE:
            RESPONSE.redirect(
              URL1 +
              '/manage_catalogAdvanced?manage_tabs_message=Catalog%20cleared')
    ###)

    def record2dict(self, fields, record): ###(
        """Return a dict holding *fields* of catalog *record*.

        'sex' is rendered as 'F' (truthy) / 'M' (falsy); 'lga' and 'aos'
        are translated through the portal vocabularies; any other empty
        value becomes ''.
        """
        d = {}
        for key in fields:
            v = getattr(record, key, None)
            if key == 'sex':
                if v:
                    v = 'F'
                else:
                    v = 'M'
                d[key] = v
            elif v:
                if key == 'lga':
                    v = self.portal_vocabularies.local_gov_areas.get(v)
                elif key == 'aos':
                    v = self.portal_vocabularies.aos.get(v)
                d[key] = v
            else:
                d[key] = ''
        return d
    ###)

    def addRecord(self, **data): ###(
        """Catalog a new record; its uid is data[self.key].

        Raises ValueError if a record with that uid already exists.
        """
        uid = data[self.key]
        res = self.searchResults({"%s" % self.key: uid})
        if len(res) > 0:
            raise ValueError("More than one record with uid %s" % uid)
        self.catalog_object(dict2ob(data), uid=uid)
        return uid
    ###)

    def deleteRecord(self, uid):
        """Remove the record cataloged under *uid*."""
        self.uncatalog_object(uid)

    def getRecordByKey(self, key):
        """Return the record whose key field equals *key*, or None."""
        if not key:
            return None
        res = self.evalAdvancedQuery(Eq(self.key, key))
        if res:
            return res[0]
        return None

    def searchAndSetRecord(self, **data):
        # BUGFIX: ``raise NotImplemented`` raised a TypeError at runtime
        # (NotImplemented is a singleton, not an exception class).
        raise NotImplementedError

    def modifyRecord(self, record=None, **data): ###(
        """Update an existing record in place; uid is data[self.key].

        Raises KeyError if no record exists, ValueError on duplicates.
        """
        uid = data[self.key]
        if record is None:
            records = self.searchResults({"%s" % self.key: uid})
            if len(records) > 1:
                # Can not happen, but anyway...
                raise ValueError("More than one record with uid %s" % uid)
            if len(records) == 0:
                raise KeyError("No record for uid %s" % uid)
            record = records[0]
        record_data = {}
        for field in self.schema() + self.indexes():
            record_data[field] = getattr(record, field)
        # Add the updated data:
        record_data.update(data)
        self.catalog_object(dict2ob(record_data), uid)
    ###)

    def reindexIndex(self, name, REQUEST, pghandler=None): ###(
        """Rebuild the named index(es) from the stored record metadata."""
        if isinstance(name, str):
            name = (name,)
        for p, rid in self._catalog.uids.items():
            metadata = self.getMetadataForRID(rid)
            record_data = {}
            for field in name:
                record_data[field] = metadata.get(field)
            uid = metadata.get(self.key)
            self.catalog_object(dict2ob(record_data), uid, idxs=name,
                                update_metadata=0)
    ###)

    security.declareProtected(ModifyPortalContent, "exportAllRecords") ###(
    def exportAllRecords(self):
        "export a WAeUPTable"
        fields = [field for field in self.schema()]
        format = ','.join(['"%%(%s)s"' % fn for fn in fields])
        # BUGFIX: the local list was named ``csv``, shadowing the module.
        csv_lines = []
        csv_lines.append(','.join(['"%s"' % fn for fn in fields]))
        for uid in self._catalog.uids:
            records = self.searchResults({"%s" % self.key: uid})
            if len(records) > 1:
                # Can not happen, but anyway...
                raise ValueError("More than one record with uid %s" % uid)
            if len(records) == 0:
                raise KeyError("No record for uid %s" % uid)
            rec = records[0]
            csv_lines.append(format % rec)
        current = DateTime.DateTime().strftime("%d-%m-%y_%H_%M_%S")
        # BUGFIX: close the file handle (was left dangling).
        out = open("%s/import/%s-%s.csv" % (i_home, self.getId(), current), "w+")
        try:
            out.write('\n'.join(csv_lines))
        finally:
            out.close()
    ###)

    security.declareProtected(ModifyPortalContent, "dumpAll")###(
    def dumpAll(self):
        """dump all data in the table to a csv"""
        member = self.portal_membership.getAuthenticatedMember()
        logger = logging.getLogger('WAeUPTables.WAeUPTable.dumpAll')
        current = DateTime.DateTime().strftime("%d-%m-%y_%H_%M_%S")
        export_file = "%s/export/%s_%s.csv" % (i_home, self.__name__, current,)
        lines = []
        if hasattr(self, "export_keys"):
            fields = self.export_keys
        else:
            fields = [f for f in self.schema()]
        headline = ','.join(fields)
        out = open(export_file, "wb")
        out.write(headline + '\n')
        out.close()
        out = open(export_file, "a")
        csv_writer = csv.DictWriter(out, fields,)
        records = self()
        nr2export = len(records)
        logger.info('%s starts dumping, %s records to export' % (member, nr2export))
        chunk = 2000
        total = 0
        start = DateTime.DateTime().timeTime()
        start_chunk = DateTime.DateTime().timeTime()
        for record in records:
            d = self.record2dict(fields, record)
            lines.append(d)
            total += 1
            # flush every ``chunk`` records, and once more at the end
            if total and not total % chunk or total == len(records):
                csv_writer.writerows(lines)
                anz = len(lines)
                logger.info("wrote %(anz)d  total written %(total)d" % vars())
                end_chunk = DateTime.DateTime().timeTime()
                duration = end_chunk - start_chunk
                per_record = duration / anz
                till_now = end_chunk - start
                avarage_per_record = till_now / total
                estimated_end = DateTime.DateTime(start + avarage_per_record * nr2export)
                estimated_end = estimated_end.strftime("%H:%M:%S")
                logger.info('%(duration)4.1f, %(per_record)4.3f,end %(estimated_end)s' % vars())
                start_chunk = DateTime.DateTime().timeTime()
                lines = []
        # BUGFIX: flush and close the export file (was never closed).
        out.close()
        end = DateTime.DateTime().timeTime()
        logger.info('total time %6.2f m' % ((end - start) / 60))
        msg = "wrote %(total)d records to %(export_file)s" % vars()
        logger.info(msg)
        url = self.REQUEST.get('URL2')
        return self.REQUEST.RESPONSE.redirect(url)
    ###)

    security.declarePrivate("_import_old") ###(
    def _import_old(self, filename, schema, layout, mode, logger):
        """import data from csv

        Validates every row of <i_home>/import/<filename>.csv against the
        layout's widget validators.  Valid rows are appended to an
        ..._imported<ts>.csv file, invalid rows (plus an Error column) to
        ..._not_imported<ts>.csv.  Returns a dict with the counts, the
        DataModels of valid/invalid rows and the file names used.
        """
        pm = self.portal_membership
        member = pm.getAuthenticatedMember()
        current = DateTime.DateTime().strftime("%d-%m-%y_%H_%M_%S")
        import_fn = "%s/import/%s.csv" % (i_home, filename)
        imported_fn = "%s/import/%s_imported%s.csv" % (i_home, filename, current)
        not_imported_fn = "%s/import/%s_not_imported%s.csv" % (i_home, filename, current)
        start = True
        tr_count = 1
        total_imported = 0
        total_not_imported = 0
        total = 0
        not_imported = []
        imported = []
        valid_records = []
        invalid_records = []
        d = {}
        d['mode'] = mode
        d['imported'] = total_imported
        d['not_imported'] = total_not_imported
        d['valid_records'] = valid_records
        d['invalid_records'] = invalid_records
        d['import_fn'] = import_fn
        d['imported_fn'] = imported_fn
        d['not_imported_fn'] = not_imported_fn
        if schema is None:
            em = 'No schema specified'
            logger.error(em)
            return d
        if layout is None:
            em = 'No layout specified'
            logger.error(em)
            return d
        validators = {}
        for widget in layout.keys():
            try:
                validators[widget] = layout[widget].validate
            except AttributeError:
                logger.info('%s has no validate attribute' % widget)
                return d
        try:
            items = csv.DictReader(open(import_fn, "rb"),
                                   dialect="excel",
                                   skipinitialspace=True)
        except Exception:
            em = 'Error reading %s.csv' % filename
            logger.error(em)
            return d
        for item in items:
            if start:
                # first data row: read the csv heading once to determine
                # which columns are to be imported
                start = False
                logger.info('%s starts import from %s.csv' % (member, filename))
                attrs = csv.reader(open("%s/import/%s.csv" % (i_home, filename), "rb"),
                                   dialect="excel",
                                   skipinitialspace=True).next()
                import_keys = [k for k in attrs if not (k.startswith('ignore') or k.isupper())]
                diff2layout = set(import_keys).difference(set(layout.keys()))
                if diff2layout:
                    em = "not ignorable key(s) %s found in heading" % diff2layout
                    logger.info(em)
                    return d
                s = ','.join(['"%s"' % fn for fn in import_keys])
                open(not_imported_fn, "a").write(s + ',"Error"' + '\n')
                open(imported_fn, "a").write(s + '\n')
                format = ','.join(['"%%(%s)s"' % fn for fn in import_keys])
                format_error = format + ',"%(Error)s"'
                adapters = [MappingStorageAdapter(schema, item)]
            dm = DataModel(item, adapters, context=self)
            ds = DataStructure(data=item, datamodel=dm)
            error_string = ""
            for k in import_keys:
                if not validators[k](ds, mode=mode):
                    error_string += " %s : %s" % (k, ds.getError(k))
            if error_string:
                item['Error'] = error_string
                invalid_records.append(dm)
                not_imported.append(format_error % item)
                total_not_imported += 1
            else:
                em = format % item
                valid_records.append(dm)
                imported.append(em)
                tr_count += 1
                total_imported += 1
            total += 1
        if len(imported) > 0:
            open(imported_fn, "a").write('\n'.join(imported))
        if len(not_imported) > 0:
            open(not_imported_fn, "a").write('\n'.join(not_imported))
        d['imported'] = total_imported
        d['not_imported'] = total_not_imported
        d['valid_records'] = valid_records
        d['invalid_records'] = invalid_records
        d['imported_fn'] = imported_fn
        d['not_imported_fn'] = not_imported_fn
        return d
    ###)

    security.declarePrivate("_import_new") ###(
    def _import_new(self, csv_items, schema, layout, mode, logger):
        """import data from a csv.DictReader instance

        Validates every row in *csv_items* against the layout's widget
        validators.  Returns a dict with the counts, the DataModels of
        valid rows, the invalid row dicts (with an 'Error' key) and a
        (currently unused) duplicate list.
        """
        start = True
        tr_count = 1
        total_imported = 0
        total_not_imported = 0
        total = 0
        not_imported = []
        valid_records = []
        invalid_records = []
        duplicate_records = []
        d = {}
        d['mode'] = mode
        d['valid_records'] = valid_records
        d['invalid_records'] = invalid_records
        # BUGFIX: duplicates were stored under the 'invalid_records' key,
        # clobbering the invalid list; give them their own key.
        d['duplicate_records'] = duplicate_records
        validators = {}
        for widget in layout.keys():
            try:
                validators[widget] = layout[widget].validate
            except AttributeError:
                logger.info('%s has no validate attribute' % widget)
                return d
        import_keys = []
        for item in csv_items:
            if start:
                start = False
                logger.info('import from csv.DictReader starts')
                # BUGFIX: the heading was re-read via an undefined name
                # ``filename`` (NameError); take it from the reader.
                attrs = csv_items.fieldnames
                import_keys = [k for k in attrs
                               if not (k.startswith('ignore') or k.isupper())]
                diff2layout = set(import_keys).difference(set(layout.keys()))
                if diff2layout:
                    em = "not ignorable key(s) %s found in heading" % diff2layout
                    logger.info(em)
                    return d
                adapters = [MappingStorageAdapter(schema, item)]
            dm = DataModel(item, adapters, context=self)
            ds = DataStructure(data=item, datamodel=dm)
            error_string = ""
            for k in import_keys:
                if not validators[k](ds, mode=mode):
                    error_string += " %s : %s" % (k, ds.getError(k))
            if error_string:
                item['Error'] = error_string
                invalid_records.append(item)
                total_not_imported += 1
            else:
                # BUGFIX: ``em = format % item`` referenced an undefined
                # ``format`` (NameError) and its result was unused.
                valid_records.append(dm)
                tr_count += 1
                total_imported += 1
            total += 1
        d['imported'] = total_imported
        d['not_imported'] = total_not_imported
        d['valid_records'] = valid_records
        d['invalid_records'] = invalid_records
        return d
    ###)

    security.declarePublic("missingValue")###(
    def missingValue(self):
        """Return the Zope ``Missing.MV`` marker value."""
        from Missing import MV
        return MV
    ###)
###)
475
class AccommodationTable(WAeUPTable): ###(
    """Catalog of hostel beds, keyed by the bed id."""

    meta_type = 'WAeUP Accommodation Tool'
    name = "portal_accommodation"
    key = "bed"

    def __init__(self, name=None):
        if name is None:
            name = self.name
        WAeUPTable.__init__(self, name)

    def searchAndReserveBed(self, student_id, bed_type):
        """Reserve a free bed of *bed_type* for *student_id*.

        Returns (1, bed) on success, (-1, message) if the student
        already holds a bed, (-2, message) if no bed is free.
        """
        booked = self.evalAdvancedQuery(Eq('student', student_id))
        if len(booked) > 0:
            return -1, "Student with Id %s already booked bed %s." % (student_id, booked[0].bed)
        # free beds carry the NOT_OCCUPIED marker in their student field
        free_query = Eq('bed_type', bed_type) & Eq('student', NOT_OCCUPIED)
        free = self.evalAdvancedQuery(free_query, sortSpecs=('sort_id', 'bed'))
        if len(free) == 0:
            return -2, "No bed available"
        bed = free[0].bed
        self.modifyRecord(bed=bed, student=student_id)
        s_logger = logging.getLogger('WAeUPTables.AccommodationTable.searchAndReserveBed')
        s_logger.info('%s reserved bed %s' % (student_id, bed))
        return 1, bed


InitializeClass(AccommodationTable)

###)
508
class PinTable(WAeUPTable): ###(
    """Catalog of access PINs (scratch cards), keyed by the pin itself."""
    # NOTE: a name bound in a class body is NOT visible inside method
    # bodies; methods must reach this via ``self.ConflictError``.
    from ZODB.POSException import ConflictError
    security = ClassSecurityInfo()
    meta_type = 'WAeUP Pin Tool'
    name = "portal_pins"
    key = 'pin'

    def __init__(self, name=None):
        if name is None:
            name = self.name
        WAeUPTable.__init__(self, name)

    security.declareProtected(ModifyPortalContent, "dumpAll")###(
    def dumpAll(self, include_unused=None):
        """dump all data in the table to a csv"""
        member = self.portal_membership.getAuthenticatedMember()
        logger = logging.getLogger('WAeUPTables.PinTable.dumpAll')
        # BUGFIX: do the permission check for unused pins BEFORE creating
        # the export file; an empty header-only file used to be left behind.
        if include_unused is not None and str(member) not in ('admin', 'joachim'):
            logger.info('%s tries to dump pintable with unused pins' % (member))
            return
        current = DateTime.DateTime().strftime("%d-%m-%y_%H_%M_%S")
        export_file = "%s/export/%s_%s.csv" % (i_home, self.__name__, current,)
        lines = []
        if hasattr(self, "export_keys"):
            fields = self.export_keys
        else:
            fields = [f for f in self.schema()]
        headline = ','.join(fields)
        out = open(export_file, "wb")
        out.write(headline + '\n')
        out.close()
        out = open(export_file, "a")
        csv_writer = csv.DictWriter(out, fields,)
        if include_unused is not None:
            records = self()
        else:
            # by default export only pins already assigned to a student
            records = self.evalAdvancedQuery(~Eq('student', ''))
        nr2export = len(records)
        logger.info('%s starts dumping, %s records to export' % (member, nr2export))
        chunk = 2000
        total = 0
        start = DateTime.DateTime().timeTime()
        start_chunk = DateTime.DateTime().timeTime()
        for record in records:
            d = self.record2dict(fields, record)
            lines.append(d)
            total += 1
            # flush every ``chunk`` records, and once more at the end
            if total and not total % chunk or total == len(records):
                csv_writer.writerows(lines)
                anz = len(lines)
                logger.info("wrote %(anz)d  total written %(total)d" % vars())
                end_chunk = DateTime.DateTime().timeTime()
                duration = end_chunk - start_chunk
                per_record = duration / anz
                till_now = end_chunk - start
                avarage_per_record = till_now / total
                estimated_end = DateTime.DateTime(start + avarage_per_record * nr2export)
                estimated_end = estimated_end.strftime("%H:%M:%S")
                logger.info('%(duration)4.1f, %(per_record)4.3f,end %(estimated_end)s' % vars())
                start_chunk = DateTime.DateTime().timeTime()
                lines = []
        # BUGFIX: flush and close the export file (was never closed).
        out.close()
        end = DateTime.DateTime().timeTime()
        logger.info('total time %6.2f m' % ((end - start) / 60))
        msg = "wrote %(total)d records to %(export_file)s" % vars()
        logger.info(msg)
        url = self.REQUEST.get('URL2')
        return self.REQUEST.RESPONSE.redirect(url)
    ###)

    def searchAndSetRecord(self, uid, student_id, prefix):
        """Assign pin *uid* to *student_id*.

        Returns a (status, record) tuple:
          (-1, None)   no such pin
          ( 1, record) pin was free and is now assigned
          ( 2, record) ZODB write conflict, or pin already used by
                       this very student
          ( 0, record) pin already used by another student
        """
        # The following lines must be activated after resetting the
        # the portal_pins table. This is to avoid duplicate entries
        # and disable duplicate payments.

        #student_id = student_id.upper()

        #records = self.searchResults(student = student_id)
        #if len(records) > 0 and prefix in ('CLR','APP'):
        #    for r in records:
        #        if r.pin != uid and r.prefix_batch.startswith(prefix):
        #            return -2
        records = self.searchResults({"%s" % self.key: uid})
        if len(records) > 1:
            # Can not happen, but anyway...
            raise ValueError("More than one record with uid %s" % uid)
        if len(records) == 0:
            return -1, None
        record = records[0]
        if record.student == "":
            record_data = {}
            for field in self.schema() + self.indexes():
                record_data[field] = getattr(record, field)
            # Add the updated data:
            record_data['student'] = student_id
            try:
                self.catalog_object(dict2ob(record_data), uid)
                return 1, record
            # BUGFIX: the bare name ConflictError raised a NameError here
            # (class-body imports are invisible inside methods); use the
            # class attribute via self instead.
            except self.ConflictError:
                return 2, record
        if record.student.upper() != student_id.upper():
            return 0, record
        if record.student.upper() == student_id.upper():
            return 2, record
        return -3, record


InitializeClass(PinTable)
###)
634
class PumeResultsTable(WAeUPTable): ###(
    """Catalog of PUME results, keyed by the JAMB registration number."""

    meta_type = 'WAeUP PumeResults Tool'
    name = "portal_pumeresults"
    key = "jamb_reg_no"

    def __init__(self, name=None):
        if name is None:
            name = self.name
        WAeUPTable.__init__(self, name)


InitializeClass(PumeResultsTable)

###)
649
650class ApplicantsCatalog(WAeUPTable): ###(
651
652    meta_type = 'WAeUP Applicants Catalog'
653    name = "applicants_catalog"
654    key = "reg_no"
655    security = ClassSecurityInfo()
656    #export_keys = (
657    #               "reg_no",
658    #               "status",
659    #               "lastname",
660    #               "sex",
661    #               "date_of_birth",
662    #               "lga",
663    #               "email",
664    #               "phone",
665    #               "passport",
666    #               "entry_mode",
667    #               "pin",
668    #               "screening_type",
669    #               "registration_date",
670    #               "testdate",
671    #               "application_date",
672    #               "screening_date",
673    #               "faculty",
674    #               "department",
675    #               "course1",
676    #               "course2",
677    #               "course3",
678    #               "eng_score",
679    #               "subj1",
680    #               "subj1score",
681    #               "subj2",
682    #               "subj2score",
683    #               "subj3",
684    #               "subj3score",
685    #               "aggregate",
686    #               "course_admitted",
687    #               )
688
689    def __init__(self,name=None):
690        if name ==  None:
691            name = self.name
692        WAeUPTable.__init__(self, name)
693
694    security.declareProtected(ModifyPortalContent,"new_importCSV")###(
695    def new_importCSV(self,filename="JAMB_data",
696                  schema_id="application",
697                  layout_id="import_application",
698                  mode='add'):
699        """ import JAMB data """
700        current = DateTime.DateTime().strftime("%d-%m-%y_%H_%M_%S")
701        pm = self.portal_membership
702        member = pm.getAuthenticatedMember()
703        logger = logging.getLogger('WAeUPTables.ApplicantsCatalog.importCSV')
704        lock_fn = "%s/import/%s_import_lock" % (i_home,filename)
705        import_fn = "%s/import/%s.csv" % (i_home,filename)
706        if mode not in ('add','edit'):
707            logger.info("invalid mode: %s" % mode)
708        if os.path.exists(lock_fn):
709            logger.info("import of %(import_fn)s already in progress" % vars())
710            return
711        lock_file = open(lock_fn,"w")
712        lock_file.write("%(current)s \n" % vars())
713        lock_file.close()
714        invalid_fn = "%s/import/%s_not_imported%s.csv" % (i_home,filename,current)
715        duplicate_fn = "%s/import/%s_not_imported%s.csv" % (i_home,filename,current)
716        stool = getToolByName(self, 'portal_schemas')
717        ltool = getToolByName(self, 'portal_layouts')
718        schema = stool._getOb(schema_id)
719        if schema is None:
720            em = 'No such schema %s' % schema_id
721            logger.error(em)
722            return
723        for postfix in ('_import',''):
724            layout_name = "%(layout_id)s%(postfix)s" % vars()
725            if hasattr(ltool,layout_name):
726                break
727        layout = ltool._getOb(layout_name)
728        if layout is None:
729            em = 'No such layout %s' % layout_id
730            logger.error(em)
731            return
732        try:
733            csv_file = csv.DictReader(open(import_fn,"rb"))
734        except:
735            em = 'Error reading %s.csv' % filename
736            logger.error(em)
737            return
738        d = self._import_new(csv_items,schema,layout,mode,logger)
739        imported = []
740        edited = []
741        duplicates = []
742        not_found = []
743        if len(d['valid_records']) > 0:
744            for record in d['valid_records']:
745                #import pdb;pdb.set_trace()
746                if mode == "add":
747                    try:
748                        self.addRecord(**dict(record.items()))
749                        imported.append(**dict(record.items()))
750                        logger.info("added %s" % record.items())
751                    except ValueError:
752                        dupplicate.append(**dict(record.items()))
753                        logger.info("duplicate %s" % record.items())
754                elif mode == "edit":
755                    try:
756                        self.modifyRecord(**dict(record.items()))
757                        edited.append(**dict(record.items()))
758                        logger.info("edited %s" % record.items())
759                    except KeyError:
760                        not_found.append(**dict(record.items()))
761                        logger.info("not found %s" % record.items())
762        invalid = d['invalid_records']
763        for itype in ("imported","edited","not_found","duplicate","invalid"):
764            outlist = locals[itype]
765            if len(outlist):
766                d = {}
767                for k in outlist[0].keys():
768                    d[k] = k
769                outlist[0] = d
770                outfile = open("file_name_%s" % itype,'w')
771                csv.DictWriter(outfile,outlist[0].keys()).writerows(outlist)
772                logger.info("wrote %(itype)s records to %(, written to %(not_imported_fn)s" % d)
773###)
774
775    security.declareProtected(ModifyPortalContent,"importCSV")###(
776    def importCSV(self,filename="JAMB_data",
777                  schema_id="application",
778                  layout_id="application_pce",
779                  mode='add'):
780        """ import JAMB data """
781        stool = getToolByName(self, 'portal_schemas')
782        ltool = getToolByName(self, 'portal_layouts')
783        schema = stool._getOb(schema_id)
784        if schema is None:
785            em = 'No such schema %s' % schema_id
786            logger.error(em)
787            return
788        layout = ltool._getOb(layout_id)
789        if layout is None:
790            em = 'No such layout %s' % layout_id
791            logger.error(em)
792            return
793        logger = logging.getLogger('WAeUPTables.ApplicantsCatalog.importCSV')
794        d = self._import_old(filename,schema,layout,mode,logger)
795        if len(d['valid_records']) > 0:
796            for record in d['valid_records']:
797                #import pdb;pdb.set_trace()
798                if mode == "add":
799                    self.addRecord(**dict(record.items()))
800                    logger.info("added %s" % record.items())
801                elif mode == "edit":
802                    self.modifyRecord(**dict(record.items()))
803                    logger.info("edited %s" % record.items())
804                else:
805                    logger.info("invalid mode: %s" % mode)
806        logger.info("%(mode)sed %(imported)d records, invalid written to %(not_imported_fn)s" % d)
807    ###)
808
# Register the Zope 2 security declarations made via ClassSecurityInfo above.
InitializeClass(ApplicantsCatalog)

###)
812
813class StudentsCatalog(WAeUPTable): ###(
814    security = ClassSecurityInfo()
815
816    meta_type = 'WAeUP Students Catalog'
817    name = "students_catalog"
818    key = "id"
819    affected_types = {   ###(
820                      'StudentApplication':
821                      {'id': 'application',
822                       'fields':
823                       ('jamb_reg_no',
824                        'entry_mode',
825                        #'entry_level',
826                        'entry_session',
827                       )
828                      },
829                      'StudentClearance':
830                      {'id': 'clearance',
831                       'fields':
832                       ('matric_no',
833                        'lga',
834                       )
835                      },
836                      'StudentPersonal':
837                      {'id': 'personal',
838                       'fields':
839                       ('name',
840                        'sex',
841                        'perm_address',
842                        'email',
843                        'phone',
844                       )
845                      },
846                      'StudentStudyCourse':
847                      {'id': 'study_course',
848                       'fields':
849                       ('course', # study_course
850                        'faculty', # from certificate
851                        'department', # from certificate
852                        'end_level', # from certificate
853                        'level', # current_level
854                        'mode',  # current_mode
855                        'session', # current_session
856                        'verdict', # current_verdict
857                       )
858                      },
859                     }
860    ###)
861
862    def __init__(self,name=None):
863        if name ==  None:
864            name = self.name
865        WAeUPTable.__init__(self, name)
866        return
867
868    def manage_catalogClear(self, REQUEST=None, RESPONSE=None, URL1=None):
869        """ clears the whole enchilada """
870        self._catalog.clear()
871
872        if REQUEST and RESPONSE:
873            RESPONSE.redirect(
874              URL1 +
875              '/manage_catalogAdvanced?manage_tabs_message=Catalog%20Cleared')
876
877    def manage_catalogReindex(self, REQUEST, RESPONSE, URL1): ###(
878        """ clear the catalog, then re-index everything """
879
880        elapse = time.time()
881        c_elapse = time.clock()
882
883        pgthreshold = self._getProgressThreshold()
884        handler = (pgthreshold > 0) and ZLogHandler(pgthreshold) or None
885        self.refreshCatalog(clear=1, pghandler=handler)
886
887        elapse = time.time() - elapse
888        c_elapse = time.clock() - c_elapse
889
890        RESPONSE.redirect(
891            URL1 +
892            '/manage_catalogAdvanced?manage_tabs_message=' +
893            urllib.quote('Catalog Updated \n'
894                         'Total time: %s\n'
895                         'Total CPU time: %s' % (`elapse`, `c_elapse`)))
896    ###)
897
898    def fill_certificates_dict(self): ###(
899        "return certificate data in  dict"
900        certificates_brains = self.portal_catalog(portal_type ='Certificate')
901        d = {}
902        for cb in certificates_brains:
903            certificate_doc = cb.getObject().getContent()
904            cb_path = cb.getPath().split('/')
905            ld = {}
906            ld['faculty'] = cb_path[-4]
907            ld['department'] = cb_path[-3]
908            ld['end_level'] = getattr(certificate_doc,'end_level','999')
909            d[cb.getId] = ld
910        return d
911    ###)
912
913    def get_from_doc_department(self,doc,cached_data={}): ###(
914        "return the students department"
915        if doc is None:
916            return None
917        if cached_data.has_key(doc.study_course):
918            return cached_data[doc.study_course]['department']
919        certificate_res = self.portal_catalog(id = doc.study_course)
920        if len(certificate_res) != 1:
921            return None
922        return certificate_res[0].getPath().split('/')[-3]
923
924    def get_from_doc_faculty(self,doc,cached_data={}):
925        "return the students faculty"
926        if doc is None:
927            return None
928        if cached_data.has_key(doc.study_course):
929            return cached_data[doc.study_course]['faculty']
930        certificate_res = self.portal_catalog(id = doc.study_course)
931        if len(certificate_res) != 1:
932            return None
933        return certificate_res[0].getPath().split('/')[-4]
934
935    def get_from_doc_end_level(self,doc,cached_data={}):
936        "return the students end_level"
937        if doc is None:
938            return None
939        if cached_data.has_key(doc.study_course):
940            return cached_data[doc.study_course]['end_level']
941        certificate_res = self.portal_catalog(id = doc.study_course)
942        if len(certificate_res) != 1:
943            return None
944        return getattr(certificate_res[0].getObject().getContent(),'end_level','unknown')
945
946    def get_from_doc_level(self,doc,cached_data={}):
947        "return the students level"
948        if doc is None:
949            return None
950        return getattr(doc,'current_level',None)
951
952    def get_from_doc_mode(self,doc,cached_data={}):
953        "return the students mode"
954        if doc is None:
955            return None
956        cm = getattr(doc,'current_mode',None)
957        return cm
958
959
960    def get_from_doc_session(self,doc,cached_data={}):
961        "return the students current_session"
962        if doc is None:
963            return None
964        return getattr(doc,'current_session',None)
965
966    def get_from_doc_entry_session(self,doc,cached_data={}):
967        "return the students entry_session"
968        if doc is None:
969            return None
970        es = getattr(doc,'entry_session',None)
971        if es is not None and len(es) == 2:
972            return es
973        try:
974            digit = int(doc.jamb_reg_no[0])
975        except:
976            return "-1"
977        if digit < 8:
978            return "0%c" % doc.jamb_reg_no[0]
979        return "9%c" % doc.jamb_reg_no[0]
980
981    def get_from_doc_course(self,doc,cached_data={}):
982        "return the students study_course"
983        if doc is None:
984            return None
985        return getattr(doc,'study_course',None)
986
987    def get_from_doc_name(self,doc,cached_data={}):
988        "return the students name from the personal"
989        if doc is None:
990            return None
991        return "%s %s %s" % (doc.firstname,doc.middlename,doc.lastname)
992
993    def get_from_doc_verdict(self,doc,cached_data={}):
994        "return the students study_course"
995        if doc is None:
996            return None
997        return getattr(doc,'current_verdict',None)
998    ###)
999
1000    def reindexIndex(self, name, REQUEST,pghandler=None): ###(
1001        if isinstance(name, str):
1002            name = (name,)
1003        reindextypes = {}
1004        reindex_special = []
1005        for n in name:
1006            if n in ("review_state","registered_courses"):
1007                reindex_special.append(n)
1008            else:
1009                for pt in self.affected_types.keys():
1010                    if n in self.affected_types[pt]['fields']:
1011                        if reindextypes.has_key(pt):
1012                            reindextypes[pt].append(n)
1013                        else:
1014                            reindextypes[pt]= [n]
1015                        break
1016        cached_data = {}
1017        if set(name).intersection(set(('faculty','department','end_level'))):
1018            cached_data = self.fill_certificates_dict()
1019        students = self.portal_catalog(portal_type="Student")
1020        if hasattr(self,'portal_catalog_real'):
1021            aq_portal = self.portal_catalog_real.evalAdvancedQuery
1022        else:
1023            aq_portal = self.portal_catalog.evalAdvancedQuery
1024        num_objects = len(students)
1025        if pghandler:
1026            pghandler.init('Refreshing catalog: %s' % self.absolute_url(1), num_objects)
1027        noattr = set(('StudentClearance','StudentPersonal')) & set(reindextypes.keys())
1028        #import pdb;pdb.set_trace()
1029        for i in xrange(num_objects):
1030            if pghandler: pghandler.report(i)
1031            student_brain = students[i]
1032            student_object = student_brain.getObject()
1033            # query = Eq('path',student_brain.getPath())
1034            # sub_brains_list = aq_portal(query)
1035            # sub_brains = {}
1036            # for sub_brain in sub_brains_list:
1037            #     sub_brains[sub_brain.portal_type] = sub_brain
1038            # student_path = student_brain.getPath()
1039            data = {}
1040            modified = False
1041            sid = data['id'] = student_brain.getId
1042            if reindex_special and 'review_state' in reindex_special:
1043                modified = True
1044                data['review_state'] = student_brain.review_state
1045            sub_objects = False
1046            for pt in reindextypes.keys():
1047                modified = True
1048                try:
1049                    doc = getattr(student_object,self.affected_types[pt]['id']).getContent()
1050                    #doc = sub_brains[pt].getObject().getContent()
1051                    # path = "%s/%s" % (student_path,self.affected_types[pt]['id'])
1052                    # doc = self.unrestrictedTraverse(path).getContent()
1053                    sub_objects = True
1054                except:
1055                    continue
1056                for field in set(name).intersection(self.affected_types[pt]['fields']):
1057                    if hasattr(self,'get_from_doc_%s' % field):
1058                        data[field] = getattr(self,'get_from_doc_%s' % field)(doc,
1059                                                                              cached_data=cached_data)
1060                    else:
1061                        data[field] = getattr(doc,field)
1062            if not sub_objects and noattr:
1063                import_res = self.returning_import(id = sid)
1064                if not import_res:
1065                    continue
1066                import_record = import_res[0]
1067                data['matric_no'] = import_record.matric_no
1068                data['sex'] = import_record.Sex == 'F'
1069                data['name'] = "%s %s %s" % (import_record.Firstname,
1070                                             import_record.Middlename,
1071                                             import_record.Lastname)
1072                data['jamb_reg_no'] = import_record.Entryregno
1073            #if reindex_special and 'registered_courses' in reindex_special:
1074            #    try:
1075            #        study_course = getattr(student_object,"study_course")
1076            #        level_ids = study_course.objectIds()
1077            #    except:
1078            #        continue
1079            #    if not level_ids:
1080            #        continue
1081            #    modified = True
1082            #    level_ids.sort()
1083            #    course_ids = getattr(study_course,level_ids[-1]).objectIds()
1084            #    courses = []
1085            #    for c in course_ids:
1086            #        if c.endswith('_co'):
1087            #            courses.append(c[:-3])
1088            #        else:
1089            #            courses.append(c)
1090            #    data['registered_courses'] = courses
1091            if modified:
1092                self.modifyRecord(**data)
1093        if pghandler: pghandler.finish()
1094    ###)
1095
1096    def refreshCatalog(self, clear=0, pghandler=None): ###(
1097        """ re-index everything we can find """
1098        students_folder = self.portal_url.getPortalObject().campus.students
1099        if clear:
1100            self._catalog.clear()
1101        students = self.portal_catalog(portal_type="Student")
1102        num_objects = len(students)
1103        cached_data = self.fill_certificates_dict()
1104        if pghandler:
1105            pghandler.init('Refreshing catalog: %s' % self.absolute_url(1), num_objects)
1106        for i in xrange(num_objects):
1107            if pghandler: pghandler.report(i)
1108            student_brain = students[i]
1109            spath = student_brain.getPath()
1110            student_object = student_brain.getObject()
1111            data = {}
1112            sid = data['id'] = student_brain.getId
1113            data['review_state'] = student_brain.review_state
1114            sub_objects = False
1115            for pt in self.affected_types.keys():
1116                modified = True
1117                try:
1118                    doc = getattr(student_object,self.affected_types[pt]['id']).getContent()
1119                    sub_objects = True
1120                except:
1121                    #from pdb import set_trace;set_trace()
1122                    continue
1123                for field in self.affected_types[pt]['fields']:
1124                    if hasattr(self,'get_from_doc_%s' % field):
1125                        data[field] = getattr(self,'get_from_doc_%s' % field)(doc,
1126                                                                              cached_data=cached_data)
1127                    else:
1128                        data[field] = getattr(doc,field,None)
1129            if not sub_objects:
1130                import_res = self.returning_import(id = sid)
1131                if not import_res:
1132                    continue
1133                import_record = import_res[0]
1134                data['matric_no'] = import_record.matric_no
1135                data['sex'] = import_record.Sex == 'F'
1136                data['name'] = "%s %s %s" % (import_record.Firstname,
1137                                             import_record.Middlename,
1138                                             import_record.Lastname)
1139                data['jamb_reg_no'] = import_record.Entryregno
1140            self.addRecord(**data)
1141        if pghandler: pghandler.finish()
1142    ###)
1143
1144    security.declarePrivate('notify_event_listener') ###(
1145    def notify_event_listener(self,event_type,object,infos):
1146        "listen for events"
1147        if not infos.has_key('rpath'):
1148            return
1149        pt = getattr(object,'portal_type',None)
1150        mt = getattr(object,'meta_type',None)
1151        students_catalog = self
1152        data = {}
1153        if pt == 'Student' and\
1154           mt == 'CPS Proxy Folder' and\
1155           event_type.startswith('workflow'):
1156            data['id'] = object.getId()
1157            data['review_state'] = self.portal_workflow.getInfoFor(object,'review_state',None)
1158            students_catalog.modifyRecord(**data)
1159            return
1160        rpl = infos['rpath'].split('/')
1161        if pt == 'Student' and mt == 'CPS Proxy Folder':
1162            student_id = object.id
1163            if event_type == "sys_add_object":
1164                try:
1165                    self.addRecord(id = student_id)
1166                except ValueError:
1167                    pass
1168                return
1169            elif event_type == 'sys_del_object':
1170                self.deleteRecord(student_id)
1171        if pt not in self.affected_types.keys():
1172            return
1173        if event_type not in ('sys_modify_object'):
1174            return
1175        if mt == 'CPS Proxy Folder':
1176            return
1177        for field in self.affected_types[pt]['fields']:
1178            if hasattr(self,'get_from_doc_%s' % field):
1179                data[field] = getattr(self,'get_from_doc_%s' % field)(object)
1180            else:
1181                data[field] = getattr(object,field)
1182        data['id'] = rpl[2]
1183        self.modifyRecord(**data)
1184    ###)
1185
1186
1187InitializeClass(StudentsCatalog)
1188
1189###)
1190
class CoursesCatalog(WAeUPTable): ###(
    """Flat catalog with one record per course, keyed by course code.

    Besides the course document fields, each record carries the faculty
    and department derived from the course's path below 'academics'.
    Kept in sync with object changes by notify_event_listener.
    """
    security = ClassSecurityInfo()

    meta_type = 'WAeUP Courses Catalog'
    name =  "courses_catalog"
    key = "code"
    def __init__(self,name=None):
        # Fall back to the class-level catalog name.
        if name ==  None:
            name =  self.name
        WAeUPTable.__init__(self, name)

    def manage_catalogReindex(self, REQUEST, RESPONSE, URL1): ###(
        """ clear the catalog, then re-index everything """

        elapse = time.time()
        c_elapse = time.clock()

        pgthreshold = self._getProgressThreshold()
        handler = (pgthreshold > 0) and ZLogHandler(pgthreshold) or None
        self.refreshCatalog(clear=1, pghandler=handler)

        elapse = time.time() - elapse
        c_elapse = time.clock() - c_elapse

        RESPONSE.redirect(
            URL1 +
            '/manage_catalogAdvanced?manage_tabs_message=' +
            urllib.quote('Catalog Updated \n'
                         'Total time: %s\n'
                         'Total CPU time: %s' % (`elapse`, `c_elapse`)))
    ###)

    def reindexIndex(self, name, REQUEST,pghandler=None): ###(
        """Re-index the given index name(s) for every course.

        name may be a single index name or a sequence of names; the key,
        faculty and department fields are refreshed from the path in any
        case.
        """
        if isinstance(name, str):
            name = (name,)
        courses = self.portal_catalog(portal_type="Course")
        num_objects = len(courses)
        if pghandler:
            pghandler.init('Refreshing catalog: %s' % self.absolute_url(1), num_objects)
        for i in xrange(num_objects):
            if pghandler: pghandler.report(i)
            course_brain = courses[i]
            course_object = course_brain.getObject()
            pl = course_brain.getPath().split('/')
            data = {}
            cid = data[self.key] = course_brain.getId
            # Faculty/department from fixed positions relative to the end
            # of the course path -- presumably .../FAC/DEP/.../COURSE;
            # refreshCatalog below anchors on 'academics' instead.
            data['faculty'] = pl[-4]
            data['department'] = pl[-3]
            doc = course_object.getContent()
            for field in name:
                if field not in (self.key,'faculty','department'):
                    data[field] = getattr(doc,field)
            self.modifyRecord(**data)
        if pghandler: pghandler.finish()
    ###)

    def refreshCatalog(self, clear=0, pghandler=None): ###(
        """ re-index everything we can find """
        if clear:
            self._catalog.clear()
        courses = self.portal_catalog(portal_type="Course")
        num_objects = len(courses)
        if pghandler:
            pghandler.init('Refreshing catalog: %s' % self.absolute_url(1), num_objects)
        #from pdb import set_trace;set_trace()
        for i in xrange(num_objects):
            if pghandler: pghandler.report(i)
            course_brain = courses[i]
            course_doc = course_brain.getObject().getContent()
            pl = course_brain.getPath().split('/')
            data = {}
            for field in self.schema():
                data[field] = getattr(course_doc,field,None)
            data[self.key] = course_brain.getId
            # Faculty and department are the two path components directly
            # below the 'academics' folder.
            ai = pl.index('academics')
            data['faculty'] = pl[ai +1]
            data['department'] = pl[ai +2]
            if clear:
                self.addRecord(**data)
            else:
                self.modifyRecord(**data)
        if pghandler: pghandler.finish()
    ###)

    security.declarePrivate('notify_event_listener') ###(
    def notify_event_listener(self,event_type,object,infos):
        """Keep the catalog in sync with add/modify/delete events on courses."""
        if not infos.has_key('rpath'):
            return
        pt = getattr(object,'portal_type',None)
        mt = getattr(object,'meta_type',None)
        if pt != 'Course':
            return
        data = {}
        rpl = infos['rpath'].split('/')
        if event_type not in ("sys_add_object","sys_modify_object","sys_del_object"):
            return
        course_id = object.getId()
        data[self.key] = course_id
        if event_type == "sys_add_object" and mt == 'CPS Proxy Folder':
            # First create a minimal record, then fill it from the document.
            try:
                self.addRecord(**data)
            except ValueError:
                return
            course_id = object.getId()
            doc = object.getContent()
            if doc is None:
                return
            for field in self.schema():
                data[field] = getattr(doc,field,None)
            data[self.key] = course_id
            ai = rpl.index('academics')
            data['faculty'] = rpl[ai +1]
            data['department'] = rpl[ai +2]
            self.modifyRecord(**data)
            return
        if event_type == "sys_del_object":
            self.deleteRecord(course_id)
            return
        if event_type == "sys_modify_object" and mt == 'Course':
            #from pdb import set_trace;set_trace()
            for field in self.schema():
                data[field] = getattr(object,field,None)
            # The modified object is the content document; the course id is
            # the id of its proxy parent.
            course_id = object.aq_parent.getId()
            data[self.key] = course_id
            ai = rpl.index('academics')
            data['faculty'] = rpl[ai +1]
            data['department'] = rpl[ai +2]
            self.modifyRecord(**data)
    ###)


# Register the Zope 2 security declarations made above.
InitializeClass(CoursesCatalog)
###)
1325
class CourseResults(WAeUPTable): ###(
    """Catalog of single course results.

    Records are keyed by 'student_id|level_id|course_id' and carry the
    fields of the course result document plus session and carry-over
    information.
    """
    security = ClassSecurityInfo()

    meta_type = 'WAeUP Results Catalog'
    name = "course_results"
    key = "key" #student_id + level + course_id
    def __init__(self,name=None):
        # Fall back to the class-level catalog name.
        if name ==  None:
            name = self.name
        WAeUPTable.__init__(self, name)
        self._queue = []

    def addMultipleRecords(self, records): ###(
        """Add many records at once.

        Each record dict must contain student_id, level_id and course_id;
        raises ValueError if a record with the same key already exists.
        Returns the uid of the last record added.
        """
        for data in records:
            uid = "%(student_id)s|%(level_id)s|%(course_id)s" % data
            data['%s' % self.key] = uid
            res = self.searchResults({"%s" % self.key : uid})
            if len(res) > 0:
                raise ValueError("More than one record with uid %s" % uid)
            self.catalog_object(dict2ob(data), uid=uid)
        return uid
    ###)

    def deleteResultsHere(self,level_id,student_id): ###(
        """Delete all course results of a student at the given level."""
        #import pdb;pdb.set_trace()
        query = Eq('student_id',student_id) & Eq('level_id', level_id)
        course_results = self.course_results.evalAdvancedQuery(query)
        for result in course_results:
            self.deleteRecord(result.key)
    ###)

    def moveResultsHere(self,level,student_id): ###(
        """Move course result objects from a level folder into this catalog.

        Every course result object found in the level is catalogued here
        (unless a record for its code already exists) and then deleted
        from the folder.
        """
        #import pdb;pdb.set_trace()
        level_id = level.getId()
        query = Eq('student_id',student_id) & Eq('level_id', level_id)
        course_results = self.course_results.evalAdvancedQuery(query)
        existing_courses = [cr.code for cr in course_results]
        to_delete = []
        for code,obj in level.objectItems():
            to_delete.append(code)
            carry_over = False
            # The '_co' suffix marks carry-over courses.
            if code.endswith('_co'):
                carry_over = True
                code  = code[:-3]
            if code in existing_courses:
                continue
            course_result_doc = obj.getContent()
            data = {}
            course_id = code
            for field in self.schema():
                data[field] = getattr(course_result_doc,field,'')
            data['key'] = key = "%(student_id)s|%(level_id)s|%(course_id)s" % vars()
            data['student_id'] = student_id
            data['level_id'] = level_id
            session_id = self.getLevelSession(level.getContent(),student_id,level_id)
            data['session_id'] = session_id
            #data['queue_status'] = OBJECT_CREATED
            data['code'] = course_id
            data['carry_over'] = carry_over
            self.catalog_object(dict2ob(data), uid=key)
        level.manage_delObjects(to_delete)
    ###)

    def getCourses(self,student_id,level_id): ###(
        """Return (total_credits, gpa, carry_overs, normal1, normal2, normal3)
        for a student at the given level.

        gpa is the credit-weighted sum of grade weights (not yet divided
        by total_credits); normal1/2/3 group non-carry-over courses by
        semester ('1', '2', anything else).
        """
        query = Eq('student_id',student_id) & Eq('level_id', level_id)
        course_results = self.course_results.evalAdvancedQuery(query)
        carry_overs = []
        normal1 = []
        normal2 = []
        normal3 = []
        total_credits = 0
        gpa = 0
        for brain in course_results:
            d = {}

            for field in self.schema():
                d[field] = getattr(brain,field,'')

            d['weight'] = ''
            d['grade'] = ''
            d['score'] = ''

            if str(brain.credits).isdigit():
                credits = int(brain.credits)
                total_credits += credits
                score = getattr(brain,'score',0)
                if score and str(score).isdigit() and int(score) > 0:
                    score = int(score)
                    grade,weight = self.getGradesFromScore(score)
                    gpa += weight * credits
                    d['weight'] = weight
                    d['grade'] = grade
                    d['score'] = score
            # core_or_elective: True -> Core, False -> Elective,
            # None/'' -> unknown (left blank).
            d['coe'] = ''
            if brain.core_or_elective:
                d['coe'] = 'Core'
            elif brain.core_or_elective == False:
                d['coe'] = 'Elective'
            id = code = d['id'] = brain.code
            d['code'] = code
            res = self.courses_catalog.evalAdvancedQuery(Eq('code',code))
            if res:
                course = res[0]
                d['title'] = course.title
                # The courses_catalog contains strings and integers in its semester field.
                # Maybe this can be fixed by reindexing the catalog. The schema of course says 'CPS Int Field'.
                d['semester'] = str(course.semester)
            else:
                d['title'] = "Course has been removed from course list"
                d['semester'] = ''
            if brain.carry_over:
                d['coe'] = 'CO'
                carry_overs.append(d)
            else:
                if d['semester'] == '1':
                    normal1.append(d)

                elif d['semester'] == '2':
                    normal2.append(d)
                else:
                    normal3.append(d)
        #normal.sort(cmp=lambda x,y: cmp("%(semester)s%(code)s" % x,
        #                                "%(semester)s%(code)s" % y))
        carry_overs.sort(cmp=lambda x,y: cmp("%(semester)s%(code)s" % x,
                                             "%(semester)s%(code)s" % y))
        return total_credits,gpa,carry_overs,normal1,normal2,normal3
    ###)

# Register the Zope 2 security declarations made above.
InitializeClass(CourseResults)
###)
1458
class OnlinePaymentsImport(WAeUPTable): ###(
    """Lookup table for online payment transactions, keyed by order id."""

    meta_type = 'WAeUP Online Payment Transactions'
    name = "online_payments_import"
    key = "order_id"

    def __init__(self, name=None):
        # Default to the class-level table name.
        if name is None:
            name = self.name
        WAeUPTable.__init__(self, name)


InitializeClass(OnlinePaymentsImport)
###)
1472
class ReturningImport(WAeUPTable): ###(
    """Import table of returning students, keyed by matriculation number."""

    meta_type = 'Returning Import Table'
    name = "returning_import"
    key = "matric_no"

    def __init__(self, name=None):
        # Default to the class-level table name.
        if name is None:
            name = self.name
        WAeUPTable.__init__(self, name)


InitializeClass(ReturningImport)
###)
1486
class ResultsImport(WAeUPTable): ###(
    """Import table of examination results, keyed by a composite key."""

    meta_type = 'Results Import Table'
    name = "results_import"
    key = "key"

    def __init__(self, name=None):
        # Default to the class-level table name.
        if name is None:
            name = self.name
        WAeUPTable.__init__(self, name)


InitializeClass(ResultsImport)

###)
1501
class PaymentsCatalog(WAeUPTable): ###(
    """Flat catalog of student payments, keyed by order_id.

    Kept in sync with Payment content objects by notify_event_listener.
    """
    security = ClassSecurityInfo()

    meta_type = 'WAeUP Payments Catalog'
    name = "payments_catalog"
    key = "order_id"
    def __init__(self,name=None):
        # Fall back to the class-level catalog name.
        if name is None:
            name = self.name
        WAeUPTable.__init__(self, name)


    security.declarePrivate('notify_event_listener') ###(
    def notify_event_listener(self,event_type,object,infos):
        """Keep the catalog in sync with events on Payment documents."""
        if not infos.has_key('rpath'):
            return
        pt = getattr(object,'portal_type',None)
        mt = getattr(object,'meta_type',None)
        data = {}
        if pt != 'Payment':
            return
        if mt == 'CPS Proxy Folder':
            return # is handled only for the real object
        if event_type == 'sys_del_object':
            self.deleteRecord(object.order_id)
        # BUGFIX: ('sys_modify_object') was a plain string, so the previous
        # 'not in' was a substring test, not tuple membership.
        if event_type not in ('sys_modify_object',):
            return
        for field in self.schema():
            data[field] = getattr(object,field,'')
        rpl = infos['rpath'].split('/')
        #import pdb;pdb.set_trace()
        # The student id is taken from a fixed position in the payment's
        # rpath -- assumes .../<student_id>/x/y/<payment>; TODO confirm.
        student_id = rpl[-4]
        data['student_id'] = student_id
        modified = False
        # Try to modify an existing record first; fall back to adding one.
        try:
            self.modifyRecord(**data)
            modified = True
        except KeyError:
            #logger = logging.getLogger('WAeUPTables.PaymentsCatalog.%s' % self.__name__)
            #logger.info("could not modify entry for %(student_id)s with %(order_id)s" % data)
            pass
        if not modified:
            try:
                self.addRecord(**data)
            except:
                # Best effort: log and carry on so event handling never breaks.
                logger = logging.getLogger('WAeUPTables.PaymentsCatalog.notify_event_listener')
                logger.info("could not add or modify entry for %(student_id)s with %(order_id)s" % data)
        ###)


# Register the Zope 2 security declarations made above.
InitializeClass(PaymentsCatalog)

###)
1556
# BBB: backward-compatibility alias -- older code and persistent instances
# reference the misspelled name 'AccomodationTable'; keep it pointing at
# the correctly spelled AccommodationTable defined earlier in this module.
AccomodationTable = AccommodationTable
# Note: See TracBrowser for help on using the repository browser.