source: WAeUP_SRP/base/WAeUPTables.py @ 3040

Last change on this file since 3040 was 3018, checked in by Henrik Bettermann, 17 years ago

fixed

  • Property svn:keywords set to Id
File size: 58.1 KB
Line 
1#-*- mode: python; mode: fold -*-
2# (C) Copyright 2005 AixtraWare <http://aixtraware.de>
3# Author: Joachim Schmitz <js@aixtraware.de>
4#
5# This program is free software; you can redistribute it and/or modify
6# it under the terms of the GNU General Public License version 2 as published
7# by the Free Software Foundation.
8#
9# This program is distributed in the hope that it will be useful,
10# but WITHOUT ANY WARRANTY; without even the implied warranty of
11# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
12# GNU General Public License for more details.
13#
14# You should have received a copy of the GNU General Public License
15# along with this program; if not, write to the Free Software
16# Foundation, Inc., 59 Temple Place - Suite 330, Boston, MA
17# 02111-1307, USA.
18#
19# $Id: WAeUPTables.py 3018 2008-01-13 09:35:17Z henrik $
20
import csv
import logging
import os
import re
import time
import urllib

import DateTime
from zope.interface import implements
from Globals import InitializeClass
from Products.ZCatalog.ZCatalog import ZCatalog
from Products.ZCatalog.ProgressHandler import ZLogHandler
from AccessControl import ClassSecurityInfo
from Products.CMFCore.permissions import ModifyPortalContent
from Products.CMFCore.utils import getToolByName
from Products.CMFCore.CatalogTool import CatalogTool
from Products.CPSSchemas.StorageAdapter import MappingStorageAdapter
from Products.CPSSchemas.DataStructure import DataStructure
from Products.CPSSchemas.DataModel import DataModel
from Products.AdvancedQuery import Eq, Between, Le, In
import Globals
# Filesystem anchors: the package directory and the Zope instance home.
# Import/export csv files live under <i_home>/import and <i_home>/export.
p_home = Globals.package_home(globals())
i_home = Globals.INSTANCE_HOME

# Marker strings stored in catalog record fields.
# NOTE(review): "sheduled" is a historic misspelling; kept because stored
# records and other code may already contain this exact string.
ADDING_SHEDULED = "adding_sheduled"
OBJECT_CREATED = "object_created"
NOT_OCCUPIED = 'not_occupied'

from interfaces import IWAeUPTable
46
class AttributeHolder(object):
    """Bare container; dict2ob() hangs mapping entries on it as attributes
    so they can be catalogued like record objects."""
    pass
49
def dict2ob(dict):
    """Return an AttributeHolder whose attributes mirror the given mapping."""
    holder = AttributeHolder()
    for name, value in dict.items():
        setattr(holder, name, value)
    return holder
55
class WAeUPTable(ZCatalog): ###(
    """Base class for WAeUP catalog-backed tables.

    Subclasses provide ``name`` (the catalog/tool id) and ``key`` (the
    metadata field used as the unique record id for catalog_object calls).
    """

    implements(IWAeUPTable)
    security = ClassSecurityInfo()
    # Overridden by each concrete subclass with its Zope meta_type string.
    meta_type = None

    def __init__(self,name=None):
        # Default to the class-level catalog id when none is supplied.
        if name ==  None:
            name = self.name
        ZCatalog.__init__(self,name)
66
67    def refreshCatalog(self, clear=0, pghandler=None): ###(
68        """ don't refresh for a normal table """
69
70        if self.REQUEST and self.REQUEST.RESPONSE:
71            self.REQUEST.RESPONSE.redirect(
72              URL1 +
73              '/manage_catalogAdvanced?manage_tabs_message=Catalog%20refresh%20not%20implemented')
74
75###)
76
77    def manage_catalogClear(self, REQUEST=None, RESPONSE=None, URL1=None): ###(
78        """ clears the whole enchilada """
79
80        #if REQUEST and RESPONSE:
81        #    RESPONSE.redirect(
82        #      URL1 +
83        #      '/manage_catalogAdvanced?manage_tabs_message=Catalog%20Clearing%20disabled')
84
85        self._catalog.clear()
86        if REQUEST and RESPONSE:
87            RESPONSE.redirect(
88              URL1 +
89              '/manage_catalogAdvanced?manage_tabs_message=Catalog%20cleared')
90
91###)
92
93    def record2dict(self,fields,record): ###(
94        d = {}
95        for key in fields:
96            v = getattr(record, key, None)
97            v_dump = v
98            if key == 'sex':
99                if v == True:
100                    v_dump = 'F'
101                elif v == False:
102                    v_dump = 'M'
103                d[key] = v_dump
104            elif v:
105                if key == 'lga':
106                    v_dump = self.portal_vocabularies.local_gov_areas.get(v)
107                    if not v_dump:
108                        v_dump = v
109                elif key == 'aos':
110                    v_dump = self.portal_vocabularies.aos.get(v)
111                d[key] = v_dump
112            else:
113                d[key] = ''
114        return d
115
116###)
117
118    def addRecord(self, **data): ###(
119        # The uid is the same as "bed".
120        uid = data[self.key]
121        res = self.searchResults({"%s" % self.key : uid})
122        if len(res) > 0:
123            raise ValueError("More than one record with uid %s" % uid)
124        self.catalog_object(dict2ob(data), uid=uid)
125        return uid
126
127###)
128
    def deleteRecord(self, uid):
        """Uncatalog (remove) the record stored under ``uid``."""
        self.uncatalog_object(uid)
131
132    def getRecordByKey(self,key):
133        if not key:
134            return None
135        res = self.evalAdvancedQuery(Eq(self.key,key))
136        if res:
137            return res[0]
138        return None
139
140    def searchAndSetRecord(self, **data):
141        raise NotImplemented
142
143    def modifyRecord(self, record=None, **data): ###(
144        #records = self.searchResults(uid=uid)
145        uid = data[self.key]
146        if record is None:
147            records = self.searchResults({"%s" % self.key : uid})
148            if len(records) > 1:
149                # Can not happen, but anyway...
150                raise ValueError("More than one record with uid %s" % uid)
151            if len(records) == 0:
152                raise KeyError("No record for uid %s" % uid)
153            record = records[0]
154        record_data = {}
155        for field in self.schema() + self.indexes():
156            record_data[field] = getattr(record, field)
157        # Add the updated data:
158        record_data.update(data)
159        self.catalog_object(dict2ob(record_data), uid)
160
161###)
162
163    def reindexIndex(self, name, REQUEST,pghandler=None): ###(
164        if isinstance(name, str):
165            name =  (name,)
166        paths = self._catalog.uids.items()
167        i = 0
168        #import pdb;pdb.set_trace()
169        for p,rid in paths:
170            i += 1
171            metadata = self.getMetadataForRID(rid)
172            record_data = {}
173            for field in name:
174                record_data[field] = metadata.get(field)
175            uid = metadata.get(self.key)
176            self.catalog_object(dict2ob(record_data), uid, idxs=name,
177                                update_metadata=0)
178
179###)
180
181    security.declareProtected(ModifyPortalContent,"exportAllRecords") ###(
182    def exportAllRecords(self):
183        "export a WAeUPTable"
184        #import pdb;pdb.set_trace()
185        fields = [field for field in self.schema()]
186        format = ','.join(['"%%(%s)s"' % fn for fn in fields])
187        csv = []
188        csv.append(','.join(['"%s"' % fn for fn in fields]))
189        for uid in self._catalog.uids:
190            records = self.searchResults({"%s" % self.key : uid})
191            if len(records) > 1:
192                # Can not happen, but anyway...
193                raise ValueError("More than one record with uid %s" % uid)
194            if len(records) == 0:
195                raise KeyError("No record for uid %s" % uid)
196            rec = records[0]
197            csv.append(format % rec)
198        current = DateTime.DateTime().strftime("%d-%m-%y_%H_%M_%S")
199        open("%s/import/%s-%s.csv" % (i_home,self.getId(),current),"w+").write('\n'.join(csv))
200
201###)
202
203    security.declareProtected(ModifyPortalContent,"dumpAll")###(
204    def dumpAll(self):
205        """dump all data in the table to a csv"""
206        member = self.portal_membership.getAuthenticatedMember()
207        logger = logging.getLogger('WAeUPTables.WAeUPTable.dumpAll')
208        current = DateTime.DateTime().strftime("%d-%m-%y_%H_%M_%S")
209        export_file = "%s/export/%s_%s.csv" % (i_home,self.__name__,current,)
210        res_list = []
211        lines = []
212        if hasattr(self,"export_keys"):
213            fields = self.export_keys
214        else:
215            fields = []
216            for f in self.schema():
217                fields.append(f)
218        headline = ','.join(fields)
219        out = open(export_file,"wb")
220        out.write(headline +'\n')
221        out.close()
222        out = open(export_file,"a")
223        csv_writer = csv.DictWriter(out,fields,)
224        records = self()
225        nr2export = len(records)
226        logger.info('%s starts dumping, %s records to export' % (member,nr2export))
227        chunk = 2000
228        total = 0
229        start = DateTime.DateTime().timeTime()
230        start_chunk = DateTime.DateTime().timeTime()
231        for record in records:
232            not_all = False
233            d = self.record2dict(fields,record)
234            lines.append(d)
235            total += 1
236            if total and not total % chunk or total == len(records):
237                csv_writer.writerows(lines)
238                anz = len(lines)
239                logger.info("wrote %(anz)d  total written %(total)d" % vars())
240                end_chunk = DateTime.DateTime().timeTime()
241                duration = end_chunk-start_chunk
242                per_record = duration/anz
243                till_now = end_chunk - start
244                avarage_per_record = till_now/total
245                estimated_end = DateTime.DateTime(start + avarage_per_record * nr2export)
246                estimated_end = estimated_end.strftime("%H:%M:%S")
247                logger.info('%(duration)4.1f, %(per_record)4.3f,end %(estimated_end)s' % vars())
248                start_chunk = DateTime.DateTime().timeTime()
249                lines = []
250        end = DateTime.DateTime().timeTime()
251        logger.info('total time %6.2f m' % ((end-start)/60))
252        import os
253        filename, extension = os.path.splitext(export_file)
254        from subprocess import call
255        msg = "wrote %(total)d records to %(export_file)s" % vars()
256        #try:
257        #    retcode = call('gzip %s' % (export_file),shell=True)
258        #    if retcode == 0:
259        #        msg = "wrote %(total)d records to %(export_file)s.gz" % vars()
260        #except OSError, e:
261        #    retcode = -99
262        #    logger.info("zip failed with %s" % e)
263        logger.info(msg)
264        args = {'portal_status_message': msg}
265        #url = self.REQUEST.get('URL1') + '?' + urlencode(args)
266        url = self.REQUEST.get('URL2')
267        return self.REQUEST.RESPONSE.redirect(url)
268    ###)
269
    security.declarePrivate("_import_old") ###(
    def _import_old(self,filename,schema,layout, mode,logger):
        """Validate and classify rows of <i_home>/import/<filename>.csv.

        Each row is validated against the layout's widget validators; valid
        rows are appended (as DataModels) to d['valid_records'] and echoed
        to *_imported<ts>.csv, invalid rows to d['invalid_records'] and
        *_not_imported<ts>.csv with an Error column.  Nothing is written to
        the catalog here -- the caller (importCSV) adds/edits afterwards.
        """
        import transaction
        import random
        pm = self.portal_membership
        member = pm.getAuthenticatedMember()
        current = DateTime.DateTime().strftime("%d-%m-%y_%H_%M_%S")
        import_fn = "%s/import/%s.csv" % (i_home,filename)
        imported_fn = "%s/import/%s_imported%s.csv" % (i_home,filename,current)
        not_imported_fn = "%s/import/%s_not_imported%s.csv" % (i_home,filename,current)
        start = True
        tr_count = 1
        total_imported = 0
        total_not_imported = 0
        total = 0
        iname =  "%s" % filename
        not_imported = []
        imported = []
        valid_records = []
        invalid_records = []
        # Result dict returned to the caller; counters are refreshed at the end.
        d = {}
        d['mode'] = mode
        d['imported'] = total_imported
        d['not_imported'] = total_not_imported
        d['valid_records'] = valid_records
        d['invalid_records'] = invalid_records
        d['import_fn'] = import_fn
        d['imported_fn'] = imported_fn
        d['not_imported_fn'] = not_imported_fn
        if schema is None:
            em = 'No schema specified'
            logger.error(em)
            return d
        if layout is None:
            em = 'No layout specified'
            logger.error(em)
            return d
        # One validator per layout widget; abort when a widget cannot validate.
        validators = {}
        for widget in layout.keys():
            try:
                validators[widget] = layout[widget].validate
            except AttributeError:
                logger.info('%s has no validate attribute' % widget)
                return d
        # if mode == 'edit':
        #     importer = self.importEdit
        # elif mode == 'add':
        #     importer = self.importAdd
        # else:
        #     importer = None
        try:
            items = csv.DictReader(open(import_fn,"rb"),
                                   dialect="excel",
                                   skipinitialspace=True)
        except:
            em = 'Error reading %s.csv' % filename
            logger.error(em)
            return d
        #import pdb;pdb.set_trace()
        for item in items:
            # One-time setup on the first row: determine importable columns
            # (headers prefixed 'ignore' or fully upper-cased are skipped),
            # write the side-car file headers and build the storage adapter.
            if start:
                start = False
                logger.info('%s starts import from %s.csv' % (member,filename))
                #import_keys = [k for k in item.keys() if not k.startswith('ignore')]
                attrs = csv.reader(open("%s/import/%s.csv" % (i_home,filename),"rb"),
                                   dialect="excel",
                                   skipinitialspace=True).next()
                import_keys = [k for k in attrs if not (k.startswith('ignore') or k.isupper())]
                diff2schema = set(import_keys).difference(set(schema.keys()))
                diff2layout = set(import_keys).difference(set(layout.keys()))
                if diff2layout:
                    em = "not ignorable key(s) %s found in heading" % diff2layout
                    logger.info(em)
                    return d
                s = ','.join(['"%s"' % fn for fn in import_keys])
                open(not_imported_fn,"a").write(s + ',"Error"'+ '\n')
                #s = '"id",' + s
                open(imported_fn,"a").write(s + '\n')
                format = ','.join(['"%%(%s)s"' % fn for fn in import_keys])
                format_error = format + ',"%(Error)s"'
                #format = '"%(id)s",'+ format
                adapters = [MappingStorageAdapter(schema, item)]
            dm = DataModel(item, adapters,context=self)
            ds = DataStructure(data=item,datamodel=dm)
            error_string = ""
            #import pdb;pdb.set_trace()
            # Collect every validator failure for this row into one message.
            for k in import_keys:
                if not validators[k](ds,mode=mode):
                    error_string += " %s : %s" % (k,ds.getError(k))
            # if not error_string and importer:
            #     item.update(dm)
            #     item['id'],error = importer(item)
            #     if error:
            #         error_string += error
            if error_string:
                item['Error'] = error_string
                invalid_records.append(dm)
                not_imported.append(format_error % item)
                total_not_imported += 1
            else:
                em = format % item
                valid_records.append(dm)
                imported.append(em)
                #logger.info("%(total_imported)d of %(total)d %(em)s" % vars())
                tr_count += 1
                total_imported += 1
            total += 1
        if len(imported) > 0:
            open(imported_fn,"a").write('\n'.join(imported))
        if len(not_imported) > 0:
            open(not_imported_fn,"a").write('\n'.join(not_imported))
        #em = "Imported: %d, not imported: %d of total %d" % (total_imported,total_not_imported,total)
        d['imported'] = total_imported
        d['not_imported'] = total_not_imported
        d['valid_records'] = valid_records
        d['invalid_records'] = invalid_records
        d['imported_fn'] = imported_fn
        d['not_imported_fn'] = not_imported_fn
        #logger.info(em)
        return d
    ###)
392
393    security.declarePrivate("_import") ###(
394    def _import_new(self,csv_items,schema, layout, mode,logger):
395        "import data from csv.Dictreader Instance"
396        start = True
397        tr_count = 1
398        total_imported = 0
399        total_not_imported = 0
400        total = 0
401        iname =  "%s" % filename
402        not_imported = []
403        valid_records = []
404        invalid_records = []
405        duplicate_records = []
406        d = {}
407        d['mode'] = mode
408        d['valid_records'] = valid_records
409        d['invalid_records'] = invalid_records
410        d['invalid_records'] = duplicate_records
411        # d['import_fn'] = import_fn
412        # d['imported_fn'] = imported_fn
413        # d['not_imported_fn'] = not_imported_fn
414        validators = {}
415        for widget in layout.keys():
416            try:
417                validators[widget] = layout[widget].validate
418            except AttributeError:
419                logger.info('%s has no validate attribute' % widget)
420                return d
421        for item in csv_items:
422            if start:
423                start = False
424                logger.info('%s starts import from %s.csv' % (member,filename))
425                #import_keys = [k for k in item.keys() if not k.startswith('ignore')]
426                attrs = csv.reader(open("%s/import/%s.csv" % (i_home,filename),"rb")).next()
427                import_keys = [k for k in attrs if not (k.startswith('ignore') or k.isupper())]
428                diff2schema = set(import_keys).difference(set(schema.keys()))
429                diff2layout = set(import_keys).difference(set(layout.keys()))
430                if diff2layout:
431                    em = "not ignorable key(s) %s found in heading" % diff2layout
432                    logger.info(em)
433                    return d
434                # s = ','.join(['"%s"' % fn for fn in import_keys])
435                # open(not_imported_fn,"a").write(s + ',"Error"'+ '\n')
436                # #s = '"id",' + s
437                # open(imported_fn,"a").write(s + '\n')
438                # format = ','.join(['"%%(%s)s"' % fn for fn in import_keys])
439                # format_error = format + ',"%(Error)s"'
440                # #format = '"%(id)s",'+ format
441                adapters = [MappingStorageAdapter(schema, item)]
442            dm = DataModel(item, adapters,context=self)
443            ds = DataStructure(data=item,datamodel=dm)
444            error_string = ""
445            for k in import_keys:
446                if not validators[k](ds,mode=mode):
447                    error_string += " %s : %s" % (k,ds.getError(k))
448            if error_string:
449                item['Error'] = error_string
450                #invalid_records.append(dm)
451                invalid_records.append(item)
452                total_not_imported += 1
453            else:
454                em = format % item
455                valid_records.append(dm)
456                #logger.info("%(total_imported)d of %(total)d %(em)s" % vars())
457                tr_count += 1
458                total_imported += 1
459            total += 1
460        # if len(imported) > 0:
461        #     open(imported_fn,"a").write('\n'.join(imported))
462        # if len(not_imported) > 0:
463        #     open(not_imported_fn,"a").write('\n'.join(not_imported))
464        #em = "Imported: %d, not imported: %d of total %d" % (total_imported,total_not_imported,total)
465        d['imported'] = total_imported
466        d['not_imported'] = total_not_imported
467        d['valid_records'] = valid_records
468        d['invalid_records'] = invalid_records
469        return d
470    ###)
471
    security.declarePublic("missingValue")###(
    def missingValue(self):
        """Return the Zope ``Missing.MV`` singleton used as the placeholder
        for absent metadata values."""
        from Missing import MV
        return MV
    ###)
###)
478
class AccommodationTable(WAeUPTable): ###(
    """Catalog of hostel beds; the unique key is the bed id."""

    meta_type = 'WAeUP Accommodation Tool'
    name = "portal_accommodation"
    key = "bed"

    def __init__(self,name=None):
        if name is None:
            name = self.name
        WAeUPTable.__init__(self, name)

    def searchAndReserveBed(self, student_id,bed_type):
        """Reserve a free bed of ``bed_type`` for ``student_id``.

        Returns (1, bed) on success, (-1, msg) when the student already
        holds a bed, (-2, msg) when no bed of that type is free.
        """
        booked = self.evalAdvancedQuery(Eq('student',student_id))
        if len(booked) > 0:
            return -1,"Student with Id %s already booked bed %s." % (student_id,booked[0].bed)
        # Free beds carry the NOT_OCCUPIED marker in their student field;
        # pick the first one in (sort_id, bed) order.
        query = Eq('bed_type',bed_type) & Eq('student',NOT_OCCUPIED)
        free_beds = self.evalAdvancedQuery(query,sortSpecs=('sort_id','bed'))
        if len(free_beds) == 0:
            return -2,"No bed available"
        chosen = free_beds[0]
        self.modifyRecord(bed=chosen.bed,student=student_id)
        s_logger = logging.getLogger('WAeUPTables.AccommodationTable.searchAndReserveBed')
        s_logger.info('%s reserved bed %s' % (student_id,chosen.bed))
        return 1,chosen.bed


InitializeClass(AccommodationTable)

###)
511
class PinTable(WAeUPTable): ###(
    """Catalog of payment PINs; the unique key field is 'pin'."""
    from ZODB.POSException import ConflictError
    security = ClassSecurityInfo()
    meta_type = 'WAeUP Pin Tool'
    name = "portal_pins"
    key = 'pin'

    def __init__(self,name=None):
        if name ==  None:
            name = self.name
        WAeUPTable.__init__(self, name)

    security.declareProtected(ModifyPortalContent,"dumpAll")###(
    def dumpAll(self,include_unused=None):
        """dump all data in the table to a csv"""
        member = self.portal_membership.getAuthenticatedMember()
        logger = logging.getLogger('WAeUPTables.PinTable.dumpAll')
        # BUG FIX: the permission gate used to run AFTER the export file was
        # created, so an unauthorized call left an empty file behind; check
        # first.
        if include_unused is not None and str(member) not in ('admin','joachim'):
            logger.info('%s tries to dump pintable with unused pins' % (member))
            return
        current = DateTime.DateTime().strftime("%d-%m-%y_%H_%M_%S")
        export_file = "%s/export/%s_%s.csv" % (i_home,self.__name__,current,)
        if hasattr(self,"export_keys"):
            fields = self.export_keys
        else:
            fields = [f for f in self.schema()]
        headline = ','.join(fields)
        out = open(export_file,"wb")
        out.write(headline +'\n')
        out.close()
        out = open(export_file,"a")
        csv_writer = csv.DictWriter(out,fields,)
        if include_unused is not None:
            records = self()
        else:
            # Default: only pins that have been assigned to a student.
            records = self.evalAdvancedQuery(~Eq('student',''))
        nr2export = len(records)
        logger.info('%s starts dumping, %s records to export' % (member,nr2export))
        chunk = 2000
        total = 0
        lines = []
        start = DateTime.DateTime().timeTime()
        start_chunk = DateTime.DateTime().timeTime()
        for record in records:
            d = self.record2dict(fields,record)
            lines.append(d)
            total += 1
            # Flush in chunks so memory stays bounded and progress is logged.
            if total and not total % chunk or total == len(records):
                csv_writer.writerows(lines)
                anz = len(lines)
                logger.info("wrote %(anz)d  total written %(total)d" % vars())
                end_chunk = DateTime.DateTime().timeTime()
                duration = end_chunk-start_chunk
                per_record = duration/anz
                till_now = end_chunk - start
                avarage_per_record = till_now/total
                estimated_end = DateTime.DateTime(start + avarage_per_record * nr2export)
                estimated_end = estimated_end.strftime("%H:%M:%S")
                logger.info('%(duration)4.1f, %(per_record)4.3f,end %(estimated_end)s' % vars())
                start_chunk = DateTime.DateTime().timeTime()
                lines = []
        # BUG FIX: close the export file handle (previously leaked).
        out.close()
        end = DateTime.DateTime().timeTime()
        logger.info('total time %6.2f m' % ((end-start)/60))
        msg = "wrote %(total)d records to %(export_file)s" % vars()
        logger.info(msg)
        url = self.REQUEST.get('URL2')
        return self.REQUEST.RESPONSE.redirect(url)
    ###)

    def searchAndSetRecord(self, uid, student_id,prefix):
        """Assign ``student_id`` to pin ``uid``.

        Return codes: (1, record) pin newly assigned; (2, record) already
        assigned to this student or ZODB write conflict; (0, record) pin
        belongs to another student; (-1, None) unknown pin.
        """
        # BUG FIX: ConflictError was only a class attribute, so the bare
        # name in the except clause below raised NameError whenever a real
        # conflict occurred; import it into the local scope instead.
        from ZODB.POSException import ConflictError

        # The following line must be activated after resetting the
        # the portal_pins table. This is to avoid duplicate entries
        # and disable duplicate payments.

        #student_id = student_id.upper()

        #records = self.searchResults(student = student_id)
        #if len(records) > 0 and prefix in ('CLR','APP'):
        #    for r in records:
        #        if r.pin != uid and r.prefix_batch.startswith(prefix):
        #            return -2
        records = self.searchResults({"%s" % self.key : uid})
        if len(records) > 1:
            # Can not happen, but anyway...
            raise ValueError("More than one record with uid %s" % uid)
        if len(records) == 0:
            return -1,None
        record = records[0]
        if record.student == "":
            # Unused pin: copy the record, set the student and re-catalog.
            record_data = {}
            for field in self.schema() + self.indexes():
                record_data[field] = getattr(record, field)
            # Add the updated data:
            record_data['student'] = student_id
            try:
                self.catalog_object(dict2ob(record_data), uid)
                return 1,record
            except ConflictError:
                return 2,record
        if record.student.upper() != student_id.upper():
            return 0,record
        if record.student.upper() == student_id.upper():
            return 2,record
        return -3,record

InitializeClass(PinTable)
###)
637
class PumeResultsTable(WAeUPTable): ###(
    """Catalog of PUME examination results, keyed by the JAMB
    registration number."""

    meta_type = 'WAeUP PumeResults Tool'
    name = "portal_pumeresults"
    key = "jamb_reg_no"

    def __init__(self,name=None):
        if name is None:
            name = self.name
        WAeUPTable.__init__(self, name)


InitializeClass(PumeResultsTable)

###)
652
class ApplicantsCatalog(WAeUPTable): ###(
    """Catalog of applicant records, keyed by the registration number."""

    meta_type = 'WAeUP Applicants Catalog'
    name = "applicants_catalog"
    key = "reg_no"
    security = ClassSecurityInfo()
    # Historic explicit export column list, superseded by schema-driven
    # export; kept for reference.
    #export_keys = (
    #               "reg_no",
    #               "status",
    #               "lastname",
    #               "sex",
    #               "date_of_birth",
    #               "lga",
    #               "email",
    #               "phone",
    #               "passport",
    #               "entry_mode",
    #               "pin",
    #               "screening_type",
    #               "registration_date",
    #               "testdate",
    #               "application_date",
    #               "screening_date",
    #               "faculty",
    #               "department",
    #               "course1",
    #               "course2",
    #               "course3",
    #               "eng_score",
    #               "subj1",
    #               "subj1score",
    #               "subj2",
    #               "subj2score",
    #               "subj3",
    #               "subj3score",
    #               "aggregate",
    #               "course_admitted",
    #               )

    def __init__(self,name=None):
        # Default to the class-level catalog id when none is supplied.
        if name ==  None:
            name = self.name
        WAeUPTable.__init__(self, name)
696
697    security.declareProtected(ModifyPortalContent,"new_importCSV")###(
698    def new_importCSV(self,filename="JAMB_data",
699                  schema_id="application",
700                  layout_id="import_application",
701                  mode='add'):
702        """ import JAMB data """
703        current = DateTime.DateTime().strftime("%d-%m-%y_%H_%M_%S")
704        pm = self.portal_membership
705        member = pm.getAuthenticatedMember()
706        logger = logging.getLogger('WAeUPTables.ApplicantsCatalog.importCSV')
707        lock_fn = "%s/import/%s_import_lock" % (i_home,filename)
708        import_fn = "%s/import/%s.csv" % (i_home,filename)
709        if mode not in ('add','edit'):
710            logger.info("invalid mode: %s" % mode)
711        if os.path.exists(lock_fn):
712            logger.info("import of %(import_fn)s already in progress" % vars())
713            return
714        lock_file = open(lock_fn,"w")
715        lock_file.write("%(current)s \n" % vars())
716        lock_file.close()
717        invalid_fn = "%s/import/%s_not_imported%s.csv" % (i_home,filename,current)
718        duplicate_fn = "%s/import/%s_not_imported%s.csv" % (i_home,filename,current)
719        stool = getToolByName(self, 'portal_schemas')
720        ltool = getToolByName(self, 'portal_layouts')
721        schema = stool._getOb(schema_id)
722        if schema is None:
723            em = 'No such schema %s' % schema_id
724            logger.error(em)
725            return
726        for postfix in ('_import',''):
727            layout_name = "%(layout_id)s%(postfix)s" % vars()
728            if hasattr(ltool,layout_name):
729                break
730        layout = ltool._getOb(layout_name)
731        if layout is None:
732            em = 'No such layout %s' % layout_id
733            logger.error(em)
734            return
735        try:
736            csv_file = csv.DictReader(open(import_fn,"rb"))
737        except:
738            em = 'Error reading %s.csv' % filename
739            logger.error(em)
740            return
741        d = self._import_new(csv_items,schema,layout,mode,logger)
742        imported = []
743        edited = []
744        duplicates = []
745        not_found = []
746        if len(d['valid_records']) > 0:
747            for record in d['valid_records']:
748                #import pdb;pdb.set_trace()
749                if mode == "add":
750                    try:
751                        self.addRecord(**dict(record.items()))
752                        imported.append(**dict(record.items()))
753                        logger.info("added %s" % record.items())
754                    except ValueError:
755                        dupplicate.append(**dict(record.items()))
756                        logger.info("duplicate %s" % record.items())
757                elif mode == "edit":
758                    try:
759                        self.modifyRecord(**dict(record.items()))
760                        edited.append(**dict(record.items()))
761                        logger.info("edited %s" % record.items())
762                    except KeyError:
763                        not_found.append(**dict(record.items()))
764                        logger.info("not found %s" % record.items())
765        invalid = d['invalid_records']
766        for itype in ("imported","edited","not_found","duplicate","invalid"):
767            outlist = locals[itype]
768            if len(outlist):
769                d = {}
770                for k in outlist[0].keys():
771                    d[k] = k
772                outlist[0] = d
773                outfile = open("file_name_%s" % itype,'w')
774                csv.DictWriter(outfile,outlist[0].keys()).writerows(outlist)
775                logger.info("wrote %(itype)s records to %(, written to %(not_imported_fn)s" % d)
776###)
777
778    security.declareProtected(ModifyPortalContent,"importCSV")###(
779    def importCSV(self,filename="JAMB_data",
780                  schema_id="application",
781                  layout_id="application_pce",
782                  mode='add'):
783        """ import JAMB data """
784        stool = getToolByName(self, 'portal_schemas')
785        ltool = getToolByName(self, 'portal_layouts')
786        schema = stool._getOb(schema_id)
787        if schema is None:
788            em = 'No such schema %s' % schema_id
789            logger.error(em)
790            return
791        layout = ltool._getOb(layout_id)
792        if layout is None:
793            em = 'No such layout %s' % layout_id
794            logger.error(em)
795            return
796        logger = logging.getLogger('WAeUPTables.ApplicantsCatalog.importCSV')
797        d = self._import_old(filename,schema,layout,mode,logger)
798        if len(d['valid_records']) > 0:
799            for record in d['valid_records']:
800                #import pdb;pdb.set_trace()
801                if mode == "add":
802                    self.addRecord(**dict(record.items()))
803                    logger.info("added %s" % record.items())
804                elif mode == "edit":
805                    self.modifyRecord(**dict(record.items()))
806                    logger.info("edited %s" % record.items())
807                else:
808                    logger.info("invalid mode: %s" % mode)
809        logger.info("%(mode)sed %(imported)d records, invalid written to %(not_imported_fn)s" % d)
810    ###)
811
# Register the class' Zope security declarations.
InitializeClass(ApplicantsCatalog)

###)
815
class StudentsCatalog(WAeUPTable): ###(
    """Flat catalog holding one record per student, keyed by the student id.

    Each record aggregates selected fields from the student's CPS
    sub-documents (application, clearance, personal, study_course -- see
    ``affected_types``) plus the workflow ``review_state``.  The table is
    kept up to date incrementally by ``notify_event_listener`` and can be
    rebuilt wholesale with ``refreshCatalog`` or per index with
    ``reindexIndex``.
    """
    security = ClassSecurityInfo()

    meta_type = 'WAeUP Students Catalog'
    name = "students_catalog"
    key = "id"
    # Maps the portal_type of a student sub-object to the id under which it
    # lives inside the Student folder ('id') and the catalog fields that are
    # sourced from that sub-object ('fields').
    affected_types = {   ###(
                      'StudentApplication':
                      {'id': 'application',
                       'fields':
                       ('jamb_reg_no',
                        'entry_mode',
                        #'entry_level',
                        'entry_session',
                       )
                      },
                      'StudentClearance':
                      {'id': 'clearance',
                       'fields':
                       ('matric_no',
                        'lga',
                       )
                      },
                      'StudentPersonal':
                      {'id': 'personal',
                       'fields':
                       ('name',
                        'sex',
                        'perm_address',
                        'email',
                        'phone',
                       )
                      },
                      'StudentStudyCourse':
                      {'id': 'study_course',
                       'fields':
                       ('course', # study_course
                        'faculty', # from certificate
                        'department', # from certificate
                        'end_level', # from certificate
                        'level', # current_level
                        'mode',  # current_mode
                        'session', # current_session
                        'verdict', # current_verdict
                       )
                      },
                     }
    ###)

    def __init__(self,name=None):
        # Fall back to the class-level catalog name when none is supplied.
        if name ==  None:
            name = self.name
        WAeUPTable.__init__(self, name)
        return

    def manage_catalogClear(self, REQUEST=None, RESPONSE=None, URL1=None):
        """ clears the whole enchilada """
        self._catalog.clear()

        if REQUEST and RESPONSE:
            RESPONSE.redirect(
              URL1 +
              '/manage_catalogAdvanced?manage_tabs_message=Catalog%20Cleared')

    def manage_catalogReindex(self, REQUEST, RESPONSE, URL1): ###(
        """ clear the catalog, then re-index everything """

        # Wall-clock and CPU timings are reported in the redirect message.
        elapse = time.time()
        c_elapse = time.clock()

        pgthreshold = self._getProgressThreshold()
        handler = (pgthreshold > 0) and ZLogHandler(pgthreshold) or None
        self.refreshCatalog(clear=1, pghandler=handler)

        elapse = time.time() - elapse
        c_elapse = time.clock() - c_elapse

        RESPONSE.redirect(
            URL1 +
            '/manage_catalogAdvanced?manage_tabs_message=' +
            urllib.quote('Catalog Updated \n'
                         'Total time: %s\n'
                         'Total CPU time: %s' % (`elapse`, `c_elapse`)))
    ###)

    def fill_certificates_dict(self): ###(
        "return certificate data in  dict"
        # Pre-computes, per certificate id, faculty/department (read from the
        # certificate's catalog path) and end_level, so that the
        # get_from_doc_* helpers can skip one catalog query per student.
        certificates_brains = self.portal_catalog(portal_type ='Certificate')
        d = {}
        for cb in certificates_brains:
            certificate_doc = cb.getObject().getContent()
            cb_path = cb.getPath().split('/')
            ld = {}
            # faculty and department are the path segments 4 and 3 levels
            # above the certificate object.
            ld['faculty'] = cb_path[-4]
            ld['department'] = cb_path[-3]
            ld['end_level'] = getattr(certificate_doc,'end_level','999')
            d[cb.getId] = ld
        return d
    ###)

    def get_from_doc_department(self,doc,cached_data={}): ###(
        "return the students department"
        # doc is a StudentStudyCourse content object; doc.study_course holds
        # the certificate id used for the lookup.
        if doc is None:
            return None
        if cached_data.has_key(doc.study_course):
            return cached_data[doc.study_course]['department']
        certificate_res = self.portal_catalog(id = doc.study_course)
        if len(certificate_res) != 1:
            return None
        return certificate_res[0].getPath().split('/')[-3]

    def get_from_doc_faculty(self,doc,cached_data={}):
        "return the students faculty"
        if doc is None:
            return None
        if cached_data.has_key(doc.study_course):
            return cached_data[doc.study_course]['faculty']
        certificate_res = self.portal_catalog(id = doc.study_course)
        if len(certificate_res) != 1:
            return None
        return certificate_res[0].getPath().split('/')[-4]

    def get_from_doc_end_level(self,doc,cached_data={}):
        "return the students end_level"
        if doc is None:
            return None
        if cached_data.has_key(doc.study_course):
            return cached_data[doc.study_course]['end_level']
        certificate_res = self.portal_catalog(id = doc.study_course)
        if len(certificate_res) != 1:
            return None
        return getattr(certificate_res[0].getObject().getContent(),'end_level','unknown')

    def get_from_doc_level(self,doc,cached_data={}):
        "return the students level"
        if doc is None:
            return None
        return getattr(doc,'current_level',None)

    def get_from_doc_mode(self,doc,cached_data={}):
        "return the students mode"
        if doc is None:
            return None
        cm = getattr(doc,'current_mode',None)
        return cm


    def get_from_doc_session(self,doc,cached_data={}):
        "return the students current_session"
        if doc is None:
            return None
        return getattr(doc,'current_session',None)

    def get_from_doc_entry_session(self,doc,cached_data={}):
        "return the students entry_session"
        if doc is None:
            return None
        es = getattr(doc,'entry_session',None)
        if es is not None and len(es) == 2:
            return es
        # Fall back to deriving the session from the first digit of the JAMB
        # registration number: digits 0-7 presumably map to sessions '0D'
        # (2000s), 8-9 to '9D' (1990s) -- TODO confirm this encoding.
        try:
            digit = int(doc.jamb_reg_no[0])
        except:
            return "-1"
        if digit < 8:
            return "0%c" % doc.jamb_reg_no[0]
        return "9%c" % doc.jamb_reg_no[0]

    def get_from_doc_course(self,doc,cached_data={}):
        "return the students study_course"
        if doc is None:
            return None
        return getattr(doc,'study_course',None)

    def get_from_doc_name(self,doc,cached_data={}):
        "return the students name from the personal"
        if doc is None:
            return None
        return "%s %s %s" % (doc.firstname,doc.middlename,doc.lastname)

    def get_from_doc_verdict(self,doc,cached_data={}):
        "return the students study_course"
        if doc is None:
            return None
        return getattr(doc,'current_verdict',None)
    ###)

    def reindexIndex(self, name, REQUEST,pghandler=None): ###(
        """Re-index the given index name(s) for every Student record."""
        if isinstance(name, str):
            name = (name,)
        # Partition the requested indexes into specially handled ones and
        # those sourced from one of the affected_types sub-documents.
        reindextypes = {}
        reindex_special = []
        for n in name:
            if n in ("review_state","registered_courses"):
                reindex_special.append(n)
            else:
                for pt in self.affected_types.keys():
                    if n in self.affected_types[pt]['fields']:
                        if reindextypes.has_key(pt):
                            reindextypes[pt].append(n)
                        else:
                            reindextypes[pt]= [n]
                        break
        cached_data = {}
        # Certificate-derived fields use the pre-computed lookup dict.
        if set(name).intersection(set(('faculty','department','end_level'))):
            cached_data = self.fill_certificates_dict()
        students = self.portal_catalog(portal_type="Student")
        # NOTE(review): aq_portal is assigned but no longer used since the
        # sub_brains lookup below was commented out.
        if hasattr(self,'portal_catalog_real'):
            aq_portal = self.portal_catalog_real.evalAdvancedQuery
        else:
            aq_portal = self.portal_catalog.evalAdvancedQuery
        num_objects = len(students)
        if pghandler:
            pghandler.init('Refreshing catalog: %s' % self.absolute_url(1), num_objects)
        noattr = set(('StudentClearance','StudentPersonal')) & set(reindextypes.keys())
        #import pdb;pdb.set_trace()
        for i in xrange(num_objects):
            if pghandler: pghandler.report(i)
            student_brain = students[i]
            student_object = student_brain.getObject()
            # query = Eq('path',student_brain.getPath())
            # sub_brains_list = aq_portal(query)
            # sub_brains = {}
            # for sub_brain in sub_brains_list:
            #     sub_brains[sub_brain.portal_type] = sub_brain
            # student_path = student_brain.getPath()
            data = {}
            modified = False
            sid = data['id'] = student_brain.getId
            if reindex_special and 'review_state' in reindex_special:
                modified = True
                data['review_state'] = student_brain.review_state
            sub_objects = False
            for pt in reindextypes.keys():
                modified = True
                try:
                    doc = getattr(student_object,self.affected_types[pt]['id']).getContent()
                    #doc = sub_brains[pt].getObject().getContent()
                    # path = "%s/%s" % (student_path,self.affected_types[pt]['id'])
                    # doc = self.unrestrictedTraverse(path).getContent()
                    sub_objects = True
                except:
                    # Sub-object missing for this student; skip its fields.
                    continue
                for field in set(name).intersection(self.affected_types[pt]['fields']):
                    # Prefer a dedicated get_from_doc_<field> accessor if one
                    # exists, otherwise read the attribute directly.
                    if hasattr(self,'get_from_doc_%s' % field):
                        data[field] = getattr(self,'get_from_doc_%s' % field)(doc,
                                                                              cached_data=cached_data)
                    else:
                        data[field] = getattr(doc,field)
            if not sub_objects and noattr:
                # No sub-documents found: fall back to the returning_import
                # table for the basic identification fields.
                import_res = self.returning_import(id = sid)
                if not import_res:
                    continue
                import_record = import_res[0]
                data['matric_no'] = import_record.matric_no
                # sex is stored as a boolean: True for female ('F').
                data['sex'] = import_record.Sex == 'F'
                data['name'] = "%s %s %s" % (import_record.Firstname,
                                             import_record.Middlename,
                                             import_record.Lastname)
                data['jamb_reg_no'] = import_record.Entryregno
            #if reindex_special and 'registered_courses' in reindex_special:
            #    try:
            #        study_course = getattr(student_object,"study_course")
            #        level_ids = study_course.objectIds()
            #    except:
            #        continue
            #    if not level_ids:
            #        continue
            #    modified = True
            #    level_ids.sort()
            #    course_ids = getattr(study_course,level_ids[-1]).objectIds()
            #    courses = []
            #    for c in course_ids:
            #        if c.endswith('_co'):
            #            courses.append(c[:-3])
            #        else:
            #            courses.append(c)
            #    data['registered_courses'] = courses
            if modified:
                self.modifyRecord(**data)
        if pghandler: pghandler.finish()
    ###)

    def refreshCatalog(self, clear=0, pghandler=None): ###(
        """ re-index everything we can find """
        students_folder = self.portal_url.getPortalObject().campus.students
        if clear:
            self._catalog.clear()
        students = self.portal_catalog(portal_type="Student")
        num_objects = len(students)
        cached_data = self.fill_certificates_dict()
        if pghandler:
            pghandler.init('Refreshing catalog: %s' % self.absolute_url(1), num_objects)
        for i in xrange(num_objects):
            if pghandler: pghandler.report(i)
            student_brain = students[i]
            spath = student_brain.getPath()
            student_object = student_brain.getObject()
            data = {}
            sid = data['id'] = student_brain.getId
            data['review_state'] = student_brain.review_state
            sub_objects = False
            for pt in self.affected_types.keys():
                # NOTE(review): 'modified' is set but never read here
                # (unlike in reindexIndex); records are always added below.
                modified = True
                try:
                    doc = getattr(student_object,self.affected_types[pt]['id']).getContent()
                    sub_objects = True
                except:
                    #from pdb import set_trace;set_trace()
                    continue
                for field in self.affected_types[pt]['fields']:
                    if hasattr(self,'get_from_doc_%s' % field):
                        data[field] = getattr(self,'get_from_doc_%s' % field)(doc,
                                                                              cached_data=cached_data)
                    else:
                        data[field] = getattr(doc,field,None)
            if not sub_objects:
                # No sub-documents at all: fall back to returning_import.
                import_res = self.returning_import(id = sid)
                if not import_res:
                    continue
                import_record = import_res[0]
                data['matric_no'] = import_record.matric_no
                data['sex'] = import_record.Sex == 'F'
                data['name'] = "%s %s %s" % (import_record.Firstname,
                                             import_record.Middlename,
                                             import_record.Lastname)
                data['jamb_reg_no'] = import_record.Entryregno
            self.addRecord(**data)
        if pghandler: pghandler.finish()
    ###)

    security.declarePrivate('notify_event_listener') ###(
    def notify_event_listener(self,event_type,object,infos):
        "listen for events"
        if not infos.has_key('rpath'):
            return
        pt = getattr(object,'portal_type',None)
        mt = getattr(object,'meta_type',None)
        students_catalog = self
        data = {}
        # Workflow transition on a Student proxy: only review_state changes.
        if pt == 'Student' and\
           mt == 'CPS Proxy Folder' and\
           event_type.startswith('workflow'):
            data['id'] = object.getId()
            data['review_state'] = self.portal_workflow.getInfoFor(object,'review_state',None)
            students_catalog.modifyRecord(**data)
            return
        rpl = infos['rpath'].split('/')
        if pt == 'Student' and mt == 'CPS Proxy Folder':
            student_id = object.id
            if event_type == "sys_add_object":
                try:
                    self.addRecord(id = student_id)
                except ValueError:
                    # Record already exists; nothing to do.
                    pass
                return
            elif event_type == 'sys_del_object':
                self.deleteRecord(student_id)
        if pt not in self.affected_types.keys():
            return
        # NOTE(review): the parentheses below contain no comma, so this is a
        # substring test on the string 'sys_modify_object', not tuple
        # membership -- it happens to work for the event names used here.
        if event_type not in ('sys_modify_object'):
            return
        if mt == 'CPS Proxy Folder':
            return
        for field in self.affected_types[pt]['fields']:
            if hasattr(self,'get_from_doc_%s' % field):
                data[field] = getattr(self,'get_from_doc_%s' % field)(object)
            else:
                data[field] = getattr(object,field)
        # rpath looks like campus/students/<student_id>/... -- presumably
        # index 2 is the student id; verify against the portal layout.
        data['id'] = rpl[2]
        self.modifyRecord(**data)
    ###)


InitializeClass(StudentsCatalog)

###)
1193
class CoursesCatalog(WAeUPTable): ###(
    """Flat catalog with one record per course, keyed by the course code.

    Besides the fields of the course document itself, each record carries
    the faculty and department ids, read from the course's position in the
    'academics' hierarchy.  Kept in sync by ``notify_event_listener``.
    """
    security = ClassSecurityInfo()

    meta_type = 'WAeUP Courses Catalog'
    name =  "courses_catalog"
    key = "code"
    def __init__(self,name=None):
        # Fall back to the class-level catalog name when none is supplied.
        if name ==  None:
            name =  self.name
        WAeUPTable.__init__(self, name)

    def manage_catalogReindex(self, REQUEST, RESPONSE, URL1): ###(
        """ clear the catalog, then re-index everything """

        # Wall-clock and CPU timings are reported in the redirect message.
        elapse = time.time()
        c_elapse = time.clock()

        pgthreshold = self._getProgressThreshold()
        handler = (pgthreshold > 0) and ZLogHandler(pgthreshold) or None
        self.refreshCatalog(clear=1, pghandler=handler)

        elapse = time.time() - elapse
        c_elapse = time.clock() - c_elapse

        RESPONSE.redirect(
            URL1 +
            '/manage_catalogAdvanced?manage_tabs_message=' +
            urllib.quote('Catalog Updated \n'
                         'Total time: %s\n'
                         'Total CPU time: %s' % (`elapse`, `c_elapse`)))
    ###)

    def reindexIndex(self, name, REQUEST,pghandler=None): ###(
        """Re-index the given field name(s) for every Course object."""
        if isinstance(name, str):
            name = (name,)
        courses = self.portal_catalog(portal_type="Course")
        num_objects = len(courses)
        if pghandler:
            pghandler.init('Refreshing catalog: %s' % self.absolute_url(1), num_objects)
        for i in xrange(num_objects):
            if pghandler: pghandler.report(i)
            course_brain = courses[i]
            course_object = course_brain.getObject()
            pl = course_brain.getPath().split('/')
            data = {}
            cid = data[self.key] = course_brain.getId
            # faculty and department are the path segments 4 and 3 levels
            # above the course object.
            data['faculty'] = pl[-4]
            data['department'] = pl[-3]
            doc = course_object.getContent()
            for field in name:
                if field not in (self.key,'faculty','department'):
                    data[field] = getattr(doc,field)
            self.modifyRecord(**data)
        if pghandler: pghandler.finish()
    ###)

    def refreshCatalog(self, clear=0, pghandler=None): ###(
        """ re-index everything we can find """
        if clear:
            self._catalog.clear()
        courses = self.portal_catalog(portal_type="Course")
        num_objects = len(courses)
        if pghandler:
            pghandler.init('Refreshing catalog: %s' % self.absolute_url(1), num_objects)
        #from pdb import set_trace;set_trace()
        for i in xrange(num_objects):
            if pghandler: pghandler.report(i)
            course_brain = courses[i]
            course_doc = course_brain.getObject().getContent()
            pl = course_brain.getPath().split('/')
            data = {}
            for field in self.schema():
                data[field] = getattr(course_doc,field,None)
            data[self.key] = course_brain.getId
            # Locate faculty/department relative to the 'academics' segment.
            ai = pl.index('academics')
            data['faculty'] = pl[ai +1]
            data['department'] = pl[ai +2]
            if clear:
                self.addRecord(**data)
            else:
                self.modifyRecord(**data)
        if pghandler: pghandler.finish()
    ###)

    security.declarePrivate('notify_event_listener') ###(
    def notify_event_listener(self,event_type,object,infos):
        "listen for events"
        if not infos.has_key('rpath'):
            return
        pt = getattr(object,'portal_type',None)
        mt = getattr(object,'meta_type',None)
        if pt != 'Course':
            return
        data = {}
        rpl = infos['rpath'].split('/')
        if event_type not in ("sys_add_object","sys_modify_object","sys_del_object"):
            return
        course_id = object.getId()
        data[self.key] = course_id
        if event_type == "sys_add_object" and mt == 'CPS Proxy Folder':
            # New course proxy: create the record, then fill it from the
            # course document if one is already available.
            try:
                self.addRecord(**data)
            except ValueError:
                # Record already exists.
                return
            course_id = object.getId()
            doc = object.getContent()
            if doc is None:
                return
            for field in self.schema():
                data[field] = getattr(doc,field,None)
            data[self.key] = course_id
            ai = rpl.index('academics')
            data['faculty'] = rpl[ai +1]
            data['department'] = rpl[ai +2]
            self.modifyRecord(**data)
            return
        if event_type == "sys_del_object":
            self.deleteRecord(course_id)
            return
        if event_type == "sys_modify_object" and mt == 'Course':
            #from pdb import set_trace;set_trace()
            for field in self.schema():
                data[field] = getattr(object,field,None)
            # The real Course document sits inside its proxy folder, whose
            # id is the course code.
            course_id = object.aq_parent.getId()
            data[self.key] = course_id
            ai = rpl.index('academics')
            data['faculty'] = rpl[ai +1]
            data['department'] = rpl[ai +2]
            self.modifyRecord(**data)
    ###)


InitializeClass(CoursesCatalog)
###)
1328
class CourseResults(WAeUPTable): ###(
    """Catalog of per-course results for students.

    Records are keyed by '<student_id>|<level_id>|<course_id>'.  Results are
    moved here from the level folders (``moveResultsHere``) and aggregated
    per level by ``getCourses``.
    """
    security = ClassSecurityInfo()

    meta_type = 'WAeUP Results Catalog'
    name = "course_results"
    key = "key" #student_id + level + course_id
    def __init__(self,name=None):
        # Fall back to the class-level catalog name when none is supplied.
        if name ==  None:
            name = self.name
        WAeUPTable.__init__(self, name)
        self._queue = []

    def addMultipleRecords(self, records): ###(
        """add many records"""
        # NOTE(review): only the *last* uid is returned (and 'uid' is
        # unbound for an empty records list); 'added_keys' is never filled.
        # Also the error message is misleading: it fires as soon as ONE
        # record with that uid already exists.
        added_keys = []
        for data in records:
            uid = key = "%(student_id)s|%(level_id)s|%(course_id)s" % data
            data['%s' % self.key] = uid
            res = self.searchResults({"%s" % self.key : uid})
            if len(res) > 0:
                raise ValueError("More than one record with uid %s" % uid)
            self.catalog_object(dict2ob(data), uid=uid)
        return uid
    ###)

    def deleteResultsHere(self,level_id,student_id): ###(
        # Remove all result records of this student for the given level.
        query = Eq('student_id',student_id) & Eq('level_id', level_id)
        course_results = self.course_results.evalAdvancedQuery(query)
        for result in course_results:
            self.deleteRecord(result.key)
    ###)

    def moveResultsHere(self,level,student_id): ###(
        # Transfer the course-result objects contained in the given level
        # folder into this catalog, then delete them from the folder.
        level_id = level.getId()
        query = Eq('student_id',student_id) & Eq('level_id', level_id)
        course_results = self.course_results.evalAdvancedQuery(query)
        existing_courses = [cr.code for cr in course_results]
        to_delete = []
        for code,obj in level.objectItems():
            to_delete.append(code)
            carry_over = False
            # A '_co' suffix marks a carry-over course.
            if code.endswith('_co'):
                carry_over = True
                code  = code[:-3]
            if code in existing_courses:
                # Already cataloged; still gets deleted from the folder.
                continue
            course_result_doc = obj.getContent()
            data = {}
            course_id = code
            for field in self.schema():
                data[field] = getattr(course_result_doc,field,'')
            data['key'] = key = "%(student_id)s|%(level_id)s|%(course_id)s" % vars()
            data['student_id'] = student_id
            data['level_id'] = level_id
            session_id = self.getLevelSession(level.getContent(),student_id,level_id)
            data['session_id'] = session_id
            #data['queue_status'] = OBJECT_CREATED
            data['code'] = course_id
            data['carry_over'] = carry_over
            self.catalog_object(dict2ob(data), uid=key)
        level.manage_delObjects(to_delete)
    ###)

    def getCourses(self,student_id,level_id): ###(
        # Collect this student's results for one level and split them into
        # carry-overs and first/second/unknown-semester courses; also
        # accumulates total credits and the weighted grade sum (gpa).
        query = Eq('student_id',student_id) & Eq('level_id', level_id)
        course_results = self.course_results.evalAdvancedQuery(query)
        carry_overs = []
        normal1 = []
        normal2 = []
        normal3 = []
        total_credits = 0
        gpa = 0
        for brain in course_results:
            d = {}

            for field in self.schema():
                d[field] = getattr(brain,field,'')

            d['weight'] = ''
            d['grade'] = ''
            d['score'] = ''

            if str(brain.credits).isdigit():
                credits = int(brain.credits)
                total_credits += credits
                score = getattr(brain,'score',0)
                if score and str(score).isdigit() and int(score) > 0:
                    score = int(score)
                    grade,weight = self.getGradesFromScore(score)
                    # gpa here is the credit-weighted sum; presumably the
                    # caller divides by total_credits -- TODO confirm.
                    gpa += weight * credits
                    d['weight'] = weight
                    d['grade'] = grade
                    d['score'] = score
            # core_or_elective: True -> Core, False -> Elective,
            # anything else (e.g. None) -> left empty.
            d['coe'] = ''
            if brain.core_or_elective:
                d['coe'] = 'Core'
            elif brain.core_or_elective == False:
                d['coe'] = 'Elective'
            id = code = d['id'] = brain.code
            d['code'] = code
            res = self.courses_catalog.evalAdvancedQuery(Eq('code',code))
            if res:
                course = res[0]
                d['title'] = course.title
                # The courses_catalog contains strings and integers in its semester field.
                # Maybe this can be fixed by reindexing the catalog. The schema of course says 'CPS Int Field'.
                d['semester'] = str(course.semester)
            else:
                d['title'] = "Course has been removed from course list"
                d['semester'] = ''
            if brain.carry_over:
                d['coe'] = 'CO'
                carry_overs.append(d)
            else:
                if d['semester'] == '1':
                    normal1.append(d)

                elif d['semester'] == '2':
                    normal2.append(d)
                else:
                    normal3.append(d)
        #normal.sort(cmp=lambda x,y: cmp("%(semester)s%(code)s" % x,
        #                                "%(semester)s%(code)s" % y))
        carry_overs.sort(cmp=lambda x,y: cmp("%(semester)s%(code)s" % x,
                                             "%(semester)s%(code)s" % y))
        return total_credits,gpa,carry_overs,normal1,normal2,normal3
    ###)

InitializeClass(CourseResults)
###)
1461
class OnlinePaymentsImport(WAeUPTable): ###(
    """Table of imported online payment transactions, keyed by order id."""

    meta_type = 'WAeUP Online Payment Transactions'
    name = "online_payments_import"
    key = "order_id"

    def __init__(self, name=None):
        # Use the class-level default name unless the caller supplies one.
        if name is None:
            name = self.name
        WAeUPTable.__init__(self, name)


InitializeClass(OnlinePaymentsImport)
###)
1475
class ReturningImport(WAeUPTable): ###(
    """Table of imported returning-student data, keyed by matric number."""

    meta_type = 'Returning Import Table'
    name = "returning_import"
    key = "matric_no"

    def __init__(self, name=None):
        # Use the class-level default name unless the caller supplies one.
        if name is None:
            name = self.name
        WAeUPTable.__init__(self, name)


InitializeClass(ReturningImport)
###)
1489
class ResultsImport(WAeUPTable): ###(
    """Table of imported course results, keyed by a composite 'key' field."""

    meta_type = 'Results Import Table'
    name = "results_import"
    key = "key"

    def __init__(self, name=None):
        # Use the class-level default name unless the caller supplies one.
        if name is None:
            name = self.name
        WAeUPTable.__init__(self, name)


InitializeClass(ResultsImport)

###)
1504
class PaymentsCatalog(WAeUPTable): ###(
    """Catalog of payment records, keyed by order_id.

    Kept in sync with the real Payment documents (not their CPS proxies)
    through ``notify_event_listener``.
    """
    security = ClassSecurityInfo()

    meta_type = 'WAeUP Payments Catalog'
    name = "payments_catalog"
    key = "order_id"
    def __init__(self,name=None):
        # Fall back to the class-level catalog name when none is supplied.
        if name is None:
            name = self.name
        WAeUPTable.__init__(self, name)


    security.declarePrivate('notify_event_listener') ###(
    def notify_event_listener(self,event_type,object,infos):
        "listen for events"
        if not infos.has_key('rpath'):
            return
        pt = getattr(object,'portal_type',None)
        mt = getattr(object,'meta_type',None)
        data = {}
        if pt != 'Payment':
            return
        if mt == 'CPS Proxy Folder':
            return # is handled only for the real object
        if event_type == 'sys_del_object':
            self.deleteRecord(object.order_id)
            # Nothing left to sync after deletion.  (Previously this fell
            # through and only returned via the event-type check below.)
            return
        # Was `event_type not in ('sys_modify_object')` -- the parentheses
        # held a plain string, making that a substring test; compare exactly.
        if event_type != 'sys_modify_object':
            return
        for field in self.schema():
            data[field] = getattr(object,field,'')
        rpl = infos['rpath'].split('/')
        # rpath ends .../<student_id>/payments/<payment_id>/<doc_id>, so the
        # student id sits four segments from the end -- TODO confirm layout.
        student_id = rpl[-4]
        data['student_id'] = student_id
        try:
            self.modifyRecord(**data)
        except KeyError:
            # No record for this order_id yet -- create one instead.
            try:
                self.addRecord(**data)
            except:
                logger = logging.getLogger('WAeUPTables.PaymentsCatalog.notify_event_listener')
                logger.info("could not add or modify entry for %(student_id)s with %(order_id)s" % data)
    ###)


InitializeClass(PaymentsCatalog)

###)
1559
# BBB (backwards compatibility): keep the historically misspelled name
# 'AccomodationTable' as an alias so existing references still resolve.
AccomodationTable = AccommodationTable
Note: See TracBrowser for help on using the repository browser.