source: WAeUP_SRP/trunk/WAeUPTables.py @ 5569

Last change on this file since 5569 was 5561, checked in by Henrik Bettermann, 14 years ago

Change default session in dumpPayments.

  • Property svn:keywords set to Id
File size: 72.1 KB
1#-*- mode: python; mode: fold -*-
2# (C) Copyright 2005 AixtraWare <http://aixtraware.de>
3# Author: Joachim Schmitz <js@aixtraware.de>
4#
5# This program is free software; you can redistribute it and/or modify
6# it under the terms of the GNU General Public License version 2 as published
7# by the Free Software Foundation.
8#
9# This program is distributed in the hope that it will be useful,
10# but WITHOUT ANY WARRANTY; without even the implied warranty of
11# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
12# GNU General Public License for more details.
13#
14# You should have received a copy of the GNU General Public License
15# along with this program; if not, write to the Free Software
16# Foundation, Inc., 59 Temple Place - Suite 330, Boston, MA
17# 02111-1307, USA.
18#
19# $Id: WAeUPTables.py 5561 2010-10-06 09:00:28Z henrik $
20
21from zope.interface import implements
22from Globals import InitializeClass
23from Products.ZCatalog.ZCatalog import ZCatalog
24from Products.ZCatalog.ProgressHandler import ZLogHandler
25from AccessControl import ClassSecurityInfo
26from Products.CMFCore.permissions import ModifyPortalContent
27from Products.CMFCore.utils import getToolByName
28from Products.CMFCore.CatalogTool import CatalogTool
29from Products.CPSSchemas.StorageAdapter import MappingStorageAdapter
30from Products.CPSSchemas.DataStructure import DataStructure
31from Products.CPSSchemas.DataModel import DataModel
32from Products.AdvancedQuery import Eq, Between, Le,In
33import urllib
34import DateTime,time
35import csv,re,os
36import logging
37import Globals
38p_home = Globals.package_home(globals())
39i_home = Globals.INSTANCE_HOME
40
41ADDING_SHEDULED = "adding_sheduled"
42OBJECT_CREATED = "object_created"
43NOT_OCCUPIED = 'not_occupied'
44
45from interfaces import IWAeUPTable
46
47class AttributeHolder(object):
48    pass
49
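# Small helper: copies a dict's items onto an AttributeHolder instance so the
# result exposes the values as attributes, which is what ZCatalog.catalog_object
# expects when it reads field values from the object being cataloged.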
50def dict2ob(dict):
51    ob = AttributeHolder()
52    for key, value in dict.items():
53        setattr(ob, key, value)
54    return ob
55
56class WAeUPTable(ZCatalog): ###(
57
58    implements(IWAeUPTable)
59    security = ClassSecurityInfo()
60    meta_type = None
61
62    def __init__(self,name=None):
63        if name ==  None:
64            name = self.name
65        ZCatalog.__init__(self,name)
66
67    def refreshCatalog(self, clear=0, pghandler=None): ###(
68        """ don't refresh for a normal table """
69
70        if self.REQUEST and self.REQUEST.RESPONSE:
71            self.REQUEST.RESPONSE.redirect(
72              self.REQUEST.get('URL1') +
73              '/manage_catalogAdvanced?manage_tabs_message=Catalog%20refresh%20not%20implemented')
74
75###)
76
77    def manage_catalogClear(self, REQUEST=None, RESPONSE=None, URL1=None): ###(
78        """ clears the whole enchilada """
79
80        #if REQUEST and RESPONSE:
81        #    RESPONSE.redirect(
82        #      URL1 +
83        #      '/manage_catalogAdvanced?manage_tabs_message=Catalog%20Clearing%20disabled')
84
85        self._catalog.clear()
86        if REQUEST and RESPONSE:
87            RESPONSE.redirect(
88              URL1 +
89              '/manage_catalogAdvanced?manage_tabs_message=Catalog%20cleared')
90
91###)
92
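    # Builds a plain dict of the requested fields from a catalog record, for CSV
    # export: the boolean 'sex' field is dumped as 'F'/'M', empty values become '',
    # and with index == 'translate' the 'lga' and 'aos' codes are resolved through
    # the portal vocabularies where possible.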
93    def record2dict(self,fields,record,index): ###(
94        d = {}
95        for key in fields:
96            v = getattr(record, key, None)
97            v_dump = v
98            if key == 'sex':
99                if v == True:
100                    v_dump = 'F'
101                elif v == False:
102                    v_dump = 'M'
103                d[key] = v_dump
104            elif v:
105                if index == 'translate':
106                    if key == 'lga':
107                        v_dump = self.portal_vocabularies.local_gov_areas.get(v)
108                        if not v_dump:
109                            v_dump = v
110                    elif key == 'aos':
111                        v_dump = self.portal_vocabularies.aos.get(v)
112                d[key] = v_dump
113            else:
114                d[key] = ''
115        return d
116
117###)
118
119    def addRecord(self, **data): ###(
120        # The uid is the value of the table's key field, e.g. "bed" for the accommodation table.
121        uid = data[self.key]
122        res = self.searchResults({"%s" % self.key : uid})
123        if len(res) > 0:
124            raise ValueError("A record with uid %s already exists" % uid)
125        self.catalog_object(dict2ob(data), uid=uid)
126        return uid
127
128###)
129
130    def deleteRecord(self, uid):
131        self.uncatalog_object(uid)
132
133    def getRecordByKey(self,key):
134        if not key:
135            return None
136        res = self.evalAdvancedQuery(Eq(self.key,key))
137        if res:
138            return res[0]
139        return None
140
141    def searchAndSetRecord(self, **data):
142        raise NotImplementedError
143
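    # Re-catalogs an existing record with updated field values. The record is
    # looked up by the table's key unless a record brain is passed in; all
    # schema and index fields are copied and then overwritten with **data.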
144    def modifyRecord(self, record=None, **data): ###(
145        #records = self.searchResults(uid=uid)
146        uid = data[self.key]
147        if record is None:
148            records = self.searchResults({"%s" % self.key : uid})
149            if len(records) > 1:
150                # Can not happen, but anyway...
151                raise ValueError("More than one record with uid %s" % uid)
152            if len(records) == 0:
153                raise KeyError("No record for uid %s" % uid)
154            record = records[0]
155        record_data = {}
156        for field in self.schema() + self.indexes():
157            record_data[field] = getattr(record, field)
158        # Add the updated data:
159        record_data.update(data)
160        self.catalog_object(dict2ob(record_data), uid)
161
162###)
163
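    # Rebuilds the named index(es) only: for every cataloged uid the stored
    # metadata is read back and re-cataloged with idxs=name, leaving the
    # metadata itself untouched (update_metadata=0).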
164    def reindexIndex(self, name, REQUEST,pghandler=None): ###(
165        if isinstance(name, str):
166            name =  (name,)
167        paths = self._catalog.uids.items()
168        i = 0
169        #import pdb;pdb.set_trace()
170        for p,rid in paths:
171            i += 1
172            metadata = self.getMetadataForRID(rid)
173            record_data = {}
174            for field in name:
175                record_data[field] = metadata.get(field)
176            uid = metadata.get(self.key)
177            self.catalog_object(dict2ob(record_data), uid, idxs=name,
178                                update_metadata=0)
179
180###)
181
182    security.declareProtected(ModifyPortalContent,"exportAllRecords") ###(
183    def exportAllRecords(self):
184        "export a WAeUPTable"
185        #import pdb;pdb.set_trace()
186        fields = [field for field in self.schema()]
187        format = ','.join(['"%%(%s)s"' % fn for fn in fields])
188        csv = []
189        csv.append(','.join(['"%s"' % fn for fn in fields]))
190        for uid in self._catalog.uids:
191            records = self.searchResults({"%s" % self.key : uid})
192            if len(records) > 1:
193                # Can not happen, but anyway...
194                raise ValueError("More than one record with uid %s" % uid)
195            if len(records) == 0:
196                raise KeyError("No record for uid %s" % uid)
197            rec = records[0]
198            csv.append(format % rec)
199        current = DateTime.DateTime().strftime("%d-%m-%y_%H_%M_%S")
200        open("%s/import/%s-%s.csv" % (i_home,self.getId(),current),"w+").write('\n'.join(csv))
201
202###)
203
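    # Dumps the table to <INSTANCE_HOME>/export/<name>_<timestamp>.csv, either all
    # records or only those matching an index/value pair, writing in chunks of 2000
    # rows and logging progress estimates. A typical (hypothetical) call might look
    # like: portal.students_catalog.dumpAll(index='session', value='08')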
204    security.declareProtected(ModifyPortalContent,"dumpAll")###(
205    def dumpAll(self,index=None,value=None):
206        """dump all data in the table to a csv"""
207        member = self.portal_membership.getAuthenticatedMember()
208        logger = logging.getLogger('WAeUPTables.WAeUPTable.dumpAll')
209        current = DateTime.DateTime().strftime("%d-%m-%y_%H_%M_%S")
210        export_file = "%s/export/%s_%s.csv" % (i_home,self.__name__,current,)
211        res_list = []
212        lines = []
213        if hasattr(self,"export_keys"):
214            fields = self.export_keys
215        else:
216            fields = []
217            for f in self.schema():
218                fields.append(f)
219        headline = ','.join(fields)
220        out = open(export_file,"wb")
221        out.write(headline +'\n')
222        out.close()
223        out = open(export_file,"a")
224        csv_writer = csv.DictWriter(out,fields,)
225        if index is not None and value is not None:
226            records = self.evalAdvancedQuery(Eq(index,value))
227        else:
228            records = self()
229        nr2export = len(records)
230        logger.info('%s starts dumping, %s records to export' % (member,nr2export))
231        chunk = 2000
232        total = 0
233        start = DateTime.DateTime().timeTime()
234        start_chunk = DateTime.DateTime().timeTime()
235        for record in records:
236            not_all = False
237            d = self.record2dict(fields,record,index)
238            lines.append(d)
239            total += 1
240            if total and not total % chunk or total == len(records):
241                csv_writer.writerows(lines)
242                anz = len(lines)
243                logger.info("wrote %(anz)d  total written %(total)d" % vars())
244                end_chunk = DateTime.DateTime().timeTime()
245                duration = end_chunk-start_chunk
246                per_record = duration/anz
247                till_now = end_chunk - start
248                average_per_record = till_now/total
249                estimated_end = DateTime.DateTime(start + average_per_record * nr2export)
250                estimated_end = estimated_end.strftime("%H:%M:%S")
251                logger.info('%(duration)4.1f, %(per_record)4.3f,end %(estimated_end)s' % vars())
252                start_chunk = DateTime.DateTime().timeTime()
253                lines = []
254        end = DateTime.DateTime().timeTime()
255        logger.info('total time %6.2f m' % ((end-start)/60))
256        import os
257        filename, extension = os.path.splitext(export_file)
258        from subprocess import call
259        msg = "wrote %(total)d records to %(export_file)s" % vars()
260        #try:
261        #    retcode = call('gzip %s' % (export_file),shell=True)
262        #    if retcode == 0:
263        #        msg = "wrote %(total)d records to %(export_file)s.gz" % vars()
264        #except OSError, e:
265        #    retcode = -99
266        #    logger.info("zip failed with %s" % e)
267        logger.info(msg)
268        args = {'portal_status_message': msg}
269        #url = self.REQUEST.get('URL1') + '?' + urlencode(args)
270        url = self.REQUEST.get('URL2')
271        return 'ready'
272        #return self.REQUEST.RESPONSE.redirect(url)
273    ###)
274
275
276    security.declarePrivate("_import") ###(
277    def _import_new(self,csv_items,schema, layout, mode,logger):
278        "import data from csv.Dictreader Instance"
279        start = True
280        tr_count = 1
281        total_imported = 0
282        total_not_imported = 0
283        total = 0
284        iname =  "%s" % filename
285        not_imported = []
286        valid_records = []
287        invalid_records = []
288        duplicate_records = []
289        d = {}
290        d['mode'] = mode
291        d['valid_records'] = valid_records
292        d['invalid_records'] = invalid_records
293        d['duplicate_records'] = duplicate_records
294        # d['import_fn'] = import_fn
295        # d['imported_fn'] = imported_fn
296        # d['not_imported_fn'] = not_imported_fn
297        validators = {}
298        for widget in layout.keys():
299            try:
300                validators[widget] = layout[widget].validate
301            except AttributeError:
302                logger.info('%s has no validate attribute' % widget)
303                return d
304        for item in csv_items:
305            if start:
306                start = False
307                logger.info('%s starts import from %s.csv' % (member,filename))
308                #import_keys = [k for k in item.keys() if not k.startswith('ignore')]
309                attrs = csv.reader(open("%s/import/%s.csv" % (i_home,filename),"rb")).next()
310                import_keys = [k for k in attrs if not (k.startswith('ignore') or k.isupper())]
311                diff2schema = set(import_keys).difference(set(schema.keys()))
312                diff2layout = set(import_keys).difference(set(layout.keys()))
313                if diff2layout:
314                    em = "not ignorable key(s) %s found in heading" % diff2layout
315                    logger.info(em)
316                    return d
317                # s = ','.join(['"%s"' % fn for fn in import_keys])
318                # open(not_imported_fn,"a").write(s + ',"Error"'+ '\n')
319                # #s = '"id",' + s
320                # open(imported_fn,"a").write(s + '\n')
321                # format = ','.join(['"%%(%s)s"' % fn for fn in import_keys])
322                # format_error = format + ',"%(Error)s"'
323                # #format = '"%(id)s",'+ format
324                adapters = [MappingStorageAdapter(schema, item)]
325            dm = DataModel(item, adapters,context=self)
326            ds = DataStructure(data=item,datamodel=dm)
327            error_string = ""
328            for k in import_keys:
329                if not validators[k](ds,mode=mode):
330                    error_string += " %s : %s" % (k,ds.getError(k))
331            if error_string:
332                item['Error'] = error_string
333                #invalid_records.append(dm)
334                invalid_records.append(item)
335                total_not_imported += 1
336            else:
337                #em = format % item  # 'format' is only defined in the commented-out block above
338                valid_records.append(dm)
339                #logger.info("%(total_imported)d of %(total)d %(em)s" % vars())
340                tr_count += 1
341                total_imported += 1
342            total += 1
343        # if len(imported) > 0:
344        #     open(imported_fn,"a").write('\n'.join(imported))
345        # if len(not_imported) > 0:
346        #     open(not_imported_fn,"a").write('\n'.join(not_imported))
347        #em = "Imported: %d, not imported: %d of total %d" % (total_imported,total_not_imported,total)
348        d['imported'] = total_imported
349        d['not_imported'] = total_not_imported
350        d['valid_records'] = valid_records
351        d['invalid_records'] = invalid_records
352        return d
353    ###)
354
355    security.declarePublic("missingValue")###(
356    def missingValue(self):
357        from Missing import MV
358        return MV
359    ###)
360###)
361
362class AccommodationTable(WAeUPTable): ###(
363
364    meta_type = 'WAeUP Accommodation Tool'
365    name = "portal_accommodation"
366    key = "bed"
367    not_occupied = NOT_OCCUPIED
368    def __init__(self,name=None):
369        if name ==  None:
370            name = self.name
371        WAeUPTable.__init__(self, name)
372
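    # Reserves a free bed of the requested type for a student. Returns a
    # (status, value) tuple: (1, bed) on success, (-1, bed) if the student already
    # holds a reservation, (-2, "no bed") if no bed of that type is free, and
    # (-3, 'more than one bed') if several reservations are found.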
373    def searchAndReserveBed(self, student_id,bed_type,random_order=False): ###(
374        logger = logging.getLogger('WAeUPTables.AccommodationTable.searchAndReserveBed')
375        records = self.evalAdvancedQuery(Eq('student',student_id))
376        if len(records) == 1:
377            #return -1,"Student with Id %s already booked bed %s." % (student_id,records[0].bed)
378            logger.info('%s found (reserved) bed %s' % (student_id,records[0].bed))
379            return -1,records[0].bed
380        elif len(records) > 1:
381            logger.info('%s found more than one (reserved) bed' % (student_id))
382            return -3,'more than one bed'
383        query = Eq('bed_type',bed_type) & Eq('student',NOT_OCCUPIED)
384        records = self.evalAdvancedQuery(query,sortSpecs=('sort_id','bed'))
385        if len(records) == 0:
386            logger.info('no bed %s available for %s' % (bed_type,student_id))
387            return -2,"no bed"
388        if random_order:
389            import random
390            bed_no = random.randint(0,len(records)-1)
391        else:
392            bed_no = 0
393        rec = records[bed_no]
394        self.modifyRecord(bed=rec.bed,student=student_id)
395        logger.info('%s booked bed %s' % (student_id,rec.bed))
396        return 1,rec.bed
397    ###)
398
399
400InitializeClass(AccommodationTable)
401
402###)
403
404class PinTable(WAeUPTable): ###(
405    from ZODB.POSException import ConflictError
406    security = ClassSecurityInfo()
407    meta_type = 'WAeUP Pin Tool'
408    name = "portal_pins"
409    key = 'pin'
410
411    def __init__(self,name=None):
412        if name ==  None:
413            name = self.name
414        WAeUPTable.__init__(self, name)
415
416    security.declareProtected(ModifyPortalContent,"dumpAll")###(
417    def dumpAll(self,include_unused=None,index=None):
418        """dump all data in the table to a csv"""
419        member = self.portal_membership.getAuthenticatedMember()
420        logger = logging.getLogger('WAeUPTables.PinTable.dumpAll')
421        current = DateTime.DateTime().strftime("%d-%m-%y_%H_%M_%S")
422        export_file = "%s/export/%s_%s.csv" % (i_home,self.__name__,current,)
423        res_list = []
424        lines = []
425        if hasattr(self,"export_keys"):
426            fields = self.export_keys
427        else:
428            fields = []
429            for f in self.schema():
430                fields.append(f)
431        headline = ','.join(fields)
432        out = open(export_file,"wb")
433        out.write(headline +'\n')
434        out.close()
435        out = open(export_file,"a")
436        csv_writer = csv.DictWriter(out,fields,)
437        if include_unused is not None and str(member) not in ('admin','joachim'):
438            logger.info('%s tries to dump pintable with unused pins' % (member))
439            return
440        if include_unused is not None:
441            records = self()
442        else:
443            records = self.evalAdvancedQuery(~Eq('student',''))
444        nr2export = len(records)
445        logger.info('%s starts dumping, %s records to export' % (member,nr2export))
446        chunk = 2000
447        total = 0
448        start = DateTime.DateTime().timeTime()
449        start_chunk = DateTime.DateTime().timeTime()
450        for record in records:
451            not_all = False
452            d = self.record2dict(fields,record,index)
453            lines.append(d)
454            total += 1
455            if total and not total % chunk or total == len(records):
456                csv_writer.writerows(lines)
457                anz = len(lines)
458                logger.info("wrote %(anz)d  total written %(total)d" % vars())
459                end_chunk = DateTime.DateTime().timeTime()
460                duration = end_chunk-start_chunk
461                per_record = duration/anz
462                till_now = end_chunk - start
463                average_per_record = till_now/total
464                estimated_end = DateTime.DateTime(start + average_per_record * nr2export)
465                estimated_end = estimated_end.strftime("%H:%M:%S")
466                logger.info('%(duration)4.1f, %(per_record)4.3f,end %(estimated_end)s' % vars())
467                start_chunk = DateTime.DateTime().timeTime()
468                lines = []
469        end = DateTime.DateTime().timeTime()
470        logger.info('total time %6.2f m' % ((end-start)/60))
471        import os
472        filename, extension = os.path.splitext(export_file)
473        from subprocess import call
474        msg = "wrote %(total)d records to %(export_file)s" % vars()
475        #try:
476        #    retcode = call('gzip %s' % (export_file),shell=True)
477        #    if retcode == 0:
478        #        msg = "wrote %(total)d records to %(export_file)s.gz" % vars()
479        #except OSError, e:
480        #    retcode = -99
481        #    logger.info("zip failed with %s" % e)
482        logger.info(msg)
483        args = {'portal_status_message': msg}
484        #url = self.REQUEST.get('URL1') + '?' + urlencode(args)
485        url = self.REQUEST.get('URL2')
486        return self.REQUEST.RESPONSE.redirect(url)
487    ###)
488
489
490
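    # Assigns a pin to a student. Returns (status, record): (-1, None) if the pin
    # does not exist, (1, record) if it was free and is now assigned, (2, record)
    # if the same student already used it (or a write conflict occurred), and
    # (0, record) if the pin belongs to a different student.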
491    def searchAndSetRecord(self, uid, student_id,prefix):
492
493        # The following line must be activated after resetting the
494        # portal_pins table. This is to avoid duplicate entries
495        # and disable duplicate payments.
496
497        #student_id = student_id.upper()
498
499        #records = self.searchResults(student = student_id)
500        #if len(records) > 0 and prefix in ('CLR','APP'):
501        #    for r in records:
502        #        if r.pin != uid and r.prefix_batch.startswith(prefix):
503        #            return -2
504        records = self.searchResults({"%s" % self.key : uid})
505        if len(records) > 1:
506            # Can not happen, but anyway...
507            raise ValueError("More than one record with uid %s" % uid)
508        if len(records) == 0:
509            return -1,None
510        record = records[0]
511        if record.student == "":
512            record_data = {}
513            for field in self.schema() + self.indexes():
514                record_data[field] = getattr(record, field)
515            # Add the updated data:
516            record_data['student'] = student_id
517            try:
518                self.catalog_object(dict2ob(record_data), uid)
519                return 1,record
520            except self.ConflictError:  # ConflictError is bound in the class body above
521                return 2,record
522        if record.student.upper() != student_id.upper():
523            return 0,record
524        if record.student.upper() == student_id.upper():
525            return 2,record
526        return -3,record
527InitializeClass(PinTable)
528###)
529
530class PumeResultsTable(WAeUPTable): ###(
531
532    meta_type = 'WAeUP PumeResults Tool'
533    name = "portal_pumeresults"
534    key = "jamb_reg_no"
535    def __init__(self,name=None):
536        if name ==  None:
537            name = self.name
538        WAeUPTable.__init__(self, name)
539
540
541InitializeClass(PumeResultsTable)
542
543###)
544
545class ApplicantsCatalog(WAeUPTable): ###(
546
547    meta_type = 'WAeUP Applicants Catalog'
548    name = "applicants_catalog"
549    key = "reg_no"
550    security = ClassSecurityInfo()
551    #export_keys = (
552    #               "reg_no",
553    #               "status",
554    #               "lastname",
555    #               "sex",
556    #               "date_of_birth",
557    #               "lga",
558    #               "email",
559    #               "phone",
560    #               "passport",
561    #               "entry_mode",
562    #               "pin",
563    #               "screening_type",
564    #               "registration_date",
565    #               "testdate",
566    #               "application_date",
567    #               "screening_date",
568    #               "faculty",
569    #               "department",
570    #               "course1",
571    #               "course2",
572    #               "course3",
573    #               "eng_score",
574    #               "subj1",
575    #               "subj1score",
576    #               "subj2",
577    #               "subj2score",
578    #               "subj3",
579    #               "subj3score",
580    #               "aggregate",
581    #               "course_admitted",
582    #               )
583
584    def __init__(self,name=None):
585        if name ==  None:
586            name = self.name
587        WAeUPTable.__init__(self, name)
588
589    security.declareProtected(ModifyPortalContent,"new_importCSV")###(
590    def new_importCSV(self,filename="JAMB_data",
591                  schema_id="application",
592                  layout_id="import_application",
593                  mode='add'):
594        """ import JAMB data """
595        current = DateTime.DateTime().strftime("%d-%m-%y_%H_%M_%S")
596        pm = self.portal_membership
597        member = pm.getAuthenticatedMember()
598        logger = logging.getLogger('WAeUPTables.ApplicantsCatalog.importCSV')
599        lock_fn = "%s/import/%s_import_lock" % (i_home,filename)
600        import_fn = "%s/import/%s.csv" % (i_home,filename)
601        if mode not in ('add','edit'):
602            logger.info("invalid mode: %s" % mode)
603        if os.path.exists(lock_fn):
604            logger.info("import of %(import_fn)s already in progress" % vars())
605            return
606        lock_file = open(lock_fn,"w")
607        lock_file.write("%(current)s \n" % vars())
608        lock_file.close()
609        invalid_fn = "%s/import/%s_not_imported%s.csv" % (i_home,filename,current)
610        duplicate_fn = "%s/import/%s_not_imported%s.csv" % (i_home,filename,current)
611        stool = getToolByName(self, 'portal_schemas')
612        ltool = getToolByName(self, 'portal_layouts')
613        schema = stool._getOb(schema_id)
614        if schema is None:
615            em = 'No such schema %s' % schema_id
616            logger.error(em)
617            return
618        for postfix in ('_import',''):
619            layout_name = "%(layout_id)s%(postfix)s" % vars()
620            if hasattr(ltool,layout_name):
621                break
622        layout = ltool._getOb(layout_name)
623        if layout is None:
624            em = 'No such layout %s' % layout_id
625            logger.error(em)
626            return
627        try:
628            csv_items = csv.DictReader(open(import_fn,"rb"))
629        except:
630            em = 'Error reading %s.csv' % filename
631            logger.error(em)
632            return
633        d = self._import_new(csv_items,schema,layout,mode,logger)
634        imported = []
635        edited = []
636        duplicates = []
637        not_found = []
638        if len(d['valid_records']) > 0:
639            for record in d['valid_records']:
640                #import pdb;pdb.set_trace()
641                if mode == "add":
642                    try:
643                        self.addRecord(**dict(record.items()))
644                        imported.append(dict(record.items()))
645                        logger.info("added %s" % record.items())
646                    except ValueError:
647                        duplicates.append(dict(record.items()))
648                        logger.info("duplicate %s" % record.items())
649                elif mode == "edit":
650                    try:
651                        self.modifyRecord(**dict(record.items()))
652                        edited.append(dict(record.items()))
653                        logger.info("edited %s" % record.items())
654                    except KeyError:
655                        not_found.append(dict(record.items()))
656                        logger.info("not found %s" % record.items())
657        invalid = d['invalid_records']
658        for itype in ("imported","edited","not_found","duplicates","invalid"):
659            outlist = locals()[itype]
660            if len(outlist):
661                d = {}
662                for k in outlist[0].keys():
663                    d[k] = k
664                outlist[0] = d
665                outfile = open("file_name_%s" % itype,'w')
666                csv.DictWriter(outfile,outlist[0].keys()).writerows(outlist)
667                logger.info("wrote %d %s records" % (len(outlist), itype))
668###)
669
670    security.declareProtected(ModifyPortalContent,"importCSV")###(
671    def importCSV(self,filename="JAMB_data",
672                  schema_id="application",
673                  layout_id="application_pce",
674                  mode='add'):
675        """ import JAMB data """
676        logger = logging.getLogger('WAeUPTables.ApplicantsCatalog.importCSV')
677        stool = getToolByName(self, 'portal_schemas')
678        ltool = getToolByName(self, 'portal_layouts')
679        schema = stool._getOb(schema_id)
680        if schema is None:
681            em = 'No such schema %s' % schema_id
682            logger.error(em)
683            return
684        layout = ltool._getOb(layout_id)
685        if layout is None:
686            em = 'No such layout %s' % layout_id
687            logger.error(em)
688            return
689        d = self._import_old(filename,schema,layout,mode,logger)
690        if len(d['valid_records']) > 0:
691            for record in d['valid_records']:
692                #import pdb;pdb.set_trace()
693                if mode == "add":
694                    self.addRecord(**dict(record.items()))
695                    logger.info("added %s" % record.items())
696                elif mode == "edit":
697                    self.modifyRecord(**dict(record.items()))
698                    logger.info("edited %s" % record.items())
699                else:
700                    logger.info("invalid mode: %s" % mode)
701        logger.info("%(mode)sed %(imported)d records, invalid written to %(not_imported_fn)s" % d)
702    ###)
703
704InitializeClass(ApplicantsCatalog)
705
706###)
707
708class StudentsCatalog(WAeUPTable): ###(
709    security = ClassSecurityInfo()
710
711    meta_type = 'WAeUP Students Catalog'
712    name = "students_catalog"
713    key = "id"
714    affected_types = {   ###(
715                      'StudentApplication':
716                      {'id': 'application',
717                       'fields':
718                       ('jamb_reg_no',
719                        'entry_mode',
720                        #'entry_level',
721                        'entry_session',
722                       )
723                      },
724                      'StudentClearance':
725                      {'id': 'clearance',
726                       'fields':
727                       ('matric_no',
728                        'lga',
729                        'date_of_birth',  # birthday
730                       )
731                      },
732                      'StudentPersonal':
733                      {'id': 'personal',
734                       'fields':
735                       ('name',
736                        'sex',
737                        'perm_address',
738                        'email',
739                        'phone',
740                        'marit_stat',
741                        'firstname',
742                        'middlename',
743                        'lastname',
744                       )
745                      },
746                      'StudentStudyCourse':
747                      {'id': 'study_course',
748                       'fields':
749                       ('course', # study_course
750                        'faculty', # from certificate
751                        'department', # from certificate
752                        'end_level', # from certificate
753                        'level', # current_level
754                        'mode',  # from certificate
755                        'session', # current_session
756                        'verdict', # current_verdict
757                       )
758                      },
759                     }
760    ###)
761
762    def __init__(self,name=None):
763        if name ==  None:
764            name = self.name
765        WAeUPTable.__init__(self, name)
766        return
767
768    def manage_catalogClear(self, REQUEST=None, RESPONSE=None, URL1=None):
769        """ clears the whole enchilada """
770        self._catalog.clear()
771
772        if REQUEST and RESPONSE:
773            RESPONSE.redirect(
774              URL1 +
775              '/manage_catalogAdvanced?manage_tabs_message=Catalog%20Cleared')
776
777    def manage_catalogReindex(self, REQUEST, RESPONSE, URL1): ###(
778        """ clear the catalog, then re-index everything """
779
780        elapse = time.time()
781        c_elapse = time.clock()
782
783        pgthreshold = self._getProgressThreshold()
784        handler = (pgthreshold > 0) and ZLogHandler(pgthreshold) or None
785        self.refreshCatalog(clear=1, pghandler=handler)
786
787        elapse = time.time() - elapse
788        c_elapse = time.clock() - c_elapse
789
790        RESPONSE.redirect(
791            URL1 +
792            '/manage_catalogAdvanced?manage_tabs_message=' +
793            urllib.quote('Catalog Updated \n'
794                         'Total time: %s\n'
795                         'Total CPU time: %s' % (`elapse`, `c_elapse`)))
796    ###)
797
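    # Builds a dict keyed by certificate id with faculty and department (taken
    # from the catalog path) plus end_level and study_mode; cached on the volatile
    # attribute _v_certificates and used by the get_from_doc_* helpers below.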
798    def fill_certificates_dict(self): ###(
799        "return certificate data in  dict"
800        certificates_brains = self.portal_catalog(portal_type ='Certificate')
801        d = {}
802        for cb in certificates_brains:
803            certificate_doc = cb.getObject().getContent()
804            cb_path = cb.getPath().split('/')
805            ld = {}
806            ld['faculty'] = cb_path[-4]
807            ld['department'] = cb_path[-3]
808            ld['end_level'] = getattr(certificate_doc,'end_level','999')
809            ld['study_mode'] = getattr(certificate_doc,'study_mode','')
810            d[cb.getId] = ld
811        return d
812    ###)
813
814    def get_from_doc_department(self,doc,cached_data={}): ###(
815        "return the students department"
816        if doc is None:
817            return None
818        if hasattr(self,"_v_certificates") and self._v_certificates.has_key(doc.study_course):
819            return self._v_certificates[doc.study_course]['department']
820        certificate_res = self.portal_catalog(id = doc.study_course)
821        if len(certificate_res) != 1:
822            return None
823        return certificate_res[0].getPath().split('/')[-3]
824
825    def get_from_doc_faculty(self,doc,cached_data={}):
826        "return the students faculty"
827        if doc is None:
828            return None
829        if hasattr(self,"_v_certificates") and self._v_certificates.has_key(doc.study_course):
830            return self._v_certificates[doc.study_course]['faculty']
831        certificate_res = self.portal_catalog(id = doc.study_course)
832        if len(certificate_res) != 1:
833            return None
834        return certificate_res[0].getPath().split('/')[-4]
835
836    def get_from_doc_end_level(self,doc,cached_data={}):
837        "return the students end_level"
838        if doc is None:
839            return None
840        if hasattr(self,"_v_certificates") and self._v_certificates.has_key(doc.study_course):
841            return self._v_certificates[doc.study_course]['end_level']
842        certificate_res = self.portal_catalog(id = doc.study_course)
843        if len(certificate_res) != 1:
844            return None
845        return getattr(certificate_res[0].getObject().getContent(),'end_level','unknown')
846
847    def get_from_doc_level(self,doc,cached_data={}):
848        "return the students level"
849        if doc is None:
850            return None
851        return getattr(doc,'current_level',None)
852
853    #def get_from_doc_mode(self,doc,cached_data={}):
854    #    "return the students mode"
855    #    if doc is None:
856    #        return None
857    #    cm = getattr(doc,'current_mode',None)
858    #    return cm
859   
860    def get_from_doc_mode(self,doc,cached_data={}):
861        "return the students mode"
862        if doc is None:
863            return None
864        if hasattr(self,"_v_certificates") and self._v_certificates.has_key(doc.study_course):
865            return self._v_certificates[doc.study_course]['study_mode']
866        certificate_res = self.portal_catalog(id = doc.study_course)
867        if len(certificate_res) != 1:
868            return None
869        return getattr(certificate_res[0].getObject().getContent(),'study_mode','unknown')   
870
871
872    def get_from_doc_marit_stat(self,doc,cached_data={}):
873        "return the students marit_stat"
874        if doc is None:
875            return None
876        ms = getattr(doc,'marit_stat',None)
877        if ms == True:
878            return 'married'
879        elif ms == False:
880            return 'single'
881        else:
882            return 'undefined'
883           
884    def get_from_doc_date_of_birth(self,doc,cached_data={}):
885        "return the students date of birth"
886        if doc is None:
887            return None
888        return getattr(doc,'birthday',None)           
889
890    def get_from_doc_session(self,doc,cached_data={}):
891        "return the students current_session"
892        if doc is None:
893            return None
894        return getattr(doc,'current_session',None)
895
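    # Derives the two-digit entry session: uses entry_session directly if short,
    # extracts it from a 9-character value, and otherwise falls back to guessing
    # the session from the first digit of the JAMB registration number.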
896    def get_from_doc_entry_session(self,doc,cached_data={}):
897        "return the students entry_session"
898        if doc is None:
899            return None
900        es = getattr(doc,'entry_session',None)
901        if es is not None and len(es) < 3:
902            return es
903        elif es is not None and len(es) == 9:
904            return es[2:4]   
905        try:
906            digit = int(doc.jamb_reg_no[0])
907        except:
908            return "-1"
909        if digit < 9:
910            return "0%c" % doc.jamb_reg_no[0]
911        return "9%c" % doc.jamb_reg_no[0]
912
913    def get_from_doc_course(self,doc,cached_data={}):
914        "return the students study_course"
915        if doc is None:
916            return None
917        return getattr(doc,'study_course',None)
918
919    def get_from_doc_name(self,doc,cached_data={}):
920        "return the students name from the personal"
921        if doc is None:
922            return None
923        return "%s %s %s" % (doc.firstname,doc.middlename,doc.lastname)
924
925    def get_from_doc_verdict(self,doc,cached_data={}):
926        "return the students study_course"
927        if doc is None:
928            return None
929        return getattr(doc,'current_verdict',None)
930    ###)
931
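    # Maps the requested index names back to the student sub-documents listed in
    # affected_types, recomputes only those fields for every Student object (via
    # the get_from_doc_* helpers where available) and falls back to the returning
    # import data when no sub-objects exist; 'review_state' is read from the
    # workflow tool instead.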
932    def reindexIndex(self, name, REQUEST,pghandler=None): ###(
933        if not hasattr(self,'_v_certificates'):
934            self._v_certificates = self.fill_certificates_dict()
935        if isinstance(name, str):
936            name = (name,)
937        reindextypes = {}
938        reindex_special = []
939        for n in name:
940            if n in ("review_state",):
941                reindex_special.append(n)
942            else:
943                for pt in self.affected_types.keys():
944                    if n in self.affected_types[pt]['fields']:
945                        if reindextypes.has_key(pt):
946                            reindextypes[pt].append(n)
947                        else:
948                            reindextypes[pt]= [n]
949                        break
950        #cached_data = {}
951        #if set(name).intersection(set(('faculty','department','end_level','mode'))):
952        #    cached_data = self.fill_certificates_dict()
953        students = self.portal_catalog(portal_type="Student")
954        if hasattr(self,'portal_catalog_real'):
955            aq_portal = self.portal_catalog_real.evalAdvancedQuery
956        else:
957            aq_portal = self.portal_catalog.evalAdvancedQuery
958        num_objects = len(students)
959        if pghandler:
960            pghandler.init('Refreshing catalog: %s' % self.absolute_url(1), num_objects)
961        noattr = set(('StudentClearance','StudentPersonal')) & set(reindextypes.keys())
962        #import pdb;pdb.set_trace()
963        for i in xrange(num_objects):
964            if pghandler: pghandler.report(i)
965            student_brain = students[i]
966            student_object = student_brain.getObject()
967            data = {}
968            modified = False
969            sid = data['id'] = student_brain.getId
970            if reindex_special and 'review_state' in reindex_special:
971                modified = True
972                data['review_state'] = student_object.portal_workflow.getInfoFor(student_object,'review_state',None)
973            sub_objects = False
974            for pt in reindextypes.keys():
975                modified = True
976                try:
977                    doc = getattr(student_object,self.affected_types[pt]['id']).getContent()
978                    sub_objects = True
979                except:
980                    continue
981                for field in set(name).intersection(self.affected_types[pt]['fields']):
982                    if hasattr(self,'get_from_doc_%s' % field):
983                        data[field] = getattr(self,'get_from_doc_%s' % field)(doc)
984                    else:
985                        data[field] = getattr(doc,field)
986            if not sub_objects and noattr:
987                import_res = self.returning_import(id = sid)
988                if not import_res:
989                    continue
990                import_record = import_res[0]
991                data['matric_no'] = import_record.matric_no
992                data['sex'] = import_record.Sex == 'F'
993                data['name'] = "%s %s %s" % (import_record.Firstname,
994                                             import_record.Middlename,
995                                             import_record.Lastname)
996                data['jamb_reg_no'] = import_record.Entryregno
997            if modified:
998                self.modifyRecord(**data)
999        if pghandler: pghandler.finish()
1000    ###)
1001
1002    def refreshCatalog(self, clear=0, pghandler=None): ###(
1003        """ re-index everything we can find """
1004        students_folder = self.portal_url.getPortalObject().campus.students
1005        if clear:
1006            self._catalog.clear()
1007        students = self.portal_catalog(portal_type="Student")
1008        num_objects = len(students)
1009        #cached_data = self.fill_certificates_dict()
1010        if not hasattr(self,'_v_certificates'):
1011            self._v_certificates = self.fill_certificates_dict()
1012        if pghandler:
1013            pghandler.init('Refreshing catalog: %s' % self.absolute_url(1), num_objects)
1014        for i in xrange(num_objects):
1015            if pghandler: pghandler.report(i)
1016            student_brain = students[i]
1017            spath = student_brain.getPath()
1018            student_object = student_brain.getObject()
1019            data = {}
1020            sid = data['id'] = student_brain.getId
1021            #data['review_state'] = student_brain.review_state
1022            data['review_state'] = student_object.portal_workflow.getInfoFor(student_object,'review_state',None)
1023            sub_objects = False
1024            for pt in self.affected_types.keys():
1025                modified = True
1026                try:
1027                    doc = getattr(student_object,self.affected_types[pt]['id']).getContent()
1028                    sub_objects = True
1029                except:
1030                    #from pdb import set_trace;set_trace()
1031                    continue
1032                for field in self.affected_types[pt]['fields']:
1033                    if hasattr(self,'get_from_doc_%s' % field):
1034                        data[field] = getattr(self,'get_from_doc_%s' % field)(doc)
1036                    else:
1037                        data[field] = getattr(doc,field,None)
1038            if not sub_objects:
1039                import_res = self.returning_import(id = sid)
1040                if not import_res:
1041                    continue
1042                import_record = import_res[0]
1043                data['matric_no'] = import_record.matric_no
1044                data['sex'] = import_record.Sex == 'F'
1045                data['name'] = "%s %s %s" % (import_record.Firstname,
1046                                             import_record.Middlename,
1047                                             import_record.Lastname)
1048                data['jamb_reg_no'] = import_record.Entryregno
1049            self.addRecord(**data)
1050        if pghandler: pghandler.finish()
1051    ###)
1052
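    # Keeps the flat students_catalog in sync with CPS events: workflow events on
    # a Student proxy update review_state, sys_add_object/sys_del_object add or
    # remove the record, and sys_modify_object on one of the affected sub-document
    # types re-extracts just that document's fields.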
1053    security.declarePrivate('notify_event_listener') ###(
1054    def notify_event_listener(self,event_type,object,infos):
1055        "listen for events"
1056        if not infos.has_key('rpath'):
1057            return
1058        pt = getattr(object,'portal_type',None)
1059        mt = getattr(object,'meta_type',None)
1060        students_catalog = self
1061        data = {}
1062        if pt == 'Student' and\
1063           mt == 'CPS Proxy Folder' and\
1064           event_type.startswith('workflow'):
1065            data['id'] = object.getId()
1066            data['review_state'] = self.portal_workflow.getInfoFor(object,'review_state',None)
1067            students_catalog.modifyRecord(**data)
1068            return
1069        rpl = infos['rpath'].split('/')
1070        if pt == 'Student' and mt == 'CPS Proxy Folder':
1071            student_id = object.id
1072            if event_type == "sys_add_object":
1073                try:
1074                    self.addRecord(id = student_id)
1075                except ValueError:
1076                    pass
1077                return
1078            elif event_type == 'sys_del_object':
1079                self.deleteRecord(student_id)
1080        if pt not in self.affected_types.keys():
1081            return
1082        if event_type not in ('sys_modify_object'):
1083            return
1084        if mt == 'CPS Proxy Folder':
1085            return
1086        if not hasattr(self,'_v_certificates'):
1087            self._v_certificates = self.fill_certificates_dict()
1088        for field in self.affected_types[pt]['fields']:
1089            if hasattr(self,'get_from_doc_%s' % field):
1090                data[field] = getattr(self,'get_from_doc_%s' % field)(object)
1091            else:
1092                data[field] = getattr(object,field)
1093        data['id'] = rpl[2]
1094        self.modifyRecord(**data)
1095    ###)
1096
1097
1098InitializeClass(StudentsCatalog)
1099
1100###)
1101
1102class CertificatesCatalog(WAeUPTable): ###(
1103    security = ClassSecurityInfo()
1104
1105    meta_type = 'WAeUP Certificates Catalog'
1106    name =  "certificates_catalog"
1107    key = "code"
1108    def __init__(self,name=None):
1109        if name ==  None:
1110            name =  self.name
1111        WAeUPTable.__init__(self, name)
1112
1113    def manage_catalogReindex(self, REQUEST, RESPONSE, URL1): ###(
1114        """ clear the catalog, then re-index everything """
1115
1116        elapse = time.time()
1117        c_elapse = time.clock()
1118
1119        pgthreshold = self._getProgressThreshold()
1120        handler = (pgthreshold > 0) and ZLogHandler(pgthreshold) or None
1121        self.refreshCatalog(clear=1, pghandler=handler)
1122
1123        elapse = time.time() - elapse
1124        c_elapse = time.clock() - c_elapse
1125
1126        RESPONSE.redirect(
1127            URL1 +
1128            '/manage_catalogAdvanced?manage_tabs_message=' +
1129            urllib.quote('Catalog Updated \n'
1130                         'Total time: %s\n'
1131                         'Total CPU time: %s' % (`elapse`, `c_elapse`)))
1132    ###)
1133
1134    def reindexIndex(self, name, REQUEST,pghandler=None): ###(
1135        if isinstance(name, str):
1136            name = (name,)
1137        certificates = self.portal_catalog(portal_type="Certificate")
1138        num_objects = len(certificates)
1139        if pghandler:
1140            pghandler.init('Refreshing catalog: %s' % self.absolute_url(1), num_objects)
1141        for i in xrange(num_objects):
1142            if pghandler: pghandler.report(i)
1143            certificate_brain = certificates[i]
1144            certificate_object = certificate_brain.getObject()
1145            pl = certificate_brain.getPath().split('/')
1146            data = {}
1147            cid = data[self.key] = certificate_brain.getId
1148            data['faculty'] = pl[-4]
1149            data['department'] = pl[-3]
1150            doc = certificate_object.getContent()
1151            for field in name:
1152                if field not in (self.key,'faculty','department'):
1153                    data[field] = getattr(doc,field)
1154            self.modifyRecord(**data)
1155        if pghandler: pghandler.finish()
1156    ###)
1157
1158    def refreshCatalog(self, clear=0, pghandler=None): ###(
1159        """ re-index everything we can find """
1160        if clear:
1161            self._catalog.clear()
1162        certificates = self.portal_catalog(portal_type="Certificate")
1163        num_objects = len(certificates)
1164        if pghandler:
1165            pghandler.init('Refreshing catalog: %s' % self.absolute_url(1), num_objects)
1166        #from pdb import set_trace;set_trace()
1167        for i in xrange(num_objects):
1168            if pghandler: pghandler.report(i)
1169            certificate_brain = certificates[i]
1170            certificate_doc = certificate_brain.getObject().getContent()
1171            pl = certificate_brain.getPath().split('/')
1172            data = {}
1173            for field in self.schema():
1174                data[field] = getattr(certificate_doc,field,None)
1175            data[self.key] = certificate_brain.getId
1176            ai = pl.index('academics')
1177            data['faculty'] = pl[ai +1]
1178            data['department'] = pl[ai +2]
1179            if clear:
1180                self.addRecord(**data)
1181            else:
1182                self.modifyRecord(**data)
1183        if pghandler: pghandler.finish()
1184    ###)
1185
1186    security.declarePrivate('notify_event_listener') ###(
1187    def notify_event_listener(self,event_type,object,infos):
1188        "listen for events"
1189        if not infos.has_key('rpath'):
1190            return
1191        pt = getattr(object,'portal_type',None)
1192        mt = getattr(object,'meta_type',None)
1193        if pt != 'Certificate':
1194            return
1195        data = {}
1196        rpl = infos['rpath'].split('/')
1197        if event_type not in ("sys_add_object","sys_modify_object","sys_del_object"):
1198            return
1199        certificate_id = object.getId()
1200        data[self.key] = certificate_id
1201        if event_type == "sys_add_object" and mt == 'CPS Proxy Folder':
1202            try:
1203                self.addRecord(**data)
1204            except ValueError:
1205                return
1206            certificate_id = object.getId()
1207            doc = object.getContent()
1208            if doc is None:
1209                return
1210            for field in self.schema():
1211                data[field] = getattr(doc,field,None)
1212            data[self.key] = certificate_id
1213            ai = rpl.index('academics')
1214            data['faculty'] = rpl[ai +1]
1215            data['department'] = rpl[ai +2]
1216            self.modifyRecord(**data)
1217            return
1218        if event_type == "sys_del_object":
1219            self.deleteRecord(certificate_id)
1220            return
1221        if event_type == "sys_modify_object" and mt == 'Certificate':
1222            #from pdb import set_trace;set_trace()
1223            for field in self.schema():
1224                data[field] = getattr(object,field,None)
1225            certificate_id = object.aq_parent.getId()
1226            data[self.key] = certificate_id
1227            ai = rpl.index('academics')
1228            data['faculty'] = rpl[ai +1]
1229            data['department'] = rpl[ai +2]
1230            self.modifyRecord(**data)
1231    ###)
1232
1233
1234InitializeClass(CertificatesCatalog)
1235###)
1236
1237class CoursesCatalog(WAeUPTable): ###(
1238    security = ClassSecurityInfo()
1239
1240    meta_type = 'WAeUP Courses Catalog'
1241    name =  "courses_catalog"
1242    key = "code"
1243    def __init__(self,name=None):
1244        if name ==  None:
1245            name =  self.name
1246        WAeUPTable.__init__(self, name)
1247
1248    def manage_catalogReindex(self, REQUEST, RESPONSE, URL1): ###(
1249        """ clear the catalog, then re-index everything """
1250
1251        elapse = time.time()
1252        c_elapse = time.clock()
1253
1254        pgthreshold = self._getProgressThreshold()
1255        handler = (pgthreshold > 0) and ZLogHandler(pgthreshold) or None
1256        self.refreshCatalog(clear=1, pghandler=handler)
1257
1258        elapse = time.time() - elapse
1259        c_elapse = time.clock() - c_elapse
1260
1261        RESPONSE.redirect(
1262            URL1 +
1263            '/manage_catalogAdvanced?manage_tabs_message=' +
1264            urllib.quote('Catalog Updated \n'
1265                         'Total time: %s\n'
1266                         'Total CPU time: %s' % (`elapse`, `c_elapse`)))
1267    ###)
1268
1269    def reindexIndex(self, name, REQUEST,pghandler=None): ###(
1270        if isinstance(name, str):
1271            name = (name,)
1272        courses = self.portal_catalog(portal_type="Course")
1273        num_objects = len(courses)
1274        if pghandler:
1275            pghandler.init('Refreshing catalog: %s' % self.absolute_url(1), num_objects)
1276        for i in xrange(num_objects):
1277            if pghandler: pghandler.report(i)
1278            course_brain = courses[i]
1279            course_object = course_brain.getObject()
1280            pl = course_brain.getPath().split('/')
1281            data = {}
1282            cid = data[self.key] = course_brain.getId
1283            data['faculty'] = pl[-4]
1284            data['department'] = pl[-3]
1285            doc = course_object.getContent()
1286            for field in name:
1287                if field not in (self.key,'faculty','department'):
1288                    data[field] = getattr(doc,field)
1289            self.modifyRecord(**data)
1290        if pghandler: pghandler.finish()
1291    ###)
1292
1293    def refreshCatalog(self, clear=0, pghandler=None): ###(
1294        """ re-index everything we can find """
1295        if clear:
1296            self._catalog.clear()
1297        courses = self.portal_catalog(portal_type="Course")
1298        num_objects = len(courses)
1299        if pghandler:
1300            pghandler.init('Refreshing catalog: %s' % self.absolute_url(1), num_objects)
1301        #from pdb import set_trace;set_trace()
1302        for i in xrange(num_objects):
1303            if pghandler: pghandler.report(i)
1304            course_brain = courses[i]
1305            course_doc = course_brain.getObject().getContent()
1306            pl = course_brain.getPath().split('/')
1307            data = {}
1308            for field in self.schema():
1309                data[field] = getattr(course_doc,field,None)
1310            data[self.key] = course_brain.getId
1311            ai = pl.index('academics')
1312            data['faculty'] = pl[ai +1]
1313            data['department'] = pl[ai +2]
1314            if clear:
1315                self.addRecord(**data)
1316            else:
1317                self.modifyRecord(**data)
1318        if pghandler: pghandler.finish()
1319    ###)
1320
1321    security.declarePrivate('notify_event_listener') ###(
1322    def notify_event_listener(self,event_type,object,infos):
1323        "listen for events"
        if not infos.has_key('rpath'):
            return
        pt = getattr(object,'portal_type',None)
        mt = getattr(object,'meta_type',None)
        if pt != 'Course':
            return
        data = {}
        rpl = infos['rpath'].split('/')
        if event_type not in ("sys_add_object","sys_modify_object","sys_del_object"):
            return
        course_id = object.getId()
        data[self.key] = course_id
        if event_type == "sys_add_object" and mt == 'CPS Proxy Folder':
            try:
                self.addRecord(**data)
            except ValueError:
                return
            course_id = object.getId()
            doc = object.getContent()
            if doc is None:
                return
            for field in self.schema():
                data[field] = getattr(doc,field,None)
            data[self.key] = course_id
            ai = rpl.index('academics')
            data['faculty'] = rpl[ai +1]
            data['department'] = rpl[ai +2]
            self.modifyRecord(**data)
            return
        if event_type == "sys_del_object":
            self.deleteRecord(course_id)
            return
        if event_type == "sys_modify_object" and mt == 'Course':
            #from pdb import set_trace;set_trace()
            for field in self.schema():
                data[field] = getattr(object,field,None)
            course_id = object.aq_parent.getId()
            data[self.key] = course_id
            ai = rpl.index('academics')
            data['faculty'] = rpl[ai +1]
            data['department'] = rpl[ai +2]
            self.modifyRecord(**data)
    ###)


InitializeClass(CoursesCatalog)
###)

class CourseResults(WAeUPTable): ###(
    security = ClassSecurityInfo()

    meta_type = 'WAeUP Results Catalog'
    name = "course_results"
    key = "key" #student_id + level + course_id
    def __init__(self,name=None):
        if name ==  None:
            name = self.name
        WAeUPTable.__init__(self, name)
        self._queue = []

    def addMultipleRecords(self, records): ###(
        """add many records"""
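        # Records are keyed by "student_id|level_id|course_id". Records whose key
        # already exists with identical student_id, level_id and course_id are
        # skipped and their uids are returned to the caller.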
        existing_uids = []
        for data in records:
            uid = "%(student_id)s|%(level_id)s|%(course_id)s" % data
            data[self.key] = uid
            query = Eq(self.key, uid)
            res = self.course_results.evalAdvancedQuery(query)
            if len(res) > 0:
                rec = res[0]
                equal = True
                for attr in ('student_id','level_id','course_id'):
                    if getattr(rec,attr,'') != data[attr]:
                        equal = False
                        break
                if equal:
                    existing_uids.append(uid)
                    continue
            self.catalog_object(dict2ob(data), uid=uid)
        return existing_uids
    ###)

    def deleteResultsHere(self,level_id,student_id): ###(
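        # Remove all course result records of this student at the given level.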
        query = Eq('student_id',student_id) & Eq('level_id', level_id)
        course_results = self.course_results.evalAdvancedQuery(query)
        #import pdb;pdb.set_trace()
        for result in course_results:
            self.deleteRecord(result.key)
    ###)

    def moveResultsHere(self,level,student_id): ###(
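        # Move the course result objects stored inside the level folder into the
        # course_results catalog and delete them from the folder afterwards.
        # Object ids ending in '_co' are flagged as carry-overs.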
        #import pdb;pdb.set_trace()
        level_id = level.getId()
        query = Eq('student_id',student_id) & Eq('level_id', level_id)
        course_results = self.course_results.evalAdvancedQuery(query)
        existing_courses = [cr.code for cr in course_results]
        to_delete = []
        for code,obj in level.objectItems():
            to_delete.append(code)
            carry_over = False
            if code.endswith('_co'):
                carry_over = True
                code = code[:-3]
            if code in existing_courses:
                continue
            course_result_doc = obj.getContent()
            data = {}
            course_id = code
            for field in self.schema():
                data[field] = getattr(course_result_doc,field,'')
            data['key'] = key = "%(student_id)s|%(level_id)s|%(course_id)s" % vars()
            data['student_id'] = student_id
            data['level_id'] = level_id
            session_id = self.getLevelSession(level.getContent(),student_id,level_id)
            data['session_id'] = session_id
            #data['queue_status'] = OBJECT_CREATED
            data['code'] = course_id
            data['carry_over'] = carry_over
            self.catalog_object(dict2ob(data), uid=key)
        level.manage_delObjects(to_delete)
    ###)

    def getCourses(self,student_id,level_id): ###(
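        # Collect the student's results at this level as dictionaries, split into
        # carry-overs and first/second/third semester courses. total_credits sums
        # the credits of all counted courses; gpa accumulates weight * credits and
        # is not divided by total_credits here.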
        query = Eq('student_id',student_id) & Eq('level_id', level_id)
        course_results = self.course_results.evalAdvancedQuery(query)
        carry_overs = []
        normal1 = []
        normal2 = []
        normal3 = []
        total_credits = 0
        gpa = 0
        for brain in course_results:
            d = {}

            for field in self.schema():
                d[field] = getattr(brain,field,None)
                if repr(d[field]) == 'Missing.Value':
                    d[field] = ''
            d['weight'] = ''
            d['grade'] = ''
            d['score'] = ''

            if str(brain.credits).isdigit():
                credits = int(brain.credits)
                total_credits += credits
                score = getattr(brain,'score',0)
                if score and str(score).isdigit() and int(score) > 0:
                    score = int(score)
                    grade,weight = self.getGradesFromScore(score,'')
                    gpa += weight * credits
                    d['weight'] = weight
                    d['grade'] = grade
                    d['score'] = score

            #if str(brain.ca1).isdigit() and str(brain.ca2).isdigit() and str(brain.exam).isdigit():
            #    d['score_calc'] = int(brain.ca1) + int(brain.ca2) + int(brain.exam)
            #else:
            #    d['score_calc'] = ''
            try:
                d['score_calc'] = float(brain.ca1) + float(brain.ca2) + float(brain.exam)
            except:
                d['score_calc'] = ''

            if d['score_calc']:
                grade = self.getGradesFromScore(d['score_calc'],level_id)
                d['grade'] = grade

            d['coe'] = ''
            if brain.core_or_elective:
                d['coe'] = 'Core'
            elif brain.core_or_elective == False:
                d['coe'] = 'Elective'
            id = code = d['id'] = brain.code
            d['code'] = code
            res = self.courses_catalog.evalAdvancedQuery(Eq('code',code))
            if res:
                course = res[0]
                d['title'] = course.title
                # The courses_catalog contains strings and integers in its semester field.
                # Maybe this can be fixed by reindexing the catalog; the course schema declares it a 'CPS Int Field'.
                d['semester'] = str(course.semester)
            else:
                d['title'] = "Course has been removed from course list"
                d['semester'] = ''
            if brain.carry_over:
                d['coe'] = 'CO'
                carry_overs.append(d)
            else:
                if d['semester'] == '1':
                    normal1.append(d)

                elif d['semester'] == '2':
                    normal2.append(d)
                else:
                    normal3.append(d)
        #normal.sort(cmp=lambda x,y: cmp("%(semester)s%(code)s" % x,
        #                                "%(semester)s%(code)s" % y))
        carry_overs.sort(cmp=lambda x,y: cmp("%(semester)s%(code)s" % x,
                                             "%(semester)s%(code)s" % y))
        return total_credits,gpa,carry_overs,normal1,normal2,normal3
    ###)


    # for transcript only
    def getAllCourses(self,student_id): ###(
        query = Eq('student_id',student_id)
        course_results = self.course_results.evalAdvancedQuery(query)
        courses = []
        for brain in course_results:
            d = {}

            for field in self.schema():
                d[field] = getattr(brain,field,'')

            d['weight'] = ''
            d['grade'] = ''
            d['score'] = ''

            if str(brain.credits).isdigit():
                credits = int(brain.credits)
                score = getattr(brain,'score',0)
                if score and str(score).isdigit() and int(score) > 0:
                    score = int(score)
                    grade,weight = self.getGradesFromScore(score,'')
                    d['weight'] = weight
                    d['grade'] = grade
                    d['score'] = score
            d['coe'] = ''
            if brain.core_or_elective:
                d['coe'] = 'Core'
            elif brain.core_or_elective == False:
                d['coe'] = 'Elective'
            id = code = d['id'] = brain.code
            d['code'] = code
            res = self.courses_catalog.evalAdvancedQuery(Eq('code',code))
            if res:
                course = res[0]
                d['title'] = course.title
                # The courses_catalog contains strings and integers in its semester field.
                # Maybe this can be fixed by reindexing the catalog; the course schema declares it a 'CPS Int Field'.
                d['semester'] = str(course.semester)
            else:
                d['title'] = "Course has been removed from course list"
                d['semester'] = ''
            if brain.carry_over:
                d['coe'] = 'CO'
            courses.append(d)
        return courses
    ###)

    def getYearGroupAverage(self,session_id,level_id): ###(
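        # Average the total score (ca1 + ca2 + exam) of all results of the given
        # session and level, separately per semester. Returns the three averages,
        # the number of contributing results and the raw score lists.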
        query = Eq('session_id',session_id) & Eq('level_id',level_id)
        course_results = self.course_results.evalAdvancedQuery(query)
        yga1 = 0
        yg1 = []
        counter1 = 0
        yga2 = 0
        yg2 = []
        counter2 = 0
        yga3 = 0
        yg3 = []
        counter3 = 0
        #import pdb;pdb.set_trace()
        for brain in course_results:
            try:
                om = float(brain.ca1) + float(brain.ca2) + float(brain.exam)
                if not om > 0:
                    continue
                code = brain.code
                res = self.courses_catalog.evalAdvancedQuery(Eq('code',code))
                if res:
                    course = res[0]
                    # The courses_catalog contains strings and integers in its semester field.
                    # Maybe this can be fixed by reindexing the catalog; the course schema declares it a 'CPS Int Field'.
                    semester = str(course.semester)
                else:
                    semester = ''
                if semester == '1':
                    counter1 += 1
                    yga1 += om
                    yg1.append(om)
                elif semester == '2':
                    counter2 += 1
                    yga2 += om
                    yg2.append(om)
                elif semester == '3':
                    counter3 += 1
                    yga3 += om
                    yg3.append(om)
            except:
                continue
        if counter1:
            yga1 /= counter1
            yga1 = '%.2f' % yga1
        if counter2:
            yga2 /= counter2
            yga2 = '%.2f' % yga2
        if counter3:
            yga3 /= counter3
            yga3 = '%.2f' % yga3
        return yga1, yga2, yga3, counter1, counter2, counter3, yg1, yg2, yg3
    ###)


    #security.declarePublic("calculateCoursePosition")
    def calculateCoursePosition(self,session_id,level_id,code,score,semester=None):
        #"""calculate Course Position"""
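        # Rank the given score among all results for this course in the given
        # session and level (optionally restricted to one semester). Returns a
        # dict with the position string 'n of m' and the sorted score list, or
        # 'no result' if there is nothing to compare against.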
        query = Eq('session_id',session_id) & Eq('level_id',level_id) & Eq('code',code)
        course_results = self.course_results.evalAdvancedQuery(query)
        ygc = []
        #import pdb;pdb.set_trace()
        for brain in course_results:
            try:
                if not float(brain.ca1) + float(brain.ca2) + float(brain.exam) > 0:
                    continue
                #code = brain.code
                if semester:
                    res = self.courses_catalog.evalAdvancedQuery(Eq('code',code))
                    if res:
                        course = res[0]
                        # The courses_catalog contains strings and integers in its semester field.
                        # Maybe this can be fixed by reindexing the catalog; the course schema declares it a 'CPS Int Field'.
                        semester_from_course = str(course.semester)
                    else:
                        continue
                    if semester != semester_from_course:
                        continue
                ygc.append(float(brain.ca1) + float(brain.ca2) + float(brain.exam))
            except:
                continue
        ygc.sort(reverse=True)
        if not len(ygc):
            return 'no result'
        #import pdb;pdb.set_trace()
        for pos in range(len(ygc)):
            if ygc[pos] <= float(score):
                break
        output = {}
        output['pos'] = '%d of %d' % (pos+1,len(ygc))
        output['ygc'] = ygc
        return output

    security.declareProtected(ModifyPortalContent,"calculateAllCoursePositions")
    def calculateAllCoursePositions(self,session_id=None):
        """calculate All Course Positions"""
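        # Recalculate the 'pic' field (the position string returned by
        # calculateCoursePosition) for every course result of the given session;
        # results without a positive total score get an empty 'pic'.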
        logger = logging.getLogger('WAeUPTables.CourseResults.calculateAllCoursePositions')
        member = self.portal_membership.getAuthenticatedMember()
        logger.info('%s starts recalculation of positions in session %s' % (member,session_id))
        if session_id:
            query = Eq('session_id',session_id)
        else:
            return 'no session_id provided'
        course_results = self.course_results.evalAdvancedQuery(query)
        for brain in course_results:
            try:
                if not float(brain.ca1) + float(brain.ca2) + float(brain.exam) > 0:
                    data = {}
                    data[self.key] = brain.key
                    data['pic'] = ''
                    self.modifyRecord(**data)
                    continue
                res = self.courses_catalog.evalAdvancedQuery(Eq('code',brain.code))
                if res:
                    course = res[0]
                    semester_from_course = str(course.semester)
                else:
                    continue
                score = float(brain.ca1) + float(brain.ca2) + float(brain.exam)
                pic = self.calculateCoursePosition(session_id,brain.level_id,brain.code,score,semester_from_course)['pos']
                data = {}
                data[self.key] = brain.key
                data['pic'] = pic
                self.modifyRecord(**data)
            except:
                data = {}
                data[self.key] = brain.key
                data['pic'] = ''
                self.modifyRecord(**data)
                continue
        logger.info('recalculation finished')
        return 'ready'

    def exportRemoveAllCourses(self,student_id,export=False,remove=False): ###(
        """export a student's course results to a CSV file and/or remove them from the catalog"""
        query = Eq('student_id',student_id)
        cr_catalog = self.course_results
        course_results = cr_catalog.evalAdvancedQuery(query)
        courses = []
        fields = self.schema()
        format = '"%(' + ')s","%('.join(fields) + ')s"'
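        # Build a CSV row template like '"%(field1)s","%(field2)s"' from the
        # schema fields; each result brain is rendered through this template below.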
        for brain in course_results:
            d = {}
            for field in fields:
                d[field] = getattr(brain,field,'')
            courses.append(format % d)

        if export:
            export_file = "%s/export/course_results_removed.csv" % (i_home)
            if not os.path.exists(export_file):
                file_handler = open(export_file,"a")
                headline = ','.join(fields)
                file_handler.write(headline +'\n')
            else:
                file_handler = open(export_file,"a")
            for line in courses:
                file_handler.write(line +'\n')
            file_handler.close()

        if remove:
            for brain in course_results:
                key = getattr(brain,'key','')
                cr_catalog.deleteRecord(key)

        return courses
    ###)


InitializeClass(CourseResults)
###)

class OnlinePaymentsImport(WAeUPTable): ###(

    meta_type = 'WAeUP Online Payment Transactions'
    name = "online_payments_import"
    key = "order_id"
    def __init__(self,name=None):
        if name ==  None:
            name = self.name
        WAeUPTable.__init__(self, name)


InitializeClass(OnlinePaymentsImport)
###)

class ReturningImport(WAeUPTable): ###(

    meta_type = 'Returning Import Table'
    name = "returning_import"
    key = "matric_no"
    def __init__(self,name=None):
        if name ==  None:
            name = self.name
        WAeUPTable.__init__(self, name)


InitializeClass(ReturningImport)
###)

class ResultsImport(WAeUPTable): ###(

    meta_type = 'Results Import Table'
    name = "results_import"
    key = "key"
    def __init__(self,name=None):
        if name ==  None:
            name = self.name
        WAeUPTable.__init__(self, name)


InitializeClass(ResultsImport)

###)

class PaymentsCatalog(WAeUPTable): ###(
    security = ClassSecurityInfo()

    meta_type = 'WAeUP Payments Catalog'
    name = "payments_catalog"
    key = "order_id"
    def __init__(self,name=None):
        if name ==  None:
            name = self.name
        WAeUPTable.__init__(self, name)


    security.declarePrivate('notify_event_listener') ###(
    def notify_event_listener(self,event_type,object,infos):
        "listen for events"
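        # Mirror Payment content objects into the payments catalog, keyed by
        # order_id: delete the record when the payment proxy is removed and, on
        # modify events of the real object, copy all schema fields and derive the
        # student_id from the object's path.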
        if not infos.has_key('rpath'):
            return
        pt = getattr(object,'portal_type',None)
        mt = getattr(object,'meta_type',None)
        data = {}
        if pt != 'Payment':
            return
        if event_type == 'sys_del_object' and mt == 'CPS Proxy Folder':
            self.deleteRecord(object.getContent().order_id)
        if mt == 'CPS Proxy Folder':
            return # is handled only for the real object
        if event_type not in ('sys_modify_object',):
            return
        for field in self.schema():
            data[field] = getattr(object,field,'')
        rpl = infos['rpath'].split('/')
        #import pdb;pdb.set_trace()
        student_id = rpl[-4]
        data['student_id'] = student_id
        modified = False
        try:
            self.modifyRecord(**data)
            modified = True
        except KeyError:
            #logger = logging.getLogger('WAeUPTables.PaymentsCatalog.%s' % self.__name__)
            #logger.info("could not modify entry for %(student_id)s with %(order_id)s" % data)
            pass
        if not modified:
            try:
                self.addRecord(**data)
            except:
                logger = logging.getLogger('WAeUPTables.PaymentsCatalog.notify_event_listener')
                logger.info("could not add or modify entry for %(student_id)s with %(order_id)s" % data)
    ###)


    def exportRemoveAllPayments(self,student_id,export=False,remove=False): ###(
        """export a student's payments to a CSV file and/or remove them from the catalog"""
        query = Eq('student_id',student_id)
        pm_catalog = self.payments_catalog
        payments = pm_catalog.evalAdvancedQuery(query)
        payments_dic = []
        fields = self.schema()
        format = '"%(' + ')s","%('.join(fields) + ')s"'
        for brain in payments:
            d = {}
            for field in fields:
                d[field] = getattr(brain,field,'')
            payments_dic.append(format % d)

        if export:
            export_file = "%s/export/payments_removed.csv" % (i_home)
            if not os.path.exists(export_file):
                file_handler = open(export_file,"a")
                headline = ','.join(fields)
                file_handler.write(headline +'\n')
            else:
                file_handler = open(export_file,"a")
            for line in payments_dic:
                file_handler.write(line +'\n')
            file_handler.close()

        if remove:
            for brain in payments:
                order_id = getattr(brain,'order_id','')
                pm_catalog.deleteRecord(order_id)

        return payments_dic
    ###)

    security.declareProtected(ModifyPortalContent,"dumpPayments")###(
    def dumpPayments(self,session_id=''):
        """dump all valid payments and combine with student data """
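        # Select all payments with status 'paid' and type 'online' for the given
        # (or current) session, join each row with the matching students_catalog
        # entry and append the combined rows to a CSV file in the instance's
        # export directory.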
        if not session_id:
            session_id = self.getSessionId()[0]
        member = self.portal_membership.getAuthenticatedMember()
        logger = logging.getLogger('WAeUPTables.PaymentsCatalog.dumpPayments')
        current = DateTime.DateTime().strftime("%d-%m-%y_%H_%M_%S")
        export_file = "%s/export/valid_payments%s_%s.csv" % (i_home,session_id,current,)

        pm_catalog = self.payments_catalog
        query = Eq('status','paid') & Eq('type','online') & Eq('session_id',session_id)
        payments = pm_catalog.evalAdvancedQuery(query)
        payments_dic = []
        s_catalog = self.students_catalog
        fields_pm = pm_catalog.schema()
        fields_s = s_catalog.schema()
        fields = fields_pm + fields_s

        format = '"%(' + ')s","%('.join(fields) + ')s"'
        #import pdb;pdb.set_trace()
        for brain in payments:
            d = {}
            for field in fields_pm:
                d[field] = getattr(brain,field,'')

            student_id = getattr(brain,'student_id','')
            query = Eq('id',student_id)
            student = s_catalog.evalAdvancedQuery(query)
            if student:
                for field in fields_s:
                    d[field] = getattr(student[0],field,'')
            else:
                # avoid a KeyError in the row template when the student record no longer exists
                for field in fields_s:
                    d[field] = ''
            payments_dic.append(format % d)

        if not os.path.exists(export_file):
            file_handler = open(export_file,"a")
            headline = ','.join(fields)
            file_handler.write(headline +'\n')
        else:
            file_handler = open(export_file,"a")
        for line in payments_dic:
            file_handler.write(line +'\n')
        file_handler.close()

        return 'ready'


InitializeClass(PaymentsCatalog)

###)

class RemovedStudentIds(WAeUPTable): ###(

    meta_type = 'WAeUP Removed StudentIds'
    name = "removed_student_ids"
    key = "id"
    def __init__(self,name=None):
        if name ==  None:
            name = self.name
        WAeUPTable.__init__(self, name)


InitializeClass(RemovedStudentIds)

###)

# BBB:
AccomodationTable = AccommodationTable