source: WAeUP_SRP/trunk/WAeUPTables.py @ 4056

Last change on this file since 4056 was 4036, checked in by Henrik Bettermann on 2009-04-01

implement year group average

  • Property svn:keywords set to Id
File size: 70.5 KB
1#-*- mode: python; mode: fold -*-
2# (C) Copyright 2005 AixtraWare <http://aixtraware.de>
3# Author: Joachim Schmitz <js@aixtraware.de>
4#
5# This program is free software; you can redistribute it and/or modify
6# it under the terms of the GNU General Public License version 2 as published
7# by the Free Software Foundation.
8#
9# This program is distributed in the hope that it will be useful,
10# but WITHOUT ANY WARRANTY; without even the implied warranty of
11# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
12# GNU General Public License for more details.
13#
14# You should have received a copy of the GNU General Public License
15# along with this program; if not, write to the Free Software
16# Foundation, Inc., 59 Temple Place - Suite 330, Boston, MA
17# 02111-1307, USA.
18#
19# $Id: WAeUPTables.py 4036 2009-04-01 08:40:55Z henrik $
20
21from zope.interface import implements
22from Globals import InitializeClass
23from Products.ZCatalog.ZCatalog import ZCatalog
24from Products.ZCatalog.ProgressHandler import ZLogHandler
25from AccessControl import ClassSecurityInfo
26from Products.CMFCore.permissions import ModifyPortalContent
27from Products.CMFCore.utils import getToolByName
28from Products.CMFCore.CatalogTool import CatalogTool
29from Products.CPSSchemas.StorageAdapter import MappingStorageAdapter
30from Products.CPSSchemas.DataStructure import DataStructure
31from Products.CPSSchemas.DataModel import DataModel
32from Products.AdvancedQuery import Eq, Between, Le,In
33import urllib
34import DateTime,time
35import csv,re,os
36import logging
37import Globals
38p_home = Globals.package_home(globals())
39i_home = Globals.INSTANCE_HOME
40
41ADDING_SHEDULED = "adding_sheduled"
42OBJECT_CREATED = "object_created"
43NOT_OCCUPIED = 'not_occupied'
44
45from interfaces import IWAeUPTable
46
47class AttributeHolder(object):
48    pass
49
50def dict2ob(mapping):
51    ob = AttributeHolder()
52    for key, value in mapping.items():
53        setattr(ob, key, value)
54    return ob
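# dict2ob turns a plain mapping into an attribute-style record, which is the form
# ZCatalog.catalog_object expects to index. Hypothetical example (values invented):
#   >>> rec = dict2ob({'bed': 'A-101', 'student': 'not_occupied'})
#   >>> rec.bed
#   'A-101'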
55
56class WAeUPTable(ZCatalog): ###(
57
58    implements(IWAeUPTable)
59    security = ClassSecurityInfo()
60    meta_type = None
61
62    def __init__(self,name=None):
63        if name ==  None:
64            name = self.name
65        ZCatalog.__init__(self,name)
66
67    def refreshCatalog(self, clear=0, pghandler=None): ###(
68        """ don't refresh for a normal table """
69
70        if self.REQUEST and self.REQUEST.RESPONSE:
71            self.REQUEST.RESPONSE.redirect(
72              self.REQUEST.get('URL1') +
73              '/manage_catalogAdvanced?manage_tabs_message=Catalog%20refresh%20not%20implemented')
74
75###)
76
77    def manage_catalogClear(self, REQUEST=None, RESPONSE=None, URL1=None): ###(
78        """ clears the whole enchilada """
79
80        #if REQUEST and RESPONSE:
81        #    RESPONSE.redirect(
82        #      URL1 +
83        #      '/manage_catalogAdvanced?manage_tabs_message=Catalog%20Clearing%20disabled')
84
85        self._catalog.clear()
86        if REQUEST and RESPONSE:
87            RESPONSE.redirect(
88              URL1 +
89              '/manage_catalogAdvanced?manage_tabs_message=Catalog%20cleared')
90
91###)
92
93    def record2dict(self,fields,record): ###(
94        d = {}
95        for key in fields:
96            v = getattr(record, key, None)
97            v_dump = v
98            if key == 'sex':
99                if v == True:
100                    v_dump = 'F'
101                elif v == False:
102                    v_dump = 'M'
103                d[key] = v_dump
104            elif v:
105                if key == 'lga':
106                    v_dump = self.portal_vocabularies.local_gov_areas.get(v)
107                    if not v_dump:
108                        v_dump = v
109                elif key == 'aos':
110                    v_dump = self.portal_vocabularies.aos.get(v)
111                d[key] = v_dump
112            else:
113                d[key] = ''
114        return d
115
116###)
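    # The dicts produced by record2dict above are export-friendly: the boolean
    # 'sex' field becomes 'F'/'M', and coded 'lga'/'aos' values are resolved through
    # the portal vocabularies when possible. A hypothetical result (values invented):
    #   {'sex': 'F', 'lga': 'Oredo', 'matric_no': 'M123456', 'email': ''}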
117
118    def addRecord(self, **data): ###(
119        # The uid is the value of the table's key field (e.g. "bed" for the accommodation table).
120        uid = data[self.key]
121        res = self.searchResults({"%s" % self.key : uid})
122        if len(res) > 0:
123            raise ValueError("A record with uid %s already exists" % uid)
124        self.catalog_object(dict2ob(data), uid=uid)
125        return uid
126
127###)
128
129    def deleteRecord(self, uid):
130        self.uncatalog_object(uid)
131
132    def getRecordByKey(self,key):
133        if not key:
134            return None
135        res = self.evalAdvancedQuery(Eq(self.key,key))
136        if res:
137            return res[0]
138        return None
139
140    def searchAndSetRecord(self, **data):
141        raise NotImplementedError("searchAndSetRecord must be implemented by a subclass")
142
143    def modifyRecord(self, record=None, **data): ###(
144        #records = self.searchResults(uid=uid)
145        uid = data[self.key]
146        if record is None:
147            records = self.searchResults({"%s" % self.key : uid})
148            if len(records) > 1:
149                # Can not happen, but anyway...
150                raise ValueError("More than one record with uid %s" % uid)
151            if len(records) == 0:
152                raise KeyError("No record for uid %s" % uid)
153            record = records[0]
154        record_data = {}
155        for field in self.schema() + self.indexes():
156            record_data[field] = getattr(record, field)
157        # Add the updated data:
158        record_data.update(data)
159        self.catalog_object(dict2ob(record_data), uid)
160
161###)
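    # modifyRecord above re-reads the full record (all schema and index fields),
    # merges in the new values and re-catalogs the whole object, because ZCatalog
    # offers no partial update.
    # Hypothetical call: table.modifyRecord(bed='A-101', student='X123456')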
162
163    def reindexIndex(self, name, REQUEST,pghandler=None): ###(
164        if isinstance(name, str):
165            name =  (name,)
166        paths = self._catalog.uids.items()
167        i = 0
168        #import pdb;pdb.set_trace()
169        for p,rid in paths:
170            i += 1
171            metadata = self.getMetadataForRID(rid)
172            record_data = {}
173            for field in name:
174                record_data[field] = metadata.get(field)
175            uid = metadata.get(self.key)
176            self.catalog_object(dict2ob(record_data), uid, idxs=name,
177                                update_metadata=0)
178
179###)
180
181    security.declareProtected(ModifyPortalContent,"exportAllRecords") ###(
182    def exportAllRecords(self):
183        "export a WAeUPTable"
184        #import pdb;pdb.set_trace()
185        fields = [field for field in self.schema()]
186        format = ','.join(['"%%(%s)s"' % fn for fn in fields])
187        lines = []  # note: do not name this list 'csv'; that would shadow the csv module
188        lines.append(','.join(['"%s"' % fn for fn in fields]))
189        for uid in self._catalog.uids:
190            records = self.searchResults({"%s" % self.key : uid})
191            if len(records) > 1:
192                # Can not happen, but anyway...
193                raise ValueError("More than one record with uid %s" % uid)
194            if len(records) == 0:
195                raise KeyError("No record for uid %s" % uid)
196            rec = records[0]
197            lines.append(format % rec)
198        current = DateTime.DateTime().strftime("%d-%m-%y_%H_%M_%S")
199        open("%s/import/%s-%s.csv" % (i_home,self.getId(),current),"w+").write('\n'.join(lines))
200
201###)
202
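    # dumpAll below writes the export in chunks of 2000 rows; after each chunk it logs
    # the chunk duration, the time per record and an estimated finishing time computed
    # as start + (elapsed_so_far / records_done) * records_to_export.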
203    security.declareProtected(ModifyPortalContent,"dumpAll")###(
204    def dumpAll(self,index=None,value=None):
205        """dump all data in the table to a csv"""
206        member = self.portal_membership.getAuthenticatedMember()
207        logger = logging.getLogger('WAeUPTables.WAeUPTable.dumpAll')
208        current = DateTime.DateTime().strftime("%d-%m-%y_%H_%M_%S")
209        export_file = "%s/export/%s_%s.csv" % (i_home,self.__name__,current,)
210        res_list = []
211        lines = []
212        if hasattr(self,"export_keys"):
213            fields = self.export_keys
214        else:
215            fields = []
216            for f in self.schema():
217                fields.append(f)
218        headline = ','.join(fields)
219        out = open(export_file,"wb")
220        out.write(headline +'\n')
221        out.close()
222        out = open(export_file,"a")
223        csv_writer = csv.DictWriter(out,fields,)
224        if index is not None and value is not None:
225            records = self.evalAdvancedQuery(Eq(index,value))
226        else:
227            records = self()
228        nr2export = len(records)
229        logger.info('%s starts dumping, %s records to export' % (member,nr2export))
230        chunk = 2000
231        total = 0
232        start = DateTime.DateTime().timeTime()
233        start_chunk = DateTime.DateTime().timeTime()
234        for record in records:
235            not_all = False
236            d = self.record2dict(fields,record)
237            lines.append(d)
238            total += 1
239            if total and not total % chunk or total == len(records):
240                csv_writer.writerows(lines)
241                anz = len(lines)
242                logger.info("wrote %(anz)d  total written %(total)d" % vars())
243                end_chunk = DateTime.DateTime().timeTime()
244                duration = end_chunk-start_chunk
245                per_record = duration/anz
246                till_now = end_chunk - start
247                average_per_record = till_now/total
248                estimated_end = DateTime.DateTime(start + average_per_record * nr2export)
249                estimated_end = estimated_end.strftime("%H:%M:%S")
250                logger.info('%(duration)4.1f, %(per_record)4.3f,end %(estimated_end)s' % vars())
251                start_chunk = DateTime.DateTime().timeTime()
252                lines = []
253        end = DateTime.DateTime().timeTime()
254        logger.info('total time %6.2f m' % ((end-start)/60))
255        import os
256        filename, extension = os.path.splitext(export_file)
257        from subprocess import call
258        msg = "wrote %(total)d records to %(export_file)s" % vars()
259        #try:
260        #    retcode = call('gzip %s' % (export_file),shell=True)
261        #    if retcode == 0:
262        #        msg = "wrote %(total)d records to %(export_file)s.gz" % vars()
263        #except OSError, e:
264        #    retcode = -99
265        #    logger.info("zip failed with %s" % e)
266        logger.info(msg)
267        args = {'portal_status_message': msg}
268        #url = self.REQUEST.get('URL1') + '?' + urlencode(args)
269        url = self.REQUEST.get('URL2')
270        return self.REQUEST.RESPONSE.redirect(url)
271    ###)
272
273    security.declarePrivate("_import_old") ###(
274    def _import_old(self,filename,schema,layout, mode,logger):
275        "import data from csv"
276        import transaction
277        import random
278        pm = self.portal_membership
279        member = pm.getAuthenticatedMember()
280        current = DateTime.DateTime().strftime("%d-%m-%y_%H_%M_%S")
281        import_fn = "%s/import/%s.csv" % (i_home,filename)
282        imported_fn = "%s/import/%s_imported%s.csv" % (i_home,filename,current)
283        not_imported_fn = "%s/import/%s_not_imported%s.csv" % (i_home,filename,current)
284        start = True
285        tr_count = 1
286        total_imported = 0
287        total_not_imported = 0
288        total = 0
289        iname =  "%s" % filename
290        not_imported = []
291        imported = []
292        valid_records = []
293        invalid_records = []
294        d = {}
295        d['mode'] = mode
296        d['imported'] = total_imported
297        d['not_imported'] = total_not_imported
298        d['valid_records'] = valid_records
299        d['invalid_records'] = invalid_records
300        d['import_fn'] = import_fn
301        d['imported_fn'] = imported_fn
302        d['not_imported_fn'] = not_imported_fn
303        if schema is None:
304            em = 'No schema specified'
305            logger.error(em)
306            return d
307        if layout is None:
308            em = 'No layout specified'
309            logger.error(em)
310            return d
311        validators = {}
312        for widget in layout.keys():
313            try:
314                validators[widget] = layout[widget].validate
315            except AttributeError:
316                logger.info('%s has no validate attribute' % widget)
317                return d
318        # if mode == 'edit':
319        #     importer = self.importEdit
320        # elif mode == 'add':
321        #     importer = self.importAdd
322        # else:
323        #     importer = None
324        try:
325            items = csv.DictReader(open(import_fn,"rb"),
326                                   dialect="excel",
327                                   skipinitialspace=True)
328        except:
329            em = 'Error reading %s.csv' % filename
330            logger.error(em)
331            return d
332        #import pdb;pdb.set_trace()
333        for item in items:
334            if start:
335                start = False
336                logger.info('%s starts import from %s.csv' % (member,filename))
337                #import_keys = [k for k in item.keys() if not k.startswith('ignore')]
338                attrs = csv.reader(open("%s/import/%s.csv" % (i_home,filename),"rb"),
339                                   dialect="excel",
340                                   skipinitialspace=True).next()
341                import_keys = [k for k in attrs if not (k.startswith('ignore') or k.isupper())]
342                diff2schema = set(import_keys).difference(set(schema.keys()))
343                diff2layout = set(import_keys).difference(set(layout.keys()))
344                if diff2layout:
345                    em = "not ignorable key(s) %s found in heading" % diff2layout
346                    logger.info(em)
347                    return d
348                s = ','.join(['"%s"' % fn for fn in import_keys])
349                open(not_imported_fn,"a").write(s + ',"Error"'+ '\n')
350                #s = '"id",' + s
351                open(imported_fn,"a").write(s + '\n')
352                format = ','.join(['"%%(%s)s"' % fn for fn in import_keys])
353                format_error = format + ',"%(Error)s"'
354                #format = '"%(id)s",'+ format
355                adapters = [MappingStorageAdapter(schema, item)]
356            dm = DataModel(item, adapters,context=self)
357            ds = DataStructure(data=item,datamodel=dm)
358            error_string = ""
359            #import pdb;pdb.set_trace()
360            for k in import_keys:
361                if not validators[k](ds,mode=mode):
362                    error_string += " %s : %s" % (k,ds.getError(k))
363            # if not error_string and importer:
364            #     item.update(dm)
365            #     item['id'],error = importer(item)
366            #     if error:
367            #         error_string += error
368            if error_string:
369                item['Error'] = error_string
370                invalid_records.append(dm)
371                not_imported.append(format_error % item)
372                total_not_imported += 1
373            else:
374                em = format % item
375                valid_records.append(dm)
376                imported.append(em)
377                #logger.info("%(total_imported)d of %(total)d %(em)s" % vars())
378                tr_count += 1
379                total_imported += 1
380            total += 1
381        if len(imported) > 0:
382            open(imported_fn,"a").write('\n'.join(imported))
383        if len(not_imported) > 0:
384            open(not_imported_fn,"a").write('\n'.join(not_imported))
385        #em = "Imported: %d, not imported: %d of total %d" % (total_imported,total_not_imported,total)
386        d['imported'] = total_imported
387        d['not_imported'] = total_not_imported
388        d['valid_records'] = valid_records
389        d['invalid_records'] = invalid_records
390        d['imported_fn'] = imported_fn
391        d['not_imported_fn'] = not_imported_fn
392        #logger.info(em)
393        return d
394    ###)
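    # The result dict returned by _import_old above has the keys 'mode', 'imported',
    # 'not_imported', 'valid_records', 'invalid_records', 'import_fn', 'imported_fn'
    # and 'not_imported_fn'; importCSV below reads it to add the records and to log
    # a summary line.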
395
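    # _import_new below is a partial rewrite of _import_old: it works on an already
    # opened csv.DictReader instead of a file name and hands the validated DataModels
    # back to the caller instead of writing the imported/not_imported csv files itself.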
396    security.declarePrivate("_import") ###(
397    def _import_new(self,csv_items,schema, layout, mode,logger):
398        "import data from a csv.DictReader instance"
399        start = True
400        tr_count = 1
401        total_imported = 0
402        total_not_imported = 0
403        total = 0
405        not_imported = []
406        valid_records = []
407        invalid_records = []
408        duplicate_records = []
409        d = {}
410        d['mode'] = mode
411        d['valid_records'] = valid_records
412        d['invalid_records'] = invalid_records
413        d['duplicate_records'] = duplicate_records
414        # d['import_fn'] = import_fn
415        # d['imported_fn'] = imported_fn
416        # d['not_imported_fn'] = not_imported_fn
417        validators = {}
418        for widget in layout.keys():
419            try:
420                validators[widget] = layout[widget].validate
421            except AttributeError:
422                logger.info('%s has no validate attribute' % widget)
423                return d
424        for item in csv_items:
425            if start:
426                start = False
427                logger.info('import from csv.DictReader instance starts')
428                # neither member nor filename is available in this method, so the
429                # import keys are taken from the first row instead of re-reading the csv file
430                import_keys = [k for k in item.keys() if not (k.startswith('ignore') or k.isupper())]
431                diff2schema = set(import_keys).difference(set(schema.keys()))
432                diff2layout = set(import_keys).difference(set(layout.keys()))
433                if diff2layout:
434                    em = "not ignorable key(s) %s found in heading" % diff2layout
435                    logger.info(em)
436                    return d
437                # s = ','.join(['"%s"' % fn for fn in import_keys])
438                # open(not_imported_fn,"a").write(s + ',"Error"'+ '\n')
439                # #s = '"id",' + s
440                # open(imported_fn,"a").write(s + '\n')
441                # format = ','.join(['"%%(%s)s"' % fn for fn in import_keys])
442                # format_error = format + ',"%(Error)s"'
443                # #format = '"%(id)s",'+ format
444                adapters = [MappingStorageAdapter(schema, item)]
445            dm = DataModel(item, adapters,context=self)
446            ds = DataStructure(data=item,datamodel=dm)
447            error_string = ""
448            for k in import_keys:
449                if not validators[k](ds,mode=mode):
450                    error_string += " %s : %s" % (k,ds.getError(k))
451            if error_string:
452                item['Error'] = error_string
453                #invalid_records.append(dm)
454                invalid_records.append(item)
455                total_not_imported += 1
456            else:
458                valid_records.append(dm)
459                #logger.info("%(total_imported)d of %(total)d %(em)s" % vars())
460                tr_count += 1
461                total_imported += 1
462            total += 1
463        # if len(imported) > 0:
464        #     open(imported_fn,"a").write('\n'.join(imported))
465        # if len(not_imported) > 0:
466        #     open(not_imported_fn,"a").write('\n'.join(not_imported))
467        #em = "Imported: %d, not imported: %d of total %d" % (total_imported,total_not_imported,total)
468        d['imported'] = total_imported
469        d['not_imported'] = total_not_imported
470        d['valid_records'] = valid_records
471        d['invalid_records'] = invalid_records
472        return d
473    ###)
474
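    # Missing.MV, returned by missingValue below, is Zope's marker for an absent
    # metadata value; callers can compare a brain's column against self.missingValue()
    # to detect columns that were never filled for a record.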
475    security.declarePublic("missingValue")###(
476    def missingValue(self):
477        from Missing import MV
478        return MV
479    ###)
480###)
481
482class AccommodationTable(WAeUPTable): ###(
483
484    meta_type = 'WAeUP Accommodation Tool'
485    name = "portal_accommodation"
486    key = "bed"
487    not_occupied = NOT_OCCUPIED
488    def __init__(self,name=None):
489        if name ==  None:
490            name = self.name
491        WAeUPTable.__init__(self, name)
492
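    # Return codes of searchAndReserveBed, as implemented below:
    #   ( 1, bed)                 - a free bed of the requested type was booked
    #   (-1, bed)                 - the student already holds a reservation; its bed id is returned
    #   (-2, "no bed")            - no free bed of the requested type is left
    #   (-3, "more than one bed") - data error: several beds are booked for the student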
493    def searchAndReserveBed(self, student_id,bed_type,random_order=False): ###(
494        logger = logging.getLogger('WAeUPTables.AccommodationTable.searchAndReserveBed')
495        records = self.evalAdvancedQuery(Eq('student',student_id))
496        if len(records) == 1:
497            #return -1,"Student with Id %s already booked bed %s." % (student_id,records[0].bed)
498            logger.info('%s found (reserved) bed %s' % (student_id,records[0].bed))
499            return -1,records[0].bed
500        elif len(records) > 1:
501            logger.info('%s found more than one (reserved) bed' % (student_id))
502            return -3,'more than one bed'
503        query = Eq('bed_type',bed_type) & Eq('student',NOT_OCCUPIED)
504        records = self.evalAdvancedQuery(query,sortSpecs=('sort_id','bed'))
505        if len(records) == 0:
506            logger.info('no bed %s available for %s' % (bed_type,student_id))
507            return -2,"no bed"
508        if random_order:
509            import random
510            bed_no = random.randint(0,len(records)-1)
511        else:
512            bed_no = 0
513        rec = records[bed_no]
514        self.modifyRecord(bed=rec.bed,student=student_id)
515        logger.info('%s booked bed %s' % (student_id,rec.bed))
516        return 1,rec.bed
517    ###)
518
519
520InitializeClass(AccommodationTable)
521
522###)
523
524class PinTable(WAeUPTable): ###(
525    from ZODB.POSException import ConflictError
526    security = ClassSecurityInfo()
527    meta_type = 'WAeUP Pin Tool'
528    name = "portal_pins"
529    key = 'pin'
530
531    def __init__(self,name=None):
532        if name ==  None:
533            name = self.name
534        WAeUPTable.__init__(self, name)
535
536    security.declareProtected(ModifyPortalContent,"dumpAll")###(
537    def dumpAll(self,include_unused=None):
538        """dump all data in the table to a csv"""
539        member = self.portal_membership.getAuthenticatedMember()
540        logger = logging.getLogger('WAeUPTables.PinTable.dumpAll')
541        current = DateTime.DateTime().strftime("%d-%m-%y_%H_%M_%S")
542        export_file = "%s/export/%s_%s.csv" % (i_home,self.__name__,current,)
543        res_list = []
544        lines = []
545        if hasattr(self,"export_keys"):
546            fields = self.export_keys
547        else:
548            fields = []
549            for f in self.schema():
550                fields.append(f)
551        headline = ','.join(fields)
552        out = open(export_file,"wb")
553        out.write(headline +'\n')
554        out.close()
555        out = open(export_file,"a")
556        csv_writer = csv.DictWriter(out,fields,)
557        if include_unused is not None and str(member) not in ('admin','joachim'):
558            logger.info('%s tries to dump pintable with unused pins' % (member))
559            return
560        if include_unused is not None:
561            records = self()
562        else:
563            records = self.evalAdvancedQuery(~Eq('student',''))
564        nr2export = len(records)
565        logger.info('%s starts dumping, %s records to export' % (member,nr2export))
566        chunk = 2000
567        total = 0
568        start = DateTime.DateTime().timeTime()
569        start_chunk = DateTime.DateTime().timeTime()
570        for record in records:
571            not_all = False
572            d = self.record2dict(fields,record)
573            lines.append(d)
574            total += 1
575            if total and not total % chunk or total == len(records):
576                csv_writer.writerows(lines)
577                anz = len(lines)
578                logger.info("wrote %(anz)d  total written %(total)d" % vars())
579                end_chunk = DateTime.DateTime().timeTime()
580                duration = end_chunk-start_chunk
581                per_record = duration/anz
582                till_now = end_chunk - start
583                average_per_record = till_now/total
584                estimated_end = DateTime.DateTime(start + average_per_record * nr2export)
585                estimated_end = estimated_end.strftime("%H:%M:%S")
586                logger.info('%(duration)4.1f, %(per_record)4.3f,end %(estimated_end)s' % vars())
587                start_chunk = DateTime.DateTime().timeTime()
588                lines = []
589        end = DateTime.DateTime().timeTime()
590        logger.info('total time %6.2f m' % ((end-start)/60))
591        import os
592        filename, extension = os.path.splitext(export_file)
593        from subprocess import call
594        msg = "wrote %(total)d records to %(export_file)s" % vars()
595        #try:
596        #    retcode = call('gzip %s' % (export_file),shell=True)
597        #    if retcode == 0:
598        #        msg = "wrote %(total)d records to %(export_file)s.gz" % vars()
599        #except OSError, e:
600        #    retcode = -99
601        #    logger.info("zip failed with %s" % e)
602        logger.info(msg)
603        args = {'portal_status_message': msg}
604        #url = self.REQUEST.get('URL1') + '?' + urlencode(args)
605        url = self.REQUEST.get('URL2')
606        return self.REQUEST.RESPONSE.redirect(url)
607    ###)
608
609
610
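    # Return codes of searchAndSetRecord, as implemented below:
    #    1, record - the pin was unused and has now been assigned to student_id
    #    2, record - the pin is already assigned to this student (or a write conflict occurred)
    #    0, record - the pin is assigned to a different student
    #   -1, None   - no pin with the given uid exists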
611    def searchAndSetRecord(self, uid, student_id,prefix):
612
613        # The following line must be activated after resetting the
614        # the portal_pins table. This is to avoid duplicate entries
615        # and disable duplicate payments.
616
617        #student_id = student_id.upper()
618
619        #records = self.searchResults(student = student_id)
620        #if len(records) > 0 and prefix in ('CLR','APP'):
621        #    for r in records:
622        #        if r.pin != uid and r.prefix_batch.startswith(prefix):
623        #            return -2
624        records = self.searchResults({"%s" % self.key : uid})
625        if len(records) > 1:
626            # Can not happen, but anyway...
627            raise ValueError("More than one record with uid %s" % uid)
628        if len(records) == 0:
629            return -1,None
630        record = records[0]
631        if record.student == "":
632            record_data = {}
633            for field in self.schema() + self.indexes():
634                record_data[field] = getattr(record, field)
635            # Add the updated data:
636            record_data['student'] = student_id
637            try:
638                self.catalog_object(dict2ob(record_data), uid)
639                return 1,record
640            except self.ConflictError:  # the class-level import is only reachable via self
641                return 2,record
642        if record.student.upper() != student_id.upper():
643            return 0,record
644        if record.student.upper() == student_id.upper():
645            return 2,record
646        return -3,record
647InitializeClass(PinTable)
648###)
649
650class PumeResultsTable(WAeUPTable): ###(
651
652    meta_type = 'WAeUP PumeResults Tool'
653    name = "portal_pumeresults"
654    key = "jamb_reg_no"
655    def __init__(self,name=None):
656        if name ==  None:
657            name = self.name
658        WAeUPTable.__init__(self, name)
659
660
661InitializeClass(PumeResultsTable)
662
663###)
664
665class ApplicantsCatalog(WAeUPTable): ###(
666
667    meta_type = 'WAeUP Applicants Catalog'
668    name = "applicants_catalog"
669    key = "reg_no"
670    security = ClassSecurityInfo()
671    #export_keys = (
672    #               "reg_no",
673    #               "status",
674    #               "lastname",
675    #               "sex",
676    #               "date_of_birth",
677    #               "lga",
678    #               "email",
679    #               "phone",
680    #               "passport",
681    #               "entry_mode",
682    #               "pin",
683    #               "screening_type",
684    #               "registration_date",
685    #               "testdate",
686    #               "application_date",
687    #               "screening_date",
688    #               "faculty",
689    #               "department",
690    #               "course1",
691    #               "course2",
692    #               "course3",
693    #               "eng_score",
694    #               "subj1",
695    #               "subj1score",
696    #               "subj2",
697    #               "subj2score",
698    #               "subj3",
699    #               "subj3score",
700    #               "aggregate",
701    #               "course_admitted",
702    #               )
703
704    def __init__(self,name=None):
705        if name ==  None:
706            name = self.name
707        WAeUPTable.__init__(self, name)
708
709    security.declareProtected(ModifyPortalContent,"new_importCSV")###(
710    def new_importCSV(self,filename="JAMB_data",
711                  schema_id="application",
712                  layout_id="import_application",
713                  mode='add'):
714        """ import JAMB data """
715        current = DateTime.DateTime().strftime("%d-%m-%y_%H_%M_%S")
716        pm = self.portal_membership
717        member = pm.getAuthenticatedMember()
718        logger = logging.getLogger('WAeUPTables.ApplicantsCatalog.importCSV')
719        lock_fn = "%s/import/%s_import_lock" % (i_home,filename)
720        import_fn = "%s/import/%s.csv" % (i_home,filename)
721        if mode not in ('add','edit'):
722            logger.info("invalid mode: %s" % mode)
723            return
723        if os.path.exists(lock_fn):
724            logger.info("import of %(import_fn)s already in progress" % vars())
725            return
726        lock_file = open(lock_fn,"w")
727        lock_file.write("%(current)s \n" % vars())
728        lock_file.close()
729        invalid_fn = "%s/import/%s_not_imported%s.csv" % (i_home,filename,current)
730        duplicate_fn = "%s/import/%s_duplicates%s.csv" % (i_home,filename,current)
731        stool = getToolByName(self, 'portal_schemas')
732        ltool = getToolByName(self, 'portal_layouts')
733        schema = stool._getOb(schema_id)
734        if schema is None:
735            em = 'No such schema %s' % schema_id
736            logger.error(em)
737            return
738        for postfix in ('_import',''):
739            layout_name = "%(layout_id)s%(postfix)s" % vars()
740            if hasattr(ltool,layout_name):
741                break
742        layout = ltool._getOb(layout_name)
743        if layout is None:
744            em = 'No such layout %s' % layout_id
745            logger.error(em)
746            return
747        try:
748            csv_file = csv.DictReader(open(import_fn,"rb"))
749        except:
750            em = 'Error reading %s.csv' % filename
751            logger.error(em)
752            return
753        d = self._import_new(csv_file,schema,layout,mode,logger)
754        imported = []
755        edited = []
756        duplicates = []
757        not_found = []
758        if len(d['valid_records']) > 0:
759            for record in d['valid_records']:
760                #import pdb;pdb.set_trace()
761                if mode == "add":
762                    try:
763                        self.addRecord(**dict(record.items()))
764                        imported.append(dict(record.items()))
765                        logger.info("added %s" % record.items())
766                    except ValueError:
767                        duplicates.append(dict(record.items()))
768                        logger.info("duplicate %s" % record.items())
769                elif mode == "edit":
770                    try:
771                        self.modifyRecord(**dict(record.items()))
772                        edited.append(dict(record.items()))
773                        logger.info("edited %s" % record.items())
774                    except KeyError:
775                        not_found.append(dict(record.items()))
776                        logger.info("not found %s" % record.items())
777        invalid = d['invalid_records']
778        for itype in ("imported","edited","not_found","duplicates","invalid"):
779            outlist = locals()[itype]
780            if len(outlist):
781                # prepend a heading row built from the record keys
782                heading = {}
783                for k in outlist[0].keys():
784                    heading[k] = k
785                outlist.insert(0,heading)
786                # note: the exact output path is a guess, patterned on the import file names
787                outfile = open("%s/import/%s_%s%s.csv" % (i_home,filename,itype,current),'w')
788                csv.DictWriter(outfile,outlist[0].keys()).writerows(outlist)
789                logger.info("wrote %d %s records to %s" % (len(outlist)-1,itype,outfile.name))
788###)
789
790    security.declareProtected(ModifyPortalContent,"importCSV")###(
791    def importCSV(self,filename="JAMB_data",
792                  schema_id="application",
793                  layout_id="application_pce",
794                  mode='add'):
795        """ import JAMB data """
796        logger = logging.getLogger('WAeUPTables.ApplicantsCatalog.importCSV')
797        stool = getToolByName(self, 'portal_schemas')
798        ltool = getToolByName(self, 'portal_layouts')
799        schema = stool._getOb(schema_id)
800        if schema is None:
801            em = 'No such schema %s' % schema_id
802            logger.error(em)
803            return
804        layout = ltool._getOb(layout_id)
805        if layout is None:
806            em = 'No such layout %s' % layout_id
807            logger.error(em)
808            return
809        d = self._import_old(filename,schema,layout,mode,logger)
810        if len(d['valid_records']) > 0:
811            for record in d['valid_records']:
812                #import pdb;pdb.set_trace()
813                if mode == "add":
814                    self.addRecord(**dict(record.items()))
815                    logger.info("added %s" % record.items())
816                elif mode == "edit":
817                    self.modifyRecord(**dict(record.items()))
818                    logger.info("edited %s" % record.items())
819                else:
820                    logger.info("invalid mode: %s" % mode)
821        logger.info("%(mode)sed %(imported)d records, invalid written to %(not_imported_fn)s" % d)
822    ###)
823
824InitializeClass(ApplicantsCatalog)
825
826###)
827
828class StudentsCatalog(WAeUPTable): ###(
829    security = ClassSecurityInfo()
830
831    meta_type = 'WAeUP Students Catalog'
832    name = "students_catalog"
833    key = "id"
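    # affected_types below maps each student sub-document portal_type to the id of the
    # sub-object holding it and to the catalog fields that are fed from it;
    # reindexIndex, refreshCatalog and notify_event_listener use this mapping to
    # decide which documents to read for which columns.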
834    affected_types = {   ###(
835                      'StudentApplication':
836                      {'id': 'application',
837                       'fields':
838                       ('jamb_reg_no',
839                        'entry_mode',
840                        #'entry_level',
841                        'entry_session',
842                       )
843                      },
844                      'StudentClearance':
845                      {'id': 'clearance',
846                       'fields':
847                       ('matric_no',
848                        'lga',
849                       )
850                      },
851                      'StudentPersonal':
852                      {'id': 'personal',
853                       'fields':
854                       ('name',
855                        'sex',
856                        'perm_address',
857                        'email',
858                        'phone',
859                       )
860                      },
861                      'StudentStudyCourse':
862                      {'id': 'study_course',
863                       'fields':
864                       ('course', # study_course
865                        'faculty', # from certificate
866                        'department', # from certificate
867                        'end_level', # from certificate
868                        'level', # current_level
869                        'mode',  # from certificate
870                        'session', # current_session
871                        'verdict', # current_verdict
872                       )
873                      },
874                     }
875    ###)
876
877    def __init__(self,name=None):
878        if name ==  None:
879            name = self.name
880        WAeUPTable.__init__(self, name)
881        return
882
883    def manage_catalogClear(self, REQUEST=None, RESPONSE=None, URL1=None):
884        """ clears the whole enchilada """
885        self._catalog.clear()
886
887        if REQUEST and RESPONSE:
888            RESPONSE.redirect(
889              URL1 +
890              '/manage_catalogAdvanced?manage_tabs_message=Catalog%20Cleared')
891
892    def manage_catalogReindex(self, REQUEST, RESPONSE, URL1): ###(
893        """ clear the catalog, then re-index everything """
894
895        elapse = time.time()
896        c_elapse = time.clock()
897
898        pgthreshold = self._getProgressThreshold()
899        handler = (pgthreshold > 0) and ZLogHandler(pgthreshold) or None
900        self.refreshCatalog(clear=1, pghandler=handler)
901
902        elapse = time.time() - elapse
903        c_elapse = time.clock() - c_elapse
904
905        RESPONSE.redirect(
906            URL1 +
907            '/manage_catalogAdvanced?manage_tabs_message=' +
908            urllib.quote('Catalog Updated \n'
909                         'Total time: %s\n'
910                         'Total CPU time: %s' % (`elapse`, `c_elapse`)))
911    ###)
912
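    # fill_certificates_dict below collects faculty, department, end_level and
    # study_mode per certificate; the result is cached in the volatile attribute
    # _v_certificates so that refreshCatalog and reindexIndex do not have to query
    # portal_catalog once per student. Being volatile, the cache is lost on restart
    # and rebuilt on demand.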
913    def fill_certificates_dict(self): ###(
914        "return certificate data in  dict"
915        certificates_brains = self.portal_catalog(portal_type ='Certificate')
916        d = {}
917        for cb in certificates_brains:
918            certificate_doc = cb.getObject().getContent()
919            cb_path = cb.getPath().split('/')
920            ld = {}
921            ld['faculty'] = cb_path[-4]
922            ld['department'] = cb_path[-3]
923            ld['end_level'] = getattr(certificate_doc,'end_level','999')
924            ld['study_mode'] = getattr(certificate_doc,'study_mode','')
925            d[cb.getId] = ld
926        return d
927    ###)
928
929    def get_from_doc_department(self,doc,cached_data={}): ###(
930        "return the students department"
931        if doc is None:
932            return None
933        if hasattr(self,"_v_certificates") and self._v_certificates.has_key(doc.study_course):
934            return self._v_certificates[doc.study_course]['department']
935        certificate_res = self.portal_catalog(id = doc.study_course)
936        if len(certificate_res) != 1:
937            return None
938        return certificate_res[0].getPath().split('/')[-3]
939
940    def get_from_doc_faculty(self,doc,cached_data={}):
941        "return the students faculty"
942        if doc is None:
943            return None
944        if hasattr(self,"_v_certificates") and self._v_certificates.has_key(doc.study_course):
945            return self._v_certificates[doc.study_course]['faculty']
946        certificate_res = self.portal_catalog(id = doc.study_course)
947        if len(certificate_res) != 1:
948            return None
949        return certificate_res[0].getPath().split('/')[-4]
950
951    def get_from_doc_end_level(self,doc,cached_data={}):
952        "return the students end_level"
953        if doc is None:
954            return None
955        if hasattr(self,"_v_certificates") and self._v_certificates.has_key(doc.study_course):
956            return self._v_certificates[doc.study_course]['end_level']
957        certificate_res = self.portal_catalog(id = doc.study_course)
958        if len(certificate_res) != 1:
959            return None
960        return getattr(certificate_res[0].getObject().getContent(),'end_level','unknown')
961
962    def get_from_doc_level(self,doc,cached_data={}):
963        "return the students level"
964        if doc is None:
965            return None
966        return getattr(doc,'current_level',None)
967
968    #def get_from_doc_mode(self,doc,cached_data={}):
969    #    "return the students mode"
970    #    if doc is None:
971    #        return None
972    #    cm = getattr(doc,'current_mode',None)
973    #    return cm
974   
975    def get_from_doc_mode(self,doc,cached_data={}):
976        "return the students mode"
977        if doc is None:
978            return None
979        if hasattr(self,"_v_certificates") and self._v_certificates.has_key(doc.study_course):
980            return self._v_certificates[doc.study_course]['study_mode']
981        certificate_res = self.portal_catalog(id = doc.study_course)
982        if len(certificate_res) != 1:
983            return None
984        return getattr(certificate_res[0].getObject().getContent(),'study_mode','unknown')   
985
986
987    def get_from_doc_session(self,doc,cached_data={}):
988        "return the students current_session"
989        if doc is None:
990            return None
991        return getattr(doc,'current_session',None)
992
993    def get_from_doc_entry_session(self,doc,cached_data={}):
994        "return the students entry_session"
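        # entry_session is stored either as a two-digit code (e.g. '08') or, by the
        # looks of the slicing below, as a full session string like '2008/2009' from
        # which the two-digit code is cut; failing both, the code is guessed from the
        # first digit of the JAMB registration number.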
995        if doc is None:
996            return None
997        es = getattr(doc,'entry_session',None)
998        if es is not None:
999            if len(es) < 3:
1000                return es
1001            if len(es) == 9:
1002                return es[2:4]
1002        try:
1003            digit = int(doc.jamb_reg_no[0])
1004        except:
1005            return "-1"
1006        if digit < 9:
1007            return "0%c" % doc.jamb_reg_no[0]
1008        return "9%c" % doc.jamb_reg_no[0]
1009
1010    def get_from_doc_course(self,doc,cached_data={}):
1011        "return the students study_course"
1012        if doc is None:
1013            return None
1014        return getattr(doc,'study_course',None)
1015
1016    def get_from_doc_name(self,doc,cached_data={}):
1017        "return the students name from the personal"
1018        if doc is None:
1019            return None
1020        return "%s %s %s" % (doc.firstname,doc.middlename,doc.lastname)
1021
1022    def get_from_doc_verdict(self,doc,cached_data={}):
1023        "return the students study_course"
1024        if doc is None:
1025            return None
1026        return getattr(doc,'current_verdict',None)
1027    ###)
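    # Naming convention: for every catalog field listed in affected_types an optional
    # accessor get_from_doc_<field>(doc) may be defined above; reindexIndex and
    # refreshCatalog below fall back to getattr(doc, field) when no such accessor exists.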
1028
1029    def reindexIndex(self, name, REQUEST,pghandler=None): ###(
1030        if not hasattr(self,'_v_certificates'):
1031            self._v_certificates = self.fill_certificates_dict()
1032        if isinstance(name, str):
1033            name = (name,)
1034        reindextypes = {}
1035        reindex_special = []
1036        for n in name:
1037            if n in ("review_state",):
1038                reindex_special.append(n)
1039            else:
1040                for pt in self.affected_types.keys():
1041                    if n in self.affected_types[pt]['fields']:
1042                        if reindextypes.has_key(pt):
1043                            reindextypes[pt].append(n)
1044                        else:
1045                            reindextypes[pt]= [n]
1046                        break
1047        #cached_data = {}
1048        #if set(name).intersection(set(('faculty','department','end_level','mode'))):
1049        #    cached_data = self.fill_certificates_dict()
1050        students = self.portal_catalog(portal_type="Student")
1051        if hasattr(self,'portal_catalog_real'):
1052            aq_portal = self.portal_catalog_real.evalAdvancedQuery
1053        else:
1054            aq_portal = self.portal_catalog.evalAdvancedQuery
1055        num_objects = len(students)
1056        if pghandler:
1057            pghandler.init('Refreshing catalog: %s' % self.absolute_url(1), num_objects)
1058        noattr = set(('StudentClearance','StudentPersonal')) & set(reindextypes.keys())
1059        #import pdb;pdb.set_trace()
1060        for i in xrange(num_objects):
1061            if pghandler: pghandler.report(i)
1062            student_brain = students[i]
1063            student_object = student_brain.getObject()
1064            data = {}
1065            modified = False
1066            sid = data['id'] = student_brain.getId
1067            if reindex_special and 'review_state' in reindex_special:
1068                modified = True
1069                data['review_state'] = student_object.portal_workflow.getInfoFor(student_object,'review_state',None)
1070            sub_objects = False
1071            for pt in reindextypes.keys():
1072                modified = True
1073                try:
1074                    doc = getattr(student_object,self.affected_types[pt]['id']).getContent()
1075                    sub_objects = True
1076                except:
1077                    continue
1078                for field in set(name).intersection(self.affected_types[pt]['fields']):
1079                    if hasattr(self,'get_from_doc_%s' % field):
1080                        data[field] = getattr(self,'get_from_doc_%s' % field)(doc)
1081                    else:
1082                        data[field] = getattr(doc,field)
1083            if not sub_objects and noattr:
1084                import_res = self.returning_import(id = sid)
1085                if not import_res:
1086                    continue
1087                import_record = import_res[0]
1088                data['matric_no'] = import_record.matric_no
1089                data['sex'] = import_record.Sex == 'F'
1090                data['name'] = "%s %s %s" % (import_record.Firstname,
1091                                             import_record.Middlename,
1092                                             import_record.Lastname)
1093                data['jamb_reg_no'] = import_record.Entryregno
1094            if modified:
1095                self.modifyRecord(**data)
1096        if pghandler: pghandler.finish()
1097    ###)
1098
1099    def refreshCatalog(self, clear=0, pghandler=None): ###(
1100        """ re-index everything we can find """
1101        students_folder = self.portal_url.getPortalObject().campus.students
1102        if clear:
1103            self._catalog.clear()
1104        students = self.portal_catalog(portal_type="Student")
1105        num_objects = len(students)
1106        #cached_data = self.fill_certificates_dict()
1107        if not hasattr(self,'_v_certificates'):
1108            self._v_certificates = self.fill_certificates_dict()
1109        if pghandler:
1110            pghandler.init('Refreshing catalog: %s' % self.absolute_url(1), num_objects)
1111        for i in xrange(num_objects):
1112            if pghandler: pghandler.report(i)
1113            student_brain = students[i]
1114            spath = student_brain.getPath()
1115            student_object = student_brain.getObject()
1116            data = {}
1117            sid = data['id'] = student_brain.getId
1118            #data['review_state'] = student_brain.review_state
1119            data['review_state'] = student_object.portal_workflow.getInfoFor(student_object,'review_state',None)
1120            sub_objects = False
1121            for pt in self.affected_types.keys():
1122                modified = True
1123                try:
1124                    doc = getattr(student_object,self.affected_types[pt]['id']).getContent()
1125                    sub_objects = True
1126                except:
1127                    #from pdb import set_trace;set_trace()
1128                    continue
1129                for field in self.affected_types[pt]['fields']:
1130                    if hasattr(self,'get_from_doc_%s' % field):
1131                        data[field] = getattr(self,'get_from_doc_%s' % field)(doc)
1133                    else:
1134                        data[field] = getattr(doc,field,None)
1135            if not sub_objects:
1136                import_res = self.returning_import(id = sid)
1137                if not import_res:
1138                    continue
1139                import_record = import_res[0]
1140                data['matric_no'] = import_record.matric_no
1141                data['sex'] = import_record.Sex == 'F'
1142                data['name'] = "%s %s %s" % (import_record.Firstname,
1143                                             import_record.Middlename,
1144                                             import_record.Lastname)
1145                data['jamb_reg_no'] = import_record.Entryregno
1146            self.addRecord(**data)
1147        if pghandler: pghandler.finish()
1148    ###)
1149
1150    security.declarePrivate('notify_event_listener') ###(
1151    def notify_event_listener(self,event_type,object,infos):
1152        "listen for events"
1153        if not infos.has_key('rpath'):
1154            return
1155        pt = getattr(object,'portal_type',None)
1156        mt = getattr(object,'meta_type',None)
1157        students_catalog = self
1158        data = {}
1159        if pt == 'Student' and\
1160           mt == 'CPS Proxy Folder' and\
1161           event_type.startswith('workflow'):
1162            data['id'] = object.getId()
1163            data['review_state'] = self.portal_workflow.getInfoFor(object,'review_state',None)
1164            students_catalog.modifyRecord(**data)
1165            return
1166        rpl = infos['rpath'].split('/')
1167        if pt == 'Student' and mt == 'CPS Proxy Folder':
1168            student_id = object.id
1169            if event_type == "sys_add_object":
1170                try:
1171                    self.addRecord(id = student_id)
1172                except ValueError:
1173                    pass
1174                return
1175            elif event_type == 'sys_del_object':
1176                self.deleteRecord(student_id)
1177        if pt not in self.affected_types.keys():
1178            return
1179        if event_type not in ('sys_modify_object',):
1180            return
1181        if mt == 'CPS Proxy Folder':
1182            return
1183        if not hasattr(self,'_v_certificates'):
1184            self._v_certificates = self.fill_certificates_dict()
1185        for field in self.affected_types[pt]['fields']:
1186            if hasattr(self,'get_from_doc_%s' % field):
1187                data[field] = getattr(self,'get_from_doc_%s' % field)(object)
1188            else:
1189                data[field] = getattr(object,field)
1190        data['id'] = rpl[2]
1191        self.modifyRecord(**data)
1192    ###)
1193
1194
1195InitializeClass(StudentsCatalog)
1196
1197###)
1198
1199class CertificatesCatalog(WAeUPTable): ###(
1200    security = ClassSecurityInfo()
1201
1202    meta_type = 'WAeUP Certificates Catalog'
1203    name =  "certificates_catalog"
1204    key = "code"
1205    def __init__(self,name=None):
1206        if name ==  None:
1207            name =  self.name
1208        WAeUPTable.__init__(self, name)
1209
1210    def manage_catalogReindex(self, REQUEST, RESPONSE, URL1): ###(
1211        """ clear the catalog, then re-index everything """
1212
1213        elapse = time.time()
1214        c_elapse = time.clock()
1215
1216        pgthreshold = self._getProgressThreshold()
1217        handler = (pgthreshold > 0) and ZLogHandler(pgthreshold) or None
1218        self.refreshCatalog(clear=1, pghandler=handler)
1219
1220        elapse = time.time() - elapse
1221        c_elapse = time.clock() - c_elapse
1222
1223        RESPONSE.redirect(
1224            URL1 +
1225            '/manage_catalogAdvanced?manage_tabs_message=' +
1226            urllib.quote('Catalog Updated \n'
1227                         'Total time: %s\n'
1228                         'Total CPU time: %s' % (`elapse`, `c_elapse`)))
1229    ###)
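    # In reindexIndex and refreshCatalog below, faculty and department are not stored
    # on the certificate document; they are read from its position in the portal path,
    # which has the form .../academics/<faculty>/<department>/<certificate>.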
1230
1231    def reindexIndex(self, name, REQUEST,pghandler=None): ###(
1232        if isinstance(name, str):
1233            name = (name,)
1234        certificates = self.portal_catalog(portal_type="Certificate")
1235        num_objects = len(certificates)
1236        if pghandler:
1237            pghandler.init('Refreshing catalog: %s' % self.absolute_url(1), num_objects)
1238        for i in xrange(num_objects):
1239            if pghandler: pghandler.report(i)
1240            certificate_brain = certificates[i]
1241            certificate_object = certificate_brain.getObject()
1242            pl = certificate_brain.getPath().split('/')
1243            data = {}
1244            cid = data[self.key] = certificate_brain.getId
1245            data['faculty'] = pl[-4]
1246            data['department'] = pl[-3]
1247            doc = certificate_object.getContent()
1248            for field in name:
1249                if field not in (self.key,'faculty','department'):
1250                    data[field] = getattr(doc,field)
1251            self.modifyRecord(**data)
1252        if pghandler: pghandler.finish()
1253    ###)
1254
1255    def refreshCatalog(self, clear=0, pghandler=None): ###(
1256        """ re-index everything we can find """
1257        if clear:
1258            self._catalog.clear()
1259        certificates = self.portal_catalog(portal_type="Certificate")
1260        num_objects = len(certificates)
1261        if pghandler:
1262            pghandler.init('Refreshing catalog: %s' % self.absolute_url(1), num_objects)
1263        #from pdb import set_trace;set_trace()
1264        for i in xrange(num_objects):
1265            if pghandler: pghandler.report(i)
1266            certificate_brain = certificates[i]
1267            certificate_doc = certificate_brain.getObject().getContent()
1268            pl = certificate_brain.getPath().split('/')
1269            data = {}
1270            for field in self.schema():
1271                data[field] = getattr(certificate_doc,field,None)
1272            data[self.key] = certificate_brain.getId
1273            ai = pl.index('academics')
1274            data['faculty'] = pl[ai +1]
1275            data['department'] = pl[ai +2]
1276            if clear:
1277                self.addRecord(**data)
1278            else:
1279                self.modifyRecord(**data)
1280        if pghandler: pghandler.finish()
1281    ###)
1282
1283    security.declarePrivate('notify_event_listener') ###(
1284    def notify_event_listener(self,event_type,object,infos):
1285        "listen for events"
1286        if not infos.has_key('rpath'):
1287            return
1288        pt = getattr(object,'portal_type',None)
1289        mt = getattr(object,'meta_type',None)
1290        if pt != 'Certificate':
1291            return
1292        data = {}
1293        rpl = infos['rpath'].split('/')
1294        if event_type not in ("sys_add_object","sys_modify_object","sys_del_object"):
1295            return
1296        certificate_id = object.getId()
1297        data[self.key] = certificate_id
1298        if event_type == "sys_add_object" and mt == 'CPS Proxy Folder':
1299            try:
1300                self.addRecord(**data)
1301            except ValueError:
1302                return
1303            certificate_id = object.getId()
1304            doc = object.getContent()
1305            if doc is None:
1306                return
1307            for field in self.schema():
1308                data[field] = getattr(doc,field,None)
1309            data[self.key] = certificate_id
1310            ai = rpl.index('academics')
1311            data['faculty'] = rpl[ai +1]
1312            data['department'] = rpl[ai +2]
1313            self.modifyRecord(**data)
1314            return
1315        if event_type == "sys_del_object":
1316            self.deleteRecord(certificate_id)
1317            return
1318        if event_type == "sys_modify_object" and mt == 'Certificate':
1319            #from pdb import set_trace;set_trace()
1320            for field in self.schema():
1321                data[field] = getattr(object,field,None)
1322            certificate_id = object.aq_parent.getId()
1323            data[self.key] = certificate_id
1324            ai = rpl.index('academics')
1325            data['faculty'] = rpl[ai +1]
1326            data['department'] = rpl[ai +2]
1327            self.modifyRecord(**data)
1328    ###)
1329
1330
1331InitializeClass(CertificatesCatalog)
1332###)
1333
1334class CoursesCatalog(WAeUPTable): ###(
1335    security = ClassSecurityInfo()
1336
1337    meta_type = 'WAeUP Courses Catalog'
1338    name =  "courses_catalog"
1339    key = "code"
1340    def __init__(self,name=None):
1341        if name ==  None:
1342            name =  self.name
1343        WAeUPTable.__init__(self, name)
1344
1345    def manage_catalogReindex(self, REQUEST, RESPONSE, URL1): ###(
1346        """ clear the catalog, then re-index everything """
1347
1348        elapse = time.time()
1349        c_elapse = time.clock()
1350
1351        pgthreshold = self._getProgressThreshold()
1352        handler = (pgthreshold > 0) and ZLogHandler(pgthreshold) or None
1353        self.refreshCatalog(clear=1, pghandler=handler)
1354
1355        elapse = time.time() - elapse
1356        c_elapse = time.clock() - c_elapse
1357
1358        RESPONSE.redirect(
1359            URL1 +
1360            '/manage_catalogAdvanced?manage_tabs_message=' +
1361            urllib.quote('Catalog Updated \n'
1362                         'Total time: %s\n'
1363                         'Total CPU time: %s' % (`elapse`, `c_elapse`)))
1364    ###)
1365
1366    def reindexIndex(self, name, REQUEST,pghandler=None): ###(
1367        if isinstance(name, str):
1368            name = (name,)
1369        courses = self.portal_catalog(portal_type="Course")
1370        num_objects = len(courses)
1371        if pghandler:
1372            pghandler.init('Refreshing catalog: %s' % self.absolute_url(1), num_objects)
1373        for i in xrange(num_objects):
1374            if pghandler: pghandler.report(i)
1375            course_brain = courses[i]
1376            course_object = course_brain.getObject()
1377            pl = course_brain.getPath().split('/')
1378            data = {}
1379            cid = data[self.key] = course_brain.getId
1380            data['faculty'] = pl[-4]
1381            data['department'] = pl[-3]
1382            doc = course_object.getContent()
1383            for field in name:
1384                if field not in (self.key,'faculty','department'):
1385                    data[field] = getattr(doc,field)
1386            self.modifyRecord(**data)
1387        if pghandler: pghandler.finish()
1388    ###)
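    # Usage sketch (illustrative field name): a single catalogue field can be re-read from the
    # course documents without a full refresh, e.g.
    #   courses_catalog.reindexIndex('title', REQUEST)
    # name may be a string or a tuple of field names from self.schema().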
1389
1390    def refreshCatalog(self, clear=0, pghandler=None): ###(
1391        """ re-index everything we can find """
1392        if clear:
1393            self._catalog.clear()
1394        courses = self.portal_catalog(portal_type="Course")
1395        num_objects = len(courses)
1396        if pghandler:
1397            pghandler.init('Refreshing catalog: %s' % self.absolute_url(1), num_objects)
1398        #from pdb import set_trace;set_trace()
1399        for i in xrange(num_objects):
1400            if pghandler: pghandler.report(i)
1401            course_brain = courses[i]
1402            course_doc = course_brain.getObject().getContent()
1403            pl = course_brain.getPath().split('/')
1404            data = {}
1405            for field in self.schema():
1406                data[field] = getattr(course_doc,field,None)
1407            data[self.key] = course_brain.getId
1408            ai = pl.index('academics')
1409            data['faculty'] = pl[ai +1]
1410            data['department'] = pl[ai +2]
1411            if clear:
1412                self.addRecord(**data)
1413            else:
1414                self.modifyRecord(**data)
1415        if pghandler: pghandler.finish()
1416    ###)
1417
1418    security.declarePrivate('notify_event_listener') ###(
1419    def notify_event_listener(self,event_type,object,infos):
1420        "listen for events"
1421        if not infos.has_key('rpath'):
1422            return
1423        pt = getattr(object,'portal_type',None)
1424        mt = getattr(object,'meta_type',None)
1425        if pt != 'Course':
1426            return
1427        data = {}
1428        rpl = infos['rpath'].split('/')
1429        if event_type not in ("sys_add_object","sys_modify_object","sys_del_object"):
1430            return
1431        course_id = object.getId()
1432        data[self.key] = course_id
1433        if event_type == "sys_add_object" and mt == 'CPS Proxy Folder':
1434            try:
1435                self.addRecord(**data)
1436            except ValueError:
1437                return
1438            course_id = object.getId()
1439            doc = object.getContent()
1440            if doc is None:
1441                return
1442            for field in self.schema():
1443                data[field] = getattr(doc,field,None)
1444            data[self.key] = course_id
1445            ai = rpl.index('academics')
1446            data['faculty'] = rpl[ai +1]
1447            data['department'] = rpl[ai +2]
1448            self.modifyRecord(**data)
1449            return
1450        if event_type == "sys_del_object":
1451            self.deleteRecord(course_id)
1452            return
1453        if event_type == "sys_modify_object" and mt == 'Course':
1454            #from pdb import set_trace;set_trace()
1455            for field in self.schema():
1456                data[field] = getattr(object,field,None)
1457            course_id = object.aq_parent.getId()
1458            data[self.key] = course_id
1459            ai = rpl.index('academics')
1460            data['faculty'] = rpl[ai +1]
1461            data['department'] = rpl[ai +2]
1462            self.modifyRecord(**data)
1463    ###)
1464
1465
1466InitializeClass(CoursesCatalog)
1467###)
1468
1469class CourseResults(WAeUPTable): ###(
1470    security = ClassSecurityInfo()
1471
1472    meta_type = 'WAeUP Results Catalog'
1473    name = "course_results"
1474    key = "key" # composite key "<student_id>|<level_id>|<course_id>"
1475    def __init__(self,name=None):
1476        if name ==  None:
1477            name = self.name
1478        WAeUPTable.__init__(self, name)
1479        self._queue = []
1480
1481    def addMultipleRecords(self, records): ###(
1482        """add many records"""
1483        existing_uids = []
1484        for data in records:
1485            uid = "%(student_id)s|%(level_id)s|%(course_id)s" % data
1486            data[self.key] = uid
1487            query = Eq(self.key, uid)
1488            res = self.course_results.evalAdvancedQuery(query)
1489            if len(res) > 0:
1490                rec = res[0]
1491                equal = True
1492                for attr in ('student_id','level_id','course_id'):
1493                    if getattr(rec,attr,'') != data[attr]:
1494                        equal = False
1495                        break
1496                if equal:
1497                    existing_uids.append(uid)
1498                    continue
1499            self.catalog_object(dict2ob(data), uid=uid)
1500        return existing_uids
1501    ###)
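    # Usage sketch (hypothetical values): every record needs at least student_id, level_id
    # and course_id, from which the composite uid is built, e.g.
    #   course_results.addMultipleRecords([
    #       {'student_id': 'A123456', 'level_id': '200', 'course_id': 'MTH101'},
    #   ])
    # records whose uid already exists with identical key fields are skipped and their uids
    # are returned.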
1502
1503    def deleteResultsHere(self,level_id,student_id): ###(
1504        query = Eq('student_id',student_id) & Eq('level_id', level_id)
1505        course_results = self.course_results.evalAdvancedQuery(query)
1506        #import pdb;pdb.set_trace()
1507        for result in course_results:
1508            self.deleteRecord(result.key)
1509    ###)
1510
1511    def moveResultsHere(self,level,student_id): ###(
1512        #import pdb;pdb.set_trace()
1513        level_id = level.getId()
1514        query = Eq('student_id',student_id) & Eq('level_id', level_id)
1515        course_results = self.course_results.evalAdvancedQuery(query)
1516        existing_courses = [cr.code for cr in course_results]
1517        to_delete = []
1518        for code,obj in level.objectItems():
1519            to_delete.append(code)
1520            carry_over = False
1521            if code.endswith('_co'):
1522                carry_over = True
1523                code  = code[:-3]
1524            if code in existing_courses:
1525                continue
1526            course_result_doc = obj.getContent()
1527            data = {}
1528            course_id = code
1529            for field in self.schema():
1530                data[field] = getattr(course_result_doc,field,'')
1531            data['key'] = key = "%(student_id)s|%(level_id)s|%(course_id)s" % vars()
1532            data['student_id'] = student_id
1533            data['level_id'] = level_id
1534            session_id = self.getLevelSession(level.getContent(),student_id,level_id)
1535            data['session_id'] = session_id
1536            #data['queue_status'] = OBJECT_CREATED
1537            data['code'] = course_id
1538            data['carry_over'] = carry_over
1539            self.catalog_object(dict2ob(data), uid=key)
1540        level.manage_delObjects(to_delete)
1541    ###)
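    # Note on carry-overs: level items whose id ends in '_co' are treated as carry-over
    # results; the suffix is stripped before cataloguing, so e.g. 'MTH101_co' is stored with
    # code 'MTH101' and carry_over = True. All items of the level are deleted afterwards.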
1542
1543    def getCourses(self,student_id,level_id): ###(
1544        query = Eq('student_id',student_id) & Eq('level_id', level_id)
1545        course_results = self.course_results.evalAdvancedQuery(query)
1546        carry_overs = []
1547        normal1 = []
1548        normal2 = []
1549        normal3 = []
1550        total_credits = 0
1551        gpa = 0
1552        for brain in course_results:
1553            d = {}
1554
1555            for field in self.schema():
1556                d[field] = getattr(brain,field,None)
1557                if repr(d[field]) == 'Missing.Value':
1558                    d[field] = ''
1559            d['weight'] = ''
1560            d['grade'] = ''
1561            d['score'] = ''
1562
1563            if str(brain.credits).isdigit():
1564                credits = int(brain.credits)
1565                total_credits += credits
1566                score = getattr(brain,'score',0)
1567                if score and str(score).isdigit() and int(score) > 0:
1568                    score = int(score)
1569                    grade,weight = self.getGradesFromScore(score,'')
1570                    gpa += weight * credits
1571                    d['weight'] = weight
1572                    d['grade'] = grade
1573                    d['score'] = score
1574
1575            #if str(brain.ca1).isdigit() and str(brain.ca2).isdigit() and str(brain.exam).isdigit():
1576            #    d['score_calc'] = int(brain.ca1) + int(brain.ca2) + int(brain.exam)
1577            #else:
1578            #    d['score_calc'] = ''
1579            try:
1580                d['score_calc'] = float(brain.ca1) + float(brain.ca2) + float(brain.exam)
1581            except:
1582                d['score_calc'] = ''
1583
1584            if d['score_calc']:
1585                grade = self.getGradesFromScore(d['score_calc'],level_id)
1586                d['grade'] = grade
1587
1588            d['coe'] = ''
1589            if brain.core_or_elective:
1590                d['coe'] = 'Core'
1591            elif brain.core_or_elective == False:
1592                d['coe'] = 'Elective'
1593            id = code = d['id'] = brain.code
1594            d['code'] = code
1595            res = self.courses_catalog.evalAdvancedQuery(Eq('code',code))
1596            if res:
1597                course = res[0]
1598                d['title'] = course.title
1599                # The courses_catalog contains strings and integers in its semester field.
1600                # Maybe this can be fixed by reindexing the catalog; the course schema declares the field as 'CPS Int Field'.
1601                d['semester'] = str(course.semester)
1602            else:
1603                d['title'] = "Course has been removed from the course list"
1604                d['semester'] = ''
1605            if brain.carry_over:
1606                d['coe'] = 'CO'
1607                carry_overs.append(d)
1608            else:
1609                if d['semester'] == '1':
1610                    normal1.append(d)
1611
1612                elif d['semester'] == '2':
1613                    normal2.append(d)
1614                else:
1615                    normal3.append(d)
1616        #normal.sort(cmp=lambda x,y: cmp("%(semester)s%(code)s" % x,
1617        #                                "%(semester)s%(code)s" % y))
1618        carry_overs.sort(cmp=lambda x,y: cmp("%(semester)s%(code)s" % x,
1619                                             "%(semester)s%(code)s" % y))
1620        return total_credits,gpa,carry_overs,normal1,normal2,normal3
1621    ###)
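    # getCourses returns (total_credits, gpa, carry_overs, normal1, normal2, normal3); gpa is
    # the credit-weighted sum of grade weights (weight * credits), not yet divided by
    # total_credits, so the caller presumably performs that division.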
1622
1623
1624    def getAllCourses(self,student_id): ###(
1625        query = Eq('student_id',student_id)
1626        course_results = self.course_results.evalAdvancedQuery(query)
1627        courses = []
1628        for brain in course_results:
1629            d = {}
1630
1631            for field in self.schema():
1632                d[field] = getattr(brain,field,'')
1633
1634            d['weight'] = ''
1635            d['grade'] = ''
1636            d['score'] = ''
1637
1638            if str(brain.credits).isdigit():
1639                credits = int(brain.credits)
1640                score = getattr(brain,'score',0)
1641                if score and str(score).isdigit() and int(score) > 0:
1642                    score = int(score)
1643                    grade,weight = self.getGradesFromScore(score)
1644                    d['weight'] = weight
1645                    d['grade'] = grade
1646                    d['score'] = score
1647            d['coe'] = ''
1648            if brain.core_or_elective:
1649                d['coe'] = 'Core'
1650            elif brain.core_or_elective == False:
1651                d['coe'] = 'Elective'
1652            id = code = d['id'] = brain.code
1653            d['code'] = code
1654            res = self.courses_catalog.evalAdvancedQuery(Eq('code',code))
1655            if res:
1656                course = res[0]
1657                d['title'] = course.title
1658                # The courses_catalog contains strings and integers in its semester field.
1659                # Maybe this can be fixed by reindexing the catalog; the course schema declares the field as 'CPS Int Field'.
1660                d['semester'] = str(course.semester)
1661            else:
1662                d['title'] = "Course has been removed from the course list"
1663                d['semester'] = ''
1664            if brain.carry_over:
1665                d['coe'] = 'CO'
1666            courses.append(d)
1667        return courses
1668    ###)
1669   
1670    def getYearGroupAverage(self,session_id,level_id): ###(
1671        query = Eq('session_id',session_id) & Eq('level_id',level_id)
1672        course_results = self.course_results.evalAdvancedQuery(query)
1673        yga1 = 0
1674        counter1 = 0
1675        yga2 = 0
1676        counter2 = 0
1677        yga3 = 0
1678        counter3 = 0       
1679        #import pdb;pdb.set_trace()
1680        for brain in course_results:
1681            try:
1682                if not (float(brain.ca1) + float(brain.ca2) + float(brain.exam) > 0):
1683                    continue
1684                code = brain.code               
1685                res = self.courses_catalog.evalAdvancedQuery(Eq('code',code))
1686                if res:
1687                    course = res[0]
1688                    # The courses_catalog contains strings and integers in its semester field.
1689                    # Maybe this can be fixed by reindexing the catalog; the course schema declares the field as 'CPS Int Field'.
1690                    semester = str(course.semester)
1691                else:
1692                    semester = ''
1693                if semester == '1':
1694                    counter1 += 1
1695                    yga1 += float(brain.ca1) + float(brain.ca2) + float(brain.exam)
1696                elif semester == '2':
1697                    counter2 += 1
1698                    yga2 += float(brain.ca1) + float(brain.ca2) + float(brain.exam)         
1699                elif semester == '3':
1700                    counter3 += 1
1701                    yga3 += float(brain.ca1) + float(brain.ca2) + float(brain.exam)
1702            except:
1703                continue               
1704        if counter1:
1705            yga1 /= counter1
1706            yga1 = '%.2f' % yga1   
1707        if counter2:
1708            yga2 /= counter2
1709            yga2 = '%.2f' % yga2   
1710        if counter3:
1711            yga3 /= counter3
1712            yga3 = '%.2f' % yga3                                   
1713        return yga1, yga2, yga3, counter1, counter2, counter3
1714    ###)
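    # Worked example (illustrative numbers): two first-semester results with score totals
    # (ca1 + ca2 + exam) of 60 and 70 yield counter1 = 2 and yga1 = '65.00'; a semester
    # without any positive score totals keeps its average at 0 and its counter at 0.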
1715   
1716    def exportRemoveAllCourses(self,student_id,export=False,remove=False): ###(
1717        "export a student's course results to CSV and/or remove them from the catalog"
1718        query = Eq('student_id',student_id)
1719        cr_catalog = self.course_results
1720        course_results = cr_catalog.evalAdvancedQuery(query)
1721        courses = []
1722        fields = self.schema()
1723        format = '"%(' + ')s","%('.join(fields) + ')s"'
1724        for brain in course_results:
1725            d = {}
1726            for field in fields:
1727                d[field] = getattr(brain,field,'')
1728            courses.append(format % d)
1729               
1730        if export:
1731            export_file = "%s/export/course_results_removed.csv" % (i_home)
1732            write_headline = not os.path.exists(export_file)
1733            file_handler = open(export_file,"a")
1734            if write_headline:
1735                headline = ','.join(fields)
1736                file_handler.write(headline +'\n')
1737            for line in courses:
1738                file_handler.write(line +'\n')
1739            file_handler.close()
1740
1741        if remove:
1742            for brain in course_results:
1743                key = getattr(brain,'key','')
1744                cr_catalog.deleteRecord(key)
1745       
1746        return courses
1747    ###)   
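    # Usage sketch (illustrative student id):
    #   course_results.exportRemoveAllCourses('A123456', export=True, remove=True)
    # appends the student's rows to <INSTANCE_HOME>/export/course_results_removed.csv and then
    # deletes them from the catalog; with both flags False only the formatted rows are returned.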
1748   
1749   
1750
1751InitializeClass(CourseResults)
1752###)
1753
1754class OnlinePaymentsImport(WAeUPTable): ###(
1755
1756    meta_type = 'WAeUP Online Payment Transactions'
1757    name = "online_payments_import"
1758    key = "order_id"
1759    def __init__(self,name=None):
1760        if name ==  None:
1761            name = self.name
1762        WAeUPTable.__init__(self, name)
1763
1764
1765InitializeClass(OnlinePaymentsImport)
1766###)
1767
1768class ReturningImport(WAeUPTable): ###(
1769
1770    meta_type = 'Returning Import Table'
1771    name = "returning_import"
1772    key = "matric_no"
1773    def __init__(self,name=None):
1774        if name ==  None:
1775            name = self.name
1776        WAeUPTable.__init__(self, name)
1777
1778
1779InitializeClass(ReturningImport)
1780###)
1781
1782class ResultsImport(WAeUPTable): ###(
1783
1784    meta_type = 'Results Import Table'
1785    name = "results_import"
1786    key = "key"
1787    def __init__(self,name=None):
1788        if name ==  None:
1789            name = self.name
1790        WAeUPTable.__init__(self, name)
1791
1792
1793InitializeClass(ResultsImport)
1794
1795###)
1796
1797class PaymentsCatalog(WAeUPTable): ###(
1798    security = ClassSecurityInfo()
1799
1800    meta_type = 'WAeUP Payments Catalog'
1801    name = "payments_catalog"
1802    key = "order_id"
1803    def __init__(self,name=None):
1804        if name ==  None:
1805            name = self.name
1806        WAeUPTable.__init__(self, name)
1807
1808
1809    security.declarePrivate('notify_event_listener') ###(
1810    def notify_event_listener(self,event_type,object,infos):
1811        "listen for events"
1812        if not infos.has_key('rpath'):
1813            return
1814        pt = getattr(object,'portal_type',None)
1815        mt = getattr(object,'meta_type',None)
1816        data = {}
1817        if pt != 'Payment':
1818            return
1819        if event_type == 'sys_del_object' and mt == 'CPS Proxy Folder':
1820            self.deleteRecord(object.getContent().order_id)
1821        if mt == 'CPS Proxy Folder':
1822            return # is handled only for the real object
1823        if event_type not in ('sys_modify_object'):
1824            return
1825        for field in self.schema():
1826            data[field] = getattr(object,field,'')
1827        rpl = infos['rpath'].split('/')
1828        #import pdb;pdb.set_trace()
1829        student_id = rpl[-4]
1830        data['student_id'] = student_id
1831        modified = False
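        # try to update an existing record first; if none exists (KeyError) fall back to
        # creating it below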
1832        try:
1833            self.modifyRecord(**data)
1834            modified = True
1835        except KeyError:
1836            #logger = logging.getLogger('WAeUPTables.PaymentsCatalog.%s' % self.__name__)
1837            #logger.info("could not modify entry for %(student_id)s with %(order_id)s" % data)
1838            pass
1839        if not modified:
1840            try:
1841                self.addRecord(**data)
1842            except:
1843                logger = logging.getLogger('WAeUPTables.PaymentsCatalog.notify_event_listener')
1844                logger.info("could not add or modify entry for %(student_id)s with %(order_id)s" % data)
1845        ###)
1846
1847
1848    def exportRemoveAllPayments(self,student_id,export=False,remove=False): ###(
1849        "export a student's payments to CSV and/or remove them from the catalog"
1850        query = Eq('student_id',student_id)
1851        pm_catalog = self.payments_catalog
1852        payments = pm_catalog.evalAdvancedQuery(query)
1853        payments_dic = []
1854        fields = self.schema()
1855        format = '"%(' + ')s","%('.join(fields) + ')s"'
1856        for brain in payments:
1857            d = {}
1858            for field in fields:
1859                d[field] = getattr(brain,field,'')
1860            payments_dic.append(format % d)
1861               
1862        if export:
1863            export_file = "%s/export/payments_removed.csv" % (i_home)
1864            write_headline = not os.path.exists(export_file)
1865            file_handler = open(export_file,"a")
1866            if write_headline:
1867                headline = ','.join(fields)
1868                file_handler.write(headline +'\n')
1869            for line in payments_dic:
1870                file_handler.write(line +'\n')
1871            file_handler.close()
1872
1873        if remove:
1874            for brain in payments:
1875                order_id = getattr(brain,'order_id','')
1876                pm_catalog.deleteRecord(order_id)
1877       
1878        return payments_dic
1879    ###)   
1880
1881InitializeClass(PaymentsCatalog)
1882
1883###)
1884
1885# BBB:
1886AccomodationTable = AccommodationTable