source: WAeUP_SRP/base/WAeUPTables.py @ 3403

Last change on this file since 3403 was 3362, checked in by joachim, 17 years ago

fix for #112 okene, make create level more error-tolerant

  • Property svn:keywords set to Id
File size: 63.8 KB
Line 
1#-*- mode: python; mode: fold -*-
2# (C) Copyright 2005 AixtraWare <http://aixtraware.de>
3# Author: Joachim Schmitz <js@aixtraware.de>
4#
5# This program is free software; you can redistribute it and/or modify
6# it under the terms of the GNU General Public License version 2 as published
7# by the Free Software Foundation.
8#
9# This program is distributed in the hope that it will be useful,
10# but WITHOUT ANY WARRANTY; without even the implied warranty of
11# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
12# GNU General Public License for more details.
13#
14# You should have received a copy of the GNU General Public License
15# along with this program; if not, write to the Free Software
16# Foundation, Inc., 59 Temple Place - Suite 330, Boston, MA
17# 02111-1307, USA.
18#
19# $Id: WAeUPTables.py 3362 2008-03-20 16:20:15Z joachim $
20
21from zope.interface import implements
22from Globals import InitializeClass
23from Products.ZCatalog.ZCatalog import ZCatalog
24from Products.ZCatalog.ProgressHandler import ZLogHandler
25from AccessControl import ClassSecurityInfo
26from Products.CMFCore.permissions import ModifyPortalContent
27from Products.CMFCore.utils import getToolByName
28from Products.CMFCore.CatalogTool import CatalogTool
29from Products.CPSSchemas.StorageAdapter import MappingStorageAdapter
30from Products.CPSSchemas.DataStructure import DataStructure
31from Products.CPSSchemas.DataModel import DataModel
32from Products.AdvancedQuery import Eq, Between, Le,In
33import urllib
34import DateTime,time
35import csv,re
36import logging
37import Globals
p_home = Globals.package_home(globals())  # filesystem home of this product package
i_home = Globals.INSTANCE_HOME  # filesystem home of the Zope instance

# Status marker strings stored in catalog records.
ADDING_SHEDULED = "adding_sheduled"  # NOTE: historic misspelling ("sheduled") kept -- it is a persisted value
OBJECT_CREATED = "object_created"
NOT_OCCUPIED = 'not_occupied'
44
45from interfaces import IWAeUPTable
46
class AttributeHolder(object):
    """Bare object used as an attribute bag (see dict2ob)."""
    pass

def dict2ob(dict):
    """Return an AttributeHolder whose attributes mirror the mapping's items."""
    # NOTE: the parameter name shadows the builtin ``dict``; kept unchanged
    # for call-compatibility with any existing keyword callers.
    holder = AttributeHolder()
    holder.__dict__.update(dict)
    return holder
55
class WAeUPTable(ZCatalog): ###(
    """Base class for the WAeUP catalog tables.

    A thin record-store layer on top of ZCatalog: every record is a flat
    mapping which is catalogued under the value of its primary-key field.
    Subclasses must define ``meta_type``, ``name`` and ``key`` (the name
    of the primary-key field).
    """

    implements(IWAeUPTable)
    security = ClassSecurityInfo()
    meta_type = None

    def __init__(self, name=None):
        # Fall back to the class-level default so subclasses only need to
        # define a ``name`` attribute.
        if name is None:
            name = self.name
        ZCatalog.__init__(self, name)

    def refreshCatalog(self, clear=0, pghandler=None): ###(
        """Don't refresh for a normal table; redirect with a notice instead."""
        if self.REQUEST and self.REQUEST.RESPONSE:
            # Bug fix: the original referenced an undefined name URL1 here,
            # which raised a NameError whenever this was hit through the ZMI.
            url1 = self.REQUEST.get('URL1', '')
            self.REQUEST.RESPONSE.redirect(
              url1 +
              '/manage_catalogAdvanced?manage_tabs_message=Catalog%20refresh%20not%20implemented')

###)

    def manage_catalogClear(self, REQUEST=None, RESPONSE=None, URL1=None): ###(
        """ clears the whole enchilada """
        self._catalog.clear()
        if REQUEST and RESPONSE:
            RESPONSE.redirect(
              URL1 +
              '/manage_catalogAdvanced?manage_tabs_message=Catalog%20cleared')

###)

    def record2dict(self, fields, record): ###(
        """Return a dict holding ``fields`` taken from a catalog ``record``.

        The boolean ``sex`` field is dumped as 'F' (True) / 'M' (False);
        ``lga`` and ``aos`` codes are translated through the portal
        vocabularies when possible.  Missing or false values become ''.
        """
        d = {}
        for key in fields:
            v = getattr(record, key, None)
            v_dump = v
            if key == 'sex':
                # ``== True``/``== False`` deliberately also matches 1/0.
                if v == True:
                    v_dump = 'F'
                elif v == False:
                    v_dump = 'M'
                d[key] = v_dump
            elif v:
                if key == 'lga':
                    v_dump = self.portal_vocabularies.local_gov_areas.get(v)
                    if not v_dump:
                        # Unknown code: keep the raw value.
                        v_dump = v
                elif key == 'aos':
                    v_dump = self.portal_vocabularies.aos.get(v)
                d[key] = v_dump
            else:
                d[key] = ''
        return d

###)

    def addRecord(self, **data): ###(
        """Catalog a new record keyed by data[self.key].

        Raises ValueError when a record with that uid already exists.
        Returns the uid.
        """
        uid = data[self.key]
        res = self.searchResults({"%s" % self.key : uid})
        if len(res) > 0:
            raise ValueError("More than one record with uid %s" % uid)
        self.catalog_object(dict2ob(data), uid=uid)
        return uid

###)

    def deleteRecord(self, uid):
        """Remove the record catalogued under ``uid``."""
        self.uncatalog_object(uid)

    def getRecordByKey(self, key):
        """Return the first record whose primary key equals ``key``, or None."""
        if not key:
            return None
        res = self.evalAdvancedQuery(Eq(self.key, key))
        if res:
            return res[0]
        return None

    def searchAndSetRecord(self, **data):
        # Bug fix: ``raise NotImplemented`` raised the non-exception
        # NotImplemented constant; NotImplementedError is the correct type.
        raise NotImplementedError

    def modifyRecord(self, record=None, **data): ###(
        """Re-catalog an existing record with ``data`` merged in.

        Looks the record up by data[self.key] unless ``record`` is given.
        Raises KeyError when no record exists for the uid and ValueError
        on duplicate uids.
        """
        uid = data[self.key]
        if record is None:
            records = self.searchResults({"%s" % self.key : uid})
            if len(records) > 1:
                # Can not happen, but anyway...
                raise ValueError("More than one record with uid %s" % uid)
            if len(records) == 0:
                raise KeyError("No record for uid %s" % uid)
            record = records[0]
        record_data = {}
        # Start from the full stored record so unmentioned fields survive.
        for field in self.schema() + self.indexes():
            record_data[field] = getattr(record, field)
        # Add the updated data:
        record_data.update(data)
        self.catalog_object(dict2ob(record_data), uid)

###)

    def reindexIndex(self, name, REQUEST, pghandler=None): ###(
        """Rebuild the index(es) ``name`` for every record in the table."""
        if isinstance(name, str):
            name = (name,)
        paths = self._catalog.uids.items()
        for p, rid in paths:
            metadata = self.getMetadataForRID(rid)
            record_data = {}
            for field in name:
                record_data[field] = metadata.get(field)
            uid = metadata.get(self.key)
            # Only the named indexes are refreshed; metadata stays untouched.
            self.catalog_object(dict2ob(record_data), uid, idxs=name,
                                update_metadata=0)

###)

    security.declareProtected(ModifyPortalContent,"exportAllRecords") ###(
    def exportAllRecords(self):
        "export a WAeUPTable"
        fields = [field for field in self.schema()]
        format = ','.join(['"%%(%s)s"' % fn for fn in fields])
        # Renamed from ``csv``: the original shadowed the imported csv module.
        rows = []
        rows.append(','.join(['"%s"' % fn for fn in fields]))
        for uid in self._catalog.uids:
            records = self.searchResults({"%s" % self.key : uid})
            if len(records) > 1:
                # Can not happen, but anyway...
                raise ValueError("More than one record with uid %s" % uid)
            if len(records) == 0:
                raise KeyError("No record for uid %s" % uid)
            rows.append(format % records[0])
        current = DateTime.DateTime().strftime("%d-%m-%y_%H_%M_%S")
        out = open("%s/import/%s-%s.csv" % (i_home,self.getId(),current),"w+")
        try:
            out.write('\n'.join(rows))
        finally:
            # Bug fix: the file handle was never closed.
            out.close()

###)

    security.declareProtected(ModifyPortalContent,"dumpAll")###(
    def dumpAll(self):
        """dump all data in the table to a csv"""
        member = self.portal_membership.getAuthenticatedMember()
        logger = logging.getLogger('WAeUPTables.WAeUPTable.dumpAll')
        current = DateTime.DateTime().strftime("%d-%m-%y_%H_%M_%S")
        export_file = "%s/export/%s_%s.csv" % (i_home,self.__name__,current,)
        if hasattr(self,"export_keys"):
            fields = self.export_keys
        else:
            fields = [f for f in self.schema()]
        headline = ','.join(fields)
        out = open(export_file,"wb")
        out.write(headline +'\n')
        out.close()
        out = open(export_file,"a")
        csv_writer = csv.DictWriter(out,fields,)
        records = self()
        nr2export = len(records)
        logger.info('%s starts dumping, %s records to export' % (member,nr2export))
        chunk = 2000
        total = 0
        start = DateTime.DateTime().timeTime()
        start_chunk = start
        lines = []
        for record in records:
            lines.append(self.record2dict(fields,record))
            total += 1
            # Flush in chunks so memory stays bounded and progress is logged.
            if total and not total % chunk or total == len(records):
                csv_writer.writerows(lines)
                anz = len(lines)
                logger.info("wrote %(anz)d  total written %(total)d" % vars())
                end_chunk = DateTime.DateTime().timeTime()
                duration = end_chunk-start_chunk
                per_record = duration/anz
                till_now = end_chunk - start
                avarage_per_record = till_now/total
                estimated_end = DateTime.DateTime(start + avarage_per_record * nr2export)
                estimated_end = estimated_end.strftime("%H:%M:%S")
                logger.info('%(duration)4.1f, %(per_record)4.3f,end %(estimated_end)s' % vars())
                start_chunk = DateTime.DateTime().timeTime()
                lines = []
        # Bug fix: the export file handle was never closed.
        out.close()
        end = DateTime.DateTime().timeTime()
        logger.info('total time %6.2f m' % ((end-start)/60))
        msg = "wrote %(total)d records to %(export_file)s" % vars()
        logger.info(msg)
        url = self.REQUEST.get('URL2')
        return self.REQUEST.RESPONSE.redirect(url)
    ###)

    security.declarePrivate("_import_old") ###(
    def _import_old(self,filename,schema,layout, mode,logger):
        """Import data from <i_home>/import/<filename>.csv.

        Each row is validated against the widgets of ``layout``; valid
        rows are collected as DataModels in d['valid_records'] and echoed
        to the *_imported csv, invalid ones (plus an Error column) go to
        the *_not_imported csv.  Returns a result dict with counts,
        record lists and file names.
        """
        pm = self.portal_membership
        member = pm.getAuthenticatedMember()
        current = DateTime.DateTime().strftime("%d-%m-%y_%H_%M_%S")
        import_fn = "%s/import/%s.csv" % (i_home,filename)
        imported_fn = "%s/import/%s_imported%s.csv" % (i_home,filename,current)
        not_imported_fn = "%s/import/%s_not_imported%s.csv" % (i_home,filename,current)
        start = True
        tr_count = 1
        total_imported = 0
        total_not_imported = 0
        total = 0
        not_imported = []
        imported = []
        valid_records = []
        invalid_records = []
        d = {}
        d['mode'] = mode
        d['imported'] = total_imported
        d['not_imported'] = total_not_imported
        d['valid_records'] = valid_records
        d['invalid_records'] = invalid_records
        d['import_fn'] = import_fn
        d['imported_fn'] = imported_fn
        d['not_imported_fn'] = not_imported_fn
        if schema is None:
            logger.error('No schema specified')
            return d
        if layout is None:
            logger.error('No layout specified')
            return d
        validators = {}
        for widget in layout.keys():
            try:
                validators[widget] = layout[widget].validate
            except AttributeError:
                logger.info('%s has no validate attribute' % widget)
                return d
        try:
            items = csv.DictReader(open(import_fn,"rb"),
                                   dialect="excel",
                                   skipinitialspace=True)
        except:
            # Best-effort: any open/parse failure is reported, not raised.
            logger.error('Error reading %s.csv' % filename)
            return d
        for item in items:
            if start:
                start = False
                logger.info('%s starts import from %s.csv' % (member,filename))
                # Re-read the raw heading to get the column order.
                attrs = csv.reader(open("%s/import/%s.csv" % (i_home,filename),"rb"),
                                   dialect="excel",
                                   skipinitialspace=True).next()
                # Upper-case and 'ignore*' columns are deliberately skipped.
                import_keys = [k for k in attrs if not (k.startswith('ignore') or k.isupper())]
                diff2layout = set(import_keys).difference(set(layout.keys()))
                if diff2layout:
                    em = "not ignorable key(s) %s found in heading" % diff2layout
                    logger.info(em)
                    return d
                s = ','.join(['"%s"' % fn for fn in import_keys])
                open(not_imported_fn,"a").write(s + ',"Error"'+ '\n')
                open(imported_fn,"a").write(s + '\n')
                format = ','.join(['"%%(%s)s"' % fn for fn in import_keys])
                format_error = format + ',"%(Error)s"'
                adapters = [MappingStorageAdapter(schema, item)]
            dm = DataModel(item, adapters,context=self)
            ds = DataStructure(data=item,datamodel=dm)
            error_string = ""
            for k in import_keys:
                if not validators[k](ds,mode=mode):
                    error_string += " %s : %s" % (k,ds.getError(k))
            if error_string:
                item['Error'] = error_string
                invalid_records.append(dm)
                not_imported.append(format_error % item)
                total_not_imported += 1
            else:
                valid_records.append(dm)
                imported.append(format % item)
                tr_count += 1
                total_imported += 1
            total += 1
        if len(imported) > 0:
            open(imported_fn,"a").write('\n'.join(imported))
        if len(not_imported) > 0:
            open(not_imported_fn,"a").write('\n'.join(not_imported))
        d['imported'] = total_imported
        d['not_imported'] = total_not_imported
        return d
    ###)

    security.declarePrivate("_import") ###(
    def _import_new(self,csv_items,schema, layout, mode,logger):
        """Import data from a csv.DictReader instance.

        Validates every row; valid rows are collected as DataModels in
        d['valid_records'], invalid rows (with an Error key) as plain
        dicts in d['invalid_records'].  Returns the result dict.
        """
        start = True
        tr_count = 1
        total_imported = 0
        total_not_imported = 0
        total = 0
        valid_records = []
        invalid_records = []
        duplicate_records = []
        d = {}
        d['mode'] = mode
        d['valid_records'] = valid_records
        d['invalid_records'] = invalid_records
        # Bug fix: this assignment used to overwrite d['invalid_records'].
        d['duplicate_records'] = duplicate_records
        validators = {}
        for widget in layout.keys():
            try:
                validators[widget] = layout[widget].validate
            except AttributeError:
                logger.info('%s has no validate attribute' % widget)
                return d
        import_keys = []
        adapters = []
        for item in csv_items:
            if start:
                start = False
                logger.info('start import')
                # Bug fix: derive the keys from the first row instead of
                # re-opening a csv file by the undefined name ``filename``.
                import_keys = [k for k in item.keys()
                               if not (k.startswith('ignore') or k.isupper())]
                diff2layout = set(import_keys).difference(set(layout.keys()))
                if diff2layout:
                    em = "not ignorable key(s) %s found in heading" % diff2layout
                    logger.info(em)
                    return d
                adapters = [MappingStorageAdapter(schema, item)]
            dm = DataModel(item, adapters,context=self)
            ds = DataStructure(data=item,datamodel=dm)
            error_string = ""
            for k in import_keys:
                if not validators[k](ds,mode=mode):
                    error_string += " %s : %s" % (k,ds.getError(k))
            if error_string:
                item['Error'] = error_string
                invalid_records.append(item)
                total_not_imported += 1
            else:
                # Bug fix: removed ``em = format % item`` which referenced
                # an undefined name and raised on the first valid record.
                valid_records.append(dm)
                tr_count += 1
                total_imported += 1
            total += 1
        d['imported'] = total_imported
        d['not_imported'] = total_not_imported
        d['valid_records'] = valid_records
        d['invalid_records'] = invalid_records
        return d
    ###)

    security.declarePublic("missingValue")###(
    def missingValue(self):
        """Return the 'Missing.MV' singleton used for absent catalog values."""
        from Missing import MV
        return MV
    ###)
###)
478
class AccommodationTable(WAeUPTable): ###(
    """Catalog of hostel beds.

    A record's ``student`` field holds either the id of the occupant or
    the NOT_OCCUPIED marker.  The primary key is the bed id.
    """

    meta_type = 'WAeUP Accommodation Tool'
    name = "portal_accommodation"
    key = "bed"
    not_occupied = NOT_OCCUPIED

    def __init__(self, name=None):
        # Default to the class-level catalog name.
        if name is None:
            name = self.name
        WAeUPTable.__init__(self, name)

    def searchAndReserveBed(self, student_id, bed_type): ###(
        """Reserve the first free bed of ``bed_type`` for ``student_id``.

        Returns (1, bed) on success, (-1, message) when the student has
        already booked a bed, and (-2, message) when no bed is free.
        """
        already = self.evalAdvancedQuery(Eq('student', student_id))
        if len(already) > 0:
            return -1, "Student with Id %s already booked bed %s." % (student_id, already[0].bed)

        free_query = Eq('bed_type', bed_type) & Eq('student', NOT_OCCUPIED)
        free_beds = self.evalAdvancedQuery(free_query, sortSpecs=('sort_id', 'bed'))
        if len(free_beds) == 0:
            return -2, "No bed available"
        chosen = free_beds[0]
        self.modifyRecord(bed=chosen.bed, student=student_id)
        s_logger = logging.getLogger('WAeUPTables.AccommodationTable.searchAndReserveBed')
        s_logger.info('%s reserved bed %s' % (student_id, chosen.bed))
        return 1, chosen.bed
    ###)


InitializeClass(AccommodationTable)

###)
513
class PinTable(WAeUPTable): ###(
    """Catalog of access (scratch-card) pins, keyed by the pin itself."""
    # Imported at class level so it is reachable as an attribute from
    # method scope (see searchAndSetRecord).
    from ZODB.POSException import ConflictError
    security = ClassSecurityInfo()
    meta_type = 'WAeUP Pin Tool'
    name = "portal_pins"
    key = 'pin'

    def __init__(self, name=None):
        # Default to the class-level catalog name.
        if name is None:
            name = self.name
        WAeUPTable.__init__(self, name)

    security.declareProtected(ModifyPortalContent,"dumpAll")###(
    def dumpAll(self,include_unused=None):
        """dump all data in the table to a csv"""
        member = self.portal_membership.getAuthenticatedMember()
        logger = logging.getLogger('WAeUPTables.PinTable.dumpAll')
        # Improvement: refuse before any file is created -- the original
        # opened the export file first and leaked an empty file plus an
        # open handle on refusal.
        if include_unused is not None and str(member) not in ('admin','joachim'):
            logger.info('%s tries to dump pintable with unused pins' % (member))
            return
        current = DateTime.DateTime().strftime("%d-%m-%y_%H_%M_%S")
        export_file = "%s/export/%s_%s.csv" % (i_home,self.__name__,current,)
        if hasattr(self,"export_keys"):
            fields = self.export_keys
        else:
            fields = [f for f in self.schema()]
        headline = ','.join(fields)
        out = open(export_file,"wb")
        out.write(headline +'\n')
        out.close()
        out = open(export_file,"a")
        csv_writer = csv.DictWriter(out,fields,)
        if include_unused is not None:
            records = self()
        else:
            # Only pins already assigned to a student.
            records = self.evalAdvancedQuery(~Eq('student',''))
        nr2export = len(records)
        logger.info('%s starts dumping, %s records to export' % (member,nr2export))
        chunk = 2000
        total = 0
        start = DateTime.DateTime().timeTime()
        start_chunk = start
        lines = []
        for record in records:
            lines.append(self.record2dict(fields,record))
            total += 1
            # Flush in chunks so memory stays bounded and progress is logged.
            if total and not total % chunk or total == len(records):
                csv_writer.writerows(lines)
                anz = len(lines)
                logger.info("wrote %(anz)d  total written %(total)d" % vars())
                end_chunk = DateTime.DateTime().timeTime()
                duration = end_chunk-start_chunk
                per_record = duration/anz
                till_now = end_chunk - start
                avarage_per_record = till_now/total
                estimated_end = DateTime.DateTime(start + avarage_per_record * nr2export)
                estimated_end = estimated_end.strftime("%H:%M:%S")
                logger.info('%(duration)4.1f, %(per_record)4.3f,end %(estimated_end)s' % vars())
                start_chunk = DateTime.DateTime().timeTime()
                lines = []
        # Bug fix: the export file handle was never closed.
        out.close()
        end = DateTime.DateTime().timeTime()
        logger.info('total time %6.2f m' % ((end-start)/60))
        msg = "wrote %(total)d records to %(export_file)s" % vars()
        logger.info(msg)
        url = self.REQUEST.get('URL2')
        return self.REQUEST.RESPONSE.redirect(url)
    ###)

    def searchAndSetRecord(self, uid, student_id, prefix):
        """Assign pin ``uid`` to ``student_id`` if it is still unused.

        Return codes (code, record):
          -1  no record for the pin (record is None)
           0  pin already used by a different student
           1  pin was free and has now been assigned
           2  pin already used by the same student, or a ZODB write
              conflict occurred while assigning
          -3  fallback (not reachable through the branches above)
        """
        # NOTE(review): a duplicate-payment guard by prefix ('CLR'/'APP')
        # was removed here historically; re-adding it requires resetting
        # the portal_pins table first.
        records = self.searchResults({"%s" % self.key : uid})
        if len(records) > 1:
            # Can not happen, but anyway...
            raise ValueError("More than one record with uid %s" % uid)
        if len(records) == 0:
            return -1,None
        record = records[0]
        if record.student == "":
            record_data = {}
            for field in self.schema() + self.indexes():
                record_data[field] = getattr(record, field)
            # Add the updated data:
            record_data['student'] = student_id
            try:
                self.catalog_object(dict2ob(record_data), uid)
                return 1,record
            # Bug fix: the bare name ConflictError is bound in the class
            # namespace, which is NOT visible from method scope -- the
            # original raised NameError whenever a conflict occurred.
            except self.ConflictError:
                return 2,record
        if record.student.upper() != student_id.upper():
            return 0,record
        if record.student.upper() == student_id.upper():
            return 2,record
        return -3,record

InitializeClass(PinTable)
###)
639
class PumeResultsTable(WAeUPTable): ###(
    """Catalog of PUME screening results, keyed by the JAMB registration number."""

    meta_type = 'WAeUP PumeResults Tool'
    name = "portal_pumeresults"
    key = "jamb_reg_no"

    def __init__(self, name=None):
        # Default to the class-level catalog name.
        if name is None:
            name = self.name
        WAeUPTable.__init__(self, name)


InitializeClass(PumeResultsTable)

###)
654
class ApplicantsCatalog(WAeUPTable): ###(
    # Catalog of application records; the primary key is the JAMB
    # registration number (``reg_no``).

    meta_type = 'WAeUP Applicants Catalog'
    name = "applicants_catalog"
    key = "reg_no"
    security = ClassSecurityInfo()
    # Disabled explicit export column list; while absent, dumpAll falls
    # back to the full schema (see WAeUPTable.dumpAll).
    #export_keys = (
    #               "reg_no",
    #               "status",
    #               "lastname",
    #               "sex",
    #               "date_of_birth",
    #               "lga",
    #               "email",
    #               "phone",
    #               "passport",
    #               "entry_mode",
    #               "pin",
    #               "screening_type",
    #               "registration_date",
    #               "testdate",
    #               "application_date",
    #               "screening_date",
    #               "faculty",
    #               "department",
    #               "course1",
    #               "course2",
    #               "course3",
    #               "eng_score",
    #               "subj1",
    #               "subj1score",
    #               "subj2",
    #               "subj2score",
    #               "subj3",
    #               "subj3score",
    #               "aggregate",
    #               "course_admitted",
    #               )
693
694    def __init__(self,name=None):
695        if name ==  None:
696            name = self.name
697        WAeUPTable.__init__(self, name)
698
699    security.declareProtected(ModifyPortalContent,"new_importCSV")###(
700    def new_importCSV(self,filename="JAMB_data",
701                  schema_id="application",
702                  layout_id="import_application",
703                  mode='add'):
704        """ import JAMB data """
705        current = DateTime.DateTime().strftime("%d-%m-%y_%H_%M_%S")
706        pm = self.portal_membership
707        member = pm.getAuthenticatedMember()
708        logger = logging.getLogger('WAeUPTables.ApplicantsCatalog.importCSV')
709        lock_fn = "%s/import/%s_import_lock" % (i_home,filename)
710        import_fn = "%s/import/%s.csv" % (i_home,filename)
711        if mode not in ('add','edit'):
712            logger.info("invalid mode: %s" % mode)
713        if os.path.exists(lock_fn):
714            logger.info("import of %(import_fn)s already in progress" % vars())
715            return
716        lock_file = open(lock_fn,"w")
717        lock_file.write("%(current)s \n" % vars())
718        lock_file.close()
719        invalid_fn = "%s/import/%s_not_imported%s.csv" % (i_home,filename,current)
720        duplicate_fn = "%s/import/%s_not_imported%s.csv" % (i_home,filename,current)
721        stool = getToolByName(self, 'portal_schemas')
722        ltool = getToolByName(self, 'portal_layouts')
723        schema = stool._getOb(schema_id)
724        if schema is None:
725            em = 'No such schema %s' % schema_id
726            logger.error(em)
727            return
728        for postfix in ('_import',''):
729            layout_name = "%(layout_id)s%(postfix)s" % vars()
730            if hasattr(ltool,layout_name):
731                break
732        layout = ltool._getOb(layout_name)
733        if layout is None:
734            em = 'No such layout %s' % layout_id
735            logger.error(em)
736            return
737        try:
738            csv_file = csv.DictReader(open(import_fn,"rb"))
739        except:
740            em = 'Error reading %s.csv' % filename
741            logger.error(em)
742            return
743        d = self._import_new(csv_items,schema,layout,mode,logger)
744        imported = []
745        edited = []
746        duplicates = []
747        not_found = []
748        if len(d['valid_records']) > 0:
749            for record in d['valid_records']:
750                #import pdb;pdb.set_trace()
751                if mode == "add":
752                    try:
753                        self.addRecord(**dict(record.items()))
754                        imported.append(**dict(record.items()))
755                        logger.info("added %s" % record.items())
756                    except ValueError:
757                        dupplicate.append(**dict(record.items()))
758                        logger.info("duplicate %s" % record.items())
759                elif mode == "edit":
760                    try:
761                        self.modifyRecord(**dict(record.items()))
762                        edited.append(**dict(record.items()))
763                        logger.info("edited %s" % record.items())
764                    except KeyError:
765                        not_found.append(**dict(record.items()))
766                        logger.info("not found %s" % record.items())
767        invalid = d['invalid_records']
768        for itype in ("imported","edited","not_found","duplicate","invalid"):
769            outlist = locals[itype]
770            if len(outlist):
771                d = {}
772                for k in outlist[0].keys():
773                    d[k] = k
774                outlist[0] = d
775                outfile = open("file_name_%s" % itype,'w')
776                csv.DictWriter(outfile,outlist[0].keys()).writerows(outlist)
777                logger.info("wrote %(itype)s records to %(, written to %(not_imported_fn)s" % d)
778###)
779
780    security.declareProtected(ModifyPortalContent,"importCSV")###(
781    def importCSV(self,filename="JAMB_data",
782                  schema_id="application",
783                  layout_id="application_pce",
784                  mode='add'):
785        """ import JAMB data """
786        stool = getToolByName(self, 'portal_schemas')
787        ltool = getToolByName(self, 'portal_layouts')
788        schema = stool._getOb(schema_id)
789        if schema is None:
790            em = 'No such schema %s' % schema_id
791            logger.error(em)
792            return
793        layout = ltool._getOb(layout_id)
794        if layout is None:
795            em = 'No such layout %s' % layout_id
796            logger.error(em)
797            return
798        logger = logging.getLogger('WAeUPTables.ApplicantsCatalog.importCSV')
799        d = self._import_old(filename,schema,layout,mode,logger)
800        if len(d['valid_records']) > 0:
801            for record in d['valid_records']:
802                #import pdb;pdb.set_trace()
803                if mode == "add":
804                    self.addRecord(**dict(record.items()))
805                    logger.info("added %s" % record.items())
806                elif mode == "edit":
807                    self.modifyRecord(**dict(record.items()))
808                    logger.info("edited %s" % record.items())
809                else:
810                    logger.info("invalid mode: %s" % mode)
811        logger.info("%(mode)sed %(imported)d records, invalid written to %(not_imported_fn)s" % d)
812    ###)
813
# Apply the Zope 2 ClassSecurityInfo declarations made on the class.
InitializeClass(ApplicantsCatalog)
815
816###)
817
818class StudentsCatalog(WAeUPTable): ###(
819    security = ClassSecurityInfo()
820
821    meta_type = 'WAeUP Students Catalog'
822    name = "students_catalog"
823    key = "id"
824    affected_types = {   ###(
825                      'StudentApplication':
826                      {'id': 'application',
827                       'fields':
828                       ('jamb_reg_no',
829                        'entry_mode',
830                        #'entry_level',
831                        'entry_session',
832                       )
833                      },
834                      'StudentClearance':
835                      {'id': 'clearance',
836                       'fields':
837                       ('matric_no',
838                        'lga',
839                       )
840                      },
841                      'StudentPersonal':
842                      {'id': 'personal',
843                       'fields':
844                       ('name',
845                        'sex',
846                        'perm_address',
847                        'email',
848                        'phone',
849                       )
850                      },
851                      'StudentStudyCourse':
852                      {'id': 'study_course',
853                       'fields':
854                       ('course', # study_course
855                        'faculty', # from certificate
856                        'department', # from certificate
857                        'end_level', # from certificate
858                        'level', # current_level
859                        'mode',  # current_mode
860                        'session', # current_session
861                        'verdict', # current_verdict
862                       )
863                      },
864                     }
865    ###)
866
867    def __init__(self,name=None):
868        if name ==  None:
869            name = self.name
870        WAeUPTable.__init__(self, name)
871        return
872
873    def manage_catalogClear(self, REQUEST=None, RESPONSE=None, URL1=None):
874        """ clears the whole enchilada """
875        self._catalog.clear()
876
877        if REQUEST and RESPONSE:
878            RESPONSE.redirect(
879              URL1 +
880              '/manage_catalogAdvanced?manage_tabs_message=Catalog%20Cleared')
881
882    def manage_catalogReindex(self, REQUEST, RESPONSE, URL1): ###(
883        """ clear the catalog, then re-index everything """
884
885        elapse = time.time()
886        c_elapse = time.clock()
887
888        pgthreshold = self._getProgressThreshold()
889        handler = (pgthreshold > 0) and ZLogHandler(pgthreshold) or None
890        self.refreshCatalog(clear=1, pghandler=handler)
891
892        elapse = time.time() - elapse
893        c_elapse = time.clock() - c_elapse
894
895        RESPONSE.redirect(
896            URL1 +
897            '/manage_catalogAdvanced?manage_tabs_message=' +
898            urllib.quote('Catalog Updated \n'
899                         'Total time: %s\n'
900                         'Total CPU time: %s' % (`elapse`, `c_elapse`)))
901    ###)
902
903    def fill_certificates_dict(self): ###(
904        "return certificate data in  dict"
905        certificates_brains = self.portal_catalog(portal_type ='Certificate')
906        d = {}
907        for cb in certificates_brains:
908            certificate_doc = cb.getObject().getContent()
909            cb_path = cb.getPath().split('/')
910            ld = {}
911            ld['faculty'] = cb_path[-4]
912            ld['department'] = cb_path[-3]
913            ld['end_level'] = getattr(certificate_doc,'end_level','999')
914            ld['study_mode'] = getattr(certificate_doc,'study_mode','')
915            d[cb.getId] = ld
916        return d
917    ###)
918
919    def get_from_doc_department(self,doc,cached_data={}): ###(
920        "return the students department"
921        if doc is None:
922            return None
923        if hasattr(self,"_v_certificates") and self._v_certificates.has_key(doc.study_course):
924            return self._v_certificates[doc.study_course]['department']
925        certificate_res = self.portal_catalog(id = doc.study_course)
926        if len(certificate_res) != 1:
927            return None
928        return certificate_res[0].getPath().split('/')[-3]
929
930    def get_from_doc_faculty(self,doc,cached_data={}):
931        "return the students faculty"
932        if doc is None:
933            return None
934        if hasattr(self,"_v_certificates") and self._v_certificates.has_key(doc.study_course):
935            return self._v_certificates[doc.study_course]['faculty']
936        certificate_res = self.portal_catalog(id = doc.study_course)
937        if len(certificate_res) != 1:
938            return None
939        return certificate_res[0].getPath().split('/')[-4]
940
941    def get_from_doc_end_level(self,doc,cached_data={}):
942        "return the students end_level"
943        if doc is None:
944            return None
945        if hasattr(self,"_v_certificates") and self._v_certificates.has_key(doc.study_course):
946            return self._v_certificates[doc.study_course]['end_level']
947        certificate_res = self.portal_catalog(id = doc.study_course)
948        if len(certificate_res) != 1:
949            return None
950        return getattr(certificate_res[0].getObject().getContent(),'end_level','unknown')
951
952    def get_from_doc_level(self,doc,cached_data={}):
953        "return the students level"
954        if doc is None:
955            return None
956        return getattr(doc,'current_level',None)
957
958    def get_from_doc_mode(self,doc,cached_data={}):
959        "return the students mode"
960        if doc is None:
961            return None
962        cm = getattr(doc,'current_mode',None)
963        return cm
964
965
966    def get_from_doc_session(self,doc,cached_data={}):
967        "return the students current_session"
968        if doc is None:
969            return None
970        return getattr(doc,'current_session',None)
971
972    def get_from_doc_entry_session(self,doc,cached_data={}):
973        "return the students entry_session"
974        if doc is None:
975            return None
976        es = getattr(doc,'entry_session',None)
977        if es is not None and len(es) == 2:
978            return es
979        try:
980            digit = int(doc.jamb_reg_no[0])
981        except:
982            return "-1"
983        if digit < 8:
984            return "0%c" % doc.jamb_reg_no[0]
985        return "9%c" % doc.jamb_reg_no[0]
986
987    def get_from_doc_course(self,doc,cached_data={}):
988        "return the students study_course"
989        if doc is None:
990            return None
991        return getattr(doc,'study_course',None)
992
993    def get_from_doc_name(self,doc,cached_data={}):
994        "return the students name from the personal"
995        if doc is None:
996            return None
997        return "%s %s %s" % (doc.firstname,doc.middlename,doc.lastname)
998
999    def get_from_doc_verdict(self,doc,cached_data={}):
1000        "return the students study_course"
1001        if doc is None:
1002            return None
1003        return getattr(doc,'current_verdict',None)
1004    ###)
1005
1006    def reindexIndex(self, name, REQUEST,pghandler=None): ###(
1007        if isinstance(name, str):
1008            name = (name,)
1009        reindextypes = {}
1010        reindex_special = []
1011        for n in name:
1012            if n in ("review_state","registered_courses"):
1013                reindex_special.append(n)
1014            else:
1015                for pt in self.affected_types.keys():
1016                    if n in self.affected_types[pt]['fields']:
1017                        if reindextypes.has_key(pt):
1018                            reindextypes[pt].append(n)
1019                        else:
1020                            reindextypes[pt]= [n]
1021                        break
1022        cached_data = {}
1023        if set(name).intersection(set(('faculty','department','end_level'))):
1024            cached_data = self.fill_certificates_dict()
1025        students = self.portal_catalog(portal_type="Student")
1026        if hasattr(self,'portal_catalog_real'):
1027            aq_portal = self.portal_catalog_real.evalAdvancedQuery
1028        else:
1029            aq_portal = self.portal_catalog.evalAdvancedQuery
1030        num_objects = len(students)
1031        if pghandler:
1032            pghandler.init('Refreshing catalog: %s' % self.absolute_url(1), num_objects)
1033        noattr = set(('StudentClearance','StudentPersonal')) & set(reindextypes.keys())
1034        #import pdb;pdb.set_trace()
1035        for i in xrange(num_objects):
1036            if pghandler: pghandler.report(i)
1037            student_brain = students[i]
1038            student_object = student_brain.getObject()
1039            # query = Eq('path',student_brain.getPath())
1040            # sub_brains_list = aq_portal(query)
1041            # sub_brains = {}
1042            # for sub_brain in sub_brains_list:
1043            #     sub_brains[sub_brain.portal_type] = sub_brain
1044            # student_path = student_brain.getPath()
1045            data = {}
1046            modified = False
1047            sid = data['id'] = student_brain.getId
1048            if reindex_special and 'review_state' in reindex_special:
1049                modified = True
1050                data['review_state'] = student_brain.review_state
1051            sub_objects = False
1052            for pt in reindextypes.keys():
1053                modified = True
1054                try:
1055                    doc = getattr(student_object,self.affected_types[pt]['id']).getContent()
1056                    #doc = sub_brains[pt].getObject().getContent()
1057                    # path = "%s/%s" % (student_path,self.affected_types[pt]['id'])
1058                    # doc = self.unrestrictedTraverse(path).getContent()
1059                    sub_objects = True
1060                except:
1061                    continue
1062                for field in set(name).intersection(self.affected_types[pt]['fields']):
1063                    if hasattr(self,'get_from_doc_%s' % field):
1064                        data[field] = getattr(self,'get_from_doc_%s' % field)(doc,
1065                                                                              cached_data=cached_data)
1066                    else:
1067                        data[field] = getattr(doc,field)
1068            if not sub_objects and noattr:
1069                import_res = self.returning_import(id = sid)
1070                if not import_res:
1071                    continue
1072                import_record = import_res[0]
1073                data['matric_no'] = import_record.matric_no
1074                data['sex'] = import_record.Sex == 'F'
1075                data['name'] = "%s %s %s" % (import_record.Firstname,
1076                                             import_record.Middlename,
1077                                             import_record.Lastname)
1078                data['jamb_reg_no'] = import_record.Entryregno
1079            #if reindex_special and 'registered_courses' in reindex_special:
1080            #    try:
1081            #        study_course = getattr(student_object,"study_course")
1082            #        level_ids = study_course.objectIds()
1083            #    except:
1084            #        continue
1085            #    if not level_ids:
1086            #        continue
1087            #    modified = True
1088            #    level_ids.sort()
1089            #    course_ids = getattr(study_course,level_ids[-1]).objectIds()
1090            #    courses = []
1091            #    for c in course_ids:
1092            #        if c.endswith('_co'):
1093            #            courses.append(c[:-3])
1094            #        else:
1095            #            courses.append(c)
1096            #    data['registered_courses'] = courses
1097            if modified:
1098                self.modifyRecord(**data)
1099        if pghandler: pghandler.finish()
1100    ###)
1101
1102    def refreshCatalog(self, clear=0, pghandler=None): ###(
1103        """ re-index everything we can find """
1104        students_folder = self.portal_url.getPortalObject().campus.students
1105        if clear:
1106            self._catalog.clear()
1107        students = self.portal_catalog(portal_type="Student")
1108        num_objects = len(students)
1109        cached_data = self.fill_certificates_dict()
1110        if pghandler:
1111            pghandler.init('Refreshing catalog: %s' % self.absolute_url(1), num_objects)
1112        for i in xrange(num_objects):
1113            if pghandler: pghandler.report(i)
1114            student_brain = students[i]
1115            spath = student_brain.getPath()
1116            student_object = student_brain.getObject()
1117            data = {}
1118            sid = data['id'] = student_brain.getId
1119            data['review_state'] = student_brain.review_state
1120            sub_objects = False
1121            for pt in self.affected_types.keys():
1122                modified = True
1123                try:
1124                    doc = getattr(student_object,self.affected_types[pt]['id']).getContent()
1125                    sub_objects = True
1126                except:
1127                    #from pdb import set_trace;set_trace()
1128                    continue
1129                for field in self.affected_types[pt]['fields']:
1130                    if hasattr(self,'get_from_doc_%s' % field):
1131                        data[field] = getattr(self,'get_from_doc_%s' % field)(doc,
1132                                                                              cached_data=cached_data)
1133                    else:
1134                        data[field] = getattr(doc,field,None)
1135            if not sub_objects:
1136                import_res = self.returning_import(id = sid)
1137                if not import_res:
1138                    continue
1139                import_record = import_res[0]
1140                data['matric_no'] = import_record.matric_no
1141                data['sex'] = import_record.Sex == 'F'
1142                data['name'] = "%s %s %s" % (import_record.Firstname,
1143                                             import_record.Middlename,
1144                                             import_record.Lastname)
1145                data['jamb_reg_no'] = import_record.Entryregno
1146            self.addRecord(**data)
1147        if pghandler: pghandler.finish()
1148    ###)
1149
1150    security.declarePrivate('notify_event_listener') ###(
1151    def notify_event_listener(self,event_type,object,infos):
1152        "listen for events"
1153        if not infos.has_key('rpath'):
1154            return
1155        pt = getattr(object,'portal_type',None)
1156        mt = getattr(object,'meta_type',None)
1157        students_catalog = self
1158        data = {}
1159        if pt == 'Student' and\
1160           mt == 'CPS Proxy Folder' and\
1161           event_type.startswith('workflow'):
1162            data['id'] = object.getId()
1163            data['review_state'] = self.portal_workflow.getInfoFor(object,'review_state',None)
1164            students_catalog.modifyRecord(**data)
1165            return
1166        rpl = infos['rpath'].split('/')
1167        if pt == 'Student' and mt == 'CPS Proxy Folder':
1168            student_id = object.id
1169            if event_type == "sys_add_object":
1170                try:
1171                    self.addRecord(id = student_id)
1172                except ValueError:
1173                    pass
1174                return
1175            elif event_type == 'sys_del_object':
1176                self.deleteRecord(student_id)
1177        if pt not in self.affected_types.keys():
1178            return
1179        if event_type not in ('sys_modify_object'):
1180            return
1181        if mt == 'CPS Proxy Folder':
1182            return
1183        if not hasattr(self,'_v_certificates'):
1184            self._v_certificates = self.fill_certificates_dict()
1185        for field in self.affected_types[pt]['fields']:
1186            if hasattr(self,'get_from_doc_%s' % field):
1187                data[field] = getattr(self,'get_from_doc_%s' % field)(object)
1188            else:
1189                data[field] = getattr(object,field)
1190        data['id'] = rpl[2]
1191        self.modifyRecord(**data)
1192    ###)
1193
1194
# Apply the Zope 2 ClassSecurityInfo declarations made on the class.
InitializeClass(StudentsCatalog)
1196
1197###)
1198
1199class CertificatesCatalog(WAeUPTable): ###(
1200    security = ClassSecurityInfo()
1201
1202    meta_type = 'WAeUP Certificates Catalog'
1203    name =  "certificates_catalog"
1204    key = "code"
1205    def __init__(self,name=None):
1206        if name ==  None:
1207            name =  self.name
1208        WAeUPTable.__init__(self, name)
1209
1210    def manage_catalogReindex(self, REQUEST, RESPONSE, URL1): ###(
1211        """ clear the catalog, then re-index everything """
1212
1213        elapse = time.time()
1214        c_elapse = time.clock()
1215
1216        pgthreshold = self._getProgressThreshold()
1217        handler = (pgthreshold > 0) and ZLogHandler(pgthreshold) or None
1218        self.refreshCatalog(clear=1, pghandler=handler)
1219
1220        elapse = time.time() - elapse
1221        c_elapse = time.clock() - c_elapse
1222
1223        RESPONSE.redirect(
1224            URL1 +
1225            '/manage_catalogAdvanced?manage_tabs_message=' +
1226            urllib.quote('Catalog Updated \n'
1227                         'Total time: %s\n'
1228                         'Total CPU time: %s' % (`elapse`, `c_elapse`)))
1229    ###)
1230
1231    def reindexIndex(self, name, REQUEST,pghandler=None): ###(
1232        if isinstance(name, str):
1233            name = (name,)
1234        certificates = self.portal_catalog(portal_type="Certificate")
1235        num_objects = len(Certificates)
1236        if pghandler:
1237            pghandler.init('Refreshing catalog: %s' % self.absolute_url(1), num_objects)
1238        for i in xrange(num_objects):
1239            if pghandler: pghandler.report(i)
1240            certificate_brain = certificates[i]
1241            certificate_object = certificate_brain.getObject()
1242            pl = certificate_brain.getPath().split('/')
1243            data = {}
1244            cid = data[self.key] = certificate_brain.getId
1245            data['faculty'] = pl[-4]
1246            data['department'] = pl[-3]
1247            doc = certificate_object.getContent()
1248            for field in name:
1249                if field not in (self.key,'faculty','department'):
1250                    data[field] = getattr(doc,field)
1251            self.modifyRecord(**data)
1252        if pghandler: pghandler.finish()
1253    ###)
1254
1255    def refreshCatalog(self, clear=0, pghandler=None): ###(
1256        """ re-index everything we can find """
1257        if clear:
1258            self._catalog.clear()
1259        certificates = self.portal_catalog(portal_type="Certificate")
1260        num_objects = len(certificates)
1261        if pghandler:
1262            pghandler.init('Refreshing catalog: %s' % self.absolute_url(1), num_objects)
1263        #from pdb import set_trace;set_trace()
1264        for i in xrange(num_objects):
1265            if pghandler: pghandler.report(i)
1266            certificate_brain = certificates[i]
1267            certificate_doc = certificate_brain.getObject().getContent()
1268            pl = certificate_brain.getPath().split('/')
1269            data = {}
1270            for field in self.schema():
1271                data[field] = getattr(certificate_doc,field,None)
1272            data[self.key] = certificate_brain.getId
1273            ai = pl.index('academics')
1274            data['faculty'] = pl[ai +1]
1275            data['department'] = pl[ai +2]
1276            if clear:
1277                self.addRecord(**data)
1278            else:
1279                self.modifyRecord(**data)
1280        if pghandler: pghandler.finish()
1281    ###)
1282
1283    security.declarePrivate('notify_event_listener') ###(
1284    def notify_event_listener(self,event_type,object,infos):
1285        "listen for events"
1286        if not infos.has_key('rpath'):
1287            return
1288        pt = getattr(object,'portal_type',None)
1289        mt = getattr(object,'meta_type',None)
1290        if pt != 'Certificate':
1291            return
1292        data = {}
1293        rpl = infos['rpath'].split('/')
1294        if event_type not in ("sys_add_object","sys_modify_object","sys_del_object"):
1295            return
1296        certificate_id = object.getId()
1297        data[self.key] = certificate_id
1298        if event_type == "sys_add_object" and mt == 'CPS Proxy Folder':
1299            try:
1300                self.addRecord(**data)
1301            except ValueError:
1302                return
1303            certificate_id = object.getId()
1304            doc = object.getContent()
1305            if doc is None:
1306                return
1307            for field in self.schema():
1308                data[field] = getattr(doc,field,None)
1309            data[self.key] = certificate_id
1310            ai = rpl.index('academics')
1311            data['faculty'] = rpl[ai +1]
1312            data['department'] = rpl[ai +2]
1313            self.modifyRecord(**data)
1314            return
1315        if event_type == "sys_del_object":
1316            self.deleteRecord(certificate_id)
1317            return
1318        if event_type == "sys_modify_object" and mt == 'Certificate':
1319            #from pdb import set_trace;set_trace()
1320            for field in self.schema():
1321                data[field] = getattr(object,field,None)
1322            certificate_id = object.aq_parent.getId()
1323            data[self.key] = certificate_id
1324            ai = rpl.index('academics')
1325            data['faculty'] = rpl[ai +1]
1326            data['department'] = rpl[ai +2]
1327            self.modifyRecord(**data)
1328    ###)
1329
1330
# Apply the Zope 2 ClassSecurityInfo declarations made on the class.
InitializeClass(CertificatesCatalog)
1332###)
1333
class CoursesCatalog(WAeUPTable): ###(
    """Flat catalog with one record per Course, kept in sync with the
    Course objects under the academics hierarchy."""
    security = ClassSecurityInfo()

    meta_type = 'WAeUP Courses Catalog'
    name =  "courses_catalog"
    key = "code"
    def __init__(self,name=None):
        # Default to the class-level catalog name when none is given.
        if name ==  None:
            name =  self.name
        WAeUPTable.__init__(self, name)

    def manage_catalogReindex(self, REQUEST, RESPONSE, URL1): ###(
        """ clear the catalog, then re-index everything """

        elapse = time.time()
        c_elapse = time.clock()

        pgthreshold = self._getProgressThreshold()
        handler = (pgthreshold > 0) and ZLogHandler(pgthreshold) or None
        self.refreshCatalog(clear=1, pghandler=handler)

        elapse = time.time() - elapse
        c_elapse = time.clock() - c_elapse

        RESPONSE.redirect(
            URL1 +
            '/manage_catalogAdvanced?manage_tabs_message=' +
            urllib.quote('Catalog Updated \n'
                         'Total time: %s\n'
                         'Total CPU time: %s' % (`elapse`, `c_elapse`)))
    ###)

    def reindexIndex(self, name, REQUEST,pghandler=None): ###(
        """Recompute the given index(es) for every course record."""
        if isinstance(name, str):
            name = (name,)
        courses = self.portal_catalog(portal_type="Course")
        num_objects = len(courses)
        if pghandler:
            pghandler.init('Refreshing catalog: %s' % self.absolute_url(1), num_objects)
        for i in xrange(num_objects):
            if pghandler: pghandler.report(i)
            course_brain = courses[i]
            course_object = course_brain.getObject()
            pl = course_brain.getPath().split('/')
            data = {}
            # NOTE(review): cid is assigned but never used.
            cid = data[self.key] = course_brain.getId
            # faculty/department are derived from the course's position
            # in the academics hierarchy.
            data['faculty'] = pl[-4]
            data['department'] = pl[-3]
            doc = course_object.getContent()
            for field in name:
                if field not in (self.key,'faculty','department'):
                    data[field] = getattr(doc,field)
            self.modifyRecord(**data)
        if pghandler: pghandler.finish()
    ###)

    def refreshCatalog(self, clear=0, pghandler=None): ###(
        """ re-index everything we can find """
        if clear:
            self._catalog.clear()
        courses = self.portal_catalog(portal_type="Course")
        num_objects = len(courses)
        if pghandler:
            pghandler.init('Refreshing catalog: %s' % self.absolute_url(1), num_objects)
        for i in xrange(num_objects):
            if pghandler: pghandler.report(i)
            course_brain = courses[i]
            course_doc = course_brain.getObject().getContent()
            pl = course_brain.getPath().split('/')
            data = {}
            for field in self.schema():
                data[field] = getattr(course_doc,field,None)
            data[self.key] = course_brain.getId
            ai = pl.index('academics')
            data['faculty'] = pl[ai +1]
            data['department'] = pl[ai +2]
            if clear:
                self.addRecord(**data)
            else:
                self.modifyRecord(**data)
        if pghandler: pghandler.finish()
    ###)

    security.declarePrivate('notify_event_listener') ###(
    def notify_event_listener(self,event_type,object,infos):
        "listen for events and keep the catalog record in sync"
        if not infos.has_key('rpath'):
            return
        pt = getattr(object,'portal_type',None)
        mt = getattr(object,'meta_type',None)
        if pt != 'Course':
            return
        data = {}
        rpl = infos['rpath'].split('/')
        if event_type not in ("sys_add_object","sys_modify_object","sys_del_object"):
            return
        course_id = object.getId()
        data[self.key] = course_id
        if event_type == "sys_add_object" and mt == 'CPS Proxy Folder':
            try:
                self.addRecord(**data)
            except ValueError:
                # record already exists
                return
            course_id = object.getId()
            doc = object.getContent()
            if doc is None:
                return
            for field in self.schema():
                data[field] = getattr(doc,field,None)
            data[self.key] = course_id
            ai = rpl.index('academics')
            data['faculty'] = rpl[ai +1]
            data['department'] = rpl[ai +2]
            self.modifyRecord(**data)
            return
        if event_type == "sys_del_object":
            self.deleteRecord(course_id)
            return
        if event_type == "sys_modify_object" and mt == 'Course':
            # The document itself was modified; its proxy (and hence the
            # catalog id) is the acquisition parent.
            for field in self.schema():
                data[field] = getattr(object,field,None)
            course_id = object.aq_parent.getId()
            data[self.key] = course_id
            ai = rpl.index('academics')
            data['faculty'] = rpl[ai +1]
            data['department'] = rpl[ai +2]
            self.modifyRecord(**data)
    ###)
1464
1465
# Apply the Zope 2 ClassSecurityInfo declarations made on the class.
InitializeClass(CoursesCatalog)
1467###)
1468
class CourseResults(WAeUPTable): ###(
    """Catalog of course results; one record per
    (student, level, course) combination."""
    security = ClassSecurityInfo()

    meta_type = 'WAeUP Results Catalog'
    name = "course_results"
    key = "key" #student_id + level + course_id
    def __init__(self,name=None):
        # Default to the class-level catalog name when none is given.
        if name ==  None:
            name = self.name
        WAeUPTable.__init__(self, name)
        self._queue = []
1480
1481    def addMultipleRecords(self, records): ###(
1482        """add many records"""
1483        existing_uids = []
1484        for data in records:
1485            uid = "%(student_id)s|%(level_id)s|%(course_id)s" % data
1486            data['%s' % self.key] = uid
1487            query = Eq(self.key, uid)
1488            res = self.course_results.evalAdvancedQuery(query)
1489            if len(res) > 0:
1490                rec = res[0]
1491                equal = True
1492                for attr in ('student_id','level_id','course_id'):
1493                    if getattr(rec,attr,'') != data[attr]:
1494                        equal = False
1495                        break
1496                if equal:
1497                    existing_uids += uid,
1498                    continue
1499            self.catalog_object(dict2ob(data), uid=uid)
1500        return existing_uids
1501    ###)
1502
1503    def deleteResultsHere(self,level_id,student_id): ###(
1504        query = Eq('student_id',student_id) & Eq('level_id', level_id)
1505        course_results = self.course_results.evalAdvancedQuery(query)
1506        #import pdb;pdb.set_trace()
1507        for result in course_results:
1508            self.deleteRecord(result.key)
1509    ###)
1510
    def moveResultsHere(self,level,student_id): ###(
        """Move course results stored as sub-objects of *level* into
        this flat catalog, then delete the level's sub-objects."""
        level_id = level.getId()
        query = Eq('student_id',student_id) & Eq('level_id', level_id)
        course_results = self.course_results.evalAdvancedQuery(query)
        existing_courses = [cr.code for cr in course_results]
        to_delete = []
        for code,obj in level.objectItems():
            # Every sub-object is removed at the end, even ones whose
            # course is already cataloged.
            to_delete.append(code)
            carry_over = False
            # a '_co' suffix marks a carry-over course; strip it to get
            # the real course code.
            if code.endswith('_co'):
                carry_over = True
                code  = code[:-3]
            if code in existing_courses:
                continue
            course_result_doc = obj.getContent()
            data = {}
            course_id = code
            for field in self.schema():
                data[field] = getattr(course_result_doc,field,'')
            data['key'] = key = "%(student_id)s|%(level_id)s|%(course_id)s" % vars()
            data['student_id'] = student_id
            data['level_id'] = level_id
            session_id = self.getLevelSession(level.getContent(),student_id,level_id)
            data['session_id'] = session_id
            #data['queue_status'] = OBJECT_CREATED
            data['code'] = course_id
            data['carry_over'] = carry_over
            self.catalog_object(dict2ob(data), uid=key)
        level.manage_delObjects(to_delete)
    ###)
1542
1543    def getCourses(self,student_id,level_id): ###(
1544        query = Eq('student_id',student_id) & Eq('level_id', level_id)
1545        course_results = self.course_results.evalAdvancedQuery(query)
1546        carry_overs = []
1547        normal1 = []
1548        normal2 = []
1549        normal3 = []
1550        total_credits = 0
1551        gpa = 0
1552        for brain in course_results:
1553            d = {}
1554
1555            for field in self.schema():
1556                d[field] = getattr(brain,field,'')
1557
1558            d['weight'] = ''
1559            d['grade'] = ''
1560            d['score'] = ''
1561
1562            if str(brain.credits).isdigit():
1563                credits = int(brain.credits)
1564                total_credits += credits
1565                score = getattr(brain,'score',0)
1566                if score and str(score).isdigit() and int(score) > 0:
1567                    score = int(score)
1568                    grade,weight = self.getGradesFromScore(score)
1569                    gpa += weight * credits
1570                    d['weight'] = weight
1571                    d['grade'] = grade
1572                    d['score'] = score
1573            d['coe'] = ''
1574            if brain.core_or_elective:
1575                d['coe'] = 'Core'
1576            elif brain.core_or_elective == False:
1577                d['coe'] = 'Elective'
1578            id = code = d['id'] = brain.code
1579            d['code'] = code
1580            res = self.courses_catalog.evalAdvancedQuery(Eq('code',code))
1581            if res:
1582                course = res[0]
1583                d['title'] = course.title
1584                # The courses_catalog contains strings and integers in its semester field.
1585                # Maybe this can be fixed by reindexing the catalog. The schema of course says 'CPS Int Field'.
1586                d['semester'] = str(course.semester)
1587            else:
1588                d['title'] = "Course has been removed from course list"
1589                d['semester'] = ''
1590            if brain.carry_over:
1591                d['coe'] = 'CO'
1592                carry_overs.append(d)
1593            else:
1594                if d['semester'] == '1':
1595                    normal1.append(d)
1596
1597                elif d['semester'] == '2':
1598                    normal2.append(d)
1599                else:
1600                    normal3.append(d)
1601        #normal.sort(cmp=lambda x,y: cmp("%(semester)s%(code)s" % x,
1602        #                                "%(semester)s%(code)s" % y))
1603        carry_overs.sort(cmp=lambda x,y: cmp("%(semester)s%(code)s" % x,
1604                                             "%(semester)s%(code)s" % y))
1605        return total_credits,gpa,carry_overs,normal1,normal2,normal3
1606    ###)
1607
1608InitializeClass(CourseResults)
1609###)
1610
class OnlinePaymentsImport(WAeUPTable): ###(
    """Import table for online payment transactions, keyed by order_id."""

    meta_type = 'WAeUP Online Payment Transactions'
    name = "online_payments_import"
    key = "order_id"
    def __init__(self,name=None):
        """Initialize the table; `name` defaults to the class-level name."""
        # PEP 8: compare to None with `is`, not `==`.
        if name is None:
            name = self.name
        WAeUPTable.__init__(self, name)


InitializeClass(OnlinePaymentsImport)
###)
1624
class ReturningImport(WAeUPTable): ###(
    """Import table for returning students, keyed by matric_no."""

    meta_type = 'Returning Import Table'
    name = "returning_import"
    key = "matric_no"
    def __init__(self,name=None):
        """Initialize the table; `name` defaults to the class-level name."""
        # PEP 8: compare to None with `is`, not `==`.
        if name is None:
            name = self.name
        WAeUPTable.__init__(self, name)


InitializeClass(ReturningImport)
###)
1638
class ResultsImport(WAeUPTable): ###(
    """Import table for examination results, keyed by a composite key."""

    meta_type = 'Results Import Table'
    name = "results_import"
    key = "key"
    def __init__(self,name=None):
        """Initialize the table; `name` defaults to the class-level name."""
        # PEP 8: compare to None with `is`, not `==`.
        if name is None:
            name = self.name
        WAeUPTable.__init__(self, name)


InitializeClass(ResultsImport)

###)
1653
class PaymentsCatalog(WAeUPTable): ###(
    """Catalog of student payments, kept in sync with 'Payment' content
    objects through CPS event notifications (see notify_event_listener).
    Records are keyed by order_id.
    """
    security = ClassSecurityInfo()

    meta_type = 'WAeUP Payments Catalog'
    name = "payments_catalog"
    key = "order_id"
    def __init__(self,name=None):
        """Initialize the table; `name` defaults to the class-level name."""
        # PEP 8: compare to None with `is`, not `==`.
        if name is None:
            name = self.name
        WAeUPTable.__init__(self, name)


    security.declarePrivate('notify_event_listener') ###(
    def notify_event_listener(self,event_type,object,infos):
        "listen for events"
        # Only real 'Payment' objects with an rpath are mirrored; CPS
        # proxy folders are skipped (the real object triggers its own event).
        if not infos.has_key('rpath'):
            return
        pt = getattr(object,'portal_type',None)
        mt = getattr(object,'meta_type',None)
        data = {}
        if pt != 'Payment':
            return
        if mt == 'CPS Proxy Folder':
            return # is handled only for the real object
        if event_type == 'sys_del_object':
            self.deleteRecord(object.order_id)
        # BUGFIX: the original used `not in ('sys_modify_object')` -- the
        # parentheses do NOT make a tuple, so this was a substring test on
        # a plain string and let e.g. event_type == 'modify' slip through.
        if event_type != 'sys_modify_object':
            return
        # Mirror every schema field of the payment object into the record.
        for field in self.schema():
            data[field] = getattr(object,field,'')
        rpl = infos['rpath'].split('/')
        #import pdb;pdb.set_trace()
        # NOTE(review): assumes the student id is the 4th-from-last rpath
        # segment -- verify against the actual content-tree layout.
        student_id = rpl[-4]
        data['student_id'] = student_id
        modified = False
        try:
            self.modifyRecord(**data)
            modified = True
        except KeyError:
            # No record yet for this order_id; fall through to addRecord.
            pass
        if not modified:
            try:
                self.addRecord(**data)
            except Exception:
                # Best effort: log and continue, never break the event chain
                # (narrowed from a bare `except:`).
                logger = logging.getLogger('WAeUPTables.PaymentsCatalog.notify_event_listener')
                logger.info("could not add or modify entry for %(student_id)s with %(order_id)s" % data)
        ###)


InitializeClass(PaymentsCatalog)

###)
1708
# BBB (backwards compatibility): keep the historically misspelled name
# 'AccomodationTable' as an alias of AccommodationTable so that persistent
# objects and imports created under the old name keep working.
AccomodationTable = AccommodationTable
Note: See TracBrowser for help on using the repository browser.