source: WAeUP_SRP/trunk/WAeUPTables.py @ 2190

Last change on this file since 2190 was 2189, checked in by joachim, 17 years ago

dumpAll function implemented for WAeUPTables usage: applicants_catalog/dumpAll

  • Property svn:keywords set to Id
File size: 61.7 KB
Line 
1#-*- mode: python; mode: fold -*-
2# (C) Copyright 2005 AixtraWare <http://aixtraware.de>
3# Author: Joachim Schmitz <js@aixtraware.de>
4#
5# This program is free software; you can redistribute it and/or modify
6# it under the terms of the GNU General Public License version 2 as published
7# by the Free Software Foundation.
8#
9# This program is distributed in the hope that it will be useful,
10# but WITHOUT ANY WARRANTY; without even the implied warranty of
11# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
12# GNU General Public License for more details.
13#
14# You should have received a copy of the GNU General Public License
15# along with this program; if not, write to the Free Software
16# Foundation, Inc., 59 Temple Place - Suite 330, Boston, MA
17# 02111-1307, USA.
18#
19# $Id: WAeUPTables.py 2189 2007-09-11 09:02:49Z joachim $
20
21from zope.interface import implements
22from Globals import InitializeClass
23from Products.ZCatalog.ZCatalog import ZCatalog
24from Products.ZCatalog.ProgressHandler import ZLogHandler
25from AccessControl import ClassSecurityInfo
26from Products.CMFCore.permissions import ModifyPortalContent
27from Products.CMFCore.utils import getToolByName
28from Products.CMFCore.CatalogTool import CatalogTool
29from Products.CPSSchemas.StorageAdapter import MappingStorageAdapter
30from Products.CPSSchemas.DataStructure import DataStructure
31from Products.CPSSchemas.DataModel import DataModel
32from Products.AdvancedQuery import Eq, Between, Le, In
33import urllib
34import DateTime,time
35import csv,re,os
36import logging
37import Globals
38p_home = Globals.package_home(globals())
39i_home = Globals.INSTANCE_HOME
40
41ADDING_SHEDULED = "adding_sheduled"
42OBJECT_CREATED = "object_created"
43
44from interfaces import IWAeUPTable
45
46class AttributeHolder(object):
47    pass
48
49def dict2ob(dict):
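    # tiny helper: copy a plain dict onto a fresh object so that
    # ZCatalog.catalog_object can read the values as attributes, e.g.
    #   dict2ob({'bed': 'A1', 'student': ''}).bed  ->  'A1'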
50    ob = AttributeHolder()
51    for key, value in dict.items():
52        setattr(ob, key, value)
53    return ob
54
55class WAeUPTable(ZCatalog): ###(
56
57    implements(IWAeUPTable)
58    security = ClassSecurityInfo()
59    meta_type = None
60
61    def __init__(self,name=None):
62        if name ==  None:
63            name = self.name
64        ZCatalog.__init__(self,name)
65
66    def refreshCatalog(self, clear=0, pghandler=None): ###(
67        """ don't refresh for a normal table """
68
69        if self.REQUEST and self.REQUEST.RESPONSE:
70            self.REQUEST.RESPONSE.redirect(
71              URL1 +
72              '/manage_catalogAdvanced?manage_tabs_message=Catalog%20refresh%20not%20implemented')
73
74###)
75
76    def manage_catalogClear(self, REQUEST=None, RESPONSE=None, URL1=None): ###(
77        """ clears the whole enchilada """
78
79        #if REQUEST and RESPONSE:
80        #    RESPONSE.redirect(
81        #      URL1 +
82        #      '/manage_catalogAdvanced?manage_tabs_message=Catalog%20Clearing%20disabled')
83
84        self._catalog.clear()
85        if REQUEST and RESPONSE:
86            RESPONSE.redirect(
87              URL1 +
88              '/manage_catalogAdvanced?manage_tabs_message=Catalog%20cleared')
89
90###)
91
92    def record2dict(self,fields,record):
93        d = {}
94        for key in fields:
95            v = getattr(record, key, None)
96            if v:
97                if key == 'lga':
98                    v = self.portal_vocabularies.local_gov_areas.get(v)
99                d[key] = v
100            else:
101                d[key] = ''
102        return d
103   
104    def addRecord(self, **data): ###(
105        # The uid is the value of the key field (e.g. "bed" for the accommodation table).
106        uid = data[self.key]
107        res = self.searchResults({"%s" % self.key : uid})
108        if len(res) > 0:
109            raise ValueError("More than one record with uid %s" % uid)
110        self.catalog_object(dict2ob(data), uid=uid)
111        return uid
112
113###)
114
115    def deleteRecord(self, uid):
116        self.uncatalog_object(uid)
117
118    def searchAndSetRecord(self, **data):
119        raise NotImplementedError
120
121    def modifyRecord(self, record=None, **data): ###(
122        #records = self.searchResults(uid=uid)
123        uid = data[self.key]
124        if record is None:
125            records = self.searchResults({"%s" % self.key : uid})
126            if len(records) > 1:
127                # Can not happen, but anyway...
128                raise ValueError("More than one record with uid %s" % uid)
129            if len(records) == 0:
130                raise KeyError("No record for uid %s" % uid)
131            record = records[0]
132        record_data = {}
133        for field in self.schema() + self.indexes():
134            record_data[field] = getattr(record, field)
135        # Add the updated data:
136        record_data.update(data)
137        self.catalog_object(dict2ob(record_data), uid)
138
139###)
140
141    def reindexIndex(self, name, REQUEST,pghandler=None): ###(
142        if isinstance(name, str):
143            name =  (name,)
144        paths = self._catalog.uids.items()
145        i = 0
146        #import pdb;pdb.set_trace()
147        for p,rid in paths:
148            i += 1
149            metadata = self.getMetadataForRID(rid)
150            record_data = {}
151            for field in name:
152                record_data[field] = metadata.get(field)
153            uid = metadata.get(self.key)
154            self.catalog_object(dict2ob(record_data), uid, idxs=name,
155                                update_metadata=0)
156
157###)
158
159    security.declareProtected(ModifyPortalContent,"exportAllRecords") ###(
160    def exportAllRecords(self):
161        "export a WAeUPTable"
162        #import pdb;pdb.set_trace()
163        fields = [field for field in self.schema()]
164        format = ','.join(['"%%(%s)s"' % fn for fn in fields])
165        csv = []
166        csv.append(','.join(['"%s"' % fn for fn in fields]))
167        for uid in self._catalog.uids:
168            records = self.searchResults({"%s" % self.key : uid})
169            if len(records) > 1:
170                # Can not happen, but anyway...
171                raise ValueError("More than one record with uid %s" % uid)
172            if len(records) == 0:
173                raise KeyError("No record for uid %s" % uid)
174            rec = records[0]
175            csv.append(format % rec)
176        current = DateTime.DateTime().strftime("%d-%m-%y_%H_%M_%S")
177        open("%s/import/%s-%s.csv" % (i_home,self.getId(),current),"w+").write('\n'.join(csv))
178
179###)
180
181    security.declareProtected(ModifyPortalContent,"dumpAll")###(
182    def dumpAll(self):
183        """dump all data in the table to a csv"""
184        member = self.portal_membership.getAuthenticatedMember()
185        logger = logging.getLogger('WAeUPTables.dump_%s' % self.__name__)
186        current = DateTime.DateTime().strftime("%d-%m-%y_%H_%M_%S")
187        export_file = "%s/export/%s_%s.csv" % (i_home,self.__name__,current,)
188        print export_file
189        res_list = []
190        lines = []
191        if hasattr(self,"export_keys"):
192            fields = self.export_keys
193        else:
194            fields = []
195            for f in self.schema():
196                fields.append(f)
197        headline = ','.join(fields)
198        #open(export_file,"a").write(headline +'\n')
199        out = open(export_file,"wb")
200        out.write(headline +'\n')
201        out.close()
202        out = open(export_file,"a")
203        csv_writer = csv.DictWriter(out,fields,)
204        format = '"%(' + ')s","%('.join(fields) + ')s"'
205        records = self()
206        nr2export = len(records)
207        logger.info('%s starts dumping, %s records to export' % (member,nr2export))
208        chunk = 2000
209        total = 0
210        start = DateTime.DateTime().timeTime()
211        start_chunk = DateTime.DateTime().timeTime()
212        for record in records:
213            not_all = False
214            d = self.record2dict(fields,record)
215            #d['state'],d['lga'] = formatLGA(d['lga'],voc = self.portal_vocabularies.local_gov_areas)
216            #lines.append(format % d)
217            lines.append(d)
218            total += 1
219            if total and not total % chunk or total == len(records):
220                #open(export_file,"a").write('\n'.join(lines) +'\n')
221                csv_writer.writerows(lines)
222                anz = len(lines)
223                logger.info("wrote %(anz)d  total written %(total)d" % vars())
224                end_chunk = DateTime.DateTime().timeTime()
225                duration = end_chunk-start_chunk
226                per_record = duration/anz
227                till_now = end_chunk - start
228                average_per_record = till_now/total
229                estimated_end = DateTime.DateTime(start + average_per_record * nr2export)
230                estimated_end = estimated_end.strftime("%H:%M:%S")
231                logger.info('%(duration)4.1f, %(per_record)4.3f,end %(estimated_end)s' % vars())
232                start_chunk = DateTime.DateTime().timeTime()
233                lines = []
234        end = DateTime.DateTime().timeTime()
235        logger.info('total time %6.2f m' % ((end-start)/60))
236        import os
237        filename, extension = os.path.splitext(export_file)
238        from subprocess import call
239        msg = "wrote %(total)d records to %(export_file)s" % vars()
240        try:
241            retcode = call('gzip %s' % (export_file),shell=True)
242            if retcode == 0:
243                msg = "wrote %(total)d records to %(export_file)s.gz" % vars()
244        except OSError, e:
245            retcode = -99
246            logger.info("zip failed with %s" % e)
247        logger.info(msg)
248        args = {'portal_status_message': msg}
249        #url = self.REQUEST.get('URL1') + '?' + urlencode(args)
250        url = self.REQUEST.get('URL2')
251        return self.REQUEST.RESPONSE.redirect(url)
252    ###)
253
254    security.declarePrivate("_import_old") ###(
255    def _import_old(self,filename,schema,layout, mode,logger):
256        "import data from csv"
257        import transaction
258        import random
259        pm = self.portal_membership
260        member = pm.getAuthenticatedMember()
261        current = DateTime.DateTime().strftime("%d-%m-%y_%H_%M_%S")
262        import_fn = "%s/import/%s.csv" % (i_home,filename)
263        imported_fn = "%s/import/%s_imported%s.csv" % (i_home,filename,current)
264        not_imported_fn = "%s/import/%s_not_imported%s.csv" % (i_home,filename,current)
265        start = True
266        tr_count = 1
267        total_imported = 0
268        total_not_imported = 0
269        total = 0
270        iname =  "%s" % filename
271        not_imported = []
272        imported = []
273        valid_records = []
274        invalid_records = []
275        d = {}
276        d['mode'] = mode
277        d['imported'] = total_imported
278        d['not_imported'] = total_not_imported
279        d['valid_records'] = valid_records
280        d['invalid_records'] = invalid_records
281        d['import_fn'] = import_fn
282        d['imported_fn'] = imported_fn
283        d['not_imported_fn'] = not_imported_fn
284        if schema is None:
285            em = 'No schema specified'
286            logger.error(em)
287            return d
288        if layout is None:
289            em = 'No layout specified'
290            logger.error(em)
291            return d
292        validators = {}
293        for widget in layout.keys():
294            try:
295                validators[widget] = layout[widget].validate
296            except AttributeError:
297                logger.info('%s has no validate attribute' % widget)
298                return d
299        # if mode == 'edit':
300        #     importer = self.importEdit
301        # elif mode == 'add':
302        #     importer = self.importAdd
303        # else:
304        #     importer = None
305        try:
306            items = csv.DictReader(open(import_fn,"rb"),
307                                   dialect="excel",
308                                   skipinitialspace=True)
309        except:
310            em = 'Error reading %s.csv' % filename
311            logger.error(em)
312            return d
313        #import pdb;pdb.set_trace()
314        for item in items:
315            if start:
316                start = False
317                logger.info('%s starts import from %s.csv' % (member,filename))
318                #import_keys = [k for k in item.keys() if not k.startswith('ignore')]
319                attrs = csv.reader(open("%s/import/%s.csv" % (i_home,filename),"rb"),
320                                   dialect="excel",
321                                   skipinitialspace=True).next()
322                import_keys = [k for k in attrs if not (k.startswith('ignore') or k.isupper())]
323                diff2schema = set(import_keys).difference(set(schema.keys()))
324                diff2layout = set(import_keys).difference(set(layout.keys()))
325                if diff2layout:
326                    em = "not ignorable key(s) %s found in heading" % diff2layout
327                    logger.info(em)
328                    return d
329                s = ','.join(['"%s"' % fn for fn in import_keys])
330                open(not_imported_fn,"a").write(s + ',"Error"'+ '\n')
331                #s = '"id",' + s
332                open(imported_fn,"a").write(s + '\n')
333                format = ','.join(['"%%(%s)s"' % fn for fn in import_keys])
334                format_error = format + ',"%(Error)s"'
335                #format = '"%(id)s",'+ format
336                adapters = [MappingStorageAdapter(schema, item)]
337            dm = DataModel(item, adapters,context=self)
338            ds = DataStructure(data=item,datamodel=dm)
339            error_string = ""
340            for k in import_keys:
341                if not validators[k](ds,mode=mode):
342                    error_string += " %s : %s" % (k,ds.getError(k))
343            # if not error_string and importer:
344            #     item.update(dm)
345            #     item['id'],error = importer(item)
346            #     if error:
347            #         error_string += error
348            if error_string:
349                item['Error'] = error_string
350                invalid_records.append(dm)
351                not_imported.append(format_error % item)
352                total_not_imported += 1
353            else:
354                em = format % item
355                valid_records.append(dm)
356                imported.append(em)
357                #logger.info("%(total_imported)d of %(total)d %(em)s" % vars())
358                tr_count += 1
359                total_imported += 1
360            total += 1
361        if len(imported) > 0:
362            open(imported_fn,"a").write('\n'.join(imported))
363        if len(not_imported) > 0:
364            open(not_imported_fn,"a").write('\n'.join(not_imported))
365        #em = "Imported: %d, not imported: %d of total %d" % (total_imported,total_not_imported,total)
366        d['imported'] = total_imported
367        d['not_imported'] = total_not_imported
368        d['valid_records'] = valid_records
369        d['invalid_records'] = invalid_records
370        d['imported_fn'] = imported_fn
371        d['not_imported_fn'] = not_imported_fn
372        #logger.info(em)
373        return d
374    ###)
375
376    security.declarePrivate("_import") ###(
377    def _import_new(self,csv_items,schema, layout, mode,logger):
378        "import data from csv.Dictreader Instance"
379        start = True
380        tr_count = 1
381        total_imported = 0
382        total_not_imported = 0
383        total = 0
384        # no filename here: csv_items is an already opened csv.DictReader
385        not_imported = []
386        valid_records = []
387        invalid_records = []
388        duplicate_records = []
389        d = {}
390        d['mode'] = mode
391        d['valid_records'] = valid_records
392        d['invalid_records'] = invalid_records
393        d['duplicate_records'] = duplicate_records
394        # d['import_fn'] = import_fn
395        # d['imported_fn'] = imported_fn
396        # d['not_imported_fn'] = not_imported_fn
397        validators = {}
398        for widget in layout.keys():
399            try:
400                validators[widget] = layout[widget].validate
401            except AttributeError:
402                logger.info('%s has no validate attribute' % widget)
403                return d
404        for item in csv_items:
405            if start:
406                start = False
407                member = self.portal_membership.getAuthenticatedMember()
408                logger.info('%s starts import' % member)
409                attrs = csv_items.fieldnames
410                import_keys = [k for k in attrs if not (k.startswith('ignore') or k.isupper())]
411                diff2schema = set(import_keys).difference(set(schema.keys()))
412                diff2layout = set(import_keys).difference(set(layout.keys()))
413                if diff2layout:
414                    em = "not ignorable key(s) %s found in heading" % diff2layout
415                    logger.info(em)
416                    return d
417                # s = ','.join(['"%s"' % fn for fn in import_keys])
418                # open(not_imported_fn,"a").write(s + ',"Error"'+ '\n')
419                # #s = '"id",' + s
420                # open(imported_fn,"a").write(s + '\n')
421                # format = ','.join(['"%%(%s)s"' % fn for fn in import_keys])
422                # format_error = format + ',"%(Error)s"'
423                # #format = '"%(id)s",'+ format
424                adapters = [MappingStorageAdapter(schema, item)]
425            dm = DataModel(item, adapters,context=self)
426            ds = DataStructure(data=item,datamodel=dm)
427            error_string = ""
428            for k in import_keys:
429                if not validators[k](ds,mode=mode):
430                    error_string += " %s : %s" % (k,ds.getError(k))
431            if error_string:
432                item['Error'] = error_string
433                #invalid_records.append(dm)
434                invalid_records.append(item)
435                total_not_imported += 1
436            else:
437                # row passed validation
438                valid_records.append(dm)
439                #logger.info("%(total_imported)d of %(total)d %(em)s" % vars())
440                tr_count += 1
441                total_imported += 1
442            total += 1
443        # if len(imported) > 0:
444        #     open(imported_fn,"a").write('\n'.join(imported))
445        # if len(not_imported) > 0:
446        #     open(not_imported_fn,"a").write('\n'.join(not_imported))
447        #em = "Imported: %d, not imported: %d of total %d" % (total_imported,total_not_imported,total)
448        d['imported'] = total_imported
449        d['not_imported'] = total_not_imported
450        d['valid_records'] = valid_records
451        d['invalid_records'] = invalid_records
452        return d
453    ###)
454
455###)
456
457class AccommodationTable(WAeUPTable): ###(
458
459    meta_type = 'WAeUP Accommodation Tool'
460    name = "portal_accommodation"
461    key = "bed"
462    def __init__(self,name=None):
463        if name ==  None:
464            name = self.name
465        WAeUPTable.__init__(self, name)
466
467    def searchAndReserveBed(self, student_id,bed_type):
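        # returns a (status, value) tuple: (-1, message) if the student already
        # holds a bed, (-2, "No bed available") if no free bed of the requested
        # type is left, and (1, bed_id) once the bed record has been booked.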
468        records = self.searchResults({'student' : student_id})
469        if len(records) > 0:
470            return -1,"Student with Id %s already booked bed %s." % (student_id,records[0].bed)
471
472        records = [r for r in self.searchResults({'bed_type' : bed_type}) if not r.student]
473        #import pdb;pdb.set_trace()
474        if len(records) == 0:
475            return -2,"No bed available"
476        rec = records[0]
477        self.modifyRecord(bed=rec.bed,student=student_id)
478        s_logger = logging.getLogger('WAeUPTables.AccommodationTable.searchAndReserveBed')
479        s_logger.info('%s reserved bed %s' % (student_id,rec.bed))
480        return 1,rec.bed
481
482
483InitializeClass(AccommodationTable)
484
485###)
486
487class PinTable(WAeUPTable): ###(
488    from ZODB.POSException import ConflictError
489    meta_type = 'WAeUP Pin Tool'
490    name = "portal_pins"
491    key = 'pin'
492    def __init__(self,name=None):
493        if name ==  None:
494            name = self.name
495        WAeUPTable.__init__(self, name)
496
497
498    def searchAndSetRecord(self, uid, student_id,prefix):
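        # return codes as implemented below:
        #   -2  the student already used another pin of this prefix (CLR/APP only)
        #   -1  no record exists for this pin
        #    0  the pin is already owned by a different student
        #    1  the pin was unused and has now been assigned to student_id
        #    2  the pin already belongs to this student, or a write conflict occurred
        #   -3  fallback, not normally reached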
499       
500        # The following line must be activated after resetting the
501        # the portal_pins table. This is to avoid duplicate entries
502        # and disable duplicate payments.
503       
504        #student_id = student_id.upper()
505
506        records = self.searchResults(student = student_id)
507        if len(records) > 0 and prefix in ('CLR','APP'):
508            for r in records:
509                if r.pin != uid and r.prefix_batch.startswith(prefix):
510                    return -2
511        records = self.searchResults({"%s" % self.key : uid})
512        if len(records) > 1:
513            # Can not happen, but anyway...
514            raise ValueError("More than one record with uid %s" % uid)
515        if len(records) == 0:
516            return -1
517        record = records[0]
518        if record.student == "":
519            record_data = {}
520            for field in self.schema() + self.indexes():
521                record_data[field] = getattr(record, field)
522            # Add the updated data:
523            record_data['student'] = student_id
524            try:
525                self.catalog_object(dict2ob(record_data), uid)
526                return 1
527            except self.ConflictError:
528                return 2
529        if record.student.upper() != student_id.upper():
530            return 0
531        if record.student.upper() == student_id.upper():
532            return 2
533        return -3
534
535InitializeClass(PinTable)
536
537###)
538
539class PumeResultsTable(WAeUPTable): ###(
540
541    meta_type = 'WAeUP PumeResults Tool'
542    name = "portal_pumeresults"
543    key = "jamb_reg_no"
544    def __init__(self,name=None):
545        if name ==  None:
546            name = self.name
547        WAeUPTable.__init__(self, name)
548
549
550InitializeClass(PumeResultsTable)
551
552###)
553
554class ApplicantsCatalog(WAeUPTable): ###(
555
556    meta_type = 'WAeUP Applicants Catalog'
557    name = "applicants_catalog"
558    key = "reg_no"
559    security = ClassSecurityInfo()
560    export_keys = (
561                   "reg_no",
562                   "status",
563                   "lastname",
564                   "sex",
565                   "date_of_birth",
566                   "lga",
567                   "email",
568                   "phone",
569                   "passport",
570                   "entry_mode",
571                   "pin",
572                   "screening_type",
573                   "registration_date",
574                   "testdate",
575                   "application_date",
576                   "screening_date",
577                   "faculty",
578                   "department",
579                   "course1",
580                   "course2",
581                   "course3",
582                   "eng_score",
583                   "subj1",
584                   "subj1score",
585                   "subj2",
586                   "subj2score",
587                   "subj3",
588                   "subj3score",
589                   "aggregate",
590                   "course_admitted",
591                   )
592    def __init__(self,name=None):
593        if name ==  None:
594            name = self.name
595        WAeUPTable.__init__(self, name)
596
597   
598
599    security.declareProtected(ModifyPortalContent,"new_importCSV")###(
600    def new_importCSV(self,filename="JAMB_data",
601                  schema_id="application",
602                  layout_id="application_import",
603                  mode='add'):
604        """ import JAMB data """
605        current = DateTime.DateTime().strftime("%d-%m-%y_%H_%M_%S")
606        pm = self.portal_membership
607        member = pm.getAuthenticatedMember()
608        logger = logging.getLogger('WAeUPTables.ApplicantsCatalog.importCSV')
609        lock_fn = "%s/import/%s_import_lock" % (i_home,filename)
610        import_fn = "%s/import/%s.csv" % (i_home,filename)
611        if mode not in ('add','edit'):
612            logger.info("invalid mode: %s" % mode)
613        if os.path.exists(lock_fn):
614            logger.info("import of %(import_fn)s already in progress" % vars())
615            return
616        lock_file = open(lock_fn,"w")
617        lock_file.write("%(current)s \n" % vars())
618        lock_file.close()
619        invalid_fn = "%s/import/%s_not_imported%s.csv" % (i_home,filename,current)
620        duplicate_fn = "%s/import/%s_not_imported%s.csv" % (i_home,filename,current)
621        stool = getToolByName(self, 'portal_schemas')
622        ltool = getToolByName(self, 'portal_layouts')
623        schema = stool._getOb(schema_id)
624        if schema is None:
625            em = 'No such schema %s' % schema_id
626            logger.error(em)
627            return
628        for postfix in ('_import',''):
629            layout_name = "%(layout_id)s%(postfix)s" % vars()
630            if hasattr(ltool,layout_name):
631                break
632        layout = ltool._getOb(layout_name)
633        if layout is None:
634            em = 'No such layout %s' % layout_id
635            logger.error(em)
636            return
637        try:
638            csv_file = csv.DictReader(open(import_fn,"rb"))
639        except:
640            em = 'Error reading %s.csv' % filename
641            logger.error(em)
642            return
643        d = self._import_new(csv_file,schema,layout,mode,logger)
644        imported = []
645        edited = []
646        duplicates = []
647        not_found = []
648        if len(d['valid_records']) > 0:
649            for record in d['valid_records']:
650                #import pdb;pdb.set_trace()
651                if mode == "add":
652                    try:
653                        self.addRecord(**dict(record.items()))
654                        imported.append(dict(record.items()))
655                        logger.info("added %s" % record.items())
656                    except ValueError:
657                        duplicates.append(dict(record.items()))
658                        logger.info("duplicate %s" % record.items())
659                elif mode == "edit":
660                    try:
661                        self.modifyRecord(**dict(record.items()))
662                        edited.append(dict(record.items()))
663                        logger.info("edited %s" % record.items())
664                    except KeyError:
665                        not_found.append(dict(record.items()))
666                        logger.info("not found %s" % record.items())
667        invalid = d['invalid_records']
668        for itype in ("imported","edited","not_found","duplicate","invalid"):
669            outlist = locals[itype]
670            if len(outlist):
671                d = {}
672                for k in outlist[0].keys():
673                    d[k] = k
674                outfile = open("file_name_%s" % itype,'w')
675                csv_writer = csv.DictWriter(outfile,outlist[0].keys())
676                csv_writer.writerows([d] + outlist)
677                logger.info("wrote %d %s records to %s" % (len(outlist),itype,outfile.name))
678###)
679
680    security.declareProtected(ModifyPortalContent,"importCSV")###(
681    def importCSV(self,filename="JAMB_data",
682                  schema_id="application",
683                  layout_id="application",
684                  mode='add'):
685        """ import JAMB data """
686        stool = getToolByName(self, 'portal_schemas')
687        ltool = getToolByName(self, 'portal_layouts')
688        schema = stool._getOb(schema_id)
689        if schema is None:
690            em = 'No such schema %s' % schema_id
691            logger.error(em)
692            return
693        layout = ltool._getOb(layout_id)
694        if layout is None:
695            em = 'No such layout %s' % layout_id
696            logger.error(em)
697            return
698        logger = logging.getLogger('WAeUPTables.ApplicantsCatalog.importCSV')
699        d = self._import_old(filename,schema,layout,mode,logger)
700        if len(d['valid_records']) > 0:
701            for record in d['valid_records']:
702                #import pdb;pdb.set_trace()
703                if mode == "add":
704                    self.addRecord(**dict(record.items()))
705                    logger.info("added %s" % record.items())
706                elif mode == "edit":
707                    self.modifyRecord(**dict(record.items()))
708                    logger.info("edited %s" % record.items())
709                else:
710                    logger.info("invalid mode: %s" % mode)
711        logger.info("%(mode)sed %(imported)d records, invalid written to %(not_imported_fn)s" % d)
712###)
713
714InitializeClass(ApplicantsCatalog)
715
716###)
717
718class StudentsCatalog(WAeUPTable): ###(
719    security = ClassSecurityInfo()
720
721    meta_type = 'WAeUP Students Catalog'
722    name = "students_catalog"
723    key = "id"
724    affected_types = {   ###(
725                      'StudentApplication':
726                      {'id': 'application',
727                       'fields':
728                       ('jamb_reg_no',
729                        'entry_mode',
730                        #'entry_level',
731                        'entry_session',
732                       )
733                      },
734                      'StudentClearance':
735                      {'id': 'clearance',
736                       'fields':
737                       ('matric_no',
738                        'lga',
739                       )
740                      },
741                      'StudentPersonal':
742                      {'id': 'personal',
743                       'fields':
744                       ('name',
745                        'sex',
746                        'perm_address',
747                        'email',
748                        'phone',
749                       )
750                      },
751                      'StudentStudyCourse':
752                      {'id': 'study_course',
753                       'fields':
754                       ('course', # study_course
755                        'faculty', # from certificate
756                        'department', # from certificate
757                        'end_level', # from certificate
758                        'level', # current_level
759                        'mode',  # current_mode
760                        'session', # current_session
761                        'verdict', # current_verdict
762                       )
763                      },
764                     }
765    ###)
766
767    def __init__(self,name=None):
768        if name ==  None:
769            name = self.name
770        WAeUPTable.__init__(self, name)
771        return
772
773    def manage_catalogClear(self, REQUEST=None, RESPONSE=None, URL1=None):
774        """ clears the whole enchilada """
775        self._catalog.clear()
776
777        if REQUEST and RESPONSE:
778            RESPONSE.redirect(
779              URL1 +
780              '/manage_catalogAdvanced?manage_tabs_message=Catalog%20Cleared')
781
782    def manage_catalogReindex(self, REQUEST, RESPONSE, URL1): ###(
783        """ clear the catalog, then re-index everything """
784
785        elapse = time.time()
786        c_elapse = time.clock()
787
788        pgthreshold = self._getProgressThreshold()
789        handler = (pgthreshold > 0) and ZLogHandler(pgthreshold) or None
790        self.refreshCatalog(clear=1, pghandler=handler)
791
792        elapse = time.time() - elapse
793        c_elapse = time.clock() - c_elapse
794
795        RESPONSE.redirect(
796            URL1 +
797            '/manage_catalogAdvanced?manage_tabs_message=' +
798            urllib.quote('Catalog Updated \n'
799                         'Total time: %s\n'
800                         'Total CPU time: %s' % (`elapse`, `c_elapse`)))
801    ###)
802
803    def fill_certificates_dict(self): ###(
804        "return certificate data in  dict"
805        certificates_brains = self.portal_catalog(portal_type ='Certificate')
806        d = {}
807        for cb in certificates_brains:
808            certificate_doc = cb.getObject().getContent()
809            cb_path = cb.getPath().split('/')
810            ld = {}
811            ld['faculty'] = cb_path[-4]
812            ld['department'] = cb_path[-3]
813            ld['end_level'] = getattr(certificate_doc,'end_level','999')
814            d[cb.getId] = ld
815        return d
816    ###)
817
818    def get_from_doc_department(self,doc,cached_data={}): ###(
819        "return the students department"
820        if doc is None:
821            return None
822        if cached_data.has_key(doc.study_course):
823            return cached_data[doc.study_course]['department']
824        certificate_res = self.portal_catalog(id = doc.study_course)
825        if len(certificate_res) != 1:
826            return None
827        return certificate_res[0].getPath().split('/')[-3]
828
829    def get_from_doc_faculty(self,doc,cached_data={}):
830        "return the students faculty"
831        if doc is None:
832            return None
833        if cached_data.has_key(doc.study_course):
834            return cached_data[doc.study_course]['faculty']
835        certificate_res = self.portal_catalog(id = doc.study_course)
836        if len(certificate_res) != 1:
837            return None
838        return certificate_res[0].getPath().split('/')[-4]
839
840    def get_from_doc_end_level(self,doc,cached_data={}):
841        "return the students end_level"
842        if doc is None:
843            return None
844        if cached_data.has_key(doc.study_course):
845            return cached_data[doc.study_course]['end_level']
846        certificate_res = self.portal_catalog(id = doc.study_course)
847        if len(certificate_res) != 1:
848            return None
849        return getattr(certificate_res[0].getObject().getContent(),'end_level','unknown')
850
851    def get_from_doc_level(self,doc,cached_data={}):
852        "return the students level"
853        if doc is None:
854            return None
855        return getattr(doc,'current_level',None)
856
857    def get_from_doc_mode(self,doc,cached_data={}):
858        "return the students mode"
859        if doc is None:
860            return None
861        cm = getattr(doc,'current_mode',None)
862        return cm
863
864
865    def get_from_doc_session(self,doc,cached_data={}):
866        "return the students current_session"
867        if doc is None:
868            return None
869        return getattr(doc,'current_session',None)
870
871    def get_from_doc_entry_session(self,doc,cached_data={}):
872        "return the students entry_session"
873        if doc is None:
874            return None
875        es = getattr(doc,'entry_session',None)
876        if es is not None and len(es) == 2:
877            return es
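        # fallback: derive the entry session from the first digit of the JAMB
        # registration number; digits 0..7 map to sessions '00'..'07', digits
        # 8 and 9 map to '98'/'99', and '-1' marks an unknown session.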
878        try:
879            digit = int(doc.jamb_reg_no[0])
880        except:
881            return "-1"
882        if digit < 8:
883            return "0%c" % doc.jamb_reg_no[0]
884        return "9%c" % doc.jamb_reg_no[0]
885
886    def get_from_doc_course(self,doc,cached_data={}):
887        "return the students study_course"
888        if doc is None:
889            return None
890        return getattr(doc,'study_course',None)
891
892    def get_from_doc_name(self,doc,cached_data={}):
893        "return the students name from the personal"
894        if doc is None:
895            return None
896        return "%s %s %s" % (doc.firstname,doc.middlename,doc.lastname)
897
898    def get_from_doc_verdict(self,doc,cached_data={}):
899        "return the students study_course"
900        if doc is None:
901            return None
902        return getattr(doc,'current_verdict',None)
903    ###)
904
905    def reindexIndex(self, name, REQUEST,pghandler=None): ###(
906        if isinstance(name, str):
907            name = (name,)
908        reindextypes = {}
909        reindex_special = []
910        for n in name:
911            if n in ("review_state","registered_courses"):
912                reindex_special.append(n)
913            else:
914                for pt in self.affected_types.keys():
915                    if n in self.affected_types[pt]['fields']:
916                        if reindextypes.has_key(pt):
917                            reindextypes[pt].append(n)
918                        else:
919                            reindextypes[pt]= [n]
920                        break
921        cached_data = {}
922        if set(name).intersection(set(('faculty','department','end_level'))):
923            cached_data = self.fill_certificates_dict()
924        students = self.portal_catalog(portal_type="Student")
925        if hasattr(self,'portal_catalog_real'):
926            aq_portal = self.portal_catalog_real.evalAdvancedQuery
927        else:
928            aq_portal = self.portal_catalog.evalAdvancedQuery
929        num_objects = len(students)
930        if pghandler:
931            pghandler.init('Refreshing catalog: %s' % self.absolute_url(1), num_objects)
932        noattr = set(('StudentClearance','StudentPersonal')) & set(reindextypes.keys())
933        #import pdb;pdb.set_trace()
934        for i in xrange(num_objects):
935            if pghandler: pghandler.report(i)
936            student_brain = students[i]
937            student_object = student_brain.getObject()
938            # query = Eq('path',student_brain.getPath())
939            # sub_brains_list = aq_portal(query)
940            # sub_brains = {}
941            # for sub_brain in sub_brains_list:
942            #     sub_brains[sub_brain.portal_type] = sub_brain
943            # student_path = student_brain.getPath()
944            data = {}
945            modified = False
946            sid = data['id'] = student_brain.getId
947            if reindex_special and 'review_state' in reindex_special:
948                modified = True
949                data['review_state'] = student_brain.review_state
950            sub_objects = False
951            for pt in reindextypes.keys():
952                modified = True
953                try:
954                    doc = getattr(student_object,self.affected_types[pt]['id']).getContent()
955                    #doc = sub_brains[pt].getObject().getContent()
956                    # path = "%s/%s" % (student_path,self.affected_types[pt]['id'])
957                    # doc = self.unrestrictedTraverse(path).getContent()
958                    sub_objects = True
959                except:
960                    continue
961                for field in set(name).intersection(self.affected_types[pt]['fields']):
962                    if hasattr(self,'get_from_doc_%s' % field):
963                        data[field] = getattr(self,'get_from_doc_%s' % field)(doc,
964                                                                              cached_data=cached_data)
965                    else:
966                        data[field] = getattr(doc,field)
967            if not sub_objects and noattr:
968                import_res = self.returning_import(id = sid)
969                if not import_res:
970                    continue
971                import_record = import_res[0]
972                data['matric_no'] = import_record.matric_no
973                data['sex'] = import_record.Sex == 'F'
974                data['name'] = "%s %s %s" % (import_record.Firstname,
975                                             import_record.Middlename,
976                                             import_record.Lastname)
977                data['jamb_reg_no'] = import_record.Entryregno
978            if reindex_special and 'registered_courses' in reindex_special:
979                try:
980                    study_course = getattr(student_object,"study_course")
981                    level_ids = study_course.objectIds()
982                except:
983                    continue
984                if not level_ids:
985                    continue
986                modified = True
987                level_ids.sort()
988                course_ids = getattr(study_course,level_ids[-1]).objectIds()
989                courses = []
990                for c in course_ids:
991                    if c.endswith('_co'):
992                        courses.append(c[:-3])
993                    else:
994                        courses.append(c)
995                data['registered_courses'] = courses
996            if modified:
997                self.modifyRecord(**data)
998        if pghandler: pghandler.finish()
999    ###)
1000
1001    def refreshCatalog(self, clear=0, pghandler=None): ###(
1002        """ re-index everything we can find """
1003        students_folder = self.portal_url.getPortalObject().campus.students
1004        if clear:
1005            self._catalog.clear()
1006        students = self.portal_catalog(portal_type="Student")
1007        num_objects = len(students)
1008        cached_data = self.fill_certificates_dict()
1009        if pghandler:
1010            pghandler.init('Refreshing catalog: %s' % self.absolute_url(1), num_objects)
1011        for i in xrange(num_objects):
1012            if pghandler: pghandler.report(i)
1013            student_brain = students[i]
1014            spath = student_brain.getPath()
1015            student_object = student_brain.getObject()
1016            data = {}
1017            sid = data['id'] = student_brain.getId
1018            data['review_state'] = student_brain.review_state
1019            sub_objects = False
1020            for pt in self.affected_types.keys():
1021                modified = True
1022                try:
1023                    doc = getattr(student_object,self.affected_types[pt]['id']).getContent()
1024                    sub_objects = True
1025                except:
1026                    #from pdb import set_trace;set_trace()
1027                    continue
1028                for field in self.affected_types[pt]['fields']:
1029                    if hasattr(self,'get_from_doc_%s' % field):
1030                        data[field] = getattr(self,'get_from_doc_%s' % field)(doc,
1031                                                                              cached_data=cached_data)
1032                    else:
1033                        data[field] = getattr(doc,field,None)
1034            if not sub_objects:
1035                import_res = self.returning_import(id = sid)
1036                if not import_res:
1037                    continue
1038                import_record = import_res[0]
1039                data['matric_no'] = import_record.matric_no
1040                data['sex'] = import_record.Sex == 'F'
1041                data['name'] = "%s %s %s" % (import_record.Firstname,
1042                                             import_record.Middlename,
1043                                             import_record.Lastname)
1044                data['jamb_reg_no'] = import_record.Entryregno
1045            else:
1046                study_course = getattr(student_object,'study_course',None)
1047                current_level = data.get('level',None)
1048                data['registered_courses'] = []
1049                if study_course and current_level and current_level in study_course.objectIds():
1050                    level_obj = getattr(study_course,current_level)
1051                    courses = []
1052                    for c in level_obj.objectIds():
1053                        if c.endswith('_co'):
1054                            courses.append(c[:-3])
1055                        else:
1056                            courses.append(c)
1057                    data['registered_courses'] = courses
1058            self.addRecord(**data)
1059        if pghandler: pghandler.finish()
1060    ###)
1061
1062    security.declarePrivate('notify_event_listener') ###(
1063    def notify_event_listener(self,event_type,object,infos):
1064        "listen for events"
1065        if not infos.has_key('rpath'):
1066            return
1067        pt = getattr(object,'portal_type',None)
1068        mt = getattr(object,'meta_type',None)
1069        students_catalog = self
1070        data = {}
1071        if pt == 'Student' and\
1072           mt == 'CPS Proxy Folder' and\
1073           event_type.startswith('workflow'):
1074            data['id'] = object.getId()
1075            data['review_state'] = self.portal_workflow.getInfoFor(object,'review_state',None)
1076            students_catalog.modifyRecord(**data)
1077            return
1078        rpl = infos['rpath'].split('/')
1079        if pt == 'Student' and mt == 'CPS Proxy Folder'\
1080           and event_type == "sys_add_object":
1081            student_id = object.id
1082            try:
1083                self.addRecord(id = student_id)
1084            except ValueError:
1085                pass
1086            return
1087        elif pt == 'StudentCourseResult' and mt == 'CPS Proxy Folder':
1088            if event_type not in ("sys_add_object","sys_del_object"):
1089                return
1090            level_session = getattr(object.aq_parent.getContent(),'session','unknown')
1091            if level_session not in (self.getSessionId()[-2:],'2006/2007'):
1092                return
1093            course_id = object.getId()
1094            if course_id.endswith('_co'):
1095                course_id = course_id[:-3]
1096            student_id = object.absolute_url_path().split('/')[-4]
1097            res = students_catalog(id = student_id)
1098            if not res:
1099                return
1100            student_rec = res[0]
1101            registered_courses = getattr(student_rec,'registered_courses',None)
1102            if not registered_courses:
1103                registered_courses = []
1104            #import pdb;pdb.set_trace()
1105            if event_type == "sys_add_object":
1106                if course_id not in registered_courses:
1107                    registered_courses.append(course_id)
1108                else:
1109                    return
1110            elif registered_courses and event_type == "sys_del_object":
1111                removed = False
1112                while course_id in registered_courses:
1113                    removed = True
1114                    registered_courses.remove(course_id)
1115                if not removed:
1116                    return
1117            data['id'] = student_id
1118            data['registered_courses'] = registered_courses
1119            self.modifyRecord(record = student_rec, **data)
1120            return
1121        if pt not in self.affected_types.keys():
1122            return
1123        if event_type not in ('sys_modify_object'):
1124            return
1125        if mt == 'CPS Proxy Folder':
1126            return
1127        for field in self.affected_types[pt]['fields']:
1128            if hasattr(self,'get_from_doc_%s' % field):
1129                data[field] = getattr(self,'get_from_doc_%s' % field)(object)
1130            else:
1131                data[field] = getattr(object,field)
1132        data['id'] = rpl[2]
1133        self.modifyRecord(**data)
1134    ###)
1135
1136
1137InitializeClass(StudentsCatalog)
1138
1139###)
1140
1141class CoursesCatalog(WAeUPTable): ###(
1142    security = ClassSecurityInfo()
1143
1144    meta_type = 'WAeUP Courses Catalog'
1145    name =  "courses_catalog"
1146    key = "code"
1147    def __init__(self,name=None):
1148        if name ==  None:
1149            name =  self.name
1150        WAeUPTable.__init__(self, name)
1151
1152    def manage_catalogReindex(self, REQUEST, RESPONSE, URL1): ###(
1153        """ clear the catalog, then re-index everything """
1154
1155        elapse = time.time()
1156        c_elapse = time.clock()
1157
1158        pgthreshold = self._getProgressThreshold()
1159        handler = (pgthreshold > 0) and ZLogHandler(pgthreshold) or None
1160        self.refreshCatalog(clear=1, pghandler=handler)
1161
1162        elapse = time.time() - elapse
1163        c_elapse = time.clock() - c_elapse
1164
1165        RESPONSE.redirect(
1166            URL1 +
1167            '/manage_catalogAdvanced?manage_tabs_message=' +
1168            urllib.quote('Catalog Updated \n'
1169                         'Total time: %s\n'
1170                         'Total CPU time: %s' % (`elapse`, `c_elapse`)))
1171    ###)
1172
1173    def reindexIndex(self, name, REQUEST,pghandler=None): ###(
1174        if isinstance(name, str):
1175            name = (name,)
1176        courses = self.portal_catalog(portal_type="Course")
1177        num_objects = len(courses)
1178        if pghandler:
1179            pghandler.init('Refreshing catalog: %s' % self.absolute_url(1), num_objects)
1180        for i in xrange(num_objects):
1181            if pghandler: pghandler.report(i)
1182            course_brain = courses[i]
1183            course_object = course_brain.getObject()
1184            pl = course_brain.getPath().split('/')
1185            data = {}
1186            cid = data[self.key] = course_brain.getId
1187            data['faculty'] = pl[-4]
1188            data['department'] = pl[-3]
1189            doc = course_object.getContent()
1190            for field in name:
1191                if field not in (self.key,'faculty','department'):
1192                    data[field] = getattr(doc,field)
1193            self.modifyRecord(**data)
1194        if pghandler: pghandler.finish()
1195    ###)
1196
1197    def refreshCatalog(self, clear=0, pghandler=None): ###(
1198        """ re-index everything we can find """
1199        if clear:
1200            self._catalog.clear()
1201        courses = self.portal_catalog(portal_type="Course")
1202        num_objects = len(courses)
1203        if pghandler:
1204            pghandler.init('Refreshing catalog: %s' % self.absolute_url(1), num_objects)
1205        #from pdb import set_trace;set_trace()
1206        for i in xrange(num_objects):
1207            if pghandler: pghandler.report(i)
1208            course_brain = courses[i]
1209            course_doc = course_brain.getObject().getContent()
1210            pl = course_brain.getPath().split('/')
1211            data = {}
1212            for field in self.schema():
1213                data[field] = getattr(course_doc,field,None)
1214            data[self.key] = course_brain.getId
1215            ai = pl.index('academics')
1216            data['faculty'] = pl[ai +1]
1217            data['department'] = pl[ai +2]
1218            if clear:
1219                self.addRecord(**data)
1220            else:
1221                self.modifyRecord(**data)
1222        if pghandler: pghandler.finish()
1223    ###)
1224
1225    security.declarePrivate('notify_event_listener') ###(
1226    def notify_event_listener(self,event_type,object,infos):
1227        "listen for events"
1228        if not infos.has_key('rpath'):
1229            return
1230        pt = getattr(object,'portal_type',None)
1231        mt = getattr(object,'meta_type',None)
1232        if pt != 'Course':
1233            return
1234        data = {}
1235        rpl = infos['rpath'].split('/')
1236        if event_type not in ("sys_add_object","sys_modify_object","sys_del_object"):
1237            return
1238        course_id = object.getId()
1239        data[self.key] = course_id
1240        if event_type == "sys_add_object" and mt == 'CPS Proxy Folder':
1241            try:
1242                self.addRecord(**data)
1243            except ValueError:
1244                return
1245            course_id = object.getId()
1246            doc = object.getContent()
1247            if doc is None:
1248                return
1249            for field in self.schema():
1250                data[field] = getattr(doc,field,None)
1251            data[self.key] = course_id
1252            ai = rpl.index('academics')
1253            data['faculty'] = rpl[ai +1]
1254            data['department'] = rpl[ai +2]
1255            self.modifyRecord(**data)
1256            return
1257        if event_type == "sys_del_object":
1258            self.deleteRecord(course_id)
1259            return
1260        if event_type == "sys_modify_object" and mt == 'Course':
1261            #from pdb import set_trace;set_trace()
1262            for field in self.schema():
1263                data[field] = getattr(object,field,None)
1264            course_id = object.aq_parent.getId()
1265            data[self.key] = course_id
1266            ai = rpl.index('academics')
1267            data['faculty'] = rpl[ai +1]
1268            data['department'] = rpl[ai +2]
1269            self.modifyRecord(**data)
1270    ###)
1271
1272
1273InitializeClass(CoursesCatalog)
1274###)
1275
1276class CourseResults(WAeUPTable): ###(
1277    security = ClassSecurityInfo()
1278
1279    meta_type = 'WAeUP Results Catalog'
1280    name = "course_results"
1281    key = "key" #student_id + level + course_id
1282    def __init__(self,name=None):
1283        if name ==  None:
1284            name = self.name
1285        WAeUPTable.__init__(self, name)
1286        self._queue = []
1287
1288    def addRecord(self, **data): ###(
1289        """add one record"""
1290
1291        uid = key = "%(student_id)s|%(level_id)s|%(course_id)s" % data
1292        if key in self._queue:
1293            return uid
1294        data['queue_status'] = ADDING_SHEDULED
1295        data['%s' % self.key] = uid
1296        res = self.searchResults({"%s" % self.key : uid})
1297        if len(res) > 0:
1298            raise ValueError("More than one record with uid %s" % uid)
1299        self.catalog_object(dict2ob(data), uid=uid)
1300        if not hasattr(self,'_queue'):
1301            self._queue = []
1302        self._queue.append(key)
1303        self._p_changed = 1
1304        return uid
1305    ###)
1306
1307    # def _p_resolveConflict(self, oldstate, committed, newstate):
1308    #     # Apply the changes made in going from old to newstate to
1309    #     # committed
1310
1311    #     # Note that in the case of undo, the olddata is the data for
1312    #     # the transaction being undone and newdata is the data for the
1313    #     # transaction previous to the undone transaction.
1314
1315    #     # Find the conflict policy on the new state to make sure changes
1316    #     # to it will be applied
1317
1318    #     # Committed is always the currently committed data.
1319    #     import pdb;pdb.set_trace()
1320    #     oldstate_data  =  oldstate['_queue']
1321    #     committed_data = committed['_queue']
1322    #     newstate_data  =  newstate['_queue']
1323
1324    #     # Merge newstate changes into committed
1325    #     for uid, new in newstate_data.items():
1326
1327    #         # Decide if this is a change
1328    #         old = oldstate_data.get(uid)
1329    #         current = committed_data.get(uid)
1330
1331
1332    def addMultipleRecords(self, records): ###(
1333        """add many records"""
1334        added_keys = []
1335        for data in records:
1336            uid = key = "%(student_id)s|%(level_id)s|%(course_id)s" % data
1337            if key in self._queue:
1338                continue
1339            added_keys.append(key)
1340            data['queue_status'] = ADDING_SHEDULED
1341            data['%s' % self.key] = uid
1342            res = self.searchResults({"%s" % self.key : uid})
1343            if len(res) > 0:
1344                raise ValueError("More than one record with uid %s" % uid)
1345            self.catalog_object(dict2ob(data), uid=uid)
1346        if not hasattr(self,'_queue'):
1347            self._queue = []
1348        self._queue.extend(added_keys)
1349        self._p_changed = 1
1350        return uid
1351    ###)
1352
1353    def deleteRecord(self, uid): ###(
1354        self.uncatalog_object(uid)
1355        if uid in self._queue:
1356            self._queue.remove(uid)
1357    ###)
1358
1359    def updateCourseResults(self,student_id,level_id,portal_catalog_results,course_results): ###(
1360        # query = Eq('path',"%s/campus/students/%s/study_course/%s" %
1361        #            (self.portal_url.getPortalPath(),
1362        #             student_id,
1363        #             level_id)) &\
1364        #             Eq('portal_type', "StudentCourseResult")
1365        # level_results = self.portal_catalog_real.evalAdvancedQuery(query)
1366        # level_results = [r for r in course_results
1367        #                  if level_id in r.relative_path.split('/')]
1368        course_results_ids = [cr.getId for cr in course_results]
1369        for r in portal_catalog_results:
1370            if r.getId in course_results_ids:
1371                continue
1372            course_result_doc = r.getObject().getContent()
1373            data = {}
1374            course_id = r.getId
1375            for field in self.schema():
1376                data[field] = getattr(course_result_doc,field,'')
1377            data['key'] = key = "%(student_id)s|%(level_id)s|%(course_id)s" % vars()
1378            data['student_id'] = student_id
1379            data['level_id'] = level_id
1380            data['queue_status'] = OBJECT_CREATED
1381            data['code'] = course_id
1382            self.catalog_object(dict2ob(data), uid=key)
1383        query = Eq('student_id',student_id) & Eq('level_id', level_id)
1384        return self.course_results.evalAdvancedQuery(query)
1385    ###)
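    # Illustrative note on the sync above: any StudentCourseResult found in
    # portal_catalog but missing from this catalog is cataloged directly with
    # queue_status OBJECT_CREATED under the key
    # '<student_id>|<level_id>|<course_id>', and the refreshed result set for
    # that student and level is returned.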
1386
1387    def getCourses(self,student_id,level_id): ###(
1388        level_path = "%s/campus/students/%s/study_course/%s" % (self.portal_url.getPortalPath(),
1389                                                                student_id,
1390                                                                level_id)
1391        # portal_catalog_query = Eq('path',level_path) &\
1392        #                        Eq('portal_type', "StudentCourseResult")
1393        # portal_catalog_results = self.portal_catalog_real.evalAdvancedQuery(portal_catalog_query)
1394        portal_catalog_results = self.portal_catalog(path = level_path,
1395                                                     portal_type = "StudentCourseResult")
1396        query = Eq('student_id',student_id) & Eq('level_id', level_id)
1397        course_results = self.course_results.evalAdvancedQuery(query)
1398        if len(course_results) != len(portal_catalog_results):
1399            course_results = self.updateCourseResults(student_id,
1400                                                      level_id,
1401                                                      portal_catalog_results,
1402                                                      course_results)
1403        carry_overs = []
1404        normal = []
1405        credits = 0
1406        for brain in course_results:
1407            d = {}
1408            credits += int(brain.credits)
1409            for field in self.schema():
1410                d[field] = getattr(brain,field,'')
1411            d['sheduled'] = brain.queue_status == ADDING_SHEDULED
1412            d['coe'] = 'Elective'
1413            if brain.core_or_elective:
1414                d['coe'] = 'Core'
1415            id = code = d['id'] = brain.code
1416            is_carry_over = False
1417            if code.endswith('_co'):
1418                is_carry_over = True
1419                code = code[:-3]
1420            d['code'] = code
1421            d['title'] = self.courses_catalog.evalAdvancedQuery(Eq('code',code))[0].title
1422            if is_carry_over:
1423                d['coe'] = 'Core'
1424                carry_overs.append(d)
1425            else:
1426                normal.append(d)
1427        normal.sort(cmp=lambda x,y: cmp(x['semester'], y['semester']))
1428        carry_overs.sort(cmp=lambda x,y: cmp(x['semester'], y['semester']))
1429        return credits,carry_overs,normal
1430    ###)
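    # Illustrative call with hypothetical ids: the method returns the summed
    # credits and two lists of dicts, carry-over results (course codes ending
    # in '_co', reported under the stripped code) and normal results, both
    # sorted by semester.
    #
    #   credits, carry_overs, normal = self.getCourses('M123456', '200')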
1431
1432    def addObject(self,record): ###(
1433        key = record.key
1434        student_id,level_id,course_id = key.split('|')
1435        level = getattr(getattr(self.portal_url.getPortalObject().campus.students,student_id).study_course,level_id)
1436        cr_id = level.invokeFactory('StudentCourseResult', course_id)
1437        course_result = getattr(level,cr_id)
1438        self.portal_workflow.doActionFor(course_result,'open')
1439        d = {}
1440        for field in self.schema():
1441            d[field] = getattr(record,field,'')
1442        course_result.getContent().edit(mapping=d)
1443    ###)
1444
1445    security.declareProtected(ModifyPortalContent,"process_queue") ###(
1446    def process_queue(self,limit=None):
1447        """Create the scheduled objects and remove their keys from the queue;
1448        if limit is given, at most `limit` queued records are processed.
1449        Returns (number processed, number still queued)."""
1450        if not hasattr(self,'_queue'):
1451            return 0,0
1452        queue= self._queue
1453        if not limit or len(queue) <= limit:
1454            keys = self._queue[:]
1455        else:
1456            keys = queue[:limit]
1457        if not keys:
1458            records = self.evalAdvancedQuery(Eq('queue_status',ADDING_SHEDULED))
1459        else:
1460            records = self.evalAdvancedQuery(In("%s" % self.key,keys))
1461        for record in records:
1462            if record.queue_status == OBJECT_CREATED:
1463                continue
1464            self.addObject(record)
1465            data = {}
1466            data['key'] = record.key
1467            data['queue_status'] = OBJECT_CREATED
1468            self.modifyRecord(**data)
1469        count = 0
1470        for key in keys:
1471            count +=1
1472            if key in self._queue:
1473                self._queue.remove(key)
1474        self._p_changed = 1
1475        return count,len(self._queue)
1476    ###)
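    # Usage sketch (illustrative, e.g. from a maintenance script run with
    # ModifyPortalContent permission): create at most 50 queued objects and
    # learn how many were processed and how many keys remain queued.
    #
    #   done, remaining = context.course_results.process_queue(limit=50)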
1477
1478    def manage_catalogClear(self, REQUEST=None, RESPONSE=None, URL1=None): ###(
1479        """ clears the whole enchilada """
1480        self._catalog.clear()
1481
1482        if REQUEST and RESPONSE:
1483            RESPONSE.redirect(
1484              URL1 +
1485              '/manage_catalogAdvanced?manage_tabs_message=Catalog%20Cleared')
1486    ###)
1487
1488    def manage_catalogReindex(self, REQUEST, RESPONSE, URL1): ###(
1489        """ clear the catalog, then re-index everything """
1490
1491        elapse = time.time()
1492        c_elapse = time.clock()
1493
1494        pgthreshold = self._getProgressThreshold()
1495        handler = (pgthreshold > 0) and ZLogHandler(pgthreshold) or None
1496        self.refreshCatalog(clear=1, pghandler=handler)
1497
1498        elapse = time.time() - elapse
1499        c_elapse = time.clock() - c_elapse
1500
1501        RESPONSE.redirect(
1502            URL1 +
1503            '/manage_catalogAdvanced?manage_tabs_message=' +
1504            urllib.quote('Catalog Updated \n'
1505                         'Total time: %s\n'
1506                         'Total CPU time: %s' % (`elapse`, `c_elapse`)))
1507    ###)
1508
1509    def refreshCatalog(self, clear=1, pghandler=None): ###(
1510        """ re-index everything we can find """
1511        students_folder = self.portal_url.getPortalObject().campus.students
1512        if clear:
1513            self._catalog.clear()
1514        course_results = self.portal_catalog(portal_type="StudentCourseResult")
1515        num_objects = len(course_results)
1516        if pghandler:
1517            pghandler.init('Refreshing catalog: %s' % self.absolute_url(1), num_objects)
1518        #import pdb;pdb.set_trace()
1519        for i in xrange(num_objects):
1520            if pghandler:
1521                pghandler.report(i)
1522            course_result_brain = course_results[i]
1523            path_list = course_result_brain.getPath().split('/')
1524            course_result_doc = course_result_brain.getObject().getContent()
1525            data = {}
1526            level_id = path_list[-2]
1527            course_id = path_list[-1]
1528            student_id = path_list[-4]
1529            for field in self.schema():
1530                data[field] = getattr(course_result_doc,field,'')
1531            data['key'] = key = "%(student_id)s|%(level_id)s|%(course_id)s" % vars()
1532            data['student_id'] = student_id
1533            data['level_id'] = level_id
1534            data['queue_status'] = OBJECT_CREATED
1535            self.catalog_object(dict2ob(data), uid=key)
1536        if pghandler: pghandler.finish()
1537    ###)
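    # Path-to-key mapping used above (illustrative path): a brain at
    #   .../campus/students/M123456/study_course/200/MTH201
    # yields student_id 'M123456' (path_list[-4]), level_id '200'
    # (path_list[-2]) and course_id 'MTH201' (path_list[-1]), hence the
    # catalog key 'M123456|200|MTH201'.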
1538
1539    security.declarePrivate('notify_event_listener') ###(
1540    def notify_event_listener(self,event_type,object,infos):
1541        "listen for events"
1542        if not infos.has_key('rpath'):
1543            return
1544        pt = getattr(object,'portal_type',None)
1545        mt = getattr(object,'meta_type',None)
1546        data = {}
1547        rpl = infos['rpath'].split('/')
1548        if mt == 'CPS Proxy Folder':
1549            return
1550        if pt == 'StudentCourseResult' and event_type == "sys_modify_object":
1551            data["%s" % self.key] = uid = "%s|%s|%s" % (rpl[-5],rpl[-3],rpl[-2])
1552            records = self.searchResults({"%s" % self.key : uid})
1553            if len(records) > 1:
1554                # Should not happen, but check anyway.
1555                raise ValueError("More than one record with uid %s" % uid)
1556            if len(records) == 0:
1557                raise KeyError("No record for uid %s" % uid)
1558            record = records[0]
1559            for field in ('core_or_elective','score'):
1560                value = getattr(object,field,None)
1561                data[field] = value
1562            try:
1563                self.modifyRecord(record,**data)
1564            except KeyError:
1565                pass
1566        if pt == 'StudentStudyLevel' and event_type == "sys_del_object":
1567            #import pdb;pdb.set_trace()
1568            student_id = rpl[-3]
1569            level_id = rpl[-1]
1570            res = self.searchResults(student_id = student_id,
1571                                     level_id = level_id)
1572            for cr in res:
1573                self.deleteRecord(cr.key)
1574    ###)
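    # rpath index sketch (illustrative paths): a modified StudentCourseResult
    # document at campus/students/M123456/study_course/200/MTH201/<doc>
    # yields uid rpl[-5]|rpl[-3]|rpl[-2] == 'M123456|200|MTH201'; a deleted
    # StudentStudyLevel at campus/students/M123456/study_course/200 gives
    # student_id rpl[-3] == 'M123456' and level_id rpl[-1] == '200'.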
1575
1576InitializeClass(CourseResults)
1577###)
1578
1579class OnlinePaymentsImport(WAeUPTable): ###(
1580
1581    meta_type = 'WAeUP Online Payment Transactions'
1582    name = "online_payments_import"
1583    key = "order_id"
1584    def __init__(self,name=None):
1585        if name ==  None:
1586            name = self.name
1587        WAeUPTable.__init__(self, name)
1588
1589
1590InitializeClass(OnlinePaymentsImport)
1591###)
1592
1593class ReturningImport(WAeUPTable): ###(
1594
1595    meta_type = 'Returning Import Table'
1596    name = "returning_import"
1597    key = "matric_no"
1598    def __init__(self,name=None):
1599        if name ==  None:
1600            name = self.name
1601        WAeUPTable.__init__(self, name)
1602
1603
1604InitializeClass(ReturningImport)
1605###)
1606
1607class ResultsImport(WAeUPTable): ###(
1608
1609    meta_type = 'Results Import Table'
1610    name = "results_import"
1611    key = "key"
1612    def __init__(self,name=None):
1613        if name ==  None:
1614            name = self.name
1615        WAeUPTable.__init__(self, name)
1616
1617
1618InitializeClass(ResultsImport)
1619
1620###)
1621
1622class PaymentsCatalog(WAeUPTable): ###(
1623
1624    meta_type = 'WAeUP Payments Catalog'
1625    name = "payments_catalog"
1626    key = "id"
1627    def __init__(self,name=None):
1628        if name ==  None:
1629            name = self.name
1630        WAeUPTable.__init__(self, name)
1631
1632
1633InitializeClass(PaymentsCatalog)
1634
1635###)
1636
1637# BBB:
1638AccomodationTable = AccommodationTable