source: WAeUP_SRP/base/WAeUPTables.py @ 2659

Last change on this file since 2659 was 2632, checked in by joachim, 17 years ago

add student_levels, fix for #397,cleanup removeUnusedDocIds

  • Property svn:keywords set to Id
File size: 54.6 KB
RevLine 
[966]1#-*- mode: python; mode: fold -*-
[363]2# (C) Copyright 2005 AixtraWare <http://aixtraware.de>
3# Author: Joachim Schmitz <js@aixtraware.de>
4#
5# This program is free software; you can redistribute it and/or modify
6# it under the terms of the GNU General Public License version 2 as published
7# by the Free Software Foundation.
8#
9# This program is distributed in the hope that it will be useful,
10# but WITHOUT ANY WARRANTY; without even the implied warranty of
11# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
12# GNU General Public License for more details.
13#
14# You should have received a copy of the GNU General Public License
15# along with this program; if not, write to the Free Software
16# Foundation, Inc., 59 Temple Place - Suite 330, Boston, MA
17# 02111-1307, USA.
18#
19# $Id: WAeUPTables.py 2632 2007-11-12 17:23:46Z joachim $
20
21from zope.interface import implements
22from Globals import InitializeClass
23from Products.ZCatalog.ZCatalog import ZCatalog
[1620]24from Products.ZCatalog.ProgressHandler import ZLogHandler
[780]25from AccessControl import ClassSecurityInfo
26from Products.CMFCore.permissions import ModifyPortalContent
[2094]27from Products.CMFCore.utils import getToolByName
28from Products.CMFCore.CatalogTool import CatalogTool
29from Products.CPSSchemas.StorageAdapter import MappingStorageAdapter
30from Products.CPSSchemas.DataStructure import DataStructure
31from Products.CPSSchemas.DataModel import DataModel
32from Products.AdvancedQuery import Eq, Between, Le,In
[1700]33import urllib
[1620]34import DateTime,time
[780]35import csv,re
36import logging
37import Globals
38p_home = Globals.package_home(globals())
39i_home = Globals.INSTANCE_HOME
40
# Workflow/state marker strings.
# NOTE(review): "SHEDULED"/"sheduled" is a long-standing misspelling of
# "scheduled"; the string value may be persisted or compared elsewhere,
# so both the name and the value are kept as-is for compatibility.
ADDING_SHEDULED = "adding_sheduled"
OBJECT_CREATED = "object_created"

[363]44from interfaces import IWAeUPTable
45
class AttributeHolder(object):
    """Empty container object; dict2ob hangs arbitrary attributes on it."""
    pass

def dict2ob(dict):
    """Return an AttributeHolder whose attributes mirror the mapping's items."""
    ob = AttributeHolder()
    for key in dict.keys():
        setattr(ob, key, dict[key])
    return ob
54
class WAeUPTable(ZCatalog): ###(
    """Base class for the WAeUP catalog tables.

    A thin layer over ZCatalog that treats catalog records as table rows,
    keyed by the class attribute ``key`` which subclasses must define.
    """

    implements(IWAeUPTable)
    security = ClassSecurityInfo()
    meta_type = None  # subclasses override with their Zope meta_type string

    def __init__(self,name=None):
        # fall back to the class-level default catalog id
        if name ==  None:
            name = self.name
        ZCatalog.__init__(self,name)
[2099]65
[2094]66    def refreshCatalog(self, clear=0, pghandler=None): ###(
[1620]67        """ don't refresh for a normal table """
68
69        if self.REQUEST and self.REQUEST.RESPONSE:
70            self.REQUEST.RESPONSE.redirect(
71              URL1 +
72              '/manage_catalogAdvanced?manage_tabs_message=Catalog%20refresh%20not%20implemented')
73
[2094]74###)
75
76    def manage_catalogClear(self, REQUEST=None, RESPONSE=None, URL1=None): ###(
[1620]77        """ clears the whole enchilada """
[1986]78
[1916]79        #if REQUEST and RESPONSE:
80        #    RESPONSE.redirect(
81        #      URL1 +
82        #      '/manage_catalogAdvanced?manage_tabs_message=Catalog%20Clearing%20disabled')
[1620]83
[1916]84        self._catalog.clear()
[1620]85        if REQUEST and RESPONSE:
86            RESPONSE.redirect(
87              URL1 +
[1916]88              '/manage_catalogAdvanced?manage_tabs_message=Catalog%20cleared')
[1620]89
[2094]90###)
91
[2632]92    def record2dict(self,fields,record): ###(
[2189]93        d = {}
94        for key in fields:
95            v = getattr(record, key, None)
[2192]96            if key == 'sex':
97                if v:
98                    v = 'F'
99                else:
100                    v = 'M'
101                d[key] = v
102            elif v:
[2189]103                if key == 'lga':
104                    v = self.portal_vocabularies.local_gov_areas.get(v)
[2627]105                elif key == 'aos':
106                    v = self.portal_vocabularies.aos.get(v)
[2189]107                d[key] = v
108            else:
109                d[key] = ''
110        return d
[2191]111
[2632]112###)
113
[2094]114    def addRecord(self, **data): ###(
[502]115        # The uid is the same as "bed".
116        uid = data[self.key]
117        res = self.searchResults({"%s" % self.key : uid})
118        if len(res) > 0:
119            raise ValueError("More than one record with uid %s" % uid)
120        self.catalog_object(dict2ob(data), uid=uid)
121        return uid
[834]122
[2094]123###)
124
[363]125    def deleteRecord(self, uid):
126        self.uncatalog_object(uid)
[834]127
[502]128    def searchAndSetRecord(self, **data):
129        raise NotImplemented
130
    def modifyRecord(self, record=None, **data): ###(
        # Update an existing record in place. data must contain self.key;
        # when ``record`` is not passed, the record is looked up by that
        # uid. Raises KeyError when no record exists and ValueError when
        # the uid is ambiguous.
        #records = self.searchResults(uid=uid)
        uid = data[self.key]
        if record is None:
            records = self.searchResults({"%s" % self.key : uid})
            if len(records) > 1:
                # Can not happen, but anyway...
                raise ValueError("More than one record with uid %s" % uid)
            if len(records) == 0:
                raise KeyError("No record for uid %s" % uid)
            record = records[0]
        record_data = {}
        # start from the currently stored values of every schema/index field
        for field in self.schema() + self.indexes():
            record_data[field] = getattr(record, field)
        # Add the updated data:
        record_data.update(data)
        # re-catalog under the same uid, overwriting the old entry
        self.catalog_object(dict2ob(record_data), uid)

###)
150
151    def reindexIndex(self, name, REQUEST,pghandler=None): ###(
[1062]152        if isinstance(name, str):
[2094]153            name =  (name,)
[1062]154        paths = self._catalog.uids.items()
155        i = 0
156        #import pdb;pdb.set_trace()
157        for p,rid in paths:
158            i += 1
159            metadata = self.getMetadataForRID(rid)
160            record_data = {}
161            for field in name:
162                record_data[field] = metadata.get(field)
163            uid = metadata.get(self.key)
164            self.catalog_object(dict2ob(record_data), uid, idxs=name,
165                                update_metadata=0)
[1082]166
[2094]167###)
168
169    security.declareProtected(ModifyPortalContent,"exportAllRecords") ###(
[780]170    def exportAllRecords(self):
171        "export a WAeUPTable"
172        #import pdb;pdb.set_trace()
173        fields = [field for field in self.schema()]
174        format = ','.join(['"%%(%s)s"' % fn for fn in fields])
175        csv = []
176        csv.append(','.join(['"%s"' % fn for fn in fields]))
177        for uid in self._catalog.uids:
178            records = self.searchResults({"%s" % self.key : uid})
179            if len(records) > 1:
180                # Can not happen, but anyway...
181                raise ValueError("More than one record with uid %s" % uid)
182            if len(records) == 0:
183                raise KeyError("No record for uid %s" % uid)
184            rec = records[0]
185            csv.append(format % rec)
186        current = DateTime.DateTime().strftime("%d-%m-%y_%H_%M_%S")
187        open("%s/import/%s-%s.csv" % (i_home,self.getId(),current),"w+").write('\n'.join(csv))
[2094]188
189###)
190
    security.declareProtected(ModifyPortalContent,"dumpAll")###(
    def dumpAll(self):
        """dump all data in the table to a csv"""
        # Writes all records in chunks to <instance>/export/<name>_<stamp>.csv,
        # logging throughput and an ETA, then redirects back to the caller.
        member = self.portal_membership.getAuthenticatedMember()
        logger = logging.getLogger('WAeUPTables.dump_%s' % self.__name__)
        current = DateTime.DateTime().strftime("%d-%m-%y_%H_%M_%S")
        export_file = "%s/export/%s_%s.csv" % (i_home,self.__name__,current,)
        print export_file
        res_list = []
        lines = []
        # prefer an explicit export_keys list; otherwise dump the full schema
        if hasattr(self,"export_keys"):
            fields = self.export_keys
        else:
            fields = []
            for f in self.schema():
                fields.append(f)
        headline = ','.join(fields)
        #open(export_file,"a").write(headline +'\n')
        out = open(export_file,"wb")
        out.write(headline +'\n')
        out.close()
        out = open(export_file,"a")
        csv_writer = csv.DictWriter(out,fields,)
        format = '"%(' + ')s","%('.join(fields) + ')s"'
        records = self()
        nr2export = len(records)
        logger.info('%s starts dumping, %s records to export' % (member,nr2export))
        chunk = 2000
        total = 0
        start = DateTime.DateTime().timeTime()
        start_chunk = DateTime.DateTime().timeTime()
        for record in records:
            not_all = False
            d = self.record2dict(fields,record)
            #d['state'],d['lga'] = formatLGA(d['lga'],voc = self.portal_vocabularies.local_gov_areas)
            #lines.append(format % d)
            lines.append(d)
            total += 1
            # flush the buffered rows every ``chunk`` records and at the end
            if total and not total % chunk or total == len(records):
                #open(export_file,"a").write('\n'.join(lines) +'\n')
                csv_writer.writerows(lines)
                anz = len(lines)
                logger.info("wrote %(anz)d  total written %(total)d" % vars())
                end_chunk = DateTime.DateTime().timeTime()
                duration = end_chunk-start_chunk
                per_record = duration/anz
                till_now = end_chunk - start
                avarage_per_record = till_now/total
                # log an ETA based on the average time per record so far
                estimated_end = DateTime.DateTime(start + avarage_per_record * nr2export)
                estimated_end = estimated_end.strftime("%H:%M:%S")
                logger.info('%(duration)4.1f, %(per_record)4.3f,end %(estimated_end)s' % vars())
                start_chunk = DateTime.DateTime().timeTime()
                lines = []
        end = DateTime.DateTime().timeTime()
        logger.info('total time %6.2f m' % ((end-start)/60))
        import os
        filename, extension = os.path.splitext(export_file)
        from subprocess import call
        msg = "wrote %(total)d records to %(export_file)s" % vars()
        #try:
        #    retcode = call('gzip %s' % (export_file),shell=True)
        #    if retcode == 0:
        #        msg = "wrote %(total)d records to %(export_file)s.gz" % vars()
        #except OSError, e:
        #    retcode = -99
        #    logger.info("zip failed with %s" % e)
        logger.info(msg)
        args = {'portal_status_message': msg}
        #url = self.REQUEST.get('URL1') + '?' + urlencode(args)
        url = self.REQUEST.get('URL2')
        return self.REQUEST.RESPONSE.redirect(url)
    ###)
263
    security.declarePrivate("_import_old") ###(
    def _import_old(self,filename,schema,layout, mode,logger):
        "import data from csv"
        # Validates every row of <instance>/import/<filename>.csv against
        # the given CPS schema/layout and splits the rows into valid
        # (DataModel) and invalid sets; valid/invalid rows are also echoed
        # to *_imported*.csv / *_not_imported*.csv. Returns a result dict
        # with the counters, record lists and file names.
        import transaction
        import random
        pm = self.portal_membership
        member = pm.getAuthenticatedMember()
        current = DateTime.DateTime().strftime("%d-%m-%y_%H_%M_%S")
        import_fn = "%s/import/%s.csv" % (i_home,filename)
        imported_fn = "%s/import/%s_imported%s.csv" % (i_home,filename,current)
        not_imported_fn = "%s/import/%s_not_imported%s.csv" % (i_home,filename,current)
        start = True
        tr_count = 1
        total_imported = 0
        total_not_imported = 0
        total = 0
        iname =  "%s" % filename
        not_imported = []
        imported = []
        valid_records = []
        invalid_records = []
        d = {}
        d['mode'] = mode
        d['imported'] = total_imported
        d['not_imported'] = total_not_imported
        d['valid_records'] = valid_records
        d['invalid_records'] = invalid_records
        d['import_fn'] = import_fn
        d['imported_fn'] = imported_fn
        d['not_imported_fn'] = not_imported_fn
        if schema is None:
            em = 'No schema specified'
            logger.error(em)
            return d
        if layout is None:
            em = 'No layout specified'
            logger.error(em)
            return d
        validators = {}
        # collect one validator per layout widget; abort if a widget has none
        for widget in layout.keys():
            try:
                validators[widget] = layout[widget].validate
            except AttributeError:
                logger.info('%s has no validate attribute' % widget)
                return d
        # if mode == 'edit':
        #     importer = self.importEdit
        # elif mode == 'add':
        #     importer = self.importAdd
        # else:
        #     importer = None
        try:
            items = csv.DictReader(open(import_fn,"rb"),
                                   dialect="excel",
                                   skipinitialspace=True)
        except:
            em = 'Error reading %s.csv' % filename
            logger.error(em)
            return d
        #import pdb;pdb.set_trace()
        for item in items:
            if start:
                # first row: derive the importable columns from the csv
                # header (columns named ignore* or ALL-UPPERCASE are skipped)
                start = False
                logger.info('%s starts import from %s.csv' % (member,filename))
                #import_keys = [k for k in item.keys() if not k.startswith('ignore')]
                attrs = csv.reader(open("%s/import/%s.csv" % (i_home,filename),"rb"),
                                   dialect="excel",
                                   skipinitialspace=True).next()
                import_keys = [k for k in attrs if not (k.startswith('ignore') or k.isupper())]
                diff2schema = set(import_keys).difference(set(schema.keys()))
                diff2layout = set(import_keys).difference(set(layout.keys()))
                if diff2layout:
                    em = "not ignorable key(s) %s found in heading" % diff2layout
                    logger.info(em)
                    return d
                s = ','.join(['"%s"' % fn for fn in import_keys])
                open(not_imported_fn,"a").write(s + ',"Error"'+ '\n')
                #s = '"id",' + s
                open(imported_fn,"a").write(s + '\n')
                format = ','.join(['"%%(%s)s"' % fn for fn in import_keys])
                format_error = format + ',"%(Error)s"'
                #format = '"%(id)s",'+ format
                adapters = [MappingStorageAdapter(schema, item)]
            dm = DataModel(item, adapters,context=self)
            ds = DataStructure(data=item,datamodel=dm)
            error_string = ""
            #import pdb;pdb.set_trace()
            # run every widget validator; collect all errors for this row
            for k in import_keys:
                if not validators[k](ds,mode=mode):
                    error_string += " %s : %s" % (k,ds.getError(k))
            # if not error_string and importer:
            #     item.update(dm)
            #     item['id'],error = importer(item)
            #     if error:
            #         error_string += error
            if error_string:
                item['Error'] = error_string
                invalid_records.append(dm)
                not_imported.append(format_error % item)
                total_not_imported += 1
            else:
                em = format % item
                valid_records.append(dm)
                imported.append(em)
                #logger.info("%(total_imported)d of %(total)d %(em)s" % vars())
                tr_count += 1
                total_imported += 1
            total += 1
        if len(imported) > 0:
            open(imported_fn,"a").write('\n'.join(imported))
        if len(not_imported) > 0:
            open(not_imported_fn,"a").write('\n'.join(not_imported))
        #em = "Imported: %d, not imported: %d of total %d" % (total_imported,total_not_imported,total)
        d['imported'] = total_imported
        d['not_imported'] = total_not_imported
        d['valid_records'] = valid_records
        d['invalid_records'] = invalid_records
        d['imported_fn'] = imported_fn
        d['not_imported_fn'] = not_imported_fn
        #logger.info(em)
        return d
    ###)
[2185]386
387    security.declarePrivate("_import") ###(
388    def _import_new(self,csv_items,schema, layout, mode,logger):
389        "import data from csv.Dictreader Instance"
390        start = True
391        tr_count = 1
392        total_imported = 0
393        total_not_imported = 0
394        total = 0
395        iname =  "%s" % filename
396        not_imported = []
397        valid_records = []
398        invalid_records = []
399        duplicate_records = []
400        d = {}
401        d['mode'] = mode
402        d['valid_records'] = valid_records
403        d['invalid_records'] = invalid_records
404        d['invalid_records'] = duplicate_records
405        # d['import_fn'] = import_fn
406        # d['imported_fn'] = imported_fn
407        # d['not_imported_fn'] = not_imported_fn
408        validators = {}
409        for widget in layout.keys():
410            try:
411                validators[widget] = layout[widget].validate
412            except AttributeError:
413                logger.info('%s has no validate attribute' % widget)
414                return d
415        for item in csv_items:
416            if start:
417                start = False
418                logger.info('%s starts import from %s.csv' % (member,filename))
419                #import_keys = [k for k in item.keys() if not k.startswith('ignore')]
420                attrs = csv.reader(open("%s/import/%s.csv" % (i_home,filename),"rb")).next()
421                import_keys = [k for k in attrs if not (k.startswith('ignore') or k.isupper())]
422                diff2schema = set(import_keys).difference(set(schema.keys()))
423                diff2layout = set(import_keys).difference(set(layout.keys()))
424                if diff2layout:
425                    em = "not ignorable key(s) %s found in heading" % diff2layout
426                    logger.info(em)
427                    return d
428                # s = ','.join(['"%s"' % fn for fn in import_keys])
429                # open(not_imported_fn,"a").write(s + ',"Error"'+ '\n')
430                # #s = '"id",' + s
431                # open(imported_fn,"a").write(s + '\n')
432                # format = ','.join(['"%%(%s)s"' % fn for fn in import_keys])
433                # format_error = format + ',"%(Error)s"'
434                # #format = '"%(id)s",'+ format
435                adapters = [MappingStorageAdapter(schema, item)]
436            dm = DataModel(item, adapters,context=self)
437            ds = DataStructure(data=item,datamodel=dm)
438            error_string = ""
439            for k in import_keys:
440                if not validators[k](ds,mode=mode):
441                    error_string += " %s : %s" % (k,ds.getError(k))
442            if error_string:
443                item['Error'] = error_string
444                #invalid_records.append(dm)
445                invalid_records.append(item)
446                total_not_imported += 1
447            else:
448                em = format % item
449                valid_records.append(dm)
450                #logger.info("%(total_imported)d of %(total)d %(em)s" % vars())
451                tr_count += 1
452                total_imported += 1
453            total += 1
454        # if len(imported) > 0:
455        #     open(imported_fn,"a").write('\n'.join(imported))
456        # if len(not_imported) > 0:
457        #     open(not_imported_fn,"a").write('\n'.join(not_imported))
458        #em = "Imported: %d, not imported: %d of total %d" % (total_imported,total_not_imported,total)
459        d['imported'] = total_imported
460        d['not_imported'] = total_not_imported
461        d['valid_records'] = valid_records
462        d['invalid_records'] = invalid_records
463        return d
464    ###)
465
[2396]466    security.declarePublic("missingValue")###(
467    def missingValue(self):
468        from Missing import MV
469        return MV
470    ###)
[2094]471###)
[834]472
class AccommodationTable(WAeUPTable): ###(
    """Catalog of bed-space records, keyed by bed id."""

    meta_type = 'WAeUP Accommodation Tool'
    name = "portal_accommodation"
    key = "bed"
    def __init__(self,name=None):
        # fall back to the class-level default catalog id
        if name ==  None:
            name = self.name
        WAeUPTable.__init__(self, name)

    def searchAndReserveBed(self, student_id,bed_type):
        # Reserve the first free bed of bed_type for student_id.
        # Returns (1, bed) on success, (-1, msg) when the student already
        # holds a bed, and (-2, msg) when no bed of that type is free.
        records = self.searchResults({'student' : student_id})
        if len(records) > 0:
            return -1,"Student with Id %s already booked bed %s." % (student_id,records[0].bed)

        # free beds are those whose ``student`` field is empty
        records = [r for r in self.searchResults({'bed_type' : bed_type}) if not r.student]
        #import pdb;pdb.set_trace()
        if len(records) == 0:
            return -2,"No bed available"
        rec = records[0]
        self.modifyRecord(bed=rec.bed,student=student_id)
        s_logger = logging.getLogger('WAeUPTables.AccommodationTable.searchAndReserveBed')
        s_logger.info('%s reserved bed %s' % (student_id,rec.bed))
        return 1,rec.bed


InitializeClass(AccommodationTable)
[411]500
[1146]501###)
502
class PinTable(WAeUPTable): ###(
    """Catalog of activation PINs, keyed by the pin itself."""
    from ZODB.POSException import ConflictError
    meta_type = 'WAeUP Pin Tool'
    name = "portal_pins"
    key = 'pin'
    def __init__(self,name=None):
        # fall back to the class-level default catalog id
        if name ==  None:
            name = self.name
        WAeUPTable.__init__(self, name)


    def searchAndSetRecord(self, uid, student_id,prefix):
        # Try to assign pin ``uid`` to ``student_id``. Return codes:
        #   1  pin was free and is now assigned to student_id
        #   2  pin already belongs to student_id (or a ZODB ConflictError
        #      occurred while writing)
        #   0  pin is in use by a different student
        #  -1  no such pin
        #  -3  fall-through (unreachable given the checks above; kept as a
        #      defensive sentinel)

        # The following line must be activated after resetting the
        # the portal_pins table. This is to avoid duplicate entries
        # and disable duplicate payments.

        #student_id = student_id.upper()

        records = self.searchResults(student = student_id)
        #if len(records) > 0 and prefix in ('CLR','APP'):
        #    for r in records:
        #        if r.pin != uid and r.prefix_batch.startswith(prefix):
        #            return -2
        records = self.searchResults({"%s" % self.key : uid})
        if len(records) > 1:
            # Can not happen, but anyway...
            raise ValueError("More than one record with uid %s" % uid)
        if len(records) == 0:
            return -1
        record = records[0]
        if record.student == "":
            # pin is unused: copy the stored record and stamp the student on it
            record_data = {}
            for field in self.schema() + self.indexes():
                record_data[field] = getattr(record, field)
            # Add the updated data:
            record_data['student'] = student_id
            try:
                self.catalog_object(dict2ob(record_data), uid)
                return 1
            except ConflictError:
                return 2
        if record.student.upper() != student_id.upper():
            return 0
        if record.student.upper() == student_id.upper():
            return 2
        return -3
InitializeClass(PinTable)
[1146]551###)
[966]552
class PumeResultsTable(WAeUPTable): ###(
    """Catalog of PUME screening results, keyed by JAMB registration number."""

    meta_type = 'WAeUP PumeResults Tool'
    name = "portal_pumeresults"
    key = "jamb_reg_no"

    def __init__(self, name=None):
        # default to the canonical catalog id when none is supplied
        if name is None:
            name = self.name
        WAeUPTable.__init__(self, name)


InitializeClass(PumeResultsTable)
565
[1146]566###)
[971]567
class ApplicantsCatalog(WAeUPTable): ###(
    """Catalog of application records, keyed by registration number (reg_no)."""

    meta_type = 'WAeUP Applicants Catalog'
    name = "applicants_catalog"
    key = "reg_no"
    security = ClassSecurityInfo()
    # former explicit export column list; kept for reference — dumpAll now
    # falls back to the full schema when export_keys is not defined
    #export_keys = (
    #               "reg_no",
    #               "status",
    #               "lastname",
    #               "sex",
    #               "date_of_birth",
    #               "lga",
    #               "email",
    #               "phone",
    #               "passport",
    #               "entry_mode",
    #               "pin",
    #               "screening_type",
    #               "registration_date",
    #               "testdate",
    #               "application_date",
    #               "screening_date",
    #               "faculty",
    #               "department",
    #               "course1",
    #               "course2",
    #               "course3",
    #               "eng_score",
    #               "subj1",
    #               "subj1score",
    #               "subj2",
    #               "subj2score",
    #               "subj3",
    #               "subj3score",
    #               "aggregate",
    #               "course_admitted",
    #               )
    ###)

    def __init__(self,name=None):
        # fall back to the class-level default catalog id
        if name ==  None:
            name = self.name
        WAeUPTable.__init__(self, name)
612
[2185]613    security.declareProtected(ModifyPortalContent,"new_importCSV")###(
614    def new_importCSV(self,filename="JAMB_data",
615                  schema_id="application",
[2503]616                  layout_id="import_application",
[2185]617                  mode='add'):
618        """ import JAMB data """
619        current = DateTime.DateTime().strftime("%d-%m-%y_%H_%M_%S")
620        pm = self.portal_membership
621        member = pm.getAuthenticatedMember()
622        logger = logging.getLogger('WAeUPTables.ApplicantsCatalog.importCSV')
623        lock_fn = "%s/import/%s_import_lock" % (i_home,filename)
624        import_fn = "%s/import/%s.csv" % (i_home,filename)
625        if mode not in ('add','edit'):
626            logger.info("invalid mode: %s" % mode)
627        if os.path.exists(lock_fn):
628            logger.info("import of %(import_fn)s already in progress" % vars())
629            return
630        lock_file = open(lock_fn,"w")
631        lock_file.write("%(current)s \n" % vars())
632        lock_file.close()
633        invalid_fn = "%s/import/%s_not_imported%s.csv" % (i_home,filename,current)
634        duplicate_fn = "%s/import/%s_not_imported%s.csv" % (i_home,filename,current)
635        stool = getToolByName(self, 'portal_schemas')
636        ltool = getToolByName(self, 'portal_layouts')
637        schema = stool._getOb(schema_id)
638        if schema is None:
639            em = 'No such schema %s' % schema_id
640            logger.error(em)
641            return
642        for postfix in ('_import',''):
643            layout_name = "%(layout_id)s%(postfix)s" % vars()
644            if hasattr(ltool,layout_name):
645                break
646        layout = ltool._getOb(layout_name)
647        if layout is None:
648            em = 'No such layout %s' % layout_id
649            logger.error(em)
650            return
651        try:
652            csv_file = csv.DictReader(open(import_fn,"rb"))
653        except:
654            em = 'Error reading %s.csv' % filename
655            logger.error(em)
[2191]656            return
[2185]657        d = self._import_new(csv_items,schema,layout,mode,logger)
658        imported = []
659        edited = []
660        duplicates = []
661        not_found = []
662        if len(d['valid_records']) > 0:
663            for record in d['valid_records']:
664                #import pdb;pdb.set_trace()
665                if mode == "add":
666                    try:
667                        self.addRecord(**dict(record.items()))
668                        imported.append(**dict(record.items()))
669                        logger.info("added %s" % record.items())
670                    except ValueError:
671                        dupplicate.append(**dict(record.items()))
672                        logger.info("duplicate %s" % record.items())
673                elif mode == "edit":
674                    try:
675                        self.modifyRecord(**dict(record.items()))
676                        edited.append(**dict(record.items()))
677                        logger.info("edited %s" % record.items())
678                    except KeyError:
679                        not_found.append(**dict(record.items()))
680                        logger.info("not found %s" % record.items())
681        invalid = d['invalid_records']
682        for itype in ("imported","edited","not_found","duplicate","invalid"):
683            outlist = locals[itype]
684            if len(outlist):
685                d = {}
686                for k in outlist[0].keys():
687                    d[k] = k
[2191]688                outlist[0] = d
[2185]689                outfile = open("file_name_%s" % itype,'w')
690                csv.DictWriter(outfile,outlist[0].keys()).writerows(outlist)
691                logger.info("wrote %(itype)s records to %(, written to %(not_imported_fn)s" % d)
692###)
693
[2094]694    security.declareProtected(ModifyPortalContent,"importCSV")###(
695    def importCSV(self,filename="JAMB_data",
696                  schema_id="application",
[2508]697                  layout_id="application_pce",
[2094]698                  mode='add'):
699        """ import JAMB data """
700        stool = getToolByName(self, 'portal_schemas')
701        ltool = getToolByName(self, 'portal_layouts')
702        schema = stool._getOb(schema_id)
703        if schema is None:
704            em = 'No such schema %s' % schema_id
705            logger.error(em)
706            return
707        layout = ltool._getOb(layout_id)
708        if layout is None:
709            em = 'No such layout %s' % layout_id
710            logger.error(em)
711            return
[2099]712        logger = logging.getLogger('WAeUPTables.ApplicantsCatalog.importCSV')
[2185]713        d = self._import_old(filename,schema,layout,mode,logger)
[2094]714        if len(d['valid_records']) > 0:
715            for record in d['valid_records']:
716                #import pdb;pdb.set_trace()
717                if mode == "add":
718                    self.addRecord(**dict(record.items()))
719                    logger.info("added %s" % record.items())
720                elif mode == "edit":
721                    self.modifyRecord(**dict(record.items()))
722                    logger.info("edited %s" % record.items())
723                else:
724                    logger.info("invalid mode: %s" % mode)
725        logger.info("%(mode)sed %(imported)d records, invalid written to %(not_imported_fn)s" % d)
[2632]726    ###)
[2094]727
# register the class's security declarations with Zope
InitializeClass(ApplicantsCatalog)
729
730###)
731
class StudentsCatalog(WAeUPTable): ###(
    """Flat catalog of aggregated per-student data, keyed by student id."""
    security = ClassSecurityInfo()

    meta_type = 'WAeUP Students Catalog'
    name = "students_catalog"
    key = "id"
    # Maps the portal_type of a student sub-object to the catalog fields
    # that are sourced from it. NOTE(review): the code that consumes this
    # mapping is outside this view — presumably it decides which fields to
    # refresh when a sub-object changes; confirm against the rest of the
    # class.
    affected_types = {   ###(
                      'StudentApplication':
                      {'id': 'application',
                       'fields':
                       ('jamb_reg_no',
                        'entry_mode',
                        #'entry_level',
                        'entry_session',
                       )
                      },
                      'StudentClearance':
                      {'id': 'clearance',
                       'fields':
                       ('matric_no',
                        'lga',
                       )
                      },
                      'StudentPersonal':
                      {'id': 'personal',
                       'fields':
                       ('name',
                        'sex',
                        'perm_address',
                        'email',
                        'phone',
                       )
                      },
                      'StudentStudyCourse':
                      {'id': 'study_course',
                       'fields':
                       ('course', # study_course
                        'faculty', # from certificate
                        'department', # from certificate
                        'end_level', # from certificate
                        'level', # current_level
                        'mode',  # current_mode
                        'session', # current_session
                        'verdict', # current_verdict
                       )
                      },
                     }
    ###)

    def __init__(self,name=None):
        # fall back to the class-level default catalog id
        if name ==  None:
            name = self.name
        WAeUPTable.__init__(self, name)
        return
[1625]786
[1700]787    def manage_catalogClear(self, REQUEST=None, RESPONSE=None, URL1=None):
788        """ clears the whole enchilada """
789        self._catalog.clear()
[971]790
[1700]791        if REQUEST and RESPONSE:
792            RESPONSE.redirect(
793              URL1 +
794              '/manage_catalogAdvanced?manage_tabs_message=Catalog%20Cleared')
[971]795
[1700]796    def manage_catalogReindex(self, REQUEST, RESPONSE, URL1): ###(
797        """ clear the catalog, then re-index everything """
798
799        elapse = time.time()
800        c_elapse = time.clock()
801
802        pgthreshold = self._getProgressThreshold()
803        handler = (pgthreshold > 0) and ZLogHandler(pgthreshold) or None
804        self.refreshCatalog(clear=1, pghandler=handler)
805
806        elapse = time.time() - elapse
807        c_elapse = time.clock() - c_elapse
808
809        RESPONSE.redirect(
810            URL1 +
811            '/manage_catalogAdvanced?manage_tabs_message=' +
812            urllib.quote('Catalog Updated \n'
813                         'Total time: %s\n'
814                         'Total CPU time: %s' % (`elapse`, `c_elapse`)))
815    ###)
816
[2084]817    def fill_certificates_dict(self): ###(
[2078]818        "return certificate data in  dict"
819        certificates_brains = self.portal_catalog(portal_type ='Certificate')
820        d = {}
821        for cb in certificates_brains:
822            certificate_doc = cb.getObject().getContent()
823            cb_path = cb.getPath().split('/')
824            ld = {}
825            ld['faculty'] = cb_path[-4]
826            ld['department'] = cb_path[-3]
827            ld['end_level'] = getattr(certificate_doc,'end_level','999')
828            d[cb.getId] = ld
829        return d
[2084]830    ###)
831
[2078]832    def get_from_doc_department(self,doc,cached_data={}): ###(
[1620]833        "return the students department"
[1700]834        if doc is None:
[1620]835            return None
[2078]836        if cached_data.has_key(doc.study_course):
837            return cached_data[doc.study_course]['department']
[1700]838        certificate_res = self.portal_catalog(id = doc.study_course)
[1620]839        if len(certificate_res) != 1:
840            return None
841        return certificate_res[0].getPath().split('/')[-3]
842
[2078]843    def get_from_doc_faculty(self,doc,cached_data={}):
[1700]844        "return the students faculty"
845        if doc is None:
[1620]846            return None
[2078]847        if cached_data.has_key(doc.study_course):
848            return cached_data[doc.study_course]['faculty']
[1700]849        certificate_res = self.portal_catalog(id = doc.study_course)
850        if len(certificate_res) != 1:
851            return None
852        return certificate_res[0].getPath().split('/')[-4]
[1620]853
[2099]854    def get_from_doc_end_level(self,doc,cached_data={}):
[2069]855        "return the students end_level"
856        if doc is None:
857            return None
[2078]858        if cached_data.has_key(doc.study_course):
859            return cached_data[doc.study_course]['end_level']
[2069]860        certificate_res = self.portal_catalog(id = doc.study_course)
861        if len(certificate_res) != 1:
862            return None
863        return getattr(certificate_res[0].getObject().getContent(),'end_level','unknown')
864
[2078]865    def get_from_doc_level(self,doc,cached_data={}):
[1700]866        "return the students level"
867        if doc is None:
[1620]868            return None
[1700]869        return getattr(doc,'current_level',None)
[1620]870
[2078]871    def get_from_doc_mode(self,doc,cached_data={}):
[1705]872        "return the students mode"
[1700]873        if doc is None:
[1620]874            return None
[1705]875        cm = getattr(doc,'current_mode',None)
876        return cm
[1625]877
[1749]878
[2078]879    def get_from_doc_session(self,doc,cached_data={}):
[1705]880        "return the students current_session"
881        if doc is None:
882            return None
883        return getattr(doc,'current_session',None)
884
[2078]885    def get_from_doc_entry_session(self,doc,cached_data={}):
[1700]886        "return the students entry_session"
887        if doc is None:
[1620]888            return None
[1705]889        es = getattr(doc,'entry_session',None)
[1729]890        if es is not None and len(es) == 2:
[1705]891            return es
[1700]892        try:
893            digit = int(doc.jamb_reg_no[0])
894        except:
[1986]895            return "-1"
[1700]896        if digit < 8:
897            return "0%c" % doc.jamb_reg_no[0]
898        return "9%c" % doc.jamb_reg_no[0]
899
[2078]900    def get_from_doc_course(self,doc,cached_data={}):
[1620]901        "return the students study_course"
[1700]902        if doc is None:
[1620]903            return None
[1700]904        return getattr(doc,'study_course',None)
[1620]905
[2078]906    def get_from_doc_name(self,doc,cached_data={}):
[1620]907        "return the students name from the personal"
[1700]908        if doc is None:
[1620]909            return None
910        return "%s %s %s" % (doc.firstname,doc.middlename,doc.lastname)
911
[2078]912    def get_from_doc_verdict(self,doc,cached_data={}):
[1700]913        "return the students study_course"
914        if doc is None:
[1620]915            return None
[1700]916        return getattr(doc,'current_verdict',None)
[1702]917    ###)
[1620]918
[1702]919    def reindexIndex(self, name, REQUEST,pghandler=None): ###(
920        if isinstance(name, str):
921            name = (name,)
[1749]922        reindextypes = {}
[1702]923        reindex_special = []
924        for n in name:
925            if n in ("review_state","registered_courses"):
926                reindex_special.append(n)
927            else:
928                for pt in self.affected_types.keys():
[1707]929                    if n in self.affected_types[pt]['fields']:
[1702]930                        if reindextypes.has_key(pt):
931                            reindextypes[pt].append(n)
932                        else:
933                            reindextypes[pt]= [n]
934                        break
[2078]935        cached_data = {}
936        if set(name).intersection(set(('faculty','department','end_level'))):
937            cached_data = self.fill_certificates_dict()
[1702]938        students = self.portal_catalog(portal_type="Student")
[1954]939        if hasattr(self,'portal_catalog_real'):
940            aq_portal = self.portal_catalog_real.evalAdvancedQuery
941        else:
942            aq_portal = self.portal_catalog.evalAdvancedQuery
[1702]943        num_objects = len(students)
944        if pghandler:
945            pghandler.init('Refreshing catalog: %s' % self.absolute_url(1), num_objects)
946        noattr = set(('StudentClearance','StudentPersonal')) & set(reindextypes.keys())
[2084]947        #import pdb;pdb.set_trace()
[1702]948        for i in xrange(num_objects):
949            if pghandler: pghandler.report(i)
950            student_brain = students[i]
[1707]951            student_object = student_brain.getObject()
[2084]952            # query = Eq('path',student_brain.getPath())
953            # sub_brains_list = aq_portal(query)
954            # sub_brains = {}
955            # for sub_brain in sub_brains_list:
956            #     sub_brains[sub_brain.portal_type] = sub_brain
957            # student_path = student_brain.getPath()
[1702]958            data = {}
959            modified = False
960            sid = data['id'] = student_brain.getId
961            if reindex_special and 'review_state' in reindex_special:
962                modified = True
963                data['review_state'] = student_brain.review_state
[1707]964            sub_objects = False
965            for pt in reindextypes.keys():
[1702]966                modified = True
[1707]967                try:
968                    doc = getattr(student_object,self.affected_types[pt]['id']).getContent()
[2084]969                    #doc = sub_brains[pt].getObject().getContent()
970                    # path = "%s/%s" % (student_path,self.affected_types[pt]['id'])
971                    # doc = self.unrestrictedTraverse(path).getContent()
[1707]972                    sub_objects = True
973                except:
974                    continue
[2084]975                for field in set(name).intersection(self.affected_types[pt]['fields']):
[1707]976                    if hasattr(self,'get_from_doc_%s' % field):
[2078]977                        data[field] = getattr(self,'get_from_doc_%s' % field)(doc,
978                                                                              cached_data=cached_data)
[1707]979                    else:
980                        data[field] = getattr(doc,field)
981            if not sub_objects and noattr:
982                import_res = self.returning_import(id = sid)
983                if not import_res:
984                    continue
985                import_record = import_res[0]
986                data['matric_no'] = import_record.matric_no
987                data['sex'] = import_record.Sex == 'F'
988                data['name'] = "%s %s %s" % (import_record.Firstname,
989                                             import_record.Middlename,
990                                             import_record.Lastname)
[1815]991                data['jamb_reg_no'] = import_record.Entryregno
[2454]992            #if reindex_special and 'registered_courses' in reindex_special:
993            #    try:
994            #        study_course = getattr(student_object,"study_course")
995            #        level_ids = study_course.objectIds()
996            #    except:
997            #        continue
998            #    if not level_ids:
999            #        continue
1000            #    modified = True
1001            #    level_ids.sort()
1002            #    course_ids = getattr(study_course,level_ids[-1]).objectIds()
1003            #    courses = []
1004            #    for c in course_ids:
1005            #        if c.endswith('_co'):
1006            #            courses.append(c[:-3])
1007            #        else:
1008            #            courses.append(c)
1009            #    data['registered_courses'] = courses
[1702]1010            if modified:
1011                self.modifyRecord(**data)
1012        if pghandler: pghandler.finish()
1013    ###)
[1620]1014
1015    def refreshCatalog(self, clear=0, pghandler=None): ###(
1016        """ re-index everything we can find """
1017        students_folder = self.portal_url.getPortalObject().campus.students
1018        if clear:
[1724]1019            self._catalog.clear()
[1700]1020        students = self.portal_catalog(portal_type="Student")
1021        num_objects = len(students)
[2078]1022        cached_data = self.fill_certificates_dict()
[1620]1023        if pghandler:
1024            pghandler.init('Refreshing catalog: %s' % self.absolute_url(1), num_objects)
1025        for i in xrange(num_objects):
1026            if pghandler: pghandler.report(i)
[1700]1027            student_brain = students[i]
1028            spath = student_brain.getPath()
[1727]1029            student_object = student_brain.getObject()
[1620]1030            data = {}
[1700]1031            sid = data['id'] = student_brain.getId
1032            data['review_state'] = student_brain.review_state
[1707]1033            sub_objects = False
1034            for pt in self.affected_types.keys():
1035                modified = True
1036                try:
1037                    doc = getattr(student_object,self.affected_types[pt]['id']).getContent()
1038                    sub_objects = True
1039                except:
[1727]1040                    #from pdb import set_trace;set_trace()
[1707]1041                    continue
1042                for field in self.affected_types[pt]['fields']:
1043                    if hasattr(self,'get_from_doc_%s' % field):
[2078]1044                        data[field] = getattr(self,'get_from_doc_%s' % field)(doc,
1045                                                                              cached_data=cached_data)
[1707]1046                    else:
[1727]1047                        data[field] = getattr(doc,field,None)
1048            if not sub_objects:
[1700]1049                import_res = self.returning_import(id = sid)
1050                if not import_res:
[1620]1051                    continue
[1700]1052                import_record = import_res[0]
1053                data['matric_no'] = import_record.matric_no
1054                data['sex'] = import_record.Sex == 'F'
1055                data['name'] = "%s %s %s" % (import_record.Firstname,
1056                                             import_record.Middlename,
1057                                             import_record.Lastname)
[1815]1058                data['jamb_reg_no'] = import_record.Entryregno
[2454]1059            #else:
1060            #    study_course = getattr(student_object,'study_course',None)
1061            #    current_level = data.get('level',None)
1062            #    data['registered_courses'] = []
1063            #    if study_course and current_level and current_level in study_course.objectIds():
1064            #        level_obj = getattr(study_course,current_level)
1065            #        courses = []
1066            #        for c in level_obj.objectIds():
1067            #            if c.endswith('_co'):
1068            #                courses.append(c[:-3])
1069            #            else:
1070            #                courses.append(c)
1071            #        data['registered_courses'] = courses
[1700]1072            self.addRecord(**data)
[1620]1073        if pghandler: pghandler.finish()
1074    ###)
1075
[1700]1076    security.declarePrivate('notify_event_listener') ###(
[1620]1077    def notify_event_listener(self,event_type,object,infos):
1078        "listen for events"
[1716]1079        if not infos.has_key('rpath'):
1080            return
[1702]1081        pt = getattr(object,'portal_type',None)
1082        mt = getattr(object,'meta_type',None)
[1954]1083        students_catalog = self
[1702]1084        data = {}
1085        if pt == 'Student' and\
1086           mt == 'CPS Proxy Folder' and\
1087           event_type.startswith('workflow'):
1088            data['id'] = object.getId()
1089            data['review_state'] = self.portal_workflow.getInfoFor(object,'review_state',None)
1090            students_catalog.modifyRecord(**data)
1091            return
[1700]1092        rpl = infos['rpath'].split('/')
[2396]1093        if pt == 'Student' and mt == 'CPS Proxy Folder':
[1700]1094            student_id = object.id
[2396]1095            if event_type == "sys_add_object":
1096                try:
1097                    self.addRecord(id = student_id)
1098                except ValueError:
1099                    pass
1100                return
1101            elif event_type == 'sys_del_object':
1102                self.deleteRecord(student_id)
1103                #import pdb;pdb.set_trace()
[2454]1104        #elif pt == 'StudentCourseResult' and mt == 'CPS Proxy Folder':
1105        #    if event_type not in ("sys_add_object","sys_del_object"):
1106        #        return
1107        #    level_session = getattr(object.aq_parent.getContent(),'session','unknown')
1108        #    if level_session not in (self.getSessionId()[-2:],'2006/2007'):
1109        #        return
1110        #    course_id = object.getId()
1111        #    if course_id.endswith('_co'):
1112        #        course_id = course_id[:-3]
1113        #    student_id = object.absolute_url_path().split('/')[-4]
1114        #    res = students_catalog(id = student_id)
1115        #    if not res:
1116        #        return
1117        #    student_rec = res[0]
1118        #    registered_courses = getattr(student_rec,'registered_courses',None)
1119        #    if not registered_courses:
1120        #        registered_courses = []
1121        #    if event_type == "sys_add_object":
1122        #        if course_id not in registered_courses:
1123        #            registered_courses.append(course_id)
1124        #        else:
1125        #            return
1126        #    elif registered_courses and event_type == "sys_del_object":
1127        #        removed = False
1128        #        while course_id in registered_courses:
1129        #            removed = True
1130        #            registered_courses.remove(course_id)
1131        #        if not removed:
1132        #            return
1133        #    data['id'] = student_id
1134        #    data['registered_courses'] = registered_courses
1135        #    self.modifyRecord(record = student_rec, **data)
1136        #    return
[1716]1137        if pt not in self.affected_types.keys():
[1700]1138            return
[1716]1139        if event_type not in ('sys_modify_object'):
1140            return
[1700]1141        if mt == 'CPS Proxy Folder':
1142            return
[1716]1143        for field in self.affected_types[pt]['fields']:
[1700]1144            if hasattr(self,'get_from_doc_%s' % field):
1145                data[field] = getattr(self,'get_from_doc_%s' % field)(object)
1146            else:
1147                data[field] = getattr(object,field)
1148        data['id'] = rpl[2]
[1716]1149        self.modifyRecord(**data)
[1700]1150    ###)
[1620]1151
[1625]1152
[971]1153InitializeClass(StudentsCatalog)
1154
[1146]1155###)
1156
class CoursesCatalog(WAeUPTable): ###(
    """Flat catalog with one record per course, keyed by course code.

    'faculty' and 'department' are derived from the course's path below
    the 'academics' folder; the catalog is kept in sync by the CPS event
    service via ``notify_event_listener``.
    """
    security = ClassSecurityInfo()

    meta_type = 'WAeUP Courses Catalog'
    name =  "courses_catalog"
    key = "code"
    def __init__(self,name=None):
        """Initialize the catalog under ``name`` (default: the class name)."""
        if name ==  None:
            name =  self.name
        WAeUPTable.__init__(self, name)

    def manage_catalogReindex(self, REQUEST, RESPONSE, URL1): ###(
        """ clear the catalog, then re-index everything """

        elapse = time.time()
        c_elapse = time.clock()

        pgthreshold = self._getProgressThreshold()
        handler = (pgthreshold > 0) and ZLogHandler(pgthreshold) or None
        self.refreshCatalog(clear=1, pghandler=handler)

        elapse = time.time() - elapse
        c_elapse = time.clock() - c_elapse

        RESPONSE.redirect(
            URL1 +
            '/manage_catalogAdvanced?manage_tabs_message=' +
            urllib.quote('Catalog Updated \n'
                         'Total time: %s\n'
                         'Total CPU time: %s' % (`elapse`, `c_elapse`)))
    ###)

    def reindexIndex(self, name, REQUEST,pghandler=None): ###(
        """Recompute the given index(es) for every cataloged course.

        ``name`` may be one index name or a sequence of names.
        """
        if isinstance(name, str):
            name = (name,)
        courses = self.portal_catalog(portal_type="Course")
        num_objects = len(courses)
        if pghandler:
            pghandler.init('Refreshing catalog: %s' % self.absolute_url(1), num_objects)
        for i in xrange(num_objects):
            if pghandler: pghandler.report(i)
            course_brain = courses[i]
            course_object = course_brain.getObject()
            pl = course_brain.getPath().split('/')
            data = {}
            # getId is a metadata attribute on catalog brains, not a method.
            cid = data[self.key] = course_brain.getId
            # path is .../<faculty>/<department>/<course>
            data['faculty'] = pl[-4]
            data['department'] = pl[-3]
            doc = course_object.getContent()
            for field in name:
                if field not in (self.key,'faculty','department'):
                    data[field] = getattr(doc,field)
            self.modifyRecord(**data)
        if pghandler: pghandler.finish()
    ###)

    def refreshCatalog(self, clear=0, pghandler=None): ###(
        """ re-index everything we can find """
        if clear:
            self._catalog.clear()
        courses = self.portal_catalog(portal_type="Course")
        num_objects = len(courses)
        if pghandler:
            pghandler.init('Refreshing catalog: %s' % self.absolute_url(1), num_objects)
        #from pdb import set_trace;set_trace()
        for i in xrange(num_objects):
            if pghandler: pghandler.report(i)
            course_brain = courses[i]
            course_doc = course_brain.getObject().getContent()
            pl = course_brain.getPath().split('/')
            data = {}
            for field in self.schema():
                data[field] = getattr(course_doc,field,None)
            data[self.key] = course_brain.getId
            # faculty/department are the two path segments below 'academics'.
            ai = pl.index('academics')
            data['faculty'] = pl[ai +1]
            data['department'] = pl[ai +2]
            if clear:
                self.addRecord(**data)
            else:
                self.modifyRecord(**data)
        if pghandler: pghandler.finish()
    ###)

    security.declarePrivate('notify_event_listener') ###(
    def notify_event_listener(self,event_type,object,infos):
        "listen for events and keep course records in sync on add/modify/delete"
        if not infos.has_key('rpath'):
            return
        pt = getattr(object,'portal_type',None)
        mt = getattr(object,'meta_type',None)
        if pt != 'Course':
            return
        data = {}
        rpl = infos['rpath'].split('/')
        if event_type not in ("sys_add_object","sys_modify_object","sys_del_object"):
            return
        course_id = object.getId()
        data[self.key] = course_id
        if event_type == "sys_add_object" and mt == 'CPS Proxy Folder':
            try:
                self.addRecord(**data)
            except ValueError:
                # record already exists
                return
            course_id = object.getId()
            doc = object.getContent()
            if doc is None:
                return
            for field in self.schema():
                data[field] = getattr(doc,field,None)
            data[self.key] = course_id
            ai = rpl.index('academics')
            data['faculty'] = rpl[ai +1]
            data['department'] = rpl[ai +2]
            self.modifyRecord(**data)
            return
        if event_type == "sys_del_object":
            self.deleteRecord(course_id)
            return
        if event_type == "sys_modify_object" and mt == 'Course':
            #from pdb import set_trace;set_trace()
            for field in self.schema():
                data[field] = getattr(object,field,None)
            # On modify the event object is the content document; the parent
            # proxy carries the course id.
            course_id = object.aq_parent.getId()
            data[self.key] = course_id
            ai = rpl.index('academics')
            data['faculty'] = rpl[ai +1]
            data['department'] = rpl[ai +2]
            self.modifyRecord(**data)
    ###)


InitializeClass(CoursesCatalog)
###)
[1146]1291
class CourseResults(WAeUPTable): ###(
    """Catalog of per-course results, keyed by 'student_id|level_id|course_id'."""
    security = ClassSecurityInfo()

    meta_type = 'WAeUP Results Catalog'
    name = "course_results"
    key = "key" #student_id + level + course_id
    def __init__(self,name=None):
        """Initialize the catalog under ``name`` (default: the class name)."""
        if name ==  None:
            name = self.name
        WAeUPTable.__init__(self, name)
        self._queue = []

    def addMultipleRecords(self, records): ###(
        """add many records"""
        # NOTE(review): ``added_keys`` is never used, and ``uid`` is unbound
        # when ``records`` is empty (NameError on return) — confirm callers
        # always pass a non-empty list.
        added_keys = []
        for data in records:
            uid = key = "%(student_id)s|%(level_id)s|%(course_id)s" % data
            data['%s' % self.key] = uid
            res = self.searchResults({"%s" % self.key : uid})
            if len(res) > 0:
                # a record with this key is already cataloged
                raise ValueError("More than one record with uid %s" % uid)
            self.catalog_object(dict2ob(data), uid=uid)
        return uid
    ###)

    def deleteResultsHere(self,level_id,student_id): ###(
        """Delete all result records of ``student_id`` at ``level_id``."""
        #import pdb;pdb.set_trace()
        query = Eq('student_id',student_id) & Eq('level_id', level_id)
        course_results = self.course_results.evalAdvancedQuery(query)
        for result in course_results:
            self.deleteRecord(result.key)
    ###)

    def moveResultsHere(self,level,student_id): ###(
        """Move course results stored as sub-objects of ``level`` into this
        catalog, then delete those sub-objects from the level."""
        #import pdb;pdb.set_trace()
        level_id = level.getId()
        query = Eq('student_id',student_id) & Eq('level_id', level_id)
        course_results = self.course_results.evalAdvancedQuery(query)
        existing_courses = [cr.code for cr in course_results]
        to_delete = []
        for code,obj in level.objectItems():
            to_delete.append(code)
            carry_over = False
            # a '_co' suffix on the object id marks a carry-over course
            if code.endswith('_co'):
                carry_over = True
                code  = code[:-3]
            if code in existing_courses:
                continue
            course_result_doc = obj.getContent()
            data = {}
            course_id = code
            for field in self.schema():
                data[field] = getattr(course_result_doc,field,'')
            # key format must match addMultipleRecords: student|level|course
            data['key'] = key = "%(student_id)s|%(level_id)s|%(course_id)s" % vars()
            data['student_id'] = student_id
            data['level_id'] = level_id
            session_id = self.getLevelSession(level.getContent(),student_id,level_id)
            data['session_id'] = session_id
            #data['queue_status'] = OBJECT_CREATED
            data['code'] = course_id
            data['carry_over'] = carry_over
            self.catalog_object(dict2ob(data), uid=key)
        level.manage_delObjects(to_delete)
    ###)

    def getCourses(self,student_id,level_id): ###(
        """Return (credits, carry_overs, normal1, normal2, normal3):
        the student's total credits at ``level_id`` plus result dicts
        grouped into carry-overs and 1st/2nd/other-semester courses."""
        query = Eq('student_id',student_id) & Eq('level_id', level_id)
        course_results = self.course_results.evalAdvancedQuery(query)
        carry_overs = []
        normal1 = []
        normal2 = []
        normal3 = []
        credits = 0
        for brain in course_results:
            d = {}
            # NOTE(review): raises ValueError if brain.credits is not numeric
            # (e.g. the '' default written by moveResultsHere) — confirm data.
            credits += int(brain.credits)
            for field in self.schema():
                d[field] = getattr(brain,field,'')
            #d['sheduled'] = brain.queue_status == ADDING_SHEDULED
            d['coe'] = 'Elective'
            if brain.core_or_elective:
                d['coe'] = 'Core'
            id = code = d['id'] = brain.code
            d['code'] = code
            course = self.courses_catalog.evalAdvancedQuery(Eq('code',code))[0]
            d['title'] = course.title

            # The courses_catalog contains strings and integers in its semester field.
            # Maybe this can be fixed by reindexing the catalog. The schema of course says 'CPS Int Field'.
            d['semester'] = str(course.semester)
            if brain.carry_over:
                d['coe'] = 'Carry-Over'
                carry_overs.append(d)
            else:
                if d['semester'] == '1':
                    normal1.append(d)

                elif d['semester'] == '2':
                    normal2.append(d)
                else:
                    normal3.append(d)
        #normal.sort(cmp=lambda x,y: cmp("%(semester)s%(code)s" % x,
        #                                "%(semester)s%(code)s" % y))
        carry_overs.sort(cmp=lambda x,y: cmp("%(semester)s%(code)s" % x,
                                             "%(semester)s%(code)s" % y))
        return credits,carry_overs,normal1,normal2,normal3
    ###)

InitializeClass(CourseResults)
###)
1402
class OnlinePaymentsImport(WAeUPTable): ###(
    """Import table for online payment transactions, keyed by order_id."""

    meta_type = 'WAeUP Online Payment Transactions'
    name = "online_payments_import"
    key = "order_id"

    def __init__(self, name=None):
        # Fall back to the class-level catalog name when none is supplied.
        if name is None:
            name = self.name
        WAeUPTable.__init__(self, name)


InitializeClass(OnlinePaymentsImport)
###)
1416
class ReturningImport(WAeUPTable): ###(
    """Import table for returning students, keyed by matric_no."""

    meta_type = 'Returning Import Table'
    name = "returning_import"
    key = "matric_no"

    def __init__(self, name=None):
        # Fall back to the class-level catalog name when none is supplied.
        if name is None:
            name = self.name
        WAeUPTable.__init__(self, name)


InitializeClass(ReturningImport)
###)
[1146]1430
class ResultsImport(WAeUPTable): ###(
    """Import table for examination results, keyed by a composite 'key'."""

    meta_type = 'Results Import Table'
    name = "results_import"
    key = "key"

    def __init__(self, name=None):
        # Fall back to the class-level catalog name when none is supplied.
        if name is None:
            name = self.name
        WAeUPTable.__init__(self, name)


InitializeClass(ResultsImport)

###)
1445
class PaymentsCatalog(WAeUPTable): ###(
    """Catalog of payment records, keyed by id.

    Fix: ``name`` used to be "students_catalog" — an apparent copy/paste
    slip from StudentsCatalog that would make a default-constructed
    PaymentsCatalog collide with the students catalog.  It now uses its
    own catalog id.
    """

    meta_type = 'WAeUP Payments Catalog'
    name = "payments_catalog"
    key = "id"

    def __init__(self, name=None):
        # Fall back to the class-level catalog name when none is supplied.
        if name is None:
            name = self.name
        WAeUPTable.__init__(self, name)


InitializeClass(PaymentsCatalog)

###)
1460
# BBB: backwards-compatibility alias — presumably keeps the historical
# misspelling "AccomodationTable" resolvable for old code and persisted
# references (TODO: confirm before removing).
AccomodationTable = AccommodationTable
Note: See TracBrowser for help on using the repository browser.