source: WAeUP_SRP/base/WAeUPTables.py @ 2602

Last change on this file since 2602 was 2579, checked in by Henrik Bettermann, 17 years ago

Zwischenversion (intermediate version)

Ich arbeite noch an layout_application_edit.pt

  • Property svn:keywords set to Id
File size: 54.0 KB
RevLine 
[966]1#-*- mode: python; mode: fold -*-
[363]2# (C) Copyright 2005 AixtraWare <http://aixtraware.de>
3# Author: Joachim Schmitz <js@aixtraware.de>
4#
5# This program is free software; you can redistribute it and/or modify
6# it under the terms of the GNU General Public License version 2 as published
7# by the Free Software Foundation.
8#
9# This program is distributed in the hope that it will be useful,
10# but WITHOUT ANY WARRANTY; without even the implied warranty of
11# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
12# GNU General Public License for more details.
13#
14# You should have received a copy of the GNU General Public License
15# along with this program; if not, write to the Free Software
16# Foundation, Inc., 59 Temple Place - Suite 330, Boston, MA
17# 02111-1307, USA.
18#
19# $Id: WAeUPTables.py 2579 2007-11-08 10:14:35Z henrik $
20
21from zope.interface import implements
22from Globals import InitializeClass
23from Products.ZCatalog.ZCatalog import ZCatalog
[1620]24from Products.ZCatalog.ProgressHandler import ZLogHandler
[780]25from AccessControl import ClassSecurityInfo
26from Products.CMFCore.permissions import ModifyPortalContent
[2094]27from Products.CMFCore.utils import getToolByName
28from Products.CMFCore.CatalogTool import CatalogTool
29from Products.CPSSchemas.StorageAdapter import MappingStorageAdapter
30from Products.CPSSchemas.DataStructure import DataStructure
31from Products.CPSSchemas.DataModel import DataModel
32from Products.AdvancedQuery import Eq, Between, Le,In
[1700]33import urllib
[1620]34import DateTime,time
[780]35import csv,re
36import logging
37import Globals
38p_home = Globals.package_home(globals())
39i_home = Globals.INSTANCE_HOME
40
# Workflow marker values stored in table records.
# NOTE(review): "sheduled" is a historic misspelling, but the literal value is
# persisted in and compared against existing records, so it must not be
# corrected here without a data migration.
ADDING_SHEDULED = "adding_sheduled"
OBJECT_CREATED = "object_created"
43
[363]44from interfaces import IWAeUPTable
45
class AttributeHolder(object):
    """Empty container whose attributes are filled in dynamically."""
    pass

def dict2ob(dict):
    """Return an AttributeHolder carrying one attribute per mapping key."""
    # Note: the parameter name shadows the builtin ``dict``; kept for
    # backward compatibility of the signature.
    ob = AttributeHolder()
    ob.__dict__.update(dict)
    return ob
54
class WAeUPTable(ZCatalog): ###(
    """Base class for all WAeUP catalog tables.

    A WAeUPTable is a ZCatalog whose records are addressed through one
    unique key column, named by the class attribute ``key``.  Subclasses
    define ``meta_type``, ``name`` and ``key`` and inherit the generic
    record CRUD, CSV export and CSV import machinery below.
    """

    implements(IWAeUPTable)
    security = ClassSecurityInfo()
    meta_type = None

    def __init__(self,name=None):
        # Fall back to the class-level default id when no name is given.
        if name ==  None:
            name = self.name
        ZCatalog.__init__(self,name)

    def refreshCatalog(self, clear=0, pghandler=None): ###(
        """ don't refresh for a normal table """
        # NOTE(review): URL1 is not defined in this scope; if this method is
        # ever reached with a live REQUEST it will raise a NameError -- confirm.

        if self.REQUEST and self.REQUEST.RESPONSE:
            self.REQUEST.RESPONSE.redirect(
              URL1 +
              '/manage_catalogAdvanced?manage_tabs_message=Catalog%20refresh%20not%20implemented')

###)

    def manage_catalogClear(self, REQUEST=None, RESPONSE=None, URL1=None): ###(
        """ clears the whole enchilada """

        #if REQUEST and RESPONSE:
        #    RESPONSE.redirect(
        #      URL1 +
        #      '/manage_catalogAdvanced?manage_tabs_message=Catalog%20Clearing%20disabled')

        self._catalog.clear()
        if REQUEST and RESPONSE:
            RESPONSE.redirect(
              URL1 +
              '/manage_catalogAdvanced?manage_tabs_message=Catalog%20cleared')

###)

    def record2dict(self,fields,record):
        """Copy the requested ``fields`` of a catalog ``record`` into a dict.

        Special cases: 'sex' is rendered as 'F' (true) or 'M' (false) and
        'lga' is translated through the local_gov_areas vocabulary.  Missing
        or false values become the empty string.
        """
        d = {}
        for key in fields:
            v = getattr(record, key, None)
            if key == 'sex':
                # stored as a boolean, exported as F/M
                if v:
                    v = 'F'
                else:
                    v = 'M'
                d[key] = v
            elif v:
                if key == 'lga':
                    v = self.portal_vocabularies.local_gov_areas.get(v)
                d[key] = v
            else:
                d[key] = ''
        return d

    def addRecord(self, **data): ###(
        """Catalog a new record; data must contain the table's key column.

        Raises ValueError when a record with the same uid already exists.
        """
        uid = data[self.key]
        res = self.searchResults({"%s" % self.key : uid})
        if len(res) > 0:
            raise ValueError("More than one record with uid %s" % uid)
        self.catalog_object(dict2ob(data), uid=uid)
        return uid

###)

    def deleteRecord(self, uid):
        """Remove the record addressed by uid from the catalog."""
        self.uncatalog_object(uid)

    def searchAndSetRecord(self, **data):
        # Abstract hook, overridden e.g. by PinTable.
        # NOTE(review): raises the NotImplemented singleton rather than
        # NotImplementedError -- works as a raise in Python 2 but is a typo.
        raise NotImplemented

    def modifyRecord(self, record=None, **data): ###(
        """Update an existing record in place.

        Looks the record up by its uid (data[self.key]) unless a catalog
        record is passed in, merges ``data`` over the stored field values
        and re-catalogs the result.  Raises KeyError when no record exists
        for the uid, ValueError on duplicate uids.
        """
        #records = self.searchResults(uid=uid)
        uid = data[self.key]
        if record is None:
            records = self.searchResults({"%s" % self.key : uid})
            if len(records) > 1:
                # Can not happen, but anyway...
                raise ValueError("More than one record with uid %s" % uid)
            if len(records) == 0:
                raise KeyError("No record for uid %s" % uid)
            record = records[0]
        record_data = {}
        for field in self.schema() + self.indexes():
            record_data[field] = getattr(record, field)
        # Add the updated data:
        record_data.update(data)
        self.catalog_object(dict2ob(record_data), uid)

###)

    def reindexIndex(self, name, REQUEST,pghandler=None): ###(
        """Re-catalog the index(es) ``name`` for every record in the table."""
        if isinstance(name, str):
            name =  (name,)
        paths = self._catalog.uids.items()
        i = 0
        #import pdb;pdb.set_trace()
        for p,rid in paths:
            i += 1
            metadata = self.getMetadataForRID(rid)
            record_data = {}
            for field in name:
                record_data[field] = metadata.get(field)
            uid = metadata.get(self.key)
            # update only the given indexes, leave the metadata untouched
            self.catalog_object(dict2ob(record_data), uid, idxs=name,
                                update_metadata=0)

###)

    security.declareProtected(ModifyPortalContent,"exportAllRecords") ###(
    def exportAllRecords(self):
        "export a WAeUPTable"
        # Writes every record (all schema columns) as quoted CSV to
        # <i_home>/import/<table-id>-<timestamp>.csv.
        #import pdb;pdb.set_trace()
        fields = [field for field in self.schema()]
        format = ','.join(['"%%(%s)s"' % fn for fn in fields])
        # NOTE(review): this local list shadows the imported csv module.
        csv = []
        csv.append(','.join(['"%s"' % fn for fn in fields]))
        for uid in self._catalog.uids:
            records = self.searchResults({"%s" % self.key : uid})
            if len(records) > 1:
                # Can not happen, but anyway...
                raise ValueError("More than one record with uid %s" % uid)
            if len(records) == 0:
                raise KeyError("No record for uid %s" % uid)
            rec = records[0]
            csv.append(format % rec)
        current = DateTime.DateTime().strftime("%d-%m-%y_%H_%M_%S")
        open("%s/import/%s-%s.csv" % (i_home,self.getId(),current),"w+").write('\n'.join(csv))

###)

    security.declareProtected(ModifyPortalContent,"dumpAll")###(
    def dumpAll(self):
        """dump all data in the table to a csv"""
        # Streams every record to <i_home>/export/<table>_<timestamp>.csv in
        # chunks of 2000, logging throughput and an estimated finish time,
        # then redirects the browser back to the calling page.
        member = self.portal_membership.getAuthenticatedMember()
        logger = logging.getLogger('WAeUPTables.dump_%s' % self.__name__)
        current = DateTime.DateTime().strftime("%d-%m-%y_%H_%M_%S")
        export_file = "%s/export/%s_%s.csv" % (i_home,self.__name__,current,)
        # NOTE(review): Python 2 debug print left in -- consider removing.
        print export_file
        res_list = []
        lines = []
        # export_keys (if defined on the subclass) restricts the columns;
        # otherwise every schema column is exported.
        if hasattr(self,"export_keys"):
            fields = self.export_keys
        else:
            fields = []
            for f in self.schema():
                fields.append(f)
        headline = ','.join(fields)
        #open(export_file,"a").write(headline +'\n')
        out = open(export_file,"wb")
        out.write(headline +'\n')
        out.close()
        out = open(export_file,"a")
        csv_writer = csv.DictWriter(out,fields,)
        format = '"%(' + ')s","%('.join(fields) + ')s"'
        records = self()
        nr2export = len(records)
        logger.info('%s starts dumping, %s records to export' % (member,nr2export))
        chunk = 2000
        total = 0
        start = DateTime.DateTime().timeTime()
        start_chunk = DateTime.DateTime().timeTime()
        for record in records:
            not_all = False
            d = self.record2dict(fields,record)
            #d['state'],d['lga'] = formatLGA(d['lga'],voc = self.portal_vocabularies.local_gov_areas)
            #lines.append(format % d)
            lines.append(d)
            total += 1
            # flush a chunk when 2000 rows are buffered or at the end
            if total and not total % chunk or total == len(records):
                #open(export_file,"a").write('\n'.join(lines) +'\n')
                csv_writer.writerows(lines)
                anz = len(lines)
                logger.info("wrote %(anz)d  total written %(total)d" % vars())
                end_chunk = DateTime.DateTime().timeTime()
                duration = end_chunk-start_chunk
                per_record = duration/anz
                till_now = end_chunk - start
                avarage_per_record = till_now/total
                estimated_end = DateTime.DateTime(start + avarage_per_record * nr2export)
                estimated_end = estimated_end.strftime("%H:%M:%S")
                logger.info('%(duration)4.1f, %(per_record)4.3f,end %(estimated_end)s' % vars())
                start_chunk = DateTime.DateTime().timeTime()
                lines = []
        end = DateTime.DateTime().timeTime()
        logger.info('total time %6.2f m' % ((end-start)/60))
        import os
        filename, extension = os.path.splitext(export_file)
        from subprocess import call
        msg = "wrote %(total)d records to %(export_file)s" % vars()
        # gzip compression of the export file is currently disabled:
        #try:
        #    retcode = call('gzip %s' % (export_file),shell=True)
        #    if retcode == 0:
        #        msg = "wrote %(total)d records to %(export_file)s.gz" % vars()
        #except OSError, e:
        #    retcode = -99
        #    logger.info("zip failed with %s" % e)
        logger.info(msg)
        args = {'portal_status_message': msg}
        #url = self.REQUEST.get('URL1') + '?' + urlencode(args)
        url = self.REQUEST.get('URL2')
        return self.REQUEST.RESPONSE.redirect(url)
    ###)

    security.declarePrivate("_import_old") ###(
    def _import_old(self,filename,schema,layout, mode,logger):
        "import data from csv"
        # Validates every row of <i_home>/import/<filename>.csv against the
        # given CPS schema/layout widgets and splits the rows into valid and
        # invalid record sets.  Nothing is written to the catalog here; the
        # caller (e.g. ApplicantsCatalog.importCSV) processes the returned
        # ``valid_records``.  Returns a result dict with counters, the
        # record lists and the involved file names.
        import transaction
        import random
        pm = self.portal_membership
        member = pm.getAuthenticatedMember()
        current = DateTime.DateTime().strftime("%d-%m-%y_%H_%M_%S")
        import_fn = "%s/import/%s.csv" % (i_home,filename)
        imported_fn = "%s/import/%s_imported%s.csv" % (i_home,filename,current)
        not_imported_fn = "%s/import/%s_not_imported%s.csv" % (i_home,filename,current)
        start = True
        tr_count = 1
        total_imported = 0
        total_not_imported = 0
        total = 0
        iname =  "%s" % filename
        not_imported = []
        imported = []
        valid_records = []
        invalid_records = []
        d = {}
        d['mode'] = mode
        d['imported'] = total_imported
        d['not_imported'] = total_not_imported
        d['valid_records'] = valid_records
        d['invalid_records'] = invalid_records
        d['import_fn'] = import_fn
        d['imported_fn'] = imported_fn
        d['not_imported_fn'] = not_imported_fn
        if schema is None:
            em = 'No schema specified'
            logger.error(em)
            return d
        if layout is None:
            em = 'No layout specified'
            logger.error(em)
            return d
        validators = {}
        for widget in layout.keys():
            try:
                validators[widget] = layout[widget].validate
            except AttributeError:
                logger.info('%s has no validate attribute' % widget)
                return d
        # if mode == 'edit':
        #     importer = self.importEdit
        # elif mode == 'add':
        #     importer = self.importAdd
        # else:
        #     importer = None
        try:
            items = csv.DictReader(open(import_fn,"rb"),
                                   dialect="excel",
                                   skipinitialspace=True)
        except:
            em = 'Error reading %s.csv' % filename
            logger.error(em)
            return d
        #import pdb;pdb.set_trace()
        for item in items:
            if start:
                # first row: derive import keys from the csv heading and
                # prepare the output files and row formats
                start = False
                logger.info('%s starts import from %s.csv' % (member,filename))
                #import_keys = [k for k in item.keys() if not k.startswith('ignore')]
                attrs = csv.reader(open("%s/import/%s.csv" % (i_home,filename),"rb"),
                                   dialect="excel",
                                   skipinitialspace=True).next()
                import_keys = [k for k in attrs if not (k.startswith('ignore') or k.isupper())]
                diff2schema = set(import_keys).difference(set(schema.keys()))
                diff2layout = set(import_keys).difference(set(layout.keys()))
                if diff2layout:
                    em = "not ignorable key(s) %s found in heading" % diff2layout
                    logger.info(em)
                    return d
                s = ','.join(['"%s"' % fn for fn in import_keys])
                open(not_imported_fn,"a").write(s + ',"Error"'+ '\n')
                #s = '"id",' + s
                open(imported_fn,"a").write(s + '\n')
                format = ','.join(['"%%(%s)s"' % fn for fn in import_keys])
                format_error = format + ',"%(Error)s"'
                #format = '"%(id)s",'+ format
                adapters = [MappingStorageAdapter(schema, item)]
            dm = DataModel(item, adapters,context=self)
            ds = DataStructure(data=item,datamodel=dm)
            error_string = ""
            #import pdb;pdb.set_trace()
            for k in import_keys:
                if not validators[k](ds,mode=mode):
                    error_string += " %s : %s" % (k,ds.getError(k))
            # if not error_string and importer:
            #     item.update(dm)
            #     item['id'],error = importer(item)
            #     if error:
            #         error_string += error
            if error_string:
                item['Error'] = error_string
                invalid_records.append(dm)
                not_imported.append(format_error % item)
                total_not_imported += 1
            else:
                em = format % item
                valid_records.append(dm)
                imported.append(em)
                #logger.info("%(total_imported)d of %(total)d %(em)s" % vars())
                tr_count += 1
                total_imported += 1
            total += 1
        if len(imported) > 0:
            open(imported_fn,"a").write('\n'.join(imported))
        if len(not_imported) > 0:
            open(not_imported_fn,"a").write('\n'.join(not_imported))
        #em = "Imported: %d, not imported: %d of total %d" % (total_imported,total_not_imported,total)
        d['imported'] = total_imported
        d['not_imported'] = total_not_imported
        d['valid_records'] = valid_records
        d['invalid_records'] = invalid_records
        d['imported_fn'] = imported_fn
        d['not_imported_fn'] = not_imported_fn
        #logger.info(em)
        return d
    ###)

    security.declarePrivate("_import") ###(
    def _import_new(self,csv_items,schema, layout, mode,logger):
        "import data from csv.Dictreader Instance"
        # Successor of _import_old working on an already-open DictReader.
        # NOTE(review): still work in progress -- ``filename`` (used below
        # for iname, the attrs re-read and a log message), ``member`` and
        # ``format`` are not defined in this scope and will raise NameError
        # when reached; confirm before enabling callers.
        start = True
        tr_count = 1
        total_imported = 0
        total_not_imported = 0
        total = 0
        iname =  "%s" % filename
        not_imported = []
        valid_records = []
        invalid_records = []
        duplicate_records = []
        d = {}
        d['mode'] = mode
        d['valid_records'] = valid_records
        d['invalid_records'] = invalid_records
        # NOTE(review): the next line overwrites the 'invalid_records' entry
        # just set above -- the key was presumably meant to be
        # 'duplicate_records'.
        d['invalid_records'] = duplicate_records
        # d['import_fn'] = import_fn
        # d['imported_fn'] = imported_fn
        # d['not_imported_fn'] = not_imported_fn
        validators = {}
        for widget in layout.keys():
            try:
                validators[widget] = layout[widget].validate
            except AttributeError:
                logger.info('%s has no validate attribute' % widget)
                return d
        for item in csv_items:
            if start:
                start = False
                logger.info('%s starts import from %s.csv' % (member,filename))
                #import_keys = [k for k in item.keys() if not k.startswith('ignore')]
                attrs = csv.reader(open("%s/import/%s.csv" % (i_home,filename),"rb")).next()
                import_keys = [k for k in attrs if not (k.startswith('ignore') or k.isupper())]
                diff2schema = set(import_keys).difference(set(schema.keys()))
                diff2layout = set(import_keys).difference(set(layout.keys()))
                if diff2layout:
                    em = "not ignorable key(s) %s found in heading" % diff2layout
                    logger.info(em)
                    return d
                # s = ','.join(['"%s"' % fn for fn in import_keys])
                # open(not_imported_fn,"a").write(s + ',"Error"'+ '\n')
                # #s = '"id",' + s
                # open(imported_fn,"a").write(s + '\n')
                # format = ','.join(['"%%(%s)s"' % fn for fn in import_keys])
                # format_error = format + ',"%(Error)s"'
                # #format = '"%(id)s",'+ format
                adapters = [MappingStorageAdapter(schema, item)]
            dm = DataModel(item, adapters,context=self)
            ds = DataStructure(data=item,datamodel=dm)
            error_string = ""
            for k in import_keys:
                if not validators[k](ds,mode=mode):
                    error_string += " %s : %s" % (k,ds.getError(k))
            if error_string:
                item['Error'] = error_string
                #invalid_records.append(dm)
                invalid_records.append(item)
                total_not_imported += 1
            else:
                em = format % item
                valid_records.append(dm)
                #logger.info("%(total_imported)d of %(total)d %(em)s" % vars())
                tr_count += 1
                total_imported += 1
            total += 1
        # if len(imported) > 0:
        #     open(imported_fn,"a").write('\n'.join(imported))
        # if len(not_imported) > 0:
        #     open(not_imported_fn,"a").write('\n'.join(not_imported))
        #em = "Imported: %d, not imported: %d of total %d" % (total_imported,total_not_imported,total)
        d['imported'] = total_imported
        d['not_imported'] = total_not_imported
        d['valid_records'] = valid_records
        d['invalid_records'] = invalid_records
        return d
    ###)

    security.declarePublic("missingValue")###(
    def missingValue(self):
        """Return the ZCatalog 'Missing' singleton used for absent values."""
        from Missing import MV
        return MV
    ###)
###)
[834]468
class AccommodationTable(WAeUPTable): ###(
    """Catalog of hostel beds, keyed by the 'bed' id; the 'student' field
    holds the id of the reserving student (empty string = free bed)."""

    meta_type = 'WAeUP Accommodation Tool'
    name = "portal_accommodation"
    key = "bed"
    def __init__(self,name=None):
        if name ==  None:
            name = self.name
        WAeUPTable.__init__(self, name)

    def searchAndReserveBed(self, student_id,bed_type):
        """Reserve the first free bed of ``bed_type`` for ``student_id``.

        Returns (1, bed_id) on success,
        (-1, message) when the student already holds a bed,
        (-2, message) when no bed of the requested type is free.
        """
        records = self.searchResults({'student' : student_id})
        if len(records) > 0:
            return -1,"Student with Id %s already booked bed %s." % (student_id,records[0].bed)

        # free beds carry an empty 'student' field
        records = [r for r in self.searchResults({'bed_type' : bed_type}) if not r.student]
        #import pdb;pdb.set_trace()
        if len(records) == 0:
            return -2,"No bed available"
        rec = records[0]
        self.modifyRecord(bed=rec.bed,student=student_id)
        s_logger = logging.getLogger('WAeUPTables.AccommodationTable.searchAndReserveBed')
        s_logger.info('%s reserved bed %s' % (student_id,rec.bed))
        return 1,rec.bed


InitializeClass(AccommodationTable)

###)
498
class PinTable(WAeUPTable): ###(
    """Catalog of scratch-card PINs; records which student used each pin."""
    from ZODB.POSException import ConflictError
    meta_type = 'WAeUP Pin Tool'
    name = "portal_pins"
    key = 'pin'
    def __init__(self,name=None):
        if name ==  None:
            name = self.name
        WAeUPTable.__init__(self, name)


    def searchAndSetRecord(self, uid, student_id,prefix):
        """Mark pin ``uid`` as used by ``student_id``.

        Return codes:
           1  pin was free and is now assigned to the student
           2  pin already belongs to this student, or a ZODB write conflict
              occurred while assigning it
           0  pin belongs to a different student
          -1  no such pin
          -3  defensive fall-through (unreachable given the checks above)
        Raises ValueError when the pin occurs more than once.
        """

        # The following line must be activated after resetting the
        # the portal_pins table. This is to avoid duplicate entries
        # and disable duplicate payments.

        #student_id = student_id.upper()

        records = self.searchResults(student = student_id)
        #if len(records) > 0 and prefix in ('CLR','APP'):
        #    for r in records:
        #        if r.pin != uid and r.prefix_batch.startswith(prefix):
        #            return -2
        records = self.searchResults({"%s" % self.key : uid})
        if len(records) > 1:
            # Can not happen, but anyway...
            raise ValueError("More than one record with uid %s" % uid)
        if len(records) == 0:
            return -1
        record = records[0]
        if record.student == "":
            # free pin: copy the record and store the student id on it
            record_data = {}
            for field in self.schema() + self.indexes():
                record_data[field] = getattr(record, field)
            # Add the updated data:
            record_data['student'] = student_id
            try:
                self.catalog_object(dict2ob(record_data), uid)
                return 1
            except ConflictError:
                return 2
        if record.student.upper() != student_id.upper():
            return 0
        if record.student.upper() == student_id.upper():
            return 2
        return -3
InitializeClass(PinTable)
###)
[966]548
class PumeResultsTable(WAeUPTable): ###(
    """Catalog of PUME screening results, keyed by the JAMB registration
    number; all generic behaviour is inherited from WAeUPTable."""

    meta_type = 'WAeUP PumeResults Tool'
    name = "portal_pumeresults"
    key = "jamb_reg_no"

    def __init__(self, name=None):
        # default to the class-level table id when none is supplied
        if name is None:
            name = self.name
        WAeUPTable.__init__(self, name)


InitializeClass(PumeResultsTable)

###)
[971]563
class ApplicantsCatalog(WAeUPTable): ###(
    """Catalog of applicant (JAMB) records, keyed by registration number.

    CSV import comes in two flavours: importCSV (stable) validates rows
    via WAeUPTable._import_old and writes the valid ones into the catalog;
    new_importCSV (lock-file protected, experimental) does the same via
    _import_new and additionally writes per-outcome result files.
    """

    meta_type = 'WAeUP Applicants Catalog'
    name = "applicants_catalog"
    key = "reg_no"
    security = ClassSecurityInfo()
    # export_keys deliberately not set: dumpAll then falls back to
    # exporting every column of the schema.

    def __init__(self,name=None):
        if name ==  None:
            name = self.name
        WAeUPTable.__init__(self, name)

    security.declareProtected(ModifyPortalContent,"new_importCSV")###(
    def new_importCSV(self,filename="JAMB_data",
                  schema_id="application",
                  layout_id="import_application",
                  mode='add'):
        """ import JAMB data """
        # Guards the actual import with a lock file so only one import per
        # source file runs at a time; the work happens in
        # _new_importCSV_locked below.
        import os  # bug fix: 'os' was used here but never imported
        current = DateTime.DateTime().strftime("%d-%m-%y_%H_%M_%S")
        logger = logging.getLogger('WAeUPTables.ApplicantsCatalog.importCSV')
        lock_fn = "%s/import/%s_import_lock" % (i_home,filename)
        import_fn = "%s/import/%s.csv" % (i_home,filename)
        if mode not in ('add','edit'):
            logger.info("invalid mode: %s" % mode)
            # bug fix: abort instead of continuing with an unusable mode
            return
        if os.path.exists(lock_fn):
            logger.info("import of %(import_fn)s already in progress" % vars())
            return
        lock_file = open(lock_fn,"w")
        lock_file.write("%(current)s \n" % vars())
        lock_file.close()
        try:
            self._new_importCSV_locked(filename,schema_id,layout_id,mode,
                                       current,logger)
        finally:
            # bug fix: always release the lock; previously it was never
            # removed, so every later import run would be refused
            if os.path.exists(lock_fn):
                os.remove(lock_fn)
###)

    def _new_importCSV_locked(self,filename,schema_id,layout_id,mode,
                              current,logger): ###(
        """Body of new_importCSV, executed while holding the lock file."""
        stool = getToolByName(self, 'portal_schemas')
        ltool = getToolByName(self, 'portal_layouts')
        schema = stool._getOb(schema_id)
        if schema is None:
            em = 'No such schema %s' % schema_id
            logger.error(em)
            return
        # prefer a dedicated import layout when one exists
        for postfix in ('_import',''):
            layout_name = "%(layout_id)s%(postfix)s" % vars()
            if hasattr(ltool,layout_name):
                break
        layout = ltool._getOb(layout_name)
        if layout is None:
            em = 'No such layout %s' % layout_id
            logger.error(em)
            return
        import_fn = "%s/import/%s.csv" % (i_home,filename)
        try:
            csv_items = csv.DictReader(open(import_fn,"rb"))
        except:
            em = 'Error reading %s.csv' % filename
            logger.error(em)
            return
        # bug fix: the reader was bound to 'csv_file' but passed on under
        # the undefined name 'csv_items'
        d = self._import_new(csv_items,schema,layout,mode,logger)
        imported = []
        edited = []
        duplicates = []
        not_found = []
        for record in d['valid_records']:
            data = dict(record.items())
            if mode == "add":
                try:
                    self.addRecord(**data)
                    # bug fix: list.append takes exactly one positional
                    # argument; the old **dict(...) calls raised TypeError
                    imported.append(data)
                    logger.info("added %s" % record.items())
                except ValueError:
                    # bug fix: was the misspelt, undefined name 'dupplicate'
                    duplicates.append(data)
                    logger.info("duplicate %s" % record.items())
            elif mode == "edit":
                try:
                    self.modifyRecord(**data)
                    edited.append(data)
                    logger.info("edited %s" % record.items())
                except KeyError:
                    not_found.append(data)
                    logger.info("not found %s" % record.items())
        # bug fix: explicit mapping instead of subscripting the 'locals'
        # builtin ('locals[itype]' raised TypeError)
        out_lists = (("imported",imported),
                     ("edited",edited),
                     ("not_found",not_found),
                     ("duplicate",duplicates),
                     ("invalid",d['invalid_records']),
                    )
        for itype,outlist in out_lists:
            if not len(outlist):
                continue
            # bug fix: write to a real dated file instead of the literal
            # placeholder "file_name_<itype>"
            out_fn = "%s/import/%s_%s%s.csv" % (i_home,filename,itype,current)
            outfile = open(out_fn,'w')
            writer = csv.DictWriter(outfile,outlist[0].keys())
            # first line: column headers (this csv vintage has no
            # writeheader(), so emit a name->name row)
            header = {}
            for k in outlist[0].keys():
                header[k] = k
            writer.writerow(header)
            # bug fix: the original replaced outlist[0] with the header
            # row and thereby silently dropped the first record
            writer.writerows(outlist)
            outfile.close()
            # bug fix: the old log format string was syntactically broken
            # and would have raised at runtime
            logger.info("wrote %d %s records to %s" % (len(outlist),itype,out_fn))
###)

    security.declareProtected(ModifyPortalContent,"importCSV")###(
    def importCSV(self,filename="JAMB_data",
                  schema_id="application",
                  layout_id="application_pce",
                  mode='add'):
        """ import JAMB data """
        # bug fix: logger was referenced in the error branches below before
        # it was assigned -- create it first
        logger = logging.getLogger('WAeUPTables.ApplicantsCatalog.importCSV')
        stool = getToolByName(self, 'portal_schemas')
        ltool = getToolByName(self, 'portal_layouts')
        schema = stool._getOb(schema_id)
        if schema is None:
            em = 'No such schema %s' % schema_id
            logger.error(em)
            return
        layout = ltool._getOb(layout_id)
        if layout is None:
            em = 'No such layout %s' % layout_id
            logger.error(em)
            return
        d = self._import_old(filename,schema,layout,mode,logger)
        if len(d['valid_records']) > 0:
            for record in d['valid_records']:
                #import pdb;pdb.set_trace()
                if mode == "add":
                    self.addRecord(**dict(record.items()))
                    logger.info("added %s" % record.items())
                elif mode == "edit":
                    self.modifyRecord(**dict(record.items()))
                    logger.info("edited %s" % record.items())
                else:
                    logger.info("invalid mode: %s" % mode)
        logger.info("%(mode)sed %(imported)d records, invalid written to %(not_imported_fn)s" % d)
###)

InitializeClass(ApplicantsCatalog)

###)
727
class StudentsCatalog(WAeUPTable): ###(
    """Flat, denormalized catalog of student data, keyed by student id.

    Selected fields from each student's sub-documents (application,
    clearance, personal, study_course) are mirrored into one searchable
    table.  The table is kept current through the CPS event service
    (see notify_event_listener) and can be rebuilt completely with
    refreshCatalog / manage_catalogReindex.
    """
    security = ClassSecurityInfo()

    meta_type = 'WAeUP Students Catalog'
    name = "students_catalog"
    key = "id"
    # Maps a sub-object's portal_type to its id inside the Student folder
    # and to the catalog fields sourced from that sub-object.  Consulted
    # by reindexIndex/refreshCatalog and by notify_event_listener to
    # decide which events/fields are relevant.
    affected_types = {   ###(
                      'StudentApplication':
                      {'id': 'application',
                       'fields':
                       ('jamb_reg_no',
                        'entry_mode',
                        #'entry_level',
                        'entry_session',
                       )
                      },
                      'StudentClearance':
                      {'id': 'clearance',
                       'fields':
                       ('matric_no',
                        'lga',
                       )
                      },
                      'StudentPersonal':
                      {'id': 'personal',
                       'fields':
                       ('name',
                        'sex',
                        'perm_address',
                        'email',
                        'phone',
                       )
                      },
                      'StudentStudyCourse':
                      {'id': 'study_course',
                       'fields':
                       ('course', # study_course
                        'faculty', # from certificate
                        'department', # from certificate
                        'end_level', # from certificate
                        'level', # current_level
                        'mode',  # current_mode
                        'session', # current_session
                        'verdict', # current_verdict
                       )
                      },
                     }
    ###)

    def __init__(self,name=None):
        # Fall back to the class-level catalog id when none is supplied.
        if name ==  None:
            name = self.name
        WAeUPTable.__init__(self, name)
        return

    def manage_catalogClear(self, REQUEST=None, RESPONSE=None, URL1=None):
        """ clears the whole enchilada """
        self._catalog.clear()

        if REQUEST and RESPONSE:
            RESPONSE.redirect(
              URL1 +
              '/manage_catalogAdvanced?manage_tabs_message=Catalog%20Cleared')

    def manage_catalogReindex(self, REQUEST, RESPONSE, URL1): ###(
        """ clear the catalog, then re-index everything """

        elapse = time.time()
        c_elapse = time.clock()

        pgthreshold = self._getProgressThreshold()
        # Only log progress when a positive threshold is configured in the ZMI.
        handler = (pgthreshold > 0) and ZLogHandler(pgthreshold) or None
        self.refreshCatalog(clear=1, pghandler=handler)

        elapse = time.time() - elapse
        c_elapse = time.clock() - c_elapse

        RESPONSE.redirect(
            URL1 +
            '/manage_catalogAdvanced?manage_tabs_message=' +
            urllib.quote('Catalog Updated \n'
                         'Total time: %s\n'
                         'Total CPU time: %s' % (`elapse`, `c_elapse`)))
    ###)

    def fill_certificates_dict(self): ###(
        "return {certificate_id: {'faculty','department','end_level'}} for all certificates"
        certificates_brains = self.portal_catalog(portal_type ='Certificate')
        d = {}
        for cb in certificates_brains:
            certificate_doc = cb.getObject().getContent()
            cb_path = cb.getPath().split('/')
            ld = {}
            # Faculty/department are derived from the certificate's position
            # in the site hierarchy (4th/3rd path component from the end).
            ld['faculty'] = cb_path[-4]
            ld['department'] = cb_path[-3]
            ld['end_level'] = getattr(certificate_doc,'end_level','999')
            # getId here is brain metadata (the id string) accessed as an
            # attribute, not a method call.
            d[cb.getId] = ld
        return d
    ###)

    # The get_from_doc_* methods below are dispatched dynamically via
    # getattr(self, 'get_from_doc_%s' % field); each extracts one catalog
    # field from a sub-object document.  The mutable default cached_data={}
    # is only ever read, never mutated, so sharing it across calls is safe.

    def get_from_doc_department(self,doc,cached_data={}): ###(
        "return the students department"
        if doc is None:
            return None
        if cached_data.has_key(doc.study_course):
            return cached_data[doc.study_course]['department']
        # Cache miss: resolve via the certificate's path in portal_catalog.
        certificate_res = self.portal_catalog(id = doc.study_course)
        if len(certificate_res) != 1:
            return None
        return certificate_res[0].getPath().split('/')[-3]

    def get_from_doc_faculty(self,doc,cached_data={}):
        "return the students faculty"
        if doc is None:
            return None
        if cached_data.has_key(doc.study_course):
            return cached_data[doc.study_course]['faculty']
        certificate_res = self.portal_catalog(id = doc.study_course)
        if len(certificate_res) != 1:
            return None
        return certificate_res[0].getPath().split('/')[-4]

    def get_from_doc_end_level(self,doc,cached_data={}):
        "return the students end_level"
        if doc is None:
            return None
        if cached_data.has_key(doc.study_course):
            return cached_data[doc.study_course]['end_level']
        certificate_res = self.portal_catalog(id = doc.study_course)
        if len(certificate_res) != 1:
            return None
        return getattr(certificate_res[0].getObject().getContent(),'end_level','unknown')

    def get_from_doc_level(self,doc,cached_data={}):
        "return the students level"
        if doc is None:
            return None
        return getattr(doc,'current_level',None)

    def get_from_doc_mode(self,doc,cached_data={}):
        "return the students mode"
        if doc is None:
            return None
        cm = getattr(doc,'current_mode',None)
        return cm


    def get_from_doc_session(self,doc,cached_data={}):
        "return the students current_session"
        if doc is None:
            return None
        return getattr(doc,'current_session',None)

    def get_from_doc_entry_session(self,doc,cached_data={}):
        "return the students entry_session"
        if doc is None:
            return None
        es = getattr(doc,'entry_session',None)
        if es is not None and len(es) == 2:
            return es
        # No usable entry_session stored: derive the two-digit session code
        # from the first digit of the JAMB registration number.  Digits
        # below 8 map to "0<d>" (200x), 8/9 map to "9<d>" (199x); "-1"
        # flags a missing or unparsable reg number.
        try:
            digit = int(doc.jamb_reg_no[0])
        except:
            return "-1"
        if digit < 8:
            return "0%c" % doc.jamb_reg_no[0]
        return "9%c" % doc.jamb_reg_no[0]

    def get_from_doc_course(self,doc,cached_data={}):
        "return the students study_course"
        if doc is None:
            return None
        return getattr(doc,'study_course',None)

    def get_from_doc_name(self,doc,cached_data={}):
        "return the students name from the personal"
        if doc is None:
            return None
        return "%s %s %s" % (doc.firstname,doc.middlename,doc.lastname)

    def get_from_doc_verdict(self,doc,cached_data={}):
        "return the students current verdict"
        if doc is None:
            return None
        return getattr(doc,'current_verdict',None)
    ###)

    def reindexIndex(self, name, REQUEST,pghandler=None): ###(
        """Re-extract the given field(s) for every student record.

        name may be a single field name or a tuple of names.  Fields not
        sourced from a sub-document ('review_state','registered_courses')
        are handled specially; all others are grouped by the sub-object
        type they come from (see affected_types).
        """
        if isinstance(name, str):
            name = (name,)
        reindextypes = {}
        reindex_special = []
        for n in name:
            if n in ("review_state","registered_courses"):
                reindex_special.append(n)
            else:
                # Group requested fields by the sub-object type providing them.
                for pt in self.affected_types.keys():
                    if n in self.affected_types[pt]['fields']:
                        if reindextypes.has_key(pt):
                            reindextypes[pt].append(n)
                        else:
                            reindextypes[pt]= [n]
                        break
        cached_data = {}
        # Pre-fetch certificate data once when any certificate-derived
        # field is requested, instead of one catalog query per student.
        if set(name).intersection(set(('faculty','department','end_level'))):
            cached_data = self.fill_certificates_dict()
        students = self.portal_catalog(portal_type="Student")
        if hasattr(self,'portal_catalog_real'):
            aq_portal = self.portal_catalog_real.evalAdvancedQuery
        else:
            aq_portal = self.portal_catalog.evalAdvancedQuery
        num_objects = len(students)
        if pghandler:
            pghandler.init('Refreshing catalog: %s' % self.absolute_url(1), num_objects)
        noattr = set(('StudentClearance','StudentPersonal')) & set(reindextypes.keys())
        #import pdb;pdb.set_trace()
        for i in xrange(num_objects):
            if pghandler: pghandler.report(i)
            student_brain = students[i]
            student_object = student_brain.getObject()
            # query = Eq('path',student_brain.getPath())
            # sub_brains_list = aq_portal(query)
            # sub_brains = {}
            # for sub_brain in sub_brains_list:
            #     sub_brains[sub_brain.portal_type] = sub_brain
            # student_path = student_brain.getPath()
            data = {}
            modified = False
            sid = data['id'] = student_brain.getId
            if reindex_special and 'review_state' in reindex_special:
                modified = True
                data['review_state'] = student_brain.review_state
            sub_objects = False
            for pt in reindextypes.keys():
                modified = True
                try:
                    doc = getattr(student_object,self.affected_types[pt]['id']).getContent()
                    #doc = sub_brains[pt].getObject().getContent()
                    # path = "%s/%s" % (student_path,self.affected_types[pt]['id'])
                    # doc = self.unrestrictedTraverse(path).getContent()
                    sub_objects = True
                except:
                    continue
                for field in set(name).intersection(self.affected_types[pt]['fields']):
                    if hasattr(self,'get_from_doc_%s' % field):
                        data[field] = getattr(self,'get_from_doc_%s' % field)(doc,
                                                                              cached_data=cached_data)
                    else:
                        data[field] = getattr(doc,field)
            # Students lacking clearance/personal sub-objects fall back to
            # data from the returning_import table.
            if not sub_objects and noattr:
                import_res = self.returning_import(id = sid)
                if not import_res:
                    continue
                import_record = import_res[0]
                data['matric_no'] = import_record.matric_no
                data['sex'] = import_record.Sex == 'F'
                data['name'] = "%s %s %s" % (import_record.Firstname,
                                             import_record.Middlename,
                                             import_record.Lastname)
                data['jamb_reg_no'] = import_record.Entryregno
            #if reindex_special and 'registered_courses' in reindex_special:
            #    try:
            #        study_course = getattr(student_object,"study_course")
            #        level_ids = study_course.objectIds()
            #    except:
            #        continue
            #    if not level_ids:
            #        continue
            #    modified = True
            #    level_ids.sort()
            #    course_ids = getattr(study_course,level_ids[-1]).objectIds()
            #    courses = []
            #    for c in course_ids:
            #        if c.endswith('_co'):
            #            courses.append(c[:-3])
            #        else:
            #            courses.append(c)
            #    data['registered_courses'] = courses
            if modified:
                self.modifyRecord(**data)
        if pghandler: pghandler.finish()
    ###)

    def refreshCatalog(self, clear=0, pghandler=None): ###(
        """ re-index everything we can find """
        # NOTE(review): students_folder is unused below.
        students_folder = self.portal_url.getPortalObject().campus.students
        if clear:
            self._catalog.clear()
        students = self.portal_catalog(portal_type="Student")
        num_objects = len(students)
        cached_data = self.fill_certificates_dict()
        if pghandler:
            pghandler.init('Refreshing catalog: %s' % self.absolute_url(1), num_objects)
        for i in xrange(num_objects):
            if pghandler: pghandler.report(i)
            student_brain = students[i]
            # NOTE(review): spath is unused.
            spath = student_brain.getPath()
            student_object = student_brain.getObject()
            data = {}
            sid = data['id'] = student_brain.getId
            data['review_state'] = student_brain.review_state
            sub_objects = False
            for pt in self.affected_types.keys():
                # NOTE(review): modified is assigned but never read here.
                modified = True
                try:
                    doc = getattr(student_object,self.affected_types[pt]['id']).getContent()
                    sub_objects = True
                except:
                    #from pdb import set_trace;set_trace()
                    continue
                for field in self.affected_types[pt]['fields']:
                    if hasattr(self,'get_from_doc_%s' % field):
                        data[field] = getattr(self,'get_from_doc_%s' % field)(doc,
                                                                              cached_data=cached_data)
                    else:
                        data[field] = getattr(doc,field,None)
            # No sub-objects at all: fall back to the returning_import table.
            if not sub_objects:
                import_res = self.returning_import(id = sid)
                if not import_res:
                    continue
                import_record = import_res[0]
                data['matric_no'] = import_record.matric_no
                data['sex'] = import_record.Sex == 'F'
                data['name'] = "%s %s %s" % (import_record.Firstname,
                                             import_record.Middlename,
                                             import_record.Lastname)
                data['jamb_reg_no'] = import_record.Entryregno
            #else:
            #    study_course = getattr(student_object,'study_course',None)
            #    current_level = data.get('level',None)
            #    data['registered_courses'] = []
            #    if study_course and current_level and current_level in study_course.objectIds():
            #        level_obj = getattr(study_course,current_level)
            #        courses = []
            #        for c in level_obj.objectIds():
            #            if c.endswith('_co'):
            #                courses.append(c[:-3])
            #            else:
            #                courses.append(c)
            #        data['registered_courses'] = courses
            self.addRecord(**data)
        if pghandler: pghandler.finish()
    ###)

    security.declarePrivate('notify_event_listener') ###(
    def notify_event_listener(self,event_type,object,infos):
        "listen for events"
        if not infos.has_key('rpath'):
            return
        pt = getattr(object,'portal_type',None)
        mt = getattr(object,'meta_type',None)
        students_catalog = self
        data = {}
        # Workflow transition on a Student proxy: mirror the new review_state.
        if pt == 'Student' and\
           mt == 'CPS Proxy Folder' and\
           event_type.startswith('workflow'):
            data['id'] = object.getId()
            data['review_state'] = self.portal_workflow.getInfoFor(object,'review_state',None)
            students_catalog.modifyRecord(**data)
            return
        rpl = infos['rpath'].split('/')
        # Creation/deletion of a Student proxy: add or drop the flat record.
        if pt == 'Student' and mt == 'CPS Proxy Folder':
            student_id = object.id
            if event_type == "sys_add_object":
                try:
                    self.addRecord(id = student_id)
                except ValueError:
                    pass
                return
            elif event_type == 'sys_del_object':
                self.deleteRecord(student_id)
                #import pdb;pdb.set_trace()
        #elif pt == 'StudentCourseResult' and mt == 'CPS Proxy Folder':
        #    if event_type not in ("sys_add_object","sys_del_object"):
        #        return
        #    level_session = getattr(object.aq_parent.getContent(),'session','unknown')
        #    if level_session not in (self.getSessionId()[-2:],'2006/2007'):
        #        return
        #    course_id = object.getId()
        #    if course_id.endswith('_co'):
        #        course_id = course_id[:-3]
        #    student_id = object.absolute_url_path().split('/')[-4]
        #    res = students_catalog(id = student_id)
        #    if not res:
        #        return
        #    student_rec = res[0]
        #    registered_courses = getattr(student_rec,'registered_courses',None)
        #    if not registered_courses:
        #        registered_courses = []
        #    if event_type == "sys_add_object":
        #        if course_id not in registered_courses:
        #            registered_courses.append(course_id)
        #        else:
        #            return
        #    elif registered_courses and event_type == "sys_del_object":
        #        removed = False
        #        while course_id in registered_courses:
        #            removed = True
        #            registered_courses.remove(course_id)
        #        if not removed:
        #            return
        #    data['id'] = student_id
        #    data['registered_courses'] = registered_courses
        #    self.modifyRecord(record = student_rec, **data)
        #    return
        if pt not in self.affected_types.keys():
            return
        # NOTE(review): ('sys_modify_object') is a plain string, not a
        # 1-tuple, so this is a substring test.  It behaves correctly for
        # exact event names, but a trailing comma was probably intended.
        if event_type not in ('sys_modify_object'):
            return
        if mt == 'CPS Proxy Folder':
            return
        # Modification of a tracked sub-document: re-extract its fields.
        for field in self.affected_types[pt]['fields']:
            if hasattr(self,'get_from_doc_%s' % field):
                data[field] = getattr(self,'get_from_doc_%s' % field)(object)
            else:
                data[field] = getattr(object,field)
        # rpath looks like portal/students/<student_id>/...; index 2 is the id.
        data['id'] = rpl[2]
        self.modifyRecord(**data)
    ###)


InitializeClass(StudentsCatalog)

###)
1152
class CoursesCatalog(WAeUPTable): ###(
    """Flat catalog of all Course objects, keyed by course code.

    Besides the course document's own fields it stores the faculty and
    department, both derived from the course's position in the
    .../academics/<faculty>/<department>/... hierarchy.  Kept current
    through the CPS event service (see notify_event_listener).
    """
    security = ClassSecurityInfo()

    meta_type = 'WAeUP Courses Catalog'
    name =  "courses_catalog"
    key = "code"
    def __init__(self,name=None):
        # Fall back to the class-level catalog id when none is supplied.
        if name ==  None:
            name =  self.name
        WAeUPTable.__init__(self, name)

    def manage_catalogReindex(self, REQUEST, RESPONSE, URL1): ###(
        """ clear the catalog, then re-index everything """

        elapse = time.time()
        c_elapse = time.clock()

        pgthreshold = self._getProgressThreshold()
        # Only log progress when a positive threshold is configured in the ZMI.
        handler = (pgthreshold > 0) and ZLogHandler(pgthreshold) or None
        self.refreshCatalog(clear=1, pghandler=handler)

        elapse = time.time() - elapse
        c_elapse = time.clock() - c_elapse

        RESPONSE.redirect(
            URL1 +
            '/manage_catalogAdvanced?manage_tabs_message=' +
            urllib.quote('Catalog Updated \n'
                         'Total time: %s\n'
                         'Total CPU time: %s' % (`elapse`, `c_elapse`)))
    ###)

    def reindexIndex(self, name, REQUEST,pghandler=None): ###(
        """Re-extract the given field(s) for every course record.

        name may be a single field name or a tuple of names.
        """
        if isinstance(name, str):
            name = (name,)
        courses = self.portal_catalog(portal_type="Course")
        num_objects = len(courses)
        if pghandler:
            pghandler.init('Refreshing catalog: %s' % self.absolute_url(1), num_objects)
        for i in xrange(num_objects):
            if pghandler: pghandler.report(i)
            course_brain = courses[i]
            course_object = course_brain.getObject()
            pl = course_brain.getPath().split('/')
            data = {}
            cid = data[self.key] = course_brain.getId
            # Faculty/department come from the path, not the document.
            data['faculty'] = pl[-4]
            data['department'] = pl[-3]
            doc = course_object.getContent()
            for field in name:
                if field not in (self.key,'faculty','department'):
                    data[field] = getattr(doc,field)
            self.modifyRecord(**data)
        if pghandler: pghandler.finish()
    ###)

    def refreshCatalog(self, clear=0, pghandler=None): ###(
        """ re-index everything we can find """
        if clear:
            self._catalog.clear()
        courses = self.portal_catalog(portal_type="Course")
        num_objects = len(courses)
        if pghandler:
            pghandler.init('Refreshing catalog: %s' % self.absolute_url(1), num_objects)
        #from pdb import set_trace;set_trace()
        for i in xrange(num_objects):
            if pghandler: pghandler.report(i)
            course_brain = courses[i]
            course_doc = course_brain.getObject().getContent()
            pl = course_brain.getPath().split('/')
            data = {}
            for field in self.schema():
                data[field] = getattr(course_doc,field,None)
            data[self.key] = course_brain.getId
            # Faculty/department are the two path segments after 'academics'.
            ai = pl.index('academics')
            data['faculty'] = pl[ai +1]
            data['department'] = pl[ai +2]
            if clear:
                self.addRecord(**data)
            else:
                self.modifyRecord(**data)
        if pghandler: pghandler.finish()
    ###)

    security.declarePrivate('notify_event_listener') ###(
    def notify_event_listener(self,event_type,object,infos):
        "listen for events"
        if not infos.has_key('rpath'):
            return
        pt = getattr(object,'portal_type',None)
        mt = getattr(object,'meta_type',None)
        if pt != 'Course':
            return
        data = {}
        rpl = infos['rpath'].split('/')
        if event_type not in ("sys_add_object","sys_modify_object","sys_del_object"):
            return
        course_id = object.getId()
        data[self.key] = course_id
        # New Course proxy: create the record, then fill it from the document.
        if event_type == "sys_add_object" and mt == 'CPS Proxy Folder':
            try:
                self.addRecord(**data)
            except ValueError:
                # Record already exists; nothing to do.
                return
            course_id = object.getId()
            doc = object.getContent()
            if doc is None:
                return
            for field in self.schema():
                data[field] = getattr(doc,field,None)
            data[self.key] = course_id
            ai = rpl.index('academics')
            data['faculty'] = rpl[ai +1]
            data['department'] = rpl[ai +2]
            self.modifyRecord(**data)
            return
        if event_type == "sys_del_object":
            self.deleteRecord(course_id)
            return
        # Modified Course document (not the proxy): re-extract all fields.
        if event_type == "sys_modify_object" and mt == 'Course':
            #from pdb import set_trace;set_trace()
            for field in self.schema():
                data[field] = getattr(object,field,None)
            # The document's parent proxy carries the course id.
            course_id = object.aq_parent.getId()
            data[self.key] = course_id
            ai = rpl.index('academics')
            data['faculty'] = rpl[ai +1]
            data['department'] = rpl[ai +2]
            self.modifyRecord(**data)
    ###)


InitializeClass(CoursesCatalog)
###)
[1146]1287
class CourseResults(WAeUPTable): ###(
    """Flat catalog of per-course results.

    Each record is keyed by "<student_id>|<level_id>|<course_id>" and
    mirrors the fields of a course-result document plus bookkeeping
    columns (student_id, level_id, session_id, code, carry_over).
    """
    security = ClassSecurityInfo()

    meta_type = 'WAeUP Results Catalog'
    name = "course_results"
    key = "key" #student_id + level + course_id
    def __init__(self,name=None):
        # Fall back to the class-level catalog id when none is supplied.
        if name ==  None:
            name = self.name
        WAeUPTable.__init__(self, name)
        self._queue = []

    def addMultipleRecords(self, records): ###(
        """Add many course-result records at once.

        Each record dict must contain student_id, level_id and course_id;
        the catalog uid is built from those three parts.  Raises
        ValueError if a record with the same uid already exists.
        Returns the uid of the last record added, or None when *records*
        is empty (the original raised UnboundLocalError in that case).
        """
        uid = None
        for data in records:
            uid = "%(student_id)s|%(level_id)s|%(course_id)s" % data
            data['%s' % self.key] = uid
            res = self.searchResults({"%s" % self.key : uid})
            if len(res) > 0:
                raise ValueError("More than one record with uid %s" % uid)
            self.catalog_object(dict2ob(data), uid=uid)
        return uid
    ###)

    def deleteResultsHere(self,level_id,student_id): ###(
        """Delete all result records of one student at one level."""
        #import pdb;pdb.set_trace()
        query = Eq('student_id',student_id) & Eq('level_id', level_id)
        course_results = self.course_results.evalAdvancedQuery(query)
        for result in course_results:
            self.deleteRecord(result.key)
    ###)

    def moveResultsHere(self,level,student_id): ###(
        """Migrate course-result objects out of a level folder into this
        catalog, then delete the folder's objects.

        An object id ending in '_co' marks a carry-over course.  Objects
        whose course code already has a record for this student/level are
        skipped (but still deleted from the folder).
        """
        #import pdb;pdb.set_trace()
        level_id = level.getId()
        query = Eq('student_id',student_id) & Eq('level_id', level_id)
        course_results = self.course_results.evalAdvancedQuery(query)
        existing_courses = [cr.code for cr in course_results]
        to_delete = []
        for code,obj in level.objectItems():
            to_delete.append(code)
            carry_over = False
            if code.endswith('_co'):
                carry_over = True
                code  = code[:-3]
            if code in existing_courses:
                continue
            course_result_doc = obj.getContent()
            data = {}
            course_id = code
            for field in self.schema():
                data[field] = getattr(course_result_doc,field,'')
            data['key'] = key = "%(student_id)s|%(level_id)s|%(course_id)s" % vars()
            data['student_id'] = student_id
            data['level_id'] = level_id
            session_id = self.getLevelSession(level.getContent(),student_id,level_id)
            data['session_id'] = session_id
            #data['queue_status'] = OBJECT_CREATED
            data['code'] = course_id
            data['carry_over'] = carry_over
            self.catalog_object(dict2ob(data), uid=key)
        level.manage_delObjects(to_delete)
    ###)

    def getCourses(self,student_id,level_id): ###(
        """Return (credits, carry_overs, normal) for one student/level.

        credits is the integer sum over all records; carry_overs and
        normal are lists of per-course dicts, each sorted by
        semester + course code.
        """
        query = Eq('student_id',student_id) & Eq('level_id', level_id)
        course_results = self.course_results.evalAdvancedQuery(query)
        carry_overs = []
        normal = []
        credits = 0
        for brain in course_results:
            d = {}
            credits += int(brain.credits)
            for field in self.schema():
                d[field] = getattr(brain,field,'')
            #d['sheduled'] = brain.queue_status == ADDING_SHEDULED
            d['coe'] = 'Elective'
            if brain.core_or_elective:
                d['coe'] = 'Core'
            id = code = d['id'] = brain.code
            d['code'] = code
            d['title'] = self.courses_catalog.evalAdvancedQuery(Eq('code',code))[0].title
            if brain.carry_over:
                d['coe'] = 'Carry-Over'
                carry_overs.append(d)
            else:
                normal.append(d)
        normal.sort(cmp=lambda x,y: cmp("%(semester)s%(code)s" % x,
                                        "%(semester)s%(code)s" % y))
        carry_overs.sort(cmp=lambda x,y: cmp("%(semester)s%(code)s" % x,
                                             "%(semester)s%(code)s" % y))
        return credits,carry_overs,normal
    ###)

InitializeClass(CourseResults)
###)
1385
class OnlinePaymentsImport(WAeUPTable): ###(
    """Flat table of imported online payment transactions, keyed by order_id."""

    meta_type = 'WAeUP Online Payment Transactions'
    name = "online_payments_import"
    key = "order_id"

    def __init__(self, name=None):
        # Use the class-level table id unless an explicit one is given.
        if name is None:
            name = self.name
        WAeUPTable.__init__(self, name)


InitializeClass(OnlinePaymentsImport)
###)
1399
class ReturningImport(WAeUPTable): ###(
    """Import table for returning students, keyed by matriculation number."""

    meta_type = 'Returning Import Table'
    name = "returning_import"
    key = "matric_no"

    def __init__(self, name=None):
        # Use the class-level table id unless an explicit one is given.
        if name is None:
            name = self.name
        WAeUPTable.__init__(self, name)


InitializeClass(ReturningImport)
###)
[1146]1413
class ResultsImport(WAeUPTable): ###(
    """Import table for examination results, keyed by a composite 'key' field."""

    meta_type = 'Results Import Table'
    name = "results_import"
    key = "key"

    def __init__(self, name=None):
        # Use the class-level table id unless an explicit one is given.
        if name is None:
            name = self.name
        WAeUPTable.__init__(self, name)


InitializeClass(ResultsImport)

###)
1428
class PaymentsCatalog(WAeUPTable): ###(
    """Flat catalog of payment records, keyed by id."""

    meta_type = 'WAeUP Payments Catalog'
    # Was "students_catalog" (copy-paste from StudentsCatalog), which would
    # collide with the students catalog for a default-constructed instance.
    name = "payments_catalog"
    key = "id"
    def __init__(self,name=None):
        # Fall back to the class-level catalog id when none is supplied.
        if name ==  None:
            name = self.name
        WAeUPTable.__init__(self, name)


InitializeClass(PaymentsCatalog)

###)
1443
# BBB: backward-compatibility alias — the class name was once misspelled
# 'AccomodationTable'; keep the old spelling resolvable so existing code
# and persistent references continue to work.
AccomodationTable = AccommodationTable
Note: See TracBrowser for help on using the repository browser.