source: WAeUP_SRP/base/WAeUPTables.py @ 2680

Last change on this file since 2680 was 2665, checked in by joachim, 17 years ago

fix for #276 dumpSCPayments

  • Property svn:keywords set to Id
File size: 52.3 KB
1#-*- mode: python; mode: fold -*-
2# (C) Copyright 2005 AixtraWare <http://aixtraware.de>
3# Author: Joachim Schmitz <js@aixtraware.de>
4#
5# This program is free software; you can redistribute it and/or modify
6# it under the terms of the GNU General Public License version 2 as published
7# by the Free Software Foundation.
8#
9# This program is distributed in the hope that it will be useful,
10# but WITHOUT ANY WARRANTY; without even the implied warranty of
11# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
12# GNU General Public License for more details.
13#
14# You should have received a copy of the GNU General Public License
15# along with this program; if not, write to the Free Software
16# Foundation, Inc., 59 Temple Place - Suite 330, Boston, MA
17# 02111-1307, USA.
18#
19# $Id: WAeUPTables.py 2665 2007-11-15 15:05:13Z joachim $
20
21from zope.interface import implements
22from Globals import InitializeClass
23from Products.ZCatalog.ZCatalog import ZCatalog
24from Products.ZCatalog.ProgressHandler import ZLogHandler
25from AccessControl import ClassSecurityInfo
26from Products.CMFCore.permissions import ModifyPortalContent
27from Products.CMFCore.utils import getToolByName
28from Products.CMFCore.CatalogTool import CatalogTool
29from Products.CPSSchemas.StorageAdapter import MappingStorageAdapter
30from Products.CPSSchemas.DataStructure import DataStructure
31from Products.CPSSchemas.DataModel import DataModel
32from Products.AdvancedQuery import Eq, Between, Le,In
33import urllib
34import DateTime,time
35import csv,re,os
36import logging
37import Globals
38p_home = Globals.package_home(globals())
39i_home = Globals.INSTANCE_HOME
40
41ADDING_SHEDULED = "adding_sheduled"
42OBJECT_CREATED = "object_created"
43
44from interfaces import IWAeUPTable
45
46class AttributeHolder(object):
47    pass
48
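# Helper: wraps a plain dictionary in an object whose keys become attributes,
# since ZCatalog.catalog_object reads index values from the object via getattr.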
49def dict2ob(mapping):
50    ob = AttributeHolder()
51    for key, value in mapping.items():
52        setattr(ob, key, value)
53    return ob
54
55class WAeUPTable(ZCatalog): ###(
56
57    implements(IWAeUPTable)
58    security = ClassSecurityInfo()
59    meta_type = None
60
61    def __init__(self,name=None):
62        if name ==  None:
63            name = self.name
64        ZCatalog.__init__(self,name)
65
66    def refreshCatalog(self, clear=0, pghandler=None): ###(
67        """ don't refresh for a normal table """
68
69        if self.REQUEST and self.REQUEST.RESPONSE:
70            self.REQUEST.RESPONSE.redirect(
71              self.REQUEST.get('URL1') +
72              '/manage_catalogAdvanced?manage_tabs_message=Catalog%20refresh%20not%20implemented')
73
74###)
75
76    def manage_catalogClear(self, REQUEST=None, RESPONSE=None, URL1=None): ###(
77        """ clears the whole enchilada """
78
79        #if REQUEST and RESPONSE:
80        #    RESPONSE.redirect(
81        #      URL1 +
82        #      '/manage_catalogAdvanced?manage_tabs_message=Catalog%20Clearing%20disabled')
83
84        self._catalog.clear()
85        if REQUEST and RESPONSE:
86            RESPONSE.redirect(
87              URL1 +
88              '/manage_catalogAdvanced?manage_tabs_message=Catalog%20cleared')
89
90###)
91
92    def record2dict(self,fields,record): ###(
93        d = {}
94        for key in fields:
95            v = getattr(record, key, None)
96            if key == 'sex':
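                # 'sex' is stored as a boolean in the source records:
                # a true value is rendered as 'F', anything else as 'M'.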
97                if v:
98                    v = 'F'
99                else:
100                    v = 'M'
101                d[key] = v
102            elif v:
103                if key == 'lga':
104                    v = self.portal_vocabularies.local_gov_areas.get(v)
105                elif key == 'aos':
106                    v = self.portal_vocabularies.aos.get(v)
107                d[key] = v
108            else:
109                d[key] = ''
110        return d
111
112###)
113
114    def addRecord(self, **data): ###(
115        # The uid is the value of the table's key field (e.g. "bed" for the accommodation table).
116        uid = data[self.key]
117        res = self.searchResults({"%s" % self.key : uid})
118        if len(res) > 0:
119            raise ValueError("A record with uid %s already exists" % uid)
120        self.catalog_object(dict2ob(data), uid=uid)
121        return uid
122
123###)
124
125    def deleteRecord(self, uid):
126        self.uncatalog_object(uid)
127
128    def searchAndSetRecord(self, **data):
129        raise NotImplementedError
130
131    def modifyRecord(self, record=None, **data): ###(
132        #records = self.searchResults(uid=uid)
133        uid = data[self.key]
134        if record is None:
135            records = self.searchResults({"%s" % self.key : uid})
136            if len(records) > 1:
137                # Can not happen, but anyway...
138                raise ValueError("More than one record with uid %s" % uid)
139            if len(records) == 0:
140                raise KeyError("No record for uid %s" % uid)
141            record = records[0]
142        record_data = {}
143        for field in self.schema() + self.indexes():
144            record_data[field] = getattr(record, field)
145        # Add the updated data:
146        record_data.update(data)
147        self.catalog_object(dict2ob(record_data), uid)
148
149###)
150
151    def reindexIndex(self, name, REQUEST,pghandler=None): ###(
152        if isinstance(name, str):
153            name =  (name,)
154        paths = self._catalog.uids.items()
155        i = 0
156        #import pdb;pdb.set_trace()
157        for p,rid in paths:
158            i += 1
159            metadata = self.getMetadataForRID(rid)
160            record_data = {}
161            for field in name:
162                record_data[field] = metadata.get(field)
163            uid = metadata.get(self.key)
164            self.catalog_object(dict2ob(record_data), uid, idxs=name,
165                                update_metadata=0)
166
167###)
168
169    security.declareProtected(ModifyPortalContent,"exportAllRecords") ###(
170    def exportAllRecords(self):
171        "export a WAeUPTable"
172        #import pdb;pdb.set_trace()
173        fields = [field for field in self.schema()]
174        format = ','.join(['"%%(%s)s"' % fn for fn in fields])
175        lines = []
176        lines.append(','.join(['"%s"' % fn for fn in fields]))
177        for uid in self._catalog.uids:
178            records = self.searchResults({"%s" % self.key : uid})
179            if len(records) > 1:
180                # Can not happen, but anyway...
181                raise ValueError("More than one record with uid %s" % uid)
182            if len(records) == 0:
183                raise KeyError("No record for uid %s" % uid)
184            rec = records[0]
185            lines.append(format % rec)
186        current = DateTime.DateTime().strftime("%d-%m-%y_%H_%M_%S")
187        open("%s/import/%s-%s.csv" % (i_home,self.getId(),current),"w+").write('\n'.join(lines))
188
189###)
190
191    security.declareProtected(ModifyPortalContent,"dumpAll")###(
192    def dumpAll(self):
193        """dump all data in the table to a csv"""
194        member = self.portal_membership.getAuthenticatedMember()
195        logger = logging.getLogger('WAeUPTables.dump_%s' % self.__name__)
196        current = DateTime.DateTime().strftime("%d-%m-%y_%H_%M_%S")
197        export_file = "%s/export/%s_%s.csv" % (i_home,self.__name__,current,)
198        #print export_file
199        res_list = []
200        lines = []
201        if hasattr(self,"export_keys"):
202            fields = self.export_keys
203        else:
204            fields = []
205            for f in self.schema():
206                fields.append(f)
207        headline = ','.join(fields)
208        #open(export_file,"a").write(headline +'\n')
209        out = open(export_file,"wb")
210        out.write(headline +'\n')
211        out.close()
212        out = open(export_file,"a")
213        csv_writer = csv.DictWriter(out,fields,)
214        #format = '"%(' + ')s","%('.join(fields) + ')s"'
215        records = self()
216        nr2export = len(records)
217        logger.info('%s starts dumping, %s records to export' % (member,nr2export))
218        chunk = 2000
219        total = 0
220        start = DateTime.DateTime().timeTime()
221        start_chunk = DateTime.DateTime().timeTime()
222        for record in records:
223            not_all = False
224            d = self.record2dict(fields,record)
225            lines.append(d)
226            total += 1
227            if total and not total % chunk or total == len(records):
228                #open(export_file,"a").write('\n'.join(lines) +'\n')
229                csv_writer.writerows(lines)
230                anz = len(lines)
231                logger.info("wrote %(anz)d  total written %(total)d" % vars())
232                end_chunk = DateTime.DateTime().timeTime()
233                duration = end_chunk-start_chunk
234                per_record = duration/anz
235                till_now = end_chunk - start
236                average_per_record = till_now/total
237                estimated_end = DateTime.DateTime(start + average_per_record * nr2export)
238                estimated_end = estimated_end.strftime("%H:%M:%S")
239                logger.info('%(duration)4.1f, %(per_record)4.3f,end %(estimated_end)s' % vars())
240                start_chunk = DateTime.DateTime().timeTime()
241                lines = []
242        end = DateTime.DateTime().timeTime()
243        logger.info('total time %6.2f m' % ((end-start)/60))
244        import os
245        filename, extension = os.path.splitext(export_file)
246        from subprocess import call
247        msg = "wrote %(total)d records to %(export_file)s" % vars()
248        #try:
249        #    retcode = call('gzip %s' % (export_file),shell=True)
250        #    if retcode == 0:
251        #        msg = "wrote %(total)d records to %(export_file)s.gz" % vars()
252        #except OSError, e:
253        #    retcode = -99
254        #    logger.info("zip failed with %s" % e)
255        logger.info(msg)
256        args = {'portal_status_message': msg}
257        #url = self.REQUEST.get('URL1') + '?' + urlencode(args)
258        url = self.REQUEST.get('URL2')
259        return self.REQUEST.RESPONSE.redirect(url)
260    ###)
261
262    security.declarePrivate("_import_old") ###(
263    def _import_old(self,filename,schema,layout, mode,logger):
264        "import data from csv"
265        import transaction
266        import random
267        pm = self.portal_membership
268        member = pm.getAuthenticatedMember()
269        current = DateTime.DateTime().strftime("%d-%m-%y_%H_%M_%S")
270        import_fn = "%s/import/%s.csv" % (i_home,filename)
271        imported_fn = "%s/import/%s_imported%s.csv" % (i_home,filename,current)
272        not_imported_fn = "%s/import/%s_not_imported%s.csv" % (i_home,filename,current)
273        start = True
274        tr_count = 1
275        total_imported = 0
276        total_not_imported = 0
277        total = 0
278        iname =  "%s" % filename
279        not_imported = []
280        imported = []
281        valid_records = []
282        invalid_records = []
283        d = {}
284        d['mode'] = mode
285        d['imported'] = total_imported
286        d['not_imported'] = total_not_imported
287        d['valid_records'] = valid_records
288        d['invalid_records'] = invalid_records
289        d['import_fn'] = import_fn
290        d['imported_fn'] = imported_fn
291        d['not_imported_fn'] = not_imported_fn
292        if schema is None:
293            em = 'No schema specified'
294            logger.error(em)
295            return d
296        if layout is None:
297            em = 'No layout specified'
298            logger.error(em)
299            return d
300        validators = {}
301        for widget in layout.keys():
302            try:
303                validators[widget] = layout[widget].validate
304            except AttributeError:
305                logger.info('%s has no validate attribute' % widget)
306                return d
307        # if mode == 'edit':
308        #     importer = self.importEdit
309        # elif mode == 'add':
310        #     importer = self.importAdd
311        # else:
312        #     importer = None
313        try:
314            items = csv.DictReader(open(import_fn,"rb"),
315                                   dialect="excel",
316                                   skipinitialspace=True)
317        except:
318            em = 'Error reading %s.csv' % filename
319            logger.error(em)
320            return d
321        #import pdb;pdb.set_trace()
322        for item in items:
323            if start:
324                start = False
325                logger.info('%s starts import from %s.csv' % (member,filename))
326                #import_keys = [k for k in item.keys() if not k.startswith('ignore')]
327                attrs = csv.reader(open("%s/import/%s.csv" % (i_home,filename),"rb"),
328                                   dialect="excel",
329                                   skipinitialspace=True).next()
330                import_keys = [k for k in attrs if not (k.startswith('ignore') or k.isupper())]
331                diff2schema = set(import_keys).difference(set(schema.keys()))
332                diff2layout = set(import_keys).difference(set(layout.keys()))
333                if diff2layout:
334                    em = "non-ignorable key(s) %s found in heading" % diff2layout
335                    logger.info(em)
336                    return d
337                s = ','.join(['"%s"' % fn for fn in import_keys])
338                open(not_imported_fn,"a").write(s + ',"Error"'+ '\n')
339                #s = '"id",' + s
340                open(imported_fn,"a").write(s + '\n')
341                format = ','.join(['"%%(%s)s"' % fn for fn in import_keys])
342                format_error = format + ',"%(Error)s"'
343                #format = '"%(id)s",'+ format
344                adapters = [MappingStorageAdapter(schema, item)]
345            dm = DataModel(item, adapters,context=self)
346            ds = DataStructure(data=item,datamodel=dm)
347            error_string = ""
348            #import pdb;pdb.set_trace()
349            for k in import_keys:
350                if not validators[k](ds,mode=mode):
351                    error_string += " %s : %s" % (k,ds.getError(k))
352            # if not error_string and importer:
353            #     item.update(dm)
354            #     item['id'],error = importer(item)
355            #     if error:
356            #         error_string += error
357            if error_string:
358                item['Error'] = error_string
359                invalid_records.append(dm)
360                not_imported.append(format_error % item)
361                total_not_imported += 1
362            else:
363                em = format % item
364                valid_records.append(dm)
365                imported.append(em)
366                #logger.info("%(total_imported)d of %(total)d %(em)s" % vars())
367                tr_count += 1
368                total_imported += 1
369            total += 1
370        if len(imported) > 0:
371            open(imported_fn,"a").write('\n'.join(imported))
372        if len(not_imported) > 0:
373            open(not_imported_fn,"a").write('\n'.join(not_imported))
374        #em = "Imported: %d, not imported: %d of total %d" % (total_imported,total_not_imported,total)
375        d['imported'] = total_imported
376        d['not_imported'] = total_not_imported
377        d['valid_records'] = valid_records
378        d['invalid_records'] = invalid_records
379        d['imported_fn'] = imported_fn
380        d['not_imported_fn'] = not_imported_fn
381        #logger.info(em)
382        return d
383    ###)
384
385    security.declarePrivate("_import") ###(
386    def _import_new(self,csv_items,schema, layout, mode,logger):
387        "import data from csv.Dictreader Instance"
388        start = True
389        tr_count = 1
390        total_imported = 0
391        total_not_imported = 0
392        total = 0
394        not_imported = []
395        valid_records = []
396        invalid_records = []
397        duplicate_records = []
398        d = {}
399        d['mode'] = mode
400        d['valid_records'] = valid_records
401        d['invalid_records'] = invalid_records
402        d['duplicate_records'] = duplicate_records
403        # d['import_fn'] = import_fn
404        # d['imported_fn'] = imported_fn
405        # d['not_imported_fn'] = not_imported_fn
406        validators = {}
407        for widget in layout.keys():
408            try:
409                validators[widget] = layout[widget].validate
410            except AttributeError:
411                logger.info('%s has no validate attribute' % widget)
412                return d
413        for item in csv_items:
414            if start:
415                start = False
416                logger.info('start of import (mode=%s)' % mode)
417                #import_keys = [k for k in item.keys() if not k.startswith('ignore')]
418                attrs = csv_items.fieldnames
419                import_keys = [k for k in attrs if not (k.startswith('ignore') or k.isupper())]
420                diff2schema = set(import_keys).difference(set(schema.keys()))
421                diff2layout = set(import_keys).difference(set(layout.keys()))
422                if diff2layout:
423                    em = "non-ignorable key(s) %s found in heading" % diff2layout
424                    logger.info(em)
425                    return d
426                # s = ','.join(['"%s"' % fn for fn in import_keys])
427                # open(not_imported_fn,"a").write(s + ',"Error"'+ '\n')
428                # #s = '"id",' + s
429                # open(imported_fn,"a").write(s + '\n')
430                # format = ','.join(['"%%(%s)s"' % fn for fn in import_keys])
431                # format_error = format + ',"%(Error)s"'
432                # #format = '"%(id)s",'+ format
433                adapters = [MappingStorageAdapter(schema, item)]
434            dm = DataModel(item, adapters,context=self)
435            ds = DataStructure(data=item,datamodel=dm)
436            error_string = ""
437            for k in import_keys:
438                if not validators[k](ds,mode=mode):
439                    error_string += " %s : %s" % (k,ds.getError(k))
440            if error_string:
441                item['Error'] = error_string
442                #invalid_records.append(dm)
443                invalid_records.append(item)
444                total_not_imported += 1
445            else:
446                valid_records.append(dm)
448                #logger.info("%(total_imported)d of %(total)d %(em)s" % vars())
449                tr_count += 1
450                total_imported += 1
451            total += 1
452        # if len(imported) > 0:
453        #     open(imported_fn,"a").write('\n'.join(imported))
454        # if len(not_imported) > 0:
455        #     open(not_imported_fn,"a").write('\n'.join(not_imported))
456        #em = "Imported: %d, not imported: %d of total %d" % (total_imported,total_not_imported,total)
457        d['imported'] = total_imported
458        d['not_imported'] = total_not_imported
459        d['valid_records'] = valid_records
460        d['invalid_records'] = invalid_records
461        return d
462    ###)
463
464    security.declarePublic("missingValue")###(
465    def missingValue(self):
466        from Missing import MV
467        return MV
468    ###)
469###)
470
471class AccommodationTable(WAeUPTable): ###(
472
473    meta_type = 'WAeUP Accommodation Tool'
474    name = "portal_accommodation"
475    key = "bed"
476    def __init__(self,name=None):
477        if name ==  None:
478            name = self.name
479        WAeUPTable.__init__(self, name)
480
481    def searchAndReserveBed(self, student_id,bed_type):
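        # Returns a (status, value) tuple: (-1, message) if the student has
        # already booked a bed, (-2, message) if no bed of the requested type
        # is free, and (1, bed_id) once a bed has been reserved.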
482        records = self.searchResults({'student' : student_id})
483        if len(records) > 0:
484            return -1,"Student with Id %s already booked bed %s." % (student_id,records[0].bed)
485
486        records = [r for r in self.searchResults({'bed_type' : bed_type}) if not r.student]
487        #import pdb;pdb.set_trace()
488        if len(records) == 0:
489            return -2,"No bed available"
490        rec = records[0]
491        self.modifyRecord(bed=rec.bed,student=student_id)
492        s_logger = logging.getLogger('WAeUPTables.AccommodationTable.searchAndReserveBed')
493        s_logger.info('%s reserved bed %s' % (student_id,rec.bed))
494        return 1,rec.bed
495
496
497InitializeClass(AccommodationTable)
498
499###)
500
501class PinTable(WAeUPTable): ###(
502    from ZODB.POSException import ConflictError
503    meta_type = 'WAeUP Pin Tool'
504    name = "portal_pins"
505    key = 'pin'
506    def __init__(self,name=None):
507        if name ==  None:
508            name = self.name
509        WAeUPTable.__init__(self, name)
510
511
512    def searchAndSetRecord(self, uid, student_id,prefix):
513
514        # The following line must be activated after resetting the
515        # the portal_pins table. This is to avoid duplicate entries
516        # and disable duplicate payments.
517
518        #student_id = student_id.upper()
519
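        # Return codes used below: -1 = pin not found, 0 = pin already used by
        # a different student, 1 = pin successfully assigned to this student,
        # 2 = pin already assigned to this student (or a ZODB write conflict),
        # -3 = fallback for any other case.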
520        records = self.searchResults(student = student_id)
521        #if len(records) > 0 and prefix in ('CLR','APP'):
522        #    for r in records:
523        #        if r.pin != uid and r.prefix_batch.startswith(prefix):
524        #            return -2
525        records = self.searchResults({"%s" % self.key : uid})
526        if len(records) > 1:
527            # Can not happen, but anyway...
528            raise ValueError("More than one record with uid %s" % uid)
529        if len(records) == 0:
530            return -1
531        record = records[0]
532        if record.student == "":
533            record_data = {}
534            for field in self.schema() + self.indexes():
535                record_data[field] = getattr(record, field)
536            # Add the updated data:
537            record_data['student'] = student_id
538            try:
539                self.catalog_object(dict2ob(record_data), uid)
540                return 1
541            except ConflictError:
542                return 2
543        if record.student.upper() != student_id.upper():
544            return 0
545        if record.student.upper() == student_id.upper():
546            return 2
547        return -3
548InitializeClass(PinTable)
549###)
550
551class PumeResultsTable(WAeUPTable): ###(
552
553    meta_type = 'WAeUP PumeResults Tool'
554    name = "portal_pumeresults"
555    key = "jamb_reg_no"
556    def __init__(self,name=None):
557        if name ==  None:
558            name = self.name
559        WAeUPTable.__init__(self, name)
560
561
562InitializeClass(PumeResultsTable)
563
564###)
565
566class ApplicantsCatalog(WAeUPTable): ###(
567
568    meta_type = 'WAeUP Applicants Catalog'
569    name = "applicants_catalog"
570    key = "reg_no"
571    security = ClassSecurityInfo()
572    #export_keys = (
573    #               "reg_no",
574    #               "status",
575    #               "lastname",
576    #               "sex",
577    #               "date_of_birth",
578    #               "lga",
579    #               "email",
580    #               "phone",
581    #               "passport",
582    #               "entry_mode",
583    #               "pin",
584    #               "screening_type",
585    #               "registration_date",
586    #               "testdate",
587    #               "application_date",
588    #               "screening_date",
589    #               "faculty",
590    #               "department",
591    #               "course1",
592    #               "course2",
593    #               "course3",
594    #               "eng_score",
595    #               "subj1",
596    #               "subj1score",
597    #               "subj2",
598    #               "subj2score",
599    #               "subj3",
600    #               "subj3score",
601    #               "aggregate",
602    #               "course_admitted",
603    #               )
604    ###)
605
606    def __init__(self,name=None):
607        if name ==  None:
608            name = self.name
609        WAeUPTable.__init__(self, name)
610
611    security.declareProtected(ModifyPortalContent,"new_importCSV")###(
612    def new_importCSV(self,filename="JAMB_data",
613                  schema_id="application",
614                  layout_id="import_application",
615                  mode='add'):
616        """ import JAMB data """
617        current = DateTime.DateTime().strftime("%d-%m-%y_%H_%M_%S")
618        pm = self.portal_membership
619        member = pm.getAuthenticatedMember()
620        logger = logging.getLogger('WAeUPTables.ApplicantsCatalog.importCSV')
621        lock_fn = "%s/import/%s_import_lock" % (i_home,filename)
622        import_fn = "%s/import/%s.csv" % (i_home,filename)
623        if mode not in ('add','edit'):
624            logger.info("invalid mode: %s" % mode)
            return
625        if os.path.exists(lock_fn):
626            logger.info("import of %(import_fn)s already in progress" % vars())
627            return
628        lock_file = open(lock_fn,"w")
629        lock_file.write("%(current)s \n" % vars())
630        lock_file.close()
631        invalid_fn = "%s/import/%s_not_imported%s.csv" % (i_home,filename,current)
632        duplicate_fn = "%s/import/%s_duplicates%s.csv" % (i_home,filename,current)
633        stool = getToolByName(self, 'portal_schemas')
634        ltool = getToolByName(self, 'portal_layouts')
635        schema = stool._getOb(schema_id)
636        if schema is None:
637            em = 'No such schema %s' % schema_id
638            logger.error(em)
639            return
640        for postfix in ('_import',''):
641            layout_name = "%(layout_id)s%(postfix)s" % vars()
642            if hasattr(ltool,layout_name):
643                break
644        layout = ltool._getOb(layout_name)
645        if layout is None:
646            em = 'No such layout %s' % layout_id
647            logger.error(em)
648            return
649        try:
650            csv_items = csv.DictReader(open(import_fn,"rb"))
651        except:
652            em = 'Error reading %s.csv' % filename
653            logger.error(em)
654            return
655        d = self._import_new(csv_items,schema,layout,mode,logger)
656        imported = []
657        edited = []
658        duplicates = []
659        not_found = []
660        if len(d['valid_records']) > 0:
661            for record in d['valid_records']:
662                #import pdb;pdb.set_trace()
663                if mode == "add":
664                    try:
665                        self.addRecord(**dict(record.items()))
666                        imported.append(dict(record.items()))
667                        logger.info("added %s" % record.items())
668                    except ValueError:
669                        duplicates.append(dict(record.items()))
670                        logger.info("duplicate %s" % record.items())
671                elif mode == "edit":
672                    try:
673                        self.modifyRecord(**dict(record.items()))
674                        edited.append(dict(record.items()))
675                        logger.info("edited %s" % record.items())
676                    except KeyError:
677                        not_found.append(dict(record.items()))
678                        logger.info("not found %s" % record.items())
679        invalid = d['invalid_records']
680        for itype in ("imported","edited","not_found","duplicates","invalid"):
681            outlist = locals()[itype]
682            if len(outlist):
683                headers = {}
684                for k in outlist[0].keys():
685                    headers[k] = k
686                outlist.insert(0,headers)
687                outfile = open("%s/import/%s_%s%s.csv" % (i_home,filename,itype,current),'w')
688                csv.DictWriter(outfile,outlist[0].keys()).writerows(outlist)
689                logger.info("wrote %d %s records for %s.csv" % (len(outlist) - 1,itype,filename))
        # remove the import lock so that subsequent imports are not blocked
        os.remove(lock_fn)
690###)
691
692    security.declareProtected(ModifyPortalContent,"importCSV")###(
693    def importCSV(self,filename="JAMB_data",
694                  schema_id="application",
695                  layout_id="application_pce",
696                  mode='add'):
697        """ import JAMB data """
698        logger = logging.getLogger('WAeUPTables.ApplicantsCatalog.importCSV')
699        stool = getToolByName(self, 'portal_schemas')
700        ltool = getToolByName(self, 'portal_layouts')
701        schema = stool._getOb(schema_id)
702        if schema is None:
703            em = 'No such schema %s' % schema_id
704            logger.error(em)
705            return
706        layout = ltool._getOb(layout_id)
707        if layout is None:
708            em = 'No such layout %s' % layout_id
709            logger.error(em)
710            return
711        d = self._import_old(filename,schema,layout,mode,logger)
712        if len(d['valid_records']) > 0:
713            for record in d['valid_records']:
714                #import pdb;pdb.set_trace()
715                if mode == "add":
716                    self.addRecord(**dict(record.items()))
717                    logger.info("added %s" % record.items())
718                elif mode == "edit":
719                    self.modifyRecord(**dict(record.items()))
720                    logger.info("edited %s" % record.items())
721                else:
722                    logger.info("invalid mode: %s" % mode)
723        logger.info("%(mode)sed %(imported)d records, invalid written to %(not_imported_fn)s" % d)
724    ###)
725
726InitializeClass(ApplicantsCatalog)
727
728###)
729
730class StudentsCatalog(WAeUPTable): ###(
731    security = ClassSecurityInfo()
732
733    meta_type = 'WAeUP Students Catalog'
734    name = "students_catalog"
735    key = "id"
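    # Maps the portal_type of a student sub-object to the id of the document
    # that holds the data and the fields mirrored into this catalog.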
736    affected_types = {   ###(
737                      'StudentApplication':
738                      {'id': 'application',
739                       'fields':
740                       ('jamb_reg_no',
741                        'entry_mode',
742                        #'entry_level',
743                        'entry_session',
744                       )
745                      },
746                      'StudentClearance':
747                      {'id': 'clearance',
748                       'fields':
749                       ('matric_no',
750                        'lga',
751                       )
752                      },
753                      'StudentPersonal':
754                      {'id': 'personal',
755                       'fields':
756                       ('name',
757                        'sex',
758                        'perm_address',
759                        'email',
760                        'phone',
761                       )
762                      },
763                      'StudentStudyCourse':
764                      {'id': 'study_course',
765                       'fields':
766                       ('course', # study_course
767                        'faculty', # from certificate
768                        'department', # from certificate
769                        'end_level', # from certificate
770                        'level', # current_level
771                        'mode',  # current_mode
772                        'session', # current_session
773                        'verdict', # current_verdict
774                       )
775                      },
776                     }
777    ###)
778
779    def __init__(self,name=None):
780        if name ==  None:
781            name = self.name
782        WAeUPTable.__init__(self, name)
783        return
784
785    def manage_catalogClear(self, REQUEST=None, RESPONSE=None, URL1=None):
786        """ clears the whole enchilada """
787        self._catalog.clear()
788
789        if REQUEST and RESPONSE:
790            RESPONSE.redirect(
791              URL1 +
792              '/manage_catalogAdvanced?manage_tabs_message=Catalog%20Cleared')
793
794    def manage_catalogReindex(self, REQUEST, RESPONSE, URL1): ###(
795        """ clear the catalog, then re-index everything """
796
797        elapse = time.time()
798        c_elapse = time.clock()
799
800        pgthreshold = self._getProgressThreshold()
801        handler = (pgthreshold > 0) and ZLogHandler(pgthreshold) or None
802        self.refreshCatalog(clear=1, pghandler=handler)
803
804        elapse = time.time() - elapse
805        c_elapse = time.clock() - c_elapse
806
807        RESPONSE.redirect(
808            URL1 +
809            '/manage_catalogAdvanced?manage_tabs_message=' +
810            urllib.quote('Catalog Updated \n'
811                         'Total time: %s\n'
812                         'Total CPU time: %s' % (`elapse`, `c_elapse`)))
813    ###)
814
815    def fill_certificates_dict(self): ###(
816        "return certificate data in  dict"
817        certificates_brains = self.portal_catalog(portal_type ='Certificate')
818        d = {}
819        for cb in certificates_brains:
820            certificate_doc = cb.getObject().getContent()
821            cb_path = cb.getPath().split('/')
822            ld = {}
823            ld['faculty'] = cb_path[-4]
824            ld['department'] = cb_path[-3]
825            ld['end_level'] = getattr(certificate_doc,'end_level','999')
826            d[cb.getId] = ld
827        return d
828    ###)
829
830    def get_from_doc_department(self,doc,cached_data={}): ###(
831        "return the students department"
832        if doc is None:
833            return None
834        if cached_data.has_key(doc.study_course):
835            return cached_data[doc.study_course]['department']
836        certificate_res = self.portal_catalog(id = doc.study_course)
837        if len(certificate_res) != 1:
838            return None
839        return certificate_res[0].getPath().split('/')[-3]
840
841    def get_from_doc_faculty(self,doc,cached_data={}):
842        "return the students faculty"
843        if doc is None:
844            return None
845        if cached_data.has_key(doc.study_course):
846            return cached_data[doc.study_course]['faculty']
847        certificate_res = self.portal_catalog(id = doc.study_course)
848        if len(certificate_res) != 1:
849            return None
850        return certificate_res[0].getPath().split('/')[-4]
851
852    def get_from_doc_end_level(self,doc,cached_data={}):
853        "return the students end_level"
854        if doc is None:
855            return None
856        if cached_data.has_key(doc.study_course):
857            return cached_data[doc.study_course]['end_level']
858        certificate_res = self.portal_catalog(id = doc.study_course)
859        if len(certificate_res) != 1:
860            return None
861        return getattr(certificate_res[0].getObject().getContent(),'end_level','unknown')
862
863    def get_from_doc_level(self,doc,cached_data={}):
864        "return the students level"
865        if doc is None:
866            return None
867        return getattr(doc,'current_level',None)
868
869    def get_from_doc_mode(self,doc,cached_data={}):
870        "return the students mode"
871        if doc is None:
872            return None
873        cm = getattr(doc,'current_mode',None)
874        return cm
875
876
877    def get_from_doc_session(self,doc,cached_data={}):
878        "return the students current_session"
879        if doc is None:
880            return None
881        return getattr(doc,'current_session',None)
882
883    def get_from_doc_entry_session(self,doc,cached_data={}):
884        "return the students entry_session"
885        if doc is None:
886            return None
887        es = getattr(doc,'entry_session',None)
888        if es is not None and len(es) == 2:
889            return es
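        # No usable entry_session stored: derive it from the first digit of
        # the JAMB registration number (digits below 8 are treated as 200x
        # sessions, 8 and 9 as 199x sessions).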
890        try:
891            digit = int(doc.jamb_reg_no[0])
892        except:
893            return "-1"
894        if digit < 8:
895            return "0%c" % doc.jamb_reg_no[0]
896        return "9%c" % doc.jamb_reg_no[0]
897
898    def get_from_doc_course(self,doc,cached_data={}):
899        "return the students study_course"
900        if doc is None:
901            return None
902        return getattr(doc,'study_course',None)
903
904    def get_from_doc_name(self,doc,cached_data={}):
905        "return the students name from the personal"
906        if doc is None:
907            return None
908        return "%s %s %s" % (doc.firstname,doc.middlename,doc.lastname)
909
910    def get_from_doc_verdict(self,doc,cached_data={}):
911        "return the students study_course"
912        if doc is None:
913            return None
914        return getattr(doc,'current_verdict',None)
915    ###)
916
917    def reindexIndex(self, name, REQUEST,pghandler=None): ###(
918        if isinstance(name, str):
919            name = (name,)
920        reindextypes = {}
921        reindex_special = []
922        for n in name:
923            if n in ("review_state","registered_courses"):
924                reindex_special.append(n)
925            else:
926                for pt in self.affected_types.keys():
927                    if n in self.affected_types[pt]['fields']:
928                        if reindextypes.has_key(pt):
929                            reindextypes[pt].append(n)
930                        else:
931                            reindextypes[pt]= [n]
932                        break
933        cached_data = {}
934        if set(name).intersection(set(('faculty','department','end_level'))):
935            cached_data = self.fill_certificates_dict()
936        students = self.portal_catalog(portal_type="Student")
937        if hasattr(self,'portal_catalog_real'):
938            aq_portal = self.portal_catalog_real.evalAdvancedQuery
939        else:
940            aq_portal = self.portal_catalog.evalAdvancedQuery
941        num_objects = len(students)
942        if pghandler:
943            pghandler.init('Refreshing catalog: %s' % self.absolute_url(1), num_objects)
944        noattr = set(('StudentClearance','StudentPersonal')) & set(reindextypes.keys())
945        #import pdb;pdb.set_trace()
946        for i in xrange(num_objects):
947            if pghandler: pghandler.report(i)
948            student_brain = students[i]
949            student_object = student_brain.getObject()
950            # query = Eq('path',student_brain.getPath())
951            # sub_brains_list = aq_portal(query)
952            # sub_brains = {}
953            # for sub_brain in sub_brains_list:
954            #     sub_brains[sub_brain.portal_type] = sub_brain
955            # student_path = student_brain.getPath()
956            data = {}
957            modified = False
958            sid = data['id'] = student_brain.getId
959            if reindex_special and 'review_state' in reindex_special:
960                modified = True
961                data['review_state'] = student_brain.review_state
962            sub_objects = False
963            for pt in reindextypes.keys():
964                modified = True
965                try:
966                    doc = getattr(student_object,self.affected_types[pt]['id']).getContent()
967                    #doc = sub_brains[pt].getObject().getContent()
968                    # path = "%s/%s" % (student_path,self.affected_types[pt]['id'])
969                    # doc = self.unrestrictedTraverse(path).getContent()
970                    sub_objects = True
971                except:
972                    continue
973                for field in set(name).intersection(self.affected_types[pt]['fields']):
974                    if hasattr(self,'get_from_doc_%s' % field):
975                        data[field] = getattr(self,'get_from_doc_%s' % field)(doc,
976                                                                              cached_data=cached_data)
977                    else:
978                        data[field] = getattr(doc,field)
979            if not sub_objects and noattr:
980                import_res = self.returning_import(id = sid)
981                if not import_res:
982                    continue
983                import_record = import_res[0]
984                data['matric_no'] = import_record.matric_no
985                data['sex'] = import_record.Sex == 'F'
986                data['name'] = "%s %s %s" % (import_record.Firstname,
987                                             import_record.Middlename,
988                                             import_record.Lastname)
989                data['jamb_reg_no'] = import_record.Entryregno
990            #if reindex_special and 'registered_courses' in reindex_special:
991            #    try:
992            #        study_course = getattr(student_object,"study_course")
993            #        level_ids = study_course.objectIds()
994            #    except:
995            #        continue
996            #    if not level_ids:
997            #        continue
998            #    modified = True
999            #    level_ids.sort()
1000            #    course_ids = getattr(study_course,level_ids[-1]).objectIds()
1001            #    courses = []
1002            #    for c in course_ids:
1003            #        if c.endswith('_co'):
1004            #            courses.append(c[:-3])
1005            #        else:
1006            #            courses.append(c)
1007            #    data['registered_courses'] = courses
1008            if modified:
1009                self.modifyRecord(**data)
1010        if pghandler: pghandler.finish()
1011    ###)
1012
1013    def refreshCatalog(self, clear=0, pghandler=None): ###(
1014        """ re-index everything we can find """
1015        students_folder = self.portal_url.getPortalObject().campus.students
1016        if clear:
1017            self._catalog.clear()
1018        students = self.portal_catalog(portal_type="Student")
1019        num_objects = len(students)
1020        cached_data = self.fill_certificates_dict()
1021        if pghandler:
1022            pghandler.init('Refreshing catalog: %s' % self.absolute_url(1), num_objects)
1023        for i in xrange(num_objects):
1024            if pghandler: pghandler.report(i)
1025            student_brain = students[i]
1026            spath = student_brain.getPath()
1027            student_object = student_brain.getObject()
1028            data = {}
1029            sid = data['id'] = student_brain.getId
1030            data['review_state'] = student_brain.review_state
1031            sub_objects = False
1032            for pt in self.affected_types.keys():
1033                modified = True
1034                try:
1035                    doc = getattr(student_object,self.affected_types[pt]['id']).getContent()
1036                    sub_objects = True
1037                except:
1038                    #from pdb import set_trace;set_trace()
1039                    continue
1040                for field in self.affected_types[pt]['fields']:
1041                    if hasattr(self,'get_from_doc_%s' % field):
1042                        data[field] = getattr(self,'get_from_doc_%s' % field)(doc,
1043                                                                              cached_data=cached_data)
1044                    else:
1045                        data[field] = getattr(doc,field,None)
1046            if not sub_objects:
1047                import_res = self.returning_import(id = sid)
1048                if not import_res:
1049                    continue
1050                import_record = import_res[0]
1051                data['matric_no'] = import_record.matric_no
1052                data['sex'] = import_record.Sex == 'F'
1053                data['name'] = "%s %s %s" % (import_record.Firstname,
1054                                             import_record.Middlename,
1055                                             import_record.Lastname)
1056                data['jamb_reg_no'] = import_record.Entryregno
1057            self.addRecord(**data)
1058        if pghandler: pghandler.finish()
1059    ###)
1060
1061    security.declarePrivate('notify_event_listener') ###(
1062    def notify_event_listener(self,event_type,object,infos):
1063        "listen for events"
1064        if not infos.has_key('rpath'):
1065            return
1066        pt = getattr(object,'portal_type',None)
1067        mt = getattr(object,'meta_type',None)
1068        students_catalog = self
1069        data = {}
1070        if pt == 'Student' and\
1071           mt == 'CPS Proxy Folder' and\
1072           event_type.startswith('workflow'):
1073            data['id'] = object.getId()
1074            data['review_state'] = self.portal_workflow.getInfoFor(object,'review_state',None)
1075            students_catalog.modifyRecord(**data)
1076            return
1077        rpl = infos['rpath'].split('/')
1078        if pt == 'Student' and mt == 'CPS Proxy Folder':
1079            student_id = object.id
1080            if event_type == "sys_add_object":
1081                try:
1082                    self.addRecord(id = student_id)
1083                except ValueError:
1084                    pass
1085                return
1086            elif event_type == 'sys_del_object':
1087                self.deleteRecord(student_id)
1088        if pt not in self.affected_types.keys():
1089            return
1090        if event_type != 'sys_modify_object':
1091            return
1092        if mt == 'CPS Proxy Folder':
1093            return
1094        for field in self.affected_types[pt]['fields']:
1095            if hasattr(self,'get_from_doc_%s' % field):
1096                data[field] = getattr(self,'get_from_doc_%s' % field)(object)
1097            else:
1098                data[field] = getattr(object,field)
1099        data['id'] = rpl[2]
1100        self.modifyRecord(**data)
1101    ###)
1102
1103
1104InitializeClass(StudentsCatalog)
1105
1106###)
1107
1108class CoursesCatalog(WAeUPTable): ###(
1109    security = ClassSecurityInfo()
1110
1111    meta_type = 'WAeUP Courses Catalog'
1112    name =  "courses_catalog"
1113    key = "code"
1114    def __init__(self,name=None):
1115        if name ==  None:
1116            name =  self.name
1117        WAeUPTable.__init__(self, name)
1118
1119    def manage_catalogReindex(self, REQUEST, RESPONSE, URL1): ###(
1120        """ clear the catalog, then re-index everything """
1121
1122        elapse = time.time()
1123        c_elapse = time.clock()
1124
1125        pgthreshold = self._getProgressThreshold()
1126        handler = (pgthreshold > 0) and ZLogHandler(pgthreshold) or None
1127        self.refreshCatalog(clear=1, pghandler=handler)
1128
1129        elapse = time.time() - elapse
1130        c_elapse = time.clock() - c_elapse
1131
1132        RESPONSE.redirect(
1133            URL1 +
1134            '/manage_catalogAdvanced?manage_tabs_message=' +
1135            urllib.quote('Catalog Updated \n'
1136                         'Total time: %s\n'
1137                         'Total CPU time: %s' % (`elapse`, `c_elapse`)))
1138    ###)
1139
1140    def reindexIndex(self, name, REQUEST,pghandler=None): ###(
1141        if isinstance(name, str):
1142            name = (name,)
1143        courses = self.portal_catalog(portal_type="Course")
1144        num_objects = len(courses)
1145        if pghandler:
1146            pghandler.init('Refreshing catalog: %s' % self.absolute_url(1), num_objects)
1147        for i in xrange(num_objects):
1148            if pghandler: pghandler.report(i)
1149            course_brain = courses[i]
1150            course_object = course_brain.getObject()
1151            pl = course_brain.getPath().split('/')
1152            data = {}
1153            cid = data[self.key] = course_brain.getId
1154            data['faculty'] = pl[-4]
1155            data['department'] = pl[-3]
1156            doc = course_object.getContent()
1157            for field in name:
1158                if field not in (self.key,'faculty','department'):
1159                    data[field] = getattr(doc,field)
1160            self.modifyRecord(**data)
1161        if pghandler: pghandler.finish()
1162    ###)
1163
1164    def refreshCatalog(self, clear=0, pghandler=None): ###(
1165        """ re-index everything we can find """
1166        if clear:
1167            self._catalog.clear()
1168        courses = self.portal_catalog(portal_type="Course")
1169        num_objects = len(courses)
1170        if pghandler:
1171            pghandler.init('Refreshing catalog: %s' % self.absolute_url(1), num_objects)
1172        #from pdb import set_trace;set_trace()
1173        for i in xrange(num_objects):
1174            if pghandler: pghandler.report(i)
1175            course_brain = courses[i]
1176            course_doc = course_brain.getObject().getContent()
1177            pl = course_brain.getPath().split('/')
1178            data = {}
1179            for field in self.schema():
1180                data[field] = getattr(course_doc,field,None)
1181            data[self.key] = course_brain.getId
1182            ai = pl.index('academics')
1183            data['faculty'] = pl[ai +1]
1184            data['department'] = pl[ai +2]
1185            if clear:
1186                self.addRecord(**data)
1187            else:
1188                self.modifyRecord(**data)
1189        if pghandler: pghandler.finish()
1190    ###)
1191
1192    security.declarePrivate('notify_event_listener') ###(
1193    def notify_event_listener(self,event_type,object,infos):
1194        "listen for events"
1195        if not infos.has_key('rpath'):
1196            return
1197        pt = getattr(object,'portal_type',None)
1198        mt = getattr(object,'meta_type',None)
1199        if pt != 'Course':
1200            return
1201        data = {}
1202        rpl = infos['rpath'].split('/')
1203        if event_type not in ("sys_add_object","sys_modify_object","sys_del_object"):
1204            return
1205        course_id = object.getId()
1206        data[self.key] = course_id
1207        if event_type == "sys_add_object" and mt == 'CPS Proxy Folder':
1208            try:
1209                self.addRecord(**data)
1210            except ValueError:
1211                return
1212            course_id = object.getId()
1213            doc = object.getContent()
1214            if doc is None:
1215                return
1216            for field in self.schema():
1217                data[field] = getattr(doc,field,None)
1218            data[self.key] = course_id
1219            ai = rpl.index('academics')
1220            data['faculty'] = rpl[ai +1]
1221            data['department'] = rpl[ai +2]
1222            self.modifyRecord(**data)
1223            return
1224        if event_type == "sys_del_object":
1225            self.deleteRecord(course_id)
1226            return
1227        if event_type == "sys_modify_object" and mt == 'Course':
1228            #from pdb import set_trace;set_trace()
1229            for field in self.schema():
1230                data[field] = getattr(object,field,None)
1231            course_id = object.aq_parent.getId()
1232            data[self.key] = course_id
1233            ai = rpl.index('academics')
1234            data['faculty'] = rpl[ai +1]
1235            data['department'] = rpl[ai +2]
1236            self.modifyRecord(**data)
1237    ###)
1238
1239
1240InitializeClass(CoursesCatalog)
1241###)
1242
1243class CourseResults(WAeUPTable): ###(
1244    security = ClassSecurityInfo()
1245
1246    meta_type = 'WAeUP Results Catalog'
1247    name = "course_results"
1248    key = "key" #student_id + level + course_id
1249    def __init__(self,name=None):
1250        if name ==  None:
1251            name = self.name
1252        WAeUPTable.__init__(self, name)
1253        self._queue = []
1254
1255    def addMultipleRecords(self, records): ###(
1256        """add many records"""
1257        added_keys = []
1258        for data in records:
1259            uid = key = "%(student_id)s|%(level_id)s|%(course_id)s" % data
1260            data['%s' % self.key] = uid
1261            res = self.searchResults({"%s" % self.key : uid})
1262            if len(res) > 0:
1263                raise ValueError("More than one record with uid %s" % uid)
1264            self.catalog_object(dict2ob(data), uid=uid)
1265        return uid
1266    ###)
1267
1268    def deleteResultsHere(self,level_id,student_id): ###(
1269        #import pdb;pdb.set_trace()
1270        query = Eq('student_id',student_id) & Eq('level_id', level_id)
1271        course_results = self.course_results.evalAdvancedQuery(query)
1272        for result in course_results:
1273            self.deleteRecord(result.key)
1274    ###)
1275
1276    def moveResultsHere(self,level,student_id): ###(
1277        #import pdb;pdb.set_trace()
1278        level_id = level.getId()
1279        query = Eq('student_id',student_id) & Eq('level_id', level_id)
1280        course_results = self.course_results.evalAdvancedQuery(query)
1281        existing_courses = [cr.code for cr in course_results]
1282        to_delete = []
1283        for code,obj in level.objectItems():
1284            to_delete.append(code)
1285            carry_over = False
1286            if code.endswith('_co'):
1287                carry_over = True
1288                code  = code[:-3]
1289            if code in existing_courses:
1290                continue
1291            course_result_doc = obj.getContent()
1292            data = {}
1293            course_id = code
1294            for field in self.schema():
1295                data[field] = getattr(course_result_doc,field,'')
1296            data['key'] = key = "%(student_id)s|%(level_id)s|%(course_id)s" % vars()
1297            data['student_id'] = student_id
1298            data['level_id'] = level_id
1299            session_id = self.getLevelSession(level.getContent(),student_id,level_id)
1300            data['session_id'] = session_id
1301            #data['queue_status'] = OBJECT_CREATED
1302            data['code'] = course_id
1303            data['carry_over'] = carry_over
1304            self.catalog_object(dict2ob(data), uid=key)
1305        level.manage_delObjects(to_delete)
1306    ###)
1307
1308    def getCourses(self,student_id,level_id): ###(
1309        query = Eq('student_id',student_id) & Eq('level_id', level_id)
1310        course_results = self.course_results.evalAdvancedQuery(query)
1311        carry_overs = []
1312        normal1 = []
1313        normal2 = []
1314        normal3 = []
1315        credits = 0
1316        for brain in course_results:
1317            d = {}
1318            credits += int(brain.credits)
1319            for field in self.schema():
1320                d[field] = getattr(brain,field,'')
1321            #d['sheduled'] = brain.queue_status == ADDING_SHEDULED
1322            d['coe'] = 'Elective'
1323            if brain.core_or_elective:
1324                d['coe'] = 'Core'
1325            id = code = d['id'] = brain.code
1326            d['code'] = code
1327            course = self.courses_catalog.evalAdvancedQuery(Eq('code',code))[0]
1328            d['title'] = course.title
1329
1330            # The courses_catalog contains strings and integers in its semester field.
1331            # Maybe this can be fixed by reindexing the catalog; the course schema declares the field as 'CPS Int Field'.
1332            d['semester'] = str(course.semester)
1333            if brain.carry_over:
1334                d['coe'] = 'Carry-Over'
1335                carry_overs.append(d)
1336            else:
1337                if d['semester'] == '1':
1338                    normal1.append(d)
1339
1340                elif d['semester'] == '2':
1341                    normal2.append(d)
1342                else:
1343                    normal3.append(d)
1344        #normal.sort(cmp=lambda x,y: cmp("%(semester)s%(code)s" % x,
1345        #                                "%(semester)s%(code)s" % y))
1346        carry_overs.sort(cmp=lambda x,y: cmp("%(semester)s%(code)s" % x,
1347                                             "%(semester)s%(code)s" % y))
1348        return credits,carry_overs,normal1,normal2,normal3
1349    ###)
1350
1351InitializeClass(CourseResults)
1352###)
1353
1354class OnlinePaymentsImport(WAeUPTable): ###(
1355
1356    meta_type = 'WAeUP Online Payment Transactions'
1357    name = "online_payments_import"
1358    key = "order_id"
1359    def __init__(self,name=None):
1360        if name ==  None:
1361            name = self.name
1362        WAeUPTable.__init__(self, name)
1363
1364
1365InitializeClass(OnlinePaymentsImport)
1366###)
1367
1368class ReturningImport(WAeUPTable): ###(
1369
1370    meta_type = 'Returning Import Table'
1371    name = "returning_import"
1372    key = "matric_no"
1373    def __init__(self,name=None):
1374        if name ==  None:
1375            name = self.name
1376        WAeUPTable.__init__(self, name)
1377
1378
1379InitializeClass(ReturningImport)
1380###)
1381
1382class ResultsImport(WAeUPTable): ###(
1383
1384    meta_type = 'Results Import Table'
1385    name = "results_import"
1386    key = "key"
1387    def __init__(self,name=None):
1388        if name ==  None:
1389            name = self.name
1390        WAeUPTable.__init__(self, name)
1391
1392
1393InitializeClass(ResultsImport)
1394
1395###)
1396
1397class PaymentsCatalog(WAeUPTable): ###(
1398
1399    meta_type = 'WAeUP Payments Catalog'
1400    name = "students_catalog"
1401    key = "id"
1402    def __init__(self,name=None):
1403        if name ==  None:
1404            name = self.name
1405        WAeUPTable.__init__(self, name)
1406
1407
1408InitializeClass(PaymentsCatalog)
1409
1410###)
1411
1412# BBB:
1413AccomodationTable = AccommodationTable