Changeset 5559 for WAeUP_SRP/trunk/WAeUPTables.py
- Timestamp: 5 Oct 2010, 12:20:34 (14 years ago)
- File: 1 edited
WAeUP_SRP/trunk/WAeUPTables.py
Diff from r5202 to r5559:

Removed (r5202 lines 275–397): the private CSV import helper _import_old.

```python
    security.declarePrivate("_import_old") ###(
    def _import_old(self,filename,schema,layout, mode,logger):
        "import data from csv"
        import transaction
        import random
        pm = self.portal_membership
        member = pm.getAuthenticatedMember()
        current = DateTime.DateTime().strftime("%d-%m-%y_%H_%M_%S")
        import_fn = "%s/import/%s.csv" % (i_home,filename)
        imported_fn = "%s/import/%s_imported%s.csv" % (i_home,filename,current)
        not_imported_fn = "%s/import/%s_not_imported%s.csv" % (i_home,filename,current)
        start = True
        tr_count = 1
        total_imported = 0
        total_not_imported = 0
        total = 0
        iname = "%s" % filename
        not_imported = []
        imported = []
        valid_records = []
        invalid_records = []
        d = {}
        d['mode'] = mode
        d['imported'] = total_imported
        d['not_imported'] = total_not_imported
        d['valid_records'] = valid_records
        d['invalid_records'] = invalid_records
        d['import_fn'] = import_fn
        d['imported_fn'] = imported_fn
        d['not_imported_fn'] = not_imported_fn
        if schema is None:
            em = 'No schema specified'
            logger.error(em)
            return d
        if layout is None:
            em = 'No layout specified'
            logger.error(em)
            return d
        validators = {}
        for widget in layout.keys():
            try:
                validators[widget] = layout[widget].validate
            except AttributeError:
                logger.info('%s has no validate attribute' % widget)
                return d
        # if mode == 'edit':
        #     importer = self.importEdit
        # elif mode == 'add':
        #     importer = self.importAdd
        # else:
        #     importer = None
        try:
            items = csv.DictReader(open(import_fn,"rb"),
                                   dialect="excel",
                                   skipinitialspace=True)
        except:
            em = 'Error reading %s.csv' % filename
            logger.error(em)
            return d
        #import pdb;pdb.set_trace()
        for item in items:
            if start:
                start = False
                logger.info('%s starts import from %s.csv' % (member,filename))
                #import_keys = [k for k in item.keys() if not k.startswith('ignore')]
                attrs = csv.reader(open("%s/import/%s.csv" % (i_home,filename),"rb"),
                                   dialect="excel",
                                   skipinitialspace=True).next()
                import_keys = [k for k in attrs if not (k.startswith('ignore') or k.isupper())]
                diff2schema = set(import_keys).difference(set(schema.keys()))
                diff2layout = set(import_keys).difference(set(layout.keys()))
                if diff2layout:
                    em = "not ignorable key(s) %s found in heading" % diff2layout
                    logger.info(em)
                    return d
                s = ','.join(['"%s"' % fn for fn in import_keys])
                open(not_imported_fn,"a").write(s + ',"Error"'+ '\n')
                #s = '"id",' + s
                open(imported_fn,"a").write(s + '\n')
                format = ','.join(['"%%(%s)s"' % fn for fn in import_keys])
                format_error = format + ',"%(Error)s"'
                #format = '"%(id)s",'+ format
            adapters = [MappingStorageAdapter(schema, item)]
            dm = DataModel(item, adapters,context=self)
            ds = DataStructure(data=item,datamodel=dm)
            error_string = ""
            #import pdb;pdb.set_trace()
            for k in import_keys:
                if not validators[k](ds,mode=mode):
                    error_string += " %s : %s" % (k,ds.getError(k))
            # if not error_string and importer:
            #     item.update(dm)
            #     item['id'],error = importer(item)
            #     if error:
            #         error_string += error
            if error_string:
                item['Error'] = error_string
                invalid_records.append(dm)
                not_imported.append(format_error % item)
                total_not_imported += 1
            else:
                em = format % item
                valid_records.append(dm)
                imported.append(em)
                #logger.info("%(total_imported)d of %(total)d %(em)s" % vars())
                tr_count += 1
                total_imported += 1
            total += 1
        if len(imported) > 0:
            open(imported_fn,"a").write('\n'.join(imported))
        if len(not_imported) > 0:
            open(not_imported_fn,"a").write('\n'.join(not_imported))
        #em = "Imported: %d, not imported: %d of total %d" % (total_imported,total_not_imported,total)
        d['imported'] = total_imported
        d['not_imported'] = total_not_imported
        d['valid_records'] = valid_records
        d['invalid_records'] = invalid_records
        d['imported_fn'] = imported_fn
        d['not_imported_fn'] = not_imported_fn
        #logger.info(em)
        return d
    ###)
```
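The core pattern of the removed helper is to filter the CSV header and build a %-style mapping format string, then render each row dict through it. Below is a minimal standalone sketch of just that pattern; it is not part of the changeset. The in-memory CSV text and column names are invented for illustration, and it is written for Python 3 for convenience, whereas the original (with .next() and "rb" mode) is Python 2.

```python
import csv
import io

# Hypothetical stand-in for the "%s/import/%s.csv" file _import_old reads.
raw = '"id","name","ignore_comment","LEGACY"\n"a1","Ada","x","y"\n'

reader = csv.DictReader(io.StringIO(raw), dialect="excel", skipinitialspace=True)
attrs = reader.fieldnames

# Same filter as _import_old: drop 'ignore*' columns and all-uppercase columns.
import_keys = [k for k in attrs if not (k.startswith('ignore') or k.isupper())]

# Header line and %-mapping row format, e.g. '"%(id)s","%(name)s"'.
header = ','.join(['"%s"' % fn for fn in import_keys])
row_format = ','.join(['"%%(%s)s"' % fn for fn in import_keys])

print(header)                 # "id","name"
for item in reader:
    print(row_format % item)  # "a1","Ada"
```

In the removed method, rows that fail widget validation are rendered with an extra Error column (format_error = format + ',"%(Error)s"') and written to the *_not_imported*.csv file instead of the imported file.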
Unchanged between the two revisions: the _import block, from security.declarePrivate("_import") ###( (r5202 line 398, r5559 line 276) down to its closing ###) (r5202 line 1991, r5559 line 1869).

Added (r5559 lines 1871–1914), just before InitializeClass(PaymentsCatalog): a dumpPayments method that exports all paid online payments of a session to CSV, merging each payment with the matching record from the students catalog.

```python
    security.declareProtected(ModifyPortalContent,"dumpPayments")###(
    def dumpPayments(self,session_id='09'):
        """dump all valid payments and combine with student data """
        member = self.portal_membership.getAuthenticatedMember()
        logger = logging.getLogger('WAeUPTables.WAeUPTable.dumpPayments')
        current = DateTime.DateTime().strftime("%d-%m-%y_%H_%M_%S")
        export_file = "%s/export/valid_payments%s_%s.csv" % (i_home,session_id,current,)

        pm_catalog = self.payments_catalog
        query = Eq('status','paid') & Eq('type','online') & Eq('session_id',session_id)
        payments = pm_catalog.evalAdvancedQuery(query)
        payments_dic = []
        s_catalog = self.students_catalog
        fields_pm = pm_catalog.schema()
        fields_s = s_catalog.schema()
        fields = fields_pm + fields_s

        format = '"%(' + ')s","%('.join(fields) + ')s"'
        #import pdb;pdb.set_trace()
        for brain in payments:
            d = {}
            for field in fields_pm:
                d[field] = getattr(brain,field,'')

            student_id = getattr(brain,'student_id','')
            query = Eq('id',student_id)
            student = s_catalog.evalAdvancedQuery(query)
            if student:
                for field in fields_s:
                    d[field] = getattr(student[0],field,'')
            payments_dic.append(format % d)

        if not os.path.exists(export_file):
            file_handler = open(export_file,"a")
            headline = ','.join(fields)
            file_handler.write(headline +'\n')
        else:
            file_handler = open(export_file,"a")
        for line in payments_dic:
            file_handler.write(line +'\n')

        return 'ready'
```
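dumpPayments builds its CSV line format directly from the concatenated catalog schemas and then renders one merged payment-plus-student dict per line. The sketch below isolates that formatting step; it is not part of the changeset, and the field names and records are invented stand-ins for what the payments_catalog and students_catalog brains would supply.

```python
# Hypothetical schema fields; the real ones come from payments_catalog.schema()
# and students_catalog.schema().
fields_pm = ['order_id', 'amount', 'status']
fields_s = ['id', 'name', 'faculty']
fields = fields_pm + fields_s

# Same construction as dumpPayments:
# '"%(order_id)s","%(amount)s",...,"%(faculty)s"'
row_format = '"%(' + ')s","%('.join(fields) + ')s"'

# Invented stand-ins for one payment brain and its matching student record.
payment = {'order_id': 'p1', 'amount': '14500', 'status': 'paid'}
student = {'id': 'A1234567', 'name': 'Ada', 'faculty': 'SCI'}

d = {}
d.update(payment)
d.update(student)

print(','.join(fields))   # header line, written only when the export file is new
print(row_format % d)     # "p1","14500","paid","A1234567","Ada","SCI"
```

Note that in the added method the header line is written only when the export file does not yet exist, so repeated calls append further rows under the original header.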