Changeset 11995
- Timestamp: 19 Nov 2014, 16:05:30 (10 years ago)
- Location: main/waeup.ikoba/trunk/src/waeup/ikoba/customers
- Files: 1 added, 2 edited

Legend: context lines are unchanged; lines prefixed with '-' were removed, lines prefixed with '+' were added.
main/waeup.ikoba/trunk/src/waeup/ikoba/customers/batching.py
r11985 → r11995 (diff reconstructed)

 from waeup.ikoba.interfaces import MessageFactory as _
 from waeup.ikoba.customers.interfaces import (
-    ICustomer, ICustomerUpdateByRegNo)
+    ICustomer, ICustomerUpdateByRegNo,
+    ICustomerDocument)
 from waeup.ikoba.customers.workflow import (
     IMPORTABLE_STATES, IMPORTABLE_TRANSITIONS)
…
 class CustomerProcessorBase(BatchProcessor):
-    """A base for customer subitem processor .
+    """A base for customer subitem processors.

     Helps reducing redundancy.
…
         raw_header = reader.next()
         for num, field in enumerate(headerfields):
-            if field not in ['customer_id', 'reg_number', 'p_id', 'code', 'level'
+            if field not in ['customer_id', 'reg_number', 'id', 'code', 'level'
                 ] and mode == 'remove':
                 continue
…
             result[raw_header[num]] = field
         return result
+
+class CustomerDocumentProcessor(CustomerProcessorBase):
+    """A batch processor for ICustomerDocument objects.
+    """
+    grok.implements(IBatchProcessor)
+    grok.provides(IBatchProcessor)
+    grok.context(Interface)
+    util_name = 'customerdocumentprocessor'
+    grok.name(util_name)
+
+    name = _('CustomerDocument Processor')
+    iface = ICustomerDocument
+    factory_name = 'waeup.CustomerDocument'
+
+    location_fields = []
+    additional_fields = ['id']
+    additional_headers = []
+
+    def checkHeaders(self, headerfields, mode='ignore'):
+        super(CustomerDocumentProcessor, self).checkHeaders(headerfields)
+        if mode in ('update', 'remove') and not 'id' in headerfields:
+            raise FatalCSVError(
+                "Need id for import in update and remove modes!")
+        return True
+
+    def getParent(self, row, site):
+        customer = self._getCustomer(row, site)
+        if customer is None:
+            return None
+        return customer['documents']
+
+    def getEntry(self, row, site):
+        documents = self.getParent(row, site)
+        if documents is None:
+            return None
+        id = row.get('id', None)
+        if id is None:
+            return None
+        entry = documents.get(id)
+        return entry
+
+    def updateEntry(self, obj, row, site, filename):
+        """Update obj to the values given in row.
+        """
+        items_changed = super(CustomerDocumentProcessor, self).updateEntry(
+            obj, row, site, filename)
+        customer = self.getParent(row, site).__parent__
+        customer.__parent__.logger.info(
+            '%s - %s - %s - updated: %s'
+            % (self.name, filename, customer.customer_id, items_changed))
+        return
+
+    def addEntry(self, obj, row, site):
+        parent = self.getParent(row, site)
+        id = row['id'].strip('#')
+        parent[id] = obj
+        return
+
+    def delEntry(self, row, site):
+        document = self.getEntry(row, site)
+        parent = self.getParent(row, site)
+        if document is not None:
+            customer = self._getCustomer(row, site)
+            customer.__parent__.logger.info('%s - Document removed: %s'
+                % (customer.customer_id, document.id))
+            del parent[document.id]
+        return
+
+    def checkConversion(self, row, mode='ignore'):
+        """Validates all values in row.
+        """
+        errs, inv_errs, conv_dict = super(
+            CustomerDocumentProcessor, self).checkConversion(row, mode=mode)
+
+        # We have to check id.
+        id = row.get('id', None)
+        if not id:
+            timestamp = ("%d" % int(time()*10000))[1:]
+            id = "d%s" % timestamp
+            conv_dict['id'] = id
+            return errs, inv_errs, conv_dict
+        if not id.startswith('d') or len(id) != 14:
+            errs.append(('id','invalid format'))
+        return errs, inv_errs, conv_dict
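Note (not part of the changeset): the format check in checkConversion above relies on how document ids are auto-generated when a row arrives without one. A minimal standalone sketch of that scheme, illustrating why valid ids start with 'd' and are exactly 14 characters long:

    # Illustrative sketch only; mirrors the id generation in
    # CustomerDocumentProcessor.checkConversion above.
    from time import time

    timestamp = ("%d" % int(time() * 10000))[1:]  # current time in 1/10000 s, first digit dropped -> 13 digits
    doc_id = "d%s" % timestamp                    # 'd' prefix + 13 digits = 14 characters

    assert doc_id.startswith('d') and len(doc_id) == 14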
main/waeup.ikoba/trunk/src/waeup/ikoba/customers/tests/test_batching.py
r11964 → r11995 (diff reconstructed)

 ## $Id: test_batching.py 11756 2014-07-09 12:46:08Z henrik $
 ##
-## Copyright (C) 2011 Uli Fouquet & Henrik Bettermann
+## Copyright (C) 2014 Uli Fouquet & Henrik Bettermann
 ## This program is free software; you can redistribute it and/or modify
 ## it under the terms of the GNU General Public License as published by
…
 from waeup.ikoba.app import Company
 from waeup.ikoba.interfaces import IBatchProcessor, FatalCSVError, IUserAccount
-from waeup.ikoba.customers.batching import CustomerProcessor
+from waeup.ikoba.customers.batching import (
+    CustomerProcessor, CustomerDocumentProcessor)
 from waeup.ikoba.customers.customer import Customer
 from waeup.ikoba.testing import FunctionalLayer, FunctionalTestCase
…
 CUSTOMER_HEADER_FIELDS_DUPLICATES = CUSTOMER_SAMPLE_DATA_DUPLICATES.split(
+    '\n')[0].split(',')
+
+DOCUMENT_SAMPLE_DATA = open(
+    os.path.join(os.path.dirname(__file__), 'sample_document_data.csv'),
+    'rb').read()
+
+DOCUMENT_HEADER_FIELDS = DOCUMENT_SAMPLE_DATA.split(
     '\n')[0].split(',')
…
         shutil.rmtree(os.path.dirname(fin_file))

+class CustomerDocumentProcessorTest(CustomerImportExportSetup):
+
+    def setUp(self):
+        super(CustomerDocumentProcessorTest, self).setUp()
+
+        # Add customer with document
+        customer = Customer()
+        customer.firstname = u'Anna'
+        customer.lastname = u'Tester'
+        customer.reg_number = u'123'
+        self.app['customers'].addCustomer(customer)
+        self.customer = self.app['customers'][customer.customer_id]
+        document = createObject(u'waeup.CustomerDocument')
+        document.id = 'd120'
+        self.customer['documents'][document.id] = document
+
+        # Import customers with subobjects
+        customer_file = os.path.join(self.workdir, 'sample_customer_data.csv')
+        open(customer_file, 'wb').write(CUSTOMER_SAMPLE_DATA)
+        num, num_warns, fin_file, fail_file = CustomerProcessor().doImport(
+            customer_file, CUSTOMER_HEADER_FIELDS)
+        shutil.rmtree(os.path.dirname(fin_file))
+
+        self.processor = CustomerDocumentProcessor()
+        self.csv_file = os.path.join(
+            self.workdir, 'sample_document_data.csv')
+        open(self.csv_file, 'wb').write(DOCUMENT_SAMPLE_DATA)
+
+    def test_interface(self):
+        # Make sure we fulfill the interface contracts.
+        assert verifyObject(IBatchProcessor, self.processor) is True
+        assert verifyClass(
+            IBatchProcessor, CustomerDocumentProcessor) is True
+
+    def test_getEntry(self):
+        assert self.processor.getEntry(
+            dict(customer_id='ID_NONE', id='nonsense'), self.app) is None
+        assert self.processor.getEntry(
+            dict(customer_id=self.customer.customer_id, id='d120'),
+            self.app) is self.customer['documents']['d120']
+
+    def test_delEntry(self):
+        assert self.processor.getEntry(
+            dict(customer_id=self.customer.customer_id, id='d120'),
+            self.app) is self.customer['documents']['d120']
+        self.assertEqual(len(self.customer['documents'].keys()), 1)
+        self.processor.delEntry(
+            dict(customer_id=self.customer.customer_id, id='d120'),
+            self.app)
+        assert self.processor.getEntry(
+            dict(customer_id=self.customer.customer_id, id='d120'),
+            self.app) is None
+        self.assertEqual(len(self.customer['documents'].keys()), 0)
+
+    def test_addEntry(self):
+        self.assertEqual(len(self.customer['documents'].keys()), 1)
+        document1 = createObject(u'waeup.CustomerDocument')
+        document1.id = 'p234'
+        self.processor.addEntry(
+            document1, dict(customer_id=self.customer.customer_id, id='p234'),
+            self.app)
+        self.assertEqual(len(self.customer['documents'].keys()), 2)
+        self.assertEqual(self.customer['documents']['p234'].id, 'p234')
+        document2 = createObject(u'waeup.CustomerDocument')
+        document1.id = 'nonsense'
+
+    def test_checkConversion(self):
+        errs, inv_errs, conv_dict = self.processor.checkConversion(
+            dict(id='d1266236341955'))
+        self.assertEqual(len(errs), 0)
+        errs, inv_errs, conv_dict = self.processor.checkConversion(
+            dict(id='nonsense'))
+        self.assertEqual(len(errs), 1)
+        timestamp = ("%d" % int(time()*10000))[1:]
+        id = "d%s" % timestamp
+        errs, inv_errs, conv_dict = self.processor.checkConversion(
+            dict(id=id))
+        self.assertEqual(len(errs), 0)
+
+    def test_import(self):
+        num, num_warns, fin_file, fail_file = self.processor.doImport(
+            self.csv_file, DOCUMENT_HEADER_FIELDS, 'create')
+        self.assertEqual(num_warns, 0)
+        document = self.processor.getEntry(dict(reg_number='1',
+            id='d1266236341953'), self.app)
+        self.assertEqual(
+            self.app['customers']['X666666']['documents']['d1266236341953'],
+            document)
+        self.assertEqual(document.id, 'd1266236341953')
+        document = self.processor.getEntry(dict(reg_number='3',
+            id='d1266236341955'), self.app)
+        shutil.rmtree(os.path.dirname(fin_file))
+        logcontent = open(self.logfile).read()
+        # Logging message from updateEntry
+        self.assertTrue(
+            'INFO - system - CustomerDocument Processor - '
+            'sample_document_data - X666666 - updated: '
+            'id=d1266236341953, title=My first doc'
+            in logcontent)
+
+    def test_import_update(self):
+        # We perform the same import twice,
+        # the second time in update mode. The number
+        # of warnings must be the same.
+        num, num_warns, fin_file, fail_file = self.processor.doImport(
+            self.csv_file, DOCUMENT_HEADER_FIELDS, 'create')
+        shutil.rmtree(os.path.dirname(fin_file))
+        num, num_warns, fin_file, fail_file = self.processor.doImport(
+            self.csv_file, DOCUMENT_HEADER_FIELDS, 'update')
+        self.assertEqual(num_warns, 1)  # There is one record without id
+        shutil.rmtree(os.path.dirname(fin_file))
+
+    def test_import_remove(self):
+        # We perform the same import twice,
+        # the second time in remove mode. The number
+        # of warnings must be the same.
+        num, num_warns, fin_file, fail_file = self.processor.doImport(
+            self.csv_file, DOCUMENT_HEADER_FIELDS, 'create')
+        shutil.rmtree(os.path.dirname(fin_file))
+        num, num_warns, fin_file, fail_file = self.processor.doImport(
+            self.csv_file, DOCUMENT_HEADER_FIELDS, 'remove')
+        self.assertEqual(num_warns, 1)  # There is one record without id
+        shutil.rmtree(os.path.dirname(fin_file))
+        logcontent = open(self.logfile).read()
+        self.assertTrue(
+            'INFO - system - K1000001 - Document removed: d1266236341955'
+            in logcontent)
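Note (not part of the changeset): outside the functional test layer, the new processor is driven like the other batch processors shown in these tests. A minimal sketch, assuming an already set-up Ikoba site and a CSV file whose header row matches the processor's fields; the file name 'documents.csv' is purely illustrative, while doImport and its four-value return tuple come from the tests above:

    # Illustrative sketch; assumes a configured Ikoba site is active.
    from waeup.ikoba.customers.batching import CustomerDocumentProcessor

    processor = CustomerDocumentProcessor()
    # Header fields are taken from the first CSV line, as in the tests above.
    header_fields = open('documents.csv', 'rb').read().split('\n')[0].split(',')

    # Modes: 'create' adds documents; 'update' and 'remove' require an id column.
    num, num_warns, fin_file, fail_file = processor.doImport(
        'documents.csv', header_fields, 'create')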