- Table creation and metadata operations
- Full CRUD operations testing
- Query functionality validation
- Batch operations (create, read, update, changeset, delete)
- Interactive cleanup options

Prerequisites:
3233# Import SDK components (assumes installation is already validated)
3334from PowerPlatform .Dataverse .client import DataverseClient
3435from PowerPlatform .Dataverse .core .errors import HttpError , MetadataError
36+ from PowerPlatform .Dataverse .models .upsert import UpsertItem
3537from azure .identity import InteractiveBrowserCredential
3638
3739
@@ -309,6 +311,234 @@ def test_query_records(client: DataverseClient, table_info: Dict[str, Any]) -> N
309311 print (" This might be expected if the table is very new." )
310312
311313
def test_batch_all_operations(client: DataverseClient, table_info: Dict[str, Any]) -> None:
    """Test every available batch operation type in a structured sequence.

    Operations covered:
      records.create (single + CreateMultiple)
      records.get (single by ID)
      records.update (single PATCH + UpdateMultiple)
      records.delete (multi, use_bulk_delete=False)
      records.upsert (graceful — requires configured alternate key)
      tables.get, tables.list
      query.sql
      changeset happy path (create + update via content-ID ref + delete)
      changeset rollback (failing op rolls back entire changeset)
      execute(continue_on_error=True) — mixed success/failure

    Args:
        client: Authenticated DataverseClient used to build and execute batches.
        table_info: Test-table metadata; must contain "table_schema_name" and
            may contain "table_logical_name" (defaults to the lowercased
            schema name).
    """
    print("\n -> Batch Operations Test (All Operations)")
    print("=" * 50)

    table_schema_name = table_info.get("table_schema_name")
    if not table_schema_name:
        # Guard: without a schema name every operation below would fail
        # (and the .lower() default would raise AttributeError on None).
        print("[WARN] table_info is missing 'table_schema_name' — skipping batch test")
        return
    logical_name = table_info.get("table_logical_name", table_schema_name.lower())
    # Custom attributes share the table's publisher prefix (e.g. "new" in "new_table").
    attr_prefix = table_schema_name.split("_", 1)[0] if "_" in table_schema_name else table_schema_name
    all_ids: list = []

    try:
        # -------------------------------------------------------------------
        # [1/8] CREATE — single record + CreateMultiple (list) in one batch
        # -------------------------------------------------------------------
        print("\n [1/8] Create — single + CreateMultiple (2 ops, 1 POST $batch)")
        batch = client.batch.new()
        batch.records.create(
            table_schema_name,
            {
                f"{attr_prefix}_name": f"Batch-A {datetime.now().strftime('%H:%M:%S')}",
                f"{attr_prefix}_count": 1,
                f"{attr_prefix}_is_active": True,
            },
        )
        batch.records.create(
            table_schema_name,
            [
                {
                    f"{attr_prefix}_name": f"Batch-B {datetime.now().strftime('%H:%M:%S')}",
                    f"{attr_prefix}_count": 2,
                    f"{attr_prefix}_is_active": True,
                },
                {
                    f"{attr_prefix}_name": f"Batch-C {datetime.now().strftime('%H:%M:%S')}",
                    f"{attr_prefix}_count": 3,
                    f"{attr_prefix}_is_active": True,
                },
            ],
        )
        result = batch.execute()
        # Collect IDs even on partial failure so cleanup can delete them later.
        all_ids = list(result.created_ids)
        if result.has_errors:
            for item in result.failed:
                print(f"[WARN] {item.status_code}: {item.error_message}")
        else:
            print(f"[OK] {len(result.succeeded)} ops → {len(all_ids)} records created: {all_ids}")

        # -------------------------------------------------------------------
        # [2/8] READ — get by ID + tables.get + tables.list + query.sql
        # All 4 reads in one batch request
        # -------------------------------------------------------------------
        if all_ids:
            print("\n [2/8] Read — records.get + tables.get + tables.list + query.sql (4 ops, 1 POST $batch)")
            batch = client.batch.new()
            batch.records.get(
                table_schema_name,
                all_ids[0],
                select=[f"{attr_prefix}_name", f"{attr_prefix}_count"],
            )
            batch.tables.get(table_schema_name)
            batch.tables.list()
            batch.query.sql(f"SELECT TOP 3 {attr_prefix}_name FROM {logical_name}")
            result = batch.execute()
            print(f"[OK] {len(result.succeeded)} succeeded, {len(result.failed)} failed")
            # Responses arrive in submission order, so index i maps to the ops above.
            for i, resp in enumerate(result.responses):
                if not resp.is_success:
                    print(f"   [{i}] FAILED {resp.status_code}: {resp.error_message}")
                    continue
                if i == 0 and resp.data:
                    print(f"   records.get → name='{resp.data.get(f'{attr_prefix}_name')}', count={resp.data.get(f'{attr_prefix}_count')}")
                elif i == 1 and resp.data:
                    print(f"   tables.get → LogicalName='{resp.data.get('LogicalName')}', EntitySet='{resp.data.get('EntitySetName')}'")
                elif i == 2 and resp.data:
                    print(f"   tables.list → {len(resp.data.get('value', []))} tables returned")
                elif i == 3 and resp.data:
                    print(f"   query.sql → {len(resp.data.get('value', []))} rows returned")

        # -------------------------------------------------------------------
        # [3/8] UPDATE — single PATCH + UpdateMultiple (broadcast) in one batch
        # -------------------------------------------------------------------
        if len(all_ids) >= 3:
            print(f"\n [3/8] Update — single PATCH + UpdateMultiple ({len(all_ids)} records, 2 ops, 1 POST $batch)")
            batch = client.batch.new()
            batch.records.update(table_schema_name, all_ids[0], {f"{attr_prefix}_count": 10})
            # Passing a list of IDs with one payload broadcasts the same update.
            batch.records.update(table_schema_name, all_ids[1:], {f"{attr_prefix}_count": 20})
            result = batch.execute()
            print(f"[OK] {len(result.succeeded)} updates succeeded, {len(result.failed)} failed")

        # -------------------------------------------------------------------
        # [4/8] CHANGESET (happy path) — create + update via content-ID + delete
        # All three changeset operation types committed atomically
        # -------------------------------------------------------------------
        if len(all_ids) >= 1:
            print("\n [4/8] Changeset (happy path) — cs.create + cs.update(ref) + cs.delete (1 transaction)")
            batch = client.batch.new()
            with batch.changeset() as cs:
                # `ref` is a content-ID reference usable by later ops in the same changeset.
                ref = cs.records.create(
                    table_schema_name,
                    {
                        f"{attr_prefix}_name": f"Batch-D {datetime.now().strftime('%H:%M:%S')}",
                        f"{attr_prefix}_count": 4,
                        f"{attr_prefix}_is_active": False,
                    },
                )
                cs.records.update(table_schema_name, ref, {f"{attr_prefix}_is_active": True})
                cs.records.delete(table_schema_name, all_ids[-1])
            result = batch.execute()
            if result.has_errors:
                for item in result.failed:
                    print(f"[WARN] Changeset error {item.status_code}: {item.error_message}")
            else:
                new_id = next(iter(result.created_ids), None)
                if new_id:
                    all_ids[-1] = new_id  # replace deleted id with the new one
                print(f"[OK] {len(result.succeeded)} ops committed atomically (create + update + delete)")

        # -------------------------------------------------------------------
        # [5/8] CHANGESET (rollback) — failing update rolls back the create
        # -------------------------------------------------------------------
        print("\n [5/8] Changeset (rollback) — cs.create + cs.update(nonexistent) → full rollback")
        nonexistent_id = "00000000-0000-0000-0000-000000000001"
        batch = client.batch.new()
        with batch.changeset() as cs:
            cs.records.create(
                table_schema_name,
                {
                    f"{attr_prefix}_name": f"Rollback-test {datetime.now().strftime('%H:%M:%S')}",
                    f"{attr_prefix}_count": 0,
                    f"{attr_prefix}_is_active": False,
                },
            )
            # Updating a nonexistent record must fail and abort the whole changeset.
            cs.records.update(table_schema_name, nonexistent_id, {f"{attr_prefix}_count": 999})
        result = batch.execute()
        if result.has_errors:
            leaked = list(result.created_ids)
            if not leaked:
                print("[OK] Changeset rollback verified: changeset failed, no records created")
            else:
                print(f"[WARN] Changeset failed but {len(leaked)} IDs leaked — queuing for cleanup")
                all_ids.extend(leaked)
        else:
            print("[WARN] Expected rollback but changeset succeeded (unexpected)")
            all_ids.extend(result.created_ids)

        # -------------------------------------------------------------------
        # [6/8] UPSERT — requires an alternate key configured on the table.
        # The test table has none, so this is expected to fail (graceful).
        # -------------------------------------------------------------------
        print("\n [6/8] Upsert — UpsertItem with alternate key (expected to fail: no alt key on test table)")
        try:
            batch = client.batch.new()
            batch.records.upsert(
                table_schema_name,
                [
                    UpsertItem(
                        alternate_key={f"{attr_prefix}_name": f"Upsert-E {datetime.now().strftime('%H:%M:%S')}"},
                        record={f"{attr_prefix}_count": 5, f"{attr_prefix}_is_active": True},
                    )
                ],
            )
            result = batch.execute()
            if result.has_errors:
                print(f"[WARN] Upsert failed as expected (no alternate key configured): {result.failed[0].status_code}")
            else:
                upsert_ids = list(result.created_ids)
                all_ids.extend(upsert_ids)
                print(f"[OK] Upsert succeeded: {len(upsert_ids)} record(s) — alternate key was accepted")
        except Exception as e:
            # Upsert support may be rejected client-side; keep the test sequence going.
            print(f"[WARN] Upsert skipped due to exception: {e}")

        # -------------------------------------------------------------------
        # [7/8] MIXED BATCH with continue_on_error
        # One intentional 404 alongside a valid get — both attempted
        # -------------------------------------------------------------------
        if all_ids:
            print("\n [7/8] Mixed batch (continue_on_error=True) — 1 bad get + 1 good get")
            batch = client.batch.new()
            batch.records.get(
                table_schema_name,
                "00000000-0000-0000-0000-000000000002",
                select=[f"{attr_prefix}_name"],
            )
            batch.records.get(
                table_schema_name,
                all_ids[0],
                select=[f"{attr_prefix}_name"],
            )
            result = batch.execute(continue_on_error=True)
            print(f"[OK] Succeeded: {len(result.succeeded)}, Failed: {len(result.failed)}")
            for item in result.failed:
                print(f"   Expected failure: {item.status_code} {item.error_message}")

        # -------------------------------------------------------------------
        # [8/8] DELETE — multi-delete (use_bulk_delete=False → individual DELETEs)
        # -------------------------------------------------------------------
        if all_ids:
            print(f"\n [8/8] Delete — {len(all_ids)} records via multi-delete (use_bulk_delete=False, 1 POST $batch)")
            batch = client.batch.new()
            batch.records.delete(table_schema_name, all_ids, use_bulk_delete=False)
            result = batch.execute(continue_on_error=True)
            print(f"[OK] Deleted {len(result.succeeded)}, failed {len(result.failed)}")

        print("\n [OK] Batch all-operations test completed!")

    except Exception as e:
        print(f"[WARN] Batch all-operations test encountered an issue: {e}")
        if all_ids:
            # Best-effort cleanup of anything created before the failure;
            # a cleanup failure here must not mask the original error.
            try:
                batch = client.batch.new()
                batch.records.delete(table_schema_name, all_ids, use_bulk_delete=False)
                batch.execute(continue_on_error=True)
            except Exception:
                pass
541+
312542def cleanup_test_data (client : DataverseClient , table_info : Dict [str , Any ], record_id : str ) -> None :
313543 """Clean up test data."""
314544 print ("\n -> Cleanup" )
@@ -403,6 +633,7 @@ def main():
403633 print (" - Table Creation & Metadata Operations" )
404634 print (" - Record CRUD Operations" )
405635 print (" - Query Functionality" )
636+ print (" - Batch Operations (create, read, update, changeset, delete)" )
406637 print (" - Interactive Cleanup" )
407638 print ("=" * 70 )
408639 print ("For installation validation, run examples/basic/installation_example.py first" )
@@ -422,6 +653,9 @@ def main():
422653 # Test querying
423654 test_query_records (client , table_info )
424655
656+ # Test batch operations (all operation types)
657+ test_batch_all_operations (client , table_info )
658+
425659 # Success summary
426660 print ("\n Functional Test Summary" )
427661 print ("=" * 50 )
@@ -430,6 +664,7 @@ def main():
430664 print ("[OK] Record Creation: Success" )
431665 print ("[OK] Record Reading: Success" )
432666 print ("[OK] Record Querying: Success" )
667+ print ("[OK] Batch Operations: Success" )
433668 print ("\n Your PowerPlatform Dataverse Client SDK is fully functional!" )
434669
435670 # Cleanup
0 commit comments