Skip to content

Commit d24eee2

Browse files
author
Samson Gebre
committed
feat: add comprehensive batch operations testing and update documentation
1 parent d9d4bc8 commit d24eee2

2 files changed

Lines changed: 315 additions & 2 deletions

File tree

examples/advanced/walkthrough.py

Lines changed: 80 additions & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -10,6 +10,7 @@
1010
- Querying with filtering, paging, and SQL
1111
- Picklist label-to-value conversion
1212
- Column management
13+
- Batch operations (create, read, update, changeset, delete in one HTTP request)
1314
- Cleanup
1415
1516
Prerequisites:
@@ -323,10 +324,86 @@ def main():
323324
print(f" (Deleting {len(paging_ids)} paging demo records)")
324325

325326
# ============================================================================
326-
# 11. CLEANUP
327+
# 11. BATCH OPERATIONS
327328
# ============================================================================
328329
print("\n" + "=" * 80)
329-
print("11. Cleanup")
330+
print("11. Batch Operations")
331+
print("=" * 80)
332+
333+
# Batch create: send 2 creates in a single POST $batch
334+
log_call("client.batch.new() + batch.records.create(...) x2 + batch.execute()")
335+
batch = client.batch.new()
336+
batch.records.create(
337+
table_name,
338+
{
339+
"new_Title": "Batch task alpha",
340+
"new_Quantity": 1,
341+
"new_Amount": 25.0,
342+
"new_Completed": False,
343+
"new_Priority": Priority.LOW,
344+
},
345+
)
346+
batch.records.create(
347+
table_name,
348+
{
349+
"new_Title": "Batch task beta",
350+
"new_Quantity": 2,
351+
"new_Amount": 50.0,
352+
"new_Completed": False,
353+
"new_Priority": Priority.MEDIUM,
354+
},
355+
)
356+
result = batch.execute()
357+
batch_ids = list(result.created_ids)
358+
print(f"[OK] Batch create: {len(result.succeeded)} operations in one HTTP request, {len(batch_ids)} records created")
359+
360+
# Batch get: read both records in a single request
361+
log_call("client.batch.new() + batch.records.get(...) x2 + batch.execute()")
362+
batch = client.batch.new()
363+
for bid in batch_ids:
364+
batch.records.get(table_name, bid, select=["new_title", "new_quantity"])
365+
result = batch.execute()
366+
print(f"[OK] Batch get: {len(result.succeeded)} reads in one HTTP request")
367+
for resp in result.succeeded:
368+
if resp.data:
369+
print(f" new_title='{resp.data.get('new_title')}', new_quantity={resp.data.get('new_quantity')}")
370+
371+
# Changeset: create + update atomically (all-or-nothing)
372+
log_call("with batch.changeset() as cs: cs.records.create(...); cs.records.update(cs_ref, ...)")
373+
batch = client.batch.new()
374+
with batch.changeset() as cs:
375+
cs_ref = cs.records.create(
376+
table_name,
377+
{
378+
"new_Title": "Changeset task",
379+
"new_Quantity": 5,
380+
"new_Amount": 100.0,
381+
"new_Completed": False,
382+
"new_Priority": Priority.HIGH,
383+
},
384+
)
385+
cs.records.update(table_name, cs_ref, {"new_Completed": True})
386+
result = batch.execute()
387+
if not result.has_errors:
388+
batch_ids.extend(result.created_ids)
389+
print(f"[OK] Changeset: {len(result.succeeded)} operations committed atomically")
390+
else:
391+
for item in result.failed:
392+
print(f"[WARN] Changeset error {item.status_code}: {item.error_message}")
393+
394+
# Batch delete: clean up all batch-created records in one request
395+
log_call(f"client.batch.new() + batch.records.delete(...) x{len(batch_ids)} + batch.execute()")
396+
batch = client.batch.new()
397+
for bid in batch_ids:
398+
batch.records.delete(table_name, bid)
399+
result = batch.execute(continue_on_error=True)
400+
print(f"[OK] Batch delete: {len(result.succeeded)} records deleted in one HTTP request")
401+
402+
# ============================================================================
403+
# 12. CLEANUP
404+
# ============================================================================
405+
print("\n" + "=" * 80)
406+
print("12. Cleanup")
330407
print("=" * 80)
331408

332409
log_call(f"client.tables.delete('{table_name}')")
@@ -356,6 +433,7 @@ def main():
356433
print(" [OK] Picklist label-to-value conversion")
357434
print(" [OK] Column management")
358435
print(" [OK] Single and bulk delete operations")
436+
print(" [OK] Batch operations (create, read, changeset, delete)")
359437
print(" [OK] Table cleanup")
360438
print("=" * 80)
361439

examples/basic/functional_testing.py

Lines changed: 235 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -10,6 +10,7 @@
1010
- Table creation and metadata operations
1111
- Full CRUD operations testing
1212
- Query functionality validation
13+
- Batch operations (create, read, update, changeset, delete)
1314
- Interactive cleanup options
1415
1516
Prerequisites:
@@ -32,6 +33,7 @@
3233
# Import SDK components (assumes installation is already validated)
3334
from PowerPlatform.Dataverse.client import DataverseClient
3435
from PowerPlatform.Dataverse.core.errors import HttpError, MetadataError
36+
from PowerPlatform.Dataverse.models.upsert import UpsertItem
3537
from azure.identity import InteractiveBrowserCredential
3638

3739

@@ -309,6 +311,234 @@ def test_query_records(client: DataverseClient, table_info: Dict[str, Any]) -> N
309311
print(" This might be expected if the table is very new.")
310312

311313

314+
def test_batch_all_operations(client: DataverseClient, table_info: Dict[str, Any]) -> None:
    """Exercise every batch operation type the SDK exposes, in a fixed sequence.

    Operations covered:
      records.create (single + CreateMultiple)
      records.get (single by ID)
      records.update (single PATCH + UpdateMultiple)
      records.delete (multi, use_bulk_delete=False)
      records.upsert (graceful — requires configured alternate key)
      tables.get, tables.list
      query.sql
      changeset happy path (create + update via content-ID ref + delete)
      changeset rollback (failing op rolls back entire changeset)
      execute(continue_on_error=True) — mixed success/failure

    Args:
        client: Connected Dataverse client whose ``batch`` facade is under test.
        table_info: Metadata dict for the scratch table; must contain
            ``table_schema_name`` and optionally ``table_logical_name``.
    """
    print("\n-> Batch Operations Test (All Operations)")
    print("=" * 50)

    table_schema_name = table_info.get("table_schema_name")
    logical_name = table_info.get("table_logical_name", table_schema_name.lower())
    # Custom attributes share the publisher prefix of the table's schema name.
    attr_prefix = table_schema_name.split("_", 1)[0] if "_" in table_schema_name else table_schema_name
    all_ids: list = []

    def _stamp() -> str:
        # Short wall-clock suffix so repeated runs create distinct record names.
        return datetime.now().strftime('%H:%M:%S')

    try:
        # -------------------------------------------------------------------
        # [1/8] CREATE — one single-record op plus one CreateMultiple (list)
        # op, both carried by a single POST $batch round trip.
        # -------------------------------------------------------------------
        print("\n[1/8] Create — single + CreateMultiple (2 ops, 1 POST $batch)")
        batch = client.batch.new()
        batch.records.create(
            table_schema_name,
            {
                f"{attr_prefix}_name": f"Batch-A {_stamp()}",
                f"{attr_prefix}_count": 1,
                f"{attr_prefix}_is_active": True,
            },
        )
        batch.records.create(
            table_schema_name,
            [
                {
                    f"{attr_prefix}_name": f"Batch-B {_stamp()}",
                    f"{attr_prefix}_count": 2,
                    f"{attr_prefix}_is_active": True,
                },
                {
                    f"{attr_prefix}_name": f"Batch-C {_stamp()}",
                    f"{attr_prefix}_count": 3,
                    f"{attr_prefix}_is_active": True,
                },
            ],
        )
        result = batch.execute()
        all_ids = list(result.created_ids)
        if result.has_errors:
            for failure in result.failed:
                print(f"[WARN] {failure.status_code}: {failure.error_message}")
        else:
            print(f"[OK] {len(result.succeeded)} ops → {len(all_ids)} records created: {all_ids}")

        # -------------------------------------------------------------------
        # [2/8] READ — records.get, tables.get, tables.list and query.sql
        # bundled into one batch request (four reads, one round trip).
        # -------------------------------------------------------------------
        if all_ids:
            print("\n[2/8] Read — records.get + tables.get + tables.list + query.sql (4 ops, 1 POST $batch)")
            batch = client.batch.new()
            batch.records.get(
                table_schema_name,
                all_ids[0],
                select=[f"{attr_prefix}_name", f"{attr_prefix}_count"],
            )
            batch.tables.get(table_schema_name)
            batch.tables.list()
            batch.query.sql(f"SELECT TOP 3 {attr_prefix}_name FROM {logical_name}")
            result = batch.execute()
            print(f"[OK] {len(result.succeeded)} succeeded, {len(result.failed)} failed")
            # Responses come back in submission order, so the index identifies
            # which of the four reads each one belongs to.
            for idx, op_resp in enumerate(result.responses):
                if not op_resp.is_success:
                    print(f"  [{idx}] FAILED {op_resp.status_code}: {op_resp.error_message}")
                elif idx == 0 and op_resp.data:
                    print(f"  records.get → name='{op_resp.data.get(f'{attr_prefix}_name')}', count={op_resp.data.get(f'{attr_prefix}_count')}")
                elif idx == 1 and op_resp.data:
                    print(f"  tables.get → LogicalName='{op_resp.data.get('LogicalName')}', EntitySet='{op_resp.data.get('EntitySetName')}'")
                elif idx == 2 and op_resp.data:
                    print(f"  tables.list → {len(op_resp.data.get('value', []))} tables returned")
                elif idx == 3 and op_resp.data:
                    print(f"  query.sql → {len(op_resp.data.get('value', []))} rows returned")

        # -------------------------------------------------------------------
        # [3/8] UPDATE — one single PATCH plus an UpdateMultiple broadcast to
        # the remaining IDs, again in a single batch request.
        # -------------------------------------------------------------------
        if len(all_ids) >= 3:
            print(f"\n[3/8] Update — single PATCH + UpdateMultiple ({len(all_ids)} records, 2 ops, 1 POST $batch)")
            batch = client.batch.new()
            batch.records.update(table_schema_name, all_ids[0], {f"{attr_prefix}_count": 10})
            batch.records.update(table_schema_name, all_ids[1:], {f"{attr_prefix}_count": 20})
            result = batch.execute()
            print(f"[OK] {len(result.succeeded)} updates succeeded, {len(result.failed)} failed")

        # -------------------------------------------------------------------
        # [4/8] CHANGESET (happy path) — create, update-by-content-ID-ref and
        # delete committed together as one atomic transaction.
        # -------------------------------------------------------------------
        if len(all_ids) >= 1:
            print("\n[4/8] Changeset (happy path) — cs.create + cs.update(ref) + cs.delete (1 transaction)")
            batch = client.batch.new()
            with batch.changeset() as txn:
                created_ref = txn.records.create(
                    table_schema_name,
                    {
                        f"{attr_prefix}_name": f"Batch-D {_stamp()}",
                        f"{attr_prefix}_count": 4,
                        f"{attr_prefix}_is_active": False,
                    },
                )
                # The update targets the in-flight create via its content-ID.
                txn.records.update(table_schema_name, created_ref, {f"{attr_prefix}_is_active": True})
                txn.records.delete(table_schema_name, all_ids[-1])
            result = batch.execute()
            if result.has_errors:
                for failure in result.failed:
                    print(f"[WARN] Changeset error {failure.status_code}: {failure.error_message}")
            else:
                replacement_id = next(iter(result.created_ids), None)
                if replacement_id:
                    all_ids[-1] = replacement_id  # replace deleted id with the new one
                print(f"[OK] {len(result.succeeded)} ops committed atomically (create + update + delete)")

        # -------------------------------------------------------------------
        # [5/8] CHANGESET (rollback) — an update against a nonexistent ID
        # should roll back the create in the same changeset.
        # -------------------------------------------------------------------
        print("\n[5/8] Changeset (rollback) — cs.create + cs.update(nonexistent) → full rollback")
        missing_id = "00000000-0000-0000-0000-000000000001"
        batch = client.batch.new()
        with batch.changeset() as txn:
            txn.records.create(
                table_schema_name,
                {
                    f"{attr_prefix}_name": f"Rollback-test {_stamp()}",
                    f"{attr_prefix}_count": 0,
                    f"{attr_prefix}_is_active": False,
                },
            )
            txn.records.update(table_schema_name, missing_id, {f"{attr_prefix}_count": 999})
        result = batch.execute()
        if result.has_errors:
            orphaned = list(result.created_ids)
            if not orphaned:
                print("[OK] Changeset rollback verified: changeset failed, no records created")
            else:
                print(f"[WARN] Changeset failed but {len(orphaned)} IDs leaked — queuing for cleanup")
                all_ids.extend(orphaned)
        else:
            print("[WARN] Expected rollback but changeset succeeded (unexpected)")
            all_ids.extend(result.created_ids)

        # -------------------------------------------------------------------
        # [6/8] UPSERT — needs an alternate key configured on the table; the
        # scratch table has none, so failure here is the expected outcome.
        # -------------------------------------------------------------------
        print("\n[6/8] Upsert — UpsertItem with alternate key (expected to fail: no alt key on test table)")
        try:
            batch = client.batch.new()
            batch.records.upsert(
                table_schema_name,
                [
                    UpsertItem(
                        alternate_key={f"{attr_prefix}_name": f"Upsert-E {_stamp()}"},
                        record={f"{attr_prefix}_count": 5, f"{attr_prefix}_is_active": True},
                    )
                ],
            )
            result = batch.execute()
            if result.has_errors:
                print(f"[WARN] Upsert failed as expected (no alternate key configured): {result.failed[0].status_code}")
            else:
                new_upsert_ids = list(result.created_ids)
                all_ids.extend(new_upsert_ids)
                print(f"[OK] Upsert succeeded: {len(new_upsert_ids)} record(s) — alternate key was accepted")
        except Exception as e:
            print(f"[WARN] Upsert skipped due to exception: {e}")

        # -------------------------------------------------------------------
        # [7/8] MIXED BATCH with continue_on_error — one intentional 404 get
        # alongside a valid one; both must be attempted.
        # -------------------------------------------------------------------
        if all_ids:
            print("\n[7/8] Mixed batch (continue_on_error=True) — 1 bad get + 1 good get")
            batch = client.batch.new()
            batch.records.get(
                table_schema_name,
                "00000000-0000-0000-0000-000000000002",
                select=[f"{attr_prefix}_name"],
            )
            batch.records.get(
                table_schema_name,
                all_ids[0],
                select=[f"{attr_prefix}_name"],
            )
            result = batch.execute(continue_on_error=True)
            print(f"[OK] Succeeded: {len(result.succeeded)}, Failed: {len(result.failed)}")
            for failure in result.failed:
                print(f"  Expected failure: {failure.status_code} {failure.error_message}")

        # -------------------------------------------------------------------
        # [8/8] DELETE — multi-delete with use_bulk_delete=False, producing
        # individual DELETE sub-requests inside one batch.
        # -------------------------------------------------------------------
        if all_ids:
            print(f"\n[8/8] Delete — {len(all_ids)} records via multi-delete (use_bulk_delete=False, 1 POST $batch)")
            batch = client.batch.new()
            batch.records.delete(table_schema_name, all_ids, use_bulk_delete=False)
            result = batch.execute(continue_on_error=True)
            print(f"[OK] Deleted {len(result.succeeded)}, failed {len(result.failed)}")

        print("\n[OK] Batch all-operations test completed!")

    except Exception as e:
        # Best-effort cleanup so a mid-test failure does not strand records;
        # cleanup errors are deliberately swallowed.
        print(f"[WARN] Batch all-operations test encountered an issue: {e}")
        if all_ids:
            try:
                batch = client.batch.new()
                batch.records.delete(table_schema_name, all_ids, use_bulk_delete=False)
                batch.execute(continue_on_error=True)
            except Exception:
                pass
541+
312542
def cleanup_test_data(client: DataverseClient, table_info: Dict[str, Any], record_id: str) -> None:
313543
"""Clean up test data."""
314544
print("\n-> Cleanup")
@@ -403,6 +633,7 @@ def main():
403633
print(" - Table Creation & Metadata Operations")
404634
print(" - Record CRUD Operations")
405635
print(" - Query Functionality")
636+
print(" - Batch Operations (create, read, update, changeset, delete)")
406637
print(" - Interactive Cleanup")
407638
print("=" * 70)
408639
print("For installation validation, run examples/basic/installation_example.py first")
@@ -422,6 +653,9 @@ def main():
422653
# Test querying
423654
test_query_records(client, table_info)
424655

656+
# Test batch operations (all operation types)
657+
test_batch_all_operations(client, table_info)
658+
425659
# Success summary
426660
print("\nFunctional Test Summary")
427661
print("=" * 50)
@@ -430,6 +664,7 @@ def main():
430664
print("[OK] Record Creation: Success")
431665
print("[OK] Record Reading: Success")
432666
print("[OK] Record Querying: Success")
667+
print("[OK] Batch Operations: Success")
433668
print("\nYour PowerPlatform Dataverse Client SDK is fully functional!")
434669

435670
# Cleanup

0 commit comments

Comments (0)