Skip to content

Commit d13f052

Browse files
authored
Merge branch 'main' into divyansh-vijayvergia_data/stack/azure-msi-support
2 parents 22d25df + 958f1d9 commit d13f052

File tree

12 files changed

+522
-104
lines changed

12 files changed

+522
-104
lines changed

.github/workflows/package.yml

Lines changed: 168 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,168 @@
# This workflow builds, signs, and uploads artifacts for the release workflow in
# secure-public-registry-releases-eng to consume. It does not need to be triggered manually.
name: Package

on:
  workflow_dispatch:
    inputs:
      tag:
        description: "The version tag to package (e.g. v0.35.0)."
        required: true
        type: string

permissions:
  id-token: write # required for the JFrog OIDC token exchange below
  contents: read

jobs:
  package:
    runs-on:
      group: databricks-protected-runner-group
      labels: linux-ubuntu-latest
    timeout-minutes: 20
    steps:
      # Pass the tag through an env var instead of inlining ${{ inputs.tag }}
      # into the script body, so a crafted input cannot inject shell commands.
      - name: Validate tag format
        env:
          TAG: ${{ inputs.tag }}
        run: |
          if [[ ! "${TAG}" =~ ^v[0-9]+\.[0-9]+\.[0-9]+(-.+)?$ ]]; then
            echo "::error::Invalid tag format '${TAG}'. Expected vX.Y.Z or vX.Y.Z-suffix (e.g. v0.35.0, v0.0.0-test.1)."
            exit 1
          fi

      - name: Checkout
        uses: actions/checkout@ee0669bd1cc54295c223e0bb666b733df41de1c5 # v2.7.0
        with:
          ref: ${{ inputs.tag }}

      - name: Cache Maven packages
        uses: actions/cache@0057852bfaa89a56745cba8c7296529d2fc39830 # v4.3.0
        with:
          path: ~/.m2/repository
          key: ${{ runner.os }}-m2-${{ hashFiles('**/pom.xml') }}
          restore-keys: ${{ runner.os }}-m2

      - name: Setup JFrog CLI with OIDC
        id: jfrog
        uses: jfrog/setup-jfrog-cli@279b1f629f43dd5bc658d8361ac4802a7ef8d2d5 # v4.9.1
        env:
          JF_URL: https://databricks.jfrog.io
        with:
          oidc-provider-name: github-actions

      - name: Set up Java
        uses: actions/setup-java@17f84c3641ba7b8f6deff6309fc4c864478f5d62 # v3.14.1
        with:
          java-version: "8" # quoted: the input is a string, not a number
          distribution: adopt
          gpg-private-key: ${{ secrets.GPG_PRIVATE_KEY }}
          # Name of the env var that will hold the passphrase at signing time,
          # not the passphrase itself (per setup-java's gpg-passphrase input).
          gpg-passphrase: GPG_PASSPHRASE

      - name: Configure Maven for JFrog
        run: |
          mkdir -p ~/.m2
          cat > ~/.m2/settings.xml << EOF
          <settings xmlns="http://maven.apache.org/SETTINGS/1.0.0"
                    xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
                    xsi:schemaLocation="http://maven.apache.org/SETTINGS/1.0.0 https://maven.apache.org/xsd/settings-1.0.0.xsd">
            <mirrors>
              <mirror>
                <id>jfrog-maven</id>
                <url>https://databricks.jfrog.io/artifactory/db-maven/</url>
                <mirrorOf>*</mirrorOf>
              </mirror>
            </mirrors>
            <servers>
              <server>
                <id>jfrog-maven</id>
                <username>${{ steps.jfrog.outputs.oidc-user }}</username>
                <!-- CDATA so special characters in the token cannot break the XML -->
                <password><![CDATA[${{ steps.jfrog.outputs.oidc-token }}]]></password>
              </server>
              <server>
                <id>gpg.passphrase</id>
                <!-- \$ keeps Maven's env lookup literal through the heredoc -->
                <passphrase>\${env.GPG_PASSPHRASE}</passphrase>
              </server>
            </servers>
          </settings>
          EOF

      # The -Prelease profile activates central-publishing-maven-plugin with
      # <extensions>true</extensions>, which Maven tries to resolve at startup.
      # JFrog's db-maven mirror cannot proxy this plugin, so the build fails.
      # Instead, we build without -Prelease and handle sources, javadoc, and
      # GPG signing manually.
      - name: Build
        run: mvn -DskipTests=true --batch-mode install

      - name: Generate sources and javadoc
        run: |
          mvn source:jar-no-fork -pl databricks-sdk-java --batch-mode
          mvn javadoc:jar -pl databricks-sdk-java --batch-mode -Ddoclint=none

      - name: Stage and sign release artifacts
        env:
          GPG_PASSPHRASE: ${{ secrets.GPG_PASSPHRASE }}
          TAG: ${{ inputs.tag }} # env indirection, same rationale as the validate step
        run: |
          VERSION="${TAG#v}"
          echo "Version: ${VERSION}"

          # The repo is a multi-module Maven project:
          #   databricks-sdk-parent (root pom.xml, packaging=pom) — shared build config
          #   └── databricks-sdk-java (the actual SDK JAR)
          # Both must be published to Maven Central. The parent is POM-only;
          # the child includes the JAR, sources, javadoc, and its own POM.

          # --- Stage child module artifacts (databricks-sdk-java) ---
          CHILD_DIR=staging/child
          mkdir -p "${CHILD_DIR}"
          cp databricks-sdk-java/target/databricks-sdk-java-${VERSION}.jar "${CHILD_DIR}/"
          cp databricks-sdk-java/target/databricks-sdk-java-${VERSION}-sources.jar "${CHILD_DIR}/"
          cp databricks-sdk-java/target/databricks-sdk-java-${VERSION}-javadoc.jar "${CHILD_DIR}/"
          cp databricks-sdk-java/pom.xml "${CHILD_DIR}/databricks-sdk-java-${VERSION}.pom"

          # --- Stage parent POM (databricks-sdk-parent) ---
          PARENT_DIR=staging/parent
          mkdir -p "${PARENT_DIR}"
          cp pom.xml "${PARENT_DIR}/databricks-sdk-parent-${VERSION}.pom"

          # --- GPG sign all artifacts ---
          for dir in "${CHILD_DIR}" "${PARENT_DIR}"; do
            for file in "${dir}"/*.jar "${dir}"/*.pom; do
              [[ -f "$file" ]] || continue
              gpg --batch --armor --pinentry-mode loopback \
                --passphrase "${GPG_PASSPHRASE}" \
                --detach-sign "${file}"
            done
          done

          echo "--- Child artifacts ---"
          ls -lh "${CHILD_DIR}/"
          echo "--- Parent artifacts ---"
          ls -lh "${PARENT_DIR}/"

      - name: Upload child artifacts
        uses: actions/upload-artifact@bbbca2ddaa5d8feaa63e36b76fdaad77386f024f # v7.0.0
        with:
          name: maven-child
          path: staging/child/
          if-no-files-found: error

      - name: Upload parent artifacts
        uses: actions/upload-artifact@bbbca2ddaa5d8feaa63e36b76fdaad77386f024f # v7.0.0
        with:
          name: maven-parent
          path: staging/parent/
          if-no-files-found: error

      - name: Write release notes
        env:
          TAG: ${{ inputs.tag }} # env indirection, same rationale as the validate step
        run: |
          mkdir -p staging/release-notes
          git for-each-ref --format='%(body)' "refs/tags/${TAG}" > staging/release-notes/release-notes.md
          echo "Release notes:"
          cat staging/release-notes/release-notes.md

      - name: Upload release notes
        uses: actions/upload-artifact@bbbca2ddaa5d8feaa63e36b76fdaad77386f024f # v7.0.0
        with:
          name: release-notes
          path: staging/release-notes/
          if-no-files-found: error

NEXT_CHANGELOG.md

Lines changed: 1 addition & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -4,6 +4,7 @@
### New Features and Improvements
* Add support for authentication through Azure Managed Service Identity (MSI) via the new `azure-msi` credential provider.
* Support `default_profile` in `[__settings__]` section of `.databrickscfg` for consistent default profile resolution across CLI and SDKs.
* Added automatic detection of AI coding agents (Antigravity, Claude Code, Cline, Codex, Copilot CLI, Cursor, Gemini CLI, OpenCode) in the user-agent string. The SDK now appends `agent/<name>` to HTTP request headers when running inside a known AI agent environment.

### Bug Fixes

0 commit comments

Comments (0)