diff --git a/.github/workflows/dotnet-release.yml b/.github/workflows/dotnet-release.yml
new file mode 100644
index 0000000..a4441b6
--- /dev/null
+++ b/.github/workflows/dotnet-release.yml
@@ -0,0 +1,109 @@
+name: Spark Dotnet
+
+on:
+  push:
+    branches: [ "main" ]
+
+# Creating a release below needs write access to repository contents;
+# the default GITHUB_TOKEN may otherwise be read-only.
+permissions:
+  contents: write
+
+jobs:
+
+  build:
+
+    strategy:
+      matrix:
+        configuration: [Release]
+
+    runs-on: ubuntu-latest
+
+    env:
+      Solution_Name: Spark.Connect.Dotnet # Replace with your solution name, i.e. MyWpfApp.sln.
+      Test_Project_Path: src/test # Replace with the path to your test project, i.e. MyWpfApp.Tests\MyWpfApp.Tests.csproj.
+
+    steps:
+
+      - name: Checkout code
+        uses: actions/checkout@v4
+
+      - uses: actions/setup-dotnet@v4
+        with:
+          dotnet-version: '7.0.x' # Specify your .NET Core version here
+
+      - name: Print current directory
+        run: pwd
+
+      - name: Print current directory listing
+        run: ls -a ./src
+
+      - uses: actions/setup-python@v5
+        with:
+          python-version: '3.11'
+
+      - uses: actions/setup-java@v4
+        with:
+          java-version: '8'
+          distribution: temurin
+
+      - uses: vemonet/setup-spark@v1
+        with:
+          spark-version: '3.5.1'
+          hadoop-version: '3'
+
+      # Restart the Spark Connect server so the tests get a clean endpoint.
+      - run: $SPARK_HOME/sbin/stop-connect-server.sh --force
+
+      - run: $SPARK_HOME/sbin/start-connect-server.sh --packages org.apache.spark:spark-connect_2.12:3.5.1
+
+      - name: Install dependencies
+        working-directory: ./src/Spark.Connect.Dotnet/
+        run: dotnet restore
+
+      - name: Build the project
+        working-directory: ./src/Spark.Connect.Dotnet/
+        run: dotnet build --configuration Release --no-restore
+
+      # Execute all unit tests in the solution
+      - name: Execute unit tests
+        working-directory: ./src/test/Spark.Connect.Dotnet.Tests/
+        run: dotnet test -l:"console;verbosity=detailed" --logger "trx;LogFileName=./test_results.xml"
+
+      - name: Upload test results
+        uses: actions/upload-artifact@v4
+        with:
+          name: Test Results
+          path: ./test_results.xml
+
+      - name: Pack
+        working-directory: ./src/Spark.Connect.Dotnet/Spark.Connect.Dotnet/
+        run: dotnet pack --no-build --configuration Release --output nupkgs /p:PackageVersion=3.5.1-${{ github.run_number }}
+
+      - name: Upload NuGet package
+        uses: actions/upload-artifact@v4
+        with:
+          name: NuGet-3.5.1-${{ github.run_number }}
+          path: ./src/Spark.Connect.Dotnet/Spark.Connect.Dotnet/nupkgs/*.nupkg
+
+      # github.ref is `refs/heads/main` on a branch push — not a valid, unique
+      # tag name — so tag each release with the unique package version instead.
+      - name: Create Release
+        id: create_release
+        uses: actions/create-release@v1 # NOTE(review): archived; consider migrating to `gh release create`
+        env:
+          GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
+        with:
+          tag_name: v3.5.1-${{ github.run_number }}
+          release_name: Release v3.5.1-${{ github.run_number }}
+          draft: false
+          prerelease: false
+
+      - name: Upload Release Asset
+        uses: actions/upload-release-asset@v1 # NOTE(review): archived; works but unmaintained
+        env:
+          GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
+        with:
+          upload_url: ${{ steps.create_release.outputs.upload_url }}
+          asset_path: ./src/Spark.Connect.Dotnet/Spark.Connect.Dotnet/nupkgs/GOEddie.Spark.Dotnet.3.5.1-${{ github.run_number }}.nupkg
+          asset_name: GOEddie.Spark.Dotnet.3.5.1-${{ github.run_number }}.nupkg
+          asset_content_type: application/zip
diff --git a/README.md b/README.md
index 1ebf356..7bad863 100644
--- a/README.md
+++ b/README.md
@@ -75,11 +75,10 @@ The documentation for Spark Connect is limited at best but there is an example i
 
 [Dev Guide](docs/dev-guide.md)
 
-### Deployment scenarios to be tested
+### Deployment scenarios
 
-1. Local on Windows
-1. Databricks
-1. Synapse Analytics
+1. [Databricks](https://the.agilesql.club/2024/01/using-spark-connect-from-.net-to-run-spark-jobs-on-databricks/)
+1. Synapse Analytics - to be tested
 
 ### Major Features to be implemented
 