From 1ec17f4c5bb1bf16a9912817a158146c5291b618 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Kat=20March=C3=A1n?= Date: Thu, 9 Jan 2025 00:01:06 -0800 Subject: [PATCH] feat(lsp): add LSP server --- .github/workflows/ci.yml | 2 +- .github/workflows/release.yml | 320 ++++++++++++++++++++++++++++++++++ Cargo.toml | 18 +- README.md | 2 +- dist-workspace.toml | 19 ++ src/document.rs | 5 +- src/entry.rs | 2 +- src/node.rs | 4 +- src/v2_parser.rs | 8 +- tools/kdl-lsp/Cargo.toml | 22 +++ tools/kdl-lsp/README.md | 8 + tools/kdl-lsp/src/main.rs | 196 +++++++++++++++++++++ 12 files changed, 590 insertions(+), 16 deletions(-) create mode 100644 .github/workflows/release.yml create mode 100644 dist-workspace.toml create mode 100644 tools/kdl-lsp/Cargo.toml create mode 100644 tools/kdl-lsp/README.md create mode 100644 tools/kdl-lsp/src/main.rs diff --git a/.github/workflows/ci.yml b/.github/workflows/ci.yml index b5e1dbf..0d920e4 100644 --- a/.github/workflows/ci.yml +++ b/.github/workflows/ci.yml @@ -28,7 +28,7 @@ jobs: runs-on: ${{ matrix.os }} strategy: matrix: - rust: [1.70.0, stable] + rust: [1.71.1, stable] os: [ubuntu-latest, macOS-latest, windows-latest] steps: diff --git a/.github/workflows/release.yml b/.github/workflows/release.yml new file mode 100644 index 0000000..fdbc141 --- /dev/null +++ b/.github/workflows/release.yml @@ -0,0 +1,320 @@ +# This file was autogenerated by dist: https://opensource.axo.dev/cargo-dist/ +# +# Copyright 2022-2024, axodotdev +# SPDX-License-Identifier: MIT or Apache-2.0 +# +# CI that: +# +# * checks for a Git Tag that looks like a release +# * builds artifacts with dist (archives, installers, hashes) +# * uploads those artifacts to temporary workflow zip +# * on success, uploads the artifacts to a GitHub Release +# +# Note that the GitHub Release will be created with a generated +# title/body based on your changelogs. 
+ +name: Release +permissions: + "contents": "write" + +# This task will run whenever you push a git tag that looks like a version +# like "1.0.0", "v0.1.0-prerelease.1", "my-app/0.1.0", "releases/v1.0.0", etc. +# Various formats will be parsed into a VERSION and an optional PACKAGE_NAME, where +# PACKAGE_NAME must be the name of a Cargo package in your workspace, and VERSION +# must be a Cargo-style SemVer Version (must have at least major.minor.patch). +# +# If PACKAGE_NAME is specified, then the announcement will be for that +# package (erroring out if it doesn't have the given version or isn't dist-able). +# +# If PACKAGE_NAME isn't specified, then the announcement will be for all +# (dist-able) packages in the workspace with that version (this mode is +# intended for workspaces with only one dist-able package, or with all dist-able +# packages versioned/released in lockstep). +# +# If you push multiple tags at once, separate instances of this workflow will +# spin up, creating an independent announcement for each one. However, GitHub +# will hard limit this to 3 tags per commit, as it will assume more tags is a +# mistake. +# +# If there's a prerelease-style suffix to the version, then the release(s) +# will be marked as a prerelease. +on: + pull_request: + push: + tags: + - '**[0-9]+.[0-9]+.[0-9]+*' + +jobs: + # Run 'dist plan' (or host) to determine what tasks we need to do + plan: + runs-on: "ubuntu-20.04" + outputs: + val: ${{ steps.plan.outputs.manifest }} + tag: ${{ !github.event.pull_request && github.ref_name || '' }} + tag-flag: ${{ !github.event.pull_request && format('--tag={0}', github.ref_name) || '' }} + publishing: ${{ !github.event.pull_request }} + env: + GH_TOKEN: ${{ secrets.GITHUB_TOKEN }} + steps: + - uses: actions/checkout@v4 + with: + submodules: recursive + - name: Install dist + # we specify bash to get pipefail; it guards against the `curl` command + # failing. 
otherwise `sh` won't catch that `curl` returned non-0 + shell: bash + run: "curl --proto '=https' --tlsv1.2 -LsSf https://github.com/axodotdev/cargo-dist/releases/download/v0.28.0/cargo-dist-installer.sh | sh" + - name: Cache dist + uses: actions/upload-artifact@v4 + with: + name: cargo-dist-cache + path: ~/.cargo/bin/dist + # sure would be cool if github gave us proper conditionals... + # so here's a doubly-nested ternary-via-truthiness to try to provide the best possible + # functionality based on whether this is a pull_request, and whether it's from a fork. + # (PRs run on the *source* but secrets are usually on the *target* -- that's *good* + # but also really annoying to build CI around when it needs secrets to work right.) + - id: plan + run: | + dist ${{ (!github.event.pull_request && format('host --steps=create --tag={0}', github.ref_name)) || 'plan' }} --output-format=json > plan-dist-manifest.json + echo "dist ran successfully" + cat plan-dist-manifest.json + echo "manifest=$(jq -c "." plan-dist-manifest.json)" >> "$GITHUB_OUTPUT" + - name: "Upload dist-manifest.json" + uses: actions/upload-artifact@v4 + with: + name: artifacts-plan-dist-manifest + path: plan-dist-manifest.json + + # Build and packages all the platform-specific things + build-local-artifacts: + name: build-local-artifacts (${{ join(matrix.targets, ', ') }}) + # Let the initial task tell us to not run (currently very blunt) + needs: + - plan + if: ${{ fromJson(needs.plan.outputs.val).ci.github.artifacts_matrix.include != null && (needs.plan.outputs.publishing == 'true' || fromJson(needs.plan.outputs.val).ci.github.pr_run_mode == 'upload') }} + strategy: + fail-fast: false + # Target platforms/runners are computed by dist in create-release. 
+ # Each member of the matrix has the following arguments: + # + # - runner: the github runner + # - dist-args: cli flags to pass to dist + # - install-dist: expression to run to install dist on the runner + # + # Typically there will be: + # - 1 "global" task that builds universal installers + # - N "local" tasks that build each platform's binaries and platform-specific installers + matrix: ${{ fromJson(needs.plan.outputs.val).ci.github.artifacts_matrix }} + runs-on: ${{ matrix.runner }} + container: ${{ matrix.container && matrix.container.image || null }} + env: + GH_TOKEN: ${{ secrets.GITHUB_TOKEN }} + BUILD_MANIFEST_NAME: target/distrib/${{ join(matrix.targets, '-') }}-dist-manifest.json + steps: + - name: enable windows longpaths + run: | + git config --global core.longpaths true + - uses: actions/checkout@v4 + with: + submodules: recursive + - name: Install Rust non-interactively if not already installed + if: ${{ matrix.container }} + run: | + if ! command -v cargo > /dev/null 2>&1; then + curl --proto '=https' --tlsv1.2 -sSf https://sh.rustup.rs | sh -s -- -y + echo "$HOME/.cargo/bin" >> $GITHUB_PATH + fi + - name: Install dist + run: ${{ matrix.install_dist.run }} + # Get the dist-manifest + - name: Fetch local artifacts + uses: actions/download-artifact@v4 + with: + pattern: artifacts-* + path: target/distrib/ + merge-multiple: true + - name: Install dependencies + run: | + ${{ matrix.packages_install }} + - name: Build artifacts + run: | + # Actually do builds and make zips and whatnot + dist build ${{ needs.plan.outputs.tag-flag }} --print=linkage --output-format=json ${{ matrix.dist_args }} > dist-manifest.json + echo "dist ran successfully" + - id: cargo-dist + name: Post-build + # We force bash here just because github makes it really hard to get values up + # to "real" actions without writing to env-vars, and writing to env-vars has + # inconsistent syntax between shell and powershell. 
+ shell: bash + run: | + # Parse out what we just built and upload it to scratch storage + echo "paths<<EOF" >> "$GITHUB_OUTPUT" + dist print-upload-files-from-manifest --manifest dist-manifest.json >> "$GITHUB_OUTPUT" + echo "EOF" >> "$GITHUB_OUTPUT" + + cp dist-manifest.json "$BUILD_MANIFEST_NAME" + - name: "Upload artifacts" + uses: actions/upload-artifact@v4 + with: + name: artifacts-build-local-${{ join(matrix.targets, '_') }} + path: | + ${{ steps.cargo-dist.outputs.paths }} + ${{ env.BUILD_MANIFEST_NAME }} + + # Build and package all the platform-agnostic(ish) things + build-global-artifacts: + needs: + - plan + - build-local-artifacts + runs-on: "ubuntu-20.04" + env: + GH_TOKEN: ${{ secrets.GITHUB_TOKEN }} + BUILD_MANIFEST_NAME: target/distrib/global-dist-manifest.json + steps: + - uses: actions/checkout@v4 + with: + submodules: recursive + - name: Install cached dist + uses: actions/download-artifact@v4 + with: + name: cargo-dist-cache + path: ~/.cargo/bin/ + - run: chmod +x ~/.cargo/bin/dist + # Get all the local artifacts for the global tasks to use (for e.g. 
checksums) + - name: Fetch local artifacts + uses: actions/download-artifact@v4 + with: + pattern: artifacts-* + path: target/distrib/ + merge-multiple: true + - id: cargo-dist + shell: bash + run: | + dist build ${{ needs.plan.outputs.tag-flag }} --output-format=json "--artifacts=global" > dist-manifest.json + echo "dist ran successfully" + + # Parse out what we just built and upload it to scratch storage + echo "paths<<EOF" >> "$GITHUB_OUTPUT" + jq --raw-output ".upload_files[]" dist-manifest.json >> "$GITHUB_OUTPUT" + echo "EOF" >> "$GITHUB_OUTPUT" + + cp dist-manifest.json "$BUILD_MANIFEST_NAME" + - name: "Upload artifacts" + uses: actions/upload-artifact@v4 + with: + name: artifacts-build-global + path: | + ${{ steps.cargo-dist.outputs.paths }} + ${{ env.BUILD_MANIFEST_NAME }} + # Determines if we should publish/announce + host: + needs: + - plan + - build-local-artifacts + - build-global-artifacts + # Only run if we're "publishing", and only if local and global didn't fail (skipped is fine) + if: ${{ always() && needs.plan.outputs.publishing == 'true' && (needs.build-global-artifacts.result == 'skipped' || needs.build-global-artifacts.result == 'success') && (needs.build-local-artifacts.result == 'skipped' || needs.build-local-artifacts.result == 'success') }} + env: + GH_TOKEN: ${{ secrets.GITHUB_TOKEN }} + runs-on: "ubuntu-20.04" + outputs: + val: ${{ steps.host.outputs.manifest }} + steps: + - uses: actions/checkout@v4 + with: + submodules: recursive + - name: Install cached dist + uses: actions/download-artifact@v4 + with: + name: cargo-dist-cache + path: ~/.cargo/bin/ + - run: chmod +x ~/.cargo/bin/dist + # Fetch artifacts from scratch-storage + - name: Fetch artifacts + uses: actions/download-artifact@v4 + with: + pattern: artifacts-* + path: target/distrib/ + merge-multiple: true + - id: host + shell: bash + run: | + dist host ${{ needs.plan.outputs.tag-flag }} --steps=upload --steps=release --output-format=json > dist-manifest.json + echo "artifacts uploaded 
and released successfully" + cat dist-manifest.json + echo "manifest=$(jq -c "." dist-manifest.json)" >> "$GITHUB_OUTPUT" + - name: "Upload dist-manifest.json" + uses: actions/upload-artifact@v4 + with: + # Overwrite the previous copy + name: artifacts-dist-manifest + path: dist-manifest.json + # Create a GitHub Release while uploading all files to it + - name: "Download GitHub Artifacts" + uses: actions/download-artifact@v4 + with: + pattern: artifacts-* + path: artifacts + merge-multiple: true + - name: Cleanup + run: | + # Remove the granular manifests + rm -f artifacts/*-dist-manifest.json + - name: Create GitHub Release + env: + PRERELEASE_FLAG: "${{ fromJson(steps.host.outputs.manifest).announcement_is_prerelease && '--prerelease' || '' }}" + ANNOUNCEMENT_TITLE: "${{ fromJson(steps.host.outputs.manifest).announcement_title }}" + ANNOUNCEMENT_BODY: "${{ fromJson(steps.host.outputs.manifest).announcement_github_body }}" + RELEASE_COMMIT: "${{ github.sha }}" + run: | + # Write and read notes from a file to avoid quoting breaking things + echo "$ANNOUNCEMENT_BODY" > $RUNNER_TEMP/notes.txt + + gh release create "${{ needs.plan.outputs.tag }}" --target "$RELEASE_COMMIT" $PRERELEASE_FLAG --title "$ANNOUNCEMENT_TITLE" --notes-file "$RUNNER_TEMP/notes.txt" artifacts/* + + publish-npm: + needs: + - plan + - host + runs-on: "ubuntu-20.04" + env: + GH_TOKEN: ${{ secrets.GITHUB_TOKEN }} + PLAN: ${{ needs.plan.outputs.val }} + if: ${{ !fromJson(needs.plan.outputs.val).announcement_is_prerelease || fromJson(needs.plan.outputs.val).publish_prereleases }} + steps: + - name: Fetch npm packages + uses: actions/download-artifact@v4 + with: + pattern: artifacts-* + path: npm/ + merge-multiple: true + - uses: actions/setup-node@v4 + with: + node-version: '20.x' + registry-url: 'https://registry.npmjs.org' + - run: | + for release in $(echo "$PLAN" | jq --compact-output '.releases[] | select([.artifacts[] | endswith("-npm-package.tar.gz")] | any)'); do + pkg=$(echo "$release" | jq 
'.artifacts[] | select(endswith("-npm-package.tar.gz"))' --raw-output) + npm publish --access public "./npm/${pkg}" + done + env: + NODE_AUTH_TOKEN: ${{ secrets.NPM_TOKEN }} + + announce: + needs: + - plan + - host + - publish-npm + # use "always() && ..." to allow us to wait for all publish jobs while + # still allowing individual publish jobs to skip themselves (for prereleases). + # "host" however must run to completion, no skipping allowed! + if: ${{ always() && needs.host.result == 'success' && (needs.publish-npm.result == 'skipped' || needs.publish-npm.result == 'success') }} + runs-on: "ubuntu-20.04" + env: + GH_TOKEN: ${{ secrets.GITHUB_TOKEN }} + steps: + - uses: actions/checkout@v4 + with: + submodules: recursive diff --git a/Cargo.toml b/Cargo.toml index d1394d9..d393ff1 100644 --- a/Cargo.toml +++ b/Cargo.toml @@ -17,17 +17,29 @@ span = [] v1-fallback = ["v1"] v1 = ["kdlv1"] +[workspace] +members = ["tools/*"] + [dependencies] -miette = "7.2.0" +miette.workspace = true +thiserror.workspace = true num = "0.4.2" -thiserror = "1.0.40" winnow = { version = "0.6.20", features = ["alloc", "unstable-recover"] } kdlv1 = { package = "kdl", version = "4.7.0", optional = true } +[workspace.dependencies] +miette = "7.2.0" +thiserror = "1.0.40" + [dev-dependencies] -miette = { version = "7.2.0", features = ["fancy"] } +miette = { workspace = true, features = ["fancy"] } pretty_assertions = "1.3.0" +# The profile that 'dist' will build with +[profile.dist] +inherits = "release" +lto = "thin" + # docs.rs-specific configuration [package.metadata.docs.rs] # document all features diff --git a/README.md b/README.md index e74deae..67b2bef 100644 --- a/README.md +++ b/README.md @@ -142,7 +142,7 @@ means a few things: ### Minimum Supported Rust Version -You must be at least `1.70.0` tall to get on this ride. +You must be at least `1.71.1` tall to get on this ride. 
### License diff --git a/dist-workspace.toml b/dist-workspace.toml new file mode 100644 index 0000000..cf6aa93 --- /dev/null +++ b/dist-workspace.toml @@ -0,0 +1,19 @@ +[workspace] +members = ["cargo:tools/kdl-lsp"] + +# Config for 'dist' +[dist] +# The preferred dist version to use in CI (Cargo.toml SemVer syntax) +cargo-dist-version = "0.28.0" +# CI backends to support +ci = "github" +# The installers to generate for each app +installers = ["shell", "powershell", "npm"] +# Target platforms to build apps for (Rust target-triple syntax) +targets = ["aarch64-apple-darwin", "aarch64-unknown-linux-gnu", "aarch64-pc-windows-msvc", "x86_64-apple-darwin", "x86_64-unknown-linux-gnu", "x86_64-pc-windows-msvc"] +# Path that installers should place binaries in +install-path = "CARGO_HOME" +# Whether to install an updater program +install-updater = false +# Publish jobs to run in CI +publish-jobs = ["npm"] diff --git a/src/document.rs b/src/document.rs index 03a2e78..b81512d 100644 --- a/src/document.rs +++ b/src/document.rs @@ -356,9 +356,6 @@ impl KdlDocument { let v1_res = KdlDocument::parse_v2(s); if v1_res.is_ok() || detect_v1(s) { v1_res - } else if detect_v2(s) { - // v2, but with confidence - v2_res } else { // TODO(@zkat): maybe we can add something to the error // message to specify that it's "uncertain"? @@ -424,7 +421,7 @@ impl KdlDocument { let penult = iter.next(); if let Some(last) = last { if let Some(fmt) = last.format_mut() { - if !fmt.trailing.contains(";") + if !fmt.trailing.contains(';') && fmt .trailing .chars() diff --git a/src/entry.rs b/src/entry.rs index 4677838..1cd56ee 100644 --- a/src/entry.rs +++ b/src/entry.rs @@ -296,7 +296,7 @@ impl KdlEntry { // but just in case. 
let s = x.value_repr.trim(); // convert raw strings to v1 format - let s = if s.starts_with("#") { + let s = if s.starts_with('#') { format!("r{s}") } else { s.to_string() diff --git a/src/node.rs b/src/node.rs index df44293..9d1f63c 100644 --- a/src/node.rs +++ b/src/node.rs @@ -392,10 +392,10 @@ impl From<kdlv1::KdlNode> for KdlNode { fn from(value: kdlv1::KdlNode) -> Self { let terminator = value .trailing() - .map(|t| if t.contains(";") { ";" } else { "\n" }) + .map(|t| if t.contains(';') { ";" } else { "\n" }) .unwrap_or("\n"); let trailing = value.trailing().map(|t| { - if t.contains(";") { + if t.contains(';') { t.replace(';', "") } else { let t = t.replace("\r\n", "\n"); diff --git a/src/v2_parser.rs b/src/v2_parser.rs index 66c56b0..21dd442 100644 --- a/src/v2_parser.rs +++ b/src/v2_parser.rs @@ -16,7 +16,7 @@ use winnow::{ prelude::*, stream::{AsChar, Location, Recover, Recoverable, Stream}, token::{any, none_of, one_of, take_while}, - Located, + LocatingSlice, }; use crate::{ @@ -24,14 +24,14 @@ use crate::{ KdlIdentifier, KdlNode, KdlNodeFormat, KdlValue, }; -type Input<'a> = Recoverable<Located<&'a str>, KdlParseError>; +type Input<'a> = Recoverable<LocatingSlice<&'a str>, KdlParseError>; type PResult<T> = winnow::PResult<T, KdlParseError>; pub(crate) fn try_parse<'a, P: Parser<Input<'a>, T, KdlParseError>, T>( mut parser: P, input: &'a str, ) -> Result<T, KdlParseError> { - let (_, maybe_val, errs) = parser.recoverable_parse(Located::new(input)); + let (_, maybe_val, errs) = parser.recoverable_parse(LocatingSlice::new(input)); if let (Some(v), true) = (maybe_val, errs.is_empty()) { Ok(v) } else { @@ -256,7 +256,7 @@ where #[cfg(test)] fn new_input(s: &str) -> Input<'_> { - Recoverable::new(Located::new(s)) + Recoverable::new(LocatingSlice::new(s)) } /// `document := bom? 
nodes` diff --git a/tools/kdl-lsp/Cargo.toml b/tools/kdl-lsp/Cargo.toml new file mode 100644 index 0000000..146bf3c --- /dev/null +++ b/tools/kdl-lsp/Cargo.toml @@ -0,0 +1,22 @@ +[package] +name = "kdl-lsp" +version = "6.2.2" +edition = "2021" +description = "LSP Server for the KDL Document Language" +authors = ["Kat Marchán <kzm@zkat.tech>", "KDL Community"] +license = "Apache-2.0" +readme = "README.md" +homepage = "https://kdl.dev" +repository = "https://github.com/kdl-org/kdl-rs" +keywords = ["kdl", "document", "config", "lsp", "language-server"] +rust-version = "1.71.1" + +[dependencies] +miette.workspace = true +kdl = { version = "6.2.2", path = "../../", features = ["span", "v1-fallback"] } +tower-lsp = "0.20.0" +dashmap = "6.1.0" +ropey = "1.6.1" +tokio = { version = "1.43.0", features = ["full"] } +tracing = "0.1.41" +tracing-subscriber = { version = "0.3.19", features = ["env-filter"] } diff --git a/tools/kdl-lsp/README.md b/tools/kdl-lsp/README.md new file mode 100644 index 0000000..f63467a --- /dev/null +++ b/tools/kdl-lsp/README.md @@ -0,0 +1,8 @@ +# `kdl-lsp` + +This is an LSP server for KDL. + +Currently, it only supports diagnostics, which it generates using +[`kdl-rs`](https://github.com/kdl-org/kdl-rs). + +It will eventually support richer diagnostics, completions, and KDL Schema. 
\ No newline at end of file diff --git a/tools/kdl-lsp/src/main.rs b/tools/kdl-lsp/src/main.rs new file mode 100644 index 0000000..f8f003d --- /dev/null +++ b/tools/kdl-lsp/src/main.rs @@ -0,0 +1,196 @@ +use dashmap::DashMap; +use kdl::{KdlDocument, KdlError}; +use miette::Diagnostic as _; +use ropey::Rope; +use tower_lsp::jsonrpc::Result; +use tower_lsp::lsp_types::*; +use tower_lsp::{Client, LanguageServer, LspService, Server}; +use tracing_subscriber::prelude::*; +use tracing_subscriber::EnvFilter; + +#[derive(Debug)] +struct Backend { + client: Client, + document_map: DashMap<String, Rope>, +} + +impl Backend { + async fn on_change(&self, uri: Url, text: &str) { + let rope = ropey::Rope::from_str(text); + self.document_map.insert(uri.to_string(), rope.clone()); + } +} + +#[tower_lsp::async_trait] +impl LanguageServer for Backend { + async fn initialize(&self, _: InitializeParams) -> Result<InitializeResult> { + Ok(InitializeResult { + capabilities: ServerCapabilities { + text_document_sync: Some(TextDocumentSyncCapability::Options( + TextDocumentSyncOptions { + open_close: Some(true), + change: Some(TextDocumentSyncKind::FULL), + save: Some(TextDocumentSyncSaveOptions::SaveOptions(SaveOptions { + include_text: Some(true), + })), + ..Default::default() + }, + )), + workspace: Some(WorkspaceServerCapabilities { + workspace_folders: Some(WorkspaceFoldersServerCapabilities { + supported: Some(true), + change_notifications: Some(OneOf::Left(true)), + }), + file_operations: None, + }), + diagnostic_provider: Some(DiagnosticServerCapabilities::RegistrationOptions( + DiagnosticRegistrationOptions { + text_document_registration_options: TextDocumentRegistrationOptions { + document_selector: Some(vec![DocumentFilter { + language: Some("kdl".into()), + scheme: Some("file".into()), + pattern: None, + }]), + }, + ..Default::default() + }, + )), + // hover_provider: Some(HoverProviderCapability::Simple(true)), + // completion_provider: Some(Default::default()), + ..Default::default() + }, + 
..Default::default() + }) + } + + async fn initialized(&self, _: InitializedParams) { + self.client + .log_message(MessageType::INFO, "server initialized!") + .await; + } + + async fn shutdown(&self) -> Result<()> { + self.client + .log_message(MessageType::INFO, "server shutting down") + .await; + Ok(()) + } + + async fn did_open(&self, params: DidOpenTextDocumentParams) { + self.on_change(params.text_document.uri, &params.text_document.text) + .await; + } + + async fn did_change(&self, params: DidChangeTextDocumentParams) { + self.on_change(params.text_document.uri, &params.content_changes[0].text) + .await; + } + + async fn did_save(&self, params: DidSaveTextDocumentParams) { + if let Some(text) = params.text.as_ref() { + self.on_change(params.text_document.uri, text).await; + } + } + + async fn did_close(&self, params: DidCloseTextDocumentParams) { + self.document_map + .remove(&params.text_document.uri.to_string()); + } + + async fn diagnostic( + &self, + params: DocumentDiagnosticParams, + ) -> Result<DocumentDiagnosticReportResult> { + tracing::debug!("diagnostic req"); + if let Some(doc) = self.document_map.get(&params.text_document.uri.to_string()) { + let res: std::result::Result<KdlDocument, KdlError> = doc.to_string().parse(); + if let Err(kdl_err) = res { + let diags = kdl_err + .diagnostics + .into_iter() + .map(|diag| { + Diagnostic::new( + Range::new( + char_to_position(diag.span.offset(), &doc), + char_to_position(diag.span.offset() + diag.span.len(), &doc), + ), + diag.severity().map(to_lsp_sev), + diag.code().map(|c| NumberOrString::String(c.to_string())), + None, + diag.to_string(), + None, + None, + ) + }) + .collect(); + return Ok(DocumentDiagnosticReportResult::Report( + DocumentDiagnosticReport::Full(RelatedFullDocumentDiagnosticReport { + related_documents: None, + full_document_diagnostic_report: FullDocumentDiagnosticReport { + result_id: None, + items: diags, + }, + }), + )); + } + } + Ok(DocumentDiagnosticReportResult::Report( + 
DocumentDiagnosticReport::Full(RelatedFullDocumentDiagnosticReport::default()), + )) + } + + // TODO(@zkat): autocomplete #-keywords + // TODO(@zkat): autocomplete schema stuff + // async fn completion(&self, _: CompletionParams) -> Result<Option<CompletionResponse>> { + // tracing::debug!("Completion request"); + // Ok(Some(CompletionResponse::Array(vec![ + // CompletionItem::new_simple("Hello".to_string(), "Some detail".to_string()), + // CompletionItem::new_simple("Bye".to_string(), "More detail".to_string()), + // ]))) + // } + + // TODO(@zkat): We'll use this when we actually do schema stuff. + // async fn hover(&self, _: HoverParams) -> Result<Option<Hover>> { + // tracing::debug!("Hover request"); + // Ok(Some(Hover { + // contents: HoverContents::Scalar(MarkedString::String("You're hovering!".to_string())), + // range: None, + // })) + // } +} + +fn char_to_position(char_idx: usize, rope: &Rope) -> Position { + let line_idx = rope.char_to_line(char_idx); + let line_char_idx = rope.line_to_char(line_idx); + let column_idx = char_idx - line_char_idx; + Position::new(line_idx as u32, column_idx as u32) +} + +fn to_lsp_sev(sev: miette::Severity) -> DiagnosticSeverity { + match sev { + miette::Severity::Advice => DiagnosticSeverity::HINT, + miette::Severity::Warning => DiagnosticSeverity::WARNING, + miette::Severity::Error => DiagnosticSeverity::ERROR, + } +} + +#[tokio::main] +async fn main() { + tracing_subscriber::registry() + .with( + tracing_subscriber::fmt::layer() + .map_writer(move |_| std::io::stderr) + .with_ansi(false), + ) + .with(EnvFilter::from_default_env()) + .init(); + + let stdin = tokio::io::stdin(); + let stdout = tokio::io::stdout(); + + let (service, socket) = LspService::new(|client| Backend { + client, + document_map: DashMap::new(), + }); + Server::new(stdin, stdout, socket).serve(service).await; +}