Skip to content
Merged
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension


Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
1 change: 0 additions & 1 deletion .github/workflows/ci.yml
Original file line number Diff line number Diff line change
Expand Up @@ -24,5 +24,4 @@ jobs:
- uses: actions/checkout@v4
- uses: cachix/install-nix-action@v27
- run: nix build -L .#ocaml
- run: nix build -L .#legacy
- run: nix flake check -L
50 changes: 50 additions & 0 deletions docs/language.md
Original file line number Diff line number Diff line change
Expand Up @@ -71,6 +71,56 @@ pub fn vector_add(fvec3 a, fvec3 b) -> fvec3 {
> [!TIP]
> When integrating Haven with C, `fvecN` is the equivalent of ([non-standard](https://gcc.gnu.org/onlinedocs/gcc/Vector-Extensions.html)) `typedef float floatN __attribute__((vector_size(sizeof(float) * N)))`.

#### Specialized Vector Functions

Functions may accept vectors of any concrete dimension using `fvec?`.

These are not runtime-sized vectors. Instead, the compiler specializes the function at each call site using the
concrete argument types from that call.

```
fn vadd(fvec? a, fvec? b) {
@assert a.dim == b.dim, "vector dimensions must match";
a + b
}
```

Inside such a function:

- `a.dim` is a compile-time property of the specialized vector type
- omitted return types are inferred after specialization
- `@assert` conditions must reduce to compile-time constants after specialization

If a compile-time assertion fails, the compiler reports both the original condition and the specialized one:

```
semantic: error: vector dimensions must match
compile-time assertion failed: a.dim == b.dim
specialized as: 3 == 2
```

### Matrices

Haven offers `matMxN` matrix types of floating point numbers.

Like vectors, matrices support specialization holes in function signatures through `mat?`.

```
fn mat-width(mat? m) {
m.cols
}

fn get-mat-row(mat? m, u32 row) {
m[row]
}
```

Inside specialized matrix functions:

- `m.rows` and `m.cols` are compile-time properties
- indexing a concrete `matMxN` yields an `fvecN`
- specialized functions are cloned before LLVM lowering, so hole types do not reach IR

### Strings

The `str` type carries string data. It is essentially a `const char *` under the hood.
Expand Down
19 changes: 19 additions & 0 deletions examples/specialization.hv
Original file line number Diff line number Diff line change
@@ -0,0 +1,19 @@
fn vadd(fvec? a, fvec? b) {
@assert a.dim == b.dim, "vector dimensions must match";
a + b
}

fn mat-width(mat? m) {
m.cols
}

fn get-mat-row(mat? m, u32 row) {
m[row]
}

pub fn main() -> fvec3 {
let row = get-mat-row(Mat<Vec<1.0, 2.0, 3.0>, Vec<4.0, 5.0, 6.0>>, 1);
let sum = vadd(row, Vec<10.0, 20.0, 30.0>);
let cols = as<float>(mat-width(Mat<Vec<1.0, 2.0, 3.0>, Vec<4.0, 5.0, 6.0>>));
sum + Vec<cols, 0.0, 0.0>
}
22 changes: 17 additions & 5 deletions examples/vec/index.hv
Original file line number Diff line number Diff line change
@@ -1,26 +1,38 @@
pub fn __builtin_sqrtf(float x) -> float intrinsic "llvm.sqrt" float;

fn vadd(fvec3 a, fvec3 b) -> fvec3 {
fn vadd(fvec? a, fvec? b) {
@assert a.dim == b.dim, "vadd requires both vectors to be the same width";

a + b
}

fn vscale(fvec3 a, float b) -> fvec3 {
fn vscale(fvec? a, float b) {
a * b
}

fn vdot(fvec3 a, fvec3 b) -> float {
fn vdot(fvec? a, fvec? b) -> float {
@assert a.dim == b.dim, "vdot requires both vectors to be the same width";

let mult = a * b;
mult.x + mult.y + mult.z
let mut accum = 0.0;

iter 0:(a.dim - 1) i {
accum = accum + mult[i];
};

accum
}

fn vcross(fvec3 a, fvec3 b) -> fvec3 {
let x = a.y * b.z - a.z * b.y;
let y = a.z * b.x - a.x * b.z;
let z = a.x * b.y - a.y * b.x;

Vec<x, y, z>
}

impure fn vnorm(fvec3 v) -> fvec3 {
impure fn vnorm(fvec? v) {
let denom = 1.0 / __builtin_sqrtf(vdot(v, v));

v * denom
}
1 change: 0 additions & 1 deletion flake.nix
Original file line number Diff line number Diff line change
Expand Up @@ -42,7 +42,6 @@

checks.default = havenOcaml;
checks.ocaml = havenOcaml;
checks.legacy = havenLegacy;

devShells.default = pkgs.mkShell {
nativeBuildInputs = with pkgs; [
Expand Down
1 change: 1 addition & 0 deletions src/bin/haven.ml
Original file line number Diff line number Diff line change
Expand Up @@ -70,6 +70,7 @@ let collect_pipeline_diagnostics (pipeline : Analysis.Pipeline.result) =
pipeline.typing.diagnostics
@ pipeline.verify.diagnostics
@ pipeline.semantic.diagnostics
@ pipeline.asserts.diagnostics
@ pipeline.purity.diagnostics
@ pipeline.ownership.diagnostics

Expand Down
55 changes: 47 additions & 8 deletions src/bin/lsp/document_store.ml
Original file line number Diff line number Diff line change
Expand Up @@ -85,11 +85,23 @@ let location_from_error_message ~filename message =
in
Option.bind location_text parse_line_col

let analyze_document uri text =
(* Text of whichever open document lives at filesystem [path], if any.
   Passed to the analysis pipeline as its import resolver (see
   [analyze_document]), so imports can be satisfied from unsaved buffers. *)
let get_text_by_path (store : t) path =
  let same_path (doc : document) =
    String.equal (DocumentUri.to_path doc.uri) path
  in
  Hashtbl.fold
    (fun _ (doc : document) found ->
      if Option.is_some found then found
      else if same_path doc then Some doc.text
      else None)
    store None

let analyze_document (store : t) uri text =
let filename = DocumentUri.to_path uri in
try
let cst = Haven.Parser.parse_string ~filename text in
let pipeline = Analysis.Pipeline.run_cst cst in
let pipeline =
Analysis.Pipeline.run_cst ~import_text_resolver:(get_text_by_path store)
cst
in
(Some cst, Some pipeline, None)
with
| Failure message
Expand All @@ -111,12 +123,15 @@ let analyze_document uri text =
in
(None, None, Some parse_error)

let reanalyze doc =
let cst, pipeline, parse_error = analyze_document doc.uri doc.text in
(* Re-run parsing and analysis for [doc] and cache the results on its
   mutable fields.  [store] is threaded through so the pipeline can resolve
   imports against other open documents (see [analyze_document]). *)
let reanalyze store doc =
let cst, pipeline, parse_error = analyze_document store doc.uri doc.text in
doc.cst <- cst;
doc.pipeline <- pipeline;
doc.parse_error <- parse_error

(* Re-run analysis for every document in the store.  Documents can read each
   other's text during import resolution ([get_text_by_path] is used as the
   pipeline's import resolver), so a change to one document may invalidate
   analysis results for others. *)
let reanalyze_all (store : t) =
Hashtbl.iter (fun _ doc -> reanalyze store doc) store

let open_doc (store : t) (td : TextDocumentItem.t) =
let uri = td.uri in
let doc =
Expand All @@ -129,10 +144,12 @@ let open_doc (store : t) (td : TextDocumentItem.t) =
parse_error = None;
}
in
reanalyze doc;
Hashtbl.replace store uri doc
Hashtbl.replace store uri doc;
reanalyze_all store

let close_doc (store : t) (uri : DocumentUri.t) = Hashtbl.remove store uri
(* Remove the document at [uri] from the store, then re-analyze the remaining
   documents so anything that resolved imports against the closed document's
   in-memory text is recomputed. *)
let close_doc (store : t) (uri : DocumentUri.t) =
Hashtbl.remove store uri;
reanalyze_all store

let line_offsets text =
let offsets = ref [ 0 ] in
Expand Down Expand Up @@ -178,7 +195,29 @@ let change_doc (store : t) (d : VersionedTextDocumentIdentifier.t)
| Some doc ->
doc.version <- Some d.version;
List.iter (apply_change doc) evs;
reanalyze doc
reanalyze_all store

(* Handle textDocument/didSave.  [text] carries the full document contents
   when the client honours the server's [includeText] save option; with no
   text, the currently stored contents are kept.  Either way, every open
   document is re-analyzed.  A save for an unknown document with text opens
   a fresh entry; with neither an entry nor text there is nothing to do. *)
let save_doc (store : t) (id : TextDocumentIdentifier.t) text =
  match Hashtbl.find_opt store id.uri with
  | Some doc ->
      (match text with
      | Some fresh -> doc.text <- fresh
      | None -> ());
      reanalyze_all store
  | None -> (
      match text with
      | None -> ()
      | Some fresh ->
          let doc =
            {
              uri = id.uri;
              version = None;
              text = fresh;
              cst = None;
              pipeline = None;
              parse_error = None;
            }
          in
          Hashtbl.replace store id.uri doc;
          reanalyze_all store)

(* Current text of the open document at [uri], or [None] if it is not open. *)
let get_text (store : t) (uri : DocumentUri.t) : string option =
  match Hashtbl.find_opt store uri with
  | Some doc -> Some doc.text
  | None -> None
Expand Down
8 changes: 4 additions & 4 deletions src/bin/lsp/document_symbols.ml
Original file line number Diff line number Diff line change
Expand Up @@ -24,12 +24,12 @@ let function_detail (fn : Cst.function_decl) =
"fn";
])
in
let return_type =
let return_suffix =
match fn.value.return_type with
| Some ty -> type_text ty
| None -> "void"
| Some ty -> " -> " ^ type_text ty
| None -> ""
in
Printf.sprintf "%s (%s) -> %s" prefix (String.concat ", " params) return_type
Printf.sprintf "%s (%s)%s" prefix (String.concat ", " params) return_suffix

let variable_detail (decl : Cst.var_decl) =
Printf.sprintf "%s%s"
Expand Down
69 changes: 57 additions & 12 deletions src/bin/lsp/haven_lsp.ml
Original file line number Diff line number Diff line change
Expand Up @@ -23,6 +23,7 @@ let collect_pipeline_diagnostics (pipeline : Analysis.Pipeline.result) =
pipeline.typing.diagnostics
@ pipeline.verify.diagnostics
@ pipeline.semantic.diagnostics
@ pipeline.asserts.diagnostics
@ pipeline.purity.diagnostics
@ pipeline.ownership.diagnostics

Expand Down Expand Up @@ -59,12 +60,21 @@ let publish_diagnostics_params (state : state) (uri : DocumentUri.t) =
(PublishDiagnosticsParams.create
~diagnostics:(diagnostics_for_doc doc) ~uri ?version:doc.version ())

(* Build publishDiagnostics params for every open document.  The resulting
   list's order follows hash-table iteration order and is unspecified. *)
let publish_all_diagnostics_params (state : state) =
  let add doc_uri _doc collected =
    match publish_diagnostics_params state doc_uri with
    | Some params -> params :: collected
    | None -> collected
  in
  Hashtbl.fold add state.docs []

let server_capabilities () : ServerCapabilities.t =
ServerCapabilities.create
~textDocumentSync:
(`TextDocumentSyncOptions
(TextDocumentSyncOptions.create ~openClose:true
~change:TextDocumentSyncKind.Incremental ()))
~change:TextDocumentSyncKind.Incremental
~save:(`SaveOptions (SaveOptions.create ~includeText:true ())) ()))
~definitionProvider:(`Bool true)
~documentHighlightProvider:(`Bool true)
~documentFormattingProvider:(`Bool true)
Expand Down Expand Up @@ -98,6 +108,9 @@ let on_did_change (state : state) (doc : VersionedTextDocumentIdentifier.t)
(evs : TextDocumentContentChangeEvent.t list) =
Document_store.change_doc state.docs doc evs

(* textDocument/didSave notification: hand off to the document store.
   [params.text] is only present when the client honours the server's
   [includeText] save option. *)
let on_did_save (state : state) (params : DidSaveTextDocumentParams.t) =
Document_store.save_doc state.docs params.textDocument params.text

(* Run [f] on the open document at [uri]; [None] when the document is not
   in the store or when [f] itself returns [None]. *)
let with_doc state uri f =
Option.bind (Document_store.get_doc state.docs uri) f

Expand Down Expand Up @@ -187,19 +200,51 @@ let on_code_lenses (state : state) (uri : DocumentUri.t) =
(* workspace/executeCommand: the only recognised command is the code-lens
   one, answered with [`Null]; any other command is unhandled ([None]). *)
let on_execute_command (_state : state) command =
if String.equal command Code_lenses.command_name then Some `Null else None

(* Parse [text] (attributed to [uri]'s path for error locations) into a CST,
   or [None] if parsing fails for any reason.
   NOTE(review): the catch-all [with _] also swallows exceptions such as
   [Out_of_memory] and [Stack_overflow]; consider catching only the parser's
   documented exceptions, as [analyze_document] does. *)
let parse_cst text uri =
let filename = DocumentUri.to_path uri in
try Some (Haven.Parser.parse_string ~filename text) with _ -> None

(* Read the entire file at [path] in binary mode; [None] on any failure
   (missing file, permission error, read error).  The channel is always
   closed via [Fun.protect]. *)
let read_file_text path =
  try
    let ch = open_in_bin path in
    let contents =
      Fun.protect
        ~finally:(fun () -> close_in_noerr ch)
        (fun () -> really_input_string ch (in_channel_length ch))
    in
    Some contents
  with _ -> None

(* Whole-document formatting: emit the parsed CST back to text and return a
   single edit replacing the full document.
   - Document open in the store with a cached CST: use it directly.
   - Open but without a cached CST (analysis failed or never ran): re-parse
     the stored text.
   - Not open at all: fall back to parsing the on-disk contents.
   Returns [None] when the text cannot be obtained or parsed. *)
let format_document (state : state) (uri : DocumentUri.t) :
    TextEdit.t list option =
  (* Shared tail of all three branches: pretty-print [cst] and build the
     edit covering all of [text]. *)
  let edits_of_cst text cst =
    let newText = Haven.Cst.Emit.emit_program_to_string cst in
    [ TextEdit.create ~range:(Lsp_helpers.full_document_range text) ~newText ]
  in
  match Document_store.get_doc state.docs uri with
  | None ->
      let path = DocumentUri.to_path uri in
      Option.bind (read_file_text path) (fun text ->
          Option.map (edits_of_cst text) (parse_cst text uri))
  | Some doc -> (
      match doc.cst with
      | Some cst -> Some (edits_of_cst doc.text cst)
      | None -> Option.map (edits_of_cst doc.text) (parse_cst doc.text uri))

let on_formatting (state : state) (params : DocumentFormattingParams.t) :
TextEdit.t list option =
Expand Down
Loading
Loading