Compare commits


1 commit: main...v1

| Author | SHA1 | Message | Date |
| --- | --- | --- | --- |
| FunctionsAPI | 6d32c45842 | Automatic push from FunctionsAPI | 2025-07-07 13:17:53 +00:00 |

5 changed files with 3652 additions and 1 deletion

Cargo.lock (generated, +3560 lines)

Diff suppressed because the file is too large.

Cargo.toml (new file, +11 lines)

@@ -0,0 +1,11 @@
[package]
edition = "2024"
name = "web"
version = "0.1.0"

[dependencies]
fathom-function = { git = "ssh://git@github.com/fathom-io/pipeline-calculations.git", branch = "main" }
pipeline-application = { git = "ssh://git@github.com/fathom-io/pipeline-calculations.git", branch = "main" }
serde = { version = "1.0.219", features = ["derive"] }
tokio = { version = "1.43.0", features = ["macros", "rt-multi-thread"] }
uuid = { version = "1" }
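
Note that both pipeline dependencies are pulled from the `main` branch of `github.com/fathom-io/pipeline-calculations` over SSH, so building this crate requires SSH access to that repository.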

Modified file (+42 lines, -1 line)

@@ -1,2 +1,43 @@
# f1f699327c944304a00b55058ac9f8eb
# Aggregate all matched anomalies
Runs the algorithm to aggregate all the matched anomalies across ILI runs.
## Input
### Arguments
- `org_id`: a string which should be a valid `uuid` for the organization
- `project_id`: the id of the data project where the pipeline data is found
- `pipeline_id`: an `array` of `strings`, each of which should be a valid `uuid` representing a pipeline.
Note the `pipeline_id` array and `route_file` array should be the same length, so that the first entry
in each array corresponds to the first entry in the other, and so on.
## Creating the function on the platform
To create this function on the platform using the `cli`, set up the port forwarding as shown in the README.
Then run the following command to create the function:
```bash
cargo run functions create \
-f functions/aggregate_all_comparisons/ \
-d "Run algorithm to aggregate all matched anomalies from all ILI comparison runs" \
-i org_id=string \
-i project_id=string \
-i pipeline_id=array
```
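The three `-i` declarations mirror the arguments documented above and the fields of the `Input` struct in `src/main.rs` further down in this diff: `org_id` and `project_id` as strings, and `pipeline_id` as an array of UUID strings.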
## Testing the function locally
You can run and test the function locally by running
```bash
cargo run
```
Then you can check that it works with `curl` as follows:
```bash
curl localhost:8080 -d "$(jq '. | tojson' functions/aggregate_all_comparisons/example_input.json)"
```
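For reference, a successful run should produce a response built from the `Output` struct added in `src/main.rs` (below); the exact envelope depends on how `fathom-function` wraps the handler's return value. A minimal sketch of that payload's serialization, assuming `serde_json` is available as a scratch dependency (it is not in this commit's Cargo.toml):
```rust
use serde::Serialize;

// Mirrors the Output struct in src/main.rs; serde_json is assumed here purely for illustration.
#[derive(Debug, Serialize)]
struct Output {
    status: String,
}

fn main() {
    let body = serde_json::to_string(&Output { status: "Success".to_owned() }).unwrap();
    assert_eq!(body, r#"{"status":"Success"}"#);
    println!("{body}");
}
```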

example_input.json (new file, +7 lines)

@@ -0,0 +1,7 @@
{
"org_id": "2cbfe270-d195-48ad-aed1-24145924635c",
"pipeline_id": [
"01966d47-1d4c-7751-a1f1-0617caa3a00d"
],
"project_id": "680b61b0aedd6f9e639d8699"
}
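
As a quick sanity check, this file should deserialize into the `Input` type defined in `src/main.rs` below. A minimal sketch, assuming `serde_json` as a helper and that the `uuid` crate's serde support is enabled (the commit's Cargo.toml lists `uuid = { version = "1" }` without explicit features, so that support presumably arrives transitively):

```rust
use serde::Deserialize;
use uuid::Uuid;

// Local mirror of the Input struct from src/main.rs, used only for this check.
#[derive(Debug, Deserialize)]
struct Input {
    org_id: Uuid,
    project_id: String,
    pipeline_id: Vec<Uuid>,
}

fn main() {
    let raw = std::fs::read_to_string("functions/aggregate_all_comparisons/example_input.json")
        .expect("example_input.json should be readable");
    let input: Input = serde_json::from_str(&raw).expect("example input should match Input");
    println!(
        "org {} / project {} / {} pipeline id(s)",
        input.org_id,
        input.project_id,
        input.pipeline_id.len()
    );
}
```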

src/main.rs (new file, +32 lines)

@@ -0,0 +1,32 @@
use fathom_function::tracing;
use pipeline_application::application::Application;
use serde::{Deserialize, Serialize};
use uuid::Uuid;

#[fathom_function::function]
async fn aggregate_all_comparisons(input: Input) -> Result<Output, String> {
    let app = Application::new_from_compile_env(input.org_id, &input.project_id).unwrap();
    for pipeline_id in input.pipeline_id {
        app.aggregate_all_comparisons(pipeline_id)
            .await
            .map_err(|err| {
                tracing::error!(%pipeline_id, ?err, "Error running the algorithm to aggregate all matched anomalies");
                format!("{err:?}")
            })?;
    }
    Ok(Output {
        status: "Success".to_owned(),
    })
}

#[derive(Debug, Serialize)]
struct Output {
    status: String,
}

#[derive(Debug, Deserialize)]
struct Input {
    org_id: Uuid,
    project_id: String,
    pipeline_id: Vec<Uuid>,
}
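
Note that the handler stops at the first failing pipeline: the `?` inside the loop returns immediately with the formatted error for that `pipeline_id`, so any remaining entries are not processed. Also note that `Application::new_from_compile_env(...).unwrap()` panics rather than returning an error if the application cannot be constructed.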