Automatic push from FunctionsAPI

FunctionsAPI 2025-07-29 15:30:17 +00:00
parent e129e737d4
commit 591c0198ce
5 changed files with 3680 additions and 1 deletion

3590
Cargo.lock generated Normal file

File diff suppressed because it is too large

11
Cargo.toml Normal file

@@ -0,0 +1,11 @@
[package]
edition = "2024"
name = "web"
version = "0.1.0"

[dependencies]
fathom-function = { git = "ssh://git@github.com/fathom-io/pipeline-calculations.git", branch = "FTHM-13285/scoring" }
pipeline-application = { git = "ssh://git@github.com/fathom-io/pipeline-calculations.git", branch = "FTHM-13285/scoring" }
serde = { version = "1.0.219", features = ["derive"] }
tokio = { version = "1.43.0", features = ["macros", "rt-multi-thread"] }
uuid = { version = "1" }

40
README.md

@@ -1,2 +1,41 @@
# 1ba54b6f7e6b4c518896cc6052b6b7dd

# Runs the algorithm that aggregates all of the unmatched anomalies

Run the calculation that creates a report with all the anomalies that were not matched in
any of the ILI comparison runs.

## Input

### Arguments

- `org_id`: a string which should be a valid `uuid` for the organization
- `project_id`: the id of the data project where the pipeline data is found
- `pipeline_id`: an `array` of `strings`, each of which should be a valid `uuid` representing a pipeline

## Creating the function on the platform

To create this function on the platform using the `cli`, set up the port forwarding as shown in the README.
Then run the following command to create the function.

```bash
cargo run functions create \
  -f functions/aggregate_all_unmatched/ \
  -d "Run the algorithm to aggregate anomalies that were not matched in any of the ILI comparison runs" \
  -i org_id=string \
  -i project_id=string \
  -i pipeline_id=array
```

## Testing the function locally

You can run and test the function locally by running

```bash
cargo run
```

Then you can check that it works with `curl` as follows

```bash
curl localhost:8080 -d $(jq '. | tojson' functions/aggregate_all_unmatched/example_input.json)
```
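
On success the function returns the serialized `Output` struct from `src/main.rs`. Exactly how `fathom-function` wraps handler results may vary, but the body itself looks like:

```json
{ "status": "Success" }
```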

7
example_input.json Normal file

@@ -0,0 +1,7 @@
{
"org_id": "2cbfe270-d195-48ad-aed1-24145924635c",
"pipeline_id": [
"01966d47-1d4c-7751-a1f1-0617caa3a00d"
],
"project_id": "680b61b0aedd6f9e639d8699"
}

32
src/main.rs Normal file

@@ -0,0 +1,32 @@
use fathom_function::tracing;
use pipeline_application::application::Application;
use serde::{Deserialize, Serialize};
use uuid::Uuid;

#[fathom_function::function]
async fn aggregate_all_unmatched(input: Input) -> Result<Output, String> {
    let app = Application::new_from_compile_env(input.org_id, &input.project_id).unwrap();
    for pipeline_id in input.pipeline_id {
        app.unmatched_all(pipeline_id)
            .await
            .map_err(|err| {
                tracing::error!(%pipeline_id, ?err, "Error running the algorithm to aggregate all unmatched anomalies");
                format!("{err:?}")
            })?;
    }
    Ok(Output {
        status: "Success".to_owned(),
    })
}

#[derive(Debug, Serialize)]
struct Output {
    status: String,
}

#[derive(Debug, Deserialize)]
struct Input {
    org_id: Uuid,
    project_id: String,
    pipeline_id: Vec<Uuid>,
}
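
Should `example_input.json` and the `Input` struct ever drift apart, a small unit test can catch it. This is only a sketch: it assumes `serde_json = "1"` is added under `[dev-dependencies]`, that `uuid`'s `serde` feature is enabled (directly or through another dependency), and that the example file sits next to `Cargo.toml` as it does in this commit.

```rust
// Sketch only: assumes `serde_json = "1"` under [dev-dependencies].
#[cfg(test)]
mod tests {
    use super::Input;

    #[test]
    fn example_input_matches_input_struct() {
        // `cargo test` runs from the crate root, where example_input.json lives.
        let raw = std::fs::read_to_string("example_input.json")
            .expect("example_input.json should be present at the crate root");
        let input: Input =
            serde_json::from_str(&raw).expect("example input should deserialize into Input");
        // The committed example contains exactly one pipeline id.
        assert_eq!(input.pipeline_id.len(), 1);
    }
}
```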