Spaces:
Running
Running
Merge pull request #224 from biggraph/darabos-merge-public
Browse filesThis view is limited to 50 files because it contains too many changes.
See raw diff
- .github/workflows/test.yaml +15 -32
- .pre-commit-config.yaml +25 -0
- .python-version +1 -1
- docs/contributing.md +4 -5
- docs/guides/plugins.md +6 -1
- examples/Model definition.lynxkite.json +66 -19
- examples/Model use.lynxkite.json +0 -0
- examples/Multi-output demo.lynxkite.json +14 -3
- examples/NetworkX demo.lynxkite.json +0 -0
- examples/fake_data.py +1 -1
- examples/multi_output_demo.py +1 -1
- examples/ode_lstm.py +54 -0
- examples/word2vec.py +1 -1
- lynxkite-app/pyproject.toml +16 -5
- lynxkite-app/src/lynxkite_app/crdt.py +24 -6
- lynxkite-app/src/lynxkite_app/main.py +7 -3
- lynxkite-app/web/package-lock.json +21 -3
- lynxkite-app/web/package.json +2 -0
- lynxkite-app/web/src/Code.tsx +14 -3
- lynxkite-app/web/src/Directory.tsx +51 -10
- lynxkite-app/web/src/apiTypes.ts +1 -0
- lynxkite-app/web/src/index.css +27 -6
- lynxkite-app/web/src/workspace/NodeSearch.tsx +3 -0
- lynxkite-app/web/src/workspace/Workspace.tsx +68 -16
- lynxkite-app/web/src/workspace/nodes/LynxKiteNode.tsx +43 -3
- lynxkite-app/web/src/workspace/nodes/ModelMappingParameter.tsx +169 -0
- lynxkite-app/web/src/workspace/nodes/NodeParameter.tsx +34 -182
- lynxkite-app/web/src/workspace/nodes/NodeWithTableView.tsx +2 -2
- lynxkite-app/web/src/workspace/nodes/ParameterInput.tsx +20 -0
- lynxkite-app/web/tests/basic.spec.ts +7 -4
- lynxkite-app/web/tests/errors.spec.ts +5 -5
- lynxkite-app/web/tests/examples.spec.ts +16 -7
- lynxkite-app/web/tests/graph_creation.spec.ts +8 -8
- lynxkite-app/web/tests/lynxkite.ts +21 -2
- lynxkite-core/pyproject.toml +11 -6
- lynxkite-core/src/lynxkite/core/executors/one_by_one.py +9 -41
- lynxkite-core/src/lynxkite/core/executors/simple.py +1 -1
- lynxkite-core/src/lynxkite/core/ops.py +93 -49
- lynxkite-core/src/lynxkite/core/workspace.py +51 -19
- lynxkite-core/tests/test_one_by_one.py +2 -2
- lynxkite-core/tests/test_ops.py +26 -20
- lynxkite-core/tests/test_simple.py +3 -2
- lynxkite-core/tests/test_workspace.py +8 -6
- lynxkite-graph-analytics/pyproject.toml +26 -7
- lynxkite-graph-analytics/src/lynxkite_graph_analytics/__init__.py +1 -1
- lynxkite-graph-analytics/src/lynxkite_graph_analytics/core.py +102 -20
- lynxkite-graph-analytics/src/lynxkite_graph_analytics/lynxkite_ops.py +52 -34
- lynxkite-graph-analytics/src/lynxkite_graph_analytics/ml_ops.py +60 -18
- lynxkite-graph-analytics/src/lynxkite_graph_analytics/networkx_ops.py +140 -61
- lynxkite-graph-analytics/src/lynxkite_graph_analytics/pytorch/__init__.py +7 -0
.github/workflows/test.yaml
CHANGED
|
@@ -6,19 +6,20 @@ on:
|
|
| 6 |
jobs:
|
| 7 |
test:
|
| 8 |
runs-on: ubuntu-latest
|
| 9 |
-
env:
|
| 10 |
-
UV_SYSTEM_PYTHON: 1
|
| 11 |
steps:
|
| 12 |
- uses: actions/checkout@v4
|
| 13 |
|
| 14 |
- name: Install uv
|
| 15 |
-
uses: astral-sh/setup-uv@
|
| 16 |
-
with:
|
| 17 |
-
enable-cache: true
|
| 18 |
|
| 19 |
-
-
|
| 20 |
-
|
| 21 |
-
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 22 |
|
| 23 |
- name: Install dependencies
|
| 24 |
run: |
|
|
@@ -37,32 +38,16 @@ jobs:
|
|
| 37 |
- name: Run pre-commits
|
| 38 |
run: |
|
| 39 |
uv pip install pre-commit
|
| 40 |
-
pre-commit run --all-files
|
| 41 |
-
|
| 42 |
-
- name: Run core tests
|
| 43 |
-
run: |
|
| 44 |
-
cd lynxkite-core
|
| 45 |
-
pytest
|
| 46 |
|
| 47 |
-
- name: Run
|
| 48 |
run: |
|
| 49 |
-
|
| 50 |
-
pytest
|
| 51 |
|
| 52 |
-
- name:
|
| 53 |
-
run: |
|
| 54 |
-
cd lynxkite-graph-analytics
|
| 55 |
-
pytest
|
| 56 |
-
|
| 57 |
-
- name: Run LynxScribe tests
|
| 58 |
-
run: |
|
| 59 |
-
cd lynxkite-lynxscribe
|
| 60 |
-
pytest
|
| 61 |
-
|
| 62 |
-
- name: Try building the documentation
|
| 63 |
run: |
|
| 64 |
uv pip install mkdocs-material mkdocstrings[python]
|
| 65 |
-
mkdocs build
|
| 66 |
|
| 67 |
- uses: actions/setup-node@v4
|
| 68 |
with:
|
|
@@ -76,9 +61,7 @@ jobs:
|
|
| 76 |
|
| 77 |
- name: Run Playwright tests
|
| 78 |
run: |
|
| 79 |
-
cd lynxkite-app/web
|
| 80 |
-
npm run build
|
| 81 |
-
npm run test
|
| 82 |
|
| 83 |
- uses: actions/upload-artifact@v4
|
| 84 |
name: Upload playwright report
|
|
|
|
| 6 |
jobs:
|
| 7 |
test:
|
| 8 |
runs-on: ubuntu-latest
|
|
|
|
|
|
|
| 9 |
steps:
|
| 10 |
- uses: actions/checkout@v4
|
| 11 |
|
| 12 |
- name: Install uv
|
| 13 |
+
uses: astral-sh/setup-uv@v6
|
|
|
|
|
|
|
| 14 |
|
| 15 |
+
- name: Set up Python
|
| 16 |
+
run: uv python install
|
| 17 |
+
|
| 18 |
+
# - name: Debug ty issue
|
| 19 |
+
# run: |
|
| 20 |
+
# uv pip install ty
|
| 21 |
+
# uv pip list
|
| 22 |
+
# uv run python -m ty check
|
| 23 |
|
| 24 |
- name: Install dependencies
|
| 25 |
run: |
|
|
|
|
| 38 |
- name: Run pre-commits
|
| 39 |
run: |
|
| 40 |
uv pip install pre-commit
|
| 41 |
+
uv run pre-commit run --all-files
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 42 |
|
| 43 |
+
- name: Run Python unittests
|
| 44 |
run: |
|
| 45 |
+
uv run pytest --asyncio-mode=auto
|
|
|
|
| 46 |
|
| 47 |
+
- name: Build the documentation
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 48 |
run: |
|
| 49 |
uv pip install mkdocs-material mkdocstrings[python]
|
| 50 |
+
uv run mkdocs build
|
| 51 |
|
| 52 |
- uses: actions/setup-node@v4
|
| 53 |
with:
|
|
|
|
| 61 |
|
| 62 |
- name: Run Playwright tests
|
| 63 |
run: |
|
| 64 |
+
uv run bash -c 'cd lynxkite-app/web; npm run build; npm run test'
|
|
|
|
|
|
|
| 65 |
|
| 66 |
- uses: actions/upload-artifact@v4
|
| 67 |
name: Upload playwright report
|
.pre-commit-config.yaml
CHANGED
|
@@ -15,3 +15,28 @@ repos:
|
|
| 15 |
rev: v1.9.4
|
| 16 |
hooks:
|
| 17 |
- id: biome-check
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 15 |
rev: v1.9.4
|
| 16 |
hooks:
|
| 17 |
- id: biome-check
|
| 18 |
+
# https://github.com/astral-sh/ty/issues/269
|
| 19 |
+
- repo: local
|
| 20 |
+
hooks:
|
| 21 |
+
- id: ty-check
|
| 22 |
+
name: ty-check
|
| 23 |
+
language: python
|
| 24 |
+
entry: uv run ty check
|
| 25 |
+
pass_filenames: false
|
| 26 |
+
args: [--python=.venv/]
|
| 27 |
+
additional_dependencies: [ty]
|
| 28 |
+
- repo: https://github.com/fpgmaas/deptry.git
|
| 29 |
+
rev: "0.23.0"
|
| 30 |
+
hooks:
|
| 31 |
+
- id: deptry
|
| 32 |
+
name: deptry for lynxkite-app
|
| 33 |
+
entry: bash -c 'cd lynxkite-app && deptry .'
|
| 34 |
+
- id: deptry
|
| 35 |
+
name: deptry for lynxkite-core
|
| 36 |
+
entry: bash -c 'cd lynxkite-core && deptry .'
|
| 37 |
+
- id: deptry
|
| 38 |
+
name: deptry for lynxkite-graph-analytics
|
| 39 |
+
entry: bash -c 'cd lynxkite-graph-analytics && deptry .'
|
| 40 |
+
- id: deptry
|
| 41 |
+
name: deptry for lynxkite-pillow-example
|
| 42 |
+
entry: bash -c 'cd lynxkite-pillow-example && deptry .'
|
.python-version
CHANGED
|
@@ -1 +1 @@
|
|
| 1 |
-
3.
|
|
|
|
| 1 |
+
3.12
|
docs/contributing.md
CHANGED
|
@@ -22,7 +22,7 @@ Install everything like this:
|
|
| 22 |
uv venv
|
| 23 |
source .venv/bin/activate
|
| 24 |
uvx pre-commit install
|
| 25 |
-
uv
|
| 26 |
```
|
| 27 |
|
| 28 |
This also builds the frontend, hopefully very quickly. To run it:
|
|
@@ -41,10 +41,10 @@ npm run dev
|
|
| 41 |
|
| 42 |
## Executing tests
|
| 43 |
|
| 44 |
-
Run all tests with a single command, or look inside to see how to run them individually:
|
| 45 |
-
|
| 46 |
```bash
|
| 47 |
-
|
|
|
|
|
|
|
| 48 |
```
|
| 49 |
|
| 50 |
## Documentation
|
|
@@ -52,6 +52,5 @@ Run all tests with a single command, or look inside to see how to run them indiv
|
|
| 52 |
To work on the documentation:
|
| 53 |
|
| 54 |
```bash
|
| 55 |
-
uv pip install mkdocs-material mkdocstrings[python]
|
| 56 |
mkdocs serve
|
| 57 |
```
|
|
|
|
| 22 |
uv venv
|
| 23 |
source .venv/bin/activate
|
| 24 |
uvx pre-commit install
|
| 25 |
+
uv sync
|
| 26 |
```
|
| 27 |
|
| 28 |
This also builds the frontend, hopefully very quickly. To run it:
|
|
|
|
| 41 |
|
| 42 |
## Executing tests
|
| 43 |
|
|
|
|
|
|
|
| 44 |
```bash
|
| 45 |
+
pytest # Runs all backend unit tests.
|
| 46 |
+
pytest lynxkite-core # Runs tests for one package.
|
| 47 |
+
cd lynxkite-app/web && npm run test # Runs frontend tests.
|
| 48 |
```
|
| 49 |
|
| 50 |
## Documentation
|
|
|
|
| 52 |
To work on the documentation:
|
| 53 |
|
| 54 |
```bash
|
|
|
|
| 55 |
mkdocs serve
|
| 56 |
```
|
docs/guides/plugins.md
CHANGED
|
@@ -57,7 +57,12 @@ Let's review the changes we made.
|
|
| 57 |
|
| 58 |
The [`@op`](../reference/lynxkite-core/ops.md#lynxkite.core.ops.op) decorator registers a
|
| 59 |
function as a LynxKite operation. The first argument is the name of the environment,
|
| 60 |
-
the
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 61 |
|
| 62 |
When defining multiple operations, you can use
|
| 63 |
[`ops.op_registration`](../reference/lynxkite-core/ops.md#lynxkite.core.ops.op_registration)
|
|
|
|
| 57 |
|
| 58 |
The [`@op`](../reference/lynxkite-core/ops.md#lynxkite.core.ops.op) decorator registers a
|
| 59 |
function as a LynxKite operation. The first argument is the name of the environment,
|
| 60 |
+
the last argument is the name of the operation. Between the two, you can list the hierarchy of
|
| 61 |
+
categories the operation belongs to. For example:
|
| 62 |
+
|
| 63 |
+
```python
|
| 64 |
+
@op("LynxKite Graph Analytics", "Machine learning", "Preprocessing", "Split train/test set")
|
| 65 |
+
```
|
| 66 |
|
| 67 |
When defining multiple operations, you can use
|
| 68 |
[`ops.op_registration`](../reference/lynxkite-core/ops.md#lynxkite.core.ops.op_registration)
|
examples/Model definition.lynxkite.json
CHANGED
|
@@ -81,7 +81,10 @@
|
|
| 81 |
"error": null,
|
| 82 |
"input_metadata": null,
|
| 83 |
"meta": {
|
|
|
|
| 84 |
"color": "green",
|
|
|
|
|
|
|
| 85 |
"inputs": [
|
| 86 |
{
|
| 87 |
"name": "loss",
|
|
@@ -110,7 +113,7 @@
|
|
| 110 |
}
|
| 111 |
},
|
| 112 |
{
|
| 113 |
-
"default": 0.
|
| 114 |
"name": "lr",
|
| 115 |
"type": {
|
| 116 |
"type": "<class 'float'>"
|
|
@@ -119,6 +122,7 @@
|
|
| 119 |
],
|
| 120 |
"type": "basic"
|
| 121 |
},
|
|
|
|
| 122 |
"params": {
|
| 123 |
"lr": "0.1",
|
| 124 |
"type": "SGD"
|
|
@@ -126,7 +130,7 @@
|
|
| 126 |
"status": "done",
|
| 127 |
"title": "Optimizer"
|
| 128 |
},
|
| 129 |
-
"dragHandle": ".
|
| 130 |
"height": 250.0,
|
| 131 |
"id": "Optimizer 2",
|
| 132 |
"position": {
|
|
@@ -144,7 +148,10 @@
|
|
| 144 |
"error": null,
|
| 145 |
"input_metadata": null,
|
| 146 |
"meta": {
|
|
|
|
| 147 |
"color": "orange",
|
|
|
|
|
|
|
| 148 |
"inputs": [
|
| 149 |
{
|
| 150 |
"name": "x",
|
|
@@ -170,23 +177,30 @@
|
|
| 170 |
"name": "type",
|
| 171 |
"type": {
|
| 172 |
"enum": [
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 173 |
"ReLU",
|
| 174 |
-
"
|
| 175 |
-
"
|
| 176 |
-
"
|
|
|
|
| 177 |
]
|
| 178 |
}
|
| 179 |
}
|
| 180 |
],
|
| 181 |
"type": "basic"
|
| 182 |
},
|
|
|
|
| 183 |
"params": {
|
| 184 |
-
"type": "
|
| 185 |
},
|
| 186 |
"status": "done",
|
| 187 |
"title": "Activation"
|
| 188 |
},
|
| 189 |
-
"dragHandle": ".
|
| 190 |
"height": 200.0,
|
| 191 |
"id": "Activation 1",
|
| 192 |
"position": {
|
|
@@ -204,7 +218,10 @@
|
|
| 204 |
"error": null,
|
| 205 |
"input_metadata": null,
|
| 206 |
"meta": {
|
| 207 |
-
"
|
|
|
|
|
|
|
|
|
|
| 208 |
"inputs": [],
|
| 209 |
"name": "Input: tensor",
|
| 210 |
"outputs": [
|
|
@@ -227,13 +244,14 @@
|
|
| 227 |
],
|
| 228 |
"type": "basic"
|
| 229 |
},
|
|
|
|
| 230 |
"params": {
|
| 231 |
"name": "Y"
|
| 232 |
},
|
| 233 |
"status": "done",
|
| 234 |
"title": "Input: tensor"
|
| 235 |
},
|
| 236 |
-
"dragHandle": ".
|
| 237 |
"height": 200.0,
|
| 238 |
"id": "Input: tensor 3",
|
| 239 |
"position": {
|
|
@@ -251,7 +269,10 @@
|
|
| 251 |
"error": null,
|
| 252 |
"input_metadata": null,
|
| 253 |
"meta": {
|
|
|
|
| 254 |
"color": "orange",
|
|
|
|
|
|
|
| 255 |
"inputs": [
|
| 256 |
{
|
| 257 |
"name": "x",
|
|
@@ -281,11 +302,12 @@
|
|
| 281 |
"params": [],
|
| 282 |
"type": "basic"
|
| 283 |
},
|
|
|
|
| 284 |
"params": {},
|
| 285 |
"status": "done",
|
| 286 |
"title": "MSE loss"
|
| 287 |
},
|
| 288 |
-
"dragHandle": ".
|
| 289 |
"height": 200.0,
|
| 290 |
"id": "MSE loss 2",
|
| 291 |
"position": {
|
|
@@ -303,7 +325,10 @@
|
|
| 303 |
"error": null,
|
| 304 |
"input_metadata": null,
|
| 305 |
"meta": {
|
|
|
|
| 306 |
"color": "orange",
|
|
|
|
|
|
|
| 307 |
"inputs": [
|
| 308 |
{
|
| 309 |
"name": "input",
|
|
@@ -341,6 +366,7 @@
|
|
| 341 |
],
|
| 342 |
"type": "basic"
|
| 343 |
},
|
|
|
|
| 344 |
"params": {
|
| 345 |
"same_weights": false,
|
| 346 |
"times": "2"
|
|
@@ -348,7 +374,7 @@
|
|
| 348 |
"status": "done",
|
| 349 |
"title": "Repeat"
|
| 350 |
},
|
| 351 |
-
"dragHandle": ".
|
| 352 |
"height": 200.0,
|
| 353 |
"id": "Repeat 1",
|
| 354 |
"position": {
|
|
@@ -366,7 +392,10 @@
|
|
| 366 |
"error": null,
|
| 367 |
"input_metadata": null,
|
| 368 |
"meta": {
|
|
|
|
| 369 |
"color": "blue",
|
|
|
|
|
|
|
| 370 |
"inputs": [
|
| 371 |
{
|
| 372 |
"name": "x",
|
|
@@ -397,13 +426,14 @@
|
|
| 397 |
],
|
| 398 |
"type": "basic"
|
| 399 |
},
|
|
|
|
| 400 |
"params": {
|
| 401 |
"output_dim": "4"
|
| 402 |
},
|
| 403 |
"status": "done",
|
| 404 |
"title": "Linear"
|
| 405 |
},
|
| 406 |
-
"dragHandle": ".
|
| 407 |
"height": 189.0,
|
| 408 |
"id": "Linear 1",
|
| 409 |
"position": {
|
|
@@ -421,7 +451,10 @@
|
|
| 421 |
"error": null,
|
| 422 |
"input_metadata": null,
|
| 423 |
"meta": {
|
| 424 |
-
"
|
|
|
|
|
|
|
|
|
|
| 425 |
"inputs": [],
|
| 426 |
"name": "Input: tensor",
|
| 427 |
"outputs": [
|
|
@@ -444,13 +477,14 @@
|
|
| 444 |
],
|
| 445 |
"type": "basic"
|
| 446 |
},
|
|
|
|
| 447 |
"params": {
|
| 448 |
"name": "X"
|
| 449 |
},
|
| 450 |
"status": "done",
|
| 451 |
"title": "Input: tensor"
|
| 452 |
},
|
| 453 |
-
"dragHandle": ".
|
| 454 |
"height": 200.0,
|
| 455 |
"id": "Input: tensor 1",
|
| 456 |
"position": {
|
|
@@ -468,7 +502,10 @@
|
|
| 468 |
"error": null,
|
| 469 |
"input_metadata": null,
|
| 470 |
"meta": {
|
|
|
|
| 471 |
"color": "orange",
|
|
|
|
|
|
|
| 472 |
"inputs": [],
|
| 473 |
"name": "Constant vector",
|
| 474 |
"outputs": [
|
|
@@ -498,6 +535,7 @@
|
|
| 498 |
],
|
| 499 |
"type": "basic"
|
| 500 |
},
|
|
|
|
| 501 |
"params": {
|
| 502 |
"size": "1",
|
| 503 |
"value": "1"
|
|
@@ -505,7 +543,7 @@
|
|
| 505 |
"status": "done",
|
| 506 |
"title": "Constant vector"
|
| 507 |
},
|
| 508 |
-
"dragHandle": ".
|
| 509 |
"height": 258.0,
|
| 510 |
"id": "Constant vector 1",
|
| 511 |
"position": {
|
|
@@ -523,7 +561,10 @@
|
|
| 523 |
"error": null,
|
| 524 |
"input_metadata": null,
|
| 525 |
"meta": {
|
|
|
|
| 526 |
"color": "orange",
|
|
|
|
|
|
|
| 527 |
"inputs": [
|
| 528 |
{
|
| 529 |
"name": "a",
|
|
@@ -553,11 +594,12 @@
|
|
| 553 |
"params": [],
|
| 554 |
"type": "basic"
|
| 555 |
},
|
|
|
|
| 556 |
"params": {},
|
| 557 |
"status": "done",
|
| 558 |
"title": "Add"
|
| 559 |
},
|
| 560 |
-
"dragHandle": ".
|
| 561 |
"height": 200.0,
|
| 562 |
"id": "Add 1",
|
| 563 |
"position": {
|
|
@@ -575,7 +617,10 @@
|
|
| 575 |
"error": null,
|
| 576 |
"input_metadata": null,
|
| 577 |
"meta": {
|
| 578 |
-
"
|
|
|
|
|
|
|
|
|
|
| 579 |
"inputs": [
|
| 580 |
{
|
| 581 |
"name": "x",
|
|
@@ -606,11 +651,12 @@
|
|
| 606 |
],
|
| 607 |
"type": "basic"
|
| 608 |
},
|
|
|
|
| 609 |
"params": {},
|
| 610 |
"status": "done",
|
| 611 |
"title": "Output"
|
| 612 |
},
|
| 613 |
-
"dragHandle": ".
|
| 614 |
"height": 200.0,
|
| 615 |
"id": "Output 1",
|
| 616 |
"position": {
|
|
@@ -620,5 +666,6 @@
|
|
| 620 |
"type": "basic",
|
| 621 |
"width": 200.0
|
| 622 |
}
|
| 623 |
-
]
|
|
|
|
| 624 |
}
|
|
|
|
| 81 |
"error": null,
|
| 82 |
"input_metadata": null,
|
| 83 |
"meta": {
|
| 84 |
+
"categories": [],
|
| 85 |
"color": "green",
|
| 86 |
+
"doc": null,
|
| 87 |
+
"id": "Optimizer",
|
| 88 |
"inputs": [
|
| 89 |
{
|
| 90 |
"name": "loss",
|
|
|
|
| 113 |
}
|
| 114 |
},
|
| 115 |
{
|
| 116 |
+
"default": 0.0001,
|
| 117 |
"name": "lr",
|
| 118 |
"type": {
|
| 119 |
"type": "<class 'float'>"
|
|
|
|
| 122 |
],
|
| 123 |
"type": "basic"
|
| 124 |
},
|
| 125 |
+
"op_id": "Optimizer",
|
| 126 |
"params": {
|
| 127 |
"lr": "0.1",
|
| 128 |
"type": "SGD"
|
|
|
|
| 130 |
"status": "done",
|
| 131 |
"title": "Optimizer"
|
| 132 |
},
|
| 133 |
+
"dragHandle": ".drag-handle",
|
| 134 |
"height": 250.0,
|
| 135 |
"id": "Optimizer 2",
|
| 136 |
"position": {
|
|
|
|
| 148 |
"error": null,
|
| 149 |
"input_metadata": null,
|
| 150 |
"meta": {
|
| 151 |
+
"categories": [],
|
| 152 |
"color": "orange",
|
| 153 |
+
"doc": null,
|
| 154 |
+
"id": "Activation",
|
| 155 |
"inputs": [
|
| 156 |
{
|
| 157 |
"name": "x",
|
|
|
|
| 177 |
"name": "type",
|
| 178 |
"type": {
|
| 179 |
"enum": [
|
| 180 |
+
"ELU",
|
| 181 |
+
"GELU",
|
| 182 |
+
"LeakyReLU",
|
| 183 |
+
"Mish",
|
| 184 |
+
"PReLU",
|
| 185 |
"ReLU",
|
| 186 |
+
"Sigmoid",
|
| 187 |
+
"SiLU",
|
| 188 |
+
"Softplus",
|
| 189 |
+
"Tanh"
|
| 190 |
]
|
| 191 |
}
|
| 192 |
}
|
| 193 |
],
|
| 194 |
"type": "basic"
|
| 195 |
},
|
| 196 |
+
"op_id": "Activation",
|
| 197 |
"params": {
|
| 198 |
+
"type": "LeakyReLU"
|
| 199 |
},
|
| 200 |
"status": "done",
|
| 201 |
"title": "Activation"
|
| 202 |
},
|
| 203 |
+
"dragHandle": ".drag-handle",
|
| 204 |
"height": 200.0,
|
| 205 |
"id": "Activation 1",
|
| 206 |
"position": {
|
|
|
|
| 218 |
"error": null,
|
| 219 |
"input_metadata": null,
|
| 220 |
"meta": {
|
| 221 |
+
"categories": [],
|
| 222 |
+
"color": "gray",
|
| 223 |
+
"doc": null,
|
| 224 |
+
"id": "Input: tensor",
|
| 225 |
"inputs": [],
|
| 226 |
"name": "Input: tensor",
|
| 227 |
"outputs": [
|
|
|
|
| 244 |
],
|
| 245 |
"type": "basic"
|
| 246 |
},
|
| 247 |
+
"op_id": "Input: tensor",
|
| 248 |
"params": {
|
| 249 |
"name": "Y"
|
| 250 |
},
|
| 251 |
"status": "done",
|
| 252 |
"title": "Input: tensor"
|
| 253 |
},
|
| 254 |
+
"dragHandle": ".drag-handle",
|
| 255 |
"height": 200.0,
|
| 256 |
"id": "Input: tensor 3",
|
| 257 |
"position": {
|
|
|
|
| 269 |
"error": null,
|
| 270 |
"input_metadata": null,
|
| 271 |
"meta": {
|
| 272 |
+
"categories": [],
|
| 273 |
"color": "orange",
|
| 274 |
+
"doc": null,
|
| 275 |
+
"id": "MSE loss",
|
| 276 |
"inputs": [
|
| 277 |
{
|
| 278 |
"name": "x",
|
|
|
|
| 302 |
"params": [],
|
| 303 |
"type": "basic"
|
| 304 |
},
|
| 305 |
+
"op_id": "MSE loss",
|
| 306 |
"params": {},
|
| 307 |
"status": "done",
|
| 308 |
"title": "MSE loss"
|
| 309 |
},
|
| 310 |
+
"dragHandle": ".drag-handle",
|
| 311 |
"height": 200.0,
|
| 312 |
"id": "MSE loss 2",
|
| 313 |
"position": {
|
|
|
|
| 325 |
"error": null,
|
| 326 |
"input_metadata": null,
|
| 327 |
"meta": {
|
| 328 |
+
"categories": [],
|
| 329 |
"color": "orange",
|
| 330 |
+
"doc": null,
|
| 331 |
+
"id": "Repeat",
|
| 332 |
"inputs": [
|
| 333 |
{
|
| 334 |
"name": "input",
|
|
|
|
| 366 |
],
|
| 367 |
"type": "basic"
|
| 368 |
},
|
| 369 |
+
"op_id": "Repeat",
|
| 370 |
"params": {
|
| 371 |
"same_weights": false,
|
| 372 |
"times": "2"
|
|
|
|
| 374 |
"status": "done",
|
| 375 |
"title": "Repeat"
|
| 376 |
},
|
| 377 |
+
"dragHandle": ".drag-handle",
|
| 378 |
"height": 200.0,
|
| 379 |
"id": "Repeat 1",
|
| 380 |
"position": {
|
|
|
|
| 392 |
"error": null,
|
| 393 |
"input_metadata": null,
|
| 394 |
"meta": {
|
| 395 |
+
"categories": [],
|
| 396 |
"color": "blue",
|
| 397 |
+
"doc": null,
|
| 398 |
+
"id": "Linear",
|
| 399 |
"inputs": [
|
| 400 |
{
|
| 401 |
"name": "x",
|
|
|
|
| 426 |
],
|
| 427 |
"type": "basic"
|
| 428 |
},
|
| 429 |
+
"op_id": "Linear",
|
| 430 |
"params": {
|
| 431 |
"output_dim": "4"
|
| 432 |
},
|
| 433 |
"status": "done",
|
| 434 |
"title": "Linear"
|
| 435 |
},
|
| 436 |
+
"dragHandle": ".drag-handle",
|
| 437 |
"height": 189.0,
|
| 438 |
"id": "Linear 1",
|
| 439 |
"position": {
|
|
|
|
| 451 |
"error": null,
|
| 452 |
"input_metadata": null,
|
| 453 |
"meta": {
|
| 454 |
+
"categories": [],
|
| 455 |
+
"color": "gray",
|
| 456 |
+
"doc": null,
|
| 457 |
+
"id": "Input: tensor",
|
| 458 |
"inputs": [],
|
| 459 |
"name": "Input: tensor",
|
| 460 |
"outputs": [
|
|
|
|
| 477 |
],
|
| 478 |
"type": "basic"
|
| 479 |
},
|
| 480 |
+
"op_id": "Input: tensor",
|
| 481 |
"params": {
|
| 482 |
"name": "X"
|
| 483 |
},
|
| 484 |
"status": "done",
|
| 485 |
"title": "Input: tensor"
|
| 486 |
},
|
| 487 |
+
"dragHandle": ".drag-handle",
|
| 488 |
"height": 200.0,
|
| 489 |
"id": "Input: tensor 1",
|
| 490 |
"position": {
|
|
|
|
| 502 |
"error": null,
|
| 503 |
"input_metadata": null,
|
| 504 |
"meta": {
|
| 505 |
+
"categories": [],
|
| 506 |
"color": "orange",
|
| 507 |
+
"doc": null,
|
| 508 |
+
"id": "Constant vector",
|
| 509 |
"inputs": [],
|
| 510 |
"name": "Constant vector",
|
| 511 |
"outputs": [
|
|
|
|
| 535 |
],
|
| 536 |
"type": "basic"
|
| 537 |
},
|
| 538 |
+
"op_id": "Constant vector",
|
| 539 |
"params": {
|
| 540 |
"size": "1",
|
| 541 |
"value": "1"
|
|
|
|
| 543 |
"status": "done",
|
| 544 |
"title": "Constant vector"
|
| 545 |
},
|
| 546 |
+
"dragHandle": ".drag-handle",
|
| 547 |
"height": 258.0,
|
| 548 |
"id": "Constant vector 1",
|
| 549 |
"position": {
|
|
|
|
| 561 |
"error": null,
|
| 562 |
"input_metadata": null,
|
| 563 |
"meta": {
|
| 564 |
+
"categories": [],
|
| 565 |
"color": "orange",
|
| 566 |
+
"doc": null,
|
| 567 |
+
"id": "Add",
|
| 568 |
"inputs": [
|
| 569 |
{
|
| 570 |
"name": "a",
|
|
|
|
| 594 |
"params": [],
|
| 595 |
"type": "basic"
|
| 596 |
},
|
| 597 |
+
"op_id": "Add",
|
| 598 |
"params": {},
|
| 599 |
"status": "done",
|
| 600 |
"title": "Add"
|
| 601 |
},
|
| 602 |
+
"dragHandle": ".drag-handle",
|
| 603 |
"height": 200.0,
|
| 604 |
"id": "Add 1",
|
| 605 |
"position": {
|
|
|
|
| 617 |
"error": null,
|
| 618 |
"input_metadata": null,
|
| 619 |
"meta": {
|
| 620 |
+
"categories": [],
|
| 621 |
+
"color": "gray",
|
| 622 |
+
"doc": null,
|
| 623 |
+
"id": "Output",
|
| 624 |
"inputs": [
|
| 625 |
{
|
| 626 |
"name": "x",
|
|
|
|
| 651 |
],
|
| 652 |
"type": "basic"
|
| 653 |
},
|
| 654 |
+
"op_id": "Output",
|
| 655 |
"params": {},
|
| 656 |
"status": "done",
|
| 657 |
"title": "Output"
|
| 658 |
},
|
| 659 |
+
"dragHandle": ".drag-handle",
|
| 660 |
"height": 200.0,
|
| 661 |
"id": "Output 1",
|
| 662 |
"position": {
|
|
|
|
| 666 |
"type": "basic",
|
| 667 |
"width": 200.0
|
| 668 |
}
|
| 669 |
+
],
|
| 670 |
+
"paused": false
|
| 671 |
}
|
examples/Model use.lynxkite.json
CHANGED
|
The diff for this file is too large to render.
See raw diff
|
|
|
examples/Multi-output demo.lynxkite.json
CHANGED
|
@@ -25,6 +25,9 @@
|
|
| 25 |
"error": null,
|
| 26 |
"input_metadata": [],
|
| 27 |
"meta": {
|
|
|
|
|
|
|
|
|
|
| 28 |
"color": "orange",
|
| 29 |
"doc": [
|
| 30 |
{
|
|
@@ -57,6 +60,7 @@
|
|
| 57 |
]
|
| 58 |
}
|
| 59 |
],
|
|
|
|
| 60 |
"inputs": [],
|
| 61 |
"name": "Multi-output example",
|
| 62 |
"outputs": [
|
|
@@ -93,6 +97,7 @@
|
|
| 93 |
],
|
| 94 |
"type": "basic"
|
| 95 |
},
|
|
|
|
| 96 |
"params": {
|
| 97 |
"a_limit": "2",
|
| 98 |
"b_limit": "10"
|
|
@@ -100,7 +105,7 @@
|
|
| 100 |
"status": "done",
|
| 101 |
"title": "Multi-output example"
|
| 102 |
},
|
| 103 |
-
"dragHandle": ".
|
| 104 |
"height": 275.0,
|
| 105 |
"id": "Multi-output example 1",
|
| 106 |
"position": {
|
|
@@ -146,8 +151,10 @@
|
|
| 146 |
}
|
| 147 |
],
|
| 148 |
"meta": {
|
|
|
|
| 149 |
"color": "orange",
|
| 150 |
"doc": null,
|
|
|
|
| 151 |
"inputs": [
|
| 152 |
{
|
| 153 |
"name": "bundle",
|
|
@@ -170,13 +177,14 @@
|
|
| 170 |
],
|
| 171 |
"type": "table_view"
|
| 172 |
},
|
|
|
|
| 173 |
"params": {
|
| 174 |
"limit": 100.0
|
| 175 |
},
|
| 176 |
"status": "done",
|
| 177 |
"title": "View tables"
|
| 178 |
},
|
| 179 |
-
"dragHandle": ".
|
| 180 |
"height": 200.0,
|
| 181 |
"id": "View tables 1",
|
| 182 |
"position": {
|
|
@@ -246,8 +254,10 @@
|
|
| 246 |
}
|
| 247 |
],
|
| 248 |
"meta": {
|
|
|
|
| 249 |
"color": "orange",
|
| 250 |
"doc": null,
|
|
|
|
| 251 |
"inputs": [
|
| 252 |
{
|
| 253 |
"name": "bundle",
|
|
@@ -270,13 +280,14 @@
|
|
| 270 |
],
|
| 271 |
"type": "table_view"
|
| 272 |
},
|
|
|
|
| 273 |
"params": {
|
| 274 |
"limit": 100.0
|
| 275 |
},
|
| 276 |
"status": "done",
|
| 277 |
"title": "View tables"
|
| 278 |
},
|
| 279 |
-
"dragHandle": ".
|
| 280 |
"height": 215.0,
|
| 281 |
"id": "View tables 2",
|
| 282 |
"position": {
|
|
|
|
| 25 |
"error": null,
|
| 26 |
"input_metadata": [],
|
| 27 |
"meta": {
|
| 28 |
+
"categories": [
|
| 29 |
+
"Examples"
|
| 30 |
+
],
|
| 31 |
"color": "orange",
|
| 32 |
"doc": [
|
| 33 |
{
|
|
|
|
| 60 |
]
|
| 61 |
}
|
| 62 |
],
|
| 63 |
+
"id": "Examples > Multi-output example",
|
| 64 |
"inputs": [],
|
| 65 |
"name": "Multi-output example",
|
| 66 |
"outputs": [
|
|
|
|
| 97 |
],
|
| 98 |
"type": "basic"
|
| 99 |
},
|
| 100 |
+
"op_id": "Examples > Multi-output example",
|
| 101 |
"params": {
|
| 102 |
"a_limit": "2",
|
| 103 |
"b_limit": "10"
|
|
|
|
| 105 |
"status": "done",
|
| 106 |
"title": "Multi-output example"
|
| 107 |
},
|
| 108 |
+
"dragHandle": ".drag-handle",
|
| 109 |
"height": 275.0,
|
| 110 |
"id": "Multi-output example 1",
|
| 111 |
"position": {
|
|
|
|
| 151 |
}
|
| 152 |
],
|
| 153 |
"meta": {
|
| 154 |
+
"categories": [],
|
| 155 |
"color": "orange",
|
| 156 |
"doc": null,
|
| 157 |
+
"id": "View tables",
|
| 158 |
"inputs": [
|
| 159 |
{
|
| 160 |
"name": "bundle",
|
|
|
|
| 177 |
],
|
| 178 |
"type": "table_view"
|
| 179 |
},
|
| 180 |
+
"op_id": "View tables",
|
| 181 |
"params": {
|
| 182 |
"limit": 100.0
|
| 183 |
},
|
| 184 |
"status": "done",
|
| 185 |
"title": "View tables"
|
| 186 |
},
|
| 187 |
+
"dragHandle": ".drag-handle",
|
| 188 |
"height": 200.0,
|
| 189 |
"id": "View tables 1",
|
| 190 |
"position": {
|
|
|
|
| 254 |
}
|
| 255 |
],
|
| 256 |
"meta": {
|
| 257 |
+
"categories": [],
|
| 258 |
"color": "orange",
|
| 259 |
"doc": null,
|
| 260 |
+
"id": "View tables",
|
| 261 |
"inputs": [
|
| 262 |
{
|
| 263 |
"name": "bundle",
|
|
|
|
| 280 |
],
|
| 281 |
"type": "table_view"
|
| 282 |
},
|
| 283 |
+
"op_id": "View tables",
|
| 284 |
"params": {
|
| 285 |
"limit": 100.0
|
| 286 |
},
|
| 287 |
"status": "done",
|
| 288 |
"title": "View tables"
|
| 289 |
},
|
| 290 |
+
"dragHandle": ".drag-handle",
|
| 291 |
"height": 215.0,
|
| 292 |
"id": "View tables 2",
|
| 293 |
"position": {
|
examples/NetworkX demo.lynxkite.json
CHANGED
|
The diff for this file is too large to render.
See raw diff
|
|
|
examples/fake_data.py
CHANGED
|
@@ -1,5 +1,5 @@
|
|
| 1 |
from lynxkite.core.ops import op
|
| 2 |
-
from faker import Faker
|
| 3 |
import pandas as pd
|
| 4 |
|
| 5 |
faker = Faker()
|
|
|
|
| 1 |
from lynxkite.core.ops import op
|
| 2 |
+
from faker import Faker # ty: ignore[unresolved-import]
|
| 3 |
import pandas as pd
|
| 4 |
|
| 5 |
faker = Faker()
|
examples/multi_output_demo.py
CHANGED
|
@@ -2,7 +2,7 @@ from lynxkite.core.ops import op
|
|
| 2 |
import pandas as pd
|
| 3 |
|
| 4 |
|
| 5 |
-
@op("LynxKite Graph Analytics", "Multi-output example", outputs=["one", "two"])
|
| 6 |
def multi_output(*, a_limit=4, b_limit=10):
|
| 7 |
"""
|
| 8 |
Returns two outputs. Also demonstrates Numpy-style docstrings.
|
|
|
|
| 2 |
import pandas as pd
|
| 3 |
|
| 4 |
|
| 5 |
+
@op("LynxKite Graph Analytics", "Examples", "Multi-output example", outputs=["one", "two"])
|
| 6 |
def multi_output(*, a_limit=4, b_limit=10):
|
| 7 |
"""
|
| 8 |
Returns two outputs. Also demonstrates Numpy-style docstrings.
|
examples/ode_lstm.py
ADDED
|
@@ -0,0 +1,54 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
from lynxkite.core.ops import op_registration, LongStr
|
| 2 |
+
from lynxkite_graph_analytics.core import Bundle
|
| 3 |
+
from matplotlib import pyplot as plt
|
| 4 |
+
import numpy as np
|
| 5 |
+
import pandas as pd
|
| 6 |
+
import json
|
| 7 |
+
|
| 8 |
+
op = op_registration("LynxKite Graph Analytics")
|
| 9 |
+
|
| 10 |
+
|
| 11 |
+
@op("Drop NA")
|
| 12 |
+
def drop_na(df: pd.DataFrame):
|
| 13 |
+
return df.replace("", np.nan).dropna()
|
| 14 |
+
|
| 15 |
+
|
| 16 |
+
@op("Sort by")
|
| 17 |
+
def sort_by(df: pd.DataFrame, *, key_columns: str):
|
| 18 |
+
df = df.copy()
|
| 19 |
+
df.sort_values(
|
| 20 |
+
by=[k.strip() for k in key_columns.split(",")],
|
| 21 |
+
inplace=True,
|
| 22 |
+
ignore_index=True,
|
| 23 |
+
)
|
| 24 |
+
return df
|
| 25 |
+
|
| 26 |
+
|
| 27 |
+
@op("Group by")
|
| 28 |
+
def group_by(df: pd.DataFrame, *, key_columns: str, aggregation: LongStr):
|
| 29 |
+
key_columns = [k.strip() for k in key_columns.split(",")]
|
| 30 |
+
j = json.loads(aggregation)
|
| 31 |
+
for k, vs in j.items():
|
| 32 |
+
j[k] = [list if v == "list" else v for v in vs]
|
| 33 |
+
res = df.groupby(key_columns).agg(j).reset_index()
|
| 34 |
+
res.columns = ["_".join(col) for col in res.columns]
|
| 35 |
+
return res
|
| 36 |
+
|
| 37 |
+
|
| 38 |
+
@op("Take first element of list")
|
| 39 |
+
def take_first_element(df: pd.DataFrame, *, column: str):
|
| 40 |
+
df = df.copy()
|
| 41 |
+
df[f"{column}_first_element"] = df[column].apply(lambda x: x[0])
|
| 42 |
+
return df
|
| 43 |
+
|
| 44 |
+
|
| 45 |
+
@op("Plot time series", view="matplotlib")
|
| 46 |
+
def plot_time_series(bundle: Bundle, *, table_name: str, index: int, x_column: str, y_columns: str):
|
| 47 |
+
df = bundle.dfs[table_name]
|
| 48 |
+
y_columns = [y.strip() for y in y_columns.split(",")]
|
| 49 |
+
x = df[x_column].iloc[index]
|
| 50 |
+
for y_column in y_columns:
|
| 51 |
+
y = df[y_column].iloc[index]
|
| 52 |
+
plt.plot(x, y, "o-", label=y_column)
|
| 53 |
+
plt.xlabel(x_column)
|
| 54 |
+
plt.legend()
|
examples/word2vec.py
CHANGED
|
@@ -6,7 +6,7 @@ ENV = "LynxKite Graph Analytics"
|
|
| 6 |
|
| 7 |
@op(ENV, "Word2vec for the top 1000 words", slow=True)
|
| 8 |
def word2vec_1000():
|
| 9 |
-
import staticvectors
|
| 10 |
|
| 11 |
model = staticvectors.StaticVectors("neuml/word2vec-quantized")
|
| 12 |
df = pd.read_csv(
|
|
|
|
| 6 |
|
| 7 |
@op(ENV, "Word2vec for the top 1000 words", slow=True)
|
| 8 |
def word2vec_1000():
|
| 9 |
+
import staticvectors # ty: ignore[unresolved-import]
|
| 10 |
|
| 11 |
model = staticvectors.StaticVectors("neuml/word2vec-quantized")
|
| 12 |
df = pd.read_csv(
|
lynxkite-app/pyproject.toml
CHANGED
|
@@ -6,25 +6,28 @@ readme = "README.md"
|
|
| 6 |
requires-python = ">=3.11"
|
| 7 |
dependencies = [
|
| 8 |
"fastapi[standard]>=0.115.6",
|
|
|
|
|
|
|
| 9 |
"lynxkite-core",
|
| 10 |
-
"orjson>=3.10.13",
|
| 11 |
"pycrdt-websocket>=0.16",
|
|
|
|
|
|
|
| 12 |
"sse-starlette>=2.2.1",
|
| 13 |
-
"
|
| 14 |
]
|
| 15 |
classifiers = ["Private :: Do Not Upload"]
|
| 16 |
|
| 17 |
[project.urls]
|
| 18 |
Homepage = "https://github.com/lynxkite/lynxkite-2000/"
|
| 19 |
|
| 20 |
-
[
|
| 21 |
dev = [
|
| 22 |
"pydantic-to-typescript>=2.0.0",
|
| 23 |
-
"
|
| 24 |
]
|
| 25 |
|
| 26 |
[tool.uv.sources]
|
| 27 |
-
lynxkite-core = {
|
| 28 |
|
| 29 |
[build-system]
|
| 30 |
requires = ["setuptools", "wheel", "setuptools-scm"]
|
|
@@ -47,3 +50,11 @@ build_py = "build_frontend.build_py"
|
|
| 47 |
|
| 48 |
[project.scripts]
|
| 49 |
lynxkite = "lynxkite_app.__main__:main"
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 6 |
requires-python = ">=3.11"
|
| 7 |
dependencies = [
|
| 8 |
"fastapi[standard]>=0.115.6",
|
| 9 |
+
"griffe>=1.7.3",
|
| 10 |
+
"joblib>=1.5.1",
|
| 11 |
"lynxkite-core",
|
|
|
|
| 12 |
"pycrdt-websocket>=0.16",
|
| 13 |
+
"pycrdt>=0.12.26",
|
| 14 |
+
"pydantic>=2.11.7",
|
| 15 |
"sse-starlette>=2.2.1",
|
| 16 |
+
"uvicorn>=0.35.0",
|
| 17 |
]
|
| 18 |
classifiers = ["Private :: Do Not Upload"]
|
| 19 |
|
| 20 |
[project.urls]
|
| 21 |
Homepage = "https://github.com/lynxkite/lynxkite-2000/"
|
| 22 |
|
| 23 |
+
[dependency-groups]
|
| 24 |
dev = [
|
| 25 |
"pydantic-to-typescript>=2.0.0",
|
| 26 |
+
"setuptools>=80.9.0",
|
| 27 |
]
|
| 28 |
|
| 29 |
[tool.uv.sources]
|
| 30 |
+
lynxkite-core = { workspace = true }
|
| 31 |
|
| 32 |
[build-system]
|
| 33 |
requires = ["setuptools", "wheel", "setuptools-scm"]
|
|
|
|
| 50 |
|
| 51 |
[project.scripts]
|
| 52 |
lynxkite = "lynxkite_app.__main__:main"
|
| 53 |
+
|
| 54 |
+
[tool.deptry.package_module_name_map]
|
| 55 |
+
lynxkite-core = "lynxkite"
|
| 56 |
+
sse-starlette = "starlette"
|
| 57 |
+
|
| 58 |
+
[tool.deptry.per_rule_ignores]
|
| 59 |
+
DEP002 = ["pycrdt-websocket", "griffe"]
|
| 60 |
+
DEP004 = ["setuptools"]
|
lynxkite-app/src/lynxkite_app/crdt.py
CHANGED
|
@@ -7,8 +7,8 @@ import pathlib
|
|
| 7 |
import fastapi
|
| 8 |
import os.path
|
| 9 |
import pycrdt.websocket
|
| 10 |
-
import pycrdt.store
|
| 11 |
-
import uvicorn
|
| 12 |
import builtins
|
| 13 |
from lynxkite.core import workspace, ops
|
| 14 |
|
|
@@ -59,7 +59,8 @@ class WorkspaceWebsocketServer(pycrdt.websocket.WebsocketServer):
|
|
| 59 |
room = pycrdt.websocket.YRoom(
|
| 60 |
ystore=ystore, ydoc=ydoc, exception_handler=ws_exception_handler
|
| 61 |
)
|
| 62 |
-
room
|
|
|
|
| 63 |
|
| 64 |
def on_change(changes):
|
| 65 |
task = asyncio.create_task(workspace_changed(name, changes, ws))
|
|
@@ -106,7 +107,8 @@ class CodeWebsocketServer(WorkspaceWebsocketServer):
|
|
| 106 |
room = pycrdt.websocket.YRoom(
|
| 107 |
ystore=ystore, ydoc=ydoc, exception_handler=ws_exception_handler
|
| 108 |
)
|
| 109 |
-
room
|
|
|
|
| 110 |
|
| 111 |
def on_change(changes):
|
| 112 |
asyncio.create_task(code_changed(name, changes, text))
|
|
@@ -128,10 +130,12 @@ def clean_input(ws_pyd):
|
|
| 128 |
for p in list(node.data.params):
|
| 129 |
if p.startswith("_"):
|
| 130 |
del node.data.params[p]
|
| 131 |
-
if node.data.
|
| 132 |
node.data.params = {}
|
| 133 |
node.position.x = 0
|
| 134 |
node.position.y = 0
|
|
|
|
|
|
|
| 135 |
if node.model_extra:
|
| 136 |
for key in list(node.model_extra.keys()):
|
| 137 |
delattr(node, key)
|
|
@@ -161,6 +165,7 @@ def crdt_update(
|
|
| 161 |
ValueError: If the Python object provided is not a dict or list.
|
| 162 |
"""
|
| 163 |
if isinstance(python_obj, dict):
|
|
|
|
| 164 |
for key, value in python_obj.items():
|
| 165 |
if key in non_collaborative_fields:
|
| 166 |
crdt_obj[key] = value
|
|
@@ -177,6 +182,7 @@ def crdt_update(
|
|
| 177 |
else:
|
| 178 |
crdt_obj[key] = value
|
| 179 |
elif isinstance(python_obj, list):
|
|
|
|
| 180 |
for i, value in enumerate(python_obj):
|
| 181 |
if isinstance(value, dict):
|
| 182 |
if i >= len(crdt_obj):
|
|
@@ -218,7 +224,7 @@ last_known_versions = {}
|
|
| 218 |
delayed_executions = {}
|
| 219 |
|
| 220 |
|
| 221 |
-
async def workspace_changed(name: str, changes: pycrdt.MapEvent, ws_crdt: pycrdt.Map):
|
| 222 |
"""Callback to react to changes in the workspace.
|
| 223 |
|
| 224 |
Args:
|
|
@@ -242,6 +248,10 @@ async def workspace_changed(name: str, changes: pycrdt.MapEvent, ws_crdt: pycrdt
|
|
| 242 |
getattr(change, "keys", {}).get("__execution_delay", {}).get("newValue", 0)
|
| 243 |
for change in changes
|
| 244 |
)
|
|
|
|
|
|
|
|
|
|
|
|
|
| 245 |
if delay:
|
| 246 |
task = asyncio.create_task(execute(name, ws_crdt, ws_pyd, delay))
|
| 247 |
delayed_executions[name] = task
|
|
@@ -289,6 +299,14 @@ async def code_changed(name: str, changes: pycrdt.TextEvent, text: pycrdt.Text):
|
|
| 289 |
f.write(contents)
|
| 290 |
|
| 291 |
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 292 |
@contextlib.asynccontextmanager
|
| 293 |
async def lifespan(app):
|
| 294 |
global ws_websocket_server
|
|
|
|
| 7 |
import fastapi
|
| 8 |
import os.path
|
| 9 |
import pycrdt.websocket
|
| 10 |
+
import pycrdt.store.file
|
| 11 |
+
import uvicorn.protocols.utils
|
| 12 |
import builtins
|
| 13 |
from lynxkite.core import workspace, ops
|
| 14 |
|
|
|
|
| 59 |
room = pycrdt.websocket.YRoom(
|
| 60 |
ystore=ystore, ydoc=ydoc, exception_handler=ws_exception_handler
|
| 61 |
)
|
| 62 |
+
# We hang the YDoc pointer on the room, so it only gets garbage collected when the room does.
|
| 63 |
+
room.ws = ws # ty: ignore[unresolved-attribute]
|
| 64 |
|
| 65 |
def on_change(changes):
|
| 66 |
task = asyncio.create_task(workspace_changed(name, changes, ws))
|
|
|
|
| 107 |
room = pycrdt.websocket.YRoom(
|
| 108 |
ystore=ystore, ydoc=ydoc, exception_handler=ws_exception_handler
|
| 109 |
)
|
| 110 |
+
# We hang the YDoc pointer on the room, so it only gets garbage collected when the room does.
|
| 111 |
+
room.text = text # ty: ignore[unresolved-attribute]
|
| 112 |
|
| 113 |
def on_change(changes):
|
| 114 |
asyncio.create_task(code_changed(name, changes, text))
|
|
|
|
| 130 |
for p in list(node.data.params):
|
| 131 |
if p.startswith("_"):
|
| 132 |
del node.data.params[p]
|
| 133 |
+
if node.data.op_id == "Comment":
|
| 134 |
node.data.params = {}
|
| 135 |
node.position.x = 0
|
| 136 |
node.position.y = 0
|
| 137 |
+
node.width = 0
|
| 138 |
+
node.height = 0
|
| 139 |
if node.model_extra:
|
| 140 |
for key in list(node.model_extra.keys()):
|
| 141 |
delattr(node, key)
|
|
|
|
| 165 |
ValueError: If the Python object provided is not a dict or list.
|
| 166 |
"""
|
| 167 |
if isinstance(python_obj, dict):
|
| 168 |
+
assert isinstance(crdt_obj, pycrdt.Map), "CRDT object must be a Map for a dict input"
|
| 169 |
for key, value in python_obj.items():
|
| 170 |
if key in non_collaborative_fields:
|
| 171 |
crdt_obj[key] = value
|
|
|
|
| 182 |
else:
|
| 183 |
crdt_obj[key] = value
|
| 184 |
elif isinstance(python_obj, list):
|
| 185 |
+
assert isinstance(crdt_obj, pycrdt.Array), "CRDT object must be an Array for a list input"
|
| 186 |
for i, value in enumerate(python_obj):
|
| 187 |
if isinstance(value, dict):
|
| 188 |
if i >= len(crdt_obj):
|
|
|
|
| 224 |
delayed_executions = {}
|
| 225 |
|
| 226 |
|
| 227 |
+
async def workspace_changed(name: str, changes: list[pycrdt.MapEvent], ws_crdt: pycrdt.Map):
|
| 228 |
"""Callback to react to changes in the workspace.
|
| 229 |
|
| 230 |
Args:
|
|
|
|
| 248 |
getattr(change, "keys", {}).get("__execution_delay", {}).get("newValue", 0)
|
| 249 |
for change in changes
|
| 250 |
)
|
| 251 |
+
# Check if workspace is paused - if so, skip automatic execution
|
| 252 |
+
if getattr(ws_pyd, "paused", False):
|
| 253 |
+
print(f"Skipping automatic execution for {name} in {ws_pyd.env} - workspace is paused")
|
| 254 |
+
return
|
| 255 |
if delay:
|
| 256 |
task = asyncio.create_task(execute(name, ws_crdt, ws_pyd, delay))
|
| 257 |
delayed_executions[name] = task
|
|
|
|
| 299 |
f.write(contents)
|
| 300 |
|
| 301 |
|
| 302 |
+
ws_websocket_server: WorkspaceWebsocketServer
|
| 303 |
+
code_websocket_server: CodeWebsocketServer
|
| 304 |
+
|
| 305 |
+
|
| 306 |
+
def get_room(name):
|
| 307 |
+
return ws_websocket_server.get_room(name)
|
| 308 |
+
|
| 309 |
+
|
| 310 |
@contextlib.asynccontextmanager
|
| 311 |
async def lifespan(app):
|
| 312 |
global ws_websocket_server
|
lynxkite-app/src/lynxkite_app/main.py
CHANGED
|
@@ -4,20 +4,24 @@ import shutil
|
|
| 4 |
import pydantic
|
| 5 |
import fastapi
|
| 6 |
import importlib
|
|
|
|
| 7 |
import pathlib
|
| 8 |
import pkgutil
|
| 9 |
from fastapi.staticfiles import StaticFiles
|
| 10 |
from fastapi.middleware.gzip import GZipMiddleware
|
| 11 |
-
import starlette
|
| 12 |
from lynxkite.core import ops
|
| 13 |
from lynxkite.core import workspace
|
| 14 |
from . import crdt
|
| 15 |
|
|
|
|
|
|
|
|
|
|
| 16 |
|
| 17 |
def detect_plugins():
|
| 18 |
plugins = {}
|
| 19 |
for _, name, _ in pkgutil.iter_modules():
|
| 20 |
-
if name.startswith("lynxkite_"):
|
| 21 |
print(f"Importing {name}")
|
| 22 |
plugins[name] = importlib.import_module(name)
|
| 23 |
if not plugins:
|
|
@@ -136,7 +140,7 @@ async def upload(req: fastapi.Request):
|
|
| 136 |
@app.post("/api/execute_workspace")
|
| 137 |
async def execute_workspace(name: str):
|
| 138 |
"""Trigger and await the execution of a workspace."""
|
| 139 |
-
room = await crdt.
|
| 140 |
ws_pyd = workspace.Workspace.model_validate(room.ws.to_py())
|
| 141 |
await crdt.execute(name, room.ws, ws_pyd)
|
| 142 |
|
|
|
|
| 4 |
import pydantic
|
| 5 |
import fastapi
|
| 6 |
import importlib
|
| 7 |
+
import joblib
|
| 8 |
import pathlib
|
| 9 |
import pkgutil
|
| 10 |
from fastapi.staticfiles import StaticFiles
|
| 11 |
from fastapi.middleware.gzip import GZipMiddleware
|
| 12 |
+
import starlette.exceptions
|
| 13 |
from lynxkite.core import ops
|
| 14 |
from lynxkite.core import workspace
|
| 15 |
from . import crdt
|
| 16 |
|
| 17 |
+
mem = joblib.Memory(".joblib-cache")
|
| 18 |
+
ops.CACHE_WRAPPER = mem.cache
|
| 19 |
+
|
| 20 |
|
| 21 |
def detect_plugins():
|
| 22 |
plugins = {}
|
| 23 |
for _, name, _ in pkgutil.iter_modules():
|
| 24 |
+
if name.startswith("lynxkite_") and name != "lynxkite_app":
|
| 25 |
print(f"Importing {name}")
|
| 26 |
plugins[name] = importlib.import_module(name)
|
| 27 |
if not plugins:
|
|
|
|
| 140 |
@app.post("/api/execute_workspace")
|
| 141 |
async def execute_workspace(name: str):
|
| 142 |
"""Trigger and await the execution of a workspace."""
|
| 143 |
+
room = await crdt.get_room(name)
|
| 144 |
ws_pyd = workspace.Workspace.model_validate(room.ws.to_py())
|
| 145 |
await crdt.execute(name, room.ws, ws_pyd)
|
| 146 |
|
lynxkite-app/web/package-lock.json
CHANGED
|
@@ -23,6 +23,7 @@
|
|
| 23 |
"daisyui": "^4.12.20",
|
| 24 |
"echarts": "^5.5.1",
|
| 25 |
"fuse.js": "^7.0.0",
|
|
|
|
| 26 |
"json-schema-to-typescript": "^15.0.3",
|
| 27 |
"monaco-editor": "^0.52.2",
|
| 28 |
"react": "^18.3.1",
|
|
@@ -40,6 +41,7 @@
|
|
| 40 |
"devDependencies": {
|
| 41 |
"@playwright/test": "^1.50.1",
|
| 42 |
"@tailwindcss/typography": "^0.5.16",
|
|
|
|
| 43 |
"@types/node": "^22.13.1",
|
| 44 |
"@types/react": "^18.3.14",
|
| 45 |
"@types/react-dom": "^18.3.2",
|
|
@@ -1894,6 +1896,13 @@
|
|
| 1894 |
"@types/unist": "*"
|
| 1895 |
}
|
| 1896 |
},
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1897 |
"node_modules/@types/json-schema": {
|
| 1898 |
"version": "7.0.15",
|
| 1899 |
"resolved": "https://registry.npmjs.org/@types/json-schema/-/json-schema-7.0.15.tgz",
|
|
@@ -2357,9 +2366,9 @@
|
|
| 2357 |
}
|
| 2358 |
},
|
| 2359 |
"node_modules/caniuse-lite": {
|
| 2360 |
-
"version": "1.0.
|
| 2361 |
-
"resolved": "https://registry.npmjs.org/caniuse-lite/-/caniuse-lite-1.0.
|
| 2362 |
-
"integrity": "sha512-
|
| 2363 |
"funding": [
|
| 2364 |
{
|
| 2365 |
"type": "opencollective",
|
|
@@ -3667,6 +3676,15 @@
|
|
| 3667 |
"jiti": "bin/jiti.js"
|
| 3668 |
}
|
| 3669 |
},
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 3670 |
"node_modules/js-tokens": {
|
| 3671 |
"version": "4.0.0",
|
| 3672 |
"resolved": "https://registry.npmjs.org/js-tokens/-/js-tokens-4.0.0.tgz",
|
|
|
|
| 23 |
"daisyui": "^4.12.20",
|
| 24 |
"echarts": "^5.5.1",
|
| 25 |
"fuse.js": "^7.0.0",
|
| 26 |
+
"jmespath": "^0.16.0",
|
| 27 |
"json-schema-to-typescript": "^15.0.3",
|
| 28 |
"monaco-editor": "^0.52.2",
|
| 29 |
"react": "^18.3.1",
|
|
|
|
| 41 |
"devDependencies": {
|
| 42 |
"@playwright/test": "^1.50.1",
|
| 43 |
"@tailwindcss/typography": "^0.5.16",
|
| 44 |
+
"@types/jmespath": "^0.15.2",
|
| 45 |
"@types/node": "^22.13.1",
|
| 46 |
"@types/react": "^18.3.14",
|
| 47 |
"@types/react-dom": "^18.3.2",
|
|
|
|
| 1896 |
"@types/unist": "*"
|
| 1897 |
}
|
| 1898 |
},
|
| 1899 |
+
"node_modules/@types/jmespath": {
|
| 1900 |
+
"version": "0.15.2",
|
| 1901 |
+
"resolved": "https://registry.npmjs.org/@types/jmespath/-/jmespath-0.15.2.tgz",
|
| 1902 |
+
"integrity": "sha512-pegh49FtNsC389Flyo9y8AfkVIZn9MMPE9yJrO9svhq6Fks2MwymULWjZqySuxmctd3ZH4/n7Mr98D+1Qo5vGA==",
|
| 1903 |
+
"dev": true,
|
| 1904 |
+
"license": "MIT"
|
| 1905 |
+
},
|
| 1906 |
"node_modules/@types/json-schema": {
|
| 1907 |
"version": "7.0.15",
|
| 1908 |
"resolved": "https://registry.npmjs.org/@types/json-schema/-/json-schema-7.0.15.tgz",
|
|
|
|
| 2366 |
}
|
| 2367 |
},
|
| 2368 |
"node_modules/caniuse-lite": {
|
| 2369 |
+
"version": "1.0.30001723",
|
| 2370 |
+
"resolved": "https://registry.npmjs.org/caniuse-lite/-/caniuse-lite-1.0.30001723.tgz",
|
| 2371 |
+
"integrity": "sha512-1R/elMjtehrFejxwmexeXAtae5UO9iSyFn6G/I806CYC/BLyyBk1EPhrKBkWhy6wM6Xnm47dSJQec+tLJ39WHw==",
|
| 2372 |
"funding": [
|
| 2373 |
{
|
| 2374 |
"type": "opencollective",
|
|
|
|
| 3676 |
"jiti": "bin/jiti.js"
|
| 3677 |
}
|
| 3678 |
},
|
| 3679 |
+
"node_modules/jmespath": {
|
| 3680 |
+
"version": "0.16.0",
|
| 3681 |
+
"resolved": "https://registry.npmjs.org/jmespath/-/jmespath-0.16.0.tgz",
|
| 3682 |
+
"integrity": "sha512-9FzQjJ7MATs1tSpnco1K6ayiYE3figslrXA72G2HQ/n76RzvYlofyi5QM+iX4YRs/pu3yzxlVQSST23+dMDknw==",
|
| 3683 |
+
"license": "Apache-2.0",
|
| 3684 |
+
"engines": {
|
| 3685 |
+
"node": ">= 0.6.0"
|
| 3686 |
+
}
|
| 3687 |
+
},
|
| 3688 |
"node_modules/js-tokens": {
|
| 3689 |
"version": "4.0.0",
|
| 3690 |
"resolved": "https://registry.npmjs.org/js-tokens/-/js-tokens-4.0.0.tgz",
|
lynxkite-app/web/package.json
CHANGED
|
@@ -25,6 +25,7 @@
|
|
| 25 |
"daisyui": "^4.12.20",
|
| 26 |
"echarts": "^5.5.1",
|
| 27 |
"fuse.js": "^7.0.0",
|
|
|
|
| 28 |
"json-schema-to-typescript": "^15.0.3",
|
| 29 |
"monaco-editor": "^0.52.2",
|
| 30 |
"react": "^18.3.1",
|
|
@@ -42,6 +43,7 @@
|
|
| 42 |
"devDependencies": {
|
| 43 |
"@playwright/test": "^1.50.1",
|
| 44 |
"@tailwindcss/typography": "^0.5.16",
|
|
|
|
| 45 |
"@types/node": "^22.13.1",
|
| 46 |
"@types/react": "^18.3.14",
|
| 47 |
"@types/react-dom": "^18.3.2",
|
|
|
|
| 25 |
"daisyui": "^4.12.20",
|
| 26 |
"echarts": "^5.5.1",
|
| 27 |
"fuse.js": "^7.0.0",
|
| 28 |
+
"jmespath": "^0.16.0",
|
| 29 |
"json-schema-to-typescript": "^15.0.3",
|
| 30 |
"monaco-editor": "^0.52.2",
|
| 31 |
"react": "^18.3.1",
|
|
|
|
| 43 |
"devDependencies": {
|
| 44 |
"@playwright/test": "^1.50.1",
|
| 45 |
"@tailwindcss/typography": "^0.5.16",
|
| 46 |
+
"@types/jmespath": "^0.15.2",
|
| 47 |
"@types/node": "^22.13.1",
|
| 48 |
"@types/react": "^18.3.14",
|
| 49 |
"@types/react-dom": "^18.3.2",
|
lynxkite-app/web/src/Code.tsx
CHANGED
|
@@ -50,9 +50,13 @@ export default function Code() {
|
|
| 50 |
yDocRef.current = new Y.Doc();
|
| 51 |
const text = yDocRef.current.getText("text");
|
| 52 |
const proto = location.protocol === "https:" ? "wss:" : "ws:";
|
|
|
|
|
|
|
|
|
|
|
|
|
| 53 |
wsProviderRef.current = new WebsocketProvider(
|
| 54 |
`${proto}//${location.host}/ws/code/crdt`,
|
| 55 |
-
|
| 56 |
yDocRef.current,
|
| 57 |
);
|
| 58 |
editorRef.current.getModel()!.setEOL(0); // https://github.com/yjs/y-monaco/issues/6
|
|
@@ -73,10 +77,11 @@ export default function Code() {
|
|
| 73 |
return (
|
| 74 |
<div className="workspace">
|
| 75 |
<div className="top-bar bg-neutral">
|
| 76 |
-
<Link className="logo" to="">
|
| 77 |
<img alt="" src={favicon} />
|
| 78 |
</Link>
|
| 79 |
<div className="ws-name">{path}</div>
|
|
|
|
| 80 |
<div className="tools text-secondary">
|
| 81 |
<button className="btn btn-link">
|
| 82 |
<Atom />
|
|
@@ -84,7 +89,13 @@ export default function Code() {
|
|
| 84 |
<button className="btn btn-link">
|
| 85 |
<Backspace />
|
| 86 |
</button>
|
| 87 |
-
<Link
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 88 |
<Close />
|
| 89 |
</Link>
|
| 90 |
</div>
|
|
|
|
| 50 |
yDocRef.current = new Y.Doc();
|
| 51 |
const text = yDocRef.current.getText("text");
|
| 52 |
const proto = location.protocol === "https:" ? "wss:" : "ws:";
|
| 53 |
+
const encodedPath = path!
|
| 54 |
+
.split("/")
|
| 55 |
+
.map((segment) => encodeURIComponent(segment))
|
| 56 |
+
.join("/");
|
| 57 |
wsProviderRef.current = new WebsocketProvider(
|
| 58 |
`${proto}//${location.host}/ws/code/crdt`,
|
| 59 |
+
encodedPath!,
|
| 60 |
yDocRef.current,
|
| 61 |
);
|
| 62 |
editorRef.current.getModel()!.setEOL(0); // https://github.com/yjs/y-monaco/issues/6
|
|
|
|
| 77 |
return (
|
| 78 |
<div className="workspace">
|
| 79 |
<div className="top-bar bg-neutral">
|
| 80 |
+
<Link className="logo" to="/">
|
| 81 |
<img alt="" src={favicon} />
|
| 82 |
</Link>
|
| 83 |
<div className="ws-name">{path}</div>
|
| 84 |
+
<title>{path}</title>
|
| 85 |
<div className="tools text-secondary">
|
| 86 |
<button className="btn btn-link">
|
| 87 |
<Atom />
|
|
|
|
| 89 |
<button className="btn btn-link">
|
| 90 |
<Backspace />
|
| 91 |
</button>
|
| 92 |
+
<Link
|
| 93 |
+
to={`/dir/${parentDir
|
| 94 |
+
.split("/")
|
| 95 |
+
.map((segment) => encodeURIComponent(segment))
|
| 96 |
+
.join("/")}`}
|
| 97 |
+
className="btn btn-link"
|
| 98 |
+
>
|
| 99 |
<Close />
|
| 100 |
</Link>
|
| 101 |
</div>
|
lynxkite-app/web/src/Directory.tsx
CHANGED
|
@@ -29,23 +29,55 @@ function EntryCreator(props: {
|
|
| 29 |
onCreate: (name: string) => void;
|
| 30 |
}) {
|
| 31 |
const [isCreating, setIsCreating] = useState(false);
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 32 |
return (
|
| 33 |
<>
|
| 34 |
{isCreating ? (
|
| 35 |
<form
|
| 36 |
onSubmit={(e) => {
|
| 37 |
e.preventDefault();
|
| 38 |
-
|
|
|
|
|
|
|
|
|
|
|
|
|
| 39 |
}}
|
| 40 |
>
|
| 41 |
<input
|
| 42 |
-
className=
|
| 43 |
autoFocus
|
| 44 |
type="text"
|
| 45 |
name="entryName"
|
| 46 |
onBlur={() => setIsCreating(false)}
|
|
|
|
| 47 |
placeholder={`${props.label} name`}
|
| 48 |
/>
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 49 |
</form>
|
| 50 |
) : (
|
| 51 |
<button type="button" onClick={() => setIsCreating(true)}>
|
|
@@ -67,13 +99,14 @@ export default function Directory() {
|
|
| 67 |
const navigate = useNavigate();
|
| 68 |
|
| 69 |
function link(item: DirectoryEntry) {
|
|
|
|
| 70 |
if (item.type === "directory") {
|
| 71 |
-
return `/dir/${
|
| 72 |
}
|
| 73 |
if (item.type === "workspace") {
|
| 74 |
-
return `/edit/${
|
| 75 |
}
|
| 76 |
-
return `/code/${
|
| 77 |
}
|
| 78 |
|
| 79 |
function shortName(item: DirectoryEntry) {
|
|
@@ -83,13 +116,20 @@ export default function Directory() {
|
|
| 83 |
?.replace(/[.]lynxkite[.]json$/, "");
|
| 84 |
}
|
| 85 |
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 86 |
function newWorkspaceIn(path: string, workspaceName: string) {
|
| 87 |
-
const pathSlash = path ? `${path}/` : "";
|
| 88 |
-
navigate(`/edit/${pathSlash}${workspaceName}.lynxkite.json`, {
|
|
|
|
|
|
|
| 89 |
}
|
| 90 |
function newCodeFile(path: string, name: string) {
|
| 91 |
-
const pathSlash = path ? `${path}/` : "";
|
| 92 |
-
navigate(`/code/${pathSlash}${name}`, { replace: true });
|
| 93 |
}
|
| 94 |
async function newFolderIn(path: string, folderName: string) {
|
| 95 |
const pathSlash = path ? `${path}/` : "";
|
|
@@ -99,7 +139,8 @@ export default function Directory() {
|
|
| 99 |
body: JSON.stringify({ path: pathSlash + folderName }),
|
| 100 |
});
|
| 101 |
if (res.ok) {
|
| 102 |
-
|
|
|
|
| 103 |
} else {
|
| 104 |
alert("Failed to create folder.");
|
| 105 |
}
|
|
|
|
| 29 |
onCreate: (name: string) => void;
|
| 30 |
}) {
|
| 31 |
const [isCreating, setIsCreating] = useState(false);
|
| 32 |
+
const [nameValidationError, setNameValidationError] = useState("");
|
| 33 |
+
|
| 34 |
+
function validateName(name: string): boolean {
|
| 35 |
+
if (name.includes("/")) {
|
| 36 |
+
setNameValidationError("Name cannot contain '/' characters");
|
| 37 |
+
return false;
|
| 38 |
+
}
|
| 39 |
+
if (name.trim() === "") {
|
| 40 |
+
setNameValidationError("Name cannot be empty");
|
| 41 |
+
return false;
|
| 42 |
+
}
|
| 43 |
+
setNameValidationError("");
|
| 44 |
+
return true;
|
| 45 |
+
}
|
| 46 |
+
|
| 47 |
return (
|
| 48 |
<>
|
| 49 |
{isCreating ? (
|
| 50 |
<form
|
| 51 |
onSubmit={(e) => {
|
| 52 |
e.preventDefault();
|
| 53 |
+
const name = (e.target as HTMLFormElement).entryName.value.trim();
|
| 54 |
+
if (validateName(name)) {
|
| 55 |
+
props.onCreate(name);
|
| 56 |
+
setIsCreating(false);
|
| 57 |
+
}
|
| 58 |
}}
|
| 59 |
>
|
| 60 |
<input
|
| 61 |
+
className={`input input-ghost w-full ${nameValidationError ? "input-error" : ""}`}
|
| 62 |
autoFocus
|
| 63 |
type="text"
|
| 64 |
name="entryName"
|
| 65 |
onBlur={() => setIsCreating(false)}
|
| 66 |
+
onChange={(e) => validateName(e.target.value)}
|
| 67 |
placeholder={`${props.label} name`}
|
| 68 |
/>
|
| 69 |
+
{nameValidationError && (
|
| 70 |
+
<div
|
| 71 |
+
className="error-message"
|
| 72 |
+
role="alert"
|
| 73 |
+
style={{ position: "absolute", zIndex: 10 }}
|
| 74 |
+
>
|
| 75 |
+
<span className="error-icon" aria-hidden="true">
|
| 76 |
+
⚠️
|
| 77 |
+
</span>
|
| 78 |
+
<span className="error-text">{nameValidationError}</span>
|
| 79 |
+
</div>
|
| 80 |
+
)}
|
| 81 |
</form>
|
| 82 |
) : (
|
| 83 |
<button type="button" onClick={() => setIsCreating(true)}>
|
|
|
|
| 99 |
const navigate = useNavigate();
|
| 100 |
|
| 101 |
function link(item: DirectoryEntry) {
|
| 102 |
+
const encodedName = encodePathSegments(item.name);
|
| 103 |
if (item.type === "directory") {
|
| 104 |
+
return `/dir/${encodedName}`;
|
| 105 |
}
|
| 106 |
if (item.type === "workspace") {
|
| 107 |
+
return `/edit/${encodedName}`;
|
| 108 |
}
|
| 109 |
+
return `/code/${encodedName}`;
|
| 110 |
}
|
| 111 |
|
| 112 |
function shortName(item: DirectoryEntry) {
|
|
|
|
| 116 |
?.replace(/[.]lynxkite[.]json$/, "");
|
| 117 |
}
|
| 118 |
|
| 119 |
+
function encodePathSegments(path: string): string {
|
| 120 |
+
const segments = path.split("/");
|
| 121 |
+
return segments.map((segment) => encodeURIComponent(segment)).join("/");
|
| 122 |
+
}
|
| 123 |
+
|
| 124 |
function newWorkspaceIn(path: string, workspaceName: string) {
|
| 125 |
+
const pathSlash = path ? `${encodePathSegments(path)}/` : "";
|
| 126 |
+
navigate(`/edit/${pathSlash}${encodeURIComponent(workspaceName)}.lynxkite.json`, {
|
| 127 |
+
replace: true,
|
| 128 |
+
});
|
| 129 |
}
|
| 130 |
function newCodeFile(path: string, name: string) {
|
| 131 |
+
const pathSlash = path ? `${encodePathSegments(path)}/` : "";
|
| 132 |
+
navigate(`/code/${pathSlash}${encodeURIComponent(name)}`, { replace: true });
|
| 133 |
}
|
| 134 |
async function newFolderIn(path: string, folderName: string) {
|
| 135 |
const pathSlash = path ? `${path}/` : "";
|
|
|
|
| 139 |
body: JSON.stringify({ path: pathSlash + folderName }),
|
| 140 |
});
|
| 141 |
if (res.ok) {
|
| 142 |
+
const pathSlash = path ? `${encodePathSegments(path)}/` : "";
|
| 143 |
+
navigate(`/dir/${pathSlash}${encodeURIComponent(folderName)}`);
|
| 144 |
} else {
|
| 145 |
alert("Failed to create folder.");
|
| 146 |
}
|
lynxkite-app/web/src/apiTypes.ts
CHANGED
|
@@ -25,6 +25,7 @@ export interface SaveRequest {
|
|
| 25 |
*/
|
| 26 |
export interface Workspace {
|
| 27 |
env?: string;
|
|
|
|
| 28 |
nodes?: WorkspaceNode[];
|
| 29 |
edges?: WorkspaceEdge[];
|
| 30 |
[k: string]: unknown;
|
|
|
|
| 25 |
*/
|
| 26 |
export interface Workspace {
|
| 27 |
env?: string;
|
| 28 |
+
paused?: boolean;
|
| 29 |
nodes?: WorkspaceNode[];
|
| 30 |
edges?: WorkspaceEdge[];
|
| 31 |
[k: string]: unknown;
|
lynxkite-app/web/src/index.css
CHANGED
|
@@ -81,7 +81,7 @@ body {
|
|
| 81 |
display: flex;
|
| 82 |
flex-direction: column;
|
| 83 |
|
| 84 |
-
> :not(.title) {
|
| 85 |
user-select: text;
|
| 86 |
cursor: default;
|
| 87 |
}
|
|
@@ -277,11 +277,6 @@ body {
|
|
| 277 |
padding: 2px 8px;
|
| 278 |
border-radius: 4px 4px 0 0;
|
| 279 |
}
|
| 280 |
-
|
| 281 |
-
.collapsed-param {
|
| 282 |
-
min-height: 20px;
|
| 283 |
-
line-height: 10px;
|
| 284 |
-
}
|
| 285 |
}
|
| 286 |
|
| 287 |
.node-search {
|
|
@@ -704,3 +699,29 @@ body {
|
|
| 704 |
left: -4px;
|
| 705 |
top: -5px;
|
| 706 |
}
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 81 |
display: flex;
|
| 82 |
flex-direction: column;
|
| 83 |
|
| 84 |
+
> :not(.title, .react-flow__handle) {
|
| 85 |
user-select: text;
|
| 86 |
cursor: default;
|
| 87 |
}
|
|
|
|
| 277 |
padding: 2px 8px;
|
| 278 |
border-radius: 4px 4px 0 0;
|
| 279 |
}
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 280 |
}
|
| 281 |
|
| 282 |
.node-search {
|
|
|
|
| 699 |
left: -4px;
|
| 700 |
top: -5px;
|
| 701 |
}
|
| 702 |
+
|
| 703 |
+
.error-message {
|
| 704 |
+
display: flex;
|
| 705 |
+
align-items: center;
|
| 706 |
+
gap: 0.5rem;
|
| 707 |
+
margin-top: 0.25rem;
|
| 708 |
+
padding: 0.5rem;
|
| 709 |
+
background-color: #fee2e2;
|
| 710 |
+
border: 1px solid #fecaca;
|
| 711 |
+
border-radius: 0.375rem;
|
| 712 |
+
color: #dc2626;
|
| 713 |
+
font-size: 0.875rem;
|
| 714 |
+
}
|
| 715 |
+
|
| 716 |
+
.error-icon {
|
| 717 |
+
flex-shrink: 0;
|
| 718 |
+
}
|
| 719 |
+
|
| 720 |
+
.error-text {
|
| 721 |
+
line-height: 1.4;
|
| 722 |
+
}
|
| 723 |
+
|
| 724 |
+
.input-error {
|
| 725 |
+
border-color: #dc2626;
|
| 726 |
+
box-shadow: 0 0 0 1px #dc2626;
|
| 727 |
+
}
|
lynxkite-app/web/src/workspace/NodeSearch.tsx
CHANGED
|
@@ -3,6 +3,8 @@ import { useEffect, useMemo, useRef, useState } from "react";
|
|
| 3 |
|
| 4 |
export type OpsOp = {
|
| 5 |
name: string;
|
|
|
|
|
|
|
| 6 |
type: string;
|
| 7 |
position: { x: number; y: number };
|
| 8 |
params: { name: string; default: any }[];
|
|
@@ -81,6 +83,7 @@ export default function NodeSearch(props: {
|
|
| 81 |
onClick={addSelected}
|
| 82 |
className={`search-result ${index === selectedIndex ? "selected" : ""}`}
|
| 83 |
>
|
|
|
|
| 84 |
{box.item.name}
|
| 85 |
</div>
|
| 86 |
))}
|
|
|
|
| 3 |
|
| 4 |
export type OpsOp = {
|
| 5 |
name: string;
|
| 6 |
+
id: string;
|
| 7 |
+
categories: string[];
|
| 8 |
type: string;
|
| 9 |
position: { x: number; y: number };
|
| 10 |
params: { name: string; default: any }[];
|
|
|
|
| 83 |
onClick={addSelected}
|
| 84 |
className={`search-result ${index === selectedIndex ? "selected" : ""}`}
|
| 85 |
>
|
| 86 |
+
{box.item.categories.map((category) => `${category}\u00A0›\u00A0`)}
|
| 87 |
{box.item.name}
|
| 88 |
</div>
|
| 89 |
))}
|
lynxkite-app/web/src/workspace/Workspace.tsx
CHANGED
|
@@ -29,6 +29,10 @@ import UngroupIcon from "~icons/tabler/library-minus.jsx";
|
|
| 29 |
// @ts-ignore
|
| 30 |
import GroupIcon from "~icons/tabler/library-plus.jsx";
|
| 31 |
// @ts-ignore
|
|
|
|
|
|
|
|
|
|
|
|
|
| 32 |
import Restart from "~icons/tabler/rotate-clockwise.jsx";
|
| 33 |
// @ts-ignore
|
| 34 |
import Close from "~icons/tabler/x.jsx";
|
|
@@ -71,12 +75,24 @@ function LynxKiteFlow() {
|
|
| 71 |
.replace(/[.]lynxkite[.]json$/, "");
|
| 72 |
const [state, setState] = useState({ workspace: {} as WorkspaceType });
|
| 73 |
const [message, setMessage] = useState(null as string | null);
|
|
|
|
| 74 |
useEffect(() => {
|
| 75 |
const state = syncedStore({ workspace: {} as WorkspaceType });
|
| 76 |
setState(state);
|
| 77 |
const doc = getYjsDoc(state);
|
| 78 |
const proto = location.protocol === "https:" ? "wss:" : "ws:";
|
| 79 |
-
const
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 80 |
const onChange = (_update: any, origin: any, _doc: any, _tr: any) => {
|
| 81 |
if (origin === wsProvider) {
|
| 82 |
// An update from the CRDT. Apply it to the local state.
|
|
@@ -100,6 +116,7 @@ function LynxKiteFlow() {
|
|
| 100 |
// Make sure the internal copies are updated.
|
| 101 |
updateNodeInternals(node.id);
|
| 102 |
}
|
|
|
|
| 103 |
}
|
| 104 |
};
|
| 105 |
doc.on("update", onChange);
|
|
@@ -128,30 +145,44 @@ function LynxKiteFlow() {
|
|
| 128 |
!Number.isNaN(ch.position.y)
|
| 129 |
) {
|
| 130 |
getYjsDoc(state).transact(() => {
|
| 131 |
-
|
|
|
|
| 132 |
});
|
|
|
|
|
|
|
| 133 |
} else if (ch.type === "select") {
|
| 134 |
} else if (ch.type === "dimensions") {
|
| 135 |
-
getYjsDoc(state).transact(() =>
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 136 |
} else if (ch.type === "remove") {
|
| 137 |
wnodes.splice(nodeIndex, 1);
|
| 138 |
} else if (ch.type === "replace") {
|
| 139 |
// Ideally we would only update the parameter that changed. But ReactFlow does not give us that detail.
|
| 140 |
-
|
| 141 |
-
collapsed
|
| 142 |
-
|
| 143 |
-
|
| 144 |
-
|
| 145 |
-
}
|
| 146 |
-
__execution_delay
|
| 147 |
-
|
| 148 |
-
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 149 |
} else {
|
| 150 |
console.log("Unknown node change", ch);
|
| 151 |
}
|
| 152 |
}
|
| 153 |
},
|
| 154 |
-
[state],
|
| 155 |
);
|
| 156 |
const onEdgesChange = useCallback(
|
| 157 |
(changes: any[]) => {
|
|
@@ -173,7 +204,11 @@ function LynxKiteFlow() {
|
|
| 173 |
|
| 174 |
const fetcher: Fetcher<Catalogs> = (resource: string, init?: RequestInit) =>
|
| 175 |
fetch(resource, init).then((res) => res.json());
|
| 176 |
-
const
|
|
|
|
|
|
|
|
|
|
|
|
|
| 177 |
const [suppressSearchUntil, setSuppressSearchUntil] = useState(0);
|
| 178 |
const [nodeSearchSettings, setNodeSearchSettings] = useState(
|
| 179 |
undefined as
|
|
@@ -308,6 +343,7 @@ function LynxKiteFlow() {
|
|
| 308 |
data: {
|
| 309 |
meta: { value: meta },
|
| 310 |
title: meta.name,
|
|
|
|
| 311 |
params: Object.fromEntries(meta.params.map((p) => [p.name, p.default])),
|
| 312 |
},
|
| 313 |
};
|
|
@@ -386,11 +422,15 @@ function LynxKiteFlow() {
|
|
| 386 |
}
|
| 387 |
}
|
| 388 |
async function executeWorkspace() {
|
| 389 |
-
const response = await axios.post(`/api/execute_workspace?name=${path}`);
|
| 390 |
if (response.status !== 200) {
|
| 391 |
setMessage("Workspace execution failed.");
|
| 392 |
}
|
| 393 |
}
|
|
|
|
|
|
|
|
|
|
|
|
|
| 394 |
function deleteSelection() {
|
| 395 |
const selectedNodes = nodes.filter((n) => n.selected);
|
| 396 |
const selectedEdges = edges.filter((e) => e.selected);
|
|
@@ -526,13 +566,25 @@ function LynxKiteFlow() {
|
|
| 526 |
<Backspace />
|
| 527 |
</button>
|
| 528 |
</Tooltip>
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 529 |
<Tooltip doc="Re-run the workspace">
|
| 530 |
<button className="btn btn-link" onClick={executeWorkspace}>
|
| 531 |
<Restart />
|
| 532 |
</button>
|
| 533 |
</Tooltip>
|
| 534 |
<Tooltip doc="Close workspace">
|
| 535 |
-
<Link
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 536 |
<Close />
|
| 537 |
</Link>
|
| 538 |
</Tooltip>
|
|
|
|
| 29 |
// @ts-ignore
|
| 30 |
import GroupIcon from "~icons/tabler/library-plus.jsx";
|
| 31 |
// @ts-ignore
|
| 32 |
+
import Pause from "~icons/tabler/player-pause.jsx";
|
| 33 |
+
// @ts-ignore
|
| 34 |
+
import Play from "~icons/tabler/player-play.jsx";
|
| 35 |
+
// @ts-ignore
|
| 36 |
import Restart from "~icons/tabler/rotate-clockwise.jsx";
|
| 37 |
// @ts-ignore
|
| 38 |
import Close from "~icons/tabler/x.jsx";
|
|
|
|
| 75 |
.replace(/[.]lynxkite[.]json$/, "");
|
| 76 |
const [state, setState] = useState({ workspace: {} as WorkspaceType });
|
| 77 |
const [message, setMessage] = useState(null as string | null);
|
| 78 |
+
const [pausedUIState, setPausedUIState] = useState(false);
|
| 79 |
useEffect(() => {
|
| 80 |
const state = syncedStore({ workspace: {} as WorkspaceType });
|
| 81 |
setState(state);
|
| 82 |
const doc = getYjsDoc(state);
|
| 83 |
const proto = location.protocol === "https:" ? "wss:" : "ws:";
|
| 84 |
+
const encodedPath = path!
|
| 85 |
+
.split("/")
|
| 86 |
+
.map((segment) => encodeURIComponent(segment))
|
| 87 |
+
.join("/");
|
| 88 |
+
const wsProvider = new WebsocketProvider(
|
| 89 |
+
`${proto}//${location.host}/ws/crdt`,
|
| 90 |
+
encodedPath,
|
| 91 |
+
doc,
|
| 92 |
+
);
|
| 93 |
+
if (state.workspace && typeof state.workspace.paused === "undefined") {
|
| 94 |
+
state.workspace.paused = false;
|
| 95 |
+
}
|
| 96 |
const onChange = (_update: any, origin: any, _doc: any, _tr: any) => {
|
| 97 |
if (origin === wsProvider) {
|
| 98 |
// An update from the CRDT. Apply it to the local state.
|
|
|
|
| 116 |
// Make sure the internal copies are updated.
|
| 117 |
updateNodeInternals(node.id);
|
| 118 |
}
|
| 119 |
+
setPausedUIState(state.workspace.paused || false);
|
| 120 |
}
|
| 121 |
};
|
| 122 |
doc.on("update", onChange);
|
|
|
|
| 145 |
!Number.isNaN(ch.position.y)
|
| 146 |
) {
|
| 147 |
getYjsDoc(state).transact(() => {
|
| 148 |
+
node.position.x = ch.position.x;
|
| 149 |
+
node.position.y = ch.position.y;
|
| 150 |
});
|
| 151 |
+
// Update edge positions.
|
| 152 |
+
updateNodeInternals(ch.id);
|
| 153 |
} else if (ch.type === "select") {
|
| 154 |
} else if (ch.type === "dimensions") {
|
| 155 |
+
getYjsDoc(state).transact(() => {
|
| 156 |
+
node.width = ch.dimensions.width;
|
| 157 |
+
node.height = ch.dimensions.height;
|
| 158 |
+
});
|
| 159 |
+
// Update edge positions when node size changes.
|
| 160 |
+
updateNodeInternals(ch.id);
|
| 161 |
} else if (ch.type === "remove") {
|
| 162 |
wnodes.splice(nodeIndex, 1);
|
| 163 |
} else if (ch.type === "replace") {
|
| 164 |
// Ideally we would only update the parameter that changed. But ReactFlow does not give us that detail.
|
| 165 |
+
getYjsDoc(state).transact(() => {
|
| 166 |
+
if (node.data.collapsed !== ch.item.data.collapsed) {
|
| 167 |
+
node.data.collapsed = ch.item.data.collapsed;
|
| 168 |
+
// Update edge positions when node collapses/expands.
|
| 169 |
+
setTimeout(() => updateNodeInternals(ch.id), 0);
|
| 170 |
+
}
|
| 171 |
+
if (node.data.__execution_delay !== ch.item.data.__execution_delay) {
|
| 172 |
+
node.data.__execution_delay = ch.item.data.__execution_delay;
|
| 173 |
+
}
|
| 174 |
+
for (const [key, value] of Object.entries(ch.item.data.params)) {
|
| 175 |
+
if (node.data.params[key] !== value) {
|
| 176 |
+
node.data.params[key] = value;
|
| 177 |
+
}
|
| 178 |
+
}
|
| 179 |
+
});
|
| 180 |
} else {
|
| 181 |
console.log("Unknown node change", ch);
|
| 182 |
}
|
| 183 |
}
|
| 184 |
},
|
| 185 |
+
[state, updateNodeInternals],
|
| 186 |
);
|
| 187 |
const onEdgesChange = useCallback(
|
| 188 |
(changes: any[]) => {
|
|
|
|
| 204 |
|
| 205 |
const fetcher: Fetcher<Catalogs> = (resource: string, init?: RequestInit) =>
|
| 206 |
fetch(resource, init).then((res) => res.json());
|
| 207 |
+
const encodedPathForAPI = path!
|
| 208 |
+
.split("/")
|
| 209 |
+
.map((segment) => encodeURIComponent(segment))
|
| 210 |
+
.join("/");
|
| 211 |
+
const catalog = useSWR(`/api/catalog?workspace=${encodedPathForAPI}`, fetcher);
|
| 212 |
const [suppressSearchUntil, setSuppressSearchUntil] = useState(0);
|
| 213 |
const [nodeSearchSettings, setNodeSearchSettings] = useState(
|
| 214 |
undefined as
|
|
|
|
| 343 |
data: {
|
| 344 |
meta: { value: meta },
|
| 345 |
title: meta.name,
|
| 346 |
+
op_id: meta.id,
|
| 347 |
params: Object.fromEntries(meta.params.map((p) => [p.name, p.default])),
|
| 348 |
},
|
| 349 |
};
|
|
|
|
| 422 |
}
|
| 423 |
}
|
| 424 |
async function executeWorkspace() {
|
| 425 |
+
const response = await axios.post(`/api/execute_workspace?name=${encodeURIComponent(path)}`);
|
| 426 |
if (response.status !== 200) {
|
| 427 |
setMessage("Workspace execution failed.");
|
| 428 |
}
|
| 429 |
}
|
| 430 |
+
function togglePause() {
|
| 431 |
+
state.workspace.paused = !state.workspace.paused;
|
| 432 |
+
setPausedUIState(state.workspace.paused);
|
| 433 |
+
}
|
| 434 |
function deleteSelection() {
|
| 435 |
const selectedNodes = nodes.filter((n) => n.selected);
|
| 436 |
const selectedEdges = edges.filter((e) => e.selected);
|
|
|
|
| 566 |
<Backspace />
|
| 567 |
</button>
|
| 568 |
</Tooltip>
|
| 569 |
+
<Tooltip doc={pausedUIState ? "Resume automatic execution" : "Pause automatic execution"}>
|
| 570 |
+
<button className="btn btn-link" onClick={togglePause}>
|
| 571 |
+
{pausedUIState ? <Play /> : <Pause />}
|
| 572 |
+
</button>
|
| 573 |
+
</Tooltip>
|
| 574 |
<Tooltip doc="Re-run the workspace">
|
| 575 |
<button className="btn btn-link" onClick={executeWorkspace}>
|
| 576 |
<Restart />
|
| 577 |
</button>
|
| 578 |
</Tooltip>
|
| 579 |
<Tooltip doc="Close workspace">
|
| 580 |
+
<Link
|
| 581 |
+
className="btn btn-link"
|
| 582 |
+
to={`/dir/${parentDir
|
| 583 |
+
.split("/")
|
| 584 |
+
.map((segment) => encodeURIComponent(segment))
|
| 585 |
+
.join("/")}`}
|
| 586 |
+
aria-label="close"
|
| 587 |
+
>
|
| 588 |
<Close />
|
| 589 |
</Link>
|
| 590 |
</Tooltip>
|
lynxkite-app/web/src/workspace/nodes/LynxKiteNode.tsx
CHANGED
|
@@ -55,7 +55,45 @@ function getHandles(inputs: any[], outputs: any[]) {
|
|
| 55 |
return handles;
|
| 56 |
}
|
| 57 |
|
| 58 |
-
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 59 |
|
| 60 |
function LynxKiteNodeComponent(props: LynxKiteNodeProps) {
|
| 61 |
const reactFlow = useReactFlow();
|
|
@@ -67,9 +105,11 @@ function LynxKiteNodeComponent(props: LynxKiteNodeProps) {
|
|
| 67 |
// ReactFlow handles wheel events to zoom/pan and this would prevent scrolling inside the node.
|
| 68 |
// To stop the event from reaching ReactFlow, we stop propagation on the wheel event.
|
| 69 |
// This must be done with a "passive: false" listener, which we can only register like this.
|
| 70 |
-
containerRef.current?.addEventListener("wheel",
|
|
|
|
|
|
|
| 71 |
return () => {
|
| 72 |
-
containerRef.current?.removeEventListener("wheel",
|
| 73 |
};
|
| 74 |
}, [containerRef]);
|
| 75 |
function titleClicked() {
|
|
|
|
| 55 |
return handles;
|
| 56 |
}
|
| 57 |
|
| 58 |
+
function canScrollX(element: HTMLElement) {
|
| 59 |
+
const style = getComputedStyle(element);
|
| 60 |
+
return style.overflowX === "auto" || style.overflow === "auto";
|
| 61 |
+
}
|
| 62 |
+
function canScrollY(element: HTMLElement) {
|
| 63 |
+
const style = getComputedStyle(element);
|
| 64 |
+
return style.overflowY === "auto" || style.overflow === "auto";
|
| 65 |
+
}
|
| 66 |
+
function canScrollUp(e: HTMLElement) {
|
| 67 |
+
return canScrollY(e) && e.scrollTop > 0;
|
| 68 |
+
}
|
| 69 |
+
function canScrollDown(e: HTMLElement) {
|
| 70 |
+
return canScrollY(e) && e.scrollTop < e.scrollHeight - e.clientHeight - 1;
|
| 71 |
+
}
|
| 72 |
+
function canScrollLeft(e: HTMLElement) {
|
| 73 |
+
return canScrollX(e) && e.scrollLeft > 0;
|
| 74 |
+
}
|
| 75 |
+
function canScrollRight(e: HTMLElement) {
|
| 76 |
+
return canScrollX(e) && e.scrollLeft < e.scrollWidth - e.clientWidth - 1;
|
| 77 |
+
}
|
| 78 |
+
|
| 79 |
+
function onWheel(e: WheelEvent) {
|
| 80 |
+
if (e.ctrlKey) return; // Zoom, not scroll.
|
| 81 |
+
let t = e.target as HTMLElement;
|
| 82 |
+
// If we find an element inside the node container that can apply this scroll event, we stop propagation.
|
| 83 |
+
// Otherwise ReactFlow can have it and pan the workspace.
|
| 84 |
+
while (t && !t.classList.contains("node-container")) {
|
| 85 |
+
if (
|
| 86 |
+
(e.deltaX < 0 && canScrollLeft(t)) ||
|
| 87 |
+
(e.deltaX > 0 && canScrollRight(t)) ||
|
| 88 |
+
(e.deltaY < 0 && canScrollUp(t)) ||
|
| 89 |
+
(e.deltaY > 0 && canScrollDown(t))
|
| 90 |
+
) {
|
| 91 |
+
e.stopPropagation();
|
| 92 |
+
return;
|
| 93 |
+
}
|
| 94 |
+
t = t.parentElement as HTMLElement;
|
| 95 |
+
}
|
| 96 |
+
}
|
| 97 |
|
| 98 |
function LynxKiteNodeComponent(props: LynxKiteNodeProps) {
|
| 99 |
const reactFlow = useReactFlow();
|
|
|
|
| 105 |
// ReactFlow handles wheel events to zoom/pan and this would prevent scrolling inside the node.
|
| 106 |
// To stop the event from reaching ReactFlow, we stop propagation on the wheel event.
|
| 107 |
// This must be done with a "passive: false" listener, which we can only register like this.
|
| 108 |
+
containerRef.current?.addEventListener("wheel", onWheel, {
|
| 109 |
+
passive: false,
|
| 110 |
+
});
|
| 111 |
return () => {
|
| 112 |
+
containerRef.current?.removeEventListener("wheel", onWheel);
|
| 113 |
};
|
| 114 |
}, [containerRef]);
|
| 115 |
function titleClicked() {
|
lynxkite-app/web/src/workspace/nodes/ModelMappingParameter.tsx
ADDED
|
@@ -0,0 +1,169 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
import { useRef } from "react";
|
| 2 |
+
// @ts-ignore
|
| 3 |
+
import ArrowsHorizontal from "~icons/tabler/arrows-horizontal.jsx";
|
| 4 |
+
// @ts-ignore
|
| 5 |
+
import Help from "~icons/tabler/question-mark.jsx";
|
| 6 |
+
import ParameterInput from "./ParameterInput";
|
| 7 |
+
|
| 8 |
+
type Bindings = {
|
| 9 |
+
[key: string]: {
|
| 10 |
+
df: string;
|
| 11 |
+
column: string;
|
| 12 |
+
};
|
| 13 |
+
};
|
| 14 |
+
|
| 15 |
+
type NamedId = {
|
| 16 |
+
name: string;
|
| 17 |
+
id: string;
|
| 18 |
+
};
|
| 19 |
+
|
| 20 |
+
function getModelBindings(
|
| 21 |
+
data: any,
|
| 22 |
+
variant: "training input" | "inference input" | "output",
|
| 23 |
+
): NamedId[] {
|
| 24 |
+
function bindingsOfModel(m: any): string[] {
|
| 25 |
+
switch (variant) {
|
| 26 |
+
case "training input":
|
| 27 |
+
return [
|
| 28 |
+
...m.model_inputs,
|
| 29 |
+
...m.loss_inputs.filter((i: string) => !m.model_outputs.includes(i)),
|
| 30 |
+
];
|
| 31 |
+
case "inference input":
|
| 32 |
+
return m.model_inputs;
|
| 33 |
+
case "output":
|
| 34 |
+
return m.model_outputs;
|
| 35 |
+
}
|
| 36 |
+
}
|
| 37 |
+
const bindings = new Set<NamedId>();
|
| 38 |
+
const inputs = data?.input_metadata?.value ?? data?.input_metadata ?? [];
|
| 39 |
+
for (const input of inputs) {
|
| 40 |
+
const other = input.other ?? {};
|
| 41 |
+
for (const e of Object.values(other) as any[]) {
|
| 42 |
+
if (e.type === "model") {
|
| 43 |
+
for (const id of bindingsOfModel(e.model)) {
|
| 44 |
+
bindings.add({ id, name: e.model.input_output_names[id] ?? id });
|
| 45 |
+
}
|
| 46 |
+
}
|
| 47 |
+
}
|
| 48 |
+
}
|
| 49 |
+
const list = [...bindings];
|
| 50 |
+
list.sort((a, b) => {
|
| 51 |
+
if (a.name < b.name) return -1;
|
| 52 |
+
if (a.name > b.name) return 1;
|
| 53 |
+
return 0;
|
| 54 |
+
});
|
| 55 |
+
return list;
|
| 56 |
+
}
|
| 57 |
+
|
| 58 |
+
function parseJsonOrEmpty(json: string): object {
|
| 59 |
+
try {
|
| 60 |
+
const j = JSON.parse(json);
|
| 61 |
+
if (j !== null && typeof j === "object") {
|
| 62 |
+
return j;
|
| 63 |
+
}
|
| 64 |
+
} catch (e) {}
|
| 65 |
+
return {};
|
| 66 |
+
}
|
| 67 |
+
|
| 68 |
+
export default function ModelMapping({ value, onChange, data, variant }: any) {
|
| 69 |
+
const dfsRef = useRef({} as { [binding: string]: HTMLSelectElement | null });
|
| 70 |
+
const columnsRef = useRef(
|
| 71 |
+
{} as { [binding: string]: HTMLSelectElement | HTMLInputElement | null },
|
| 72 |
+
);
|
| 73 |
+
const v: any = parseJsonOrEmpty(value);
|
| 74 |
+
v.map ??= {};
|
| 75 |
+
const dfs: { [df: string]: string[] } = {};
|
| 76 |
+
const inputs = data?.input_metadata?.value ?? data?.input_metadata ?? [];
|
| 77 |
+
for (const input of inputs) {
|
| 78 |
+
if (!input.dataframes) continue;
|
| 79 |
+
const dataframes = input.dataframes as {
|
| 80 |
+
[df: string]: { columns: string[] };
|
| 81 |
+
};
|
| 82 |
+
for (const [df, { columns }] of Object.entries(dataframes)) {
|
| 83 |
+
dfs[df] = columns;
|
| 84 |
+
}
|
| 85 |
+
}
|
| 86 |
+
const bindings = getModelBindings(data, variant);
|
| 87 |
+
function getMap() {
|
| 88 |
+
const map: Bindings = {};
|
| 89 |
+
for (const binding of bindings) {
|
| 90 |
+
const df = dfsRef.current[binding.id]?.value ?? "";
|
| 91 |
+
const column = columnsRef.current[binding.id]?.value ?? "";
|
| 92 |
+
if (df.length || column.length) {
|
| 93 |
+
map[binding.id] = { df, column };
|
| 94 |
+
}
|
| 95 |
+
}
|
| 96 |
+
return map;
|
| 97 |
+
}
|
| 98 |
+
return (
|
| 99 |
+
<table className="model-mapping-param">
|
| 100 |
+
<tbody>
|
| 101 |
+
{bindings.length > 0 ? (
|
| 102 |
+
bindings.map((binding: NamedId) => (
|
| 103 |
+
<tr key={binding.id}>
|
| 104 |
+
<td>{binding.name}</td>
|
| 105 |
+
<td>
|
| 106 |
+
<ArrowsHorizontal />
|
| 107 |
+
</td>
|
| 108 |
+
<td>
|
| 109 |
+
<select
|
| 110 |
+
className="select select-ghost"
|
| 111 |
+
value={v.map?.[binding.id]?.df}
|
| 112 |
+
ref={(el) => {
|
| 113 |
+
dfsRef.current[binding.id] = el;
|
| 114 |
+
}}
|
| 115 |
+
onChange={() => onChange(JSON.stringify({ map: getMap() }))}
|
| 116 |
+
>
|
| 117 |
+
<option key="" value="" />
|
| 118 |
+
{Object.keys(dfs).map((df: string) => (
|
| 119 |
+
<option key={df} value={df}>
|
| 120 |
+
{df}
|
| 121 |
+
</option>
|
| 122 |
+
))}
|
| 123 |
+
</select>
|
| 124 |
+
</td>
|
| 125 |
+
<td>
|
| 126 |
+
{variant === "output" ? (
|
| 127 |
+
<ParameterInput
|
| 128 |
+
inputRef={(el) => {
|
| 129 |
+
columnsRef.current[binding.id] = el;
|
| 130 |
+
}}
|
| 131 |
+
value={v.map?.[binding.id]?.column}
|
| 132 |
+
onChange={(column, options) => {
|
| 133 |
+
const map = getMap();
|
| 134 |
+
// At this point the <input> has not been updated yet. We use the value from the event.
|
| 135 |
+
const df = dfsRef.current[binding.id]?.value ?? "";
|
| 136 |
+
map[binding.id] ??= { df, column };
|
| 137 |
+
map[binding.id].column = column;
|
| 138 |
+
onChange(JSON.stringify({ map }), options);
|
| 139 |
+
}}
|
| 140 |
+
/>
|
| 141 |
+
) : (
|
| 142 |
+
<select
|
| 143 |
+
className="select select-ghost"
|
| 144 |
+
value={v.map?.[binding.id]?.column}
|
| 145 |
+
ref={(el) => {
|
| 146 |
+
columnsRef.current[binding.id] = el;
|
| 147 |
+
}}
|
| 148 |
+
onChange={() => onChange(JSON.stringify({ map: getMap() }))}
|
| 149 |
+
>
|
| 150 |
+
<option key="" value="" />
|
| 151 |
+
{dfs[v.map?.[binding.id]?.df]?.map((col: string) => (
|
| 152 |
+
<option key={col} value={col}>
|
| 153 |
+
{col}
|
| 154 |
+
</option>
|
| 155 |
+
))}
|
| 156 |
+
</select>
|
| 157 |
+
)}
|
| 158 |
+
</td>
|
| 159 |
+
</tr>
|
| 160 |
+
))
|
| 161 |
+
) : (
|
| 162 |
+
<tr>
|
| 163 |
+
<td>no bindings</td>
|
| 164 |
+
</tr>
|
| 165 |
+
)}
|
| 166 |
+
</tbody>
|
| 167 |
+
</table>
|
| 168 |
+
);
|
| 169 |
+
}
|
lynxkite-app/web/src/workspace/nodes/NodeParameter.tsx
CHANGED
|
@@ -1,10 +1,12 @@
|
|
| 1 |
-
import
|
| 2 |
// @ts-ignore
|
| 3 |
import ArrowsHorizontal from "~icons/tabler/arrows-horizontal.jsx";
|
| 4 |
// @ts-ignore
|
| 5 |
import Help from "~icons/tabler/question-mark.jsx";
|
| 6 |
import Tooltip from "../../Tooltip";
|
|
|
|
| 7 |
import NodeGroupParameter from "./NodeGroupParameter";
|
|
|
|
| 8 |
|
| 9 |
const BOOLEAN = "<class 'bool'>";
|
| 10 |
const MODEL_TRAINING_INPUT_MAPPING =
|
|
@@ -27,178 +29,6 @@ function ParamName({ name, doc }: { name: string; doc: string }) {
|
|
| 27 |
);
|
| 28 |
}
|
| 29 |
|
| 30 |
-
function Input({
|
| 31 |
-
value,
|
| 32 |
-
onChange,
|
| 33 |
-
inputRef,
|
| 34 |
-
}: {
|
| 35 |
-
value: string;
|
| 36 |
-
onChange: (value: string, options?: { delay: number }) => void;
|
| 37 |
-
inputRef?: React.Ref<HTMLInputElement>;
|
| 38 |
-
}) {
|
| 39 |
-
return (
|
| 40 |
-
<input
|
| 41 |
-
className="input input-bordered w-full"
|
| 42 |
-
ref={inputRef}
|
| 43 |
-
value={value ?? ""}
|
| 44 |
-
onChange={(evt) => onChange(evt.currentTarget.value, { delay: 2 })}
|
| 45 |
-
onBlur={(evt) => onChange(evt.currentTarget.value, { delay: 0 })}
|
| 46 |
-
onKeyDown={(evt) => evt.code === "Enter" && onChange(evt.currentTarget.value, { delay: 0 })}
|
| 47 |
-
/>
|
| 48 |
-
);
|
| 49 |
-
}
|
| 50 |
-
|
| 51 |
-
type Bindings = {
|
| 52 |
-
[key: string]: {
|
| 53 |
-
df: string;
|
| 54 |
-
column: string;
|
| 55 |
-
};
|
| 56 |
-
};
|
| 57 |
-
|
| 58 |
-
function getModelBindings(
|
| 59 |
-
data: any,
|
| 60 |
-
variant: "training input" | "inference input" | "output",
|
| 61 |
-
): string[] {
|
| 62 |
-
function bindingsOfModel(m: any): string[] {
|
| 63 |
-
switch (variant) {
|
| 64 |
-
case "training input":
|
| 65 |
-
return [...m.inputs, ...m.loss_inputs.filter((i: string) => !m.outputs.includes(i))];
|
| 66 |
-
case "inference input":
|
| 67 |
-
return m.inputs;
|
| 68 |
-
case "output":
|
| 69 |
-
return m.outputs;
|
| 70 |
-
}
|
| 71 |
-
}
|
| 72 |
-
const bindings = new Set<string>();
|
| 73 |
-
const inputs = data?.input_metadata?.value ?? data?.input_metadata ?? [];
|
| 74 |
-
for (const input of inputs) {
|
| 75 |
-
const other = input.other ?? {};
|
| 76 |
-
for (const e of Object.values(other) as any[]) {
|
| 77 |
-
if (e.type === "model") {
|
| 78 |
-
for (const b of bindingsOfModel(e.model)) {
|
| 79 |
-
bindings.add(b);
|
| 80 |
-
}
|
| 81 |
-
}
|
| 82 |
-
}
|
| 83 |
-
}
|
| 84 |
-
const list = [...bindings];
|
| 85 |
-
list.sort();
|
| 86 |
-
return list;
|
| 87 |
-
}
|
| 88 |
-
|
| 89 |
-
function parseJsonOrEmpty(json: string): object {
|
| 90 |
-
try {
|
| 91 |
-
const j = JSON.parse(json);
|
| 92 |
-
if (j !== null && typeof j === "object") {
|
| 93 |
-
return j;
|
| 94 |
-
}
|
| 95 |
-
} catch (e) {}
|
| 96 |
-
return {};
|
| 97 |
-
}
|
| 98 |
-
|
| 99 |
-
function ModelMapping({ value, onChange, data, variant }: any) {
|
| 100 |
-
const dfsRef = useRef({} as { [binding: string]: HTMLSelectElement | null });
|
| 101 |
-
const columnsRef = useRef(
|
| 102 |
-
{} as { [binding: string]: HTMLSelectElement | HTMLInputElement | null },
|
| 103 |
-
);
|
| 104 |
-
const v: any = parseJsonOrEmpty(value);
|
| 105 |
-
v.map ??= {};
|
| 106 |
-
const dfs: { [df: string]: string[] } = {};
|
| 107 |
-
const inputs = data?.input_metadata?.value ?? data?.input_metadata ?? [];
|
| 108 |
-
for (const input of inputs) {
|
| 109 |
-
if (!input.dataframes) continue;
|
| 110 |
-
const dataframes = input.dataframes as {
|
| 111 |
-
[df: string]: { columns: string[] };
|
| 112 |
-
};
|
| 113 |
-
for (const [df, { columns }] of Object.entries(dataframes)) {
|
| 114 |
-
dfs[df] = columns;
|
| 115 |
-
}
|
| 116 |
-
}
|
| 117 |
-
const bindings = getModelBindings(data, variant);
|
| 118 |
-
function getMap() {
|
| 119 |
-
const map: Bindings = {};
|
| 120 |
-
for (const binding of bindings) {
|
| 121 |
-
const df = dfsRef.current[binding]?.value ?? "";
|
| 122 |
-
const column = columnsRef.current[binding]?.value ?? "";
|
| 123 |
-
if (df.length || column.length) {
|
| 124 |
-
map[binding] = { df, column };
|
| 125 |
-
}
|
| 126 |
-
}
|
| 127 |
-
return map;
|
| 128 |
-
}
|
| 129 |
-
return (
|
| 130 |
-
<table className="model-mapping-param">
|
| 131 |
-
<tbody>
|
| 132 |
-
{bindings.length > 0 ? (
|
| 133 |
-
bindings.map((binding: string) => (
|
| 134 |
-
<tr key={binding}>
|
| 135 |
-
<td>{binding}</td>
|
| 136 |
-
<td>
|
| 137 |
-
<ArrowsHorizontal />
|
| 138 |
-
</td>
|
| 139 |
-
<td>
|
| 140 |
-
<select
|
| 141 |
-
className="select select-ghost"
|
| 142 |
-
value={v.map?.[binding]?.df}
|
| 143 |
-
ref={(el) => {
|
| 144 |
-
dfsRef.current[binding] = el;
|
| 145 |
-
}}
|
| 146 |
-
onChange={() => onChange(JSON.stringify({ map: getMap() }))}
|
| 147 |
-
>
|
| 148 |
-
<option key="" value="" />
|
| 149 |
-
{Object.keys(dfs).map((df: string) => (
|
| 150 |
-
<option key={df} value={df}>
|
| 151 |
-
{df}
|
| 152 |
-
</option>
|
| 153 |
-
))}
|
| 154 |
-
</select>
|
| 155 |
-
</td>
|
| 156 |
-
<td>
|
| 157 |
-
{variant === "output" ? (
|
| 158 |
-
<Input
|
| 159 |
-
inputRef={(el) => {
|
| 160 |
-
columnsRef.current[binding] = el;
|
| 161 |
-
}}
|
| 162 |
-
value={v.map?.[binding]?.column}
|
| 163 |
-
onChange={(column, options) => {
|
| 164 |
-
const map = getMap();
|
| 165 |
-
// At this point the <input> has not been updated yet. We use the value from the event.
|
| 166 |
-
const df = dfsRef.current[binding]?.value ?? "";
|
| 167 |
-
map[binding] ??= { df, column };
|
| 168 |
-
map[binding].column = column;
|
| 169 |
-
onChange(JSON.stringify({ map }), options);
|
| 170 |
-
}}
|
| 171 |
-
/>
|
| 172 |
-
) : (
|
| 173 |
-
<select
|
| 174 |
-
className="select select-ghost"
|
| 175 |
-
value={v.map?.[binding]?.column}
|
| 176 |
-
ref={(el) => {
|
| 177 |
-
columnsRef.current[binding] = el;
|
| 178 |
-
}}
|
| 179 |
-
onChange={() => onChange(JSON.stringify({ map: getMap() }))}
|
| 180 |
-
>
|
| 181 |
-
<option key="" value="" />
|
| 182 |
-
{dfs[v.map?.[binding]?.df]?.map((col: string) => (
|
| 183 |
-
<option key={col} value={col}>
|
| 184 |
-
{col}
|
| 185 |
-
</option>
|
| 186 |
-
))}
|
| 187 |
-
</select>
|
| 188 |
-
)}
|
| 189 |
-
</td>
|
| 190 |
-
</tr>
|
| 191 |
-
))
|
| 192 |
-
) : (
|
| 193 |
-
<tr>
|
| 194 |
-
<td>no bindings</td>
|
| 195 |
-
</tr>
|
| 196 |
-
)}
|
| 197 |
-
</tbody>
|
| 198 |
-
</table>
|
| 199 |
-
);
|
| 200 |
-
}
|
| 201 |
-
|
| 202 |
interface NodeParameterProps {
|
| 203 |
name: string;
|
| 204 |
value: any;
|
|
@@ -226,22 +56,32 @@ export default function NodeParameter({ name, value, meta, data, setParam }: Nod
|
|
| 226 |
function onChange(value: any, opts?: UpdateOptions) {
|
| 227 |
setParam(meta.name, value, opts || {});
|
| 228 |
}
|
| 229 |
-
return meta?.type?.format === "
|
| 230 |
-
<label className="param">
|
| 231 |
-
<ParamName name={name} doc={doc} />
|
| 232 |
-
<button className="collapsed-param">⋯</button>
|
| 233 |
-
</label>
|
| 234 |
-
) : meta?.type?.format === "textarea" ? (
|
| 235 |
<label className="param">
|
| 236 |
<ParamName name={name} doc={doc} />
|
| 237 |
<textarea
|
| 238 |
className="textarea textarea-bordered w-full"
|
| 239 |
-
rows={
|
| 240 |
-
value={value
|
| 241 |
onChange={(evt) => onChange(evt.currentTarget.value, { delay: 2 })}
|
| 242 |
onBlur={(evt) => onChange(evt.currentTarget.value, { delay: 0 })}
|
| 243 |
/>
|
| 244 |
</label>
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 245 |
) : meta?.type === "group" ? (
|
| 246 |
<NodeGroupParameter meta={meta} data={data} setParam={setParam} />
|
| 247 |
) : meta?.type?.enum ? (
|
|
@@ -289,7 +129,19 @@ export default function NodeParameter({ name, value, meta, data, setParam }: Nod
|
|
| 289 |
) : (
|
| 290 |
<label className="param">
|
| 291 |
<ParamName name={name} doc={doc} />
|
| 292 |
-
<
|
| 293 |
</label>
|
| 294 |
);
|
| 295 |
}
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
import jmespath from "jmespath";
|
| 2 |
// @ts-ignore
|
| 3 |
import ArrowsHorizontal from "~icons/tabler/arrows-horizontal.jsx";
|
| 4 |
// @ts-ignore
|
| 5 |
import Help from "~icons/tabler/question-mark.jsx";
|
| 6 |
import Tooltip from "../../Tooltip";
|
| 7 |
+
import ModelMapping from "./ModelMappingParameter";
|
| 8 |
import NodeGroupParameter from "./NodeGroupParameter";
|
| 9 |
+
import ParameterInput from "./ParameterInput";
|
| 10 |
|
| 11 |
const BOOLEAN = "<class 'bool'>";
|
| 12 |
const MODEL_TRAINING_INPUT_MAPPING =
|
|
|
|
| 29 |
);
|
| 30 |
}
|
| 31 |
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 32 |
interface NodeParameterProps {
|
| 33 |
name: string;
|
| 34 |
value: any;
|
|
|
|
| 56 |
function onChange(value: any, opts?: UpdateOptions) {
|
| 57 |
setParam(meta.name, value, opts || {});
|
| 58 |
}
|
| 59 |
+
return meta?.type?.format === "textarea" ? (
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 60 |
<label className="param">
|
| 61 |
<ParamName name={name} doc={doc} />
|
| 62 |
<textarea
|
| 63 |
className="textarea textarea-bordered w-full"
|
| 64 |
+
rows={(value ?? "").split("\n").length}
|
| 65 |
+
value={value ?? ""}
|
| 66 |
onChange={(evt) => onChange(evt.currentTarget.value, { delay: 2 })}
|
| 67 |
onBlur={(evt) => onChange(evt.currentTarget.value, { delay: 0 })}
|
| 68 |
/>
|
| 69 |
</label>
|
| 70 |
+
) : meta?.type?.format === "dropdown" ? (
|
| 71 |
+
<label className="param">
|
| 72 |
+
<ParamName name={name} doc={doc} />
|
| 73 |
+
<select
|
| 74 |
+
className="select select-bordered w-full"
|
| 75 |
+
value={value ?? ""}
|
| 76 |
+
onChange={(evt) => onChange(evt.currentTarget.value)}
|
| 77 |
+
>
|
| 78 |
+
{getDropDownValues(data, meta).map((option: string) => (
|
| 79 |
+
<option key={option} value={option}>
|
| 80 |
+
{option}
|
| 81 |
+
</option>
|
| 82 |
+
))}
|
| 83 |
+
</select>
|
| 84 |
+
</label>
|
| 85 |
) : meta?.type === "group" ? (
|
| 86 |
<NodeGroupParameter meta={meta} data={data} setParam={setParam} />
|
| 87 |
) : meta?.type?.enum ? (
|
|
|
|
| 129 |
) : (
|
| 130 |
<label className="param">
|
| 131 |
<ParamName name={name} doc={doc} />
|
| 132 |
+
<ParameterInput value={value} onChange={onChange} />
|
| 133 |
</label>
|
| 134 |
);
|
| 135 |
}
|
| 136 |
+
|
| 137 |
+
function getDropDownValues(data: any, meta: any): string[] {
|
| 138 |
+
const metadata = data.input_metadata.value;
|
| 139 |
+
let query = meta.type.metadata_query;
|
| 140 |
+
// Substitute parameters in the query.
|
| 141 |
+
for (const p in data.params) {
|
| 142 |
+
query = query.replace(`<${p}>`, data.params[p]);
|
| 143 |
+
}
|
| 144 |
+
const res = ["", ...jmespath.search(metadata, query)];
|
| 145 |
+
res.sort();
|
| 146 |
+
return res;
|
| 147 |
+
}
|
lynxkite-app/web/src/workspace/nodes/NodeWithTableView.tsx
CHANGED
|
@@ -54,8 +54,8 @@ function NodeWithTableView(props: any) {
|
|
| 54 |
<dl key={`${name}-dl`}>
|
| 55 |
{df.columns.map((c: string, i: number) => (
|
| 56 |
<React.Fragment key={`${name}-${c}`}>
|
| 57 |
-
<dt>{c}</dt>
|
| 58 |
-
<dd>
|
| 59 |
<Markdown>{toMD(df.data[0][i])}</Markdown>
|
| 60 |
</dd>
|
| 61 |
</React.Fragment>
|
|
|
|
| 54 |
<dl key={`${name}-dl`}>
|
| 55 |
{df.columns.map((c: string, i: number) => (
|
| 56 |
<React.Fragment key={`${name}-${c}`}>
|
| 57 |
+
{df.columns.length > 1 && <dt>{c}</dt>}
|
| 58 |
+
<dd className="prose">
|
| 59 |
<Markdown>{toMD(df.data[0][i])}</Markdown>
|
| 60 |
</dd>
|
| 61 |
</React.Fragment>
|
lynxkite-app/web/src/workspace/nodes/ParameterInput.tsx
ADDED
|
@@ -0,0 +1,20 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
export default function ParameterInput({
|
| 2 |
+
value,
|
| 3 |
+
onChange,
|
| 4 |
+
inputRef,
|
| 5 |
+
}: {
|
| 6 |
+
value: string;
|
| 7 |
+
onChange: (value: string, options?: { delay: number }) => void;
|
| 8 |
+
inputRef?: React.Ref<HTMLInputElement>;
|
| 9 |
+
}) {
|
| 10 |
+
return (
|
| 11 |
+
<input
|
| 12 |
+
className="input input-bordered w-full"
|
| 13 |
+
ref={inputRef}
|
| 14 |
+
value={value ?? ""}
|
| 15 |
+
onChange={(evt) => onChange(evt.currentTarget.value, { delay: 2 })}
|
| 16 |
+
onBlur={(evt) => onChange(evt.currentTarget.value, { delay: 0 })}
|
| 17 |
+
onKeyDown={(evt) => evt.code === "Enter" && onChange(evt.currentTarget.value, { delay: 0 })}
|
| 18 |
+
/>
|
| 19 |
+
);
|
| 20 |
+
}
|
lynxkite-app/web/tests/basic.spec.ts
CHANGED
|
@@ -21,7 +21,10 @@ test("Box creation & deletion per env", async () => {
|
|
| 21 |
const envs = await workspace.getEnvs();
|
| 22 |
for (const env of envs) {
|
| 23 |
await workspace.setEnv(env);
|
| 24 |
-
|
|
|
|
|
|
|
|
|
|
| 25 |
expect(catalog).not.toHaveLength(0);
|
| 26 |
const op = catalog[0];
|
| 27 |
await workspace.addBox(op);
|
|
@@ -32,9 +35,9 @@ test("Box creation & deletion per env", async () => {
|
|
| 32 |
});
|
| 33 |
|
| 34 |
test("Delete multi-handle boxes", async () => {
|
| 35 |
-
await workspace.addBox("
|
| 36 |
-
await workspace.deleteBoxes(["
|
| 37 |
-
await expect(workspace.getBox("
|
| 38 |
});
|
| 39 |
|
| 40 |
test("Drag box", async () => {
|
|
|
|
| 21 |
const envs = await workspace.getEnvs();
|
| 22 |
for (const env of envs) {
|
| 23 |
await workspace.setEnv(env);
|
| 24 |
+
// Op categories don't have a finished UI yet. I just skip NetworkX ops for now.
|
| 25 |
+
const catalog = (await workspace.getCatalog()).filter(
|
| 26 |
+
(box) => box !== "Comment" && !box.includes("NetworkX"),
|
| 27 |
+
);
|
| 28 |
expect(catalog).not.toHaveLength(0);
|
| 29 |
const op = catalog[0];
|
| 30 |
await workspace.addBox(op);
|
|
|
|
| 35 |
});
|
| 36 |
|
| 37 |
test("Delete multi-handle boxes", async () => {
|
| 38 |
+
await workspace.addBox("NetworkX › Algorithms › Link analysis › PageRank alg › PageRank");
|
| 39 |
+
await workspace.deleteBoxes(["PageRank 1"]);
|
| 40 |
+
await expect(workspace.getBox("PageRank 1")).not.toBeVisible();
|
| 41 |
});
|
| 42 |
|
| 43 |
test("Drag box", async () => {
|
lynxkite-app/web/tests/errors.spec.ts
CHANGED
|
@@ -20,8 +20,8 @@ test.afterEach(async () => {
|
|
| 20 |
test("missing parameter", async () => {
|
| 21 |
// Test the correct error message is displayed when a required parameter is missing,
|
| 22 |
// and that the error message is removed when the parameter is filled.
|
| 23 |
-
await workspace.addBox("
|
| 24 |
-
const graphBox = workspace.getBox("
|
| 25 |
await expect(graphBox.locator(".error")).toHaveText("n is unset.");
|
| 26 |
await graphBox.getByLabel("n", { exact: true }).fill("10");
|
| 27 |
await expect(graphBox.locator(".error")).not.toBeVisible();
|
|
@@ -30,11 +30,11 @@ test("missing parameter", async () => {
|
|
| 30 |
test("unknown operation", async () => {
|
| 31 |
// Test that the correct error is displayed when the operation does not belong to
|
| 32 |
// the current environment.
|
| 33 |
-
await workspace.addBox("
|
| 34 |
-
const graphBox = workspace.getBox("
|
| 35 |
await graphBox.getByLabel("n", { exact: true }).fill("10");
|
| 36 |
await workspace.setEnv("Pillow");
|
| 37 |
-
const csvBox = workspace.getBox("
|
| 38 |
await expect(csvBox.locator(".error")).toHaveText("Unknown operation.");
|
| 39 |
await workspace.setEnv("LynxKite Graph Analytics");
|
| 40 |
await expect(csvBox.locator(".error")).not.toBeVisible();
|
|
|
|
| 20 |
test("missing parameter", async () => {
|
| 21 |
// Test the correct error message is displayed when a required parameter is missing,
|
| 22 |
// and that the error message is removed when the parameter is filled.
|
| 23 |
+
await workspace.addBox("NetworkX › Generators › Directed › Scale-free graph");
|
| 24 |
+
const graphBox = workspace.getBox("Scale-free graph 1");
|
| 25 |
await expect(graphBox.locator(".error")).toHaveText("n is unset.");
|
| 26 |
await graphBox.getByLabel("n", { exact: true }).fill("10");
|
| 27 |
await expect(graphBox.locator(".error")).not.toBeVisible();
|
|
|
|
| 30 |
test("unknown operation", async () => {
|
| 31 |
// Test that the correct error is displayed when the operation does not belong to
|
| 32 |
// the current environment.
|
| 33 |
+
await workspace.addBox("NetworkX › Generators › Directed › Scale-free graph");
|
| 34 |
+
const graphBox = workspace.getBox("Scale-free graph 1");
|
| 35 |
await graphBox.getByLabel("n", { exact: true }).fill("10");
|
| 36 |
await workspace.setEnv("Pillow");
|
| 37 |
+
const csvBox = workspace.getBox("Scale-free graph 1");
|
| 38 |
await expect(csvBox.locator(".error")).toHaveText("Unknown operation.");
|
| 39 |
await workspace.setEnv("LynxKite Graph Analytics");
|
| 40 |
await expect(csvBox.locator(".error")).not.toBeVisible();
|
lynxkite-app/web/tests/examples.spec.ts
CHANGED
|
@@ -2,13 +2,7 @@
|
|
| 2 |
import { expect, test } from "@playwright/test";
|
| 3 |
import { Workspace } from "./lynxkite";
|
| 4 |
|
| 5 |
-
const WORKSPACES = [
|
| 6 |
-
"Airlines demo",
|
| 7 |
-
"Bio Cypher demo",
|
| 8 |
-
"Image processing",
|
| 9 |
-
"NetworkX demo",
|
| 10 |
-
"Model use",
|
| 11 |
-
];
|
| 12 |
|
| 13 |
for (const name of WORKSPACES) {
|
| 14 |
test(name, async ({ page }) => {
|
|
@@ -17,3 +11,18 @@ for (const name of WORKSPACES) {
|
|
| 17 |
await ws.expectErrorFree();
|
| 18 |
});
|
| 19 |
}
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 2 |
import { expect, test } from "@playwright/test";
|
| 3 |
import { Workspace } from "./lynxkite";
|
| 4 |
|
| 5 |
+
const WORKSPACES = ["Airlines demo", "Bio Cypher demo", "Image processing", "NetworkX demo"];
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 6 |
|
| 7 |
for (const name of WORKSPACES) {
|
| 8 |
test(name, async ({ page }) => {
|
|
|
|
| 11 |
await ws.expectErrorFree();
|
| 12 |
});
|
| 13 |
}
|
| 14 |
+
|
| 15 |
+
test("Model use", async ({ page }) => {
|
| 16 |
+
const ws = await Workspace.open(page, "Model use");
|
| 17 |
+
await ws.execute({ timeout: 30000 }); // Actually trains the model.
|
| 18 |
+
await ws.expectErrorFree();
|
| 19 |
+
let b = ws.boxByTitle("Train/test split");
|
| 20 |
+
await b.expectParameterOptions("table name", ["", "df"]);
|
| 21 |
+
b = ws.boxByTitle("Train model");
|
| 22 |
+
await b.expectParameterOptions("model name", ["", "model"]);
|
| 23 |
+
b = ws.boxByTitle("View vectors");
|
| 24 |
+
await b.locator.locator(".params-expander").click();
|
| 25 |
+
await b.expectParameterOptions("table name", ["", "df", "df_test", "df_train", "training"]);
|
| 26 |
+
await b.expectParameterOptions("vector column", ["", "index", "pred", "x", "y"]);
|
| 27 |
+
await b.expectParameterOptions("label column", ["", "index", "pred", "x", "y"]);
|
| 28 |
+
});
|
lynxkite-app/web/tests/graph_creation.spec.ts
CHANGED
|
@@ -6,10 +6,10 @@ let workspace: Workspace;
|
|
| 6 |
|
| 7 |
test.beforeEach(async ({ browser }) => {
|
| 8 |
workspace = await Workspace.empty(await browser.newPage(), "graph_creation_spec_test");
|
| 9 |
-
await workspace.addBox("
|
| 10 |
-
await workspace.getBox("
|
| 11 |
-
await workspace.addBox("
|
| 12 |
-
await workspace.connectBoxes("
|
| 13 |
});
|
| 14 |
|
| 15 |
test.afterEach(async () => {
|
|
@@ -22,7 +22,7 @@ test.afterEach(async () => {
|
|
| 22 |
});
|
| 23 |
|
| 24 |
test("Tables are displayed in the Graph creation box", async () => {
|
| 25 |
-
const graphBox = await workspace.getBox("
|
| 26 |
const nodesTableHeader = graphBox.locator(".graph-tables .df-head", {
|
| 27 |
hasText: "nodes",
|
| 28 |
});
|
|
@@ -42,7 +42,7 @@ test("Tables are displayed in the Graph creation box", async () => {
|
|
| 42 |
});
|
| 43 |
|
| 44 |
test("Adding and removing relationships", async () => {
|
| 45 |
-
const graphBox = await workspace.getBox("
|
| 46 |
const addRelationshipButton = await graphBox.locator(".add-relationship-button");
|
| 47 |
await addRelationshipButton.click();
|
| 48 |
const formData: Record<string, string> = {
|
|
@@ -64,7 +64,7 @@ test("Adding and removing relationships", async () => {
|
|
| 64 |
await graphBox.locator(".submit-relationship-button").click();
|
| 65 |
// check that the relationship has been saved in the backend
|
| 66 |
await workspace.page.reload();
|
| 67 |
-
const graphBoxAfterReload = await workspace.getBox("
|
| 68 |
const relationHeader = await graphBoxAfterReload.locator(".graph-relations .df-head", {
|
| 69 |
hasText: "relation_1",
|
| 70 |
});
|
|
@@ -76,7 +76,7 @@ test("Adding and removing relationships", async () => {
|
|
| 76 |
test("Output of the box is a bundle", async () => {
|
| 77 |
await workspace.addBox("View tables");
|
| 78 |
const tableView = await workspace.getBox("View tables 1");
|
| 79 |
-
await workspace.connectBoxes("
|
| 80 |
const nodesTableHeader = await tableView.locator(".df-head", {
|
| 81 |
hasText: "nodes",
|
| 82 |
});
|
|
|
|
| 6 |
|
| 7 |
test.beforeEach(async ({ browser }) => {
|
| 8 |
workspace = await Workspace.empty(await browser.newPage(), "graph_creation_spec_test");
|
| 9 |
+
await workspace.addBox("NetworkX › Generators › Directed › Scale-free graph");
|
| 10 |
+
await workspace.getBox("Scale-free graph 1").getByLabel("n", { exact: true }).fill("10");
|
| 11 |
+
await workspace.addBox("Organize");
|
| 12 |
+
await workspace.connectBoxes("Scale-free graph 1", "Organize 1");
|
| 13 |
});
|
| 14 |
|
| 15 |
test.afterEach(async () => {
|
|
|
|
| 22 |
});
|
| 23 |
|
| 24 |
test("Tables are displayed in the Graph creation box", async () => {
|
| 25 |
+
const graphBox = await workspace.getBox("Organize 1");
|
| 26 |
const nodesTableHeader = graphBox.locator(".graph-tables .df-head", {
|
| 27 |
hasText: "nodes",
|
| 28 |
});
|
|
|
|
| 42 |
});
|
| 43 |
|
| 44 |
test("Adding and removing relationships", async () => {
|
| 45 |
+
const graphBox = await workspace.getBox("Organize 1");
|
| 46 |
const addRelationshipButton = await graphBox.locator(".add-relationship-button");
|
| 47 |
await addRelationshipButton.click();
|
| 48 |
const formData: Record<string, string> = {
|
|
|
|
| 64 |
await graphBox.locator(".submit-relationship-button").click();
|
| 65 |
// check that the relationship has been saved in the backend
|
| 66 |
await workspace.page.reload();
|
| 67 |
+
const graphBoxAfterReload = await workspace.getBox("Organize 1");
|
| 68 |
const relationHeader = await graphBoxAfterReload.locator(".graph-relations .df-head", {
|
| 69 |
hasText: "relation_1",
|
| 70 |
});
|
|
|
|
| 76 |
test("Output of the box is a bundle", async () => {
|
| 77 |
await workspace.addBox("View tables");
|
| 78 |
const tableView = await workspace.getBox("View tables 1");
|
| 79 |
+
await workspace.connectBoxes("Organize 1", "View tables 1");
|
| 80 |
const nodesTableHeader = await tableView.locator(".df-head", {
|
| 81 |
hasText: "nodes",
|
| 82 |
});
|
lynxkite-app/web/tests/lynxkite.ts
CHANGED
|
@@ -91,6 +91,10 @@ export class Workspace {
|
|
| 91 |
getBox(boxId: string) {
|
| 92 |
return this.page.locator(`[data-id="${boxId}"]`);
|
| 93 |
}
|
|
|
|
|
|
|
|
|
|
|
|
|
| 94 |
|
| 95 |
getBoxes() {
|
| 96 |
return this.page.locator(".react-flow__node");
|
|
@@ -150,8 +154,8 @@ export class Workspace {
|
|
| 150 |
}
|
| 151 |
}
|
| 152 |
|
| 153 |
-
async execute() {
|
| 154 |
-
const request = this.page.waitForResponse(/api[/]execute_workspace
|
| 155 |
await this.page.keyboard.press("r");
|
| 156 |
await request;
|
| 157 |
}
|
|
@@ -165,6 +169,21 @@ export class Workspace {
|
|
| 165 |
}
|
| 166 |
}
|
| 167 |
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 168 |
export class Splash {
|
| 169 |
page: Page;
|
| 170 |
root: Locator;
|
|
|
|
| 91 |
getBox(boxId: string) {
|
| 92 |
return this.page.locator(`[data-id="${boxId}"]`);
|
| 93 |
}
|
| 94 |
+
boxByTitle(title: string): Box {
|
| 95 |
+
const titleLocator = this.page.getByText(title, { exact: true });
|
| 96 |
+
return new Box(this.page, titleLocator.locator("../.."));
|
| 97 |
+
}
|
| 98 |
|
| 99 |
getBoxes() {
|
| 100 |
return this.page.locator(".react-flow__node");
|
|
|
|
| 154 |
}
|
| 155 |
}
|
| 156 |
|
| 157 |
+
async execute(opts?) {
|
| 158 |
+
const request = this.page.waitForResponse(/api[/]execute_workspace/, opts);
|
| 159 |
await this.page.keyboard.press("r");
|
| 160 |
await request;
|
| 161 |
}
|
|
|
|
| 169 |
}
|
| 170 |
}
|
| 171 |
|
| 172 |
+
export class Box {
|
| 173 |
+
constructor(
|
| 174 |
+
readonly page: Page,
|
| 175 |
+
readonly locator: Locator,
|
| 176 |
+
) {}
|
| 177 |
+
getParameter(name: string) {
|
| 178 |
+
return this.locator.getByLabel(name);
|
| 179 |
+
}
|
| 180 |
+
async expectParameterOptions(parameter: string, options: string[]) {
|
| 181 |
+
const param = this.getParameter(parameter);
|
| 182 |
+
const optionsLocator = param.locator("option");
|
| 183 |
+
await expect(optionsLocator).toHaveText(options);
|
| 184 |
+
}
|
| 185 |
+
}
|
| 186 |
+
|
| 187 |
export class Splash {
|
| 188 |
page: Page;
|
| 189 |
root: Locator;
|
lynxkite-core/pyproject.toml
CHANGED
|
@@ -5,16 +5,21 @@ description = "A lightweight dependency for authoring LynxKite operations and ex
|
|
| 5 |
readme = "README.md"
|
| 6 |
requires-python = ">=3.11"
|
| 7 |
dependencies = [
|
|
|
|
| 8 |
]
|
| 9 |
classifiers = ["Private :: Do Not Upload"]
|
| 10 |
|
| 11 |
[project.urls]
|
| 12 |
Homepage = "https://github.com/lynxkite/lynxkite-2000/"
|
| 13 |
|
| 14 |
-
[
|
| 15 |
-
|
| 16 |
-
|
| 17 |
-
|
|
|
|
|
|
|
|
|
|
| 18 |
|
| 19 |
-
[tool.
|
| 20 |
-
|
|
|
|
|
|
| 5 |
readme = "README.md"
|
| 6 |
requires-python = ">=3.11"
|
| 7 |
dependencies = [
|
| 8 |
+
"pydantic>=2.11.7",
|
| 9 |
]
|
| 10 |
classifiers = ["Private :: Do Not Upload"]
|
| 11 |
|
| 12 |
[project.urls]
|
| 13 |
Homepage = "https://github.com/lynxkite/lynxkite-2000/"
|
| 14 |
|
| 15 |
+
[tool.deptry.per_rule_ignores]
|
| 16 |
+
DEP001 = ["matplotlib", "griffe", "pycrdt"]
|
| 17 |
+
DEP003 = ["matplotlib", "griffe", "pycrdt"]
|
| 18 |
+
|
| 19 |
+
[build-system]
|
| 20 |
+
requires = ["setuptools", "wheel", "setuptools-scm"]
|
| 21 |
+
build-backend = "setuptools.build_meta"
|
| 22 |
|
| 23 |
+
[tool.setuptools.packages.find]
|
| 24 |
+
namespaces = true
|
| 25 |
+
where = ["src"]
|
lynxkite-core/src/lynxkite/core/executors/one_by_one.py
CHANGED
|
@@ -4,9 +4,6 @@ A LynxKite executor that assumes most operations operate on their input one by o
|
|
| 4 |
|
| 5 |
from .. import ops
|
| 6 |
from .. import workspace
|
| 7 |
-
import orjson
|
| 8 |
-
import pandas as pd
|
| 9 |
-
import pydantic
|
| 10 |
import traceback
|
| 11 |
import inspect
|
| 12 |
import typing
|
|
@@ -35,9 +32,6 @@ def _has_ctx(op):
|
|
| 35 |
return "_ctx" in sig.parameters
|
| 36 |
|
| 37 |
|
| 38 |
-
CACHES = {}
|
| 39 |
-
|
| 40 |
-
|
| 41 |
def register(env: str, cache: bool = True):
|
| 42 |
"""Registers the one-by-one executor.
|
| 43 |
|
|
@@ -46,12 +40,7 @@ def register(env: str, cache: bool = True):
|
|
| 46 |
from lynxkite.core.executors import one_by_one
|
| 47 |
one_by_one.register("My Environment")
|
| 48 |
"""
|
| 49 |
-
|
| 50 |
-
CACHES[env] = {}
|
| 51 |
-
cache = CACHES[env]
|
| 52 |
-
else:
|
| 53 |
-
cache = None
|
| 54 |
-
ops.EXECUTORS[env] = lambda ws: _execute(ws, ops.CATALOGS[env], cache=cache)
|
| 55 |
|
| 56 |
|
| 57 |
def _get_stages(ws, catalog: ops.Catalog):
|
|
@@ -64,7 +53,7 @@ def _get_stages(ws, catalog: ops.Catalog):
|
|
| 64 |
for edge in ws.edges:
|
| 65 |
inputs.setdefault(edge.target, []).append(edge.source)
|
| 66 |
node = nodes[edge.target]
|
| 67 |
-
op = catalog[node.data.
|
| 68 |
if op.get_input(edge.targetHandle).position.is_vertical():
|
| 69 |
batch_inputs.setdefault(edge.target, []).append(edge.source)
|
| 70 |
stages = []
|
|
@@ -83,26 +72,13 @@ def _get_stages(ws, catalog: ops.Catalog):
|
|
| 83 |
return stages
|
| 84 |
|
| 85 |
|
| 86 |
-
def _default_serializer(obj):
|
| 87 |
-
if isinstance(obj, pydantic.BaseModel):
|
| 88 |
-
return obj.dict()
|
| 89 |
-
return {"__nonserializable__": id(obj)}
|
| 90 |
-
|
| 91 |
-
|
| 92 |
-
def _make_cache_key(obj):
|
| 93 |
-
return orjson.dumps(obj, default=_default_serializer)
|
| 94 |
-
|
| 95 |
-
|
| 96 |
-
EXECUTOR_OUTPUT_CACHE = {}
|
| 97 |
-
|
| 98 |
-
|
| 99 |
async def _await_if_needed(obj):
|
| 100 |
if inspect.isawaitable(obj):
|
| 101 |
return await obj
|
| 102 |
return obj
|
| 103 |
|
| 104 |
|
| 105 |
-
async def _execute(ws: workspace.Workspace, catalog: ops.Catalog
|
| 106 |
nodes = {n.id: n for n in ws.nodes}
|
| 107 |
contexts = {n.id: Context(node=n) for n in ws.nodes}
|
| 108 |
edges = {n.id: [] for n in ws.nodes}
|
|
@@ -111,9 +87,9 @@ async def _execute(ws: workspace.Workspace, catalog: ops.Catalog, cache=None):
|
|
| 111 |
tasks = {}
|
| 112 |
NO_INPUT = object() # Marker for initial tasks.
|
| 113 |
for node in ws.nodes:
|
| 114 |
-
op = catalog.get(node.data.
|
| 115 |
if op is None:
|
| 116 |
-
node.publish_error(f'Operation "{node.data.
|
| 117 |
continue
|
| 118 |
node.publish_error(None)
|
| 119 |
# Start tasks for nodes that have no non-batch inputs.
|
|
@@ -130,7 +106,7 @@ async def _execute(ws: workspace.Workspace, catalog: ops.Catalog, cache=None):
|
|
| 130 |
next_stage.setdefault(n, []).extend(ts)
|
| 131 |
continue
|
| 132 |
node = nodes[n]
|
| 133 |
-
op = catalog[node.data.
|
| 134 |
params = {**node.data.params}
|
| 135 |
if _has_ctx(op):
|
| 136 |
params["_ctx"] = contexts[node.id]
|
|
@@ -155,15 +131,7 @@ async def _execute(ws: workspace.Workspace, catalog: ops.Catalog, cache=None):
|
|
| 155 |
if missing:
|
| 156 |
node.publish_error(f"Missing input: {', '.join(missing)}")
|
| 157 |
break
|
| 158 |
-
|
| 159 |
-
key = _make_cache_key((inputs, params))
|
| 160 |
-
if key not in cache:
|
| 161 |
-
result: ops.Result = op(*inputs, **params)
|
| 162 |
-
result.output = await _await_if_needed(result.output)
|
| 163 |
-
cache[key] = result
|
| 164 |
-
result = cache[key]
|
| 165 |
-
else:
|
| 166 |
-
result = op(*inputs, **params)
|
| 167 |
output = await _await_if_needed(result.output)
|
| 168 |
except Exception as e:
|
| 169 |
traceback.print_exc()
|
|
@@ -171,7 +139,7 @@ async def _execute(ws: workspace.Workspace, catalog: ops.Catalog, cache=None):
|
|
| 171 |
break
|
| 172 |
contexts[node.id].last_result = output
|
| 173 |
# Returned lists and DataFrames are considered multiple tasks.
|
| 174 |
-
if
|
| 175 |
output = _df_to_list(output)
|
| 176 |
elif not isinstance(output, list):
|
| 177 |
output = [output]
|
|
@@ -181,7 +149,7 @@ async def _execute(ws: workspace.Workspace, catalog: ops.Catalog, cache=None):
|
|
| 181 |
result.display = await _await_if_needed(result.display)
|
| 182 |
for edge in edges[node.id]:
|
| 183 |
t = nodes[edge.target]
|
| 184 |
-
op = catalog[t.data.
|
| 185 |
if op.get_input(edge.targetHandle).position.is_vertical():
|
| 186 |
batch_inputs.setdefault((edge.target, edge.targetHandle), []).extend(
|
| 187 |
results
|
|
|
|
| 4 |
|
| 5 |
from .. import ops
|
| 6 |
from .. import workspace
|
|
|
|
|
|
|
|
|
|
| 7 |
import traceback
|
| 8 |
import inspect
|
| 9 |
import typing
|
|
|
|
| 32 |
return "_ctx" in sig.parameters
|
| 33 |
|
| 34 |
|
|
|
|
|
|
|
|
|
|
| 35 |
def register(env: str, cache: bool = True):
|
| 36 |
"""Registers the one-by-one executor.
|
| 37 |
|
|
|
|
| 40 |
from lynxkite.core.executors import one_by_one
|
| 41 |
one_by_one.register("My Environment")
|
| 42 |
"""
|
| 43 |
+
ops.EXECUTORS[env] = lambda ws: _execute(ws, ops.CATALOGS[env])
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 44 |
|
| 45 |
|
| 46 |
def _get_stages(ws, catalog: ops.Catalog):
|
|
|
|
| 53 |
for edge in ws.edges:
|
| 54 |
inputs.setdefault(edge.target, []).append(edge.source)
|
| 55 |
node = nodes[edge.target]
|
| 56 |
+
op = catalog[node.data.op_id]
|
| 57 |
if op.get_input(edge.targetHandle).position.is_vertical():
|
| 58 |
batch_inputs.setdefault(edge.target, []).append(edge.source)
|
| 59 |
stages = []
|
|
|
|
| 72 |
return stages
|
| 73 |
|
| 74 |
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 75 |
async def _await_if_needed(obj):
|
| 76 |
if inspect.isawaitable(obj):
|
| 77 |
return await obj
|
| 78 |
return obj
|
| 79 |
|
| 80 |
|
| 81 |
+
async def _execute(ws: workspace.Workspace, catalog: ops.Catalog):
|
| 82 |
nodes = {n.id: n for n in ws.nodes}
|
| 83 |
contexts = {n.id: Context(node=n) for n in ws.nodes}
|
| 84 |
edges = {n.id: [] for n in ws.nodes}
|
|
|
|
| 87 |
tasks = {}
|
| 88 |
NO_INPUT = object() # Marker for initial tasks.
|
| 89 |
for node in ws.nodes:
|
| 90 |
+
op = catalog.get(node.data.op_id)
|
| 91 |
if op is None:
|
| 92 |
+
node.publish_error(f'Operation "{node.data.op_id}" not found.')
|
| 93 |
continue
|
| 94 |
node.publish_error(None)
|
| 95 |
# Start tasks for nodes that have no non-batch inputs.
|
|
|
|
| 106 |
next_stage.setdefault(n, []).extend(ts)
|
| 107 |
continue
|
| 108 |
node = nodes[n]
|
| 109 |
+
op = catalog[node.data.op_id]
|
| 110 |
params = {**node.data.params}
|
| 111 |
if _has_ctx(op):
|
| 112 |
params["_ctx"] = contexts[node.id]
|
|
|
|
| 131 |
if missing:
|
| 132 |
node.publish_error(f"Missing input: {', '.join(missing)}")
|
| 133 |
break
|
| 134 |
+
result = op(*inputs, **params)
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 135 |
output = await _await_if_needed(result.output)
|
| 136 |
except Exception as e:
|
| 137 |
traceback.print_exc()
|
|
|
|
| 139 |
break
|
| 140 |
contexts[node.id].last_result = output
|
| 141 |
# Returned lists and DataFrames are considered multiple tasks.
|
| 142 |
+
if hasattr(output, "to_dict"):
|
| 143 |
output = _df_to_list(output)
|
| 144 |
elif not isinstance(output, list):
|
| 145 |
output = [output]
|
|
|
|
| 149 |
result.display = await _await_if_needed(result.display)
|
| 150 |
for edge in edges[node.id]:
|
| 151 |
t = nodes[edge.target]
|
| 152 |
+
op = catalog[t.data.op_id]
|
| 153 |
if op.get_input(edge.targetHandle).position.is_vertical():
|
| 154 |
batch_inputs.setdefault((edge.target, edge.targetHandle), []).extend(
|
| 155 |
results
|
lynxkite-core/src/lynxkite/core/executors/simple.py
CHANGED
|
@@ -37,7 +37,7 @@ async def execute(ws: workspace.Workspace, catalog: ops.Catalog):
|
|
| 37 |
ts = graphlib.TopologicalSorter(dependencies)
|
| 38 |
for node_id in ts.static_order():
|
| 39 |
node = nodes[node_id]
|
| 40 |
-
op = catalog[node.data.
|
| 41 |
params = {**node.data.params}
|
| 42 |
node.publish_started()
|
| 43 |
try:
|
|
|
|
| 37 |
ts = graphlib.TopologicalSorter(dependencies)
|
| 38 |
for node_id in ts.static_order():
|
| 39 |
node = nodes[node_id]
|
| 40 |
+
op = catalog[node.data.op_id]
|
| 41 |
params = {**node.data.params}
|
| 42 |
node.publish_started()
|
| 43 |
try:
|
lynxkite-core/src/lynxkite/core/ops.py
CHANGED
|
@@ -6,7 +6,7 @@ import asyncio
|
|
| 6 |
import enum
|
| 7 |
import functools
|
| 8 |
import json
|
| 9 |
-
import importlib
|
| 10 |
import inspect
|
| 11 |
import pathlib
|
| 12 |
import subprocess
|
|
@@ -15,9 +15,7 @@ import types
|
|
| 15 |
import typing
|
| 16 |
from dataclasses import dataclass
|
| 17 |
|
| 18 |
-
import joblib
|
| 19 |
import pydantic
|
| 20 |
-
from typing_extensions import Annotated
|
| 21 |
|
| 22 |
if typing.TYPE_CHECKING:
|
| 23 |
from . import workspace
|
|
@@ -26,10 +24,17 @@ Catalog = dict[str, "Op"]
|
|
| 26 |
Catalogs = dict[str, Catalog]
|
| 27 |
CATALOGS: Catalogs = {}
|
| 28 |
EXECUTORS = {}
|
| 29 |
-
mem = joblib.Memory(".joblib-cache")
|
| 30 |
|
| 31 |
typeof = type # We have some arguments called "type".
|
| 32 |
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 33 |
|
| 34 |
def type_to_json(t):
|
| 35 |
if isinstance(t, type) and issubclass(t, enum.Enum):
|
|
@@ -39,13 +44,10 @@ def type_to_json(t):
|
|
| 39 |
return {"type": str(t)}
|
| 40 |
|
| 41 |
|
| 42 |
-
Type = Annotated[typing.Any, pydantic.PlainSerializer(type_to_json, return_type=dict)]
|
| 43 |
-
LongStr = Annotated[str, {"format": "textarea"}]
|
| 44 |
"""LongStr is a string type for parameters that will be displayed as a multiline text area in the UI."""
|
| 45 |
-
PathStr = Annotated[str, {"format": "path"}]
|
| 46 |
-
CollapsedStr = Annotated[str, {"format": "collapsed"}]
|
| 47 |
-
NodeAttribute = Annotated[str, {"format": "node attribute"}]
|
| 48 |
-
EdgeAttribute = Annotated[str, {"format": "edge attribute"}]
|
| 49 |
# https://github.com/python/typing/issues/182#issuecomment-1320974824
|
| 50 |
ReadOnlyJSON: typing.TypeAlias = (
|
| 51 |
typing.Mapping[str, "ReadOnlyJSON"]
|
|
@@ -76,10 +78,6 @@ class Parameter(BaseConfig):
|
|
| 76 |
e = enum.Enum(f"OptionsFor_{name}", options)
|
| 77 |
return Parameter.basic(name, default or options[0], e)
|
| 78 |
|
| 79 |
-
@staticmethod
|
| 80 |
-
def collapsed(name, default, type=None):
|
| 81 |
-
return Parameter.basic(name, default, CollapsedStr)
|
| 82 |
-
|
| 83 |
@staticmethod
|
| 84 |
def basic(name, default=None, type=None):
|
| 85 |
if default is inspect._empty:
|
|
@@ -110,17 +108,31 @@ class Position(str, enum.Enum):
|
|
| 110 |
def is_vertical(self):
|
| 111 |
return self in (self.TOP, self.BOTTOM)
|
| 112 |
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 113 |
|
| 114 |
class Input(BaseConfig):
|
| 115 |
name: str
|
| 116 |
type: Type
|
| 117 |
-
position: Position
|
| 118 |
|
| 119 |
|
| 120 |
class Output(BaseConfig):
|
| 121 |
name: str
|
| 122 |
type: Type
|
| 123 |
-
position: Position
|
| 124 |
|
| 125 |
|
| 126 |
@dataclass
|
|
@@ -139,17 +151,6 @@ class Result:
|
|
| 139 |
input_metadata: ReadOnlyJSON | None = None
|
| 140 |
|
| 141 |
|
| 142 |
-
MULTI_INPUT = Input(name="multi", type="*")
|
| 143 |
-
|
| 144 |
-
|
| 145 |
-
def basic_inputs(*names):
|
| 146 |
-
return {name: Input(name=name, type=None) for name in names}
|
| 147 |
-
|
| 148 |
-
|
| 149 |
-
def basic_outputs(*names):
|
| 150 |
-
return {name: Output(name=name, type=None) for name in names}
|
| 151 |
-
|
| 152 |
-
|
| 153 |
def get_optional_type(type):
|
| 154 |
"""For a type like `int | None`, returns `int`. Returns `None` otherwise."""
|
| 155 |
if isinstance(type, types.UnionType):
|
|
@@ -184,6 +185,7 @@ def _param_to_type(name, value, type):
|
|
| 184 |
|
| 185 |
class Op(BaseConfig):
|
| 186 |
func: typing.Callable = pydantic.Field(exclude=True)
|
|
|
|
| 187 |
name: str
|
| 188 |
params: list[Parameter | ParameterGroup]
|
| 189 |
inputs: list[Input]
|
|
@@ -192,6 +194,8 @@ class Op(BaseConfig):
|
|
| 192 |
type: str = "basic" # The UI to use for this operation.
|
| 193 |
color: str = "orange" # The color of the operation in the UI.
|
| 194 |
doc: object = None
|
|
|
|
|
|
|
| 195 |
|
| 196 |
def __call__(self, *inputs, **params):
|
| 197 |
# Convert parameters.
|
|
@@ -234,18 +238,44 @@ class Op(BaseConfig):
|
|
| 234 |
res[p.name] = _param_to_type(p.name, params[p.name], p.type)
|
| 235 |
return res
|
| 236 |
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 237 |
|
| 238 |
def op(
|
| 239 |
env: str,
|
| 240 |
-
|
| 241 |
-
|
| 242 |
-
|
| 243 |
-
|
| 244 |
-
|
| 245 |
-
|
| 246 |
-
|
|
|
|
| 247 |
):
|
| 248 |
-
"""
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 249 |
|
| 250 |
def decorator(func):
|
| 251 |
doc = parse_doc(func)
|
|
@@ -256,10 +286,12 @@ def op(
|
|
| 256 |
func = matplotlib_to_image(func)
|
| 257 |
if slow:
|
| 258 |
func = make_async(func)
|
| 259 |
-
|
|
|
|
| 260 |
# Positional arguments are inputs.
|
|
|
|
| 261 |
inputs = [
|
| 262 |
-
Input(name=name, type=param.annotation)
|
| 263 |
for name, param in sig.parameters.items()
|
| 264 |
if param.kind not in (param.KEYWORD_ONLY, param.VAR_KEYWORD)
|
| 265 |
]
|
|
@@ -270,13 +302,14 @@ def op(
|
|
| 270 |
if params:
|
| 271 |
_params.extend(params)
|
| 272 |
if outputs is not None:
|
| 273 |
-
_outputs = [Output(name=name, type=None) for name in outputs]
|
| 274 |
else:
|
| 275 |
-
_outputs = [Output(name="output", type=None)] if view == "basic" else []
|
| 276 |
op = Op(
|
| 277 |
func=func,
|
| 278 |
doc=doc,
|
| 279 |
name=name,
|
|
|
|
| 280 |
params=_params,
|
| 281 |
inputs=inputs,
|
| 282 |
outputs=_outputs,
|
|
@@ -284,7 +317,7 @@ def op(
|
|
| 284 |
color=color or "orange",
|
| 285 |
)
|
| 286 |
CATALOGS.setdefault(env, {})
|
| 287 |
-
CATALOGS[env][
|
| 288 |
func.__op__ = op
|
| 289 |
return func
|
| 290 |
|
|
@@ -362,24 +395,34 @@ def no_op(*args, **kwargs):
|
|
| 362 |
return None
|
| 363 |
|
| 364 |
|
| 365 |
-
def register_passive_op(
|
|
|
|
|
|
|
| 366 |
"""A passive operation has no associated code."""
|
|
|
|
|
|
|
| 367 |
op = Op(
|
| 368 |
func=no_op,
|
| 369 |
name=name,
|
|
|
|
| 370 |
params=params,
|
| 371 |
-
inputs=[
|
| 372 |
-
|
|
|
|
|
|
|
|
|
|
|
|
|
| 373 |
**kwargs,
|
| 374 |
)
|
| 375 |
CATALOGS.setdefault(env, {})
|
| 376 |
-
CATALOGS[env][
|
| 377 |
return op
|
| 378 |
|
| 379 |
|
| 380 |
COMMENT_OP = Op(
|
| 381 |
func=no_op,
|
| 382 |
name="Comment",
|
|
|
|
| 383 |
params=[Parameter.basic("text", "", LongStr)],
|
| 384 |
inputs=[],
|
| 385 |
outputs=[],
|
|
@@ -403,14 +446,14 @@ def register_executor(env: str):
|
|
| 403 |
return decorator
|
| 404 |
|
| 405 |
|
| 406 |
-
def op_registration(env: str):
|
| 407 |
"""Returns a decorator that can be used for registering functions as operations."""
|
| 408 |
-
return functools.partial(op, env)
|
| 409 |
|
| 410 |
|
| 411 |
-
def passive_op_registration(env: str):
|
| 412 |
"""Returns a function that can be used to register operations without associated code."""
|
| 413 |
-
return functools.partial(register_passive_op, env)
|
| 414 |
|
| 415 |
|
| 416 |
def make_async(func):
|
|
@@ -475,6 +518,7 @@ def install_requirements(req: pathlib.Path):
|
|
| 475 |
|
| 476 |
def run_user_script(script_path: pathlib.Path):
|
| 477 |
spec = importlib.util.spec_from_file_location(script_path.stem, str(script_path))
|
|
|
|
| 478 |
module = importlib.util.module_from_spec(spec)
|
| 479 |
spec.loader.exec_module(module)
|
| 480 |
|
|
@@ -508,7 +552,7 @@ def _get_griffe_function(func):
|
|
| 508 |
if param.annotation is inspect.Parameter.empty:
|
| 509 |
annotation = None
|
| 510 |
else:
|
| 511 |
-
annotation = param.annotation.
|
| 512 |
parameters.append(
|
| 513 |
griffe.Parameter(
|
| 514 |
name,
|
|
|
|
| 6 |
import enum
|
| 7 |
import functools
|
| 8 |
import json
|
| 9 |
+
import importlib.util
|
| 10 |
import inspect
|
| 11 |
import pathlib
|
| 12 |
import subprocess
|
|
|
|
| 15 |
import typing
|
| 16 |
from dataclasses import dataclass
|
| 17 |
|
|
|
|
| 18 |
import pydantic
|
|
|
|
| 19 |
|
| 20 |
if typing.TYPE_CHECKING:
|
| 21 |
from . import workspace
|
|
|
|
| 24 |
Catalogs = dict[str, Catalog]
|
| 25 |
CATALOGS: Catalogs = {}
|
| 26 |
EXECUTORS = {}
|
|
|
|
| 27 |
|
| 28 |
typeof = type # We have some arguments called "type".
|
| 29 |
|
| 30 |
+
CACHE_WRAPPER = None # Overwrite this to configure a caching mechanism.
|
| 31 |
+
|
| 32 |
+
|
| 33 |
+
def _cache_wrap(func):
|
| 34 |
+
if CACHE_WRAPPER is None:
|
| 35 |
+
return func
|
| 36 |
+
return CACHE_WRAPPER(func)
|
| 37 |
+
|
| 38 |
|
| 39 |
def type_to_json(t):
|
| 40 |
if isinstance(t, type) and issubclass(t, enum.Enum):
|
|
|
|
| 44 |
return {"type": str(t)}
|
| 45 |
|
| 46 |
|
| 47 |
+
Type = typing.Annotated[typing.Any, pydantic.PlainSerializer(type_to_json, return_type=dict)]
|
| 48 |
+
LongStr = typing.Annotated[str, {"format": "textarea"}]
|
| 49 |
"""LongStr is a string type for parameters that will be displayed as a multiline text area in the UI."""
|
| 50 |
+
PathStr = typing.Annotated[str, {"format": "path"}]
|
|
|
|
|
|
|
|
|
|
| 51 |
# https://github.com/python/typing/issues/182#issuecomment-1320974824
|
| 52 |
ReadOnlyJSON: typing.TypeAlias = (
|
| 53 |
typing.Mapping[str, "ReadOnlyJSON"]
|
|
|
|
| 78 |
e = enum.Enum(f"OptionsFor_{name}", options)
|
| 79 |
return Parameter.basic(name, default or options[0], e)
|
| 80 |
|
|
|
|
|
|
|
|
|
|
|
|
|
| 81 |
@staticmethod
|
| 82 |
def basic(name, default=None, type=None):
|
| 83 |
if default is inspect._empty:
|
|
|
|
| 108 |
def is_vertical(self):
|
| 109 |
return self in (self.TOP, self.BOTTOM)
|
| 110 |
|
| 111 |
+
@staticmethod
|
| 112 |
+
def from_dir(dir: str) -> tuple[Position, Position]:
|
| 113 |
+
"""Returns the input and output positions based on the direction."""
|
| 114 |
+
if dir == "left-to-right":
|
| 115 |
+
return Position.LEFT, Position.RIGHT
|
| 116 |
+
elif dir == "right-to-left":
|
| 117 |
+
return Position.RIGHT, Position.LEFT
|
| 118 |
+
elif dir == "top-to-bottom":
|
| 119 |
+
return Position.TOP, Position.BOTTOM
|
| 120 |
+
elif dir == "bottom-to-top":
|
| 121 |
+
return Position.BOTTOM, Position.TOP
|
| 122 |
+
else:
|
| 123 |
+
raise ValueError(f"Invalid direction: {dir}")
|
| 124 |
+
|
| 125 |
|
| 126 |
class Input(BaseConfig):
|
| 127 |
name: str
|
| 128 |
type: Type
|
| 129 |
+
position: Position
|
| 130 |
|
| 131 |
|
| 132 |
class Output(BaseConfig):
|
| 133 |
name: str
|
| 134 |
type: Type
|
| 135 |
+
position: Position
|
| 136 |
|
| 137 |
|
| 138 |
@dataclass
|
|
|
|
| 151 |
input_metadata: ReadOnlyJSON | None = None
|
| 152 |
|
| 153 |
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 154 |
def get_optional_type(type):
|
| 155 |
"""For a type like `int | None`, returns `int`. Returns `None` otherwise."""
|
| 156 |
if isinstance(type, types.UnionType):
|
|
|
|
| 185 |
|
| 186 |
class Op(BaseConfig):
|
| 187 |
func: typing.Callable = pydantic.Field(exclude=True)
|
| 188 |
+
categories: list[str]
|
| 189 |
name: str
|
| 190 |
params: list[Parameter | ParameterGroup]
|
| 191 |
inputs: list[Input]
|
|
|
|
| 194 |
type: str = "basic" # The UI to use for this operation.
|
| 195 |
color: str = "orange" # The color of the operation in the UI.
|
| 196 |
doc: object = None
|
| 197 |
+
# ID is automatically set from the name and categories.
|
| 198 |
+
id: str = pydantic.Field(default=None)
|
| 199 |
|
| 200 |
def __call__(self, *inputs, **params):
|
| 201 |
# Convert parameters.
|
|
|
|
| 238 |
res[p.name] = _param_to_type(p.name, params[p.name], p.type)
|
| 239 |
return res
|
| 240 |
|
| 241 |
+
@pydantic.model_validator(mode="after")
|
| 242 |
+
def compute_id(self):
|
| 243 |
+
self.id = " > ".join(self.categories + [self.name])
|
| 244 |
+
return self
|
| 245 |
+
|
| 246 |
|
| 247 |
def op(
|
| 248 |
env: str,
|
| 249 |
+
*names: str,
|
| 250 |
+
view: str = "basic",
|
| 251 |
+
outputs: list[str] | None = None,
|
| 252 |
+
params: list[Parameter] | None = None,
|
| 253 |
+
slow: bool = False,
|
| 254 |
+
color: str | None = None,
|
| 255 |
+
cache: bool | None = None,
|
| 256 |
+
dir: str = "left-to-right",
|
| 257 |
):
|
| 258 |
+
"""
|
| 259 |
+
Decorator for defining an operation.
|
| 260 |
+
|
| 261 |
+
Parameters:
|
| 262 |
+
env: The environment (workspace type) to which the operation belongs.
|
| 263 |
+
names: The list of categories this operation belongs to, followed by the name of the operation.
|
| 264 |
+
view: How the operation will be displayed in the UI. One of "basic", "visualization",
|
| 265 |
+
"table_view", "graph_creation_view", "image", "molecule", "matplotlib".
|
| 266 |
+
outputs: A list of output names. If not provided, defaults to ["output"] for "basic" view.
|
| 267 |
+
params: Normally the parameters are taken from the function signature.
|
| 268 |
+
Use "params" to override this.
|
| 269 |
+
slow: If True, the operation results will be cached.
|
| 270 |
+
If the function is not async, it will be run in a separate thread.
|
| 271 |
+
color: The color of the operation in the UI. Defaults to "orange".
|
| 272 |
+
cache: Set to False to disable caching for a slow operation.
|
| 273 |
+
You may need this for slow operations with parameters/outputs that can't be serialized.
|
| 274 |
+
dir: Sets the default input and output positions. The default is "left-to-right", meaning
|
| 275 |
+
inputs are on the left and outputs are on the right. Other options are "right-to-left",
|
| 276 |
+
"top-to-bottom", and "bottom-to-top".
|
| 277 |
+
"""
|
| 278 |
+
[*categories, name] = names
|
| 279 |
|
| 280 |
def decorator(func):
|
| 281 |
doc = parse_doc(func)
|
|
|
|
| 286 |
func = matplotlib_to_image(func)
|
| 287 |
if slow:
|
| 288 |
func = make_async(func)
|
| 289 |
+
if cache is not False:
|
| 290 |
+
func = _cache_wrap(func)
|
| 291 |
# Positional arguments are inputs.
|
| 292 |
+
ipos, opos = Position.from_dir(dir)
|
| 293 |
inputs = [
|
| 294 |
+
Input(name=name, type=param.annotation, position=ipos)
|
| 295 |
for name, param in sig.parameters.items()
|
| 296 |
if param.kind not in (param.KEYWORD_ONLY, param.VAR_KEYWORD)
|
| 297 |
]
|
|
|
|
| 302 |
if params:
|
| 303 |
_params.extend(params)
|
| 304 |
if outputs is not None:
|
| 305 |
+
_outputs = [Output(name=name, type=None, position=opos) for name in outputs]
|
| 306 |
else:
|
| 307 |
+
_outputs = [Output(name="output", type=None, position=opos)] if view == "basic" else []
|
| 308 |
op = Op(
|
| 309 |
func=func,
|
| 310 |
doc=doc,
|
| 311 |
name=name,
|
| 312 |
+
categories=categories,
|
| 313 |
params=_params,
|
| 314 |
inputs=inputs,
|
| 315 |
outputs=_outputs,
|
|
|
|
| 317 |
color=color or "orange",
|
| 318 |
)
|
| 319 |
CATALOGS.setdefault(env, {})
|
| 320 |
+
CATALOGS[env][op.id] = op
|
| 321 |
func.__op__ = op
|
| 322 |
return func
|
| 323 |
|
|
|
|
| 395 |
return None
|
| 396 |
|
| 397 |
|
| 398 |
+
def register_passive_op(
|
| 399 |
+
env: str, *names: str, inputs=[], outputs=["output"], params=[], dir="left-to-right", **kwargs
|
| 400 |
+
):
|
| 401 |
"""A passive operation has no associated code."""
|
| 402 |
+
ipos, opos = Position.from_dir(dir)
|
| 403 |
+
[*categories, name] = names
|
| 404 |
op = Op(
|
| 405 |
func=no_op,
|
| 406 |
name=name,
|
| 407 |
+
categories=categories,
|
| 408 |
params=params,
|
| 409 |
+
inputs=[
|
| 410 |
+
Input(name=i, type=None, position=ipos) if isinstance(i, str) else i for i in inputs
|
| 411 |
+
],
|
| 412 |
+
outputs=[
|
| 413 |
+
Output(name=o, type=None, position=opos) if isinstance(o, str) else o for o in outputs
|
| 414 |
+
],
|
| 415 |
**kwargs,
|
| 416 |
)
|
| 417 |
CATALOGS.setdefault(env, {})
|
| 418 |
+
CATALOGS[env][op.id] = op
|
| 419 |
return op
|
| 420 |
|
| 421 |
|
| 422 |
COMMENT_OP = Op(
|
| 423 |
func=no_op,
|
| 424 |
name="Comment",
|
| 425 |
+
categories=[],
|
| 426 |
params=[Parameter.basic("text", "", LongStr)],
|
| 427 |
inputs=[],
|
| 428 |
outputs=[],
|
|
|
|
| 446 |
return decorator
|
| 447 |
|
| 448 |
|
| 449 |
+
def op_registration(env: str, *categories: str, **kwargs):
|
| 450 |
"""Returns a decorator that can be used for registering functions as operations."""
|
| 451 |
+
return functools.partial(op, env, *categories, **kwargs)
|
| 452 |
|
| 453 |
|
| 454 |
+
def passive_op_registration(env: str, *categories: str, **kwargs):
|
| 455 |
"""Returns a function that can be used to register operations without associated code."""
|
| 456 |
+
return functools.partial(register_passive_op, env, *categories, **kwargs)
|
| 457 |
|
| 458 |
|
| 459 |
def make_async(func):
|
|
|
|
| 518 |
|
| 519 |
def run_user_script(script_path: pathlib.Path):
|
| 520 |
spec = importlib.util.spec_from_file_location(script_path.stem, str(script_path))
|
| 521 |
+
assert spec
|
| 522 |
module = importlib.util.module_from_spec(spec)
|
| 523 |
spec.loader.exec_module(module)
|
| 524 |
|
|
|
|
| 552 |
if param.annotation is inspect.Parameter.empty:
|
| 553 |
annotation = None
|
| 554 |
else:
|
| 555 |
+
annotation = getattr(param.annotation, "__name__", str(param.annotation))
|
| 556 |
parameters.append(
|
| 557 |
griffe.Parameter(
|
| 558 |
name,
|
lynxkite-core/src/lynxkite/core/workspace.py
CHANGED
|
@@ -1,15 +1,18 @@
|
|
| 1 |
"""For working with LynxKite workspaces."""
|
| 2 |
|
| 3 |
import json
|
| 4 |
-
from typing import Optional
|
| 5 |
import dataclasses
|
| 6 |
import enum
|
| 7 |
import os
|
| 8 |
-
import pycrdt
|
| 9 |
import pydantic
|
| 10 |
import tempfile
|
| 11 |
from . import ops
|
| 12 |
|
|
|
|
|
|
|
|
|
|
|
|
|
| 13 |
|
| 14 |
class BaseConfig(pydantic.BaseModel):
|
| 15 |
model_config = pydantic.ConfigDict(
|
|
@@ -30,13 +33,28 @@ class NodeStatus(str, enum.Enum):
|
|
| 30 |
|
| 31 |
class WorkspaceNodeData(BaseConfig):
|
| 32 |
title: str
|
|
|
|
| 33 |
params: dict
|
| 34 |
display: Optional[object] = None
|
| 35 |
input_metadata: Optional[object] = None
|
| 36 |
error: Optional[str] = None
|
| 37 |
status: NodeStatus = NodeStatus.done
|
| 38 |
-
|
| 39 |
-
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 40 |
|
| 41 |
|
| 42 |
class WorkspaceNode(BaseConfig):
|
|
@@ -47,13 +65,13 @@ class WorkspaceNode(BaseConfig):
|
|
| 47 |
position: Position
|
| 48 |
width: Optional[float] = None
|
| 49 |
height: Optional[float] = None
|
| 50 |
-
_crdt: pycrdt.Map
|
| 51 |
|
| 52 |
def publish_started(self):
|
| 53 |
"""Notifies the frontend that work has started on this node."""
|
| 54 |
self.data.error = None
|
| 55 |
self.data.status = NodeStatus.active
|
| 56 |
-
if
|
| 57 |
with self._crdt.doc.transaction():
|
| 58 |
self._crdt["data"]["error"] = None
|
| 59 |
self._crdt["data"]["status"] = NodeStatus.active
|
|
@@ -64,7 +82,7 @@ class WorkspaceNode(BaseConfig):
|
|
| 64 |
self.data.input_metadata = result.input_metadata
|
| 65 |
self.data.error = result.error
|
| 66 |
self.data.status = NodeStatus.done
|
| 67 |
-
if
|
| 68 |
with self._crdt.doc.transaction():
|
| 69 |
try:
|
| 70 |
self._crdt["data"]["status"] = NodeStatus.done
|
|
@@ -100,13 +118,13 @@ class Workspace(BaseConfig):
|
|
| 100 |
env: str = ""
|
| 101 |
nodes: list[WorkspaceNode] = dataclasses.field(default_factory=list)
|
| 102 |
edges: list[WorkspaceEdge] = dataclasses.field(default_factory=list)
|
| 103 |
-
_crdt: pycrdt.Map
|
| 104 |
|
| 105 |
def normalize(self):
|
| 106 |
if self.env not in ops.CATALOGS:
|
| 107 |
return self
|
| 108 |
catalog = ops.CATALOGS[self.env]
|
| 109 |
-
_ops = {n.id: catalog[n.data.
|
| 110 |
valid_targets = set()
|
| 111 |
valid_sources = set()
|
| 112 |
for n in self.nodes:
|
|
@@ -128,10 +146,16 @@ class Workspace(BaseConfig):
|
|
| 128 |
async def execute(self):
|
| 129 |
return await ops.EXECUTORS[self.env](self)
|
| 130 |
|
| 131 |
-
def
|
| 132 |
-
"""
|
|
|
|
| 133 |
j = self.model_dump()
|
| 134 |
j = json.dumps(j, indent=2, sort_keys=True) + "\n"
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 135 |
dirname, basename = os.path.split(path)
|
| 136 |
if dirname:
|
| 137 |
os.makedirs(dirname, exist_ok=True)
|
|
@@ -174,28 +198,30 @@ class Workspace(BaseConfig):
|
|
| 174 |
catalog = ops.CATALOGS[self.env]
|
| 175 |
for node in self.nodes:
|
| 176 |
data = node.data
|
| 177 |
-
op = catalog.get(data.
|
| 178 |
if op:
|
| 179 |
if getattr(data, "meta", None) != op:
|
| 180 |
data.meta = op
|
| 181 |
# If the node is connected to a CRDT, update that too.
|
| 182 |
-
if
|
| 183 |
node._crdt["data"]["meta"] = op.model_dump()
|
| 184 |
if node.type != op.type:
|
| 185 |
node.type = op.type
|
| 186 |
-
if
|
| 187 |
node._crdt["type"] = op.type
|
| 188 |
if data.error == "Unknown operation.":
|
| 189 |
data.error = None
|
| 190 |
-
if
|
| 191 |
node._crdt["data"]["error"] = None
|
| 192 |
else:
|
| 193 |
data.error = "Unknown operation."
|
| 194 |
-
if
|
| 195 |
node._crdt["data"]["meta"] = {}
|
| 196 |
node._crdt["data"]["error"] = "Unknown operation."
|
| 197 |
|
| 198 |
-
def connect_crdt(self, ws_crdt: pycrdt.Map):
|
|
|
|
|
|
|
| 199 |
self._crdt = ws_crdt
|
| 200 |
with ws_crdt.doc.transaction():
|
| 201 |
for nc, np in zip(ws_crdt["nodes"], self.nodes):
|
|
@@ -208,13 +234,19 @@ class Workspace(BaseConfig):
|
|
| 208 |
random_string = os.urandom(4).hex()
|
| 209 |
if func:
|
| 210 |
kwargs["type"] = func.__op__.type
|
| 211 |
-
kwargs["data"] = WorkspaceNodeData(
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 212 |
kwargs.setdefault("type", "basic")
|
| 213 |
kwargs.setdefault("id", f"{kwargs['data'].title} {random_string}")
|
| 214 |
kwargs.setdefault("position", Position(x=0, y=0))
|
| 215 |
kwargs.setdefault("width", 100)
|
| 216 |
kwargs.setdefault("height", 100)
|
| 217 |
-
node = WorkspaceNode(**kwargs)
|
| 218 |
self.nodes.append(node)
|
| 219 |
return node
|
| 220 |
|
|
|
|
| 1 |
"""For working with LynxKite workspaces."""
|
| 2 |
|
| 3 |
import json
|
| 4 |
+
from typing import Optional, TYPE_CHECKING
|
| 5 |
import dataclasses
|
| 6 |
import enum
|
| 7 |
import os
|
|
|
|
| 8 |
import pydantic
|
| 9 |
import tempfile
|
| 10 |
from . import ops
|
| 11 |
|
| 12 |
+
if TYPE_CHECKING:
|
| 13 |
+
import pycrdt
|
| 14 |
+
from lynxkite.core import ops
|
| 15 |
+
|
| 16 |
|
| 17 |
class BaseConfig(pydantic.BaseModel):
|
| 18 |
model_config = pydantic.ConfigDict(
|
|
|
|
| 33 |
|
| 34 |
class WorkspaceNodeData(BaseConfig):
|
| 35 |
title: str
|
| 36 |
+
op_id: str
|
| 37 |
params: dict
|
| 38 |
display: Optional[object] = None
|
| 39 |
input_metadata: Optional[object] = None
|
| 40 |
error: Optional[str] = None
|
| 41 |
status: NodeStatus = NodeStatus.done
|
| 42 |
+
meta: Optional["ops.Op"] = None
|
| 43 |
+
|
| 44 |
+
@pydantic.model_validator(mode="before")
|
| 45 |
+
@classmethod
|
| 46 |
+
def fill_op_id_if_missing(cls, data: dict) -> dict:
|
| 47 |
+
"""Compatibility with old workspaces that don't have op_id."""
|
| 48 |
+
if "op_id" not in data:
|
| 49 |
+
data["op_id"] = data["title"]
|
| 50 |
+
return data
|
| 51 |
+
|
| 52 |
+
@pydantic.model_validator(mode="before")
|
| 53 |
+
@classmethod
|
| 54 |
+
def ignore_meta(cls, data: dict) -> dict:
|
| 55 |
+
"""Metadata is never loaded. We will use fresh metadata."""
|
| 56 |
+
data["meta"] = None
|
| 57 |
+
return data
|
| 58 |
|
| 59 |
|
| 60 |
class WorkspaceNode(BaseConfig):
|
|
|
|
| 65 |
position: Position
|
| 66 |
width: Optional[float] = None
|
| 67 |
height: Optional[float] = None
|
| 68 |
+
_crdt: Optional["pycrdt.Map"] = None
|
| 69 |
|
| 70 |
def publish_started(self):
|
| 71 |
"""Notifies the frontend that work has started on this node."""
|
| 72 |
self.data.error = None
|
| 73 |
self.data.status = NodeStatus.active
|
| 74 |
+
if self._crdt and "data" in self._crdt:
|
| 75 |
with self._crdt.doc.transaction():
|
| 76 |
self._crdt["data"]["error"] = None
|
| 77 |
self._crdt["data"]["status"] = NodeStatus.active
|
|
|
|
| 82 |
self.data.input_metadata = result.input_metadata
|
| 83 |
self.data.error = result.error
|
| 84 |
self.data.status = NodeStatus.done
|
| 85 |
+
if self._crdt and "data" in self._crdt:
|
| 86 |
with self._crdt.doc.transaction():
|
| 87 |
try:
|
| 88 |
self._crdt["data"]["status"] = NodeStatus.done
|
|
|
|
| 118 |
env: str = ""
|
| 119 |
nodes: list[WorkspaceNode] = dataclasses.field(default_factory=list)
|
| 120 |
edges: list[WorkspaceEdge] = dataclasses.field(default_factory=list)
|
| 121 |
+
_crdt: Optional["pycrdt.Map"] = None
|
| 122 |
|
| 123 |
def normalize(self):
|
| 124 |
if self.env not in ops.CATALOGS:
|
| 125 |
return self
|
| 126 |
catalog = ops.CATALOGS[self.env]
|
| 127 |
+
_ops = {n.id: catalog[n.data.op_id] for n in self.nodes if n.data.op_id in catalog}
|
| 128 |
valid_targets = set()
|
| 129 |
valid_sources = set()
|
| 130 |
for n in self.nodes:
|
|
|
|
| 146 |
async def execute(self):
|
| 147 |
return await ops.EXECUTORS[self.env](self)
|
| 148 |
|
| 149 |
+
def model_dump_json(self) -> str:
|
| 150 |
+
"""Returns the workspace as JSON."""
|
| 151 |
+
# Pydantic can't sort the keys. TODO: Keep an eye on https://github.com/pydantic/pydantic-core/pull/1637.
|
| 152 |
j = self.model_dump()
|
| 153 |
j = json.dumps(j, indent=2, sort_keys=True) + "\n"
|
| 154 |
+
return j
|
| 155 |
+
|
| 156 |
+
def save(self, path: str):
|
| 157 |
+
"""Persist the workspace to a local file in JSON format."""
|
| 158 |
+
j = self.model_dump_json()
|
| 159 |
dirname, basename = os.path.split(path)
|
| 160 |
if dirname:
|
| 161 |
os.makedirs(dirname, exist_ok=True)
|
|
|
|
| 198 |
catalog = ops.CATALOGS[self.env]
|
| 199 |
for node in self.nodes:
|
| 200 |
data = node.data
|
| 201 |
+
op = catalog.get(data.op_id)
|
| 202 |
if op:
|
| 203 |
if getattr(data, "meta", None) != op:
|
| 204 |
data.meta = op
|
| 205 |
# If the node is connected to a CRDT, update that too.
|
| 206 |
+
if node._crdt:
|
| 207 |
node._crdt["data"]["meta"] = op.model_dump()
|
| 208 |
if node.type != op.type:
|
| 209 |
node.type = op.type
|
| 210 |
+
if node._crdt:
|
| 211 |
node._crdt["type"] = op.type
|
| 212 |
if data.error == "Unknown operation.":
|
| 213 |
data.error = None
|
| 214 |
+
if node._crdt:
|
| 215 |
node._crdt["data"]["error"] = None
|
| 216 |
else:
|
| 217 |
data.error = "Unknown operation."
|
| 218 |
+
if node._crdt:
|
| 219 |
node._crdt["data"]["meta"] = {}
|
| 220 |
node._crdt["data"]["error"] = "Unknown operation."
|
| 221 |
|
| 222 |
+
def connect_crdt(self, ws_crdt: "pycrdt.Map"):
|
| 223 |
+
import pycrdt
|
| 224 |
+
|
| 225 |
self._crdt = ws_crdt
|
| 226 |
with ws_crdt.doc.transaction():
|
| 227 |
for nc, np in zip(ws_crdt["nodes"], self.nodes):
|
|
|
|
| 234 |
random_string = os.urandom(4).hex()
|
| 235 |
if func:
|
| 236 |
kwargs["type"] = func.__op__.type
|
| 237 |
+
kwargs["data"] = WorkspaceNodeData(
|
| 238 |
+
title=func.__op__.name, op_id=func.__op__.id, params={}
|
| 239 |
+
)
|
| 240 |
+
elif "title" in kwargs:
|
| 241 |
+
kwargs["data"] = WorkspaceNodeData(
|
| 242 |
+
title=kwargs["title"], op_id=kwargs["title"], params=kwargs.get("params", {})
|
| 243 |
+
)
|
| 244 |
kwargs.setdefault("type", "basic")
|
| 245 |
kwargs.setdefault("id", f"{kwargs['data'].title} {random_string}")
|
| 246 |
kwargs.setdefault("position", Position(x=0, y=0))
|
| 247 |
kwargs.setdefault("width", 100)
|
| 248 |
kwargs.setdefault("height", 100)
|
| 249 |
+
node = WorkspaceNode(**kwargs) # ty: ignore[missing-argument]
|
| 250 |
self.nodes.append(node)
|
| 251 |
return node
|
| 252 |
|
lynxkite-core/tests/test_one_by_one.py
CHANGED
|
@@ -13,8 +13,8 @@ async def test_optional_inputs():
|
|
| 13 |
return [a + b for a, b in zip(a, b)] if b else a
|
| 14 |
|
| 15 |
assert maybe_add.__op__.inputs == [
|
| 16 |
-
ops.Input(name="a", type=list[int], position=
|
| 17 |
-
ops.Input(name="b", type=list[int] | None, position=
|
| 18 |
]
|
| 19 |
one_by_one.register("test")
|
| 20 |
ws = workspace.Workspace(env="test", nodes=[], edges=[])
|
|
|
|
| 13 |
return [a + b for a, b in zip(a, b)] if b else a
|
| 14 |
|
| 15 |
assert maybe_add.__op__.inputs == [
|
| 16 |
+
ops.Input(name="a", type=list[int], position=ops.Position.BOTTOM),
|
| 17 |
+
ops.Input(name="b", type=list[int] | None, position=ops.Position.BOTTOM),
|
| 18 |
]
|
| 19 |
one_by_one.register("test")
|
| 20 |
ws = workspace.Workspace(env="test", nodes=[], edges=[])
|
lynxkite-core/tests/test_ops.py
CHANGED
|
@@ -4,17 +4,17 @@ import enum
|
|
| 4 |
|
| 5 |
|
| 6 |
def test_op_decorator_no_params_no_types_default_positions():
|
| 7 |
-
@ops.op(
|
| 8 |
def add(a, b):
|
| 9 |
return a + b
|
| 10 |
|
| 11 |
assert add.__op__.name == "add"
|
| 12 |
assert add.__op__.params == []
|
| 13 |
assert add.__op__.inputs == [
|
| 14 |
-
ops.Input(name="a", type=inspect._empty, position=
|
| 15 |
-
ops.Input(name="b", type=inspect._empty, position=
|
| 16 |
]
|
| 17 |
-
assert add.__op__.outputs == [ops.Output(name="result", type=None, position=
|
| 18 |
assert add.__op__.type == "basic"
|
| 19 |
assert ops.CATALOGS["test"]["add"] == add.__op__
|
| 20 |
|
|
@@ -22,33 +22,37 @@ def test_op_decorator_no_params_no_types_default_positions():
|
|
| 22 |
def test_op_decorator_custom_positions():
|
| 23 |
@ops.input_position(a="right", b="top")
|
| 24 |
@ops.output_position(result="bottom")
|
| 25 |
-
@ops.op(
|
| 26 |
def add(a, b):
|
| 27 |
return a + b
|
| 28 |
|
| 29 |
assert add.__op__.name == "add"
|
| 30 |
assert add.__op__.params == []
|
| 31 |
assert add.__op__.inputs == [
|
| 32 |
-
ops.Input(name="a", type=inspect._empty, position=
|
| 33 |
-
ops.Input(name="b", type=inspect._empty, position=
|
|
|
|
|
|
|
|
|
|
| 34 |
]
|
| 35 |
-
assert add.__op__.outputs == [ops.Output(name="result", type=None, position="bottom")]
|
| 36 |
assert add.__op__.type == "basic"
|
| 37 |
assert ops.CATALOGS["test"]["add"] == add.__op__
|
| 38 |
|
| 39 |
|
| 40 |
def test_op_decorator_with_params_and_types_():
|
| 41 |
-
@ops.op(
|
| 42 |
def multiply(a: int, b: float = 2.0, *, param: str = "param"):
|
| 43 |
return a * b
|
| 44 |
|
| 45 |
assert multiply.__op__.name == "multiply"
|
| 46 |
assert multiply.__op__.params == [ops.Parameter(name="param", default="param", type=str)]
|
| 47 |
assert multiply.__op__.inputs == [
|
| 48 |
-
ops.Input(name="a", type=int, position=
|
| 49 |
-
ops.Input(name="b", type=float, position=
|
|
|
|
|
|
|
|
|
|
| 50 |
]
|
| 51 |
-
assert multiply.__op__.outputs == [ops.Output(name="result", type=None, position="right")]
|
| 52 |
assert multiply.__op__.type == "basic"
|
| 53 |
assert ops.CATALOGS["test"]["multiply"] == multiply.__op__
|
| 54 |
|
|
@@ -59,24 +63,26 @@ def test_op_decorator_with_complex_types():
|
|
| 59 |
GREEN = 2
|
| 60 |
BLUE = 3
|
| 61 |
|
| 62 |
-
@ops.op(
|
| 63 |
def complex_op(color: Color, color_list: list[Color], color_dict: dict[str, Color]):
|
| 64 |
return color.name
|
| 65 |
|
| 66 |
assert complex_op.__op__.name == "color_op"
|
| 67 |
assert complex_op.__op__.params == []
|
| 68 |
assert complex_op.__op__.inputs == [
|
| 69 |
-
ops.Input(name="color", type=Color, position=
|
| 70 |
-
ops.Input(name="color_list", type=list[Color], position=
|
| 71 |
-
ops.Input(name="color_dict", type=dict[str, Color], position=
|
| 72 |
]
|
| 73 |
assert complex_op.__op__.type == "basic"
|
| 74 |
-
assert complex_op.__op__.outputs == [
|
|
|
|
|
|
|
| 75 |
assert ops.CATALOGS["test"]["color_op"] == complex_op.__op__
|
| 76 |
|
| 77 |
|
| 78 |
def test_operation_can_return_non_result_instance():
|
| 79 |
-
@ops.op(
|
| 80 |
def subtract(a, b):
|
| 81 |
return a - b
|
| 82 |
|
|
@@ -87,7 +93,7 @@ def test_operation_can_return_non_result_instance():
|
|
| 87 |
|
| 88 |
|
| 89 |
def test_operation_can_return_result_instance():
|
| 90 |
-
@ops.op(
|
| 91 |
def subtract(a, b):
|
| 92 |
return ops.Result(output=a - b, display=None)
|
| 93 |
|
|
@@ -98,7 +104,7 @@ def test_operation_can_return_result_instance():
|
|
| 98 |
|
| 99 |
|
| 100 |
def test_visualization_operations_display_is_populated_automatically():
|
| 101 |
-
@ops.op(
|
| 102 |
def display_op():
|
| 103 |
return {"display_value": 1}
|
| 104 |
|
|
|
|
| 4 |
|
| 5 |
|
| 6 |
def test_op_decorator_no_params_no_types_default_positions():
|
| 7 |
+
@ops.op("test", "add", view="basic", outputs=["result"])
|
| 8 |
def add(a, b):
|
| 9 |
return a + b
|
| 10 |
|
| 11 |
assert add.__op__.name == "add"
|
| 12 |
assert add.__op__.params == []
|
| 13 |
assert add.__op__.inputs == [
|
| 14 |
+
ops.Input(name="a", type=inspect._empty, position=ops.Position.LEFT),
|
| 15 |
+
ops.Input(name="b", type=inspect._empty, position=ops.Position.LEFT),
|
| 16 |
]
|
| 17 |
+
assert add.__op__.outputs == [ops.Output(name="result", type=None, position=ops.Position.RIGHT)]
|
| 18 |
assert add.__op__.type == "basic"
|
| 19 |
assert ops.CATALOGS["test"]["add"] == add.__op__
|
| 20 |
|
|
|
|
| 22 |
def test_op_decorator_custom_positions():
|
| 23 |
@ops.input_position(a="right", b="top")
|
| 24 |
@ops.output_position(result="bottom")
|
| 25 |
+
@ops.op("test", "add", view="basic", outputs=["result"])
|
| 26 |
def add(a, b):
|
| 27 |
return a + b
|
| 28 |
|
| 29 |
assert add.__op__.name == "add"
|
| 30 |
assert add.__op__.params == []
|
| 31 |
assert add.__op__.inputs == [
|
| 32 |
+
ops.Input(name="a", type=inspect._empty, position=ops.Position.RIGHT),
|
| 33 |
+
ops.Input(name="b", type=inspect._empty, position=ops.Position.TOP),
|
| 34 |
+
]
|
| 35 |
+
assert add.__op__.outputs == [
|
| 36 |
+
ops.Output(name="result", type=None, position=ops.Position.BOTTOM)
|
| 37 |
]
|
|
|
|
| 38 |
assert add.__op__.type == "basic"
|
| 39 |
assert ops.CATALOGS["test"]["add"] == add.__op__
|
| 40 |
|
| 41 |
|
| 42 |
def test_op_decorator_with_params_and_types_():
|
| 43 |
+
@ops.op("test", "multiply", view="basic", outputs=["result"])
|
| 44 |
def multiply(a: int, b: float = 2.0, *, param: str = "param"):
|
| 45 |
return a * b
|
| 46 |
|
| 47 |
assert multiply.__op__.name == "multiply"
|
| 48 |
assert multiply.__op__.params == [ops.Parameter(name="param", default="param", type=str)]
|
| 49 |
assert multiply.__op__.inputs == [
|
| 50 |
+
ops.Input(name="a", type=int, position=ops.Position.LEFT),
|
| 51 |
+
ops.Input(name="b", type=float, position=ops.Position.LEFT),
|
| 52 |
+
]
|
| 53 |
+
assert multiply.__op__.outputs == [
|
| 54 |
+
ops.Output(name="result", type=None, position=ops.Position.RIGHT)
|
| 55 |
]
|
|
|
|
| 56 |
assert multiply.__op__.type == "basic"
|
| 57 |
assert ops.CATALOGS["test"]["multiply"] == multiply.__op__
|
| 58 |
|
|
|
|
| 63 |
GREEN = 2
|
| 64 |
BLUE = 3
|
| 65 |
|
| 66 |
+
@ops.op("test", "color_op", view="basic", outputs=["result"])
|
| 67 |
def complex_op(color: Color, color_list: list[Color], color_dict: dict[str, Color]):
|
| 68 |
return color.name
|
| 69 |
|
| 70 |
assert complex_op.__op__.name == "color_op"
|
| 71 |
assert complex_op.__op__.params == []
|
| 72 |
assert complex_op.__op__.inputs == [
|
| 73 |
+
ops.Input(name="color", type=Color, position=ops.Position.LEFT),
|
| 74 |
+
ops.Input(name="color_list", type=list[Color], position=ops.Position.LEFT),
|
| 75 |
+
ops.Input(name="color_dict", type=dict[str, Color], position=ops.Position.LEFT),
|
| 76 |
]
|
| 77 |
assert complex_op.__op__.type == "basic"
|
| 78 |
+
assert complex_op.__op__.outputs == [
|
| 79 |
+
ops.Output(name="result", type=None, position=ops.Position.RIGHT)
|
| 80 |
+
]
|
| 81 |
assert ops.CATALOGS["test"]["color_op"] == complex_op.__op__
|
| 82 |
|
| 83 |
|
| 84 |
def test_operation_can_return_non_result_instance():
|
| 85 |
+
@ops.op("test", "subtract", view="basic", outputs=["result"])
|
| 86 |
def subtract(a, b):
|
| 87 |
return a - b
|
| 88 |
|
|
|
|
| 93 |
|
| 94 |
|
| 95 |
def test_operation_can_return_result_instance():
|
| 96 |
+
@ops.op("test", "subtract", view="basic", outputs=["result"])
|
| 97 |
def subtract(a, b):
|
| 98 |
return ops.Result(output=a - b, display=None)
|
| 99 |
|
|
|
|
| 104 |
|
| 105 |
|
| 106 |
def test_visualization_operations_display_is_populated_automatically():
|
| 107 |
+
@ops.op("test", "display_op", view="visualization", outputs=["result"])
|
| 108 |
def display_op():
|
| 109 |
return {"display_value": 1}
|
| 110 |
|
lynxkite-core/tests/test_simple.py
CHANGED
|
@@ -9,11 +9,12 @@ async def test_optional_inputs():
|
|
| 9 |
|
| 10 |
@ops.op("test", "maybe add")
|
| 11 |
def maybe_add(a: int, b: int | None = None):
|
|
|
|
| 12 |
return a + (b or 0)
|
| 13 |
|
| 14 |
assert maybe_add.__op__.inputs == [
|
| 15 |
-
ops.Input(name="a", type=int, position=
|
| 16 |
-
ops.Input(name="b", type=int | None, position=
|
| 17 |
]
|
| 18 |
simple.register("test")
|
| 19 |
ws = workspace.Workspace(env="test", nodes=[], edges=[])
|
|
|
|
| 9 |
|
| 10 |
@ops.op("test", "maybe add")
|
| 11 |
def maybe_add(a: int, b: int | None = None):
|
| 12 |
+
"""b is optional"""
|
| 13 |
return a + (b or 0)
|
| 14 |
|
| 15 |
assert maybe_add.__op__.inputs == [
|
| 16 |
+
ops.Input(name="a", type=int, position=ops.Position.LEFT),
|
| 17 |
+
ops.Input(name="b", type=int | None, position=ops.Position.LEFT),
|
| 18 |
]
|
| 19 |
simple.register("test")
|
| 20 |
ws = workspace.Workspace(env="test", nodes=[], edges=[])
|
lynxkite-core/tests/test_workspace.py
CHANGED
|
@@ -9,12 +9,12 @@ def test_save_load():
|
|
| 9 |
ws.add_node(
|
| 10 |
id="1",
|
| 11 |
type="node_type",
|
| 12 |
-
|
| 13 |
)
|
| 14 |
ws.add_node(
|
| 15 |
id="2",
|
| 16 |
type="node_type",
|
| 17 |
-
|
| 18 |
)
|
| 19 |
ws.edges.append(
|
| 20 |
workspace.WorkspaceEdge(
|
|
@@ -42,6 +42,7 @@ def test_save_load():
|
|
| 42 |
assert node.id == loaded_node.id
|
| 43 |
assert node.type == loaded_node.type
|
| 44 |
assert node.data.title == loaded_node.data.title
|
|
|
|
| 45 |
assert node.data.params == loaded_node.data.params
|
| 46 |
assert node.position.x == loaded_node.position.x
|
| 47 |
assert node.position.y == loaded_node.position.y
|
|
@@ -61,7 +62,7 @@ def test_save_load():
|
|
| 61 |
def populate_ops_catalog():
|
| 62 |
from lynxkite.core import ops
|
| 63 |
|
| 64 |
-
ops.register_passive_op("test", "Test Operation", [])
|
| 65 |
|
| 66 |
|
| 67 |
def test_update_metadata():
|
|
@@ -69,17 +70,18 @@ def test_update_metadata():
|
|
| 69 |
ws.add_node(
|
| 70 |
id="1",
|
| 71 |
type="basic",
|
| 72 |
-
|
| 73 |
)
|
| 74 |
ws.add_node(
|
| 75 |
id="2",
|
| 76 |
type="basic",
|
| 77 |
-
|
| 78 |
)
|
| 79 |
ws.update_metadata()
|
|
|
|
| 80 |
assert ws.nodes[0].data.meta.name == "Test Operation"
|
| 81 |
assert ws.nodes[0].data.error is None
|
| 82 |
-
assert
|
| 83 |
assert ws.nodes[1].data.error == "Unknown operation."
|
| 84 |
|
| 85 |
|
|
|
|
| 9 |
ws.add_node(
|
| 10 |
id="1",
|
| 11 |
type="node_type",
|
| 12 |
+
title="Node 1",
|
| 13 |
)
|
| 14 |
ws.add_node(
|
| 15 |
id="2",
|
| 16 |
type="node_type",
|
| 17 |
+
title="Node 2",
|
| 18 |
)
|
| 19 |
ws.edges.append(
|
| 20 |
workspace.WorkspaceEdge(
|
|
|
|
| 42 |
assert node.id == loaded_node.id
|
| 43 |
assert node.type == loaded_node.type
|
| 44 |
assert node.data.title == loaded_node.data.title
|
| 45 |
+
assert node.data.op_id == loaded_node.data.op_id
|
| 46 |
assert node.data.params == loaded_node.data.params
|
| 47 |
assert node.position.x == loaded_node.position.x
|
| 48 |
assert node.position.y == loaded_node.position.y
|
|
|
|
| 62 |
def populate_ops_catalog():
|
| 63 |
from lynxkite.core import ops
|
| 64 |
|
| 65 |
+
ops.register_passive_op("test", "Test Operation", inputs=[])
|
| 66 |
|
| 67 |
|
| 68 |
def test_update_metadata():
|
|
|
|
| 70 |
ws.add_node(
|
| 71 |
id="1",
|
| 72 |
type="basic",
|
| 73 |
+
title="Test Operation",
|
| 74 |
)
|
| 75 |
ws.add_node(
|
| 76 |
id="2",
|
| 77 |
type="basic",
|
| 78 |
+
title="Unknown Operation",
|
| 79 |
)
|
| 80 |
ws.update_metadata()
|
| 81 |
+
assert ws.nodes[0].data.meta
|
| 82 |
assert ws.nodes[0].data.meta.name == "Test Operation"
|
| 83 |
assert ws.nodes[0].data.error is None
|
| 84 |
+
assert ws.nodes[1].data.meta is None
|
| 85 |
assert ws.nodes[1].data.error == "Unknown operation."
|
| 86 |
|
| 87 |
|
lynxkite-graph-analytics/pyproject.toml
CHANGED
|
@@ -5,19 +5,24 @@ description = "The graph analytics executor and boxes for LynxKite"
|
|
| 5 |
readme = "README.md"
|
| 6 |
requires-python = ">=3.11"
|
| 7 |
dependencies = [
|
|
|
|
| 8 |
"fsspec>=2025.3.2",
|
| 9 |
"grand-cypher>=0.13.0",
|
| 10 |
-
"joblib>=1.4.2",
|
| 11 |
"lynxkite-core",
|
| 12 |
"matplotlib>=3.10.1",
|
| 13 |
"networkx[default]>=3.4.2",
|
|
|
|
|
|
|
| 14 |
"osmnx>=2.0.2",
|
| 15 |
"pandas>=2.2.3",
|
| 16 |
"polars>=1.25.2",
|
| 17 |
"pyarrow>=19.0.1",
|
|
|
|
| 18 |
"torch>=2.7.0",
|
| 19 |
"torch-geometric>=2.6.1",
|
| 20 |
-
"
|
|
|
|
|
|
|
| 21 |
]
|
| 22 |
classifiers = ["License :: OSI Approved :: GNU Affero General Public License v3 or later (AGPLv3+)"]
|
| 23 |
|
|
@@ -25,10 +30,6 @@ classifiers = ["License :: OSI Approved :: GNU Affero General Public License v3
|
|
| 25 |
Homepage = "https://github.com/lynxkite/lynxkite-2000/"
|
| 26 |
|
| 27 |
[project.optional-dependencies]
|
| 28 |
-
dev = [
|
| 29 |
-
"pytest>=8.3.5",
|
| 30 |
-
"pytest-asyncio>=0.26.0",
|
| 31 |
-
]
|
| 32 |
gpu = [
|
| 33 |
"cuml-cu12>=25.2.1",
|
| 34 |
"nx-cugraph-cu12>=25.4.0",
|
|
@@ -37,7 +38,7 @@ gpu = [
|
|
| 37 |
]
|
| 38 |
|
| 39 |
[tool.uv.sources]
|
| 40 |
-
lynxkite-core = {
|
| 41 |
pylibcugraph-cu12 = { index = "nvidia" }
|
| 42 |
|
| 43 |
[tool.pytest.ini_options]
|
|
@@ -46,3 +47,21 @@ asyncio_mode = "auto"
|
|
| 46 |
[[tool.uv.index]]
|
| 47 |
name = "nvidia"
|
| 48 |
url = "https://pypi.nvidia.com"
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 5 |
readme = "README.md"
|
| 6 |
requires-python = ">=3.11"
|
| 7 |
dependencies = [
|
| 8 |
+
"cudf-cu12>=25.6.0",
|
| 9 |
"fsspec>=2025.3.2",
|
| 10 |
"grand-cypher>=0.13.0",
|
|
|
|
| 11 |
"lynxkite-core",
|
| 12 |
"matplotlib>=3.10.1",
|
| 13 |
"networkx[default]>=3.4.2",
|
| 14 |
+
"numba>=0.61.2",
|
| 15 |
+
"numpy>=2.2.6",
|
| 16 |
"osmnx>=2.0.2",
|
| 17 |
"pandas>=2.2.3",
|
| 18 |
"polars>=1.25.2",
|
| 19 |
"pyarrow>=19.0.1",
|
| 20 |
+
"pydantic>=2.11.7",
|
| 21 |
"torch>=2.7.0",
|
| 22 |
"torch-geometric>=2.6.1",
|
| 23 |
+
"torchdiffeq>=0.2.5",
|
| 24 |
+
"tqdm>=4.67.1",
|
| 25 |
+
"umap-learn>=0.5.9.post2",
|
| 26 |
]
|
| 27 |
classifiers = ["License :: OSI Approved :: GNU Affero General Public License v3 or later (AGPLv3+)"]
|
| 28 |
|
|
|
|
| 30 |
Homepage = "https://github.com/lynxkite/lynxkite-2000/"
|
| 31 |
|
| 32 |
[project.optional-dependencies]
|
|
|
|
|
|
|
|
|
|
|
|
|
| 33 |
gpu = [
|
| 34 |
"cuml-cu12>=25.2.1",
|
| 35 |
"nx-cugraph-cu12>=25.4.0",
|
|
|
|
| 38 |
]
|
| 39 |
|
| 40 |
[tool.uv.sources]
|
| 41 |
+
lynxkite-core = { workspace = true }
|
| 42 |
pylibcugraph-cu12 = { index = "nvidia" }
|
| 43 |
|
| 44 |
[tool.pytest.ini_options]
|
|
|
|
| 47 |
[[tool.uv.index]]
|
| 48 |
name = "nvidia"
|
| 49 |
url = "https://pypi.nvidia.com"
|
| 50 |
+
|
| 51 |
+
[tool.deptry.per_rule_ignores]
|
| 52 |
+
DEP002 = ["numba", "pyarrow", "nx-cugraph-cu12", "pylibcugraph-cu12"]
|
| 53 |
+
|
| 54 |
+
[tool.deptry.package_module_name_map]
|
| 55 |
+
grand-cypher = "grandcypher"
|
| 56 |
+
lynxkite-core = "lynxkite"
|
| 57 |
+
umap-learn = "umap"
|
| 58 |
+
cuml-cu12 = "cuml"
|
| 59 |
+
cudf-cu12 = "cudf"
|
| 60 |
+
|
| 61 |
+
[build-system]
|
| 62 |
+
requires = ["setuptools", "wheel", "setuptools-scm"]
|
| 63 |
+
build-backend = "setuptools.build_meta"
|
| 64 |
+
|
| 65 |
+
[tool.setuptools.packages.find]
|
| 66 |
+
namespaces = true
|
| 67 |
+
where = ["src"]
|
lynxkite-graph-analytics/src/lynxkite_graph_analytics/__init__.py
CHANGED
|
@@ -4,7 +4,7 @@ import os
|
|
| 4 |
import pandas as pd
|
| 5 |
|
| 6 |
if os.environ.get("NX_CUGRAPH_AUTOCONFIG", "").strip().lower() == "true":
|
| 7 |
-
import cudf.pandas
|
| 8 |
|
| 9 |
cudf.pandas.install()
|
| 10 |
|
|
|
|
| 4 |
import pandas as pd
|
| 5 |
|
| 6 |
if os.environ.get("NX_CUGRAPH_AUTOCONFIG", "").strip().lower() == "true":
|
| 7 |
+
import cudf.pandas # ty: ignore[unresolved-import]
|
| 8 |
|
| 9 |
cudf.pandas.install()
|
| 10 |
|
lynxkite-graph-analytics/src/lynxkite_graph_analytics/core.py
CHANGED
|
@@ -14,6 +14,56 @@ import typing
|
|
| 14 |
|
| 15 |
ENV = "LynxKite Graph Analytics"
|
| 16 |
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 17 |
|
| 18 |
@dataclasses.dataclass
|
| 19 |
class RelationDefinition:
|
|
@@ -141,12 +191,15 @@ class Bundle:
|
|
| 141 |
return {
|
| 142 |
"dataframes": {
|
| 143 |
name: {
|
|
|
|
| 144 |
"columns": sorted(str(c) for c in df.columns),
|
| 145 |
}
|
| 146 |
for name, df in self.dfs.items()
|
| 147 |
},
|
| 148 |
"relations": [dataclasses.asdict(relation) for relation in self.relations],
|
| 149 |
-
"other": {
|
|
|
|
|
|
|
| 150 |
}
|
| 151 |
|
| 152 |
|
|
@@ -173,14 +226,17 @@ def disambiguate_edges(ws: workspace.Workspace):
|
|
| 173 |
seen = set()
|
| 174 |
for edge in reversed(ws.edges):
|
| 175 |
dst_node = nodes[edge.target]
|
| 176 |
-
op = catalog.get(dst_node.data.
|
| 177 |
-
if op
|
|
|
|
|
|
|
|
|
|
| 178 |
# Takes multiple bundles as an input. No need to disambiguate.
|
| 179 |
continue
|
| 180 |
if (edge.target, edge.targetHandle) in seen:
|
| 181 |
i = ws.edges.index(edge)
|
| 182 |
del ws.edges[i]
|
| 183 |
-
if
|
| 184 |
del ws._crdt["edges"][i]
|
| 185 |
seen.add((edge.target, edge.targetHandle))
|
| 186 |
|
|
@@ -220,44 +276,65 @@ async def await_if_needed(obj):
|
|
| 220 |
return obj
|
| 221 |
|
| 222 |
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 223 |
async def _execute_node(
|
| 224 |
node: workspace.WorkspaceNode, ws: workspace.Workspace, catalog: ops.Catalog, outputs: Outputs
|
| 225 |
):
|
| 226 |
params = {**node.data.params}
|
| 227 |
-
op = catalog.get(node.data.
|
| 228 |
if not op:
|
| 229 |
node.publish_error("Operation not found in catalog")
|
| 230 |
return
|
| 231 |
node.publish_started()
|
| 232 |
-
|
| 233 |
-
|
| 234 |
-
|
| 235 |
-
|
| 236 |
-
|
| 237 |
-
|
| 238 |
# Convert inputs types to match operation signature.
|
| 239 |
try:
|
| 240 |
inputs = []
|
| 241 |
missing = []
|
| 242 |
for p in op.inputs:
|
|
|
|
| 243 |
if p.name not in input_map:
|
| 244 |
opt_type = ops.get_optional_type(p.type)
|
| 245 |
if opt_type is not None:
|
| 246 |
inputs.append(None)
|
|
|
|
|
|
|
| 247 |
else:
|
| 248 |
missing.append(p.name)
|
| 249 |
continue
|
| 250 |
x = input_map[p.name]
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 251 |
if p.type == nx.Graph:
|
| 252 |
if isinstance(x, Bundle):
|
| 253 |
x = x.to_nx()
|
| 254 |
assert isinstance(x, nx.Graph), f"Input must be a graph. Got: {x}"
|
| 255 |
elif p.type == Bundle:
|
|
|
|
|
|
|
| 256 |
if isinstance(x, nx.Graph):
|
| 257 |
x = Bundle.from_nx(x)
|
| 258 |
-
|
| 259 |
-
x
|
| 260 |
-
|
|
|
|
| 261 |
inputs.append(x)
|
| 262 |
except Exception as e:
|
| 263 |
if not os.environ.get("LYNXKITE_SUPPRESS_OP_ERRORS"):
|
|
@@ -277,12 +354,17 @@ async def _execute_node(
|
|
| 277 |
traceback.print_exc()
|
| 278 |
result = ops.Result(error=str(e))
|
| 279 |
result.input_metadata = [_get_metadata(i) for i in inputs]
|
| 280 |
-
|
| 281 |
-
|
| 282 |
-
|
| 283 |
-
|
| 284 |
-
|
| 285 |
-
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 286 |
node.publish_result(result)
|
| 287 |
|
| 288 |
|
|
|
|
| 14 |
|
| 15 |
ENV = "LynxKite Graph Analytics"
|
| 16 |
|
| 17 |
+
# Annotated types with format "dropdown" let you specify the available options
|
| 18 |
+
# as a query on the input_metadata. These query expressions are JMESPath expressions.
|
| 19 |
+
TableName = typing.Annotated[
|
| 20 |
+
str, {"format": "dropdown", "metadata_query": "[].dataframes[].keys(@)[]"}
|
| 21 |
+
]
|
| 22 |
+
"""A type annotation to be used for parameters of an operation. TableName is
|
| 23 |
+
rendered as a dropdown in the frontend, listing all DataFrames in the Bundle.
|
| 24 |
+
The table name is passed to the operation as a string."""
|
| 25 |
+
|
| 26 |
+
NodePropertyName = typing.Annotated[
|
| 27 |
+
str, {"format": "dropdown", "metadata_query": "[].dataframes[].nodes[].columns[]"}
|
| 28 |
+
]
|
| 29 |
+
"""A type annotation to be used for parameters of an operation. NodePropertyName is
|
| 30 |
+
rendered as a dropdown in the frontend, listing the columns of the "nodes" DataFrame.
|
| 31 |
+
The column name is passed to the operation as a string."""
|
| 32 |
+
|
| 33 |
+
EdgePropertyName = typing.Annotated[
|
| 34 |
+
str, {"format": "dropdown", "metadata_query": "[].dataframes[].edges[].columns[]"}
|
| 35 |
+
]
|
| 36 |
+
"""A type annotation to be used for parameters of an operation. EdgePropertyName is
|
| 37 |
+
rendered as a dropdown in the frontend, listing the columns of the "edges" DataFrame.
|
| 38 |
+
The column name is passed to the operation as a string."""
|
| 39 |
+
|
| 40 |
+
OtherName = typing.Annotated[str, {"format": "dropdown", "metadata_query": "[].other.keys(@)[]"}]
|
| 41 |
+
"""A type annotation to be used for parameters of an operation. OtherName is
|
| 42 |
+
rendered as a dropdown in the frontend, listing the keys on the "other" part of the Bundle.
|
| 43 |
+
The key is passed to the operation as a string."""
|
| 44 |
+
|
| 45 |
+
ModelName = typing.Annotated[
|
| 46 |
+
str,
|
| 47 |
+
{
|
| 48 |
+
"format": "dropdown",
|
| 49 |
+
"metadata_query": "[].other.*[] | [?type == 'model'].key",
|
| 50 |
+
},
|
| 51 |
+
]
|
| 52 |
+
"""A type annotation to be used for parameters of an operation. ModelName is
|
| 53 |
+
rendered as a dropdown in the frontend, listing the models in the Bundle.
|
| 54 |
+
The model name is passed to the operation as a string."""
|
| 55 |
+
|
| 56 |
+
# Parameter names in angle brackets, like <table_name>, will be replaced with the parameter
|
| 57 |
+
# values. (This is not part of JMESPath.)
|
| 58 |
+
# ColumnNameByTableName will list the columns of the DataFrame with the name
|
| 59 |
+
# specified by the `table_name` parameter.
|
| 60 |
+
ColumnNameByTableName = typing.Annotated[
|
| 61 |
+
str, {"format": "dropdown", "metadata_query": "[].dataframes[].<table_name>.columns[]"}
|
| 62 |
+
]
|
| 63 |
+
"""A type annotation to be used for parameters of an operation. ColumnNameByTableName is
|
| 64 |
+
rendered as a dropdown in the frontend, listing the columns of the DataFrame
|
| 65 |
+
named by the "table_name" parameter. The column name is passed to the operation as a string."""
|
| 66 |
+
|
| 67 |
|
| 68 |
@dataclasses.dataclass
|
| 69 |
class RelationDefinition:
|
|
|
|
| 191 |
return {
|
| 192 |
"dataframes": {
|
| 193 |
name: {
|
| 194 |
+
"key": name,
|
| 195 |
"columns": sorted(str(c) for c in df.columns),
|
| 196 |
}
|
| 197 |
for name, df in self.dfs.items()
|
| 198 |
},
|
| 199 |
"relations": [dataclasses.asdict(relation) for relation in self.relations],
|
| 200 |
+
"other": {
|
| 201 |
+
k: {"key": k, **getattr(v, "metadata", lambda: {})()} for k, v in self.other.items()
|
| 202 |
+
},
|
| 203 |
}
|
| 204 |
|
| 205 |
|
|
|
|
| 226 |
seen = set()
|
| 227 |
for edge in reversed(ws.edges):
|
| 228 |
dst_node = nodes[edge.target]
|
| 229 |
+
op = catalog.get(dst_node.data.op_id)
|
| 230 |
+
if not op:
|
| 231 |
+
continue
|
| 232 |
+
t = op.get_input(edge.targetHandle).type
|
| 233 |
+
if t is list or typing.get_origin(t) is list:
|
| 234 |
# Takes multiple bundles as an input. No need to disambiguate.
|
| 235 |
continue
|
| 236 |
if (edge.target, edge.targetHandle) in seen:
|
| 237 |
i = ws.edges.index(edge)
|
| 238 |
del ws.edges[i]
|
| 239 |
+
if ws._crdt:
|
| 240 |
del ws._crdt["edges"][i]
|
| 241 |
seen.add((edge.target, edge.targetHandle))
|
| 242 |
|
|
|
|
| 276 |
return obj
|
| 277 |
|
| 278 |
|
| 279 |
+
def _to_bundle(x):
|
| 280 |
+
if isinstance(x, nx.Graph):
|
| 281 |
+
x = Bundle.from_nx(x)
|
| 282 |
+
elif isinstance(x, pd.DataFrame):
|
| 283 |
+
x = Bundle.from_df(x)
|
| 284 |
+
assert isinstance(x, Bundle), f"Input must be a graph or dataframe. Got: {x}"
|
| 285 |
+
return x
|
| 286 |
+
|
| 287 |
+
|
| 288 |
async def _execute_node(
|
| 289 |
node: workspace.WorkspaceNode, ws: workspace.Workspace, catalog: ops.Catalog, outputs: Outputs
|
| 290 |
):
|
| 291 |
params = {**node.data.params}
|
| 292 |
+
op = catalog.get(node.data.op_id)
|
| 293 |
if not op:
|
| 294 |
node.publish_error("Operation not found in catalog")
|
| 295 |
return
|
| 296 |
node.publish_started()
|
| 297 |
+
input_map = {}
|
| 298 |
+
for edge in ws.edges:
|
| 299 |
+
if edge.target == node.id:
|
| 300 |
+
input_map.setdefault(edge.targetHandle, []).append(
|
| 301 |
+
outputs[edge.source, edge.sourceHandle]
|
| 302 |
+
)
|
| 303 |
# Convert inputs types to match operation signature.
|
| 304 |
try:
|
| 305 |
inputs = []
|
| 306 |
missing = []
|
| 307 |
for p in op.inputs:
|
| 308 |
+
is_list = typing.get_origin(p.type) is list
|
| 309 |
if p.name not in input_map:
|
| 310 |
opt_type = ops.get_optional_type(p.type)
|
| 311 |
if opt_type is not None:
|
| 312 |
inputs.append(None)
|
| 313 |
+
elif is_list:
|
| 314 |
+
inputs.append([])
|
| 315 |
else:
|
| 316 |
missing.append(p.name)
|
| 317 |
continue
|
| 318 |
x = input_map[p.name]
|
| 319 |
+
if p.type == list[Bundle]:
|
| 320 |
+
x = [_to_bundle(i) for i in x]
|
| 321 |
+
elif is_list:
|
| 322 |
+
pass
|
| 323 |
+
else:
|
| 324 |
+
[x] = x # There should never be multiple inputs.
|
| 325 |
if p.type == nx.Graph:
|
| 326 |
if isinstance(x, Bundle):
|
| 327 |
x = x.to_nx()
|
| 328 |
assert isinstance(x, nx.Graph), f"Input must be a graph. Got: {x}"
|
| 329 |
elif p.type == Bundle:
|
| 330 |
+
x = _to_bundle(x)
|
| 331 |
+
if p.type == pd.DataFrame:
|
| 332 |
if isinstance(x, nx.Graph):
|
| 333 |
x = Bundle.from_nx(x)
|
| 334 |
+
if isinstance(x, Bundle):
|
| 335 |
+
assert len(x.dfs) == 1, "Bundle must contain a single DataFrame."
|
| 336 |
+
[x] = list(x.dfs.values())
|
| 337 |
+
assert isinstance(x, pd.DataFrame), f"Input must be a DataFrame. Got: {x}"
|
| 338 |
inputs.append(x)
|
| 339 |
except Exception as e:
|
| 340 |
if not os.environ.get("LYNXKITE_SUPPRESS_OP_ERRORS"):
|
|
|
|
| 354 |
traceback.print_exc()
|
| 355 |
result = ops.Result(error=str(e))
|
| 356 |
result.input_metadata = [_get_metadata(i) for i in inputs]
|
| 357 |
+
try:
|
| 358 |
+
if isinstance(result.output, dict):
|
| 359 |
+
for k, v in result.output.items():
|
| 360 |
+
outputs[node.id, k] = v
|
| 361 |
+
elif result.output is not None:
|
| 362 |
+
[k] = op.outputs
|
| 363 |
+
outputs[node.id, k.name] = result.output
|
| 364 |
+
except Exception as e:
|
| 365 |
+
if not os.environ.get("LYNXKITE_SUPPRESS_OP_ERRORS"):
|
| 366 |
+
traceback.print_exc()
|
| 367 |
+
result = ops.Result(error=str(e))
|
| 368 |
node.publish_result(result)
|
| 369 |
|
| 370 |
|
lynxkite-graph-analytics/src/lynxkite_graph_analytics/lynxkite_ops.py
CHANGED
|
@@ -8,7 +8,8 @@ from collections import deque
|
|
| 8 |
|
| 9 |
from . import core
|
| 10 |
import grandcypher
|
| 11 |
-
import matplotlib
|
|
|
|
| 12 |
import networkx as nx
|
| 13 |
import pandas as pd
|
| 14 |
import polars as pl
|
|
@@ -43,9 +44,10 @@ class FileFormat(enum.StrEnum):
|
|
| 43 |
default=FileFormat.csv,
|
| 44 |
),
|
| 45 |
],
|
|
|
|
| 46 |
)
|
| 47 |
def import_file(
|
| 48 |
-
*, file_path: str, table_name: str, file_format: FileFormat, **kwargs
|
| 49 |
) -> core.Bundle:
|
| 50 |
"""Read the contents of the a file into a `Bundle`.
|
| 51 |
|
|
@@ -74,6 +76,36 @@ def import_file(
|
|
| 74 |
return core.Bundle(dfs={table_name: df})
|
| 75 |
|
| 76 |
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 77 |
@op("Import Parquet")
|
| 78 |
def import_parquet(*, filename: str):
|
| 79 |
"""Imports a Parquet file."""
|
|
@@ -146,17 +178,6 @@ def cypher(bundle: core.Bundle, *, query: ops.LongStr, save_as: str = "result"):
|
|
| 146 |
return bundle
|
| 147 |
|
| 148 |
|
| 149 |
-
@op("Organize")
|
| 150 |
-
def organize(bundle: list[core.Bundle], *, code: ops.LongStr) -> core.Bundle:
|
| 151 |
-
"""Lets you rename/copy/delete DataFrames, and modify relations.
|
| 152 |
-
|
| 153 |
-
TODO: Merge this with "Create graph".
|
| 154 |
-
"""
|
| 155 |
-
bundle = bundle.copy()
|
| 156 |
-
exec(code, globals(), {"bundle": bundle})
|
| 157 |
-
return bundle
|
| 158 |
-
|
| 159 |
-
|
| 160 |
@op("Sample graph")
|
| 161 |
def sample_graph(graph: nx.Graph, *, nodes: int = 100):
|
| 162 |
"""Takes a (preferably connected) subgraph."""
|
|
@@ -185,6 +206,7 @@ def _map_color(value):
|
|
| 185 |
else:
|
| 186 |
cmap = matplotlib.cm.get_cmap("Paired")
|
| 187 |
categories = pd.Index(value.unique())
|
|
|
|
| 188 |
colors = cmap.colors[: len(categories)]
|
| 189 |
return [
|
| 190 |
"#{:02x}{:02x}{:02x}".format(int(r * 255), int(g * 255), int(b * 255))
|
|
@@ -196,9 +218,9 @@ def _map_color(value):
|
|
| 196 |
def visualize_graph(
|
| 197 |
graph: core.Bundle,
|
| 198 |
*,
|
| 199 |
-
color_nodes_by:
|
| 200 |
-
label_by:
|
| 201 |
-
color_edges_by:
|
| 202 |
):
|
| 203 |
nodes = core.df_for_frontend(graph.dfs["nodes"], 10_000)
|
| 204 |
if color_nodes_by:
|
|
@@ -288,29 +310,25 @@ def view_tables(bundle: core.Bundle, *, _tables_open: str = "", limit: int = 100
|
|
| 288 |
|
| 289 |
|
| 290 |
@op(
|
| 291 |
-
"
|
| 292 |
view="graph_creation_view",
|
| 293 |
outputs=["output"],
|
| 294 |
)
|
| 295 |
-
def
|
| 296 |
-
"""
|
| 297 |
-
|
| 298 |
-
relations is a stringified JSON, instead of a dict, because complex Yjs types (arrays, maps)
|
| 299 |
-
are not currently supported in the UI.
|
| 300 |
|
| 301 |
-
|
| 302 |
-
bundle: Bundle to modify
|
| 303 |
-
relations (str, optional): Set of relations to set for the bundle. The parameter
|
| 304 |
-
should be a JSON object where the keys are relation names and the values are
|
| 305 |
-
a dictionary representation of a `RelationDefinition`.
|
| 306 |
-
Defaults to None.
|
| 307 |
-
|
| 308 |
-
Returns:
|
| 309 |
-
Bundle: The input bundle with the new relations set.
|
| 310 |
"""
|
| 311 |
-
bundle =
|
| 312 |
-
|
| 313 |
-
bundle.
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 314 |
return ops.Result(output=bundle, display=bundle.to_dict(limit=100))
|
| 315 |
|
| 316 |
|
|
|
|
| 8 |
|
| 9 |
from . import core
|
| 10 |
import grandcypher
|
| 11 |
+
import matplotlib.cm
|
| 12 |
+
import matplotlib.colors
|
| 13 |
import networkx as nx
|
| 14 |
import pandas as pd
|
| 15 |
import polars as pl
|
|
|
|
| 44 |
default=FileFormat.csv,
|
| 45 |
),
|
| 46 |
],
|
| 47 |
+
slow=True,
|
| 48 |
)
|
| 49 |
def import_file(
|
| 50 |
+
*, file_path: str, table_name: str, file_format: FileFormat = FileFormat.csv, **kwargs
|
| 51 |
) -> core.Bundle:
|
| 52 |
"""Read the contents of the a file into a `Bundle`.
|
| 53 |
|
|
|
|
| 76 |
return core.Bundle(dfs={table_name: df})
|
| 77 |
|
| 78 |
|
| 79 |
+
@op("Export to file")
|
| 80 |
+
def export_to_file(
|
| 81 |
+
bundle: core.Bundle,
|
| 82 |
+
*,
|
| 83 |
+
table_name: str,
|
| 84 |
+
filename: str,
|
| 85 |
+
file_format: FileFormat = FileFormat.csv,
|
| 86 |
+
):
|
| 87 |
+
"""Exports a DataFrame to a file.
|
| 88 |
+
|
| 89 |
+
Args:
|
| 90 |
+
bundle: The bundle containing the DataFrame to export.
|
| 91 |
+
table_name: The name of the DataFrame in the bundle to export.
|
| 92 |
+
filename: The name of the file to export to.
|
| 93 |
+
file_format: The format of the file to export to. Defaults to CSV.
|
| 94 |
+
"""
|
| 95 |
+
|
| 96 |
+
df = bundle.dfs[table_name]
|
| 97 |
+
if file_format == FileFormat.csv:
|
| 98 |
+
df.to_csv(filename, index=False)
|
| 99 |
+
elif file_format == FileFormat.json:
|
| 100 |
+
df.to_json(filename, orient="records", lines=True)
|
| 101 |
+
elif file_format == FileFormat.parquet:
|
| 102 |
+
df.to_parquet(filename, index=False)
|
| 103 |
+
elif file_format == FileFormat.excel:
|
| 104 |
+
df.to_excel(filename, index=False)
|
| 105 |
+
else:
|
| 106 |
+
raise ValueError(f"Unsupported file format: {file_format}")
|
| 107 |
+
|
| 108 |
+
|
| 109 |
@op("Import Parquet")
|
| 110 |
def import_parquet(*, filename: str):
|
| 111 |
"""Imports a Parquet file."""
|
|
|
|
| 178 |
return bundle
|
| 179 |
|
| 180 |
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 181 |
@op("Sample graph")
|
| 182 |
def sample_graph(graph: nx.Graph, *, nodes: int = 100):
|
| 183 |
"""Takes a (preferably connected) subgraph."""
|
|
|
|
| 206 |
else:
|
| 207 |
cmap = matplotlib.cm.get_cmap("Paired")
|
| 208 |
categories = pd.Index(value.unique())
|
| 209 |
+
assert isinstance(cmap, matplotlib.colors.ListedColormap)
|
| 210 |
colors = cmap.colors[: len(categories)]
|
| 211 |
return [
|
| 212 |
"#{:02x}{:02x}{:02x}".format(int(r * 255), int(g * 255), int(b * 255))
|
|
|
|
| 218 |
def visualize_graph(
|
| 219 |
graph: core.Bundle,
|
| 220 |
*,
|
| 221 |
+
color_nodes_by: core.NodePropertyName = None,
|
| 222 |
+
label_by: core.NodePropertyName = None,
|
| 223 |
+
color_edges_by: core.EdgePropertyName = None,
|
| 224 |
):
|
| 225 |
nodes = core.df_for_frontend(graph.dfs["nodes"], 10_000)
|
| 226 |
if color_nodes_by:
|
|
|
|
| 310 |
|
| 311 |
|
| 312 |
@op(
|
| 313 |
+
"Organize",
|
| 314 |
view="graph_creation_view",
|
| 315 |
outputs=["output"],
|
| 316 |
)
|
| 317 |
+
def organize(bundles: list[core.Bundle], *, relations: str = ""):
|
| 318 |
+
"""Merge multiple inputs and construct graphs from the tables.
|
|
|
|
|
|
|
|
|
|
| 319 |
|
| 320 |
+
To create a graph, import tables for edges and nodes, and combine them in this operation.
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 321 |
"""
|
| 322 |
+
bundle = core.Bundle()
|
| 323 |
+
for b in bundles:
|
| 324 |
+
bundle.dfs.update(b.dfs)
|
| 325 |
+
bundle.relations.extend(b.relations)
|
| 326 |
+
bundle.other.update(b.other)
|
| 327 |
+
if relations.strip():
|
| 328 |
+
bundle.relations = [
|
| 329 |
+
core.RelationDefinition(**r) # ty: ignore[missing-argument]
|
| 330 |
+
for r in json.loads(relations).values()
|
| 331 |
+
]
|
| 332 |
return ops.Result(output=bundle, display=bundle.to_dict(limit=100))
|
| 333 |
|
| 334 |
|
lynxkite-graph-analytics/src/lynxkite_graph_analytics/ml_ops.py
CHANGED
|
@@ -55,23 +55,53 @@ class ModelOutputMapping(pytorch_core.ModelMapping):
|
|
| 55 |
pass
|
| 56 |
|
| 57 |
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 58 |
@op("Train model", slow=True)
|
| 59 |
def train_model(
|
| 60 |
bundle: core.Bundle,
|
| 61 |
*,
|
| 62 |
-
model_name:
|
| 63 |
input_mapping: ModelTrainingInputMapping,
|
| 64 |
epochs: int = 1,
|
|
|
|
| 65 |
):
|
| 66 |
-
"""
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 67 |
m = bundle.other[model_name].copy()
|
| 68 |
-
|
| 69 |
-
|
|
|
|
|
|
|
|
|
|
| 70 |
losses = []
|
| 71 |
-
for _ in
|
| 72 |
-
|
| 73 |
-
|
| 74 |
-
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 75 |
m.trained = True
|
| 76 |
bundle = bundle.copy()
|
| 77 |
bundle.dfs["training"] = pd.DataFrame({"training_loss": losses})
|
|
@@ -83,17 +113,27 @@ def train_model(
|
|
| 83 |
def model_inference(
|
| 84 |
bundle: core.Bundle,
|
| 85 |
*,
|
| 86 |
-
model_name:
|
| 87 |
input_mapping: ModelInferenceInputMapping,
|
| 88 |
output_mapping: ModelOutputMapping,
|
|
|
|
| 89 |
):
|
| 90 |
"""Executes a trained model."""
|
| 91 |
if input_mapping is None or output_mapping is None:
|
| 92 |
return ops.Result(bundle, error="Mapping is unset.")
|
| 93 |
m = bundle.other[model_name]
|
| 94 |
assert m.trained, "The model is not trained."
|
| 95 |
-
|
| 96 |
-
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 97 |
bundle = bundle.copy()
|
| 98 |
copied = set()
|
| 99 |
for k, v in output_mapping.map.items():
|
|
@@ -102,15 +142,17 @@ def model_inference(
|
|
| 102 |
if v.df not in copied:
|
| 103 |
bundle.dfs[v.df] = bundle.dfs[v.df].copy()
|
| 104 |
copied.add(v.df)
|
| 105 |
-
bundle.dfs[v.df][v.column] = outputs[k]
|
| 106 |
return bundle
|
| 107 |
|
| 108 |
|
| 109 |
@op("Train/test split")
|
| 110 |
-
def train_test_split(
|
|
|
|
|
|
|
| 111 |
"""Splits a dataframe in the bundle into separate "_train" and "_test" dataframes."""
|
| 112 |
df = bundle.dfs[table_name]
|
| 113 |
-
test = df.sample(frac=test_ratio).reset_index()
|
| 114 |
train = df.drop(test.index).reset_index()
|
| 115 |
bundle = bundle.copy()
|
| 116 |
bundle.dfs[f"{table_name}_train"] = train
|
|
@@ -166,15 +208,15 @@ class UMAPMetric(str, enum.Enum):
|
|
| 166 |
def view_vectors(
|
| 167 |
bundle: core.Bundle,
|
| 168 |
*,
|
| 169 |
-
table_name:
|
| 170 |
-
vector_column:
|
| 171 |
-
label_column:
|
| 172 |
n_neighbors: int = 15,
|
| 173 |
min_dist: float = 0.1,
|
| 174 |
metric: UMAPMetric = UMAPMetric.euclidean,
|
| 175 |
):
|
| 176 |
try:
|
| 177 |
-
from cuml.manifold.umap import UMAP
|
| 178 |
except ImportError:
|
| 179 |
from umap import UMAP
|
| 180 |
vec = np.stack(bundle.dfs[table_name][vector_column].to_numpy())
|
|
|
|
| 55 |
pass
|
| 56 |
|
| 57 |
|
| 58 |
+
def _get_num_samples(bundle: core.Bundle, input_mapping: pytorch_core.ModelMapping):
|
| 59 |
+
"""Returns the number of samples in the input mapping."""
|
| 60 |
+
num_samples = None
|
| 61 |
+
for k, v in input_mapping.map.items():
|
| 62 |
+
if v.df in bundle.dfs and v.column in bundle.dfs[v.df]:
|
| 63 |
+
if num_samples is None:
|
| 64 |
+
num_samples = len(bundle.dfs[v.df][v.column])
|
| 65 |
+
else:
|
| 66 |
+
assert num_samples == len(bundle.dfs[v.df][v.column]), (
|
| 67 |
+
f"Input '{k}' has different number of samples ({len(bundle.dfs[v.df][v.column])}) "
|
| 68 |
+
f"than other inputs ({num_samples})."
|
| 69 |
+
)
|
| 70 |
+
return num_samples
|
| 71 |
+
|
| 72 |
+
|
| 73 |
@op("Train model", slow=True)
|
| 74 |
def train_model(
|
| 75 |
bundle: core.Bundle,
|
| 76 |
*,
|
| 77 |
+
model_name: core.ModelName = "model",
|
| 78 |
input_mapping: ModelTrainingInputMapping,
|
| 79 |
epochs: int = 1,
|
| 80 |
+
batch_size: int = 1,
|
| 81 |
):
|
| 82 |
+
"""
|
| 83 |
+
Trains the selected model on the selected dataset.
|
| 84 |
+
Training parameters specific to the model are set in the model definition,
|
| 85 |
+
while parameters specific to the hardware environment and dataset are set here.
|
| 86 |
+
"""
|
| 87 |
+
if input_mapping is None:
|
| 88 |
+
return ops.Result(bundle, error="No inputs are selected.")
|
| 89 |
m = bundle.other[model_name].copy()
|
| 90 |
+
num_samples = _get_num_samples(bundle, input_mapping)
|
| 91 |
+
if num_samples is None:
|
| 92 |
+
return ops.Result(bundle, error="No inputs are selected.")
|
| 93 |
+
num_batches = num_samples // batch_size
|
| 94 |
+
tepochs = tqdm(range(epochs), desc="Training model")
|
| 95 |
losses = []
|
| 96 |
+
for _ in tepochs:
|
| 97 |
+
total_loss = 0
|
| 98 |
+
for i in tqdm(range(num_batches)):
|
| 99 |
+
inputs = pytorch_core.to_batch_tensors(bundle, batch_size, i, input_mapping)
|
| 100 |
+
loss = m.train(inputs)
|
| 101 |
+
total_loss += loss
|
| 102 |
+
mean_loss = total_loss / len(inputs)
|
| 103 |
+
tepochs.set_postfix({"loss": mean_loss})
|
| 104 |
+
losses.append(mean_loss)
|
| 105 |
m.trained = True
|
| 106 |
bundle = bundle.copy()
|
| 107 |
bundle.dfs["training"] = pd.DataFrame({"training_loss": losses})
|
|
|
|
| 113 |
def model_inference(
|
| 114 |
bundle: core.Bundle,
|
| 115 |
*,
|
| 116 |
+
model_name: core.ModelName = "model",
|
| 117 |
input_mapping: ModelInferenceInputMapping,
|
| 118 |
output_mapping: ModelOutputMapping,
|
| 119 |
+
batch_size: int = 1,
|
| 120 |
):
|
| 121 |
"""Executes a trained model."""
|
| 122 |
if input_mapping is None or output_mapping is None:
|
| 123 |
return ops.Result(bundle, error="Mapping is unset.")
|
| 124 |
m = bundle.other[model_name]
|
| 125 |
assert m.trained, "The model is not trained."
|
| 126 |
+
num_samples = _get_num_samples(bundle, input_mapping)
|
| 127 |
+
if num_samples is None:
|
| 128 |
+
return ops.Result(bundle, error="No inputs are selected.")
|
| 129 |
+
num_batches = num_samples // batch_size
|
| 130 |
+
outputs = {}
|
| 131 |
+
for i in tqdm(range(num_batches)):
|
| 132 |
+
inputs = pytorch_core.to_batch_tensors(bundle, batch_size, i, input_mapping)
|
| 133 |
+
batch_outputs = m.inference(inputs)
|
| 134 |
+
for k, v in batch_outputs.items():
|
| 135 |
+
v = v.detach().numpy().reshape(batch_size, -1)
|
| 136 |
+
outputs.setdefault(k, []).extend(v.tolist())
|
| 137 |
bundle = bundle.copy()
|
| 138 |
copied = set()
|
| 139 |
for k, v in output_mapping.map.items():
|
|
|
|
| 142 |
if v.df not in copied:
|
| 143 |
bundle.dfs[v.df] = bundle.dfs[v.df].copy()
|
| 144 |
copied.add(v.df)
|
| 145 |
+
bundle.dfs[v.df][v.column] = outputs[k]
|
| 146 |
return bundle
|
| 147 |
|
| 148 |
|
| 149 |
@op("Train/test split")
|
| 150 |
+
def train_test_split(
|
| 151 |
+
bundle: core.Bundle, *, table_name: core.TableName, test_ratio: float = 0.1, seed=1234
|
| 152 |
+
):
|
| 153 |
"""Splits a dataframe in the bundle into separate "_train" and "_test" dataframes."""
|
| 154 |
df = bundle.dfs[table_name]
|
| 155 |
+
test = df.sample(frac=test_ratio, random_state=seed).reset_index()
|
| 156 |
train = df.drop(test.index).reset_index()
|
| 157 |
bundle = bundle.copy()
|
| 158 |
bundle.dfs[f"{table_name}_train"] = train
|
|
|
|
| 208 |
def view_vectors(
|
| 209 |
bundle: core.Bundle,
|
| 210 |
*,
|
| 211 |
+
table_name: core.TableName = "nodes",
|
| 212 |
+
vector_column: core.ColumnNameByTableName = "",
|
| 213 |
+
label_column: core.ColumnNameByTableName = "",
|
| 214 |
n_neighbors: int = 15,
|
| 215 |
min_dist: float = 0.1,
|
| 216 |
metric: UMAPMetric = UMAPMetric.euclidean,
|
| 217 |
):
|
| 218 |
try:
|
| 219 |
+
from cuml.manifold.umap import UMAP # ty: ignore[unresolved-import]
|
| 220 |
except ImportError:
|
| 221 |
from umap import UMAP
|
| 222 |
vec = np.stack(bundle.dfs[table_name][vector_column].to_numpy())
|
lynxkite-graph-analytics/src/lynxkite_graph_analytics/networkx_ops.py
CHANGED
|
@@ -1,14 +1,14 @@
|
|
| 1 |
"""Automatically wraps all NetworkX functions as LynxKite operations."""
|
| 2 |
|
| 3 |
-
import collections
|
| 4 |
-
import types
|
| 5 |
from lynxkite.core import ops
|
|
|
|
|
|
|
| 6 |
import functools
|
| 7 |
import inspect
|
| 8 |
import networkx as nx
|
| 9 |
-
import re
|
| 10 |
-
|
| 11 |
import pandas as pd
|
|
|
|
|
|
|
| 12 |
|
| 13 |
ENV = "LynxKite Graph Analytics"
|
| 14 |
|
|
@@ -17,20 +17,22 @@ class UnsupportedParameterType(Exception):
|
|
| 17 |
pass
|
| 18 |
|
| 19 |
|
| 20 |
-
|
| 21 |
-
|
|
|
|
| 22 |
|
| 23 |
|
| 24 |
-
def doc_to_type(name: str, type_hint: str) -> type:
|
| 25 |
type_hint = type_hint.lower()
|
| 26 |
type_hint = re.sub("[(][^)]+[)]", "", type_hint).strip().strip(".")
|
| 27 |
if " " in name or "http" in name:
|
| 28 |
-
return
|
| 29 |
if type_hint.endswith(", optional"):
|
| 30 |
w = doc_to_type(name, type_hint.removesuffix(", optional").strip())
|
| 31 |
-
if w is
|
| 32 |
-
return
|
| 33 |
-
|
|
|
|
| 34 |
if type_hint in [
|
| 35 |
"a digraph or multidigraph",
|
| 36 |
"a graph g",
|
|
@@ -54,15 +56,15 @@ def doc_to_type(name: str, type_hint: str) -> type:
|
|
| 54 |
]:
|
| 55 |
return nx.DiGraph
|
| 56 |
elif type_hint == "node":
|
| 57 |
-
return
|
| 58 |
elif type_hint == '"node (optional)"':
|
| 59 |
-
return
|
| 60 |
elif type_hint == '"edge"':
|
| 61 |
-
return
|
| 62 |
elif type_hint == '"edge (optional)"':
|
| 63 |
-
return
|
| 64 |
elif type_hint in ["class", "data type"]:
|
| 65 |
-
return
|
| 66 |
elif type_hint in ["string", "str", "node label"]:
|
| 67 |
return str
|
| 68 |
elif type_hint in ["string or none", "none or string", "string, or none"]:
|
|
@@ -72,27 +74,27 @@ def doc_to_type(name: str, type_hint: str) -> type:
|
|
| 72 |
elif type_hint in ["bool", "boolean"]:
|
| 73 |
return bool
|
| 74 |
elif type_hint == "tuple":
|
| 75 |
-
return
|
| 76 |
elif type_hint == "set":
|
| 77 |
-
return
|
| 78 |
elif type_hint == "list of floats":
|
| 79 |
-
return
|
| 80 |
elif type_hint == "list of floats or float":
|
| 81 |
return float
|
| 82 |
elif type_hint in ["dict", "dictionary"]:
|
| 83 |
-
return
|
| 84 |
elif type_hint == "scalar or dictionary":
|
| 85 |
return float
|
| 86 |
elif type_hint == "none or dict":
|
| 87 |
-
return
|
| 88 |
elif type_hint in ["function", "callable"]:
|
| 89 |
-
return
|
| 90 |
elif type_hint in [
|
| 91 |
"collection",
|
| 92 |
"container of nodes",
|
| 93 |
"list of nodes",
|
| 94 |
]:
|
| 95 |
-
return
|
| 96 |
elif type_hint in [
|
| 97 |
"container",
|
| 98 |
"generator",
|
|
@@ -104,13 +106,13 @@ def doc_to_type(name: str, type_hint: str) -> type:
|
|
| 104 |
"list or tuple",
|
| 105 |
"list",
|
| 106 |
]:
|
| 107 |
-
return
|
| 108 |
elif type_hint == "generator of sets":
|
| 109 |
-
return
|
| 110 |
elif type_hint == "dict or a set of 2 or 3 tuples":
|
| 111 |
-
return
|
| 112 |
elif type_hint == "set of 2 or 3 tuples":
|
| 113 |
-
return
|
| 114 |
elif type_hint == "none, string or function":
|
| 115 |
return str | None
|
| 116 |
elif type_hint == "string or function" and name == "weight":
|
|
@@ -135,8 +137,8 @@ def doc_to_type(name: str, type_hint: str) -> type:
|
|
| 135 |
elif name == "weight":
|
| 136 |
return str
|
| 137 |
elif type_hint == "object":
|
| 138 |
-
return
|
| 139 |
-
return
|
| 140 |
|
| 141 |
|
| 142 |
def types_from_doc(doc: str) -> dict[str, type]:
|
|
@@ -186,13 +188,13 @@ def wrapped(name: str, func):
|
|
| 186 |
return wrapper
|
| 187 |
|
| 188 |
|
| 189 |
-
def _get_params(func) ->
|
| 190 |
sig = inspect.signature(func)
|
| 191 |
# Get types from docstring.
|
| 192 |
types = types_from_doc(func.__doc__)
|
| 193 |
# Always hide these.
|
| 194 |
for k in ["backend", "backend_kwargs", "create_using"]:
|
| 195 |
-
types[k] =
|
| 196 |
# Add in types based on signature.
|
| 197 |
for k, param in sig.parameters.items():
|
| 198 |
if k in types:
|
|
@@ -203,10 +205,10 @@ def _get_params(func) -> dict | None:
|
|
| 203 |
types[k] = int
|
| 204 |
params = []
|
| 205 |
for name, param in sig.parameters.items():
|
| 206 |
-
_type = types.get(name,
|
| 207 |
-
if _type is
|
| 208 |
raise UnsupportedParameterType(name)
|
| 209 |
-
if _type is
|
| 210 |
continue
|
| 211 |
p = ops.Parameter.basic(
|
| 212 |
name=name,
|
|
@@ -218,32 +220,104 @@ def _get_params(func) -> dict | None:
|
|
| 218 |
|
| 219 |
|
| 220 |
_REPLACEMENTS = [
|
| 221 |
-
("
|
| 222 |
-
("
|
| 223 |
-
("
|
| 224 |
-
("
|
| 225 |
-
("
|
| 226 |
-
("
|
| 227 |
-
("
|
| 228 |
-
("
|
| 229 |
-
("
|
| 230 |
-
("
|
| 231 |
-
("
|
| 232 |
-
("
|
| 233 |
-
("
|
| 234 |
-
("
|
| 235 |
-
("
|
| 236 |
-
("
|
| 237 |
-
("
|
| 238 |
-
("
|
| 239 |
-
("
|
| 240 |
-
("
|
| 241 |
-
("
|
| 242 |
-
("
|
| 243 |
-
("
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 244 |
]
|
| 245 |
|
| 246 |
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 247 |
def register_networkx(env: str):
|
| 248 |
cat = ops.CATALOGS.setdefault(env, {})
|
| 249 |
counter = 0
|
|
@@ -253,19 +327,24 @@ def register_networkx(env: str):
|
|
| 253 |
params = _get_params(func)
|
| 254 |
except UnsupportedParameterType:
|
| 255 |
continue
|
| 256 |
-
inputs = [
|
| 257 |
-
|
|
|
|
|
|
|
| 258 |
for a, b in _REPLACEMENTS:
|
| 259 |
nicename = nicename.replace(a, b)
|
|
|
|
|
|
|
| 260 |
op = ops.Op(
|
| 261 |
func=wrapped(name, func),
|
| 262 |
name=nicename,
|
|
|
|
| 263 |
params=params,
|
| 264 |
inputs=inputs,
|
| 265 |
-
outputs=[ops.Output(name="output", type=nx.Graph)],
|
| 266 |
type="basic",
|
| 267 |
)
|
| 268 |
-
cat[
|
| 269 |
counter += 1
|
| 270 |
print(f"Registered {counter} NetworkX operations.")
|
| 271 |
|
|
|
|
| 1 |
"""Automatically wraps all NetworkX functions as LynxKite operations."""
|
| 2 |
|
|
|
|
|
|
|
| 3 |
from lynxkite.core import ops
|
| 4 |
+
import collections.abc
|
| 5 |
+
import enum
|
| 6 |
import functools
|
| 7 |
import inspect
|
| 8 |
import networkx as nx
|
|
|
|
|
|
|
| 9 |
import pandas as pd
|
| 10 |
+
import re
|
| 11 |
+
import types
|
| 12 |
|
| 13 |
ENV = "LynxKite Graph Analytics"
|
| 14 |
|
|
|
|
| 17 |
pass
|
| 18 |
|
| 19 |
|
| 20 |
+
class Failure(str, enum.Enum):
|
| 21 |
+
UNSUPPORTED = "unsupported" # This parameter will be hidden.
|
| 22 |
+
SKIP = "skip" # We have to skip the whole function.
|
| 23 |
|
| 24 |
|
| 25 |
+
def doc_to_type(name: str, type_hint: str) -> type | types.UnionType | Failure:
|
| 26 |
type_hint = type_hint.lower()
|
| 27 |
type_hint = re.sub("[(][^)]+[)]", "", type_hint).strip().strip(".")
|
| 28 |
if " " in name or "http" in name:
|
| 29 |
+
return Failure.UNSUPPORTED # Not a parameter type.
|
| 30 |
if type_hint.endswith(", optional"):
|
| 31 |
w = doc_to_type(name, type_hint.removesuffix(", optional").strip())
|
| 32 |
+
if w is Failure.UNSUPPORTED or w is Failure.SKIP:
|
| 33 |
+
return Failure.SKIP
|
| 34 |
+
assert not isinstance(w, Failure)
|
| 35 |
+
return w | None
|
| 36 |
if type_hint in [
|
| 37 |
"a digraph or multidigraph",
|
| 38 |
"a graph g",
|
|
|
|
| 56 |
]:
|
| 57 |
return nx.DiGraph
|
| 58 |
elif type_hint == "node":
|
| 59 |
+
return Failure.UNSUPPORTED
|
| 60 |
elif type_hint == '"node (optional)"':
|
| 61 |
+
return Failure.SKIP
|
| 62 |
elif type_hint == '"edge"':
|
| 63 |
+
return Failure.UNSUPPORTED
|
| 64 |
elif type_hint == '"edge (optional)"':
|
| 65 |
+
return Failure.SKIP
|
| 66 |
elif type_hint in ["class", "data type"]:
|
| 67 |
+
return Failure.UNSUPPORTED
|
| 68 |
elif type_hint in ["string", "str", "node label"]:
|
| 69 |
return str
|
| 70 |
elif type_hint in ["string or none", "none or string", "string, or none"]:
|
|
|
|
| 74 |
elif type_hint in ["bool", "boolean"]:
|
| 75 |
return bool
|
| 76 |
elif type_hint == "tuple":
|
| 77 |
+
return Failure.UNSUPPORTED
|
| 78 |
elif type_hint == "set":
|
| 79 |
+
return Failure.UNSUPPORTED
|
| 80 |
elif type_hint == "list of floats":
|
| 81 |
+
return Failure.UNSUPPORTED
|
| 82 |
elif type_hint == "list of floats or float":
|
| 83 |
return float
|
| 84 |
elif type_hint in ["dict", "dictionary"]:
|
| 85 |
+
return Failure.UNSUPPORTED
|
| 86 |
elif type_hint == "scalar or dictionary":
|
| 87 |
return float
|
| 88 |
elif type_hint == "none or dict":
|
| 89 |
+
return Failure.SKIP
|
| 90 |
elif type_hint in ["function", "callable"]:
|
| 91 |
+
return Failure.UNSUPPORTED
|
| 92 |
elif type_hint in [
|
| 93 |
"collection",
|
| 94 |
"container of nodes",
|
| 95 |
"list of nodes",
|
| 96 |
]:
|
| 97 |
+
return Failure.UNSUPPORTED
|
| 98 |
elif type_hint in [
|
| 99 |
"container",
|
| 100 |
"generator",
|
|
|
|
| 106 |
"list or tuple",
|
| 107 |
"list",
|
| 108 |
]:
|
| 109 |
+
return Failure.UNSUPPORTED
|
| 110 |
elif type_hint == "generator of sets":
|
| 111 |
+
return Failure.UNSUPPORTED
|
| 112 |
elif type_hint == "dict or a set of 2 or 3 tuples":
|
| 113 |
+
return Failure.UNSUPPORTED
|
| 114 |
elif type_hint == "set of 2 or 3 tuples":
|
| 115 |
+
return Failure.UNSUPPORTED
|
| 116 |
elif type_hint == "none, string or function":
|
| 117 |
return str | None
|
| 118 |
elif type_hint == "string or function" and name == "weight":
|
|
|
|
| 137 |
elif name == "weight":
|
| 138 |
return str
|
| 139 |
elif type_hint == "object":
|
| 140 |
+
return Failure.UNSUPPORTED
|
| 141 |
+
return Failure.SKIP
|
| 142 |
|
| 143 |
|
| 144 |
def types_from_doc(doc: str) -> dict[str, type]:
|
|
|
|
| 188 |
return wrapper
|
| 189 |
|
| 190 |
|
| 191 |
+
def _get_params(func) -> list[ops.Parameter | ops.ParameterGroup]:
|
| 192 |
sig = inspect.signature(func)
|
| 193 |
# Get types from docstring.
|
| 194 |
types = types_from_doc(func.__doc__)
|
| 195 |
# Always hide these.
|
| 196 |
for k in ["backend", "backend_kwargs", "create_using"]:
|
| 197 |
+
types[k] = Failure.SKIP
|
| 198 |
# Add in types based on signature.
|
| 199 |
for k, param in sig.parameters.items():
|
| 200 |
if k in types:
|
|
|
|
| 205 |
types[k] = int
|
| 206 |
params = []
|
| 207 |
for name, param in sig.parameters.items():
|
| 208 |
+
_type = types.get(name, Failure.UNSUPPORTED)
|
| 209 |
+
if _type is Failure.UNSUPPORTED:
|
| 210 |
raise UnsupportedParameterType(name)
|
| 211 |
+
if _type is Failure.SKIP or _type in [nx.Graph, nx.DiGraph]:
|
| 212 |
continue
|
| 213 |
p = ops.Parameter.basic(
|
| 214 |
name=name,
|
|
|
|
| 220 |
|
| 221 |
|
| 222 |
# (pattern, replacement) pairs used to prettify operation names. They are
# applied in order via successive str.replace() calls on the name after its
# underscores have been turned into spaces (see register_networkx), fixing
# capitalization, hyphenation, and algorithm-author names. Order matters:
# e.g. " dag" (leading space) and "dag " (trailing space) are distinct rules.
_REPLACEMENTS = [
    (" at free", " AT-free"),
    (" dag", " DAG"),
    (" k out ", " k-out "),
    (" rary", " r-ary"),
    ("2d ", "2D "),
    ("3d ", "3D "),
    ("adamic adar", "Adamic–Adar"),
    ("barabasi albert", "Barabasi–Albert"),
    ("bellman ford", "Bellman–Ford"),
    ("bethe hessian", "Bethe–Hessian"),
    ("bfs", "BFS"),
    ("d separator", "d-separator"),
    ("dag ", "DAG "),
    ("dfs", "DFS"),
    ("dijkstra", "Dijkstra"),
    ("dorogovtsev goltsev mendes", "Dorogovtsev–Goltsev–Mendes"),
    ("erdos renyi", "Erdos–Renyi"),
    ("euler", "Euler"),
    ("floyd warshall", "Floyd–Warshall"),
    ("forceatlas2", "ForceAtlas2"),
    ("gexf ", "GEXF "),
    ("gml", "GML"),
    ("gnc", "G(n,c)"),
    ("gnm", "G(n,m)"),
    ("gnp", "G(n,p)"),
    ("gnr", "G(n,r)"),
    ("graphml", "GraphML"),
    ("harary", "Harary"),
    ("havel hakimi", "Havel–Hakimi"),
    ("hkn", "H(k,n)"),
    ("hnm", "H(n,m)"),
    ("internet", "Internet"),
    ("k core", "k-core"),
    ("k corona", "k-corona"),
    ("k crust", "k-crust"),
    ("k shell", "k-shell"),
    ("k truss", "k-truss"),
    ("kl ", "KL "),
    ("laplacian", "Laplacian"),
    ("lfr ", "LFR "),
    ("margulis gabber galil", "Margulis–Gabber–Galil"),
    ("moebius kantor", "Moebius–Kantor"),
    ("newman watts strogatz", "Newman–Watts–Strogatz"),
    ("numpy", "NumPy"),
    ("pagerank", "PageRank"),
    ("pajek", "Pajek"),
    ("pandas", "Pandas"),
    ("parse leda", "Parse LEDA"),
    ("powerlaw", "power-law"),
    ("prufer", "Prüfer"),
    ("radzik", "Radzik"),
    ("s metric", "s-metric"),
    ("scale free", "Scale-free"),
    ("scipy", "SciPy"),
    ("small world", "small-world"),
    ("soundarajan hopcroft", "Soundarajan–Hopcroft"),
    ("southern women", "Southern women"),
    ("vf2pp", "VF2++"),
    ("watts strogatz", "Watts–Strogatz"),
    ("weisfeiler lehman", "Weisfeiler–Lehman"),
]
|
| 284 |
+
_CATEGORY_REPLACEMENTS = [
|
| 285 |
+
("Networkx", "NetworkX"),
|
| 286 |
+
("D separation", "D-separation"),
|
| 287 |
+
("Dag", "DAG"),
|
| 288 |
+
("Pagerank alg", "PageRank alg"),
|
| 289 |
+
("Richclub", "Rich-club"),
|
| 290 |
+
("Smallworld", "Small-world"),
|
| 291 |
+
("Smetric", "S-metric"),
|
| 292 |
+
("Structuralholes", "Structural holes"),
|
| 293 |
+
("Edgedfs", "Edge DFS"),
|
| 294 |
+
("Edgebfs", "Edge BFS"),
|
| 295 |
+
("Edge_kcomponents", "Edge k-components"),
|
| 296 |
+
("Mincost", "Min cost"),
|
| 297 |
+
("Networksimplex", "Network simplex"),
|
| 298 |
+
("Vf2pp", "VF2++"),
|
| 299 |
+
("Mst", "MST"),
|
| 300 |
+
("Attrmatrix", "Attr matrix"),
|
| 301 |
+
("Graphmatrix", "Graph matrix"),
|
| 302 |
+
("Laplacianmatrix", "Laplacian matrix"),
|
| 303 |
+
("Algebraicconnectivity", "Algebraic connectivity"),
|
| 304 |
+
("Modularitymatrix", "Modularity matrix"),
|
| 305 |
+
("Bethehessianmatrix", "Bethe–Hessian matrix"),
|
| 306 |
]
|
| 307 |
|
| 308 |
|
| 309 |
+
def _categories(func) -> list[str]:
|
| 310 |
+
"""Extract categories from the function's docstring."""
|
| 311 |
+
path = func.__module__.split(".")
|
| 312 |
+
cats = []
|
| 313 |
+
for p in path:
|
| 314 |
+
p = p.replace("_", " ").capitalize()
|
| 315 |
+
for a, b in _CATEGORY_REPLACEMENTS:
|
| 316 |
+
p = p.replace(a, b)
|
| 317 |
+
cats.append(p)
|
| 318 |
+
return cats
|
| 319 |
+
|
| 320 |
+
|
| 321 |
def register_networkx(env: str):
|
| 322 |
cat = ops.CATALOGS.setdefault(env, {})
|
| 323 |
counter = 0
|
|
|
|
| 327 |
params = _get_params(func)
|
| 328 |
except UnsupportedParameterType:
|
| 329 |
continue
|
| 330 |
+
inputs = [
|
| 331 |
+
ops.Input(name=k, type=nx.Graph, position=ops.Position.LEFT) for k in func.graphs
|
| 332 |
+
]
|
| 333 |
+
nicename = name.replace("_", " ")
|
| 334 |
for a, b in _REPLACEMENTS:
|
| 335 |
nicename = nicename.replace(a, b)
|
| 336 |
+
if nicename[1] != "-":
|
| 337 |
+
nicename = nicename[0].upper() + nicename[1:]
|
| 338 |
op = ops.Op(
|
| 339 |
func=wrapped(name, func),
|
| 340 |
name=nicename,
|
| 341 |
+
categories=_categories(func),
|
| 342 |
params=params,
|
| 343 |
inputs=inputs,
|
| 344 |
+
outputs=[ops.Output(name="output", type=nx.Graph, position=ops.Position.RIGHT)],
|
| 345 |
type="basic",
|
| 346 |
)
|
| 347 |
+
cat[op.id] = op
|
| 348 |
counter += 1
|
| 349 |
print(f"Registered {counter} NetworkX operations.")
|
| 350 |
|
lynxkite-graph-analytics/src/lynxkite_graph_analytics/pytorch/__init__.py
CHANGED
|
@@ -1,2 +1,9 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
from . import pytorch_core # noqa
|
| 2 |
from . import pytorch_ops # noqa
|
|
|
|
| 1 |
+
"""
|
| 2 |
+
This module provides the "PyTorch model" LynxKite environment. This is a passive
|
| 3 |
+
environment: you can build PyTorch models here from neural network layers,
|
| 4 |
+
but the workspace can't be executed. Instead, it can be loaded as a model
|
| 5 |
+
definition in a "LynxKite Graph Analytics" workspace.
|
| 6 |
+
"""
|
| 7 |
+
|
| 8 |
from . import pytorch_core # noqa
|
| 9 |
from . import pytorch_ops # noqa
|