From 9967c503c1cb105074e262ca13bd4428283437b0 Mon Sep 17 00:00:00 2001 From: llodewyks Date: Wed, 15 Oct 2025 12:11:05 +0200 Subject: [PATCH 001/108] Create azure-static-web-app.yml --- .github/workflows/azure-static-web-app.yml | 52 ++++++++++++++++++++++ 1 file changed, 52 insertions(+) create mode 100644 .github/workflows/azure-static-web-app.yml diff --git a/.github/workflows/azure-static-web-app.yml b/.github/workflows/azure-static-web-app.yml new file mode 100644 index 00000000..19b98c8d --- /dev/null +++ b/.github/workflows/azure-static-web-app.yml @@ -0,0 +1,52 @@ +name: Deploy futurecoder to Azure Static Web Apps + +on: + push: + branches: [ main, master ] + workflow_dispatch: + +jobs: + build_and_deploy: + runs-on: ubuntu-latest + + steps: + - name: Checkout + uses: actions/checkout@v4 + + # --- Python build phase (futurecoder content generation) --- + - name: Set up Python + uses: actions/setup-python@v5 + with: + python-version: '3.12' + - name: Install Poetry + run: | + curl -sSL https://install.python-poetry.org | python3 - + echo "$HOME/.local/bin" >> $GITHUB_PATH + - name: Generate site content + env: + FUTURECODER_LANGUAGE: en + run: | + poetry install --no-root + ./scripts/generate.sh + + # --- Node build phase (frontend bundle) --- + - name: Set up Node + uses: actions/setup-node@v4 + with: + node-version: '22' + - name: Install and build frontend + working-directory: frontend + run: | + npm ci + npm run build + # If the project expects a service worker from the course folder, copy it if present + if [ -f "../course/service-worker.js" ]; then cp ../course/service-worker.js public/service-worker.js; fi + + # --- Deploy to Azure Static Web Apps --- + - name: Deploy + uses: Azure/static-web-apps-deploy@v1 + with: + azure_static_web_apps_api_token: ${{ secrets.AZURE_STATIC_WEB_APPS_API_TOKEN }} + action: upload + app_location: "/" # repo root + output_location: "frontend/build" # final static files From 5e10dcdfd3a81cbb472e30d7e950cfc8dec84d4a Mon Sep 17 00:00:00 2001 From: llodewyks Date: Wed, 15 Oct 2025 12:24:38 +0200 Subject: [PATCH 002/108] Update azure-static-web-app.yml --- .github/workflows/azure-static-web-app.yml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.github/workflows/azure-static-web-app.yml b/.github/workflows/azure-static-web-app.yml index 19b98c8d..70164441 100644 --- a/.github/workflows/azure-static-web-app.yml +++ b/.github/workflows/azure-static-web-app.yml @@ -17,7 +17,7 @@ jobs: - name: Set up Python uses: actions/setup-python@v5 with: - python-version: '3.12' + python-version: '3.12.1' - name: Install Poetry run: | curl -sSL https://install.python-poetry.org | python3 - From bfcbf270185d6eb2de14c82f7e08b85f78680743 Mon Sep 17 00:00:00 2001 From: llodewyks Date: Wed, 15 Oct 2025 12:31:17 +0200 Subject: [PATCH 003/108] Turned off CI --- .github/workflows/azure-static-web-app.yml | 2 ++ 1 file changed, 2 insertions(+) diff --git a/.github/workflows/azure-static-web-app.yml b/.github/workflows/azure-static-web-app.yml index 70164441..da4dad56 100644 --- a/.github/workflows/azure-static-web-app.yml +++ b/.github/workflows/azure-static-web-app.yml @@ -36,6 +36,8 @@ jobs: node-version: '22' - name: Install and build frontend working-directory: frontend + env: + CI: false run: | npm ci npm run build From e99f140b81f5f6fc7f33076002ef40782d1d753e Mon Sep 17 00:00:00 2001 From: llodewyks Date: Wed, 15 Oct 2025 12:39:36 +0200 Subject: [PATCH 004/108] Skip App Build --- .github/workflows/azure-static-web-app.yml | 4 ++-- 1 file changed, 2 
insertions(+), 2 deletions(-) diff --git a/.github/workflows/azure-static-web-app.yml b/.github/workflows/azure-static-web-app.yml index da4dad56..a3f444de 100644 --- a/.github/workflows/azure-static-web-app.yml +++ b/.github/workflows/azure-static-web-app.yml @@ -50,5 +50,5 @@ jobs: with: azure_static_web_apps_api_token: ${{ secrets.AZURE_STATIC_WEB_APPS_API_TOKEN }} action: upload - app_location: "/" # repo root - output_location: "frontend/build" # final static files + app_location: "frontend/build" # repo root + skip_app_build: true From abbb289ccd9466146574e3b579884208a61cbbae Mon Sep 17 00:00:00 2001 From: llodewyks Date: Wed, 15 Oct 2025 12:46:33 +0200 Subject: [PATCH 005/108] Update azure-static-web-app.yml --- .github/workflows/azure-static-web-app.yml | 12 ++++++++++++ 1 file changed, 12 insertions(+) diff --git a/.github/workflows/azure-static-web-app.yml b/.github/workflows/azure-static-web-app.yml index a3f444de..938bf74d 100644 --- a/.github/workflows/azure-static-web-app.yml +++ b/.github/workflows/azure-static-web-app.yml @@ -44,6 +44,18 @@ jobs: # If the project expects a service worker from the course folder, copy it if present if [ -f "../course/service-worker.js" ]; then cp ../course/service-worker.js public/service-worker.js; fi + - name: Verify build output + run: | + echo "Repo root contents:" + ls -la + echo "frontend contents:" + ls -la frontend || true + echo "frontend/build contents:" + ls -la frontend/build || true + # hard fail if the built index.html is missing + test -f frontend/build/index.html + + # --- Deploy to Azure Static Web Apps --- - name: Deploy uses: Azure/static-web-apps-deploy@v1 From 14920434786eb13da058ecb717171515551637bb Mon Sep 17 00:00:00 2001 From: llodewyks Date: Wed, 15 Oct 2025 12:54:44 +0200 Subject: [PATCH 006/108] Update azure-static-web-app.yml --- .github/workflows/azure-static-web-app.yml | 8 +++++++- 1 file changed, 7 insertions(+), 1 deletion(-) diff --git a/.github/workflows/azure-static-web-app.yml b/.github/workflows/azure-static-web-app.yml index 938bf74d..8e727818 100644 --- a/.github/workflows/azure-static-web-app.yml +++ b/.github/workflows/azure-static-web-app.yml @@ -34,6 +34,12 @@ jobs: uses: actions/setup-node@v4 with: node-version: '22' + + - name: Override CRA homepage to relative URLs + working-directory: frontend + run: | + node -e "let p=require('./package.json'); p.homepage='.'; require('fs').writeFileSync('package.json', JSON.stringify(p,null,2)); console.log('homepage now', p.homepage)" + - name: Install and build frontend working-directory: frontend env: @@ -62,5 +68,5 @@ jobs: with: azure_static_web_apps_api_token: ${{ secrets.AZURE_STATIC_WEB_APPS_API_TOKEN }} action: upload - app_location: "frontend/build" # repo root + app_location: "frontend/course" # repo root skip_app_build: true From 385377fd2242225a3f30d820d99e670cfb9a4a40 Mon Sep 17 00:00:00 2001 From: llodewyks Date: Wed, 15 Oct 2025 12:59:09 +0200 Subject: [PATCH 007/108] Update azure-static-web-app.yml --- .github/workflows/azure-static-web-app.yml | 41 ++++++++++++---------- 1 file changed, 22 insertions(+), 19 deletions(-) diff --git a/.github/workflows/azure-static-web-app.yml b/.github/workflows/azure-static-web-app.yml index 8e727818..3d4d5555 100644 --- a/.github/workflows/azure-static-web-app.yml +++ b/.github/workflows/azure-static-web-app.yml @@ -50,23 +50,26 @@ jobs: # If the project expects a service worker from the course folder, copy it if present if [ -f "../course/service-worker.js" ]; then cp ../course/service-worker.js 
public/service-worker.js; fi - - name: Verify build output - run: | - echo "Repo root contents:" - ls -la - echo "frontend contents:" - ls -la frontend || true - echo "frontend/build contents:" - ls -la frontend/build || true - # hard fail if the built index.html is missing - test -f frontend/build/index.html - +- name: Verify build output + run: | + echo "frontend contents:" + ls -la frontend || true + echo "frontend/course contents:" + ls -la frontend/course || true + # hard fail if the built index.html is missing + test -f frontend/course/index.html + + - name: Add SWA routing config + run: | + cat > frontend/course/staticwebapp.config.json <<'JSON' + { "routes": [ { "route": "/*", "serve": "/index.html", "statusCode": 200 } ] } + JSON + + - name: Deploy + uses: Azure/static-web-apps-deploy@v1 + with: + azure_static_web_apps_api_token: ${{ secrets.AZURE_STATIC_WEB_APPS_API_TOKEN }} + action: upload + app_location: "frontend/course" # <-- correct folder + skip_app_build: true - # --- Deploy to Azure Static Web Apps --- - - name: Deploy - uses: Azure/static-web-apps-deploy@v1 - with: - azure_static_web_apps_api_token: ${{ secrets.AZURE_STATIC_WEB_APPS_API_TOKEN }} - action: upload - app_location: "frontend/course" # repo root - skip_app_build: true From 3a9d9f9c9c5028aa500672a7795dcfb807c65dc0 Mon Sep 17 00:00:00 2001 From: llodewyks Date: Wed, 15 Oct 2025 13:02:15 +0200 Subject: [PATCH 008/108] Update azure-static-web-app.yml --- .github/workflows/azure-static-web-app.yml | 39 +++++++++++----------- 1 file changed, 19 insertions(+), 20 deletions(-) diff --git a/.github/workflows/azure-static-web-app.yml b/.github/workflows/azure-static-web-app.yml index 3d4d5555..ba39b9cf 100644 --- a/.github/workflows/azure-static-web-app.yml +++ b/.github/workflows/azure-static-web-app.yml @@ -50,26 +50,25 @@ jobs: # If the project expects a service worker from the course folder, copy it if present if [ -f "../course/service-worker.js" ]; then cp ../course/service-worker.js public/service-worker.js; fi -- name: Verify build output - run: | - echo "frontend contents:" - ls -la frontend || true - echo "frontend/course contents:" - ls -la frontend/course || true - # hard fail if the built index.html is missing - test -f frontend/course/index.html + - name: Verify build output + run: | + echo "frontend contents:" + ls -la frontend || true + echo "frontend/course contents:" + ls -la frontend/course || true + test -f frontend/course/index.html - - name: Add SWA routing config - run: | - cat > frontend/course/staticwebapp.config.json <<'JSON' - { "routes": [ { "route": "/*", "serve": "/index.html", "statusCode": 200 } ] } - JSON + - name: Add SWA routing config + run: | + cat > frontend/course/staticwebapp.config.json <<'JSON' + { "routes": [ { "route": "/*", "serve": "/index.html", "statusCode": 200 } ] } + JSON - - name: Deploy - uses: Azure/static-web-apps-deploy@v1 - with: - azure_static_web_apps_api_token: ${{ secrets.AZURE_STATIC_WEB_APPS_API_TOKEN }} - action: upload - app_location: "frontend/course" # <-- correct folder - skip_app_build: true + - name: Deploy + uses: Azure/static-web-apps-deploy@v1 + with: + azure_static_web_apps_api_token: ${{ secrets.AZURE_STATIC_WEB_APPS_API_TOKEN }} + action: upload + app_location: "frontend/course" # <-- correct folder + skip_app_build: true From cc9a3b6791c985e6505817d047c61b5f9bdc4daf Mon Sep 17 00:00:00 2001 From: llodewyks Date: Wed, 15 Oct 2025 13:11:19 +0200 Subject: [PATCH 009/108] Fixed Redirect --- .github/workflows/azure-static-web-app.yml | 26 
+++++++++++++++++++++- 1 file changed, 25 insertions(+), 1 deletion(-) diff --git a/.github/workflows/azure-static-web-app.yml b/.github/workflows/azure-static-web-app.yml index ba39b9cf..7ba3357f 100644 --- a/.github/workflows/azure-static-web-app.yml +++ b/.github/workflows/azure-static-web-app.yml @@ -64,11 +64,35 @@ jobs: { "routes": [ { "route": "/*", "serve": "/index.html", "statusCode": 200 } ] } JSON + + - name: Stage files for SWA + run: | + rm -rf deploy + mkdir -p deploy/course + cp -R frontend/course/* deploy/course/ + # Root redirect to /course/ + cat > deploy/index.html <<'HTML' + + + HTML + # SWA routes so deep links under /course work + cat > deploy/staticwebapp.config.json <<'JSON' + { + "routes": [ + { "route": "/", "redirect": "/course/", "statusCode": 302 }, + { "route": "/course/*", "rewrite": "/course/index.html" } + ] + } + JSON + ls -la deploy || true + ls -la deploy/course || true + test -f deploy/course/index.html + - name: Deploy uses: Azure/static-web-apps-deploy@v1 with: azure_static_web_apps_api_token: ${{ secrets.AZURE_STATIC_WEB_APPS_API_TOKEN }} action: upload - app_location: "frontend/course" # <-- correct folder + app_location: "deploy" # <-- correct folder skip_app_build: true From b9faa79920b209c305c22771f16c21d8be8a01c7 Mon Sep 17 00:00:00 2001 From: llodewyks Date: Wed, 15 Oct 2025 13:22:51 +0200 Subject: [PATCH 010/108] Python Runner Fix --- .github/workflows/azure-static-web-app.yml | 50 ++++++++++++++++------ 1 file changed, 38 insertions(+), 12 deletions(-) diff --git a/.github/workflows/azure-static-web-app.yml b/.github/workflows/azure-static-web-app.yml index 7ba3357f..01efa30c 100644 --- a/.github/workflows/azure-static-web-app.yml +++ b/.github/workflows/azure-static-web-app.yml @@ -70,23 +70,49 @@ jobs: rm -rf deploy mkdir -p deploy/course cp -R frontend/course/* deploy/course/ + # Root redirect to /course/ cat > deploy/index.html <<'HTML' HTML - # SWA routes so deep links under /course work - cat > deploy/staticwebapp.config.json <<'JSON' - { - "routes": [ - { "route": "/", "redirect": "/course/", "statusCode": 302 }, - { "route": "/course/*", "rewrite": "/course/index.html" } - ] - } - JSON - ls -la deploy || true - ls -la deploy/course || true - test -f deploy/course/index.html + - name: Stage files for SWA + run: | + rm -rf deploy + mkdir -p deploy/course + cp -R frontend/course/* deploy/course/ + + # Root redirect to /course/ + cat > deploy/index.html <<'HTML' + + + HTML + + # SWA routes: serve assets first, then SPA rewrite + cat > deploy/staticwebapp.config.json <<'JSON' + { + "routes": [ + { "route": "/", "redirect": "/course/", "statusCode": 302 }, + + { "route": "/course/static/*", "serve": "/course/static/*" }, + { "route": "/course/pyodide/*", "serve": "/course/pyodide/*" }, + { "route": "/course/*.js", "serve": "/course/*.js" }, + { "route": "/course/*.css", "serve": "/course/*.css" }, + { "route": "/course/*.map", "serve": "/course/*.map" }, + { "route": "/course/*.wasm", "serve": "/course/*.wasm" }, + { "route": "/course/*.json", "serve": "/course/*.json" }, + + { "route": "/course/*", "rewrite": "/course/index.html" } + ], + "mimeTypes": { ".wasm": "application/wasm" } + } + JSON + + echo "Verifying:" + ls -la deploy || true + ls -la deploy/course || true + test -f deploy/course/index.html + - name: Deploy uses: Azure/static-web-apps-deploy@v1 From 94b4759afa09a58c866cf9197839d3254ec13f90 Mon Sep 17 00:00:00 2001 From: llodewyks Date: Wed, 15 Oct 2025 13:31:57 +0200 Subject: [PATCH 011/108] Update azure-static-web-app.yml --- 
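
Note: the staged deploy/ folder and the routing rules this series keeps adjusting can be
exercised locally before each deploy round-trip. A minimal sketch, assuming Node.js is
available and using the Azure Static Web Apps CLI emulator; the package name below is the
published @azure/static-web-apps-cli, 4280 is its default port, and the probe paths are
illustrative rather than taken from this repo:

    # Serve the staged artifact with the SWA emulator; it picks up
    # deploy/staticwebapp.config.json and approximates the production routing.
    npx @azure/static-web-apps-cli start ./deploy &
    sleep 10    # give the emulator a moment to come up
    curl -sI http://localhost:4280/        | head -n 5    # expect 302 -> /course/
    curl -sI http://localhost:4280/course/ | head -n 5    # expect 200 from index.html

This avoids waiting on a full Actions run just to test a route change.
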
.github/workflows/azure-static-web-app.yml | 92 +++++++++++----------- 1 file changed, 48 insertions(+), 44 deletions(-) diff --git a/.github/workflows/azure-static-web-app.yml b/.github/workflows/azure-static-web-app.yml index 01efa30c..7b1a9e7c 100644 --- a/.github/workflows/azure-static-web-app.yml +++ b/.github/workflows/azure-static-web-app.yml @@ -68,50 +68,54 @@ jobs: - name: Stage files for SWA run: | rm -rf deploy - mkdir -p deploy/course - cp -R frontend/course/* deploy/course/ - - # Root redirect to /course/ - cat > deploy/index.html <<'HTML' - - - HTML - - name: Stage files for SWA - run: | - rm -rf deploy - mkdir -p deploy/course - cp -R frontend/course/* deploy/course/ - - # Root redirect to /course/ - cat > deploy/index.html <<'HTML' - - - HTML - - # SWA routes: serve assets first, then SPA rewrite - cat > deploy/staticwebapp.config.json <<'JSON' - { - "routes": [ - { "route": "/", "redirect": "/course/", "statusCode": 302 }, - - { "route": "/course/static/*", "serve": "/course/static/*" }, - { "route": "/course/pyodide/*", "serve": "/course/pyodide/*" }, - { "route": "/course/*.js", "serve": "/course/*.js" }, - { "route": "/course/*.css", "serve": "/course/*.css" }, - { "route": "/course/*.map", "serve": "/course/*.map" }, - { "route": "/course/*.wasm", "serve": "/course/*.wasm" }, - { "route": "/course/*.json", "serve": "/course/*.json" }, - - { "route": "/course/*", "rewrite": "/course/index.html" } - ], - "mimeTypes": { ".wasm": "application/wasm" } - } - JSON - - echo "Verifying:" - ls -la deploy || true - ls -la deploy/course || true - test -f deploy/course/index.html + mkdir -p deploy/course + cp -R frontend/course/* deploy/course/ + + # root -> /course/ + cat > deploy/index.html <<'HTML' + + + HTML + + # Serve static + package assets explicitly, then SPA fallback + cat > deploy/staticwebapp.config.json <<'JSON' + { + "routes": [ + { "route": "/", "redirect": "/course/", "statusCode": 302 }, + + { "route": "/course/static/*", "serve": "/course/static/*" }, + { "route": "/course/pyodide/*", "serve": "/course/pyodide/*" }, + { "route": "/course/wheels/*", "serve": "/course/wheels/*" }, + { "route": "/course/packages/*", "serve": "/course/packages/*" }, + { "route": "/course/*.whl", "serve": "/course/*.whl" }, + { "route": "/course/*.wasm", "serve": "/course/*.wasm" }, + { "route": "/course/*.data", "serve": "/course/*.data" }, + { "route": "/course/*.js", "serve": "/course/*.js" }, + { "route": "/course/*.css", "serve": "/course/*.css" }, + { "route": "/course/*.map", "serve": "/course/*.map" }, + { "route": "/course/*.json", "serve": "/course/*.json" }, + + { "route": "/course/*", "rewrite": "/course/index.html" } + ], + "mimeTypes": { + ".wasm": "application/wasm", + ".whl": "application/octet-stream", + ".data": "application/octet-stream" + } + } + JSON + + echo "Show top-level folders:" + find deploy/course -maxdepth 2 -type d -print + + - name: Verify package folders exist + run: | + echo "Expect wheels/packages here:" + ls -la deploy/course/wheels || true + ls -la deploy/course/packages || true + # hard fail if neither exists + if [ ! -d deploy/course/wheels ] && [ ! 
-d deploy/course/packages ]; then + echo "No wheels/packages folder found in build output."; exit 1; fi - name: Deploy From 1080062f4fe12e5a984e997d94b70c746697063a Mon Sep 17 00:00:00 2001 From: llodewyks Date: Wed, 15 Oct 2025 14:08:42 +0200 Subject: [PATCH 012/108] Pyodide fix --- .github/workflows/azure-static-web-app.yml | 20 ++++++++++++++++++++ 1 file changed, 20 insertions(+) diff --git a/.github/workflows/azure-static-web-app.yml b/.github/workflows/azure-static-web-app.yml index 7b1a9e7c..c3842cd5 100644 --- a/.github/workflows/azure-static-web-app.yml +++ b/.github/workflows/azure-static-web-app.yml @@ -117,6 +117,26 @@ jobs: if [ ! -d deploy/course/wheels ] && [ ! -d deploy/course/packages ]; then echo "No wheels/packages folder found in build output."; exit 1; fi + - name: Vendor Pyodide full runtime and a few wheels + run: | + set -e + PYODIDE_VER=0.24.1 + BASE="https://cdn.jsdelivr.net/pyodide/v$%7BPYODIDE_VER%7D/full" + mkdir -p deploy/course/pyodide + + echo "Fetch core runtime..." + curl -fsSL "$BASE/pyodide.js" -o deploy/course/pyodide/pyodide.js + curl -fsSL "$BASE/pyodide.asm.wasm" -o deploy/course/pyodide/pyodide.asm.wasm + curl -fsSL "$BASE/pyodide.asm.data" -o deploy/course/pyodide/pyodide.asm.data + curl -fsSL "$BASE/repodata.json" -o deploy/course/pyodide/repodata.json + + echo "Prime a few common wheels (add more if Network tab shows 404s):" + for pkg in micropip pyparsing packaging; do + curl -fsSL "$BASE/$pkg.whl" -o "deploy/course/pyodide/$pkg.whl" || true + done + + echo "Staged files:" + ls -la deploy/course/pyodide - name: Deploy uses: Azure/static-web-apps-deploy@v1 From 6173582a24b3f14618419a43c1ddc3280947f964 Mon Sep 17 00:00:00 2001 From: llodewyks Date: Wed, 15 Oct 2025 14:17:34 +0200 Subject: [PATCH 013/108] Update azure-static-web-app.yml --- .github/workflows/azure-static-web-app.yml | 63 ++++++++++------------ 1 file changed, 29 insertions(+), 34 deletions(-) diff --git a/.github/workflows/azure-static-web-app.yml b/.github/workflows/azure-static-web-app.yml index c3842cd5..6247a5a4 100644 --- a/.github/workflows/azure-static-web-app.yml +++ b/.github/workflows/azure-static-web-app.yml @@ -50,39 +50,36 @@ jobs: # If the project expects a service worker from the course folder, copy it if present if [ -f "../course/service-worker.js" ]; then cp ../course/service-worker.js public/service-worker.js; fi - - name: Verify build output - run: | + - name: Verify build output + run: | echo "frontend contents:" ls -la frontend || true echo "frontend/course contents:" ls -la frontend/course || true - test -f frontend/course/index.html - - - name: Add SWA routing config - run: | - cat > frontend/course/staticwebapp.config.json <<'JSON' - { "routes": [ { "route": "/*", "serve": "/index.html", "statusCode": 200 } ] } - JSON + # ensure the SPA entry exists + test -f frontend/course/index.html + # (Removed) Add SWA routing config inside frontend/course (redundant) + # We will ship a single config at deploy/staticwebapp.config.json instead. 
- name: Stage files for SWA run: | rm -rf deploy mkdir -p deploy/course cp -R frontend/course/* deploy/course/ - - # root -> /course/ + + # Root -> /course/ cat > deploy/index.html <<'HTML' HTML - - # Serve static + package assets explicitly, then SPA fallback + + # SWA routes: serve assets first, then SPA fallback cat > deploy/staticwebapp.config.json <<'JSON' { "routes": [ { "route": "/", "redirect": "/course/", "statusCode": 302 }, - + { "route": "/course/static/*", "serve": "/course/static/*" }, { "route": "/course/pyodide/*", "serve": "/course/pyodide/*" }, { "route": "/course/wheels/*", "serve": "/course/wheels/*" }, @@ -94,7 +91,7 @@ jobs: { "route": "/course/*.css", "serve": "/course/*.css" }, { "route": "/course/*.map", "serve": "/course/*.map" }, { "route": "/course/*.json", "serve": "/course/*.json" }, - + { "route": "/course/*", "rewrite": "/course/index.html" } ], "mimeTypes": { @@ -104,45 +101,43 @@ jobs: } } JSON - + echo "Show top-level folders:" find deploy/course -maxdepth 2 -type d -print - - name: Verify package folders exist - run: | - echo "Expect wheels/packages here:" - ls -la deploy/course/wheels || true - ls -la deploy/course/packages || true - # hard fail if neither exists - if [ ! -d deploy/course/wheels ] && [ ! -d deploy/course/packages ]; then - echo "No wheels/packages folder found in build output."; exit 1; fi - - name: Vendor Pyodide full runtime and a few wheels run: | set -e PYODIDE_VER=0.24.1 BASE="https://cdn.jsdelivr.net/pyodide/v$%7BPYODIDE_VER%7D/full" mkdir -p deploy/course/pyodide - + echo "Fetch core runtime..." curl -fsSL "$BASE/pyodide.js" -o deploy/course/pyodide/pyodide.js curl -fsSL "$BASE/pyodide.asm.wasm" -o deploy/course/pyodide/pyodide.asm.wasm curl -fsSL "$BASE/pyodide.asm.data" -o deploy/course/pyodide/pyodide.asm.data curl -fsSL "$BASE/repodata.json" -o deploy/course/pyodide/repodata.json - - echo "Prime a few common wheels (add more if Network tab shows 404s):" + + echo "Prime a few common wheels (optional):" for pkg in micropip pyparsing packaging; do curl -fsSL "$BASE/$pkg.whl" -o "deploy/course/pyodide/$pkg.whl" || true done - + echo "Staged files:" ls -la deploy/course/pyodide + - name: Verify pyodide core exists + run: | + ls -la deploy/course || true + ls -la deploy/course/pyodide || true + test -f deploy/course/pyodide/pyodide.js + test -f deploy/course/pyodide/pyodide.asm.wasm + test -f deploy/course/pyodide/repodata.json + - name: Deploy uses: Azure/static-web-apps-deploy@v1 with: - azure_static_web_apps_api_token: ${{ secrets.AZURE_STATIC_WEB_APPS_API_TOKEN }} - action: upload - app_location: "deploy" # <-- correct folder - skip_app_build: true - + azure_static_web_apps_api_token: ${{ secrets.AZURE_STATIC_WEB_APPS_API_TOKEN }} + action: upload + app_location: "deploy" + skip_app_build: true From 5f8b8d3833bd089116ad93e13d20f6ece39c09ad Mon Sep 17 00:00:00 2001 From: llodewyks Date: Wed, 15 Oct 2025 14:22:51 +0200 Subject: [PATCH 014/108] Update azure-static-web-app.yml --- .github/workflows/azure-static-web-app.yml | 32 ++++++++++++---------- 1 file changed, 17 insertions(+), 15 deletions(-) diff --git a/.github/workflows/azure-static-web-app.yml b/.github/workflows/azure-static-web-app.yml index 6247a5a4..ccffd53c 100644 --- a/.github/workflows/azure-static-web-app.yml +++ b/.github/workflows/azure-static-web-app.yml @@ -18,10 +18,15 @@ jobs: uses: actions/setup-python@v5 with: python-version: '3.12.1' + - name: Install Poetry run: | curl -sSL https://install.python-poetry.org | python3 - echo "$HOME/.local/bin" 
>> $GITHUB_PATH + + - name: Make scripts executable + run: chmod +x scripts/generate.sh || true + - name: Generate site content env: FUTURECODER_LANGUAGE: en @@ -29,7 +34,7 @@ jobs: poetry install --no-root ./scripts/generate.sh - # --- Node build phase (frontend bundle) --- + # --- Node build phase (frontend bundle -> outputs under frontend/course) --- - name: Set up Node uses: actions/setup-node@v4 with: @@ -47,28 +52,25 @@ jobs: run: | npm ci npm run build - # If the project expects a service worker from the course folder, copy it if present if [ -f "../course/service-worker.js" ]; then cp ../course/service-worker.js public/service-worker.js; fi - - name: Verify build output - run: | + - name: Verify build output + run: | echo "frontend contents:" ls -la frontend || true echo "frontend/course contents:" ls -la frontend/course || true - # ensure the SPA entry exists test -f frontend/course/index.html - # (Removed) Add SWA routing config inside frontend/course (redundant) - # We will ship a single config at deploy/staticwebapp.config.json instead. - + # --- Stage deploy/ with /course under it and SWA routing rules --- - name: Stage files for SWA run: | + set -e rm -rf deploy mkdir -p deploy/course cp -R frontend/course/* deploy/course/ - # Root -> /course/ + # root -> /course/ cat > deploy/index.html <<'HTML' @@ -102,9 +104,10 @@ jobs: } JSON - echo "Show top-level folders:" - find deploy/course -maxdepth 2 -type d -print + echo "Show top-level folders in deploy:" + find deploy -maxdepth 2 -type d -print + # --- Vendor Pyodide core (so /course/pyodide/* exists) --- - name: Vendor Pyodide full runtime and a few wheels run: | set -e @@ -118,22 +121,21 @@ jobs: curl -fsSL "$BASE/pyodide.asm.data" -o deploy/course/pyodide/pyodide.asm.data curl -fsSL "$BASE/repodata.json" -o deploy/course/pyodide/repodata.json - echo "Prime a few common wheels (optional):" + echo "Prime a few common wheels (optional)..." 
for pkg in micropip pyparsing packaging; do curl -fsSL "$BASE/$pkg.whl" -o "deploy/course/pyodide/$pkg.whl" || true done - echo "Staged files:" + echo "Staged files under deploy/course/pyodide:" ls -la deploy/course/pyodide - name: Verify pyodide core exists run: | - ls -la deploy/course || true - ls -la deploy/course/pyodide || true test -f deploy/course/pyodide/pyodide.js test -f deploy/course/pyodide/pyodide.asm.wasm test -f deploy/course/pyodide/repodata.json + # --- Deploy to Azure Static Web Apps (upload only; skip Oryx) --- - name: Deploy uses: Azure/static-web-apps-deploy@v1 with: From c09d6e4cee3d741f020e74fdf2cc508da264ff90 Mon Sep 17 00:00:00 2001 From: llodewyks Date: Wed, 15 Oct 2025 14:30:12 +0200 Subject: [PATCH 015/108] Update azure-static-web-app.yml --- .github/workflows/azure-static-web-app.yml | 47 +++++++--------------- 1 file changed, 14 insertions(+), 33 deletions(-) diff --git a/.github/workflows/azure-static-web-app.yml b/.github/workflows/azure-static-web-app.yml index ccffd53c..7b6958b9 100644 --- a/.github/workflows/azure-static-web-app.yml +++ b/.github/workflows/azure-static-web-app.yml @@ -54,15 +54,16 @@ jobs: npm run build if [ -f "../course/service-worker.js" ]; then cp ../course/service-worker.js public/service-worker.js; fi - - name: Verify build output - run: | - echo "frontend contents:" - ls -la frontend || true - echo "frontend/course contents:" - ls -la frontend/course || true + - name: Verify build output + run: | + echo "frontend/course (summary):" + find frontend/course -maxdepth 2 -type d -print + # must exist: test -f frontend/course/index.html + # pyodide folder produced by the build should already exist: + test -d frontend/course/pyodide - # --- Stage deploy/ with /course under it and SWA routing rules --- + # --- Stage deploy/ with /course and SWA routing rules --- - name: Stage files for SWA run: | set -e @@ -107,33 +108,13 @@ jobs: echo "Show top-level folders in deploy:" find deploy -maxdepth 2 -type d -print - # --- Vendor Pyodide core (so /course/pyodide/* exists) --- - - name: Vendor Pyodide full runtime and a few wheels - run: | - set -e - PYODIDE_VER=0.24.1 - BASE="https://cdn.jsdelivr.net/pyodide/v$%7BPYODIDE_VER%7D/full" - mkdir -p deploy/course/pyodide - - echo "Fetch core runtime..." - curl -fsSL "$BASE/pyodide.js" -o deploy/course/pyodide/pyodide.js - curl -fsSL "$BASE/pyodide.asm.wasm" -o deploy/course/pyodide/pyodide.asm.wasm - curl -fsSL "$BASE/pyodide.asm.data" -o deploy/course/pyodide/pyodide.asm.data - curl -fsSL "$BASE/repodata.json" -o deploy/course/pyodide/repodata.json - - echo "Prime a few common wheels (optional)..." 
- for pkg in micropip pyparsing packaging; do - curl -fsSL "$BASE/$pkg.whl" -o "deploy/course/pyodide/$pkg.whl" || true - done - - echo "Staged files under deploy/course/pyodide:" - ls -la deploy/course/pyodide - - - name: Verify pyodide core exists + - name: Verify pyodide core exists in deploy run: | - test -f deploy/course/pyodide/pyodide.js - test -f deploy/course/pyodide/pyodide.asm.wasm - test -f deploy/course/pyodide/repodata.json + ls -la deploy/course/pyodide || true + # core files should be present from the build + test -f deploy/course/pyodide/pyodide.asm.js || echo "pyodide.asm.js not found (ok if using other backend)" + # at least the directory must exist + test -d deploy/course/pyodide # --- Deploy to Azure Static Web Apps (upload only; skip Oryx) --- - name: Deploy From 24d2e6850672e5a3071be97f8e8edb2637842ffe Mon Sep 17 00:00:00 2001 From: llodewyks Date: Wed, 15 Oct 2025 14:38:03 +0200 Subject: [PATCH 016/108] Update azure-static-web-app.yml --- .github/workflows/azure-static-web-app.yml | 22 ++++++++++++++-------- 1 file changed, 14 insertions(+), 8 deletions(-) diff --git a/.github/workflows/azure-static-web-app.yml b/.github/workflows/azure-static-web-app.yml index 7b6958b9..105c170c 100644 --- a/.github/workflows/azure-static-web-app.yml +++ b/.github/workflows/azure-static-web-app.yml @@ -54,35 +54,39 @@ jobs: npm run build if [ -f "../course/service-worker.js" ]; then cp ../course/service-worker.js public/service-worker.js; fi - - name: Verify build output - run: | + - name: Verify build output + run: | + bash -eo pipefail <<'BASH' echo "frontend/course (summary):" find frontend/course -maxdepth 2 -type d -print # must exist: test -f frontend/course/index.html - # pyodide folder produced by the build should already exist: + # pyodide dir should be there: test -d frontend/course/pyodide + BASH + # --- Stage deploy/ with /course and SWA routing rules --- - name: Stage files for SWA run: | + bash -eo pipefail <<'BASH' set -e rm -rf deploy mkdir -p deploy/course cp -R frontend/course/* deploy/course/ - + # root -> /course/ cat > deploy/index.html <<'HTML' HTML - + # SWA routes: serve assets first, then SPA fallback cat > deploy/staticwebapp.config.json <<'JSON' { "routes": [ { "route": "/", "redirect": "/course/", "statusCode": 302 }, - + { "route": "/course/static/*", "serve": "/course/static/*" }, { "route": "/course/pyodide/*", "serve": "/course/pyodide/*" }, { "route": "/course/wheels/*", "serve": "/course/wheels/*" }, @@ -94,7 +98,7 @@ jobs: { "route": "/course/*.css", "serve": "/course/*.css" }, { "route": "/course/*.map", "serve": "/course/*.map" }, { "route": "/course/*.json", "serve": "/course/*.json" }, - + { "route": "/course/*", "rewrite": "/course/index.html" } ], "mimeTypes": { @@ -104,9 +108,11 @@ jobs: } } JSON - + echo "Show top-level folders in deploy:" find deploy -maxdepth 2 -type d -print + BASH + - name: Verify pyodide core exists in deploy run: | From 1c31714bb5e8e7cc08a18b3fc5ffc45f66f6ad17 Mon Sep 17 00:00:00 2001 From: llodewyks Date: Wed, 15 Oct 2025 14:48:51 +0200 Subject: [PATCH 017/108] Update azure-static-web-app.yml --- .github/workflows/azure-static-web-app.yml | 13 +++++++++++++ 1 file changed, 13 insertions(+) diff --git a/.github/workflows/azure-static-web-app.yml b/.github/workflows/azure-static-web-app.yml index 105c170c..5165f49a 100644 --- a/.github/workflows/azure-static-web-app.yml +++ b/.github/workflows/azure-static-web-app.yml @@ -98,6 +98,9 @@ jobs: { "route": "/course/*.css", "serve": "/course/*.css" }, { "route": 
"/course/*.map", "serve": "/course/*.map" }, { "route": "/course/*.json", "serve": "/course/*.json" }, + + { "route": "/course/python_core.tar*", "serve": "/course/pyodide/python_core.tar" }, + { "route": "/course/python_stdlib.zip*", "serve": "/course/pyodide/python_stdlib.zip" }, { "route": "/course/*", "rewrite": "/course/index.html" } ], @@ -113,6 +116,16 @@ jobs: find deploy -maxdepth 2 -type d -print BASH + - name: Vendor Pyodide core archives (python_core & stdlib) + run: | + set -e + PYODIDE_VER=0.24.1 + BASE="https://cdn.jsdelivr.net/pyodide/v$%7BPYODIDE_VER%7D/full" + mkdir -p deploy/course/pyodide + # Fetch the core tar and stdlib zip expected by the runtime + curl -fsSL "$BASE/python_core.tar" -o deploy/course/pyodide/python_core.tar + curl -fsSL "$BASE/python_stdlib.zip" -o deploy/course/pyodide/python_stdlib.zip + ls -la deploy/course/pyodide - name: Verify pyodide core exists in deploy run: | From 2be92832c1b3940e43281e666720c31284e4a264 Mon Sep 17 00:00:00 2001 From: llodewyks Date: Wed, 15 Oct 2025 14:56:15 +0200 Subject: [PATCH 018/108] Update azure-static-web-app.yml --- .github/workflows/azure-static-web-app.yml | 27 ++++++++++++++++++---- 1 file changed, 23 insertions(+), 4 deletions(-) diff --git a/.github/workflows/azure-static-web-app.yml b/.github/workflows/azure-static-web-app.yml index 5165f49a..1c0c81fb 100644 --- a/.github/workflows/azure-static-web-app.yml +++ b/.github/workflows/azure-static-web-app.yml @@ -116,6 +116,25 @@ jobs: find deploy -maxdepth 2 -type d -print BASH + - name: Locate and copy python_core.tar / python_stdlib.zip + run: | + set -e + echo "Searching for python_core.tar and python_stdlib.zip..." + CORE_PATH="$(find . -type f -name 'python_core.tar' | head -n1 || true)" + STDLIB_PATH="$(find . 
-type f -name 'python_stdlib.zip' | head -n1 || true)" + echo "CORE_PATH=${CORE_PATH}" + echo "STDLIB_PATH=${STDLIB_PATH}" + + # Ensure deploy/course exists + mkdir -p deploy/course + + # Copy if found + if [ -n "$CORE_PATH" ]; then cp -f "$CORE_PATH" deploy/course/python_core.tar; fi + if [ -n "$STDLIB_PATH" ]; then cp -f "$STDLIB_PATH" deploy/course/python_stdlib.zip; fi + + echo "Deployed course root contents:" + ls -la deploy/course | sed -n '1,200p' + - name: Vendor Pyodide core archives (python_core & stdlib) run: | set -e @@ -129,11 +148,11 @@ jobs: - name: Verify pyodide core exists in deploy run: | - ls -la deploy/course/pyodide || true - # core files should be present from the build - test -f deploy/course/pyodide/pyodide.asm.js || echo "pyodide.asm.js not found (ok if using other backend)" - # at least the directory must exist test -d deploy/course/pyodide + # these two must be present at course root for the runtime + test -f deploy/course/python_core.tar + test -f deploy/course/python_stdlib.zip + # --- Deploy to Azure Static Web Apps (upload only; skip Oryx) --- - name: Deploy From d7580c4d44c617bac51322a0be3fe2d76237305b Mon Sep 17 00:00:00 2001 From: llodewyks Date: Wed, 15 Oct 2025 15:04:02 +0200 Subject: [PATCH 019/108] Update azure-static-web-app.yml --- .github/workflows/azure-static-web-app.yml | 51 ++++++++++++++++++++++ 1 file changed, 51 insertions(+) diff --git a/.github/workflows/azure-static-web-app.yml b/.github/workflows/azure-static-web-app.yml index 1c0c81fb..0aa66df3 100644 --- a/.github/workflows/azure-static-web-app.yml +++ b/.github/workflows/azure-static-web-app.yml @@ -116,6 +116,57 @@ jobs: find deploy -maxdepth 2 -type d -print BASH + + - name: Fetch python_core/stdlib and place at course root + run: | + set -e + mkdir -p deploy/course/pyodide + + # candidate versions and filenames (we'll take the first that exists) + VERSIONS="0.26.4 0.26.3 0.26.2 0.26.1 0.26.0 0.25.1 0.25.0 0.24.1" + FILES_CORE="python_core-3.12.1.tar python_core.tar" + FILES_STDLIB="python_stdlib-3.12.1.zip python_stdlib.zip" + + fetch_first () { + ver="$1"; shift + base="https://cdn.jsdelivr.net/pyodide/v$%7Bver%7D/full" + for f in "$@"; do + echo "Trying $base/$f" + if curl -fsSL "$base/$f" -o "deploy/course/pyodide/$f"; then + echo "Fetched $f from v$ver" + echo "$f" + return 0 + fi + done + return 1 + } + + # Try to fetch core + CORE_FILE="" + for v in $VERSIONS; do + if CORE_FILE="$(fetch_first "$v" $FILES_CORE)"; then break; fi + done + if [ -z "$CORE_FILE" ]; then + echo "ERROR: Could not fetch python_core archive from known versions"; exit 1 + fi + + # Try to fetch stdlib + STDLIB_FILE="" + for v in $VERSIONS; do + if STDLIB_FILE="$(fetch_first "$v" $FILES_STDLIB)"; then break; fi + done + if [ -z "$STDLIB_FILE" ]; then + echo "ERROR: Could not fetch python_stdlib archive from known versions"; exit 1 + fi + + # Copy to the exact paths the app requests (course root) + cp -f "deploy/course/pyodide/$CORE_FILE" deploy/course/python_core.tar + cp -f "deploy/course/pyodide/$STDLIB_FILE" deploy/course/python_stdlib.zip + + echo "Deployed files:" + ls -la deploy/course/pyodide | sed -n '1,200p' + ls -la deploy/course | sed -n '1,200p' + - name: Locate and copy python_core.tar / python_stdlib.zip run: | set -e From 6c2e7e84e5b748fce004e0e1a85d89d171d7aacb Mon Sep 17 00:00:00 2001 From: llodewyks Date: Wed, 15 Oct 2025 15:12:31 +0200 Subject: [PATCH 020/108] Update azure-static-web-app.yml --- .github/workflows/azure-static-web-app.yml | 19 ++++++++++--------- 1 file changed, 
10 insertions(+), 9 deletions(-) diff --git a/.github/workflows/azure-static-web-app.yml b/.github/workflows/azure-static-web-app.yml index 0aa66df3..05fb0abc 100644 --- a/.github/workflows/azure-static-web-app.yml +++ b/.github/workflows/azure-static-web-app.yml @@ -122,26 +122,26 @@ jobs: set -e mkdir -p deploy/course/pyodide - # candidate versions and filenames (we'll take the first that exists) - VERSIONS="0.26.4 0.26.3 0.26.2 0.26.1 0.26.0 0.25.1 0.25.0 0.24.1" - FILES_CORE="python_core-3.12.1.tar python_core.tar" - FILES_STDLIB="python_stdlib-3.12.1.zip python_stdlib.zip" + # Candidate Pyodide versions (newest first) + VERSIONS="0.27.2 0.27.1 0.27.0 0.26.4 0.26.3 0.26.2 0.26.1 0.26.0 0.25.1 0.25.0 0.24.1" + # Candidate filenames for Python 3.12 series + legacy names + FILES_CORE="python_core-3.12.2.tar python_core-3.12.1.tar python_core.tar" + FILES_STDLIB="python_stdlib-3.12.2.zip python_stdlib-3.12.1.zip python_stdlib.zip" fetch_first () { ver="$1"; shift base="https://cdn.jsdelivr.net/pyodide/v$%7Bver%7D/full" for f in "$@"; do - echo "Trying $base/$f" + >&2 echo "Trying $base/$f" if curl -fsSL "$base/$f" -o "deploy/course/pyodide/$f"; then - echo "Fetched $f from v$ver" - echo "$f" + echo "$f" # IMPORTANT: only filename to STDOUT return 0 fi done return 1 } - # Try to fetch core + # Try core CORE_FILE="" for v in $VERSIONS; do if CORE_FILE="$(fetch_first "$v" $FILES_CORE)"; then break; fi @@ -150,7 +150,7 @@ jobs: echo "ERROR: Could not fetch python_core archive from known versions"; exit 1 fi - # Try to fetch stdlib + # Try stdlib STDLIB_FILE="" for v in $VERSIONS; do if STDLIB_FILE="$(fetch_first "$v" $FILES_STDLIB)"; then break; fi @@ -167,6 +167,7 @@ jobs: ls -la deploy/course/pyodide | sed -n '1,200p' ls -la deploy/course | sed -n '1,200p' + - name: Locate and copy python_core.tar / python_stdlib.zip run: | set -e From f468b8855b8289dece60b21ec6a74795f072d5f0 Mon Sep 17 00:00:00 2001 From: llodewyks Date: Wed, 15 Oct 2025 15:19:44 +0200 Subject: [PATCH 021/108] Update azure-static-web-app.yml --- .github/workflows/azure-static-web-app.yml | 162 +++++++-------------- 1 file changed, 52 insertions(+), 110 deletions(-) diff --git a/.github/workflows/azure-static-web-app.yml b/.github/workflows/azure-static-web-app.yml index 05fb0abc..a10f536f 100644 --- a/.github/workflows/azure-static-web-app.yml +++ b/.github/workflows/azure-static-web-app.yml @@ -13,7 +13,7 @@ jobs: - name: Checkout uses: actions/checkout@v4 - # --- Python build phase (futurecoder content generation) --- + # --- Python phase (dependencies + content generation) --- - name: Set up Python uses: actions/setup-python@v5 with: @@ -24,49 +24,69 @@ jobs: curl -sSL https://install.python-poetry.org | python3 - echo "$HOME/.local/bin" >> $GITHUB_PATH - - name: Make scripts executable - run: chmod +x scripts/generate.sh || true + - name: Make project scripts executable + run: | + chmod +x scripts/generate.sh || true + chmod +x scripts/build.sh || true - - name: Generate site content + - name: Generate site content (translations/static files) env: FUTURECODER_LANGUAGE: en run: | poetry install --no-root ./scripts/generate.sh - # --- Node build phase (frontend bundle -> outputs under frontend/course) --- + # --- Node phase (project build script) --- - name: Set up Node uses: actions/setup-node@v4 with: node-version: '22' + # Keep CRA assets on relative URLs (so they work under /course/) - name: Override CRA homepage to relative URLs working-directory: frontend run: | node -e "let p=require('./package.json'); 
p.homepage='.'; require('fs').writeFileSync('package.json', JSON.stringify(p,null,2)); console.log('homepage now', p.homepage)" - - name: Install and build frontend - working-directory: frontend + - name: Build with project script env: + FUTURECODER_LANGUAGE: en + REACT_APP_USE_FIREBASE_EMULATORS: '1' + REACT_APP_FIREBASE_STAGING: '1' CI: false run: | - npm ci - npm run build - if [ -f "../course/service-worker.js" ]; then cp ../course/service-worker.js public/service-worker.js; fi + ./scripts/build.sh - - name: Verify build output + # --- Verify course output & core archives --- + - name: Verify course output & core archives run: | bash -eo pipefail <<'BASH' - echo "frontend/course (summary):" + echo "frontend/course (dirs):" find frontend/course -maxdepth 2 -type d -print - # must exist: + # must exist test -f frontend/course/index.html - # pyodide dir should be there: test -d frontend/course/pyodide + # these are what the runtime requests at /course/… + # if your build puts versioned filenames, we'll normalize next + ls -la frontend/course | sed -n '1,200p' BASH + - name: Normalize core archive names if versioned + run: | + bash -eo pipefail <<'BASH' + CORE=$(find frontend/course -type f -name 'python_core*.tar' | head -n1 || true) + STDLIB=$(find frontend/course -type f -name 'python_stdlib*.zip' | head -n1 || true) + if [ -n "$CORE" ] && [ ! -f frontend/course/python_core.tar ]; then + cp -f "$CORE" frontend/course/python_core.tar + fi + if [ -n "$STDLIB" ] && [ ! -f frontend/course/python_stdlib.zip ]; then + cp -f "$STDLIB" frontend/course/python_stdlib.zip + fi + test -f frontend/course/python_core.tar + test -f frontend/course/python_stdlib.zip + BASH - # --- Stage deploy/ with /course and SWA routing rules --- + # --- Stage deploy/ for SWA (keep /course path) --- - name: Stage files for SWA run: | bash -eo pipefail <<'BASH' @@ -74,23 +94,24 @@ jobs: rm -rf deploy mkdir -p deploy/course cp -R frontend/course/* deploy/course/ - - # root -> /course/ + + # Root -> /course/ cat > deploy/index.html <<'HTML' HTML - + # SWA routes: serve assets first, then SPA fallback cat > deploy/staticwebapp.config.json <<'JSON' { "routes": [ { "route": "/", "redirect": "/course/", "statusCode": 302 }, - + { "route": "/course/static/*", "serve": "/course/static/*" }, { "route": "/course/pyodide/*", "serve": "/course/pyodide/*" }, { "route": "/course/wheels/*", "serve": "/course/wheels/*" }, { "route": "/course/packages/*", "serve": "/course/packages/*" }, + { "route": "/course/*.whl", "serve": "/course/*.whl" }, { "route": "/course/*.wasm", "serve": "/course/*.wasm" }, { "route": "/course/*.data", "serve": "/course/*.data" }, @@ -99,9 +120,9 @@ jobs: { "route": "/course/*.map", "serve": "/course/*.map" }, { "route": "/course/*.json", "serve": "/course/*.json" }, - { "route": "/course/python_core.tar*", "serve": "/course/pyodide/python_core.tar" }, - { "route": "/course/python_stdlib.zip*", "serve": "/course/pyodide/python_stdlib.zip" }, - + { "route": "/course/python_core.tar*", "serve": "/course/python_core.tar" }, + { "route": "/course/python_stdlib.zip*", "serve": "/course/python_stdlib.zip" }, + { "route": "/course/*", "rewrite": "/course/index.html" } ], "mimeTypes": { @@ -111,102 +132,23 @@ jobs: } } JSON - - echo "Show top-level folders in deploy:" - find deploy -maxdepth 2 -type d -print - BASH - - - name: Fetch python_core/stdlib and place at course root - run: | - set -e - mkdir -p deploy/course/pyodide - - # Candidate Pyodide versions (newest first) - VERSIONS="0.27.2 0.27.1 0.27.0 0.26.4 
0.26.3 0.26.2 0.26.1 0.26.0 0.25.1 0.25.0 0.24.1" - # Candidate filenames for Python 3.12 series + legacy names - FILES_CORE="python_core-3.12.2.tar python_core-3.12.1.tar python_core.tar" - FILES_STDLIB="python_stdlib-3.12.2.zip python_stdlib-3.12.1.zip python_stdlib.zip" - - fetch_first () { - ver="$1"; shift - base="https://cdn.jsdelivr.net/pyodide/v$%7Bver%7D/full" - for f in "$@"; do - >&2 echo "Trying $base/$f" - if curl -fsSL "$base/$f" -o "deploy/course/pyodide/$f"; then - echo "$f" # IMPORTANT: only filename to STDOUT - return 0 - fi - done - return 1 - } - - # Try core - CORE_FILE="" - for v in $VERSIONS; do - if CORE_FILE="$(fetch_first "$v" $FILES_CORE)"; then break; fi - done - if [ -z "$CORE_FILE" ]; then - echo "ERROR: Could not fetch python_core archive from known versions"; exit 1 - fi - - # Try stdlib - STDLIB_FILE="" - for v in $VERSIONS; do - if STDLIB_FILE="$(fetch_first "$v" $FILES_STDLIB)"; then break; fi - done - if [ -z "$STDLIB_FILE" ]; then - echo "ERROR: Could not fetch python_stdlib archive from known versions"; exit 1 - fi - - # Copy to the exact paths the app requests (course root) - cp -f "deploy/course/pyodide/$CORE_FILE" deploy/course/python_core.tar - cp -f "deploy/course/pyodide/$STDLIB_FILE" deploy/course/python_stdlib.zip - - echo "Deployed files:" - ls -la deploy/course/pyodide | sed -n '1,200p' - ls -la deploy/course | sed -n '1,200p' - - - - name: Locate and copy python_core.tar / python_stdlib.zip - run: | - set -e - echo "Searching for python_core.tar and python_stdlib.zip..." - CORE_PATH="$(find . -type f -name 'python_core.tar' | head -n1 || true)" - STDLIB_PATH="$(find . -type f -name 'python_stdlib.zip' | head -n1 || true)" - echo "CORE_PATH=${CORE_PATH}" - echo "STDLIB_PATH=${STDLIB_PATH}" - - # Ensure deploy/course exists - mkdir -p deploy/course - - # Copy if found - if [ -n "$CORE_PATH" ]; then cp -f "$CORE_PATH" deploy/course/python_core.tar; fi - if [ -n "$STDLIB_PATH" ]; then cp -f "$STDLIB_PATH" deploy/course/python_stdlib.zip; fi - - echo "Deployed course root contents:" + echo "Deploy tree (top levels):" + find deploy -maxdepth 2 -type d -print ls -la deploy/course | sed -n '1,200p' + BASH - - name: Vendor Pyodide core archives (python_core & stdlib) - run: | - set -e - PYODIDE_VER=0.24.1 - BASE="https://cdn.jsdelivr.net/pyodide/v$%7BPYODIDE_VER%7D/full" - mkdir -p deploy/course/pyodide - # Fetch the core tar and stdlib zip expected by the runtime - curl -fsSL "$BASE/python_core.tar" -o deploy/course/pyodide/python_core.tar - curl -fsSL "$BASE/python_stdlib.zip" -o deploy/course/pyodide/python_stdlib.zip - ls -la deploy/course/pyodide - - - name: Verify pyodide core exists in deploy + # --- Final sanity: files that must exist in the artifact --- + - name: Verify deploy artifact essentials run: | + bash -eo pipefail <<'BASH' + test -f deploy/course/index.html test -d deploy/course/pyodide - # these two must be present at course root for the runtime test -f deploy/course/python_core.tar test -f deploy/course/python_stdlib.zip + BASH - - # --- Deploy to Azure Static Web Apps (upload only; skip Oryx) --- + # --- Upload to SWA (no server build; just upload) --- - name: Deploy uses: Azure/static-web-apps-deploy@v1 with: From 8d2345d1e462608fda1eca1351f1ac57b514df4a Mon Sep 17 00:00:00 2001 From: llodewyks Date: Wed, 15 Oct 2025 15:23:33 +0200 Subject: [PATCH 022/108] Update azure-static-web-app.yml --- .github/workflows/azure-static-web-app.yml | 70 +++++++++++----------- 1 file changed, 36 insertions(+), 34 deletions(-) 
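
Note: the "Detect course output dir" step in this diff hands its result to later steps
through the $GITHUB_OUTPUT file that the Actions runner provides, which is what
${{ steps.outdir.outputs.dir }} reads back. A minimal shell sketch of that mechanism
outside of Actions (the mktemp file stands in for the runner-provided one):

    # Emulate GitHub Actions step outputs locally (illustrative only).
    export GITHUB_OUTPUT="$(mktemp)"
    if [ -d dist/course ]; then
      echo "dir=dist/course" >> "$GITHUB_OUTPUT"      # read back as steps.outdir.outputs.dir
    elif [ -d frontend/course ]; then
      echo "dir=frontend/course" >> "$GITHUB_OUTPUT"
    fi
    cat "$GITHUB_OUTPUT"    # e.g. prints: dir=frontend/course

One wrinkle visible below: `echo "Detected output dir: $(cat $GITHUB_OUTPUT)"` prints the
whole key=value file rather than just the value, so the log line shows "dir=..." and not a
bare path.
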
diff --git a/.github/workflows/azure-static-web-app.yml b/.github/workflows/azure-static-web-app.yml index a10f536f..249da7db 100644 --- a/.github/workflows/azure-static-web-app.yml +++ b/.github/workflows/azure-static-web-app.yml @@ -36,13 +36,18 @@ jobs: poetry install --no-root ./scripts/generate.sh - # --- Node phase (project build script) --- + # --- Node phase (install deps + project build script) --- - name: Set up Node uses: actions/setup-node@v4 with: node-version: '22' - # Keep CRA assets on relative URLs (so they work under /course/) + # Ensure frontend/node_modules exists so "craco" is available + - name: Install frontend dependencies + working-directory: frontend + run: npm ci + + # (Optional) Keep CRA assets on relative URLs in case the app isn't forcing /course - name: Override CRA homepage to relative URLs working-directory: frontend run: | @@ -55,45 +60,45 @@ jobs: REACT_APP_FIREBASE_STAGING: '1' CI: false run: | + chmod +x scripts/build.sh ./scripts/build.sh - # --- Verify course output & core archives --- - - name: Verify course output & core archives - run: | - bash -eo pipefail <<'BASH' - echo "frontend/course (dirs):" - find frontend/course -maxdepth 2 -type d -print - # must exist - test -f frontend/course/index.html - test -d frontend/course/pyodide - # these are what the runtime requests at /course/… - # if your build puts versioned filenames, we'll normalize next - ls -la frontend/course | sed -n '1,200p' - BASH - - - name: Normalize core archive names if versioned + # --- Figure out where the build landed (dist/course or frontend/course) --- + - name: Detect course output dir + id: outdir run: | - bash -eo pipefail <<'BASH' - CORE=$(find frontend/course -type f -name 'python_core*.tar' | head -n1 || true) - STDLIB=$(find frontend/course -type f -name 'python_stdlib*.zip' | head -n1 || true) - if [ -n "$CORE" ] && [ ! -f frontend/course/python_core.tar ]; then - cp -f "$CORE" frontend/course/python_core.tar + if [ -d dist/course ]; then + echo "dir=dist/course" >> $GITHUB_OUTPUT + elif [ -d frontend/course ]; then + echo "dir=frontend/course" >> $GITHUB_OUTPUT + else + echo "No course output found"; exit 1 fi - if [ -n "$STDLIB" ] && [ ! -f frontend/course/python_stdlib.zip ]; then - cp -f "$STDLIB" frontend/course/python_stdlib.zip - fi - test -f frontend/course/python_core.tar - test -f frontend/course/python_stdlib.zip - BASH + echo "Detected output dir: $(cat $GITHUB_OUTPUT)" + + - name: Verify course output & core archives + run: | + OUT="${{ steps.outdir.outputs.dir }}" + echo "Listing $OUT" + find "$OUT" -maxdepth 2 -type d -print + test -f "$OUT/index.html" + test -d "$OUT/pyodide" || (echo "missing $OUT/pyodide" && exit 1) + # normalize core archive names to the paths the app requests + CORE=$(find "$OUT" -type f -name 'python_core*.tar' | head -n1 || true) + STDLIB=$(find "$OUT" -type f -name 'python_stdlib*.zip' | head -n1 || true) + if [ -n "$CORE" ] && [ ! -f "$OUT/python_core.tar" ]; then cp -f "$CORE" "$OUT/python_core.tar"; fi + if [ -n "$STDLIB" ] && [ ! 
-f "$OUT/python_stdlib.zip" ]; then cp -f "$STDLIB" "$OUT/python_stdlib.zip"; fi + test -f "$OUT/python_core.tar" + test -f "$OUT/python_stdlib.zip" # --- Stage deploy/ for SWA (keep /course path) --- - name: Stage files for SWA run: | - bash -eo pipefail <<'BASH' + OUT="${{ steps.outdir.outputs.dir }}" set -e rm -rf deploy mkdir -p deploy/course - cp -R frontend/course/* deploy/course/ + cp -R "$OUT"/* deploy/course/ # Root -> /course/ cat > deploy/index.html <<'HTML' @@ -136,19 +141,16 @@ jobs: echo "Deploy tree (top levels):" find deploy -maxdepth 2 -type d -print ls -la deploy/course | sed -n '1,200p' - BASH # --- Final sanity: files that must exist in the artifact --- - name: Verify deploy artifact essentials run: | - bash -eo pipefail <<'BASH' test -f deploy/course/index.html test -d deploy/course/pyodide test -f deploy/course/python_core.tar test -f deploy/course/python_stdlib.zip - BASH - # --- Upload to SWA (no server build; just upload) --- + # --- Upload to SWA (upload only; skip Oryx) --- - name: Deploy uses: Azure/static-web-apps-deploy@v1 with: From 390b9d372d8efce400d9c908528c949c26923c48 Mon Sep 17 00:00:00 2001 From: llodewyks Date: Wed, 15 Oct 2025 15:25:48 +0200 Subject: [PATCH 023/108] Update index.html --- frontend/public/index.html | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/frontend/public/index.html b/frontend/public/index.html index fc29ecb9..04ad7066 100644 --- a/frontend/public/index.html +++ b/frontend/public/index.html @@ -24,7 +24,7 @@ work correctly both with client-side routing and a non-root public URL. Learn how to configure a non-root public URL by running `npm run build`. --> - futurecoder + Python Training From 5e986be67962d9f023524ac29551417e4198c938 Mon Sep 17 00:00:00 2001 From: llodewyks Date: Wed, 15 Oct 2025 15:29:27 +0200 Subject: [PATCH 024/108] Update azure-static-web-app.yml --- .github/workflows/azure-static-web-app.yml | 59 +++++++++++++++------- 1 file changed, 41 insertions(+), 18 deletions(-) diff --git a/.github/workflows/azure-static-web-app.yml b/.github/workflows/azure-static-web-app.yml index 249da7db..93673395 100644 --- a/.github/workflows/azure-static-web-app.yml +++ b/.github/workflows/azure-static-web-app.yml @@ -76,47 +76,69 @@ jobs: fi echo "Detected output dir: $(cat $GITHUB_OUTPUT)" - - name: Verify course output & core archives + - name: Verify & normalize core archives + id: norm run: | - OUT="${{ steps.outdir.outputs.dir }}" - echo "Listing $OUT" + # Use the detected OUT if you have that step; else default to dist/course + OUT="${{ steps.outdir.outputs.dir || 'dist/course' }}" + echo "OUT=$OUT" + + echo "Tree (2 levels):" find "$OUT" -maxdepth 2 -type d -print + + # Show what's in pyodide + echo "Listing $OUT/pyodide:" + ls -la "$OUT/pyodide" || true + + # Find versioned core/stdlib files anywhere under OUT + CORE="$(find "$OUT" -type f -name 'python_core*.tar' | head -n1 || true)" + STDLIB="$(find "$OUT" -type f -name 'python_stdlib*.zip' | head -n1 || true)" + echo "Found CORE=$CORE" + echo "Found STDLIB=$STDLIB" + + # If found, copy/normalize to the exact paths the runtime requests + if [ -n "$CORE" ]; then cp -f "$CORE" "$OUT/python_core.tar"; fi + if [ -n "$STDLIB" ]; then cp -f "$STDLIB" "$OUT/python_stdlib.zip"; fi + + # Hard requirements from here on: test -f "$OUT/index.html" - test -d "$OUT/pyodide" || (echo "missing $OUT/pyodide" && exit 1) - # normalize core archive names to the paths the app requests - CORE=$(find "$OUT" -type f -name 'python_core*.tar' | head -n1 || true) - 
STDLIB=$(find "$OUT" -type f -name 'python_stdlib*.zip' | head -n1 || true) - if [ -n "$CORE" ] && [ ! -f "$OUT/python_core.tar" ]; then cp -f "$CORE" "$OUT/python_core.tar"; fi - if [ -n "$STDLIB" ] && [ ! -f "$OUT/python_stdlib.zip" ]; then cp -f "$STDLIB" "$OUT/python_stdlib.zip"; fi + test -d "$OUT/pyodide" test -f "$OUT/python_core.tar" test -f "$OUT/python_stdlib.zip" + + echo "Course root after normalization:" + ls -la "$OUT" | sed -n '1,200p' + + # Export OUT so later steps reuse it + echo "dir=$OUT" >> "$GITHUB_OUTPUT" + # --- Stage deploy/ for SWA (keep /course path) --- - name: Stage files for SWA run: | - OUT="${{ steps.outdir.outputs.dir }}" + OUT="${{ steps.norm.outputs.dir || 'dist/course' }}" set -e rm -rf deploy mkdir -p deploy/course cp -R "$OUT"/* deploy/course/ - + # Root -> /course/ cat > deploy/index.html <<'HTML' HTML - + # SWA routes: serve assets first, then SPA fallback cat > deploy/staticwebapp.config.json <<'JSON' { "routes": [ { "route": "/", "redirect": "/course/", "statusCode": 302 }, - + { "route": "/course/static/*", "serve": "/course/static/*" }, { "route": "/course/pyodide/*", "serve": "/course/pyodide/*" }, { "route": "/course/wheels/*", "serve": "/course/wheels/*" }, { "route": "/course/packages/*", "serve": "/course/packages/*" }, - + { "route": "/course/*.whl", "serve": "/course/*.whl" }, { "route": "/course/*.wasm", "serve": "/course/*.wasm" }, { "route": "/course/*.data", "serve": "/course/*.data" }, @@ -124,10 +146,10 @@ jobs: { "route": "/course/*.css", "serve": "/course/*.css" }, { "route": "/course/*.map", "serve": "/course/*.map" }, { "route": "/course/*.json", "serve": "/course/*.json" }, - + { "route": "/course/python_core.tar*", "serve": "/course/python_core.tar" }, { "route": "/course/python_stdlib.zip*", "serve": "/course/python_stdlib.zip" }, - + { "route": "/course/*", "rewrite": "/course/index.html" } ], "mimeTypes": { @@ -137,11 +159,12 @@ jobs: } } JSON - - echo "Deploy tree (top levels):" + + echo "Deploy tree:" find deploy -maxdepth 2 -type d -print ls -la deploy/course | sed -n '1,200p' + # --- Final sanity: files that must exist in the artifact --- - name: Verify deploy artifact essentials run: | From b28a5ca9ecb9947f29fcb3bc14858d64c9f3a359 Mon Sep 17 00:00:00 2001 From: llodewyks Date: Wed, 15 Oct 2025 15:38:11 +0200 Subject: [PATCH 025/108] Update azure-static-web-app.yml --- .github/workflows/azure-static-web-app.yml | 25 +++++++++++----------- 1 file changed, 13 insertions(+), 12 deletions(-) diff --git a/.github/workflows/azure-static-web-app.yml b/.github/workflows/azure-static-web-app.yml index 93673395..7fdc82b1 100644 --- a/.github/workflows/azure-static-web-app.yml +++ b/.github/workflows/azure-static-web-app.yml @@ -79,47 +79,48 @@ jobs: - name: Verify & normalize core archives id: norm run: | - # Use the detected OUT if you have that step; else default to dist/course - OUT="${{ steps.outdir.outputs.dir || 'dist/course' }}" + OUT="dist/course" # or ${{ steps.outdir.outputs.dir }} if you kept that step echo "OUT=$OUT" echo "Tree (2 levels):" find "$OUT" -maxdepth 2 -type d -print - # Show what's in pyodide echo "Listing $OUT/pyodide:" ls -la "$OUT/pyodide" || true - # Find versioned core/stdlib files anywhere under OUT + # Find stdlib and (optional) core CORE="$(find "$OUT" -type f -name 'python_core*.tar' | head -n1 || true)" STDLIB="$(find "$OUT" -type f -name 'python_stdlib*.zip' | head -n1 || true)" echo "Found CORE=$CORE" echo "Found STDLIB=$STDLIB" - # If found, copy/normalize to the exact paths the runtime 
requests - if [ -n "$CORE" ]; then cp -f "$CORE" "$OUT/python_core.tar"; fi + # Normalize: always copy stdlib to course root; core only if present if [ -n "$STDLIB" ]; then cp -f "$STDLIB" "$OUT/python_stdlib.zip"; fi + if [ -n "$CORE" ]; then cp -f "$CORE" "$OUT/python_core.tar"; fi - # Hard requirements from here on: + # Must-haves test -f "$OUT/index.html" test -d "$OUT/pyodide" - test -f "$OUT/python_core.tar" - test -f "$OUT/python_stdlib.zip" + test -f "$OUT/python_stdlib.zip" # required + + # Optional: core tar (don’t fail if absent) + if [ -f "$OUT/python_core.tar" ]; then echo "python_core.tar present"; else echo "python_core.tar not present (ok)"; fi echo "Course root after normalization:" ls -la "$OUT" | sed -n '1,200p' - # Export OUT so later steps reuse it echo "dir=$OUT" >> "$GITHUB_OUTPUT" # --- Stage deploy/ for SWA (keep /course path) --- - name: Stage files for SWA run: | - OUT="${{ steps.norm.outputs.dir || 'dist/course' }}" + bash -eo pipefail <<'BASH' set -e rm -rf deploy mkdir -p deploy/course + # if you have OUT from a prior step, use it; else default to dist/course + OUT="${OUT:-dist/course}" cp -R "$OUT"/* deploy/course/ # Root -> /course/ @@ -147,7 +148,6 @@ jobs: { "route": "/course/*.map", "serve": "/course/*.map" }, { "route": "/course/*.json", "serve": "/course/*.json" }, - { "route": "/course/python_core.tar*", "serve": "/course/python_core.tar" }, { "route": "/course/python_stdlib.zip*", "serve": "/course/python_stdlib.zip" }, { "route": "/course/*", "rewrite": "/course/index.html" } @@ -163,6 +163,7 @@ jobs: echo "Deploy tree:" find deploy -maxdepth 2 -type d -print ls -la deploy/course | sed -n '1,200p' + BASH # --- Final sanity: files that must exist in the artifact --- From 232e6406dffbee7849b0e542f7695abaa88210ee Mon Sep 17 00:00:00 2001 From: llodewyks Date: Wed, 15 Oct 2025 15:48:07 +0200 Subject: [PATCH 026/108] Update azure-static-web-app.yml --- .github/workflows/azure-static-web-app.yml | 52 +++++++++++----------- 1 file changed, 26 insertions(+), 26 deletions(-) diff --git a/.github/workflows/azure-static-web-app.yml b/.github/workflows/azure-static-web-app.yml index 7fdc82b1..00fee8fd 100644 --- a/.github/workflows/azure-static-web-app.yml +++ b/.github/workflows/azure-static-web-app.yml @@ -36,18 +36,17 @@ jobs: poetry install --no-root ./scripts/generate.sh - # --- Node phase (install deps + project build script) --- + # --- Node phase (install deps + project build script) --- - name: Set up Node uses: actions/setup-node@v4 with: node-version: '22' - # Ensure frontend/node_modules exists so "craco" is available - name: Install frontend dependencies working-directory: frontend run: npm ci - # (Optional) Keep CRA assets on relative URLs in case the app isn't forcing /course + # Keep CRA assets on relative URLs in case the app isn't forcing /course - name: Override CRA homepage to relative URLs working-directory: frontend run: | @@ -60,7 +59,6 @@ jobs: REACT_APP_FIREBASE_STAGING: '1' CI: false run: | - chmod +x scripts/build.sh ./scripts/build.sh # --- Figure out where the build landed (dist/course or frontend/course) --- @@ -79,67 +77,65 @@ jobs: - name: Verify & normalize core archives id: norm run: | - OUT="dist/course" # or ${{ steps.outdir.outputs.dir }} if you kept that step + OUT="${{ steps.outdir.outputs.dir }}" echo "OUT=$OUT" - + echo "Tree (2 levels):" find "$OUT" -maxdepth 2 -type d -print - + echo "Listing $OUT/pyodide:" ls -la "$OUT/pyodide" || true - + # Find stdlib and (optional) core CORE="$(find "$OUT" -type f -name 
'python_core*.tar' | head -n1 || true)" STDLIB="$(find "$OUT" -type f -name 'python_stdlib*.zip' | head -n1 || true)" echo "Found CORE=$CORE" echo "Found STDLIB=$STDLIB" - + # Normalize: always copy stdlib to course root; core only if present if [ -n "$STDLIB" ]; then cp -f "$STDLIB" "$OUT/python_stdlib.zip"; fi if [ -n "$CORE" ]; then cp -f "$CORE" "$OUT/python_core.tar"; fi - + # Must-haves test -f "$OUT/index.html" test -d "$OUT/pyodide" test -f "$OUT/python_stdlib.zip" # required - + # Optional: core tar (don’t fail if absent) if [ -f "$OUT/python_core.tar" ]; then echo "python_core.tar present"; else echo "python_core.tar not present (ok)"; fi - + echo "Course root after normalization:" ls -la "$OUT" | sed -n '1,200p' - - echo "dir=$OUT" >> "$GITHUB_OUTPUT" + echo "dir=$OUT" >> "$GITHUB_OUTPUT" # --- Stage deploy/ for SWA (keep /course path) --- - name: Stage files for SWA run: | bash -eo pipefail <<'BASH' set -e + OUT="${{ steps.norm.outputs.dir || steps.outdir.outputs.dir || 'dist/course' }}" rm -rf deploy mkdir -p deploy/course - # if you have OUT from a prior step, use it; else default to dist/course - OUT="${OUT:-dist/course}" cp -R "$OUT"/* deploy/course/ - + # Root -> /course/ cat > deploy/index.html <<'HTML' HTML - + # SWA routes: serve assets first, then SPA fallback cat > deploy/staticwebapp.config.json <<'JSON' { "routes": [ { "route": "/", "redirect": "/course/", "statusCode": 302 }, - + { "route": "/course/static/*", "serve": "/course/static/*" }, { "route": "/course/pyodide/*", "serve": "/course/pyodide/*" }, { "route": "/course/wheels/*", "serve": "/course/wheels/*" }, { "route": "/course/packages/*", "serve": "/course/packages/*" }, - + { "route": "/course/*.whl", "serve": "/course/*.whl" }, { "route": "/course/*.wasm", "serve": "/course/*.wasm" }, { "route": "/course/*.data", "serve": "/course/*.data" }, @@ -147,9 +143,9 @@ jobs: { "route": "/course/*.css", "serve": "/course/*.css" }, { "route": "/course/*.map", "serve": "/course/*.map" }, { "route": "/course/*.json", "serve": "/course/*.json" }, - + { "route": "/course/python_stdlib.zip*", "serve": "/course/python_stdlib.zip" }, - + { "route": "/course/*", "rewrite": "/course/index.html" } ], "mimeTypes": { @@ -159,20 +155,24 @@ jobs: } } JSON - + echo "Deploy tree:" find deploy -maxdepth 2 -type d -print ls -la deploy/course | sed -n '1,200p' BASH - # --- Final sanity: files that must exist in the artifact --- - name: Verify deploy artifact essentials run: | test -f deploy/course/index.html test -d deploy/course/pyodide - test -f deploy/course/python_core.tar - test -f deploy/course/python_stdlib.zip + test -f deploy/course/python_stdlib.zip # required by runtime + # python_core.tar is optional in recent Pyodide builds + if [ -f deploy/course/python_core.tar ]; then + echo "python_core.tar present (optional)"; + else + echo "python_core.tar not present (ok)"; + fi # --- Upload to SWA (upload only; skip Oryx) --- - name: Deploy From 6cb386a4b885f31800683a4b67c3ac69fac04a1a Mon Sep 17 00:00:00 2001 From: llodewyks Date: Wed, 15 Oct 2025 16:13:00 +0200 Subject: [PATCH 027/108] Update azure-static-web-app.yml --- .github/workflows/azure-static-web-app.yml | 1 + 1 file changed, 1 insertion(+) diff --git a/.github/workflows/azure-static-web-app.yml b/.github/workflows/azure-static-web-app.yml index 00fee8fd..512375d7 100644 --- a/.github/workflows/azure-static-web-app.yml +++ b/.github/workflows/azure-static-web-app.yml @@ -145,6 +145,7 @@ jobs: { "route": "/course/*.json", "serve": "/course/*.json" }, { "route": 
"/course/python_stdlib.zip*", "serve": "/course/python_stdlib.zip" }, + { "route": "/course/*python_core.tar*", "serve": "/course/python_stdlib.zip" }, { "route": "/course/*", "rewrite": "/course/index.html" } ], From df8f3b3e9c5b43a5d65bb639b2bac1785117e543 Mon Sep 17 00:00:00 2001 From: llodewyks Date: Wed, 15 Oct 2025 16:20:04 +0200 Subject: [PATCH 028/108] Update azure-static-web-app.yml --- .github/workflows/azure-static-web-app.yml | 5 +++-- 1 file changed, 3 insertions(+), 2 deletions(-) diff --git a/.github/workflows/azure-static-web-app.yml b/.github/workflows/azure-static-web-app.yml index 512375d7..31ba2907 100644 --- a/.github/workflows/azure-static-web-app.yml +++ b/.github/workflows/azure-static-web-app.yml @@ -145,14 +145,15 @@ jobs: { "route": "/course/*.json", "serve": "/course/*.json" }, { "route": "/course/python_stdlib.zip*", "serve": "/course/python_stdlib.zip" }, - { "route": "/course/*python_core.tar*", "serve": "/course/python_stdlib.zip" }, + { "route": "/course/static/js/static/media/python_core.tar*", "serve": "/course/python_stdlib.zip" } { "route": "/course/*", "rewrite": "/course/index.html" } ], "mimeTypes": { ".wasm": "application/wasm", ".whl": "application/octet-stream", - ".data": "application/octet-stream" + ".data": "application/octet-stream", + ".load_by_url": "application/octet-stream" } } JSON From ae5335ec19eb2c2101cb9cdf9971fb5b77a77cdd Mon Sep 17 00:00:00 2001 From: llodewyks Date: Wed, 15 Oct 2025 16:28:05 +0200 Subject: [PATCH 029/108] Update azure-static-web-app.yml --- .github/workflows/azure-static-web-app.yml | 21 +++++++++------------ 1 file changed, 9 insertions(+), 12 deletions(-) diff --git a/.github/workflows/azure-static-web-app.yml b/.github/workflows/azure-static-web-app.yml index 31ba2907..b7a9791a 100644 --- a/.github/workflows/azure-static-web-app.yml +++ b/.github/workflows/azure-static-web-app.yml @@ -118,24 +118,24 @@ jobs: rm -rf deploy mkdir -p deploy/course cp -R "$OUT"/* deploy/course/ - + # Root -> /course/ cat > deploy/index.html <<'HTML' HTML - - # SWA routes: serve assets first, then SPA fallback + + # Valid JSON only — no comments/trailing commas cat > deploy/staticwebapp.config.json <<'JSON' { "routes": [ { "route": "/", "redirect": "/course/", "statusCode": 302 }, - + { "route": "/course/static/*", "serve": "/course/static/*" }, { "route": "/course/pyodide/*", "serve": "/course/pyodide/*" }, { "route": "/course/wheels/*", "serve": "/course/wheels/*" }, { "route": "/course/packages/*", "serve": "/course/packages/*" }, - + { "route": "/course/*.whl", "serve": "/course/*.whl" }, { "route": "/course/*.wasm", "serve": "/course/*.wasm" }, { "route": "/course/*.data", "serve": "/course/*.data" }, @@ -143,10 +143,10 @@ jobs: { "route": "/course/*.css", "serve": "/course/*.css" }, { "route": "/course/*.map", "serve": "/course/*.map" }, { "route": "/course/*.json", "serve": "/course/*.json" }, - + { "route": "/course/python_stdlib.zip*", "serve": "/course/python_stdlib.zip" }, - { "route": "/course/static/js/static/media/python_core.tar*", "serve": "/course/python_stdlib.zip" } - + { "route": "/course/static/js/static/media/python_core.tar*", "serve": "/course/python_stdlib.zip" }, + { "route": "/course/*", "rewrite": "/course/index.html" } ], "mimeTypes": { @@ -157,10 +157,7 @@ jobs: } } JSON - - echo "Deploy tree:" - find deploy -maxdepth 2 -type d -print - ls -la deploy/course | sed -n '1,200p' + BASH # --- Final sanity: files that must exist in the artifact --- From 5a6f15a107cebad8fe70c9cec131e998274548da Mon Sep 17 
00:00:00 2001 From: llodewyks Date: Wed, 15 Oct 2025 16:34:04 +0200 Subject: [PATCH 030/108] Update azure-static-web-app.yml --- .github/workflows/azure-static-web-app.yml | 5 +++-- 1 file changed, 3 insertions(+), 2 deletions(-) diff --git a/.github/workflows/azure-static-web-app.yml b/.github/workflows/azure-static-web-app.yml index b7a9791a..4697e357 100644 --- a/.github/workflows/azure-static-web-app.yml +++ b/.github/workflows/azure-static-web-app.yml @@ -130,6 +130,8 @@ jobs: { "routes": [ { "route": "/", "redirect": "/course/", "statusCode": 302 }, + + { "route": "/course/static/js/static/media/python_core.tar*", "serve": "/course/python_stdlib.zip" }, { "route": "/course/static/*", "serve": "/course/static/*" }, { "route": "/course/pyodide/*", "serve": "/course/pyodide/*" }, @@ -145,8 +147,7 @@ jobs: { "route": "/course/*.json", "serve": "/course/*.json" }, { "route": "/course/python_stdlib.zip*", "serve": "/course/python_stdlib.zip" }, - { "route": "/course/static/js/static/media/python_core.tar*", "serve": "/course/python_stdlib.zip" }, - + { "route": "/course/*", "rewrite": "/course/index.html" } ], "mimeTypes": { From 85ab494702812339f63eb9dcaacf236518f70d2a Mon Sep 17 00:00:00 2001 From: llodewyks Date: Thu, 16 Oct 2025 08:53:34 +0200 Subject: [PATCH 031/108] Update azure-static-web-app.yml --- .github/workflows/azure-static-web-app.yml | 48 ++++++++++++---------- 1 file changed, 26 insertions(+), 22 deletions(-) diff --git a/.github/workflows/azure-static-web-app.yml b/.github/workflows/azure-static-web-app.yml index 4697e357..ef93c16a 100644 --- a/.github/workflows/azure-static-web-app.yml +++ b/.github/workflows/azure-static-web-app.yml @@ -129,33 +129,33 @@ jobs: cat > deploy/staticwebapp.config.json <<'JSON' { "routes": [ - { "route": "/", "redirect": "/course/", "statusCode": 302 }, - - { "route": "/course/static/js/static/media/python_core.tar*", "serve": "/course/python_stdlib.zip" }, - - { "route": "/course/static/*", "serve": "/course/static/*" }, - { "route": "/course/pyodide/*", "serve": "/course/pyodide/*" }, - { "route": "/course/wheels/*", "serve": "/course/wheels/*" }, - { "route": "/course/packages/*", "serve": "/course/packages/*" }, - - { "route": "/course/*.whl", "serve": "/course/*.whl" }, - { "route": "/course/*.wasm", "serve": "/course/*.wasm" }, - { "route": "/course/*.data", "serve": "/course/*.data" }, - { "route": "/course/*.js", "serve": "/course/*.js" }, - { "route": "/course/*.css", "serve": "/course/*.css" }, - { "route": "/course/*.map", "serve": "/course/*.map" }, - { "route": "/course/*.json", "serve": "/course/*.json" }, - - { "route": "/course/python_stdlib.zip*", "serve": "/course/python_stdlib.zip" }, - - { "route": "/course/*", "rewrite": "/course/index.html" } + { "route": "/", "redirect": "/course/", "statusCode": 302 }, + + { "route": "/course/static/js/static/media/python_core.tar*", "rewrite": "/course/python_stdlib.zip" }, + + { "route": "/course/static/*", "serve": "/course/static/*" }, + { "route": "/course/pyodide/*", "serve": "/course/pyodide/*" }, + { "route": "/course/wheels/*", "serve": "/course/wheels/*" }, + { "route": "/course/packages/*", "serve": "/course/packages/*" }, + + { "route": "/course/*.whl", "serve": "/course/*.whl" }, + { "route": "/course/*.wasm", "serve": "/course/*.wasm" }, + { "route": "/course/*.data", "serve": "/course/*.data" }, + { "route": "/course/*.js", "serve": "/course/*.js" }, + { "route": "/course/*.css", "serve": "/course/*.css" }, + { "route": "/course/*.map", "serve": "/course/*.map" }, + { 
"route": "/course/*.json", "serve": "/course/*.json" }, + + { "route": "/course/python_stdlib.zip*", "serve": "/course/python_stdlib.zip" }, + + { "route": "/course/*", "rewrite": "/course/index.html" } ], - "mimeTypes": { + "mimeTypes": { ".wasm": "application/wasm", ".whl": "application/octet-stream", ".data": "application/octet-stream", ".load_by_url": "application/octet-stream" - } + } } JSON @@ -174,6 +174,10 @@ jobs: echo "python_core.tar not present (ok)"; fi + - name: Assert stdlib present + run: test -f deploy/course/python_stdlib.zip + + # --- Upload to SWA (upload only; skip Oryx) --- - name: Deploy uses: Azure/static-web-apps-deploy@v1 From 43921504efe55c9e9ac75dcb8304a63e92d08455 Mon Sep 17 00:00:00 2001 From: llodewyks Date: Thu, 16 Oct 2025 09:03:28 +0200 Subject: [PATCH 032/108] Update azure-static-web-app.yml --- .github/workflows/azure-static-web-app.yml | 45 ++++++++++++++++++---- 1 file changed, 38 insertions(+), 7 deletions(-) diff --git a/.github/workflows/azure-static-web-app.yml b/.github/workflows/azure-static-web-app.yml index ef93c16a..8d5b001e 100644 --- a/.github/workflows/azure-static-web-app.yml +++ b/.github/workflows/azure-static-web-app.yml @@ -129,9 +129,9 @@ jobs: cat > deploy/staticwebapp.config.json <<'JSON' { "routes": [ - { "route": "/", "redirect": "/course/", "statusCode": 302 }, - - { "route": "/course/static/js/static/media/python_core.tar*", "rewrite": "/course/python_stdlib.zip" }, + { "route": "/", "redirect": "/course/", "statusCode": 302 }, + + { "route": "/course/static/js/static/media/python_core.tar*", "serve": "/course/python_core.tar" }, { "route": "/course/static/*", "serve": "/course/static/*" }, { "route": "/course/pyodide/*", "serve": "/course/pyodide/*" }, @@ -151,16 +151,42 @@ jobs: { "route": "/course/*", "rewrite": "/course/index.html" } ], "mimeTypes": { - ".wasm": "application/wasm", - ".whl": "application/octet-stream", - ".data": "application/octet-stream", - ".load_by_url": "application/octet-stream" + ".wasm": "application/wasm", + ".whl": "application/octet-stream", + ".data": "application/octet-stream", + ".load_by_url": "application/octet-stream" } } JSON BASH + - name: Vendor Pyodide core tar (python_core.tar) + run: | + set -e + # Read version from the built files + OUT="${{ steps.norm.outputs.dir || steps.outdir.outputs.dir || 'dist/course' }}" + VER="$(jq -r .version "$OUT/pyodide/package.json" 2>/dev/null || node -p 'require("./'"$OUT"'/pyodide/package.json").version')" + echo "Pyodide version: $VER" + + mkdir -p deploy/course + # Try jsDelivr first, then GitHub releases as fallback + URL_JSDELIVR="https://cdn.jsdelivr.net/pyodide/v$%7BVER%7D/full/pyodide-core-$%7BVER%7D.tar.bz2" + URL_GH="https://github.com/pyodide/pyodide/releases/download/$%7BVER%7D/pyodide-core-$%7BVER%7D.tar.bz2" + + echo "Fetching core from $URL_JSDELIVR ..." + if ! curl -fsSL "$URL_JSDELIVR" -o deploy/course/python_core.tar ; then + echo "jsDelivr failed; trying GitHub release $URL_GH ..." 
+ curl -fsSL "$URL_GH" -o deploy/course/python_core.tar + fi + + # Sanity: it should be a bzip2 tar (magic check) + file deploy/course/python_core.tar || true + # Ensure non-empty + test -s deploy/course/python_core.tar + + + # --- Final sanity: files that must exist in the artifact --- - name: Verify deploy artifact essentials run: | @@ -177,6 +203,11 @@ jobs: - name: Assert stdlib present run: test -f deploy/course/python_stdlib.zip + - name: Assert core & stdlib present + run: | + test -f deploy/course/python_stdlib.zip + test -s deploy/course/python_core.tar + # --- Upload to SWA (upload only; skip Oryx) --- - name: Deploy From b3a471be78a34e0c5ca33134dce0d467858a3898 Mon Sep 17 00:00:00 2001 From: llodewyks Date: Thu, 16 Oct 2025 09:18:21 +0200 Subject: [PATCH 033/108] Update azure-static-web-app.yml --- .github/workflows/azure-static-web-app.yml | 77 ++++++++++++++++------ 1 file changed, 58 insertions(+), 19 deletions(-) diff --git a/.github/workflows/azure-static-web-app.yml b/.github/workflows/azure-static-web-app.yml index 8d5b001e..276e023a 100644 --- a/.github/workflows/azure-static-web-app.yml +++ b/.github/workflows/azure-static-web-app.yml @@ -130,25 +130,25 @@ jobs: { "routes": [ { "route": "/", "redirect": "/course/", "statusCode": 302 }, - - { "route": "/course/static/js/static/media/python_core.tar*", "serve": "/course/python_core.tar" }, - - { "route": "/course/static/*", "serve": "/course/static/*" }, - { "route": "/course/pyodide/*", "serve": "/course/pyodide/*" }, - { "route": "/course/wheels/*", "serve": "/course/wheels/*" }, - { "route": "/course/packages/*", "serve": "/course/packages/*" }, - - { "route": "/course/*.whl", "serve": "/course/*.whl" }, - { "route": "/course/*.wasm", "serve": "/course/*.wasm" }, - { "route": "/course/*.data", "serve": "/course/*.data" }, - { "route": "/course/*.js", "serve": "/course/*.js" }, - { "route": "/course/*.css", "serve": "/course/*.css" }, - { "route": "/course/*.map", "serve": "/course/*.map" }, - { "route": "/course/*.json", "serve": "/course/*.json" }, - - { "route": "/course/python_stdlib.zip*", "serve": "/course/python_stdlib.zip" }, - - { "route": "/course/*", "rewrite": "/course/index.html" } + + { "route": "/course/*python_core.tar*", "serve": "/course/python_core.tar" }, + + { "route": "/course/static/*", "serve": "/course/static/*" }, + { "route": "/course/pyodide/*", "serve": "/course/pyodide/*" }, + { "route": "/course/wheels/*", "serve": "/course/wheels/*" }, + { "route": "/course/packages/*", "serve": "/course/packages/*" }, + + { "route": "/course/*.whl", "serve": "/course/*.whl" }, + { "route": "/course/*.wasm", "serve": "/course/*.wasm" }, + { "route": "/course/*.data", "serve": "/course/*.data" }, + { "route": "/course/*.js", "serve": "/course/*.js" }, + { "route": "/course/*.css", "serve": "/course/*.css" }, + { "route": "/course/*.map", "serve": "/course/*.map" }, + { "route": "/course/*.json", "serve": "/course/*.json" }, + + { "route": "/course/python_stdlib.zip*", "serve": "/course/python_stdlib.zip" }, + + { "route": "/course/*", "rewrite": "/course/index.html" } ], "mimeTypes": { ".wasm": "application/wasm", @@ -185,6 +185,27 @@ jobs: # Ensure non-empty test -s deploy/course/python_core.tar + - name: Vendor Pyodide core tar (python_core.tar) + run: | + set -e + OUT="${{ steps.norm.outputs.dir || steps.outdir.outputs.dir || 'dist/course' }}" + mkdir -p deploy/course + + # Read Pyodide version from the built app (no jq needed) + VER=$(node -p "require('./${OUT}/pyodide/package.json').version") + echo 
"Pyodide version: $VER" + + URL_JSDELIVR="https://cdn.jsdelivr.net/pyodide/v$%7BVER%7D/full/pyodide-core-$%7BVER%7D.tar.bz2" + URL_GH="https://github.com/pyodide/pyodide/releases/download/$%7BVER%7D/pyodide-core-$%7BVER%7D.tar.bz2" + + echo "Fetching core tar from jsDelivr..." + if ! curl -fsSL "$URL_JSDELIVR" -o deploy/course/python_core.tar ; then + echo "jsDelivr failed; trying GitHub releases..." + curl -fsSL "$URL_GH" -o deploy/course/python_core.tar + fi + + # Must exist and be non-empty + test -s deploy/course/python_core.tar # --- Final sanity: files that must exist in the artifact --- @@ -208,6 +229,24 @@ jobs: test -f deploy/course/python_stdlib.zip test -s deploy/course/python_core.tar + # 1) Show the exact routing file we’ll upload (to catch ordering/typos) + - name: Print staticwebapp.config.json + run: | + echo "----- staticwebapp.config.json -----" + cat -n deploy/staticwebapp.config.json + + # 2) Prove the file exists and is a bzip2 tar (Pyodide core) and non-empty + - name: Assert core & stdlib present and valid + run: | + ls -l deploy/course/python_stdlib.zip + ls -l deploy/course/python_core.tar + # core must exist and be non-empty + test -s deploy/course/python_core.tar + # should identify as bzip2-compressed tar + sudo apt-get update -y >/dev/null 2>&1 || true + sudo apt-get install -y file >/dev/null 2>&1 || true + file deploy/course/python_core.tar || true + # --- Upload to SWA (upload only; skip Oryx) --- - name: Deploy From 91df8af332c9e83dfe19fbfde7e1473aed609356 Mon Sep 17 00:00:00 2001 From: llodewyks Date: Thu, 16 Oct 2025 09:24:45 +0200 Subject: [PATCH 034/108] Update azure-static-web-app.yml --- .github/workflows/azure-static-web-app.yml | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/.github/workflows/azure-static-web-app.yml b/.github/workflows/azure-static-web-app.yml index 276e023a..f2fab1c8 100644 --- a/.github/workflows/azure-static-web-app.yml +++ b/.github/workflows/azure-static-web-app.yml @@ -129,9 +129,9 @@ jobs: cat > deploy/staticwebapp.config.json <<'JSON' { "routes": [ - { "route": "/", "redirect": "/course/", "statusCode": 302 }, - - { "route": "/course/*python_core.tar*", "serve": "/course/python_core.tar" }, + { "route": "/", "redirect": "/course/", "statusCode": 302 }, + + { "route": "/course/static/js/static/media/python_core.tar*", "serve": "/course/python_core.tar" }, { "route": "/course/static/*", "serve": "/course/static/*" }, { "route": "/course/pyodide/*", "serve": "/course/pyodide/*" }, From 31b573220b2934806b9ac7c44402e2996683cf15 Mon Sep 17 00:00:00 2001 From: llodewyks Date: Thu, 16 Oct 2025 10:08:55 +0200 Subject: [PATCH 035/108] Update azure-static-web-app.yml --- .github/workflows/azure-static-web-app.yml | 49 +++++++++------------- 1 file changed, 19 insertions(+), 30 deletions(-) diff --git a/.github/workflows/azure-static-web-app.yml b/.github/workflows/azure-static-web-app.yml index f2fab1c8..69fae44e 100644 --- a/.github/workflows/azure-static-web-app.yml +++ b/.github/workflows/azure-static-web-app.yml @@ -161,51 +161,40 @@ jobs: BASH - - name: Vendor Pyodide core tar (python_core.tar) - run: | - set -e - # Read version from the built files - OUT="${{ steps.norm.outputs.dir || steps.outdir.outputs.dir || 'dist/course' }}" - VER="$(jq -r .version "$OUT/pyodide/package.json" 2>/dev/null || node -p 'require("./'"$OUT"'/pyodide/package.json").version')" - echo "Pyodide version: $VER" - - mkdir -p deploy/course - # Try jsDelivr first, then GitHub releases as fallback - 
URL_JSDELIVR="https://cdn.jsdelivr.net/pyodide/v$%7BVER%7D/full/pyodide-core-$%7BVER%7D.tar.bz2" - URL_GH="https://github.com/pyodide/pyodide/releases/download/$%7BVER%7D/pyodide-core-$%7BVER%7D.tar.bz2" - - echo "Fetching core from $URL_JSDELIVR ..." - if ! curl -fsSL "$URL_JSDELIVR" -o deploy/course/python_core.tar ; then - echo "jsDelivr failed; trying GitHub release $URL_GH ..." - curl -fsSL "$URL_GH" -o deploy/course/python_core.tar - fi - - # Sanity: it should be a bzip2 tar (magic check) - file deploy/course/python_core.tar || true - # Ensure non-empty - test -s deploy/course/python_core.tar - - name: Vendor Pyodide core tar (python_core.tar) run: | set -e OUT="${{ steps.norm.outputs.dir || steps.outdir.outputs.dir || 'dist/course' }}" mkdir -p deploy/course - - # Read Pyodide version from the built app (no jq needed) VER=$(node -p "require('./${OUT}/pyodide/package.json').version") echo "Pyodide version: $VER" - URL_JSDELIVR="https://cdn.jsdelivr.net/pyodide/v$%7BVER%7D/full/pyodide-core-$%7BVER%7D.tar.bz2" URL_GH="https://github.com/pyodide/pyodide/releases/download/$%7BVER%7D/pyodide-core-$%7BVER%7D.tar.bz2" - echo "Fetching core tar from jsDelivr..." if ! curl -fsSL "$URL_JSDELIVR" -o deploy/course/python_core.tar ; then echo "jsDelivr failed; trying GitHub releases..." curl -fsSL "$URL_GH" -o deploy/course/python_core.tar fi - - # Must exist and be non-empty test -s deploy/course/python_core.tar + + - name: Replace hashed python_core.load_by_url files with the real tar + run: | + set -e + CORE="deploy/course/python_core.tar" + test -s "$CORE" + echo "Looking for hashed placeholders under /static/js/static/media/…" + mapfile -t TARGETS < <(find deploy/course -type f -path "*/static/js/static/media/python_core.tar*.load_by_url" | sort || true) + if [ "${#TARGETS[@]}" -eq 0 ]; then + echo "No hashed python_core.tar*.load_by_url files found."; exit 0 + fi + printf 'Found %s target(s):\n' "${#TARGETS[@]}" + printf '%s\n' "${TARGETS[@]}" + for f in "${TARGETS[@]}"; do + echo "Overwriting $f with core tar bytes…" + cp -f "$CORE" "$f" + done + echo "Done." 
+ # --- Final sanity: files that must exist in the artifact --- From 96496acb614a273773e6a51e4ce503d6cd11e09b Mon Sep 17 00:00:00 2001 From: llodewyks Date: Thu, 16 Oct 2025 10:22:11 +0200 Subject: [PATCH 036/108] Update azure-static-web-app.yml --- .github/workflows/azure-static-web-app.yml | 26 +++++++++++++++------- 1 file changed, 18 insertions(+), 8 deletions(-) diff --git a/.github/workflows/azure-static-web-app.yml b/.github/workflows/azure-static-web-app.yml index 69fae44e..a1bea7f3 100644 --- a/.github/workflows/azure-static-web-app.yml +++ b/.github/workflows/azure-static-web-app.yml @@ -130,6 +130,7 @@ jobs: { "routes": [ { "route": "/", "redirect": "/course/", "statusCode": 302 }, + { "route": "/course/python_core.tar*", "serve": "/course/python_core.tar" }, { "route": "/course/static/js/static/media/python_core.tar*", "serve": "/course/python_core.tar" }, @@ -177,23 +178,32 @@ jobs: fi test -s deploy/course/python_core.tar - - name: Replace hashed python_core.load_by_url files with the real tar - run: | + - name: Replace hashed python_core.load_by_url files with the real tar (any path) + run: | set -e CORE="deploy/course/python_core.tar" - test -s "$CORE" - echo "Looking for hashed placeholders under /static/js/static/media/…" - mapfile -t TARGETS < <(find deploy/course -type f -path "*/static/js/static/media/python_core.tar*.load_by_url" | sort || true) + test -s "$CORE" # must exist and be non-empty + + echo "Searching for ANY placeholders named python_core.tar*.load_by_url under deploy/course…" + mapfile -t TARGETS < <(find deploy/course -type f -name "python_core.tar*.load_by_url" | sort || true) + if [ "${#TARGETS[@]}" -eq 0 ]; then - echo "No hashed python_core.tar*.load_by_url files found."; exit 0 + echo "No placeholders found. Showing a few candidate paths for context:" + find deploy/course -maxdepth 4 -type d -path "*/static/js*" -print || true + exit 0 fi - printf 'Found %s target(s):\n' "${#TARGETS[@]}" + + echo "Found ${#TARGETS[@]} target(s):" printf '%s\n' "${TARGETS[@]}" + for f in "${TARGETS[@]}"; do echo "Overwriting $f with core tar bytes…" cp -f "$CORE" "$f" done - echo "Done." 
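# Aside: one way to prove the overwrite actually happened is a byte-for-byte
# compare; cmp -s is silent and exits non-zero at the first differing byte.
# The glob below assumes CRA's hashed media layout, so treat it as a sketch.
CORE="deploy/course/python_core.tar"
for f in deploy/course/static/js/static/media/python_core.tar.*.load_by_url; do
  [ -e "$f" ] || continue   # the glob may match nothing
  if cmp -s "$CORE" "$f"; then
    echo "OK: $f is byte-identical to the core tar"
  else
    echo "MISMATCH: $f differs from the core tar" >&2
  fi
done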
+ + echo "Post-replacement file details:" + ls -l "$CORE" + ls -l "${TARGETS[@]}" From b251eb40427588459ab7c910c5db627d59fb058a Mon Sep 17 00:00:00 2001 From: llodewyks Date: Thu, 16 Oct 2025 10:35:13 +0200 Subject: [PATCH 037/108] Update azure-static-web-app.yml --- .github/workflows/azure-static-web-app.yml | 120 ++++++++------------- 1 file changed, 46 insertions(+), 74 deletions(-) diff --git a/.github/workflows/azure-static-web-app.yml b/.github/workflows/azure-static-web-app.yml index a1bea7f3..a7f22469 100644 --- a/.github/workflows/azure-static-web-app.yml +++ b/.github/workflows/azure-static-web-app.yml @@ -13,7 +13,7 @@ jobs: - name: Checkout uses: actions/checkout@v4 - # --- Python phase (dependencies + content generation) --- + # --- Python phase (generate content used by the frontend build) --- - name: Set up Python uses: actions/setup-python@v5 with: @@ -46,7 +46,6 @@ jobs: working-directory: frontend run: npm ci - # Keep CRA assets on relative URLs in case the app isn't forcing /course - name: Override CRA homepage to relative URLs working-directory: frontend run: | @@ -58,8 +57,7 @@ jobs: REACT_APP_USE_FIREBASE_EMULATORS: '1' REACT_APP_FIREBASE_STAGING: '1' CI: false - run: | - ./scripts/build.sh + run: ./scripts/build.sh # --- Figure out where the build landed (dist/course or frontend/course) --- - name: Detect course output dir @@ -79,34 +77,25 @@ jobs: run: | OUT="${{ steps.outdir.outputs.dir }}" echo "OUT=$OUT" - - echo "Tree (2 levels):" find "$OUT" -maxdepth 2 -type d -print - echo "Listing $OUT/pyodide:" ls -la "$OUT/pyodide" || true - # Find stdlib and (optional) core CORE="$(find "$OUT" -type f -name 'python_core*.tar' | head -n1 || true)" STDLIB="$(find "$OUT" -type f -name 'python_stdlib*.zip' | head -n1 || true)" echo "Found CORE=$CORE" echo "Found STDLIB=$STDLIB" - # Normalize: always copy stdlib to course root; core only if present if [ -n "$STDLIB" ]; then cp -f "$STDLIB" "$OUT/python_stdlib.zip"; fi if [ -n "$CORE" ]; then cp -f "$CORE" "$OUT/python_core.tar"; fi - # Must-haves test -f "$OUT/index.html" test -d "$OUT/pyodide" - test -f "$OUT/python_stdlib.zip" # required + test -f "$OUT/python_stdlib.zip" - # Optional: core tar (don’t fail if absent) if [ -f "$OUT/python_core.tar" ]; then echo "python_core.tar present"; else echo "python_core.tar not present (ok)"; fi - echo "Course root after normalization:" ls -la "$OUT" | sed -n '1,200p' - echo "dir=$OUT" >> "$GITHUB_OUTPUT" # --- Stage deploy/ for SWA (keep /course path) --- @@ -118,50 +107,50 @@ jobs: rm -rf deploy mkdir -p deploy/course cp -R "$OUT"/* deploy/course/ - + # Root -> /course/ cat > deploy/index.html <<'HTML' HTML - + # Valid JSON only — no comments/trailing commas cat > deploy/staticwebapp.config.json <<'JSON' { "routes": [ { "route": "/", "redirect": "/course/", "statusCode": 302 }, - { "route": "/course/python_core.tar*", "serve": "/course/python_core.tar" }, - + + { "route": "/course/python_core.tar*", "serve": "/course/python_core.tar" }, { "route": "/course/static/js/static/media/python_core.tar*", "serve": "/course/python_core.tar" }, - - { "route": "/course/static/*", "serve": "/course/static/*" }, - { "route": "/course/pyodide/*", "serve": "/course/pyodide/*" }, - { "route": "/course/wheels/*", "serve": "/course/wheels/*" }, + + { "route": "/course/static/*", "serve": "/course/static/*" }, + { "route": "/course/pyodide/*", "serve": "/course/pyodide/*" }, + { "route": "/course/wheels/*", "serve": "/course/wheels/*" }, { "route": "/course/packages/*", "serve": "/course/packages/*" }, 
- - { "route": "/course/*.whl", "serve": "/course/*.whl" }, - { "route": "/course/*.wasm", "serve": "/course/*.wasm" }, - { "route": "/course/*.data", "serve": "/course/*.data" }, - { "route": "/course/*.js", "serve": "/course/*.js" }, - { "route": "/course/*.css", "serve": "/course/*.css" }, - { "route": "/course/*.map", "serve": "/course/*.map" }, - { "route": "/course/*.json", "serve": "/course/*.json" }, - + + { "route": "/course/*.whl", "serve": "/course/*.whl" }, + { "route": "/course/*.wasm", "serve": "/course/*.wasm" }, + { "route": "/course/*.data", "serve": "/course/*.data" }, + { "route": "/course/*.js", "serve": "/course/*.js" }, + { "route": "/course/*.css", "serve": "/course/*.css" }, + { "route": "/course/*.map", "serve": "/course/*.map" }, + { "route": "/course/*.json", "serve": "/course/*.json" }, + { "route": "/course/python_stdlib.zip*", "serve": "/course/python_stdlib.zip" }, - + { "route": "/course/*", "rewrite": "/course/index.html" } ], - "mimeTypes": { - ".wasm": "application/wasm", - ".whl": "application/octet-stream", - ".data": "application/octet-stream", - ".load_by_url": "application/octet-stream" - } + "mimeTypes": { + ".wasm": "application/wasm", + ".whl": "application/octet-stream", + ".data": "application/octet-stream", + ".load_by_url": "application/octet-stream" + } } JSON - BASH + # --- Vendor Pyodide core tar and place it at the exact requested bytes --- - name: Vendor Pyodide core tar (python_core.tar) run: | set -e @@ -169,84 +158,67 @@ jobs: mkdir -p deploy/course VER=$(node -p "require('./${OUT}/pyodide/package.json').version") echo "Pyodide version: $VER" + URL_JSDELIVR="https://cdn.jsdelivr.net/pyodide/v$%7BVER%7D/full/pyodide-core-$%7BVER%7D.tar.bz2" URL_GH="https://github.com/pyodide/pyodide/releases/download/$%7BVER%7D/pyodide-core-$%7BVER%7D.tar.bz2" + echo "Fetching core tar from jsDelivr..." - if ! curl -fsSL "$URL_JSDELIVR" -o deploy/course/python_core.tar ; then + if ! curl -LfsS --retry 3 --retry-delay 2 -H "User-Agent: GitHubActions" \ + "$URL_JSDELIVR" -o deploy/course/python_core.tar ; then echo "jsDelivr failed; trying GitHub releases..." - curl -fsSL "$URL_GH" -o deploy/course/python_core.tar + curl -LfsS --retry 3 --retry-delay 2 -H "User-Agent: GitHubActions" \ + "$URL_GH" -o deploy/course/python_core.tar fi test -s deploy/course/python_core.tar - - - name: Replace hashed python_core.load_by_url files with the real tar (any path) - run: | + + - name: Replace hashed python_core.load_by_url files with the real tar (any path) + run: | set -e CORE="deploy/course/python_core.tar" - test -s "$CORE" # must exist and be non-empty - + test -s "$CORE" + echo "Searching for ANY placeholders named python_core.tar*.load_by_url under deploy/course…" mapfile -t TARGETS < <(find deploy/course -type f -name "python_core.tar*.load_by_url" | sort || true) - + if [ "${#TARGETS[@]}" -eq 0 ]; then - echo "No placeholders found. Showing a few candidate paths for context:" + echo "No placeholders found. 
Showing candidate media dirs:" find deploy/course -maxdepth 4 -type d -path "*/static/js*" -print || true exit 0 fi - + echo "Found ${#TARGETS[@]} target(s):" printf '%s\n' "${TARGETS[@]}" - + for f in "${TARGETS[@]}"; do echo "Overwriting $f with core tar bytes…" cp -f "$CORE" "$f" done - + echo "Post-replacement file details:" ls -l "$CORE" ls -l "${TARGETS[@]}" - - # --- Final sanity: files that must exist in the artifact --- - name: Verify deploy artifact essentials run: | test -f deploy/course/index.html test -d deploy/course/pyodide - test -f deploy/course/python_stdlib.zip # required by runtime - # python_core.tar is optional in recent Pyodide builds - if [ -f deploy/course/python_core.tar ]; then - echo "python_core.tar present (optional)"; - else - echo "python_core.tar not present (ok)"; - fi - - - name: Assert stdlib present - run: test -f deploy/course/python_stdlib.zip - - - name: Assert core & stdlib present - run: | - test -f deploy/course/python_stdlib.zip + test -s deploy/course/python_stdlib.zip test -s deploy/course/python_core.tar - # 1) Show the exact routing file we’ll upload (to catch ordering/typos) - name: Print staticwebapp.config.json run: | echo "----- staticwebapp.config.json -----" cat -n deploy/staticwebapp.config.json - - # 2) Prove the file exists and is a bzip2 tar (Pyodide core) and non-empty + - name: Assert core & stdlib present and valid run: | ls -l deploy/course/python_stdlib.zip ls -l deploy/course/python_core.tar - # core must exist and be non-empty - test -s deploy/course/python_core.tar - # should identify as bzip2-compressed tar sudo apt-get update -y >/dev/null 2>&1 || true sudo apt-get install -y file >/dev/null 2>&1 || true file deploy/course/python_core.tar || true - # --- Upload to SWA (upload only; skip Oryx) --- - name: Deploy uses: Azure/static-web-apps-deploy@v1 From 3650e7e1f1388910e8aa43c0f1d28aafac98eafd Mon Sep 17 00:00:00 2001 From: llodewyks Date: Thu, 16 Oct 2025 10:45:22 +0200 Subject: [PATCH 038/108] Update azure-static-web-app.yml --- .github/workflows/azure-static-web-app.yml | 132 +++++++-------------- 1 file changed, 43 insertions(+), 89 deletions(-) diff --git a/.github/workflows/azure-static-web-app.yml b/.github/workflows/azure-static-web-app.yml index a7f22469..422ca0a8 100644 --- a/.github/workflows/azure-static-web-app.yml +++ b/.github/workflows/azure-static-web-app.yml @@ -13,7 +13,7 @@ jobs: - name: Checkout uses: actions/checkout@v4 - # --- Python phase (generate content used by the frontend build) --- + # --- Python phase (generate content for frontend) --- - name: Set up Python uses: actions/setup-python@v5 with: @@ -29,14 +29,14 @@ jobs: chmod +x scripts/generate.sh || true chmod +x scripts/build.sh || true - - name: Generate site content (translations/static files) + - name: Generate site content env: FUTURECODER_LANGUAGE: en run: | poetry install --no-root ./scripts/generate.sh - # --- Node phase (install deps + project build script) --- + # --- Node phase (install deps + build) --- - name: Set up Node uses: actions/setup-node@v4 with: @@ -59,7 +59,7 @@ jobs: CI: false run: ./scripts/build.sh - # --- Figure out where the build landed (dist/course or frontend/course) --- + # --- Where did the build land? 
(dist/course or frontend/course) --- - name: Detect course output dir id: outdir run: | @@ -70,35 +70,25 @@ jobs: else echo "No course output found"; exit 1 fi - echo "Detected output dir: $(cat $GITHUB_OUTPUT)" + echo "Detected: $(cat $GITHUB_OUTPUT)" - - name: Verify & normalize core archives + - name: Verify & normalize archives id: norm run: | OUT="${{ steps.outdir.outputs.dir }}" echo "OUT=$OUT" - find "$OUT" -maxdepth 2 -type d -print - - ls -la "$OUT/pyodide" || true - - CORE="$(find "$OUT" -type f -name 'python_core*.tar' | head -n1 || true)" - STDLIB="$(find "$OUT" -type f -name 'python_stdlib*.zip' | head -n1 || true)" - echo "Found CORE=$CORE" - echo "Found STDLIB=$STDLIB" - - if [ -n "$STDLIB" ]; then cp -f "$STDLIB" "$OUT/python_stdlib.zip"; fi - if [ -n "$CORE" ]; then cp -f "$CORE" "$OUT/python_core.tar"; fi - + ls -la "$OUT" | sed -n '1,200p' test -f "$OUT/index.html" test -d "$OUT/pyodide" + # normalize stdlib/core names if present + STDLIB="$(find "$OUT" -type f -name 'python_stdlib*.zip' | head -n1 || true)" + CORE="$(find "$OUT" -type f -name 'python_core*.tar' | head -n1 || true)" + [ -n "$STDLIB" ] && cp -f "$STDLIB" "$OUT/python_stdlib.zip" || true + [ -n "$CORE" ] && cp -f "$CORE" "$OUT/python_core.tar" || true test -f "$OUT/python_stdlib.zip" - - if [ -f "$OUT/python_core.tar" ]; then echo "python_core.tar present"; else echo "python_core.tar not present (ok)"; fi - - ls -la "$OUT" | sed -n '1,200p' echo "dir=$OUT" >> "$GITHUB_OUTPUT" - # --- Stage deploy/ for SWA (keep /course path) --- + # --- Stage deploy folder --- - name: Stage files for SWA run: | bash -eo pipefail <<'BASH' @@ -114,30 +104,11 @@ jobs: HTML - # Valid JSON only — no comments/trailing commas + # Keep routes *minimal* (SPA fallback + correct MIME) cat > deploy/staticwebapp.config.json <<'JSON' { "routes": [ { "route": "/", "redirect": "/course/", "statusCode": 302 }, - - { "route": "/course/python_core.tar*", "serve": "/course/python_core.tar" }, - { "route": "/course/static/js/static/media/python_core.tar*", "serve": "/course/python_core.tar" }, - - { "route": "/course/static/*", "serve": "/course/static/*" }, - { "route": "/course/pyodide/*", "serve": "/course/pyodide/*" }, - { "route": "/course/wheels/*", "serve": "/course/wheels/*" }, - { "route": "/course/packages/*", "serve": "/course/packages/*" }, - - { "route": "/course/*.whl", "serve": "/course/*.whl" }, - { "route": "/course/*.wasm", "serve": "/course/*.wasm" }, - { "route": "/course/*.data", "serve": "/course/*.data" }, - { "route": "/course/*.js", "serve": "/course/*.js" }, - { "route": "/course/*.css", "serve": "/course/*.css" }, - { "route": "/course/*.map", "serve": "/course/*.map" }, - { "route": "/course/*.json", "serve": "/course/*.json" }, - - { "route": "/course/python_stdlib.zip*", "serve": "/course/python_stdlib.zip" }, - { "route": "/course/*", "rewrite": "/course/index.html" } ], "mimeTypes": { @@ -150,7 +121,7 @@ jobs: JSON BASH - # --- Vendor Pyodide core tar and place it at the exact requested bytes --- + # --- Vend Pyodide core tar as deploy/course/python_core.tar --- - name: Vendor Pyodide core tar (python_core.tar) run: | set -e @@ -158,68 +129,51 @@ jobs: mkdir -p deploy/course VER=$(node -p "require('./${OUT}/pyodide/package.json').version") echo "Pyodide version: $VER" - - URL_JSDELIVR="https://cdn.jsdelivr.net/pyodide/v$%7BVER%7D/full/pyodide-core-$%7BVER%7D.tar.bz2" - URL_GH="https://github.com/pyodide/pyodide/releases/download/$%7BVER%7D/pyodide-core-$%7BVER%7D.tar.bz2" - - echo "Fetching 
core tar from jsDelivr..." - if ! curl -LfsS --retry 3 --retry-delay 2 -H "User-Agent: GitHubActions" \ - "$URL_JSDELIVR" -o deploy/course/python_core.tar ; then - echo "jsDelivr failed; trying GitHub releases..." - curl -LfsS --retry 3 --retry-delay 2 -H "User-Agent: GitHubActions" \ - "$URL_GH" -o deploy/course/python_core.tar + URL1="https://cdn.jsdelivr.net/pyodide/v$%7BVER%7D/full/pyodide-core-$%7BVER%7D.tar.bz2" + URL2="https://github.com/pyodide/pyodide/releases/download/$%7BVER%7D/pyodide-core-$%7BVER%7D.tar.bz2" + echo "Fetching core tar..." + if ! curl -LfsS --retry 3 --retry-delay 2 -H "User-Agent: GitHubActions" "$URL1" -o deploy/course/python_core.tar; then + curl -LfsS --retry 3 --retry-delay 2 -H "User-Agent: GitHubActions" "$URL2" -o deploy/course/python_core.tar fi test -s deploy/course/python_core.tar - - name: Replace hashed python_core.load_by_url files with the real tar (any path) + # --- Create the exact hashed files your app requests (any path) --- + - name: Materialize hashed python_core.load_by_url files run: | set -e CORE="deploy/course/python_core.tar" test -s "$CORE" - - echo "Searching for ANY placeholders named python_core.tar*.load_by_url under deploy/course…" - mapfile -t TARGETS < <(find deploy/course -type f -name "python_core.tar*.load_by_url" | sort || true) - - if [ "${#TARGETS[@]}" -eq 0 ]; then - echo "No placeholders found. Showing candidate media dirs:" - find deploy/course -maxdepth 4 -type d -path "*/static/js*" -print || true - exit 0 + # extract all unique filenames referenced in JS bundles + mapfile -t NAMES < <(grep -rhoE "python_core\.tar\.[a-f0-9]{6,}\.load_by_url" deploy/course | sort -u || true) + if [ "${#NAMES[@]}" -eq 0 ]; then + echo "No hashed names found in bundles." + else + echo "Found ${#NAMES[@]} hashed filenames:" + printf '%s\n' "${NAMES[@]}" + # place the real tar at BOTH likely locations + for name in "${NAMES[@]}"; do + for base in "deploy/course" "deploy/course/static/js/static/media"; do + mkdir -p "$base" + cp -f "$CORE" "$base/$name" + done + done fi + echo "Done." 
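# Aside: the grep above, replayed against a fabricated one-line bundle so the
# regex is easy to sanity-check; the hash and path are invented.
printf 'fetch("static/media/python_core.tar.1a2b3c4d.load_by_url")' > /tmp/bundle.js
grep -rhoE 'python_core\.tar\.[a-f0-9]{6,}\.load_by_url' /tmp/bundle.js | sort -u
# prints: python_core.tar.1a2b3c4d.load_by_url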
- echo "Found ${#TARGETS[@]} target(s):" - printf '%s\n' "${TARGETS[@]}" - - for f in "${TARGETS[@]}"; do - echo "Overwriting $f with core tar bytes…" - cp -f "$CORE" "$f" - done - - echo "Post-replacement file details:" - ls -l "$CORE" - ls -l "${TARGETS[@]}" - - # --- Final sanity: files that must exist in the artifact --- + # --- Final checks --- - name: Verify deploy artifact essentials run: | - test -f deploy/course/index.html + test -s deploy/course/index.html test -d deploy/course/pyodide test -s deploy/course/python_stdlib.zip test -s deploy/course/python_core.tar + echo "Listing any materialized hashed files:" + find deploy/course -type f -name "python_core.tar*.load_by_url" -maxdepth 4 -exec ls -l {} \; - name: Print staticwebapp.config.json - run: | - echo "----- staticwebapp.config.json -----" - cat -n deploy/staticwebapp.config.json - - - name: Assert core & stdlib present and valid - run: | - ls -l deploy/course/python_stdlib.zip - ls -l deploy/course/python_core.tar - sudo apt-get update -y >/dev/null 2>&1 || true - sudo apt-get install -y file >/dev/null 2>&1 || true - file deploy/course/python_core.tar || true + run: cat -n deploy/staticwebapp.config.json - # --- Upload to SWA (upload only; skip Oryx) --- + # --- Deploy to SWA (no Oryx build) --- - name: Deploy uses: Azure/static-web-apps-deploy@v1 with: From 80fdcfa99a210fb3614b385c41888fa1ca04a907 Mon Sep 17 00:00:00 2001 From: llodewyks Date: Thu, 16 Oct 2025 11:16:51 +0200 Subject: [PATCH 039/108] Update azure-static-web-app.yml --- .github/workflows/azure-static-web-app.yml | 37 +++++++++++++--------- 1 file changed, 22 insertions(+), 15 deletions(-) diff --git a/.github/workflows/azure-static-web-app.yml b/.github/workflows/azure-static-web-app.yml index 422ca0a8..20bee9fb 100644 --- a/.github/workflows/azure-static-web-app.yml +++ b/.github/workflows/azure-static-web-app.yml @@ -138,27 +138,34 @@ jobs: test -s deploy/course/python_core.tar # --- Create the exact hashed files your app requests (any path) --- - - name: Materialize hashed python_core.load_by_url files + - name: Overwrite hashed python_core placeholders with the real tar (file-system based) run: | set -e CORE="deploy/course/python_core.tar" - test -s "$CORE" - # extract all unique filenames referenced in JS bundles - mapfile -t NAMES < <(grep -rhoE "python_core\.tar\.[a-f0-9]{6,}\.load_by_url" deploy/course | sort -u || true) - if [ "${#NAMES[@]}" -eq 0 ]; then - echo "No hashed names found in bundles." + test -s "$CORE" # must exist and be non-empty + + echo "Finding placeholder files named python_core.tar*.load_by_url under deploy/course…" + # Find in ANY path (root or static/js/static/media); list them deterministically + mapfile -t TARGETS < <(find deploy/course -type f -name "python_core.tar*.load_by_url" | sort || true) + + echo "Found ${#TARGETS[@]} placeholder file(s)." + if [ "${#TARGETS[@]}" -eq 0 ]; then + echo "No placeholders found. 
Show likely media dirs for context:" + find deploy/course -maxdepth 5 -type d -path "*/static/js*" -print || true + # Do NOT fail here; some builds may request only the root variant which will be covered by routes else - echo "Found ${#NAMES[@]} hashed filenames:" - printf '%s\n' "${NAMES[@]}" - # place the real tar at BOTH likely locations - for name in "${NAMES[@]}"; do - for base in "deploy/course" "deploy/course/static/js/static/media"; do - mkdir -p "$base" - cp -f "$CORE" "$base/$name" - done + printf '%s\n' "${TARGETS[@]}" + for f in "${TARGETS[@]}"; do + echo "Overwriting $f with core tar bytes…" + cp -f "$CORE" "$f" done + echo "Validate file types:" + sudo apt-get update -y >/dev/null 2>&1 || true + sudo apt-get install -y file >/dev/null 2>&1 || true + file "$CORE" || true + for f in "${TARGETS[@]}"; do file "$f" || true; done fi - echo "Done." + # --- Final checks --- - name: Verify deploy artifact essentials From 917dd8c05e8172f5de2a05a71cf277eed0399514 Mon Sep 17 00:00:00 2001 From: llodewyks Date: Thu, 16 Oct 2025 11:31:18 +0200 Subject: [PATCH 040/108] Update azure-static-web-app.yml --- .github/workflows/azure-static-web-app.yml | 59 ++++++++++++++++------ 1 file changed, 44 insertions(+), 15 deletions(-) diff --git a/.github/workflows/azure-static-web-app.yml b/.github/workflows/azure-static-web-app.yml index 20bee9fb..4ce85432 100644 --- a/.github/workflows/azure-static-web-app.yml +++ b/.github/workflows/azure-static-web-app.yml @@ -122,19 +122,50 @@ jobs: BASH # --- Vend Pyodide core tar as deploy/course/python_core.tar --- - - name: Vendor Pyodide core tar (python_core.tar) + - name: Vendor Pyodide core tar (python_core.tar) with validation run: | set -e OUT="${{ steps.norm.outputs.dir || steps.outdir.outputs.dir || 'dist/course' }}" mkdir -p deploy/course + VER=$(node -p "require('./${OUT}/pyodide/package.json').version") echo "Pyodide version: $VER" - URL1="https://cdn.jsdelivr.net/pyodide/v$%7BVER%7D/full/pyodide-core-$%7BVER%7D.tar.bz2" - URL2="https://github.com/pyodide/pyodide/releases/download/$%7BVER%7D/pyodide-core-$%7BVER%7D.tar.bz2" - echo "Fetching core tar..." - if ! curl -LfsS --retry 3 --retry-delay 2 -H "User-Agent: GitHubActions" "$URL1" -o deploy/course/python_core.tar; then - curl -LfsS --retry 3 --retry-delay 2 -H "User-Agent: GitHubActions" "$URL2" -o deploy/course/python_core.tar + + CANDIDATES=( + "https://cdn.jsdelivr.net/pyodide/v$%7BVER%7D/full/pyodide-core-$%7BVER%7D.tar.bz2" + "https://repo.pyodide.org/pyodide/v$%7BVER%7D/full/pyodide-core-$%7BVER%7D.tar.bz2" + "https://github.com/pyodide/pyodide/releases/download/$%7BVER%7D/pyodide-core-$%7BVER%7D.tar.bz2?download=1" + ) + + OK="" + for URL in "${CANDIDATES[@]}"; do + echo "Trying $URL" + if curl -LfsS --retry 3 --retry-delay 2 \ + -H "User-Agent: GitHubActions" \ + -H "Accept: application/octet-stream" \ + "$URL" -o deploy/course/python_core.tar; then + # validate magic: bzip2 starts with BZh + MAGIC=$(head -c 3 deploy/course/python_core.tar || true) + if [ "$MAGIC" = $'BZh' ]; then + echo "Fetched a VALID bzip2 tar from: $URL" + OK="yes" + break + else + echo "Downloaded bytes are NOT a bzip2 tar (magic='$MAGIC'). Trying next source…" + fi + else + echo "Download failed from: $URL" + fi + done + + if [ -z "$OK" ]; then + echo "ERROR: Could not fetch a valid pyodide core tar (bzip2)." 
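# Aside: dumping the first bytes of a suspect payload without mangling the
# terminal; the sed class maps non-printable bytes to dots, and od -c is an
# equivalent alternative with explicit escapes. A sketch of the log line below.
head -c 64 deploy/course/python_core.tar | sed -e 's/[^[:print:]\t]/./g'; echo
# or: head -c 64 deploy/course/python_core.tar | od -c | head -n 5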
+ echo "First bytes of the last attempt:" + head -c 200 deploy/course/python_core.tar | sed -e 's/[^[:print:]\t]/./g' || true + exit 1 fi + + # Extra sanity test -s deploy/course/python_core.tar # --- Create the exact hashed files your app requests (any path) --- @@ -142,31 +173,29 @@ jobs: run: | set -e CORE="deploy/course/python_core.tar" - test -s "$CORE" # must exist and be non-empty + test -s "$CORE" echo "Finding placeholder files named python_core.tar*.load_by_url under deploy/course…" - # Find in ANY path (root or static/js/static/media); list them deterministically mapfile -t TARGETS < <(find deploy/course -type f -name "python_core.tar*.load_by_url" | sort || true) + echo "Found ${#TARGETS[@]} placeholder(s)." - echo "Found ${#TARGETS[@]} placeholder file(s)." - if [ "${#TARGETS[@]}" -eq 0 ]; then - echo "No placeholders found. Show likely media dirs for context:" - find deploy/course -maxdepth 5 -type d -path "*/static/js*" -print || true - # Do NOT fail here; some builds may request only the root variant which will be covered by routes - else + if [ "${#TARGETS[@]}" -gt 0 ]; then printf '%s\n' "${TARGETS[@]}" for f in "${TARGETS[@]}"; do echo "Overwriting $f with core tar bytes…" cp -f "$CORE" "$f" done - echo "Validate file types:" + # prove they look like bzip2 too sudo apt-get update -y >/dev/null 2>&1 || true sudo apt-get install -y file >/dev/null 2>&1 || true file "$CORE" || true for f in "${TARGETS[@]}"; do file "$f" || true; done + else + echo "No placeholders found (that’s fine if routes serve from /course/python_core.tar)." fi + # --- Final checks --- - name: Verify deploy artifact essentials run: | From 2a9921d6352e237e8c4f11d10d0be4d4c442ce2b Mon Sep 17 00:00:00 2001 From: llodewyks Date: Thu, 16 Oct 2025 12:18:57 +0200 Subject: [PATCH 041/108] Update azure-static-web-app.yml --- .github/workflows/azure-static-web-app.yml | 22 +++++++++++++++++++--- 1 file changed, 19 insertions(+), 3 deletions(-) diff --git a/.github/workflows/azure-static-web-app.yml b/.github/workflows/azure-static-web-app.yml index 4ce85432..e3669cc6 100644 --- a/.github/workflows/azure-static-web-app.yml +++ b/.github/workflows/azure-static-web-app.yml @@ -107,9 +107,25 @@ jobs: # Keep routes *minimal* (SPA fallback + correct MIME) cat > deploy/staticwebapp.config.json <<'JSON' { - "routes": [ - { "route": "/", "redirect": "/course/", "statusCode": 302 }, - { "route": "/course/*", "rewrite": "/course/index.html" } + { "route": "/", "redirect": "/course/", "statusCode": 302 }, + + { "route": "/course/python_core.tar*", "serve": "/course/python_core.tar" }, + { "route": "/course/static/js/static/media/python_core.tar*", "serve": "/course/python_core.tar" }, + + { "route": "/course/static/*", "serve": "/course/static/*" }, + { "route": "/course/pyodide/*", "serve": "/course/pyodide/*" }, + { "route": "/course/wheels/*", "serve": "/course/wheels/*" }, + { "route": "/course/packages/*", "serve": "/course/packages/*" }, + + { "route": "/course/*.whl", "serve": "/course/*.whl" }, + { "route": "/course/*.wasm", "serve": "/course/*.wasm" }, + { "route": "/course/*.data", "serve": "/course/*.data" }, + { "route": "/course/*.js", "serve": "/course/*.js" }, + { "route": "/course/*.css", "serve": "/course/*.css" }, + { "route": "/course/*.map", "serve": "/course/*.map" }, + { "route": "/course/*.json", "serve": "/course/*.json" }, + + { "route": "/course/*", "rewrite": "/course/index.html" } ], "mimeTypes": { ".wasm": "application/wasm", From 5f8d9e08d6e859f98dc5a23ae9c528431b3b8b87 Mon Sep 17 00:00:00 2001 
From: llodewyks Date: Thu, 16 Oct 2025 12:31:08 +0200 Subject: [PATCH 042/108] Update azure-static-web-app.yml --- .github/workflows/azure-static-web-app.yml | 85 ++++++++++------------ 1 file changed, 38 insertions(+), 47 deletions(-) diff --git a/.github/workflows/azure-static-web-app.yml b/.github/workflows/azure-static-web-app.yml index e3669cc6..9f04c709 100644 --- a/.github/workflows/azure-static-web-app.yml +++ b/.github/workflows/azure-static-web-app.yml @@ -91,9 +91,8 @@ jobs: # --- Stage deploy folder --- - name: Stage files for SWA run: | - bash -eo pipefail <<'BASH' set -e - OUT="${{ steps.norm.outputs.dir || steps.outdir.outputs.dir || 'dist/course' }}" + OUT="${{ steps.norm.outputs.dir }}" rm -rf deploy mkdir -p deploy/course cp -R "$OUT"/* deploy/course/ @@ -104,55 +103,52 @@ jobs: HTML - # Keep routes *minimal* (SPA fallback + correct MIME) - cat > deploy/staticwebapp.config.json <<'JSON' - { - { "route": "/", "redirect": "/course/", "statusCode": 302 }, - - { "route": "/course/python_core.tar*", "serve": "/course/python_core.tar" }, - { "route": "/course/static/js/static/media/python_core.tar*", "serve": "/course/python_core.tar" }, - - { "route": "/course/static/*", "serve": "/course/static/*" }, - { "route": "/course/pyodide/*", "serve": "/course/pyodide/*" }, - { "route": "/course/wheels/*", "serve": "/course/wheels/*" }, - { "route": "/course/packages/*", "serve": "/course/packages/*" }, - - { "route": "/course/*.whl", "serve": "/course/*.whl" }, - { "route": "/course/*.wasm", "serve": "/course/*.wasm" }, - { "route": "/course/*.data", "serve": "/course/*.data" }, - { "route": "/course/*.js", "serve": "/course/*.js" }, - { "route": "/course/*.css", "serve": "/course/*.css" }, - { "route": "/course/*.map", "serve": "/course/*.map" }, - { "route": "/course/*.json", "serve": "/course/*.json" }, - - { "route": "/course/*", "rewrite": "/course/index.html" } + # --- Write & validate staticwebapp.config.json (valid JSON, one-wildcard routes first) --- + - name: Write & validate staticwebapp.config.json + run: | + set -e + node - <<'NODE' + const fs = require('fs'); + const cfg = { + routes: [ + { route: "/", redirect: "/course/", statusCode: 302 }, + + // Serve the real core tar for any hashed request at root or CRA media path + { route: "/course/python_core.tar*", serve: "/course/python_core.tar" }, + { route: "/course/static/js/static/media/python_core.tar*", serve: "/course/python_core.tar" }, + + // Let index.html handle SPA routes; static assets are direct + { route: "/course/*", rewrite: "/course/index.html" } ], - "mimeTypes": { + mimeTypes: { ".wasm": "application/wasm", ".whl": "application/octet-stream", ".data": "application/octet-stream", ".load_by_url": "application/octet-stream" } - } - JSON - BASH - - # --- Vend Pyodide core tar as deploy/course/python_core.tar --- + }; + const p = "deploy/staticwebapp.config.json"; + fs.writeFileSync(p, JSON.stringify(cfg, null, 2)); + JSON.parse(fs.readFileSync(p, "utf8")); // validate + console.log("Wrote valid", p); + NODE + + # --- Vend Pyodide core tar as deploy/course/python_core.tar (with validation) --- - name: Vendor Pyodide core tar (python_core.tar) with validation run: | set -e - OUT="${{ steps.norm.outputs.dir || steps.outdir.outputs.dir || 'dist/course' }}" + OUT="${{ steps.norm.outputs.dir }}" mkdir -p deploy/course - + VER=$(node -p "require('./${OUT}/pyodide/package.json').version") echo "Pyodide version: $VER" - + CANDIDATES=( 
"https://cdn.jsdelivr.net/pyodide/v$%7BVER%7D/full/pyodide-core-$%7BVER%7D.tar.bz2" "https://repo.pyodide.org/pyodide/v$%7BVER%7D/full/pyodide-core-$%7BVER%7D.tar.bz2" "https://github.com/pyodide/pyodide/releases/download/$%7BVER%7D/pyodide-core-$%7BVER%7D.tar.bz2?download=1" ) - + OK="" for URL in "${CANDIDATES[@]}"; do echo "Trying $URL" @@ -160,7 +156,6 @@ jobs: -H "User-Agent: GitHubActions" \ -H "Accept: application/octet-stream" \ "$URL" -o deploy/course/python_core.tar; then - # validate magic: bzip2 starts with BZh MAGIC=$(head -c 3 deploy/course/python_core.tar || true) if [ "$MAGIC" = $'BZh' ]; then echo "Fetched a VALID bzip2 tar from: $URL" @@ -173,45 +168,41 @@ jobs: echo "Download failed from: $URL" fi done - + if [ -z "$OK" ]; then echo "ERROR: Could not fetch a valid pyodide core tar (bzip2)." echo "First bytes of the last attempt:" head -c 200 deploy/course/python_core.tar | sed -e 's/[^[:print:]\t]/./g' || true exit 1 fi - - # Extra sanity + test -s deploy/course/python_core.tar - # --- Create the exact hashed files your app requests (any path) --- + # --- Overwrite any hashed placeholders with the real tar (covers any path) --- - name: Overwrite hashed python_core placeholders with the real tar (file-system based) run: | set -e CORE="deploy/course/python_core.tar" test -s "$CORE" - + echo "Finding placeholder files named python_core.tar*.load_by_url under deploy/course…" - mapfile -t TARGETS < <(find deploy/course -type f -name "python_core.tar*.load_by_url" | sort || true) + mapfile -t TARGETS < <(find deploy/course -maxdepth 5 -type f -name "python_core.tar*.load_by_url" | sort || true) echo "Found ${#TARGETS[@]} placeholder(s)." - + if [ "${#TARGETS[@]}" -gt 0 ]; then printf '%s\n' "${TARGETS[@]}" for f in "${TARGETS[@]}"; do echo "Overwriting $f with core tar bytes…" cp -f "$CORE" "$f" done - # prove they look like bzip2 too sudo apt-get update -y >/dev/null 2>&1 || true sudo apt-get install -y file >/dev/null 2>&1 || true file "$CORE" || true for f in "${TARGETS[@]}"; do file "$f" || true; done else - echo "No placeholders found (that’s fine if routes serve from /course/python_core.tar)." + echo "No placeholders found (routes will serve /course/python_core.tar)." 
fi - - # --- Final checks --- - name: Verify deploy artifact essentials run: | @@ -220,7 +211,7 @@ jobs: test -s deploy/course/python_stdlib.zip test -s deploy/course/python_core.tar echo "Listing any materialized hashed files:" - find deploy/course -type f -name "python_core.tar*.load_by_url" -maxdepth 4 -exec ls -l {} \; + find deploy/course -maxdepth 5 -type f -name "python_core.tar*.load_by_url" -exec ls -l {} \; - name: Print staticwebapp.config.json run: cat -n deploy/staticwebapp.config.json From 27a000a1a935203d900c851b1bf79163b5b02ac8 Mon Sep 17 00:00:00 2001 From: llodewyks Date: Thu, 16 Oct 2025 12:43:30 +0200 Subject: [PATCH 043/108] Update azure-static-web-app.yml --- .github/workflows/azure-static-web-app.yml | 54 +++++++++------------- 1 file changed, 21 insertions(+), 33 deletions(-) diff --git a/.github/workflows/azure-static-web-app.yml b/.github/workflows/azure-static-web-app.yml index 9f04c709..c1faba3d 100644 --- a/.github/workflows/azure-static-web-app.yml +++ b/.github/workflows/azure-static-web-app.yml @@ -103,7 +103,7 @@ jobs: HTML - # --- Write & validate staticwebapp.config.json (valid JSON, one-wildcard routes first) --- + # --- Write & validate staticwebapp.config.json (minimal & valid) --- - name: Write & validate staticwebapp.config.json run: | set -e @@ -112,19 +112,12 @@ jobs: const cfg = { routes: [ { route: "/", redirect: "/course/", statusCode: 302 }, - - // Serve the real core tar for any hashed request at root or CRA media path - { route: "/course/python_core.tar*", serve: "/course/python_core.tar" }, - { route: "/course/static/js/static/media/python_core.tar*", serve: "/course/python_core.tar" }, - - // Let index.html handle SPA routes; static assets are direct { route: "/course/*", rewrite: "/course/index.html" } ], mimeTypes: { ".wasm": "application/wasm", ".whl": "application/octet-stream", - ".data": "application/octet-stream", - ".load_by_url": "application/octet-stream" + ".data": "application/octet-stream" } }; const p = "deploy/staticwebapp.config.json"; @@ -178,30 +171,27 @@ jobs: test -s deploy/course/python_core.tar - # --- Overwrite any hashed placeholders with the real tar (covers any path) --- - - name: Overwrite hashed python_core placeholders with the real tar (file-system based) + # --- Patch built JS to request canonical filenames (no hashed load_by_url) --- + - name: Patch bundles to request canonical core/stdlib paths run: | set -e - CORE="deploy/course/python_core.tar" - test -s "$CORE" - - echo "Finding placeholder files named python_core.tar*.load_by_url under deploy/course…" - mapfile -t TARGETS < <(find deploy/course -maxdepth 5 -type f -name "python_core.tar*.load_by_url" | sort || true) - echo "Found ${#TARGETS[@]} placeholder(s)." - - if [ "${#TARGETS[@]}" -gt 0 ]; then - printf '%s\n' "${TARGETS[@]}" - for f in "${TARGETS[@]}"; do - echo "Overwriting $f with core tar bytes…" - cp -f "$CORE" "$f" - done - sudo apt-get update -y >/dev/null 2>&1 || true - sudo apt-get install -y file >/dev/null 2>&1 || true - file "$CORE" || true - for f in "${TARGETS[@]}"; do file "$f" || true; done - else - echo "No placeholders found (routes will serve /course/python_core.tar)." 
- fi + echo "Before patch (any references to load_by_url):" + grep -Rnoh --include="*.js" -E "python_(core|stdlib)\.(tar|zip)\.[a-f0-9]{6,}\.load_by_url" deploy/course || true + + # Replace any hashed ...load_by_url tokens with canonical filenames + find deploy/course -maxdepth 4 -type f -name "*.js" -print0 \ + | xargs -0 -r sed -i -E \ + -e 's#python_core\.tar\.[a-f0-9]{6,}\.load_by_url#python_core.tar#g' \ + -e 's#python_stdlib\.zip\.[a-f0-9]{6,}\.load_by_url#python_stdlib.zip#g' \ + -e 's#static/js/static/media/python_core\.tar[^"'"'"' ]*#python_core.tar#g' \ + -e 's#static/js/static/media/python_stdlib\.zip[^"'"'"' ]*#python_stdlib.zip#g' + + echo "After patch (should be empty):" + grep -Rnoh --include="*.js" -E "python_(core|stdlib)\.(tar|zip)\.[a-f0-9]{6,}\.load_by_url" deploy/course || true + + # prove the canonical files exist + test -s deploy/course/python_core.tar + test -s deploy/course/python_stdlib.zip # --- Final checks --- - name: Verify deploy artifact essentials @@ -210,8 +200,6 @@ jobs: test -d deploy/course/pyodide test -s deploy/course/python_stdlib.zip test -s deploy/course/python_core.tar - echo "Listing any materialized hashed files:" - find deploy/course -maxdepth 5 -type f -name "python_core.tar*.load_by_url" -exec ls -l {} \; - name: Print staticwebapp.config.json run: cat -n deploy/staticwebapp.config.json From dc5c7866838d47fb0b26196e2be2995433b3ce17 Mon Sep 17 00:00:00 2001 From: llodewyks Date: Thu, 16 Oct 2025 15:28:03 +0200 Subject: [PATCH 044/108] Create Dockerfile --- Dockerfile | 76 ++++++++++++++++++++++++++++++++++++++++++++++++++++++ 1 file changed, 76 insertions(+) create mode 100644 Dockerfile diff --git a/Dockerfile b/Dockerfile new file mode 100644 index 00000000..d1f0ed00 --- /dev/null +++ b/Dockerfile @@ -0,0 +1,76 @@ +# ---------- BUILD STAGE ---------- +FROM node:22-bullseye AS build +WORKDIR /app + +# Python + curl for the project scripts +RUN apt-get update && apt-get install -y --no-install-recommends \ + python3 python3-venv python3-pip curl ca-certificates \ + && rm -rf /var/lib/apt/lists/* + +# Copy repo +COPY . . + +# Frontend deps (craco etc.) +RUN npm --prefix frontend ci + +# Poetry for the Python bits +RUN curl -sSL https://install.python-poetry.org | python3 - \ + && echo 'export PATH="$HOME/.local/bin:$PATH"' >> /root/.bashrc +ENV PATH="/root/.local/bin:${PATH}" + +# Generate translations/static files and build the site +RUN poetry --version \ + && poetry install --no-root \ + && ./scripts/generate.sh \ + && ./scripts/build.sh + +# Normalize Pyodide artifacts so the app (and our server) can fetch them deterministically +# - Ensure python_stdlib.zip is at /course/ +# - Ensure python_core.tar is at /course/ (download the official core tar if not produced) +RUN bash -e <<'BASH' +OUT="dist/course" +test -d "$OUT" && test -f "$OUT/index.html" + +# stdlib to root +if [ -f "$OUT/pyodide/python_stdlib.zip" ] && [ ! -f "$OUT/python_stdlib.zip" ]; then + cp "$OUT/pyodide/python_stdlib.zip" "$OUT/python_stdlib.zip" +fi +test -f "$OUT/python_stdlib.zip" + +# core tar to root (download official core if missing) +if [ ! 
-f "$OUT/python_core.tar" ]; then + VER=$(node -p "require('./${OUT}/pyodide/package.json').version") + echo "Pyodide version: $VER" + for U in \ + "https://cdn.jsdelivr.net/pyodide/v$%7BVER%7D/full/pyodide-core-$%7BVER%7D.tar.bz2" \ + "https://repo.pyodide.org/pyodide/v$%7BVER%7D/full/pyodide-core-$%7BVER%7D.tar.bz2" \ + "https://github.com/pyodide/pyodide/releases/download/$%7BVER%7D/pyodide-core-$%7BVER%7D.tar.bz2?download=1" ; do + echo "Trying $U" + if curl -LfsS "$U" -o "$OUT/python_core.tar"; then + break + fi + done +fi + +# Sanity: non-empty + starts with BZh (bzip2) +test -s "$OUT/python_core.tar" +head -c 3 "$OUT/python_core.tar" | grep -q 'BZh' +BASH + + +# ---------- RUNTIME STAGE ---------- +FROM nginx:1.27-alpine +# Extra MIME types for Pyodide bits +RUN printf "\n types {\n application/wasm wasm;\n application/octet-stream whl data;\n application/x-bzip2 bz2 bz;\n }\n" >> /etc/nginx/mime.types + +# Nginx config +COPY nginx.conf /etc/nginx/conf.d/default.conf + +# Static site goes under /usr/share/nginx/html/course +COPY --from=build /app/dist/course /usr/share/nginx/html/course + +# Health check (optional) +RUN echo "ok" > /usr/share/nginx/html/healthz + +EXPOSE 80 +CMD ["nginx", "-g", "daemon off;"] From 5ae757622e54a2dc23ed8addc65ca68e4a34a302 Mon Sep 17 00:00:00 2001 From: llodewyks Date: Thu, 16 Oct 2025 15:28:28 +0200 Subject: [PATCH 045/108] Create nginx.conf --- nginx.conf | 36 ++++++++++++++++++++++++++++++++++++ 1 file changed, 36 insertions(+) create mode 100644 nginx.conf diff --git a/nginx.conf b/nginx.conf new file mode 100644 index 00000000..95df2baa --- /dev/null +++ b/nginx.conf @@ -0,0 +1,36 @@ +server { + listen 80; + server_name _; + + root /usr/share/nginx/html; + + # Redirect site root to /course/ + location = / { return 302 /course/; } + + # --- Serve Pyodide archives BEFORE SPA fallback --- + # Futurecoder / Pyodide sometimes request hashed CRA paths; handle both. 
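+  #   Example (the hash is illustrative): a request for
+  #     /course/static/js/static/media/python_core.tar.3f9ab12c.load_by_url
+  #   matches the first regex below and is answered with the canonical /course/python_core.tar.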
+ location ~ ^/course/(?:static/js/static/media/)?python_core\.tar.*$ { + default_type application/x-bzip2; + try_files /course/python_core.tar =404; + } + location ~ ^/course/(?:static/js/static/media/)?python_stdlib\.zip.*$ { + default_type application/zip; + try_files /course/python_stdlib.zip =404; + } + + # Static assets served as-is + location ~ ^/course/(static|pyodide|wheels|packages)/ { + try_files $uri =404; + } + location ~ ^/course/.*\.(wasm|whl|data|js|css|map|json)$ { + try_files $uri =404; + } + + # SPA fallback for the rest of /course/ + location /course/ { + try_files $uri $uri/ /course/index.html; + } + + # Simple health check + location = /healthz { return 200 "ok\n"; add_header Content-Type text/plain; } +} From 39e0945a0f6da62ba6e00c86eb7f3a6673e8771c Mon Sep 17 00:00:00 2001 From: llodewyks Date: Thu, 16 Oct 2025 15:29:44 +0200 Subject: [PATCH 046/108] Create azure-webapp-container.yml --- .github/workflows/azure-webapp-container.yml | 44 ++++++++++++++++++++ 1 file changed, 44 insertions(+) create mode 100644 .github/workflows/azure-webapp-container.yml diff --git a/.github/workflows/azure-webapp-container.yml b/.github/workflows/azure-webapp-container.yml new file mode 100644 index 00000000..6128f2cb --- /dev/null +++ b/.github/workflows/azure-webapp-container.yml @@ -0,0 +1,44 @@ +name: Deploy Futurecoder (Nginx container) to Azure Web App + +on: + push: + branches: [ main, master ] + workflow_dispatch: + +jobs: + build-and-deploy: + runs-on: ubuntu-latest + permissions: + contents: read + packages: write + id-token: write + + env: + IMAGE_NAME: futurecoder-nginx + REGISTRY: ghcr.io/${{ github.repository_owner }} + + steps: + - uses: actions/checkout@v4 + + - name: Log in to GitHub Container Registry + uses: docker/login-action@v3 + with: + registry: ghcr.io + username: ${{ github.actor }} + password: ${{ secrets.GITHUB_TOKEN }} + + - name: Build and push image + uses: docker/build-push-action@v6 + with: + push: true + tags: ${{ env.REGISTRY }}/${{ env.IMAGE_NAME }}:${{ github.sha }} + + # Deploy to an existing Linux Web App for Containers + # Create the Web App once in the Azure Portal and add its Publish Profile + # as a repo secret named AZUREAPPSERVICE_PUBLISHPROFILE + - name: Deploy to Azure Web App + uses: azure/webapps-deploy@v2 + with: + app-name: "" # <-- change this + images: "${{ env.REGISTRY }}/${{ env.IMAGE_NAME }}:${{ github.sha }}" + publish-profile: ${{ secrets.AZUREAPPSERVICE_PUBLISHPROFILE }} From 13c81f2da9015ff104825c18d184ba4b69d548c5 Mon Sep 17 00:00:00 2001 From: llodewyks Date: Thu, 16 Oct 2025 19:20:41 +0200 Subject: [PATCH 047/108] Create deploy-to-azure.yml --- .github/workflows/deploy-to-azure.yml | 33 +++++++++++++++++++++++++++ 1 file changed, 33 insertions(+) create mode 100644 .github/workflows/deploy-to-azure.yml diff --git a/.github/workflows/deploy-to-azure.yml b/.github/workflows/deploy-to-azure.yml new file mode 100644 index 00000000..36ed6f33 --- /dev/null +++ b/.github/workflows/deploy-to-azure.yml @@ -0,0 +1,33 @@ +name: Build and Deploy Container to Azure Web App + +on: + push: + branches: [ main ] + workflow_dispatch: + +jobs: + build: + runs-on: ubuntu-latest + + steps: + - name: Checkout code + uses: actions/checkout@v4 + + - name: Log in to GitHub Container Registry + run: echo "${{ secrets.GITHUB_TOKEN }}" | docker login ghcr.io -u ${{ github.actor }} --password-stdin + + - name: Build container image + run: docker build . 
-t ghcr.io/${{ github.repository }}:latest + + - name: Push container image + run: docker push ghcr.io/${{ github.repository }}:latest + + deploy: + runs-on: ubuntu-latest + needs: build + steps: + - name: Deploy to Azure Web App + uses: azure/webapps-deploy@v2 + with: + app-name: PythonCoding + images: ghcr.io/${{ github.repository }}:latest From bf2fff34547f7e4934a6a1db4de5e93c4f12ad41 Mon Sep 17 00:00:00 2001 From: llodewyks Date: Thu, 16 Oct 2025 19:32:20 +0200 Subject: [PATCH 048/108] Delete .github/workflows/azure-webapp-container.yml --- .github/workflows/azure-webapp-container.yml | 44 -------------------- 1 file changed, 44 deletions(-) delete mode 100644 .github/workflows/azure-webapp-container.yml diff --git a/.github/workflows/azure-webapp-container.yml b/.github/workflows/azure-webapp-container.yml deleted file mode 100644 index 6128f2cb..00000000 --- a/.github/workflows/azure-webapp-container.yml +++ /dev/null @@ -1,44 +0,0 @@ -name: Deploy Futurecoder (Nginx container) to Azure Web App - -on: - push: - branches: [ main, master ] - workflow_dispatch: - -jobs: - build-and-deploy: - runs-on: ubuntu-latest - permissions: - contents: read - packages: write - id-token: write - - env: - IMAGE_NAME: futurecoder-nginx - REGISTRY: ghcr.io/${{ github.repository_owner }} - - steps: - - uses: actions/checkout@v4 - - - name: Log in to GitHub Container Registry - uses: docker/login-action@v3 - with: - registry: ghcr.io - username: ${{ github.actor }} - password: ${{ secrets.GITHUB_TOKEN }} - - - name: Build and push image - uses: docker/build-push-action@v6 - with: - push: true - tags: ${{ env.REGISTRY }}/${{ env.IMAGE_NAME }}:${{ github.sha }} - - # Deploy to an existing Linux Web App for Containers - # Create the Web App once in the Azure Portal and add its Publish Profile - # as a repo secret named AZUREAPPSERVICE_PUBLISHPROFILE - - name: Deploy to Azure Web App - uses: azure/webapps-deploy@v2 - with: - app-name: "" # <-- change this - images: "${{ env.REGISTRY }}/${{ env.IMAGE_NAME }}:${{ github.sha }}" - publish-profile: ${{ secrets.AZUREAPPSERVICE_PUBLISHPROFILE }} From 4f182360b27e02637a0bfbbef1a4cb563516ceb5 Mon Sep 17 00:00:00 2001 From: llodewyks Date: Thu, 16 Oct 2025 19:35:37 +0200 Subject: [PATCH 049/108] Update Dockerfile --- Dockerfile | 117 +++++++++++++++++++++++++---------------------------- 1 file changed, 56 insertions(+), 61 deletions(-) diff --git a/Dockerfile b/Dockerfile index d1f0ed00..b64f082d 100644 --- a/Dockerfile +++ b/Dockerfile @@ -1,76 +1,71 @@ -# ---------- BUILD STAGE ---------- -FROM node:22-bullseye AS build -WORKDIR /app - -# Python + curl for the project scripts -RUN apt-get update && apt-get install -y --no-install-recommends \ - python3 python3-venv python3-pip curl ca-certificates \ - && rm -rf /var/lib/apt/lists/* - -# Copy repo -COPY . . +# ---------- build stage ---------- +FROM python:3.12.1-slim AS build -# Frontend deps (craco etc.) -RUN npm --prefix frontend ci +ARG NODE_MAJOR=22 +ENV DEBIAN_FRONTEND=noninteractive \ + PIP_NO_CACHE_DIR=1 \ + POETRY_VERSION=1.8.5 \ + # futurecoder build expects these; adjust if you want to use production Firebase etc. 
+ FUTURECODER_LANGUAGE=en \ + REACT_APP_USE_FIREBASE_EMULATORS=1 \ + REACT_APP_FIREBASE_STAGING=1 \ + CI=false -# Poetry for the Python bits -RUN curl -sSL https://install.python-poetry.org | python3 - \ - && echo 'export PATH="$HOME/.local/bin:$PATH"' >> /root/.bashrc -ENV PATH="/root/.local/bin:${PATH}" +# system deps + Node.js +RUN set -eux; \ + apt-get update; \ + apt-get install -y --no-install-recommends curl ca-certificates gnupg git build-essential; \ + curl -fsSL https://deb.nodesource.com/setup_${NODE_MAJOR}.x | bash -; \ + apt-get install -y --no-install-recommends nodejs; \ + node -v && npm -v; \ + rm -rf /var/lib/apt/lists/* -# Generate translations/static files and build the site -RUN poetry --version \ - && poetry install --no-root \ - && ./scripts/generate.sh \ - && ./scripts/build.sh +# Poetry +RUN set -eux; \ + curl -sSL https://install.python-poetry.org | python3 -; \ + ln -s /root/.local/bin/poetry /usr/local/bin/poetry; \ + poetry --version -# Normalize Pyodide artifacts so the app (and our server) can fetch them deterministically -# - Ensure python_stdlib.zip is at /course/ -# - Ensure python_core.tar is at /course/ (download the official core tar if not produced) -RUN bash -e <<'BASH' -OUT="dist/course" -test -d "$OUT" && test -f "$OUT/index.html" +WORKDIR /app -# stdlib to root -if [ -f "$OUT/pyodide/python_stdlib.zip" ] && [ ! -f "$OUT/python_stdlib.zip" ]; then - cp "$OUT/pyodide/python_stdlib.zip" "$OUT/python_stdlib.zip" -fi -test -f "$OUT/python_stdlib.zip" +# Install Python deps first (better layer caching) +COPY pyproject.toml poetry.lock ./ +RUN set -eux; \ + poetry config virtualenvs.in-project true; \ + poetry install --no-root --no-interaction --no-ansi -# core tar to root (download official core if missing) -if [ ! -f "$OUT/python_core.tar" ]; then - VER=$(node -p "require('./${OUT}/pyodide/package.json').version") - echo "Pyodide version: $VER" - for U in \ - "https://cdn.jsdelivr.net/pyodide/v$%7BVER%7D/full/pyodide-core-$%7BVER%7D.tar.bz2" \ - "https://repo.pyodide.org/pyodide/v$%7BVER%7D/full/pyodide-core-$%7BVER%7D.tar.bz2" \ - "https://github.com/pyodide/pyodide/releases/download/$%7BVER%7D/pyodide-core-$%7BVER%7D.tar.bz2?download=1" ; do - echo "Trying $U" - if curl -LfsS "$U" -o "$OUT/python_core.tar"; then - break - fi - done -fi +# Bring in the rest of the project +COPY . . 
+RUN chmod +x scripts/*.sh || true -# Sanity: non-empty + starts with BZh (bzip2) -test -s "$OUT/python_core.tar" -head -c 3 "$OUT/python_core.tar" | grep -q 'BZh' -BASH +# Build the site (show each failing command clearly) +RUN set -eux; \ + poetry --version; \ + poetry install --no-root -v; \ + ./scripts/generate.sh; \ + ./scripts/build.sh +# After build, the static site should be in dist/course +# Validate the build output early (this will fail the build if something’s missing) +RUN set -eux; \ + test -d dist/course; \ + test -f dist/course/index.html; \ + test -d dist/course/pyodide; \ + # stdlib is required by pyodide loader + test -f dist/course/python_stdlib.zip || (echo "python_stdlib.zip missing" && false) -# ---------- RUNTIME STAGE ---------- -FROM nginx:1.27-alpine -# Extra MIME types for Pyodide bits -RUN printf "\n types {\n application/wasm wasm;\n application/octet-stream whl data;\n application/x-bzip2 bz2 bz;\n }\n" >> /etc/nginx/mime.types +# ---------- runtime stage ---------- +FROM nginx:alpine AS runtime -# Nginx config -COPY nginx.conf /etc/nginx/conf.d/default.conf +# Nginx serves on 80 +EXPOSE 80 -# Static site goes under /usr/share/nginx/html/course +# Copy the built site under /usr/share/nginx/html COPY --from=build /app/dist/course /usr/share/nginx/html/course -# Health check (optional) -RUN echo "ok" > /usr/share/nginx/html/healthz +# Optional: redirect root to /course/ +RUN printf '' \ + > /usr/share/nginx/html/index.html -EXPOSE 80 +# Minimal nginx config (default works fine for static) CMD ["nginx", "-g", "daemon off;"] From 98347d0a966318738dd62054957019fa8545528e Mon Sep 17 00:00:00 2001 From: llodewyks Date: Thu, 16 Oct 2025 19:40:01 +0200 Subject: [PATCH 050/108] Delete .github/workflows/azure-static-web-app.yml --- .github/workflows/azure-static-web-app.yml | 214 --------------------- 1 file changed, 214 deletions(-) delete mode 100644 .github/workflows/azure-static-web-app.yml diff --git a/.github/workflows/azure-static-web-app.yml b/.github/workflows/azure-static-web-app.yml deleted file mode 100644 index c1faba3d..00000000 --- a/.github/workflows/azure-static-web-app.yml +++ /dev/null @@ -1,214 +0,0 @@ -name: Deploy futurecoder to Azure Static Web Apps - -on: - push: - branches: [ main, master ] - workflow_dispatch: - -jobs: - build_and_deploy: - runs-on: ubuntu-latest - - steps: - - name: Checkout - uses: actions/checkout@v4 - - # --- Python phase (generate content for frontend) --- - - name: Set up Python - uses: actions/setup-python@v5 - with: - python-version: '3.12.1' - - - name: Install Poetry - run: | - curl -sSL https://install.python-poetry.org | python3 - - echo "$HOME/.local/bin" >> $GITHUB_PATH - - - name: Make project scripts executable - run: | - chmod +x scripts/generate.sh || true - chmod +x scripts/build.sh || true - - - name: Generate site content - env: - FUTURECODER_LANGUAGE: en - run: | - poetry install --no-root - ./scripts/generate.sh - - # --- Node phase (install deps + build) --- - - name: Set up Node - uses: actions/setup-node@v4 - with: - node-version: '22' - - - name: Install frontend dependencies - working-directory: frontend - run: npm ci - - - name: Override CRA homepage to relative URLs - working-directory: frontend - run: | - node -e "let p=require('./package.json'); p.homepage='.'; require('fs').writeFileSync('package.json', JSON.stringify(p,null,2)); console.log('homepage now', p.homepage)" - - - name: Build with project script - env: - FUTURECODER_LANGUAGE: en - REACT_APP_USE_FIREBASE_EMULATORS: '1' - 
REACT_APP_FIREBASE_STAGING: '1' - CI: false - run: ./scripts/build.sh - - # --- Where did the build land? (dist/course or frontend/course) --- - - name: Detect course output dir - id: outdir - run: | - if [ -d dist/course ]; then - echo "dir=dist/course" >> $GITHUB_OUTPUT - elif [ -d frontend/course ]; then - echo "dir=frontend/course" >> $GITHUB_OUTPUT - else - echo "No course output found"; exit 1 - fi - echo "Detected: $(cat $GITHUB_OUTPUT)" - - - name: Verify & normalize archives - id: norm - run: | - OUT="${{ steps.outdir.outputs.dir }}" - echo "OUT=$OUT" - ls -la "$OUT" | sed -n '1,200p' - test -f "$OUT/index.html" - test -d "$OUT/pyodide" - # normalize stdlib/core names if present - STDLIB="$(find "$OUT" -type f -name 'python_stdlib*.zip' | head -n1 || true)" - CORE="$(find "$OUT" -type f -name 'python_core*.tar' | head -n1 || true)" - [ -n "$STDLIB" ] && cp -f "$STDLIB" "$OUT/python_stdlib.zip" || true - [ -n "$CORE" ] && cp -f "$CORE" "$OUT/python_core.tar" || true - test -f "$OUT/python_stdlib.zip" - echo "dir=$OUT" >> "$GITHUB_OUTPUT" - - # --- Stage deploy folder --- - - name: Stage files for SWA - run: | - set -e - OUT="${{ steps.norm.outputs.dir }}" - rm -rf deploy - mkdir -p deploy/course - cp -R "$OUT"/* deploy/course/ - - # Root -> /course/ - cat > deploy/index.html <<'HTML' - - - HTML - - # --- Write & validate staticwebapp.config.json (minimal & valid) --- - - name: Write & validate staticwebapp.config.json - run: | - set -e - node - <<'NODE' - const fs = require('fs'); - const cfg = { - routes: [ - { route: "/", redirect: "/course/", statusCode: 302 }, - { route: "/course/*", rewrite: "/course/index.html" } - ], - mimeTypes: { - ".wasm": "application/wasm", - ".whl": "application/octet-stream", - ".data": "application/octet-stream" - } - }; - const p = "deploy/staticwebapp.config.json"; - fs.writeFileSync(p, JSON.stringify(cfg, null, 2)); - JSON.parse(fs.readFileSync(p, "utf8")); // validate - console.log("Wrote valid", p); - NODE - - # --- Vend Pyodide core tar as deploy/course/python_core.tar (with validation) --- - - name: Vendor Pyodide core tar (python_core.tar) with validation - run: | - set -e - OUT="${{ steps.norm.outputs.dir }}" - mkdir -p deploy/course - - VER=$(node -p "require('./${OUT}/pyodide/package.json').version") - echo "Pyodide version: $VER" - - CANDIDATES=( - "https://cdn.jsdelivr.net/pyodide/v$%7BVER%7D/full/pyodide-core-$%7BVER%7D.tar.bz2" - "https://repo.pyodide.org/pyodide/v$%7BVER%7D/full/pyodide-core-$%7BVER%7D.tar.bz2" - "https://github.com/pyodide/pyodide/releases/download/$%7BVER%7D/pyodide-core-$%7BVER%7D.tar.bz2?download=1" - ) - - OK="" - for URL in "${CANDIDATES[@]}"; do - echo "Trying $URL" - if curl -LfsS --retry 3 --retry-delay 2 \ - -H "User-Agent: GitHubActions" \ - -H "Accept: application/octet-stream" \ - "$URL" -o deploy/course/python_core.tar; then - MAGIC=$(head -c 3 deploy/course/python_core.tar || true) - if [ "$MAGIC" = $'BZh' ]; then - echo "Fetched a VALID bzip2 tar from: $URL" - OK="yes" - break - else - echo "Downloaded bytes are NOT a bzip2 tar (magic='$MAGIC'). Trying next source…" - fi - else - echo "Download failed from: $URL" - fi - done - - if [ -z "$OK" ]; then - echo "ERROR: Could not fetch a valid pyodide core tar (bzip2)." 
- echo "First bytes of the last attempt:" - head -c 200 deploy/course/python_core.tar | sed -e 's/[^[:print:]\t]/./g' || true - exit 1 - fi - - test -s deploy/course/python_core.tar - - # --- Patch built JS to request canonical filenames (no hashed load_by_url) --- - - name: Patch bundles to request canonical core/stdlib paths - run: | - set -e - echo "Before patch (any references to load_by_url):" - grep -Rnoh --include="*.js" -E "python_(core|stdlib)\.(tar|zip)\.[a-f0-9]{6,}\.load_by_url" deploy/course || true - - # Replace any hashed ...load_by_url tokens with canonical filenames - find deploy/course -maxdepth 4 -type f -name "*.js" -print0 \ - | xargs -0 -r sed -i -E \ - -e 's#python_core\.tar\.[a-f0-9]{6,}\.load_by_url#python_core.tar#g' \ - -e 's#python_stdlib\.zip\.[a-f0-9]{6,}\.load_by_url#python_stdlib.zip#g' \ - -e 's#static/js/static/media/python_core\.tar[^"'"'"' ]*#python_core.tar#g' \ - -e 's#static/js/static/media/python_stdlib\.zip[^"'"'"' ]*#python_stdlib.zip#g' - - echo "After patch (should be empty):" - grep -Rnoh --include="*.js" -E "python_(core|stdlib)\.(tar|zip)\.[a-f0-9]{6,}\.load_by_url" deploy/course || true - - # prove the canonical files exist - test -s deploy/course/python_core.tar - test -s deploy/course/python_stdlib.zip - - # --- Final checks --- - - name: Verify deploy artifact essentials - run: | - test -s deploy/course/index.html - test -d deploy/course/pyodide - test -s deploy/course/python_stdlib.zip - test -s deploy/course/python_core.tar - - - name: Print staticwebapp.config.json - run: cat -n deploy/staticwebapp.config.json - - # --- Deploy to SWA (no Oryx build) --- - - name: Deploy - uses: Azure/static-web-apps-deploy@v1 - with: - azure_static_web_apps_api_token: ${{ secrets.AZURE_STATIC_WEB_APPS_API_TOKEN }} - action: upload - app_location: "deploy" - skip_app_build: true From 43818856dad7b9d83a0a9b1e7fa9bff73a3c6dbc Mon Sep 17 00:00:00 2001 From: llodewyks Date: Thu, 16 Oct 2025 19:57:23 +0200 Subject: [PATCH 051/108] Create deploy.yml --- .github/workflows/deploy.yml | 90 ++++++++++++++++++++++++++++++++++++ 1 file changed, 90 insertions(+) create mode 100644 .github/workflows/deploy.yml diff --git a/.github/workflows/deploy.yml b/.github/workflows/deploy.yml new file mode 100644 index 00000000..8b5a4f3a --- /dev/null +++ b/.github/workflows/deploy.yml @@ -0,0 +1,90 @@ +name: Build & Deploy container to Azure Web App (no publish profile) + +on: + push: + branches: [ main ] + workflow_dispatch: + +permissions: + contents: read + packages: write + id-token: write # required for OIDC login to Azure + +env: + REGISTRY: ghcr.io + IMAGE_NAME: futurecoder # change if you like + APP_NAME: PythonCoding # <-- your Azure Web App name + RESOURCE_GROUP: MSAN-RG-Training # <-- your RG name + WEBSITES_PORT: "80" + +jobs: + build-and-deploy: + runs-on: ubuntu-latest + + steps: + - name: Checkout + uses: actions/checkout@v4 + + - name: Compute tags + id: meta + run: | + OWNER_LC=$(echo "${{ github.repository_owner }}" | tr '[:upper:]' '[:lower:]') + echo "owner=${OWNER_LC}" >> $GITHUB_OUTPUT + echo "tag_sha=${{ env.REGISTRY }}/${OWNER_LC}/${{ env.IMAGE_NAME }}:${{ github.sha }}" >> $GITHUB_OUTPUT + echo "tag_latest=${{ env.REGISTRY }}/${OWNER_LC}/${{ env.IMAGE_NAME }}:latest" >> $GITHUB_OUTPUT + + - name: Set up Buildx + uses: docker/setup-buildx-action@v3 + + - name: Login to GHCR + uses: docker/login-action@v3 + with: + registry: ${{ env.REGISTRY }} + username: ${{ github.actor }} + password: ${{ secrets.GITHUB_TOKEN }} + + - name: Build & push image to GHCR + 
uses: docker/build-push-action@v6 + with: + context: . + push: true + tags: | + ${{ steps.meta.outputs.tag_sha }} + ${{ steps.meta.outputs.tag_latest }} + + # --- Azure login via OIDC (no publish profile) --- + - name: Azure login (OIDC) + uses: azure/login@v2 + with: + client-id: ${{ secrets.AZURE_CLIENT_ID }} + tenant-id: ${{ secrets.AZURE_TENANT_ID }} + subscription-id: ${{ secrets.AZURE_SUBSCRIPTION_ID }} + + # Ensure WEBSITES_PORT is set + - name: Set app settings + run: | + az webapp config appsettings set \ + --name "${{ env.APP_NAME }}" \ + --resource-group "${{ env.RESOURCE_GROUP }}" \ + --settings WEBSITES_PORT=${{ env.WEBSITES_PORT }} + + # If image is PUBLIC on GHCR, you can omit the credentials block and just set --docker-custom-image-name + - name: Configure container (public GHCR) + if: ${{ secrets.GHCR_READ_TOKEN == '' }} + run: | + az webapp config container set \ + --name "${{ env.APP_NAME }}" \ + --resource-group "${{ env.RESOURCE_GROUP }}" \ + --docker-custom-image-name "${{ steps.meta.outputs.tag_sha }}" + + # If image is PRIVATE on GHCR, pass registry credentials for the Web App to pull the image + - name: Configure container (private GHCR) + if: ${{ secrets.GHCR_READ_TOKEN != '' }} + run: | + az webapp config container set \ + --name "${{ env.APP_NAME }}" \ + --resource-group "${{ env.RESOURCE_GROUP }}" \ + --docker-custom-image-name "${{ steps.meta.outputs.tag_sha }}" \ + --docker-registry-server-url "https://${{ env.REGISTRY }}" \ + --docker-registry-server-user "${{ github.actor }}" \ + --docker-registry-server-password "${{ secrets.GHCR_READ_TOKEN }}" From 83a94c92e891349f4afaaeb851b699637295c7db Mon Sep 17 00:00:00 2001 From: llodewyks Date: Thu, 16 Oct 2025 20:18:02 +0200 Subject: [PATCH 052/108] Update deploy.yml From 5d4ba21120c474864cf8b847ffac07078725bba0 Mon Sep 17 00:00:00 2001 From: llodewyks Date: Thu, 16 Oct 2025 20:25:46 +0200 Subject: [PATCH 053/108] Update deploy.yml --- .github/workflows/deploy.yml | 46 ++++++++++++++++++++++++++---------- 1 file changed, 33 insertions(+), 13 deletions(-) diff --git a/.github/workflows/deploy.yml b/.github/workflows/deploy.yml index 8b5a4f3a..afa1bdaf 100644 --- a/.github/workflows/deploy.yml +++ b/.github/workflows/deploy.yml @@ -1,4 +1,4 @@ -name: Build & Deploy container to Azure Web App (no publish profile) +name: Build & Deploy container to Azure Web App (OIDC, GHCR) on: push: @@ -8,14 +8,16 @@ on: permissions: contents: read packages: write - id-token: write # required for OIDC login to Azure + id-token: write # needed for azure/login OIDC env: REGISTRY: ghcr.io - IMAGE_NAME: futurecoder # change if you like - APP_NAME: PythonCoding # <-- your Azure Web App name - RESOURCE_GROUP: MSAN-RG-Training # <-- your RG name + IMAGE_NAME: futurecoder # change if you prefer + APP_NAME: PythonCoding # <-- your Azure Web App name + RESOURCE_GROUP: MSAN-RG-Training # <-- your Azure Resource Group WEBSITES_PORT: "80" + DOCKER_BUILDKIT: "1" + BUILDKIT_PROGRESS: plain jobs: build-and-deploy: @@ -25,7 +27,7 @@ jobs: - name: Checkout uses: actions/checkout@v4 - - name: Compute tags + - name: Compute image tags id: meta run: | OWNER_LC=$(echo "${{ github.repository_owner }}" | tr '[:upper:]' '[:lower:]') @@ -36,7 +38,7 @@ jobs: - name: Set up Buildx uses: docker/setup-buildx-action@v3 - - name: Login to GHCR + - name: Log in to GHCR (push) uses: docker/login-action@v3 with: registry: ${{ env.REGISTRY }} @@ -52,7 +54,7 @@ jobs: ${{ steps.meta.outputs.tag_sha }} ${{ steps.meta.outputs.tag_latest }} - # --- Azure login via OIDC (no 
publish profile) --- + # ---- Azure login via OIDC (no publish profile needed) ---- - name: Azure login (OIDC) uses: azure/login@v2 with: @@ -60,7 +62,7 @@ jobs: tenant-id: ${{ secrets.AZURE_TENANT_ID }} subscription-id: ${{ secrets.AZURE_SUBSCRIPTION_ID }} - # Ensure WEBSITES_PORT is set + # Ensure WEBSITES_PORT is set on the Web App - name: Set app settings run: | az webapp config appsettings set \ @@ -68,18 +70,31 @@ jobs: --resource-group "${{ env.RESOURCE_GROUP }}" \ --settings WEBSITES_PORT=${{ env.WEBSITES_PORT }} - # If image is PUBLIC on GHCR, you can omit the credentials block and just set --docker-custom-image-name + # Derive a flag from secret (don't reference secrets in `if:` directly) + - name: Determine if GHCR is private + id: ghcr + shell: bash + env: + GHCR_READ_TOKEN: ${{ secrets.GHCR_READ_TOKEN }} + run: | + if [ -n "${GHCR_READ_TOKEN}" ]; then + echo "use_auth=true" >> $GITHUB_OUTPUT + else + echo "use_auth=false" >> $GITHUB_OUTPUT + fi + + # Configure container for PUBLIC GHCR image (no creds) - name: Configure container (public GHCR) - if: ${{ secrets.GHCR_READ_TOKEN == '' }} + if: ${{ steps.ghcr.outputs.use_auth == 'false' }} run: | az webapp config container set \ --name "${{ env.APP_NAME }}" \ --resource-group "${{ env.RESOURCE_GROUP }}" \ --docker-custom-image-name "${{ steps.meta.outputs.tag_sha }}" - # If image is PRIVATE on GHCR, pass registry credentials for the Web App to pull the image + # Configure container for PRIVATE GHCR image (with creds) - name: Configure container (private GHCR) - if: ${{ secrets.GHCR_READ_TOKEN != '' }} + if: ${{ steps.ghcr.outputs.use_auth == 'true' }} run: | az webapp config container set \ --name "${{ env.APP_NAME }}" \ @@ -88,3 +103,8 @@ jobs: --docker-registry-server-url "https://${{ env.REGISTRY }}" \ --docker-registry-server-user "${{ github.actor }}" \ --docker-registry-server-password "${{ secrets.GHCR_READ_TOKEN }}" + + # (Optional) Restart the app to pick the new image immediately + - name: Restart Web App + run: | + az webapp restart --name "${{ env.APP_NAME }}" --resource-group "${{ env.RESOURCE_GROUP }}" From c01cb63996d06eb07fe489235da2262be9eb2f8a Mon Sep 17 00:00:00 2001 From: llodewyks Date: Thu, 16 Oct 2025 20:40:17 +0200 Subject: [PATCH 054/108] Update Dockerfile --- Dockerfile | 3 +++ 1 file changed, 3 insertions(+) diff --git a/Dockerfile b/Dockerfile index b64f082d..a7256f20 100644 --- a/Dockerfile +++ b/Dockerfile @@ -38,6 +38,9 @@ RUN set -eux; \ COPY . . 
RUN chmod +x scripts/*.sh || true +# ✅ Install frontend deps so "craco" exists +RUN npm ci --prefix frontend + # Build the site (show each failing command clearly) RUN set -eux; \ poetry --version; \ From fae54b791d68c06e53b073d8ed5d123212a8ca79 Mon Sep 17 00:00:00 2001 From: llodewyks Date: Thu, 16 Oct 2025 20:44:06 +0200 Subject: [PATCH 055/108] Delete .github/workflows/deploy-to-azure.yml --- .github/workflows/deploy-to-azure.yml | 33 --------------------------- 1 file changed, 33 deletions(-) delete mode 100644 .github/workflows/deploy-to-azure.yml diff --git a/.github/workflows/deploy-to-azure.yml b/.github/workflows/deploy-to-azure.yml deleted file mode 100644 index 36ed6f33..00000000 --- a/.github/workflows/deploy-to-azure.yml +++ /dev/null @@ -1,33 +0,0 @@ -name: Build and Deploy Container to Azure Web App - -on: - push: - branches: [ main ] - workflow_dispatch: - -jobs: - build: - runs-on: ubuntu-latest - - steps: - - name: Checkout code - uses: actions/checkout@v4 - - - name: Log in to GitHub Container Registry - run: echo "${{ secrets.GITHUB_TOKEN }}" | docker login ghcr.io -u ${{ github.actor }} --password-stdin - - - name: Build container image - run: docker build . -t ghcr.io/${{ github.repository }}:latest - - - name: Push container image - run: docker push ghcr.io/${{ github.repository }}:latest - - deploy: - runs-on: ubuntu-latest - needs: build - steps: - - name: Deploy to Azure Web App - uses: azure/webapps-deploy@v2 - with: - app-name: PythonCoding - images: ghcr.io/${{ github.repository }}:latest From 7beba92337e043d7e70acd7be99fb8855bb88cf7 Mon Sep 17 00:00:00 2001 From: llodewyks Date: Thu, 16 Oct 2025 20:44:27 +0200 Subject: [PATCH 056/108] Delete .github/workflows/workflow.yml --- .github/workflows/workflow.yml | 52 ---------------------------------- 1 file changed, 52 deletions(-) delete mode 100644 .github/workflows/workflow.yml diff --git a/.github/workflows/workflow.yml b/.github/workflows/workflow.yml deleted file mode 100644 index 9d39f37f..00000000 --- a/.github/workflows/workflow.yml +++ /dev/null @@ -1,52 +0,0 @@ -name: CI -on: - push: - branches: - - main - pull_request: - branches: - - main - -jobs: - test: - runs-on: ubuntu-latest - steps: - - uses: actions/checkout@v5 - - name: Set up Python - uses: actions/setup-python@v6 - with: - python-version: '3.12.1' - - name: Set up Node - uses: actions/setup-node@v5 - with: - node-version: '22.17.0' - - name: Install chromedriver - uses: nanasess/setup-chromedriver@master - - name: Install Python and JS dependencies - run: ./scripts/install_deps.sh - - name: Build - id: build - env: - REACT_APP_SENTRY_DSN: https://37b1f01452b54bf4a0fe88656070998f@o871617.ingest.sentry.io/5824691 - REACT_APP_USE_FIREBASE_EMULATORS: '1' - REACT_APP_FIREBASE_STAGING: '1' - FUTURECODER_LANGUAGE: en - run: ./scripts/build.sh - - name: Test - env: - FUTURECODER_LANGUAGE: en - FIREBASE_TOKEN: '1//03I37hFeN4kn3CgYIARAAGAMSNwF-L9IrUvqofZbhOkS8YMtQBhw_bu2TpWYC5MHvnaZDsWPP0KJMypXPyoxogkl8A6p2RxPJQwQ' - run: ./scripts/ci_test.sh - - name: Upload test artifacts - uses: actions/upload-artifact@v4 - if: steps.build.outcome == 'success' - with: - path: '**/test_frontend_assets/' - - name: Deploy preview - uses: FirebaseExtended/action-hosting-deploy@v0 - if: steps.build.outcome == 'success' && github.ref != 'refs/heads/main' - with: - repoToken: '${{ secrets.GITHUB_TOKEN }}' - firebaseServiceAccount: '${{ secrets.FIREBASE_SERVICE_ACCOUNT }}' - projectId: futurecoder-staging - channelId: ${{ github.ref }} From 
d84e2eaabbe4bd479441967170e4d51d13c9b7ff Mon Sep 17 00:00:00 2001 From: llodewyks Date: Fri, 17 Oct 2025 08:35:01 +0200 Subject: [PATCH 057/108] Update Dockerfile --- Dockerfile | 26 ++++++++++++++++++++++++-- 1 file changed, 24 insertions(+), 2 deletions(-) diff --git a/Dockerfile b/Dockerfile index a7256f20..f290f31c 100644 --- a/Dockerfile +++ b/Dockerfile @@ -50,12 +50,34 @@ RUN set -eux; \ # After build, the static site should be in dist/course # Validate the build output early (this will fail the build if something’s missing) +# ---------- normalize & validate build output ---------- RUN set -eux; \ + # Base structure must exist test -d dist/course; \ test -f dist/course/index.html; \ test -d dist/course/pyodide; \ - # stdlib is required by pyodide loader - test -f dist/course/python_stdlib.zip || (echo "python_stdlib.zip missing" && false) + \ + # Normalize python_stdlib.zip (required by runtime) + if [ -f dist/course/python_stdlib.zip ]; then \ + echo "Found stdlib at dist/course/python_stdlib.zip"; \ + elif [ -f dist/course/pyodide/python_stdlib.zip ]; then \ + echo "Found stdlib under pyodide/, normalizing to course root..."; \ + cp -f dist/course/pyodide/python_stdlib.zip dist/course/python_stdlib.zip; \ + else \ + echo "python_stdlib.zip missing (looked in course/ and course/pyodide/)"; \ + exit 1; \ + fi; \ + \ + # Optional: normalize python_core tar if the build produced one + CORE_FILE="$(find dist/course -maxdepth 2 -type f -name 'python_core*.tar' | head -n1 || true)"; \ + if [ -n "$CORE_FILE" ] && [ ! -f dist/course/python_core.tar ]; then \ + echo "Normalizing $CORE_FILE -> dist/course/python_core.tar"; \ + cp -f "$CORE_FILE" dist/course/python_core.tar || true; \ + fi; \ + \ + # Final assertions + test -f dist/course/python_stdlib.zip + # ---------- runtime stage ---------- FROM nginx:alpine AS runtime From ec8fbc719aa2a01031aca8088a0449f08c475120 Mon Sep 17 00:00:00 2001 From: llodewyks Date: Tue, 21 Oct 2025 16:10:56 +0200 Subject: [PATCH 058/108] Add files via upload Added robots.txt --- robots.txt | 2 ++ 1 file changed, 2 insertions(+) create mode 100644 robots.txt diff --git a/robots.txt b/robots.txt new file mode 100644 index 00000000..c2aab7e0 --- /dev/null +++ b/robots.txt @@ -0,0 +1,2 @@ +User-agent: * +Disallow: / \ No newline at end of file From d426aa5f5e4bb931f9406d7fd17a8a2133a2fa86 Mon Sep 17 00:00:00 2001 From: llodewyks Date: Tue, 21 Oct 2025 16:12:25 +0200 Subject: [PATCH 059/108] Copy robots.txt to nginx HTML directory Added robots.txt to nginx root for SEO purposes. 
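A quick way to confirm the file is actually served once the image is built
(image tag and host port here are illustrative):

```bash
docker build -t futurecoder-nginx .
docker run --rm -d -p 8080:80 futurecoder-nginx
curl -s http://localhost:8080/robots.txt
# expected output:
#   User-agent: *
#   Disallow: /
```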
--- Dockerfile | 3 +++ 1 file changed, 3 insertions(+) diff --git a/Dockerfile b/Dockerfile index f290f31c..e449dbb2 100644 --- a/Dockerfile +++ b/Dockerfile @@ -88,6 +88,9 @@ EXPOSE 80 # Copy the built site under /usr/share/nginx/html COPY --from=build /app/dist/course /usr/share/nginx/html/course +# Copy robots.txt from project root into the nginx root +COPY robots.txt /usr/share/nginx/html/robots.txt + # Optional: redirect root to /course/ RUN printf '' \ > /usr/share/nginx/html/index.html From 0afffa37f5a1b250d0694433517ba8323ab11c12 Mon Sep 17 00:00:00 2001 From: Puller28 Date: Fri, 24 Oct 2025 14:13:33 +0200 Subject: [PATCH 060/108] Add Azure progress API integration and admin dashboard --- docs/azure-progress-api.md | 255 +++++++++++++++++++ frontend/.env.local.example | 0 frontend/README.md | 29 +++ frontend/src/App.js | 76 +++++- frontend/src/book/store.js | 63 ++++- frontend/src/components/AdminDashboard.jsx | 269 +++++++++++++++++++++ frontend/src/services/progressApi.js | 60 +++++ 7 files changed, 735 insertions(+), 17 deletions(-) create mode 100644 docs/azure-progress-api.md create mode 100644 frontend/.env.local.example create mode 100644 frontend/src/components/AdminDashboard.jsx create mode 100644 frontend/src/services/progressApi.js diff --git a/docs/azure-progress-api.md b/docs/azure-progress-api.md new file mode 100644 index 00000000..cfbd2b1c --- /dev/null +++ b/docs/azure-progress-api.md @@ -0,0 +1,255 @@ +Azure Progress API Integration +================================ + +This frontend can persist learner progress and feed the admin dashboard entirely through Azure Functions. The sections below describe a minimal implementation you can adapt to your infrastructure. + +Environment variables +--------------------- + +Configure the React app via `.env.local` (see `frontend/.env.local.example`): + +``` +REACT_APP_PROGRESS_API_BASE=https://your-function-app.azurewebsites.net/api +REACT_APP_PROGRESS_API_KEY= +REACT_APP_ADMIN_EMAILS=admin@example.com,@company.com +``` + +Endpoint contract +----------------- + +The UI expects three endpoints under `REACT_APP_PROGRESS_API_BASE`: + +1. **GET `/users/{id}`** + Returns a JSON document shaped like: + ```json + { + "pageSlug": "Introduction", + "developerMode": false, + "editorContent": "print('hello world')", + "pagesProgress": { + "Introduction": { + "step_name": "writing_code", + "updated_at": "2025-10-24T08:00:00Z" + } + } + } + ``` + Fields can contain more data, but these keys are required. + +2. **PATCH `/users/{id}`** + Accepts partial updates in the same shape and merges them into storage. For example: + ```json + { + "pagesProgress/Introduction/step_name": "next_step", + "pagesProgress/Introduction/updated_at": "2025-10-24T08:05:00Z" + } + ``` + You can implement this either as a document merge or convert the flattened paths back to nested objects before writing. + +3. **GET `/admin/progress`** + Returns either an array or an object with a `users` array. Each entry needs: + ```json + { + "userId": "1234-5678", + "email": "learner@example.com", + "pagesProgress": { "...": { "step_name": "...", "updated_at": "..." } } + } + ``` + +Sample Azure Functions skeleton +------------------------------- + +Below is a TypeScript/JavaScript example using Cosmos DB bindings. Adjust names and bindings to match your resource group. 
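+
+If you are starting from scratch, the Azure Functions Core Tools can scaffold the three functions (a sketch; the function names follow this document, and the skeleton below matches the older Node programming model that still uses `function.json`):
+
+```bash
+func init progress-api --worker-runtime node
+cd progress-api
+func new --name UsersGet --template "HTTP trigger"
+func new --name UsersPatch --template "HTTP trigger"
+func new --name AdminProgress --template "HTTP trigger"
+```
+
+Each generated folder then receives the `index.js` and `function.json` shown below.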
+ +`UsersGet/index.js` +```javascript +module.exports = async function (context, req) { + const { id } = req.params; + const user = context.bindings.userDocument || {}; + context.res = { + status: 200, + body: { + pageSlug: user.pageSlug || "loading_placeholder", + developerMode: Boolean(user.developerMode), + editorContent: user.editorContent || "", + pagesProgress: user.pagesProgress || {}, + email: user.email || null + } + }; +}; +``` + +`UsersGet/function.json` +```json +{ + "bindings": [ + { + "authLevel": "function", + "type": "httpTrigger", + "direction": "in", + "name": "req", + "methods": [ "get" ], + "route": "users/{id}" + }, + { + "type": "cosmosDB", + "direction": "in", + "name": "userDocument", + "databaseName": "futurecoder", + "containerName": "progress", + "connection": "CosmosConnection", + "partitionKey": "{id}", + "id": "{id}" + }, + { + "type": "http", + "direction": "out", + "name": "res" + } + ] +} +``` + +`UsersPatch/index.js` +```javascript +const applyPatch = (doc, updates) => { + const clone = { ...doc }; + Object.entries(updates).forEach(([path, value]) => { + const segments = path.split("/"); + let cursor = clone; + while (segments.length > 1) { + const key = segments.shift(); + cursor[key] = cursor[key] || {}; + cursor = cursor[key]; + } + cursor[segments[0]] = value; + }); + return clone; +}; + +module.exports = async function (context, req) { + const { id } = req.params; + const updates = req.body || {}; + const current = context.bindings.userDocument || {}; + const next = applyPatch(current, updates); + next.userId = next.userId || id; + context.bindings.updatedDocument = next; + context.res = { status: 204 }; +}; +``` + +`UsersPatch/function.json` +```json +{ + "bindings": [ + { + "authLevel": "function", + "type": "httpTrigger", + "direction": "in", + "name": "req", + "methods": [ "patch" ], + "route": "users/{id}" + }, + { + "type": "cosmosDB", + "direction": "in", + "name": "userDocument", + "databaseName": "futurecoder", + "containerName": "progress", + "connection": "CosmosConnection", + "partitionKey": "{id}", + "id": "{id}" + }, + { + "type": "cosmosDB", + "direction": "out", + "name": "updatedDocument", + "databaseName": "futurecoder", + "containerName": "progress", + "connection": "CosmosConnection", + "createIfNotExists": true, + "partitionKey": "{id}" + }, + { + "type": "http", + "direction": "out", + "name": "res" + } + ] +} +``` + +`AdminProgress/index.js` +```javascript +module.exports = async function (context, req) { + const users = context.bindings.progressDocuments || []; + context.res = { + status: 200, + body: { + users: users.map(doc => ({ + userId: doc.userId || doc.id, + email: doc.email || null, + pagesProgress: doc.pagesProgress || {} + })) + } + }; +}; +``` + +`AdminProgress/function.json` +```json +{ + "bindings": [ + { + "authLevel": "function", + "type": "httpTrigger", + "direction": "in", + "name": "req", + "methods": [ "get" ], + "route": "admin/progress" + }, + { + "type": "cosmosDB", + "direction": "in", + "name": "progressDocuments", + "databaseName": "futurecoder", + "containerName": "progress", + "connection": "CosmosConnection", + "sqlQuery": "SELECT * FROM c" + }, + { + "type": "http", + "direction": "out", + "name": "res" + } + ] +} +``` + +Local testing tips +------------------ + +1. Install the Azure Functions Core Tools and Cosmos DB emulator (or target your cloud instance). +2. 
Create `local.settings.json` alongside `host.json`:
+   ```json
+   {
+     "IsEncrypted": false,
+     "Values": {
+       "AzureWebJobsStorage": "UseDevelopmentStorage=true",
+       "FUNCTIONS_WORKER_RUNTIME": "node",
+       "CosmosConnection": "AccountEndpoint=https://localhost:8081/;AccountKey=local-emulator-key;"
+     }
+   }
+   ```
+3. Seed the `progress` container with documents matching the schema above.
+4. Run `func start` and confirm the endpoints return sample data.
+5. Set `REACT_APP_PROGRESS_API_BASE=http://localhost:7071/api` in `.env.local` and start the React app.
+
+Security considerations
+-----------------------
+
+- Swap `authLevel` to `anonymous` and enforce Azure AD via Easy Auth if you plan to use MSAL access tokens instead of a functions key.
+- Restrict `AdminProgress` to admins only (e.g. verify group claims in Easy Auth headers or add your own JWT validation layer).
+- Sanitize inputs when you expand the schema; the skeleton above blindly merges update paths, so refine it for production.
+
+Once these endpoints are live, the frontend will automatically persist learner progress and render the admin dashboard using your Azure stack.
diff --git a/frontend/.env.local.example b/frontend/.env.local.example
new file mode 100644
index 00000000..e69de29b
diff --git a/frontend/README.md b/frontend/README.md
index 859d27a6..18ba6f02 100644
--- a/frontend/README.md
+++ b/frontend/README.md
@@ -1,5 +1,34 @@
 This project was bootstrapped with [Create React App](https://github.com/facebook/create-react-app).
 
+## Azure Progress API Setup
+
+See `../docs/azure-progress-api.md` for a full backend blueprint.
+
+The frontend now talks to an Azure-hosted progress service instead of Firebase when the following environment variables are present (see `.env.local.example`):
+
+```
+REACT_APP_PROGRESS_API_BASE=https://<your-function-app>.azurewebsites.net/api
+REACT_APP_PROGRESS_API_KEY=
+REACT_APP_ADMIN_EMAILS=comma,separated,list,@or-domains
+```
+
+1. Copy `.env.local.example` to `.env.local` and fill in the values for your environment.
+2. Restart the React dev server after editing env files; Create React App only reads them on startup.
+3. The frontend expects the Azure API to expose:
+   - `GET /users/{id}` → returns a document containing at least `pagesProgress`, `pageSlug`, `developerMode`, `editorContent`.
+   - `PATCH /users/{id}` → merges the posted JSON into the stored user document.
+   - `GET /admin/progress` → returns either an array or object with `users` array; each entry should expose `userId`/`email` and `pagesProgress`.
+
+If these variables are omitted the app will fall back to the legacy Firebase implementation (or local storage only if Firebase is disabled).
+
+### Quick local test workflow
+
+1. Run the Azure Functions progress API locally with `REACT_APP_PROGRESS_API_BASE` pointing to the emulator URL (for example `http://localhost:7071/api`).
+2. Populate your backing data store (e.g. Cosmos DB or the emulator) with sample progress documents.
+3. From `frontend/`, run `npm install` and `npm start`.
+4. Sign in as a learner and navigate course content to confirm progress updates.
+5. Open `#admin` (or use the Admin Dashboard link) with an admin user to verify cross-user summaries.
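+
+A quick smoke test of the three endpoints with `curl` (a sketch; it assumes the Functions host is running locally on port 7071 and a learner id of `demo-user`):
+
+```bash
+BASE=http://localhost:7071/api
+
+# Fetch one learner's progress document
+curl -s "$BASE/users/demo-user"
+
+# Merge a progress update for the Introduction page
+curl -s -X PATCH "$BASE/users/demo-user" \
+  -H "Content-Type: application/json" \
+  -d '{"pagesProgress/Introduction/step_name": "writing_code"}'
+
+# Admin roll-up across all learners
+curl -s "$BASE/admin/progress"
+```
+
+Add `-H "x-functions-key: $REACT_APP_PROGRESS_API_KEY"` to each call if your functions use `authLevel: function`.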
+
 ## Available Scripts
 
 In the project directory, you can run:
diff --git a/frontend/src/App.js b/frontend/src/App.js
index 51131bb7..111c785f 100644
--- a/frontend/src/App.js
+++ b/frontend/src/App.js
@@ -54,10 +54,39 @@ import {interrupt, runCode, terminalRef} from "./RunCode";
 import firebase from "firebase/app";
 import {TableOfContents} from "./TableOfContents";
 import HeaderLoginInfo from "./components/HeaderLoginInfo";
+import AdminDashboard from "./components/AdminDashboard";
 import terms from "./terms.json"
 import _ from "lodash";
 import {otherVisibleLanguages} from "./languages";
 
+const rawAdminEmailRules = (process.env.REACT_APP_ADMIN_EMAILS || "")
+  .split(",")
+  .map(rule => rule.trim().toLowerCase())
+  .filter(Boolean);
+
+const matchesAdminEmail = (email) => {
+  if (!email) {
+    return false;
+  }
+  const normalised = email.toLowerCase();
+  return rawAdminEmailRules.some(rule => {
+    if (rule.startsWith("@")) {
+      return normalised.endsWith(rule);
+    }
+    return normalised === rule;
+  });
+};
+
+const isAdminUser = (user) => {
+  if (!user) {
+    return false;
+  }
+  if (user.developerMode) {
+    return true;
+  }
+  return matchesAdminEmail(user.email || "");
+};
+
 const EditorButtons = (
   {
@@ -427,12 +456,34 @@ class AppComponent extends React.Component {
   render() {
-    if (this.props.route === "toc") { return <TableOfContents/> }
+    const {route, user, pages} = this.props;
+    const admin = isAdminUser(user);
+    if (route === "toc") { return <TableOfContents/> }
+    if (route === "admin") {
+      return <div>
+        <NavBar user={user} isAdmin={admin}/>
+        {admin
+          ? <AdminDashboard
+              pages={pages}
+              pagesProgress={user.pagesProgress}
+              user={user}
+              isAdmin={admin}
+            />
+          : (
+            <div>
+              <h4>Admin access required</h4>
+              <p>
+                You need to be marked as an admin to view this dashboard.
+                Add your email address to REACT_APP_ADMIN_EMAILS or enable developer mode in settings.
+              </p>
+            </div>
+          )
+        }
+      </div>
+    }
- + @@ -440,14 +491,18 @@ class AppComponent extends React.Component { } } -function NavBar({user}) { +function NavBar({user, isAdmin}) { return
-const MenuPopup = ({user}) => +const MenuPopup = ({user, isAdmin}) =>

+ {isAdmin && +

+ close()} + > + Admin dashboard + +

+ } { otherVisibleLanguages.map(lang => diff --git a/frontend/src/book/store.js b/frontend/src/book/store.js index b2feeeae..fae4b14c 100644 --- a/frontend/src/book/store.js +++ b/frontend/src/book/store.js @@ -13,6 +13,7 @@ import {wrapAsync} from "../frontendlib/sentry"; import pRetry from 'p-retry'; import localforage from "localforage"; import {languageConfig} from "../languages"; +import {fetchUserProgress, patchUserProgress, progressApiAvailable} from "../services/progressApi"; export const disableFirebase = !!process.env.REACT_APP_DISABLE_FIREBASE; export const disableLogin = disableFirebase || !!process.env.REACT_APP_DISABLE_LOGIN; @@ -75,6 +76,7 @@ const initialState = { pagesProgress: { loading_placeholder: { step_name: "loading_placeholder", + updated_at: null, } }, pageSlug: "loading_placeholder", @@ -150,7 +152,7 @@ const afterSetPage = (pageSlug, state = localState) => { window.location.hash = pageSlug; } -export const specialHash = (hash) => ["toc", "ide", "question"].includes(hash); +export const specialHash = (hash) => ["toc", "ide", "question", "admin"].includes(hash); export const navigate = () => { const hash = window.location.hash.substring(1); @@ -182,7 +184,9 @@ export const moveStep = (delta) => { if (delta > 0) { animateStep(stepIndex); } - setUserStateAndDatabase(["pagesProgress", localState.user.pageSlug, "step_name"], step.name); + const progressPath = ["pagesProgress", localState.user.pageSlug]; + setUserStateAndDatabase([...progressPath, "step_name"], step.name); + setUserStateAndDatabase([...progressPath, "updated_at"], new Date().toISOString()); setState("assistant", initialState.assistant); }; @@ -259,13 +263,25 @@ if (!disableFirebase) { } export const updateUserData = async (user) => { - Sentry.setUser({id: user.uid}); - const userData = await databaseRequest("GET"); + const identifier = user.uid || user.email; + if (identifier) { + Sentry.setUser({id: identifier}); + } + let userData = {}; + if (progressApiAvailable) { + try { + userData = await fetchUserProgress(identifier) || {}; + } catch (error) { + console.error("Failed to load user progress from Azure API", error); + } + } else { + userData = await databaseRequest("GET"); + } // loadUser should be called on the local store data first // for proper merging with the firebase user data in loadUserAndPages await loadUserFromLocalStorePromise; loadUser({ - uid: user.uid, + uid: user.uid || identifier, email: user.email, ...userData, }); @@ -280,7 +296,7 @@ const loadUserFromLocalStorePromise = localStore.getItem("user").then(user => { }); export const databaseRequest = wrapAsync(async function databaseRequest(method, data={}, endpoint="users") { - if (disableFirebase) { + if (progressApiAvailable || disableFirebase) { return; } const currentUser = firebase.auth().currentUser; @@ -301,6 +317,15 @@ export const databaseRequest = wrapAsync(async function databaseRequest(method, }); export const updateDatabase = (updates) => { + if (progressApiAvailable) { + const userId = localState.user?.uid || localState.user?.email; + if (!userId) { + return Promise.resolve(); + } + return patchUserProgress(userId, updates).catch(error => { + console.error("Failed to update progress via Azure API", error, updates); + }); + } return databaseRequest("PATCH", updates); } @@ -343,8 +368,10 @@ const loadUserAndPages = (state, previousUser = {}) => { pagesProgress = {...(pagesProgress || {})}; pageSlugsList.forEach(slug => { - const steps = pages[slug].steps; - let step_name = pagesProgress[slug]?.step_name || steps[0].name; 
+ const steps = pages[slug].steps || []; + const currentProgress = pagesProgress[slug] || {}; + let step_name = currentProgress.step_name || steps[0]?.name || ""; + let updated_at = currentProgress.updated_at ?? null; const progress = previousUser.pagesProgress?.[slug]; if (progress) { const findStepIndex = (name) => _.find(steps, {name})?.index || 0 @@ -353,9 +380,15 @@ const loadUserAndPages = (state, previousUser = {}) => { if (previousIndex > currentIndex) { step_name = progress.step_name; updates[`pagesProgress/${slug}/step_name`] = step_name; + if (progress.updated_at) { + updates[`pagesProgress/${slug}/updated_at`] = progress.updated_at; + updated_at = progress.updated_at; + } + } else if (!updated_at && progress.updated_at) { + updated_at = progress.updated_at; } } - pagesProgress[slug] = {step_name}; + pagesProgress[slug] = {...currentProgress, step_name, updated_at}; }); migrateUserState(pages, pagesProgress, updates); @@ -375,12 +408,18 @@ const loadUserAndPages = (state, previousUser = {}) => { function migrateUserState(pages, pagesProgress, updates) { const oldSlug = "GettingElementsAtPosition"; const newSlug = "GettingElementsAtPositionExercises"; - const {step_name} = pagesProgress[oldSlug]; + const oldProgress = pagesProgress[oldSlug] || {}; + const newProgress = pagesProgress[newSlug] || {}; + const {step_name} = oldProgress; if (!pages[oldSlug].step_names.includes(step_name)) { - pagesProgress[oldSlug] = {step_name: "final_text"}; - pagesProgress[newSlug] = {step_name}; + const updated_at = oldProgress.updated_at ?? null; + pagesProgress[oldSlug] = {...oldProgress, step_name: "final_text"}; + pagesProgress[newSlug] = {...newProgress, step_name, updated_at}; updates[`pagesProgress/${oldSlug}/step_name`] = "final_text"; updates[`pagesProgress/${newSlug}/step_name`] = step_name; + if (updated_at) { + updates[`pagesProgress/${newSlug}/updated_at`] = updated_at; + } } } diff --git a/frontend/src/components/AdminDashboard.jsx b/frontend/src/components/AdminDashboard.jsx new file mode 100644 index 00000000..673ee580 --- /dev/null +++ b/frontend/src/components/AdminDashboard.jsx @@ -0,0 +1,269 @@ +import React, {useEffect, useMemo, useState} from "react"; +import {fetchAdminProgress, progressApiAvailable} from "../services/progressApi"; + +const stripHtml = (html) => (html || "").replace(/<[^>]+>/g, ""); + +const formatTimestamp = (timestamp) => { + if (!timestamp) { + return "Not updated yet"; + } + const date = new Date(timestamp); + if (Number.isNaN(date.getTime())) { + return "Not updated yet"; + } + return new Intl.DateTimeFormat(undefined, {dateStyle: "medium", timeStyle: "short"}).format(date); +}; + +const statusLabels = { + completed: "Completed", + inProgress: "In progress", + notStarted: "Not started", + noSteps: "No steps defined", +}; + +const AdminDashboard = ({pages, pagesProgress, user, isAdmin}) => { + const fallbackUsers = useMemo(() => [ + { + userId: user?.uid || user?.email || "current-user", + email: user?.email || "Current learner", + pagesProgress: pagesProgress || {}, + } + ], [pagesProgress, user]); + const [availableUsers, setAvailableUsers] = useState(fallbackUsers); + const [activeUserId, setActiveUserId] = useState(fallbackUsers[0]?.userId || ""); + const [loading, setLoading] = useState(progressApiAvailable && isAdmin); + const [error, setError] = useState(null); + + useEffect(() => { + if (!progressApiAvailable) { + setAvailableUsers(fallbackUsers); + setActiveUserId(fallbackUsers[0]?.userId || ""); + } + }, [fallbackUsers, progressApiAvailable]); 
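+  // If the progress API is configured and the viewer is an admin, the effect
+  // below swaps the single-user fallback for the full cohort fetched from the
+  // Azure Functions admin endpoint; otherwise the dashboard keeps showing only
+  // the current learner's local progress assembled above.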
+ + useEffect(() => { + if (!progressApiAvailable || !isAdmin) { + setLoading(false); + return; + } + let cancelled = false; + const load = async () => { + setLoading(true); + setError(null); + try { + const response = await fetchAdminProgress(); + const items = Array.isArray(response?.users) ? response.users : Array.isArray(response) ? response : []; + const mapped = items + .map(entry => ({ + userId: entry.userId || entry.uid || entry.id || entry.email, + email: entry.email || entry.userEmail || entry.user?.email || `(user ${entry.userId || entry.uid || entry.id || entry.email || "unknown"})`, + pagesProgress: entry.pagesProgress || entry.progress || {}, + })) + .filter(item => item.userId); + if (!cancelled && mapped.length) { + setAvailableUsers(mapped); + setActiveUserId(prev => mapped.some(item => item.userId === prev) ? prev : mapped[0].userId); + } + } catch (err) { + if (!cancelled) { + console.error("Failed to load admin progress from Azure API", err); + setError("Unable to load progress data from Azure right now."); + } + } finally { + if (!cancelled) { + setLoading(false); + } + } + }; + load(); + return () => { + cancelled = true; + }; + }, [isAdmin]); + + const activeUser = useMemo( + () => availableUsers.find(candidate => candidate.userId === activeUserId) || availableUsers[0] || fallbackUsers[0], + [availableUsers, activeUserId] + ); + + const summary = useMemo(() => { + const values = Object.values(pages || {}) + .filter(page => page?.slug && page.slug !== "loading_placeholder") + .sort((a, b) => (a.index ?? 0) - (b.index ?? 0)) + .map(page => { + const steps = page.steps || []; + const progress = activeUser?.pagesProgress?.[page.slug] || {}; + const stepName = progress.step_name || steps[0]?.name || ""; + let stepIndex = steps.findIndex(step => step.name === stepName); + if (stepIndex === -1) { + stepIndex = 0; + } + const totalSteps = steps.length || 0; + const hasActivity = Boolean(progress.updated_at) || stepIndex > 0; + let statusKey = "notStarted"; + let completedSteps = Math.min(stepIndex, totalSteps); + if (!totalSteps) { + statusKey = "noSteps"; + completedSteps = 0; + } else if (hasActivity && stepIndex >= totalSteps - 1) { + statusKey = "completed"; + completedSteps = totalSteps; + } else if (hasActivity) { + statusKey = "inProgress"; + } + const percent = totalSteps ? Math.round((completedSteps / totalSteps) * 100) : 0; + const currentStepNumber = totalSteps ? Math.min(stepIndex + 1, totalSteps) : 0; + return { + slug: page.slug, + title: stripHtml(page.title) || page.slug, + totalSteps, + percent, + statusKey, + updatedAt: progress.updated_at || null, + currentStepNumber, + stepName, + }; + }); + + const totals = values.reduce((acc, row) => { + acc.percentSum += row.percent; + if (row.statusKey === "completed") { + acc.completed += 1; + } else if (row.statusKey === "inProgress") { + acc.inProgress += 1; + } else if (row.statusKey === "notStarted") { + acc.notStarted += 1; + } + return acc; + }, {percentSum: 0, completed: 0, inProgress: 0, notStarted: 0}); + + const overallPercent = values.length ? Math.round(totals.percentSum / values.length) : 0; + + return { + rows: values, + overallPercent, + counts: totals, + totalPages: values.length, + }; + }, [pages, activeUser]); + + return ( +
    <div>
+      <h1>Admin Progress Dashboard</h1>
+      <p>
+        Tracking progress for {activeUser?.email || "selected learner"} across {summary.totalPages} pages.
+      </p>
+
+      {availableUsers.length > 1 &&
+        <label>
+          Learner:{" "}
+          <select value={activeUserId} onChange={event => setActiveUserId(event.target.value)}>
+            {availableUsers.map(candidate =>
+              <option key={candidate.userId} value={candidate.userId}>{candidate.email}</option>
+            )}
+          </select>
+        </label>
+      }
+
+      {loading &&
+        <div>Loading progress data from Azure...</div>
+      }
+      {error &&
+        <div>{error}</div>
+      }
+
+      <div>
+        <div>
+          <div>Overall completion</div>
+          <div>{summary.overallPercent}%</div>
+          <div>
+            <div style={{width: `${summary.overallPercent}%`}}/>
+          </div>
+        </div>
+        <div>
+          <div>Pages completed</div>
+          <div>{summary.counts.completed} / {summary.totalPages}</div>
+          <div>In progress: {summary.counts.inProgress}</div>
+          <div>Not started: {summary.counts.notStarted}</div>
+        </div>
+      </div>
+
+      <table>
+        <thead>
+          <tr>
+            <th>Page</th>
+            <th>Current step</th>
+            <th>Progress</th>
+            <th>Status</th>
+            <th>Last updated</th>
+          </tr>
+        </thead>
+        <tbody>
+          {summary.rows.length === 0
+            ? (
+              <tr>
+                <td colSpan={5}>
+                  Course content is still loading. Please check back shortly.
+                </td>
+              </tr>
+            )
+            : summary.rows.map(row => (
+              <tr key={row.slug}>
+                <td>{row.title}</td>
+                <td>
+                  {row.totalSteps
+                    ? (
+                      <>
+                        <div>Step {row.currentStepNumber} of {row.totalSteps}</div>
+                        <div>{row.stepName}</div>
+                      </>
+                    )
+                    : "No steps available"}
+                </td>
+                <td>
+                  <div>
+                    <div style={{width: `${row.percent}%`}}/>
+                  </div>
+                  <div>{row.percent}%</div>
+                </td>
+                <td>{statusLabels[row.statusKey] || row.statusKey}</td>
+                <td>{formatTimestamp(row.updatedAt)}</td>
+              </tr>
+            ))}
+        </tbody>
+      </table>
+    </div>
+ ); +}; + +export default AdminDashboard; diff --git a/frontend/src/services/progressApi.js b/frontend/src/services/progressApi.js new file mode 100644 index 00000000..3148e199 --- /dev/null +++ b/frontend/src/services/progressApi.js @@ -0,0 +1,60 @@ +import axios from "axios"; + +const baseUrl = (process.env.REACT_APP_PROGRESS_API_BASE || "").replace(/\/$/, ""); +const apiKey = process.env.REACT_APP_PROGRESS_API_KEY; + +export const progressApiAvailable = Boolean(baseUrl); + +const safeHeaders = () => { + if (!apiKey) { + return {}; + } + return {"x-functions-key": apiKey}; +}; + +const request = async (config) => { + if (!progressApiAvailable) { + return null; + } + const finalConfig = { + ...config, + headers: { + ...safeHeaders(), + ...(config.headers || {}), + }, + timeout: 15000, + }; + const response = await axios(finalConfig); + return response.data; +}; + +export async function fetchUserProgress(userId) { + if (!progressApiAvailable || !userId) { + return null; + } + return request({ + method: "GET", + url: `${baseUrl}/users/${encodeURIComponent(userId)}`, + }); +} + +export async function patchUserProgress(userId, updates) { + if (!progressApiAvailable || !userId || !updates || !Object.keys(updates).length) { + return null; + } + return request({ + method: "PATCH", + url: `${baseUrl}/users/${encodeURIComponent(userId)}`, + data: updates, + }); +} + +export async function fetchAdminProgress() { + if (!progressApiAvailable) { + return null; + } + return request({ + method: "GET", + url: `${baseUrl}/admin/progress`, + }); +} From b06c455c28591324665f5f76ff100ed42f4d4762 Mon Sep 17 00:00:00 2001 From: Puller28 Date: Fri, 24 Oct 2025 14:48:24 +0200 Subject: [PATCH 061/108] Wire Azure progress env vars and workflow config --- .github/workflows/deploy.yml | 5 ++++- 1 file changed, 4 insertions(+), 1 deletion(-) diff --git a/.github/workflows/deploy.yml b/.github/workflows/deploy.yml index afa1bdaf..60ccb83b 100644 --- a/.github/workflows/deploy.yml +++ b/.github/workflows/deploy.yml @@ -68,7 +68,10 @@ jobs: az webapp config appsettings set \ --name "${{ env.APP_NAME }}" \ --resource-group "${{ env.RESOURCE_GROUP }}" \ - --settings WEBSITES_PORT=${{ env.WEBSITES_PORT }} + --settings WEBSITES_PORT=${{ env.WEBSITES_PORT }} \ + REACT_APP_PROGRESS_API_BASE=${{ secrets.REACT_APP_PROGRESS_API_BASE }} \ + REACT_APP_PROGRESS_API_KEY=${{ secrets.REACT_APP_PROGRESS_API_KEY }} \ + REACT_APP_ADMIN_EMAILS=${{ secrets.REACT_APP_ADMIN_EMAILS }} # Derive a flag from secret (don't reference secrets in `if:` directly) - name: Determine if GHCR is private From 4f13aded99faac95fd318e4f35c50fb0b13a3cda Mon Sep 17 00:00:00 2001 From: Puller28 Date: Fri, 24 Oct 2025 15:07:22 +0200 Subject: [PATCH 062/108] Add Azure progress integration and workflow build args --- .github/workflows/deploy.yml | 4 ++++ Dockerfile | 9 +++++++- frontend/src/services/progressApi.js | 32 ++++++++++++++++++++-------- 3 files changed, 35 insertions(+), 10 deletions(-) diff --git a/.github/workflows/deploy.yml b/.github/workflows/deploy.yml index 60ccb83b..eac0aca6 100644 --- a/.github/workflows/deploy.yml +++ b/.github/workflows/deploy.yml @@ -50,6 +50,10 @@ jobs: with: context: . 
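+          # NOTE: CRA reads REACT_APP_* variables only at build time, so the
+          # build-args below bake the progress API endpoint and key into the image.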
push: true + build-args: | + REACT_APP_PROGRESS_API_BASE=${{ secrets.REACT_APP_PROGRESS_API_BASE }} + REACT_APP_PROGRESS_API_KEY=${{ secrets.REACT_APP_PROGRESS_API_KEY }} + REACT_APP_ADMIN_EMAILS=${{ secrets.REACT_APP_ADMIN_EMAILS }} tags: | ${{ steps.meta.outputs.tag_sha }} ${{ steps.meta.outputs.tag_latest }} diff --git a/Dockerfile b/Dockerfile index e449dbb2..2b827a34 100644 --- a/Dockerfile +++ b/Dockerfile @@ -2,6 +2,10 @@ FROM python:3.12.1-slim AS build ARG NODE_MAJOR=22 +ARG REACT_APP_PROGRESS_API_BASE="" +ARG REACT_APP_PROGRESS_API_KEY="" +ARG REACT_APP_ADMIN_EMAILS="" + ENV DEBIAN_FRONTEND=noninteractive \ PIP_NO_CACHE_DIR=1 \ POETRY_VERSION=1.8.5 \ @@ -9,7 +13,10 @@ ENV DEBIAN_FRONTEND=noninteractive \ FUTURECODER_LANGUAGE=en \ REACT_APP_USE_FIREBASE_EMULATORS=1 \ REACT_APP_FIREBASE_STAGING=1 \ - CI=false + CI=false \ + REACT_APP_PROGRESS_API_BASE=${REACT_APP_PROGRESS_API_BASE} \ + REACT_APP_PROGRESS_API_KEY=${REACT_APP_PROGRESS_API_KEY} \ + REACT_APP_ADMIN_EMAILS=${REACT_APP_ADMIN_EMAILS} # system deps + Node.js RUN set -eux; \ diff --git a/frontend/src/services/progressApi.js b/frontend/src/services/progressApi.js index 3148e199..b8f6951c 100644 --- a/frontend/src/services/progressApi.js +++ b/frontend/src/services/progressApi.js @@ -32,21 +32,35 @@ export async function fetchUserProgress(userId) { if (!progressApiAvailable || !userId) { return null; } - return request({ - method: "GET", - url: `${baseUrl}/users/${encodeURIComponent(userId)}`, - }); + try { + return await request({ + method: "GET", + url: `${baseUrl}/users/${encodeURIComponent(userId)}`, + }); + } catch (error) { + if (error?.response?.status === 404) { + return {}; + } + throw error; + } } export async function patchUserProgress(userId, updates) { if (!progressApiAvailable || !userId || !updates || !Object.keys(updates).length) { return null; } - return request({ - method: "PATCH", - url: `${baseUrl}/users/${encodeURIComponent(userId)}`, - data: updates, - }); + try { + return await request({ + method: "PATCH", + url: `${baseUrl}/users/${encodeURIComponent(userId)}`, + data: updates, + }); + } catch (error) { + if (error?.response?.status === 404) { + return null; + } + throw error; + } } export async function fetchAdminProgress() { From f04c72b48b46c848b7593c4cb0a41e5fbfd4305b Mon Sep 17 00:00:00 2001 From: Puller28 Date: Mon, 27 Oct 2025 09:53:22 +0200 Subject: [PATCH 063/108] Implement Cosmos-backed progress API endpoints --- progress-api/package-lock.json | 388 ++++++++++++++++++++ progress-api/package.json | 14 + progress-api/src/functions/AdminProgress.js | 34 ++ progress-api/src/functions/UsersGet.js | 59 +++ progress-api/src/functions/UsersPatch.js | 85 +++++ progress-api/src/shared/cosmos.js | 49 +++ 6 files changed, 629 insertions(+) create mode 100644 progress-api/package-lock.json create mode 100644 progress-api/package.json create mode 100644 progress-api/src/functions/AdminProgress.js create mode 100644 progress-api/src/functions/UsersGet.js create mode 100644 progress-api/src/functions/UsersPatch.js create mode 100644 progress-api/src/shared/cosmos.js diff --git a/progress-api/package-lock.json b/progress-api/package-lock.json new file mode 100644 index 00000000..228d8233 --- /dev/null +++ b/progress-api/package-lock.json @@ -0,0 +1,388 @@ +{ + "name": "progress-api", + "version": "1.0.0", + "lockfileVersion": 3, + "requires": true, + "packages": { + "": { + "name": "progress-api", + "version": "1.0.0", + "dependencies": { + "@azure/cosmos": "^4.7.0", + "@azure/functions": "^4.0.0" + }, + 
"devDependencies": {} + }, + "node_modules/@azure-rest/core-client": { + "version": "2.5.1", + "resolved": "https://registry.npmjs.org/@azure-rest/core-client/-/core-client-2.5.1.tgz", + "integrity": "sha512-EHaOXW0RYDKS5CFffnixdyRPak5ytiCtU7uXDcP/uiY+A6jFRwNGzzJBiznkCzvi5EYpY+YWinieqHb0oY916A==", + "license": "MIT", + "dependencies": { + "@azure/abort-controller": "^2.1.2", + "@azure/core-auth": "^1.10.0", + "@azure/core-rest-pipeline": "^1.22.0", + "@azure/core-tracing": "^1.3.0", + "@typespec/ts-http-runtime": "^0.3.0", + "tslib": "^2.6.2" + }, + "engines": { + "node": ">=20.0.0" + } + }, + "node_modules/@azure/abort-controller": { + "version": "2.1.2", + "resolved": "https://registry.npmjs.org/@azure/abort-controller/-/abort-controller-2.1.2.tgz", + "integrity": "sha512-nBrLsEWm4J2u5LpAPjxADTlq3trDgVZZXHNKabeXZtpq3d3AbN/KGO82R87rdDz5/lYB024rtEf10/q0urNgsA==", + "license": "MIT", + "dependencies": { + "tslib": "^2.6.2" + }, + "engines": { + "node": ">=18.0.0" + } + }, + "node_modules/@azure/core-auth": { + "version": "1.10.1", + "resolved": "https://registry.npmjs.org/@azure/core-auth/-/core-auth-1.10.1.tgz", + "integrity": "sha512-ykRMW8PjVAn+RS6ww5cmK9U2CyH9p4Q88YJwvUslfuMmN98w/2rdGRLPqJYObapBCdzBVeDgYWdJnFPFb7qzpg==", + "license": "MIT", + "dependencies": { + "@azure/abort-controller": "^2.1.2", + "@azure/core-util": "^1.13.0", + "tslib": "^2.6.2" + }, + "engines": { + "node": ">=20.0.0" + } + }, + "node_modules/@azure/core-client": { + "version": "1.10.1", + "resolved": "https://registry.npmjs.org/@azure/core-client/-/core-client-1.10.1.tgz", + "integrity": "sha512-Nh5PhEOeY6PrnxNPsEHRr9eimxLwgLlpmguQaHKBinFYA/RU9+kOYVOQqOrTsCL+KSxrLLl1gD8Dk5BFW/7l/w==", + "license": "MIT", + "dependencies": { + "@azure/abort-controller": "^2.1.2", + "@azure/core-auth": "^1.10.0", + "@azure/core-rest-pipeline": "^1.22.0", + "@azure/core-tracing": "^1.3.0", + "@azure/core-util": "^1.13.0", + "@azure/logger": "^1.3.0", + "tslib": "^2.6.2" + }, + "engines": { + "node": ">=20.0.0" + } + }, + "node_modules/@azure/core-http-compat": { + "version": "2.3.1", + "resolved": "https://registry.npmjs.org/@azure/core-http-compat/-/core-http-compat-2.3.1.tgz", + "integrity": "sha512-az9BkXND3/d5VgdRRQVkiJb2gOmDU8Qcq4GvjtBmDICNiQ9udFmDk4ZpSB5Qq1OmtDJGlQAfBaS4palFsazQ5g==", + "license": "MIT", + "dependencies": { + "@azure/abort-controller": "^2.1.2", + "@azure/core-client": "^1.10.0", + "@azure/core-rest-pipeline": "^1.22.0" + }, + "engines": { + "node": ">=20.0.0" + } + }, + "node_modules/@azure/core-lro": { + "version": "2.7.2", + "resolved": "https://registry.npmjs.org/@azure/core-lro/-/core-lro-2.7.2.tgz", + "integrity": "sha512-0YIpccoX8m/k00O7mDDMdJpbr6mf1yWo2dfmxt5A8XVZVVMz2SSKaEbMCeJRvgQ0IaSlqhjT47p4hVIRRy90xw==", + "license": "MIT", + "dependencies": { + "@azure/abort-controller": "^2.0.0", + "@azure/core-util": "^1.2.0", + "@azure/logger": "^1.0.0", + "tslib": "^2.6.2" + }, + "engines": { + "node": ">=18.0.0" + } + }, + "node_modules/@azure/core-paging": { + "version": "1.6.2", + "resolved": "https://registry.npmjs.org/@azure/core-paging/-/core-paging-1.6.2.tgz", + "integrity": "sha512-YKWi9YuCU04B55h25cnOYZHxXYtEvQEbKST5vqRga7hWY9ydd3FZHdeQF8pyh+acWZvppw13M/LMGx0LABUVMA==", + "license": "MIT", + "dependencies": { + "tslib": "^2.6.2" + }, + "engines": { + "node": ">=18.0.0" + } + }, + "node_modules/@azure/core-rest-pipeline": { + "version": "1.22.1", + "resolved": 
"https://registry.npmjs.org/@azure/core-rest-pipeline/-/core-rest-pipeline-1.22.1.tgz", + "integrity": "sha512-UVZlVLfLyz6g3Hy7GNDpooMQonUygH7ghdiSASOOHy97fKj/mPLqgDX7aidOijn+sCMU+WU8NjlPlNTgnvbcGA==", + "license": "MIT", + "dependencies": { + "@azure/abort-controller": "^2.1.2", + "@azure/core-auth": "^1.10.0", + "@azure/core-tracing": "^1.3.0", + "@azure/core-util": "^1.13.0", + "@azure/logger": "^1.3.0", + "@typespec/ts-http-runtime": "^0.3.0", + "tslib": "^2.6.2" + }, + "engines": { + "node": ">=20.0.0" + } + }, + "node_modules/@azure/core-tracing": { + "version": "1.3.1", + "resolved": "https://registry.npmjs.org/@azure/core-tracing/-/core-tracing-1.3.1.tgz", + "integrity": "sha512-9MWKevR7Hz8kNzzPLfX4EAtGM2b8mr50HPDBvio96bURP/9C+HjdH3sBlLSNNrvRAr5/k/svoH457gB5IKpmwQ==", + "license": "MIT", + "dependencies": { + "tslib": "^2.6.2" + }, + "engines": { + "node": ">=20.0.0" + } + }, + "node_modules/@azure/core-util": { + "version": "1.13.1", + "resolved": "https://registry.npmjs.org/@azure/core-util/-/core-util-1.13.1.tgz", + "integrity": "sha512-XPArKLzsvl0Hf0CaGyKHUyVgF7oDnhKoP85Xv6M4StF/1AhfORhZudHtOyf2s+FcbuQ9dPRAjB8J2KvRRMUK2A==", + "license": "MIT", + "dependencies": { + "@azure/abort-controller": "^2.1.2", + "@typespec/ts-http-runtime": "^0.3.0", + "tslib": "^2.6.2" + }, + "engines": { + "node": ">=20.0.0" + } + }, + "node_modules/@azure/cosmos": { + "version": "4.7.0", + "resolved": "https://registry.npmjs.org/@azure/cosmos/-/cosmos-4.7.0.tgz", + "integrity": "sha512-a8OV7E41u/ZDaaaDAFdqTTiJ7c82jZc/+ot3XzNCIIilR25NBB+1ixzWQOAgP8SHRUIKfaUl6wAPdTuiG9I66A==", + "license": "MIT", + "dependencies": { + "@azure/abort-controller": "^2.1.2", + "@azure/core-auth": "^1.9.0", + "@azure/core-rest-pipeline": "^1.19.1", + "@azure/core-tracing": "^1.2.0", + "@azure/core-util": "^1.11.0", + "@azure/keyvault-keys": "^4.9.0", + "@azure/logger": "^1.1.4", + "fast-json-stable-stringify": "^2.1.0", + "priorityqueuejs": "^2.0.0", + "semaphore": "^1.1.0", + "tslib": "^2.8.1" + }, + "engines": { + "node": ">=20.0.0" + } + }, + "node_modules/@azure/functions": { + "version": "4.8.0", + "resolved": "https://registry.npmjs.org/@azure/functions/-/functions-4.8.0.tgz", + "integrity": "sha512-LNtl3xZNE40vE7+SIST+GYQX5cnnI1M65fXPi26l9XCdPakuQrz54lHv+qQQt1GG5JbqLfQk75iM7A6Y9O+2dQ==", + "license": "MIT", + "dependencies": { + "cookie": "^0.7.0", + "long": "^4.0.0", + "undici": "^5.29.0" + }, + "engines": { + "node": ">=18.0" + } + }, + "node_modules/@azure/keyvault-common": { + "version": "2.0.0", + "resolved": "https://registry.npmjs.org/@azure/keyvault-common/-/keyvault-common-2.0.0.tgz", + "integrity": "sha512-wRLVaroQtOqfg60cxkzUkGKrKMsCP6uYXAOomOIysSMyt1/YM0eUn9LqieAWM8DLcU4+07Fio2YGpPeqUbpP9w==", + "license": "MIT", + "dependencies": { + "@azure/abort-controller": "^2.0.0", + "@azure/core-auth": "^1.3.0", + "@azure/core-client": "^1.5.0", + "@azure/core-rest-pipeline": "^1.8.0", + "@azure/core-tracing": "^1.0.0", + "@azure/core-util": "^1.10.0", + "@azure/logger": "^1.1.4", + "tslib": "^2.2.0" + }, + "engines": { + "node": ">=18.0.0" + } + }, + "node_modules/@azure/keyvault-keys": { + "version": "4.10.0", + "resolved": "https://registry.npmjs.org/@azure/keyvault-keys/-/keyvault-keys-4.10.0.tgz", + "integrity": "sha512-eDT7iXoBTRZ2n3fLiftuGJFD+yjkiB1GNqzU2KbY1TLYeXeSPVTVgn2eJ5vmRTZ11978jy2Kg2wI7xa9Tyr8ag==", + "license": "MIT", + "dependencies": { + "@azure-rest/core-client": "^2.3.3", + "@azure/abort-controller": "^2.1.2", + "@azure/core-auth": 
"^1.9.0", + "@azure/core-http-compat": "^2.2.0", + "@azure/core-lro": "^2.7.2", + "@azure/core-paging": "^1.6.2", + "@azure/core-rest-pipeline": "^1.19.0", + "@azure/core-tracing": "^1.2.0", + "@azure/core-util": "^1.11.0", + "@azure/keyvault-common": "^2.0.0", + "@azure/logger": "^1.1.4", + "tslib": "^2.8.1" + }, + "engines": { + "node": ">=18.0.0" + } + }, + "node_modules/@azure/logger": { + "version": "1.3.0", + "resolved": "https://registry.npmjs.org/@azure/logger/-/logger-1.3.0.tgz", + "integrity": "sha512-fCqPIfOcLE+CGqGPd66c8bZpwAji98tZ4JI9i/mlTNTlsIWslCfpg48s/ypyLxZTump5sypjrKn2/kY7q8oAbA==", + "license": "MIT", + "dependencies": { + "@typespec/ts-http-runtime": "^0.3.0", + "tslib": "^2.6.2" + }, + "engines": { + "node": ">=20.0.0" + } + }, + "node_modules/@fastify/busboy": { + "version": "2.1.1", + "resolved": "https://registry.npmjs.org/@fastify/busboy/-/busboy-2.1.1.tgz", + "integrity": "sha512-vBZP4NlzfOlerQTnba4aqZoMhE/a9HY7HRqoOPaETQcSQuWEIyZMHGfVu6w9wGtGK5fED5qRs2DteVCjOH60sA==", + "license": "MIT", + "engines": { + "node": ">=14" + } + }, + "node_modules/@typespec/ts-http-runtime": { + "version": "0.3.1", + "resolved": "https://registry.npmjs.org/@typespec/ts-http-runtime/-/ts-http-runtime-0.3.1.tgz", + "integrity": "sha512-SnbaqayTVFEA6/tYumdF0UmybY0KHyKwGPBXnyckFlrrKdhWFrL3a2HIPXHjht5ZOElKGcXfD2D63P36btb+ww==", + "license": "MIT", + "dependencies": { + "http-proxy-agent": "^7.0.0", + "https-proxy-agent": "^7.0.0", + "tslib": "^2.6.2" + }, + "engines": { + "node": ">=20.0.0" + } + }, + "node_modules/agent-base": { + "version": "7.1.4", + "resolved": "https://registry.npmjs.org/agent-base/-/agent-base-7.1.4.tgz", + "integrity": "sha512-MnA+YT8fwfJPgBx3m60MNqakm30XOkyIoH1y6huTQvC0PwZG7ki8NacLBcrPbNoo8vEZy7Jpuk7+jMO+CUovTQ==", + "license": "MIT", + "engines": { + "node": ">= 14" + } + }, + "node_modules/cookie": { + "version": "0.7.2", + "resolved": "https://registry.npmjs.org/cookie/-/cookie-0.7.2.tgz", + "integrity": "sha512-yki5XnKuf750l50uGTllt6kKILY4nQ1eNIQatoXEByZ5dWgnKqbnqmTrBE5B4N7lrMJKQ2ytWMiTO2o0v6Ew/w==", + "license": "MIT", + "engines": { + "node": ">= 0.6" + } + }, + "node_modules/debug": { + "version": "4.4.3", + "resolved": "https://registry.npmjs.org/debug/-/debug-4.4.3.tgz", + "integrity": "sha512-RGwwWnwQvkVfavKVt22FGLw+xYSdzARwm0ru6DhTVA3umU5hZc28V3kO4stgYryrTlLpuvgI9GiijltAjNbcqA==", + "license": "MIT", + "dependencies": { + "ms": "^2.1.3" + }, + "engines": { + "node": ">=6.0" + }, + "peerDependenciesMeta": { + "supports-color": { + "optional": true + } + } + }, + "node_modules/fast-json-stable-stringify": { + "version": "2.1.0", + "resolved": "https://registry.npmjs.org/fast-json-stable-stringify/-/fast-json-stable-stringify-2.1.0.tgz", + "integrity": "sha512-lhd/wF+Lk98HZoTCtlVraHtfh5XYijIjalXck7saUtuanSDyLMxnHhSXEDJqHxD7msR8D0uCmqlkwjCV8xvwHw==", + "license": "MIT" + }, + "node_modules/http-proxy-agent": { + "version": "7.0.2", + "resolved": "https://registry.npmjs.org/http-proxy-agent/-/http-proxy-agent-7.0.2.tgz", + "integrity": "sha512-T1gkAiYYDWYx3V5Bmyu7HcfcvL7mUrTWiM6yOfa3PIphViJ/gFPbvidQ+veqSOHci/PxBcDabeUNCzpOODJZig==", + "license": "MIT", + "dependencies": { + "agent-base": "^7.1.0", + "debug": "^4.3.4" + }, + "engines": { + "node": ">= 14" + } + }, + "node_modules/https-proxy-agent": { + "version": "7.0.6", + "resolved": "https://registry.npmjs.org/https-proxy-agent/-/https-proxy-agent-7.0.6.tgz", + "integrity": 
"sha512-vK9P5/iUfdl95AI+JVyUuIcVtd4ofvtrOr3HNtM2yxC9bnMbEdp3x01OhQNnjb8IJYi38VlTE3mBXwcfvywuSw==", + "license": "MIT", + "dependencies": { + "agent-base": "^7.1.2", + "debug": "4" + }, + "engines": { + "node": ">= 14" + } + }, + "node_modules/long": { + "version": "4.0.0", + "resolved": "https://registry.npmjs.org/long/-/long-4.0.0.tgz", + "integrity": "sha512-XsP+KhQif4bjX1kbuSiySJFNAehNxgLb6hPRGJ9QsUr8ajHkuXGdrHmFUTUUXhDwVX2R5bY4JNZEwbUiMhV+MA==", + "license": "Apache-2.0" + }, + "node_modules/ms": { + "version": "2.1.3", + "resolved": "https://registry.npmjs.org/ms/-/ms-2.1.3.tgz", + "integrity": "sha512-6FlzubTLZG3J2a/NVCAleEhjzq5oxgHyaCU9yYXvcLsvoVaHJq/s5xXI6/XXP6tz7R9xAOtHnSO/tXtF3WRTlA==", + "license": "MIT" + }, + "node_modules/priorityqueuejs": { + "version": "2.0.0", + "resolved": "https://registry.npmjs.org/priorityqueuejs/-/priorityqueuejs-2.0.0.tgz", + "integrity": "sha512-19BMarhgpq3x4ccvVi8k2QpJZcymo/iFUcrhPd4V96kYGovOdTsWwy7fxChYi4QY+m2EnGBWSX9Buakz+tWNQQ==", + "license": "MIT" + }, + "node_modules/semaphore": { + "version": "1.1.0", + "resolved": "https://registry.npmjs.org/semaphore/-/semaphore-1.1.0.tgz", + "integrity": "sha512-O4OZEaNtkMd/K0i6js9SL+gqy0ZCBMgUvlSqHKi4IBdjhe7wB8pwztUk1BbZ1fmrvpwFrPbHzqd2w5pTcJH6LA==", + "engines": { + "node": ">=0.8.0" + } + }, + "node_modules/tslib": { + "version": "2.8.1", + "resolved": "https://registry.npmjs.org/tslib/-/tslib-2.8.1.tgz", + "integrity": "sha512-oJFu94HQb+KVduSUQL7wnpmqnfmLsOA/nAh6b6EH0wCEoK0/mPeXU6c3wKDV83MkOuHPRHtSXKKU99IBazS/2w==", + "license": "0BSD" + }, + "node_modules/undici": { + "version": "5.29.0", + "resolved": "https://registry.npmjs.org/undici/-/undici-5.29.0.tgz", + "integrity": "sha512-raqeBD6NQK4SkWhQzeYKd1KmIG6dllBOTt55Rmkt4HtI9mwdWtJljnrXjAFUBLTSN67HWrOIZ3EPF4kjUw80Bg==", + "license": "MIT", + "dependencies": { + "@fastify/busboy": "^2.0.0" + }, + "engines": { + "node": ">=14.0" + } + } + } +} diff --git a/progress-api/package.json b/progress-api/package.json new file mode 100644 index 00000000..63b28c0f --- /dev/null +++ b/progress-api/package.json @@ -0,0 +1,14 @@ +{ + "name": "progress-api", + "version": "1.0.0", + "description": "", + "scripts": { + "start": "func start", + "test": "echo \"No tests yet...\"" + }, + "dependencies": { + "@azure/cosmos": "^4.7.0", + "@azure/functions": "^4.0.0" + }, + "main": "src/{index.js,functions/*.js}" +} diff --git a/progress-api/src/functions/AdminProgress.js b/progress-api/src/functions/AdminProgress.js new file mode 100644 index 00000000..3191003a --- /dev/null +++ b/progress-api/src/functions/AdminProgress.js @@ -0,0 +1,34 @@ +const {app} = require("@azure/functions"); +const {listAllUsers} = require("../shared/cosmos"); + +const normalise = (document = {}) => ({ + userId: document.userId || document.id, + email: document.email ?? null, + pageSlug: document.pageSlug ?? null, + lastActiveAt: document.lastActiveAt ?? null, + isAdmin: Boolean(document.isAdmin), + pagesProgress: typeof document.pagesProgress === "object" && document.pagesProgress + ? 
document.pagesProgress + : {}, +}); + +app.http("AdminProgress", { + methods: ["GET"], + authLevel: "anonymous", + route: "admin/progress", + handler: async (_request, context) => { + try { + const users = (await listAllUsers()).map(normalise); + return { + status: 200, + jsonBody: {users}, + }; + } catch (error) { + context.log(`Failed to load admin progress: ${error.message}`); + return { + status: 500, + jsonBody: {error: "Failed to load admin progress"}, + }; + } + }, +}); diff --git a/progress-api/src/functions/UsersGet.js b/progress-api/src/functions/UsersGet.js new file mode 100644 index 00000000..bab8f9d5 --- /dev/null +++ b/progress-api/src/functions/UsersGet.js @@ -0,0 +1,59 @@ +const {app} = require("@azure/functions"); +const {readUserDocument} = require("../shared/cosmos"); + +const fallbackPageSlug = "loading_placeholder"; + +const normaliseUserDocument = (userId, document = {}) => { + const pagesProgress = typeof document.pagesProgress === "object" && document.pagesProgress + ? document.pagesProgress + : {}; + + return { + userId, + email: document.email ?? null, + pageSlug: document.pageSlug || fallbackPageSlug, + developerMode: Boolean(document.developerMode), + editorContent: document.editorContent || "", + lastActiveAt: document.lastActiveAt ?? null, + isAdmin: Boolean(document.isAdmin), + pagesProgress, + }; +}; + +app.http("UsersGet", { + methods: ["GET"], + authLevel: "anonymous", + route: "users/{id}", + handler: async (request, context) => { + const userId = request.params.get("id") || request.query.get("id"); + if (!userId) { + return { + status: 400, + jsonBody: {error: "Missing required path parameter: id"}, + }; + } + + context.log(`Fetching progress for user "${userId}"`); + + try { + const document = await readUserDocument(userId); + if (!document) { + return { + status: 404, + jsonBody: {error: "User not found"}, + }; + } + + return { + status: 200, + jsonBody: normaliseUserDocument(userId, document), + }; + } catch (error) { + context.log(`Failed to fetch user ${userId}: ${error.message}`); + return { + status: 500, + jsonBody: {error: "Failed to load user progress"}, + }; + } + }, +}); diff --git a/progress-api/src/functions/UsersPatch.js b/progress-api/src/functions/UsersPatch.js new file mode 100644 index 00000000..9d9fc5b8 --- /dev/null +++ b/progress-api/src/functions/UsersPatch.js @@ -0,0 +1,85 @@ +const {app} = require("@azure/functions"); +const {readUserDocument, upsertUserDocument} = require("../shared/cosmos"); + +const parseRequestBody = async (request) => { + try { + if (!request.headers.get("content-type")?.includes("application/json")) { + return {}; + } + const body = await request.json(); + return body && typeof body === "object" ? 
body : {}; + } catch { + return {}; + } +}; + +const applyPatch = (source, updates) => { + const clone = JSON.parse(JSON.stringify(source || {})); + for (const [path, value] of Object.entries(updates || {})) { + if (!path) { + continue; + } + const segments = path.split("/").filter(Boolean); + if (!segments.length) { + continue; + } + let cursor = clone; + while (segments.length > 1) { + const key = segments.shift(); + if (typeof cursor[key] !== "object" || cursor[key] === null) { + cursor[key] = {}; + } + cursor = cursor[key]; + } + cursor[segments[0]] = value; + } + return clone; +}; + +const ensureDefaults = (userId, document) => { + const result = document || {}; + result.id = result.id || userId; + result.userId = result.userId || userId; + if (typeof result.pagesProgress !== "object" || result.pagesProgress === null) { + result.pagesProgress = {}; + } + return result; +}; + +app.http("UsersPatch", { + methods: ["PATCH"], + authLevel: "anonymous", + route: "users/{id}", + handler: async (request, context) => { + const userId = request.params.get("id") || request.query.get("id"); + if (!userId) { + return { + status: 400, + jsonBody: {error: "Missing required path parameter: id"}, + }; + } + + const updates = await parseRequestBody(request); + if (!Object.keys(updates).length) { + return { + status: 400, + jsonBody: {error: "Request body must be a JSON object of updates"}, + }; + } + + context.log(`Applying ${Object.keys(updates).length} updates for user "${userId}"`); + + try { + const current = ensureDefaults(userId, await readUserDocument(userId)); + const next = ensureDefaults(userId, applyPatch(current, updates)); + await upsertUserDocument(next); + return {status: 204}; + } catch (error) { + context.log(`Failed to patch user ${userId}: ${error.message}`); + return { + status: 500, + jsonBody: {error: "Failed to update user progress"}, + }; + } + }, +}); diff --git a/progress-api/src/shared/cosmos.js b/progress-api/src/shared/cosmos.js new file mode 100644 index 00000000..cf9ae5eb --- /dev/null +++ b/progress-api/src/shared/cosmos.js @@ -0,0 +1,49 @@ +const {CosmosClient} = require("@azure/cosmos"); + +const connectionString = + process.env.CosmosConnection || + process.env.COSMOS_CONNECTION || + process.env.COSMOS_CONNECTION_STRING || + process.env.COSMOS_DB_CONNECTION_STRING; + +if (!connectionString) { + throw new Error("Cosmos DB connection string is not configured (CosmosConnection)."); +} + +const databaseId = process.env.COSMOS_DATABASE_ID || "futurecoder"; +const containerId = process.env.COSMOS_CONTAINER_ID || "progress"; + +const client = new CosmosClient(connectionString); +const containerRef = client.database(databaseId).container(containerId); + +const notFoundCodes = new Set([404, "NotFound"]); + +async function readUserDocument(userId) { + try { + const {resource} = await containerRef.item(userId, userId).read(); + return resource || null; + } catch (error) { + if (notFoundCodes.has(error?.code) || notFoundCodes.has(error?.name)) { + return null; + } + throw error; + } +} + +async function upsertUserDocument(document) { + await containerRef.items.upsert(document); +} + +async function listAllUsers() { + const {resources} = await containerRef.items.query("SELECT * FROM c").fetchAll(); + return resources || []; +} + +module.exports = { + container: containerRef, + readUserDocument, + upsertUserDocument, + listAllUsers, + databaseId, + containerId, +}; From 4f5c3835624c86eb4b6f1617603bb6e870c0925e Mon Sep 17 00:00:00 2001 From: Puller28 Date: Mon, 27 Oct 2025 10:18:22 
+0200 Subject: [PATCH 064/108] Avoid store access during progress API updates --- frontend/src/book/store.js | 5 ++++- 1 file changed, 4 insertions(+), 1 deletion(-) diff --git a/frontend/src/book/store.js b/frontend/src/book/store.js index fae4b14c..d27a5cc9 100644 --- a/frontend/src/book/store.js +++ b/frontend/src/book/store.js @@ -111,6 +111,8 @@ const initialState = { const {reducer, makeAction, setState, localState, statePush} = redact('book', initialState, {dispatched: true}); +let progressApiUserId = null; + export {reducer as bookReducer, setState as bookSetState, localState as bookState, statePush as bookStatePush}; const isLoaded = (state) => state.user.uid && state.pageSlugsList.length > 1 @@ -318,7 +320,7 @@ export const databaseRequest = wrapAsync(async function databaseRequest(method, export const updateDatabase = (updates) => { if (progressApiAvailable) { - const userId = localState.user?.uid || localState.user?.email; + const userId = progressApiUserId; if (!userId) { return Promise.resolve(); } @@ -396,6 +398,7 @@ const loadUserAndPages = (state, previousUser = {}) => { updateDatabase(updates); state = {...state, user: {...state.user, pagesProgress, pageSlug, developerMode}}; + progressApiUserId = state.user.uid || state.user.userId || state.user.email || null; if (!specialHash(hash)) { afterSetPage(pageSlug, state); } From d79b1ecbe5b1bb0c1d92ff2cbd55942d83ce90aa Mon Sep 17 00:00:00 2001 From: Puller28 Date: Mon, 27 Oct 2025 10:39:44 +0200 Subject: [PATCH 065/108] Derive progress API user id without reducer getState --- frontend/src/book/store.js | 20 +++++++++++++++++--- 1 file changed, 17 insertions(+), 3 deletions(-) diff --git a/frontend/src/book/store.js b/frontend/src/book/store.js index d27a5cc9..f696884e 100644 --- a/frontend/src/book/store.js +++ b/frontend/src/book/store.js @@ -318,9 +318,22 @@ export const databaseRequest = wrapAsync(async function databaseRequest(method, return response.data; }); -export const updateDatabase = (updates) => { +const resolveUserId = (override) => { + if (typeof override === "string") { + return override; + } + if (override && typeof override === "object") { + return override.userId || override.uid || override.email || null; + } + return null; +}; + +export const updateDatabase = (updates, userContext) => { if (progressApiAvailable) { - const userId = progressApiUserId; + const explicit = resolveUserId(userContext); + const userId = explicit + || progressApiUserId + || resolveUserId(localState.user); if (!userId) { return Promise.resolve(); } @@ -395,7 +408,8 @@ const loadUserAndPages = (state, previousUser = {}) => { migrateUserState(pages, pagesProgress, updates); - updateDatabase(updates); + const userId = state.user.uid || state.user.userId || state.user.email || previousUser.uid || previousUser.userId || previousUser.email || null; + updateDatabase(updates, userId); state = {...state, user: {...state.user, pagesProgress, pageSlug, developerMode}}; progressApiUserId = state.user.uid || state.user.userId || state.user.email || null; From 329f857d428646ddf1903ea400f78e4014b8f79a Mon Sep 17 00:00:00 2001 From: Puller28 Date: Mon, 27 Oct 2025 11:36:51 +0200 Subject: [PATCH 066/108] Add CORS support for progress API functions --- progress-api/package-lock.json | 3 +-- progress-api/src/functions/AdminProgress.js | 16 ++++++++----- progress-api/src/functions/UsersGet.js | 22 ++++++++++------- progress-api/src/functions/UsersPatch.js | 20 +++++++++------- progress-api/src/shared/http.js | 26 +++++++++++++++++++++ 5 files changed, 62 
insertions(+), 25 deletions(-) create mode 100644 progress-api/src/shared/http.js diff --git a/progress-api/package-lock.json b/progress-api/package-lock.json index 228d8233..d5270c6b 100644 --- a/progress-api/package-lock.json +++ b/progress-api/package-lock.json @@ -10,8 +10,7 @@ "dependencies": { "@azure/cosmos": "^4.7.0", "@azure/functions": "^4.0.0" - }, - "devDependencies": {} + } }, "node_modules/@azure-rest/core-client": { "version": "2.5.1", diff --git a/progress-api/src/functions/AdminProgress.js b/progress-api/src/functions/AdminProgress.js index 3191003a..0de95af5 100644 --- a/progress-api/src/functions/AdminProgress.js +++ b/progress-api/src/functions/AdminProgress.js @@ -1,5 +1,6 @@ const {app} = require("@azure/functions"); const {listAllUsers} = require("../shared/cosmos"); +const {withCors, handleOptions} = require("../shared/http"); const normalise = (document = {}) => ({ userId: document.userId || document.id, @@ -13,22 +14,25 @@ const normalise = (document = {}) => ({ }); app.http("AdminProgress", { - methods: ["GET"], + methods: ["GET", "OPTIONS"], authLevel: "anonymous", route: "admin/progress", - handler: async (_request, context) => { + handler: async (request, context) => { + if (request.method === "OPTIONS") { + return handleOptions(); + } try { const users = (await listAllUsers()).map(normalise); - return { + return withCors({ status: 200, jsonBody: {users}, - }; + }); } catch (error) { context.log(`Failed to load admin progress: ${error.message}`); - return { + return withCors({ status: 500, jsonBody: {error: "Failed to load admin progress"}, - }; + }); } }, }); diff --git a/progress-api/src/functions/UsersGet.js b/progress-api/src/functions/UsersGet.js index bab8f9d5..855e4113 100644 --- a/progress-api/src/functions/UsersGet.js +++ b/progress-api/src/functions/UsersGet.js @@ -1,5 +1,6 @@ const {app} = require("@azure/functions"); const {readUserDocument} = require("../shared/cosmos"); +const {withCors, handleOptions} = require("../shared/http"); const fallbackPageSlug = "loading_placeholder"; @@ -21,16 +22,19 @@ const normaliseUserDocument = (userId, document = {}) => { }; app.http("UsersGet", { - methods: ["GET"], + methods: ["GET", "OPTIONS"], authLevel: "anonymous", route: "users/{id}", handler: async (request, context) => { + if (request.method === "OPTIONS") { + return handleOptions(); + } const userId = request.params.get("id") || request.query.get("id"); if (!userId) { - return { + return withCors({ status: 400, jsonBody: {error: "Missing required path parameter: id"}, - }; + }); } context.log(`Fetching progress for user "${userId}"`); @@ -38,22 +42,22 @@ app.http("UsersGet", { try { const document = await readUserDocument(userId); if (!document) { - return { + return withCors({ status: 404, jsonBody: {error: "User not found"}, - }; + }); } - return { + return withCors({ status: 200, jsonBody: normaliseUserDocument(userId, document), - }; + }); } catch (error) { context.log(`Failed to fetch user ${userId}: ${error.message}`); - return { + return withCors({ status: 500, jsonBody: {error: "Failed to load user progress"}, - }; + }); } }, }); diff --git a/progress-api/src/functions/UsersPatch.js b/progress-api/src/functions/UsersPatch.js index 9d9fc5b8..405f605e 100644 --- a/progress-api/src/functions/UsersPatch.js +++ b/progress-api/src/functions/UsersPatch.js @@ -1,5 +1,6 @@ const {app} = require("@azure/functions"); const {readUserDocument, upsertUserDocument} = require("../shared/cosmos"); +const {withCors, handleOptions} = require("../shared/http"); 
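+// withCors stamps the Access-Control-Allow-* headers onto a response object and
+// handleOptions answers CORS preflight requests (both defined in src/shared/http.js).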
const parseRequestBody = async (request) => { try { @@ -47,24 +48,27 @@ const ensureDefaults = (userId, document) => { }; app.http("UsersPatch", { - methods: ["PATCH"], + methods: ["PATCH", "OPTIONS"], authLevel: "anonymous", route: "users/{id}", handler: async (request, context) => { + if (request.method === "OPTIONS") { + return handleOptions(); + } const userId = request.params.get("id") || request.query.get("id"); if (!userId) { - return { + return withCors({ status: 400, jsonBody: {error: "Missing required path parameter: id"}, - }; + }); } const updates = await parseRequestBody(request); if (!Object.keys(updates).length) { - return { + return withCors({ status: 400, jsonBody: {error: "Request body must be a JSON object of updates"}, - }; + }); } context.log(`Applying ${Object.keys(updates).length} updates for user "${userId}"`); @@ -73,13 +77,13 @@ app.http("UsersPatch", { const current = ensureDefaults(userId, await readUserDocument(userId)); const next = ensureDefaults(userId, applyPatch(current, updates)); await upsertUserDocument(next); - return {status: 204}; + return withCors({status: 204}); } catch (error) { context.log(`Failed to patch user ${userId}: ${error.message}`); - return { + return withCors({ status: 500, jsonBody: {error: "Failed to update user progress"}, - }; + }); } }, }); diff --git a/progress-api/src/shared/http.js b/progress-api/src/shared/http.js new file mode 100644 index 00000000..f2321b40 --- /dev/null +++ b/progress-api/src/shared/http.js @@ -0,0 +1,26 @@ +const allowedOrigin = process.env.CORS_ALLOWED_ORIGIN || "*"; +const allowedHeaders = process.env.CORS_ALLOWED_HEADERS || "content-type,x-functions-key"; +const allowedMethods = process.env.CORS_ALLOWED_METHODS || "GET,POST,PATCH,OPTIONS"; + +const baseHeaders = { + "Access-Control-Allow-Origin": allowedOrigin, + "Access-Control-Allow-Headers": allowedHeaders, + "Access-Control-Allow-Methods": allowedMethods, +}; + +const withCors = (response = {}) => ({ + ...response, + headers: { + ...baseHeaders, + ...(response.headers || {}), + }, +}); + +const handleOptions = () => withCors({ + status: 204, +}); + +module.exports = { + withCors, + handleOptions, +}; From 3c8590110209492391b43c9882c77b2d65ae4f53 Mon Sep 17 00:00:00 2001 From: Puller28 Date: Mon, 27 Oct 2025 12:53:47 +0200 Subject: [PATCH 067/108] Include function host configuration --- .github/workflows/deploy.yml | 26 ++++++++++++++++++++++++++ progress-api/.funcignore | 10 ++++++++++ progress-api/host.json | 15 +++++++++++++++ 3 files changed, 51 insertions(+) create mode 100644 progress-api/.funcignore create mode 100644 progress-api/host.json diff --git a/.github/workflows/deploy.yml b/.github/workflows/deploy.yml index eac0aca6..654c0afc 100644 --- a/.github/workflows/deploy.yml +++ b/.github/workflows/deploy.yml @@ -15,6 +15,7 @@ env: IMAGE_NAME: futurecoder # change if you prefer APP_NAME: PythonCoding # <-- your Azure Web App name RESOURCE_GROUP: MSAN-RG-Training # <-- your Azure Resource Group + FUNCTION_APP_NAME: futurecoder-progress-api WEBSITES_PORT: "80" DOCKER_BUILDKIT: "1" BUILDKIT_PROGRESS: plain @@ -77,6 +78,15 @@ jobs: REACT_APP_PROGRESS_API_KEY=${{ secrets.REACT_APP_PROGRESS_API_KEY }} \ REACT_APP_ADMIN_EMAILS=${{ secrets.REACT_APP_ADMIN_EMAILS }} + - name: Configure Function App settings + run: | + az functionapp config appsettings set \ + --name "${{ env.FUNCTION_APP_NAME }}" \ + --resource-group "${{ env.RESOURCE_GROUP }}" \ + --settings CosmosConnection='${{ secrets.COSMOS_CONNECTION_STRING }}' \ + CORS_ALLOWED_ORIGIN=${{ 
secrets.FUNCTION_CORS_ALLOWED_ORIGIN }} \ + ADMIN_EMAIL_ALLOWLIST=${{ secrets.FUNCTION_ADMIN_EMAIL_ALLOWLIST }} + # Derive a flag from secret (don't reference secrets in `if:` directly) - name: Determine if GHCR is private id: ghcr @@ -111,6 +121,22 @@ jobs: --docker-registry-server-user "${{ github.actor }}" \ --docker-registry-server-password "${{ secrets.GHCR_READ_TOKEN }}" + - name: Set up Node.js + uses: actions/setup-node@v4 + with: + node-version: "20" + + - name: Install Functions dependencies + working-directory: progress-api + run: npm ci + + - name: Install Azure Functions Core Tools + run: sudo npm install -g azure-functions-core-tools@4 --unsafe-perm true + + - name: Publish Azure Functions + working-directory: progress-api + run: func azure functionapp publish "${{ env.FUNCTION_APP_NAME }}" --javascript + # (Optional) Restart the app to pick the new image immediately - name: Restart Web App run: | diff --git a/progress-api/.funcignore b/progress-api/.funcignore new file mode 100644 index 00000000..d5b3b4a2 --- /dev/null +++ b/progress-api/.funcignore @@ -0,0 +1,10 @@ +*.js.map +*.ts +.git* +.vscode +__azurite_db*__.json +__blobstorage__ +__queuestorage__ +local.settings.json +test +tsconfig.json \ No newline at end of file diff --git a/progress-api/host.json b/progress-api/host.json new file mode 100644 index 00000000..9df91361 --- /dev/null +++ b/progress-api/host.json @@ -0,0 +1,15 @@ +{ + "version": "2.0", + "logging": { + "applicationInsights": { + "samplingSettings": { + "isEnabled": true, + "excludedTypes": "Request" + } + } + }, + "extensionBundle": { + "id": "Microsoft.Azure.Functions.ExtensionBundle", + "version": "[4.*, 5.0.0)" + } +} \ No newline at end of file From 33336c19f17525af33b556b0432934636bf39fd8 Mon Sep 17 00:00:00 2001 From: Puller28 Date: Mon, 27 Oct 2025 12:54:41 +0200 Subject: [PATCH 068/108] Add Azure Functions entrypoint --- progress-api/src/index.js | 5 +++++ 1 file changed, 5 insertions(+) create mode 100644 progress-api/src/index.js diff --git a/progress-api/src/index.js b/progress-api/src/index.js new file mode 100644 index 00000000..0c7432ef --- /dev/null +++ b/progress-api/src/index.js @@ -0,0 +1,5 @@ +const { app } = require('@azure/functions'); + +app.setup({ + enableHttpStream: true, +}); From ddbb61016323355ecc4120c45d5c3447da653ac1 Mon Sep 17 00:00:00 2001 From: llodewyks Date: Mon, 27 Oct 2025 13:28:19 +0200 Subject: [PATCH 069/108] Fix formatting of app settings in deploy.yml --- .github/workflows/deploy.yml | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/.github/workflows/deploy.yml b/.github/workflows/deploy.yml index 654c0afc..d56c4f25 100644 --- a/.github/workflows/deploy.yml +++ b/.github/workflows/deploy.yml @@ -83,9 +83,9 @@ jobs: az functionapp config appsettings set \ --name "${{ env.FUNCTION_APP_NAME }}" \ --resource-group "${{ env.RESOURCE_GROUP }}" \ - --settings CosmosConnection='${{ secrets.COSMOS_CONNECTION_STRING }}' \ - CORS_ALLOWED_ORIGIN=${{ secrets.FUNCTION_CORS_ALLOWED_ORIGIN }} \ - ADMIN_EMAIL_ALLOWLIST=${{ secrets.FUNCTION_ADMIN_EMAIL_ALLOWLIST }} + --settings "CosmosConnection=${{ secrets.COSMOS_CONNECTION_STRING }}" \ + "CORS_ALLOWED_ORIGIN=${{ secrets.FUNCTION_CORS_ALLOWED_ORIGIN }}" \ + "ADMIN_EMAIL_ALLOWLIST=${{ secrets.FUNCTION_ADMIN_EMAIL_ALLOWLIST }}" # Derive a flag from secret (don't reference secrets in `if:` directly) - name: Determine if GHCR is private From 57c12568e9601cd58d0374868fe0e93a098d7976 Mon Sep 17 00:00:00 2001 From: Puller28 Date: Mon, 27 Oct 2025 15:11:57 +0200 
Subject: [PATCH 070/108] Handle Azure Functions params without Map interface --- progress-api/src/functions/AdminProgress.js | 2 +- progress-api/src/functions/UsersGet.js | 9 +++++++-- progress-api/src/functions/UsersPatch.js | 9 +++++++-- 3 files changed, 15 insertions(+), 5 deletions(-) diff --git a/progress-api/src/functions/AdminProgress.js b/progress-api/src/functions/AdminProgress.js index 0de95af5..3269e163 100644 --- a/progress-api/src/functions/AdminProgress.js +++ b/progress-api/src/functions/AdminProgress.js @@ -28,7 +28,7 @@ app.http("AdminProgress", { jsonBody: {users}, }); } catch (error) { - context.log(`Failed to load admin progress: ${error.message}`); + context.log.error(`Failed to load admin progress: ${error.message}`, error); return withCors({ status: 500, jsonBody: {error: "Failed to load admin progress"}, diff --git a/progress-api/src/functions/UsersGet.js b/progress-api/src/functions/UsersGet.js index 855e4113..b463d36c 100644 --- a/progress-api/src/functions/UsersGet.js +++ b/progress-api/src/functions/UsersGet.js @@ -29,7 +29,12 @@ app.http("UsersGet", { if (request.method === "OPTIONS") { return handleOptions(); } - const userId = request.params.get("id") || request.query.get("id"); + const userId = + (request.params && typeof request.params.get === "function" && request.params.get("id")) || + (request.params && request.params.id) || + (request.query && typeof request.query.get === "function" && request.query.get("id")) || + (request.query && request.query.id) || + null; if (!userId) { return withCors({ status: 400, @@ -53,7 +58,7 @@ app.http("UsersGet", { jsonBody: normaliseUserDocument(userId, document), }); } catch (error) { - context.log(`Failed to fetch user ${userId}: ${error.message}`); + context.log.error(`Failed to fetch user ${userId}: ${error.message}`, error); return withCors({ status: 500, jsonBody: {error: "Failed to load user progress"}, diff --git a/progress-api/src/functions/UsersPatch.js b/progress-api/src/functions/UsersPatch.js index 405f605e..eeb18b86 100644 --- a/progress-api/src/functions/UsersPatch.js +++ b/progress-api/src/functions/UsersPatch.js @@ -55,7 +55,12 @@ app.http("UsersPatch", { if (request.method === "OPTIONS") { return handleOptions(); } - const userId = request.params.get("id") || request.query.get("id"); + const userId = + (request.params && typeof request.params.get === "function" && request.params.get("id")) || + (request.params && request.params.id) || + (request.query && typeof request.query.get === "function" && request.query.get("id")) || + (request.query && request.query.id) || + null; if (!userId) { return withCors({ status: 400, @@ -79,7 +84,7 @@ app.http("UsersPatch", { await upsertUserDocument(next); return withCors({status: 204}); } catch (error) { - context.log(`Failed to patch user ${userId}: ${error.message}`); + context.log.error(`Failed to patch user ${userId}: ${error.message}`, error); return withCors({ status: 500, jsonBody: {error: "Failed to update user progress"}, From 90294b3d4c52c0ddcdde8e2d2dd96471e18210cc Mon Sep 17 00:00:00 2001 From: Puller28 Date: Tue, 28 Oct 2025 10:23:16 +0200 Subject: [PATCH 071/108] Expose admin summary endpoint under new route --- progress-api/src/functions/AdminProgress.js | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/progress-api/src/functions/AdminProgress.js b/progress-api/src/functions/AdminProgress.js index 3269e163..1f246e9c 100644 --- a/progress-api/src/functions/AdminProgress.js +++ b/progress-api/src/functions/AdminProgress.js @@ -13,10 
+13,10 @@ const normalise = (document = {}) => ({ : {}, }); -app.http("AdminProgress", { +app.http("AdminSummary", { methods: ["GET", "OPTIONS"], authLevel: "anonymous", - route: "admin/progress", + route: "admin/progress/summary", handler: async (request, context) => { if (request.method === "OPTIONS") { return handleOptions(); From c675b8a1b6770f244929bb808759ce0d2dfeeb52 Mon Sep 17 00:00:00 2001 From: Puller28 Date: Wed, 29 Oct 2025 11:29:10 +0200 Subject: [PATCH 072/108] Serve admin summary on default function route --- progress-api/src/functions/AdminProgress.js | 1 - 1 file changed, 1 deletion(-) diff --git a/progress-api/src/functions/AdminProgress.js b/progress-api/src/functions/AdminProgress.js index 1f246e9c..132e44c2 100644 --- a/progress-api/src/functions/AdminProgress.js +++ b/progress-api/src/functions/AdminProgress.js @@ -16,7 +16,6 @@ const normalise = (document = {}) => ({ app.http("AdminSummary", { methods: ["GET", "OPTIONS"], authLevel: "anonymous", - route: "admin/progress/summary", handler: async (request, context) => { if (request.method === "OPTIONS") { return handleOptions(); From 06263da931d7b92dc6e8a2acf07868a010146562 Mon Sep 17 00:00:00 2001 From: Puller28 Date: Fri, 31 Oct 2025 12:45:32 +0200 Subject: [PATCH 073/108] Update Azure Function configuration and add deployment workflow --- .funcignore | 10 + .github/workflows/deploy-progress-api.yml | 51 + host.json | 15 + local.settings.json | 7 + node_modules/.package-lock.json | 58 + node_modules/@azure/functions/LICENSE | 21 + node_modules/@azure/functions/README.md | 67 + node_modules/@azure/functions/package.json | 86 ++ .../@azure/functions/src/InvocationContext.ts | 95 ++ .../@azure/functions/src/InvocationModel.ts | 176 +++ .../@azure/functions/src/ProgrammingModel.ts | 31 + .../@azure/functions/src/addBindingName.ts | 27 + node_modules/@azure/functions/src/app.ts | 173 +++ .../@azure/functions/src/constants.ts | 6 + .../src/converters/fromRpcBindings.ts | 40 + .../src/converters/fromRpcContext.ts | 43 + .../src/converters/fromRpcNullable.ts | 19 + .../src/converters/fromRpcTriggerMetadata.ts | 27 + .../src/converters/fromRpcTypedData.ts | 44 + .../functions/src/converters/toCamelCase.ts | 20 + .../src/converters/toCoreFunctionMetadata.ts | 76 + .../toMcpToolTriggerOptionsToRpc.ts | 149 ++ .../functions/src/converters/toRpcDuration.ts | 36 + .../functions/src/converters/toRpcHttp.ts | 47 + .../src/converters/toRpcHttpCookie.ts | 39 + .../functions/src/converters/toRpcNullable.ts | 132 ++ .../src/converters/toRpcTypedData.ts | 28 + node_modules/@azure/functions/src/errors.ts | 69 + .../functions/src/hooks/AppStartContext.ts | 7 + .../src/hooks/AppTerminateContext.ts | 7 + .../@azure/functions/src/hooks/HookContext.ts | 23 + .../src/hooks/InvocationHookContext.ts | 35 + .../functions/src/hooks/LogHookContext.ts | 51 + .../src/hooks/PostInvocationContext.ts | 30 + .../src/hooks/PreInvocationContext.ts | 24 + .../functions/src/hooks/registerHook.ts | 74 + .../@azure/functions/src/http/HttpRequest.ts | 165 ++ .../@azure/functions/src/http/HttpResponse.ts | 82 + .../src/http/extractHttpUserFromHeaders.ts | 38 + .../@azure/functions/src/http/httpProxy.ts | 173 +++ node_modules/@azure/functions/src/index.ts | 28 + node_modules/@azure/functions/src/input.ts | 79 + node_modules/@azure/functions/src/output.ts | 124 ++ node_modules/@azure/functions/src/setup.ts | 49 + node_modules/@azure/functions/src/trigger.ts | 152 ++ .../@azure/functions/src/utils/Disposable.ts | 35 + .../functions/src/utils/fallbackLogHandler.ts | 
27 + .../functions/src/utils/getRandomHexString.ts | 13 + .../@azure/functions/src/utils/isTrigger.ts | 14 + .../@azure/functions/src/utils/nonNull.ts | 40 + .../functions/src/utils/tryGetCoreApiLazy.ts | 17 + .../@azure/functions/src/utils/util.ts | 6 + .../functions/src/utils/workerSystemLog.ts | 17 + .../functions/types/InvocationContext.d.ts | 366 +++++ node_modules/@azure/functions/types/app.d.ts | 202 +++ .../@azure/functions/types/cosmosDB.d.ts | 36 + .../@azure/functions/types/cosmosDB.v3.d.ts | 216 +++ .../@azure/functions/types/cosmosDB.v4.d.ts | 203 +++ .../@azure/functions/types/eventGrid.d.ts | 109 ++ .../@azure/functions/types/eventHub.d.ts | 55 + .../@azure/functions/types/generic.d.ts | 24 + .../functions/types/hooks/HookContext.d.ts | 27 + .../functions/types/hooks/appHooks.d.ts | 46 + .../types/hooks/invocationHooks.d.ts | 106 ++ .../functions/types/hooks/logHooks.d.ts | 58 + .../functions/types/hooks/registerHook.d.ts | 50 + node_modules/@azure/functions/types/http.d.ts | 386 +++++ .../@azure/functions/types/index.d.ts | 206 +++ .../@azure/functions/types/input.d.ts | 57 + .../@azure/functions/types/mcpTool.d.ts | 107 ++ .../@azure/functions/types/mySql.d.ts | 73 + .../@azure/functions/types/output.d.ts | 86 ++ .../@azure/functions/types/serviceBus.d.ts | 98 ++ .../@azure/functions/types/setup.d.ts | 16 + node_modules/@azure/functions/types/sql.d.ts | 75 + .../@azure/functions/types/storage.d.ts | 66 + .../@azure/functions/types/table.d.ts | 60 + .../@azure/functions/types/timer.d.ts | 70 + .../@azure/functions/types/trigger.d.ts | 103 ++ .../@azure/functions/types/warmup.d.ts | 17 + .../@azure/functions/types/webpubsub.d.ts | 124 ++ node_modules/@fastify/busboy/LICENSE | 19 + node_modules/@fastify/busboy/README.md | 271 ++++ .../@fastify/busboy/deps/dicer/LICENSE | 19 + .../@fastify/busboy/deps/streamsearch/sbmh.js | 228 +++ node_modules/@fastify/busboy/package.json | 86 ++ node_modules/cookie/LICENSE | 24 + node_modules/cookie/README.md | 317 ++++ node_modules/cookie/SECURITY.md | 25 + node_modules/cookie/index.js | 335 +++++ node_modules/cookie/package.json | 44 + node_modules/long/LICENSE | 202 +++ node_modules/long/README.md | 246 +++ node_modules/long/index.js | 1 + node_modules/long/package.json | 34 + node_modules/long/src/long.js | 1323 +++++++++++++++++ node_modules/undici/LICENSE | 21 + node_modules/undici/README.md | 443 ++++++ node_modules/undici/docs/api/Agent.md | 80 + node_modules/undici/docs/api/BalancedPool.md | 99 ++ node_modules/undici/docs/api/CacheStorage.md | 30 + node_modules/undici/docs/api/Client.md | 273 ++++ node_modules/undici/docs/api/Connector.md | 115 ++ node_modules/undici/docs/api/ContentType.md | 57 + node_modules/undici/docs/api/Cookies.md | 101 ++ .../undici/docs/api/DiagnosticsChannel.md | 204 +++ .../undici/docs/api/DispatchInterceptor.md | 60 + node_modules/undici/docs/api/Dispatcher.md | 887 +++++++++++ node_modules/undici/docs/api/Errors.md | 47 + node_modules/undici/docs/api/Fetch.md | 27 + node_modules/undici/docs/api/MockAgent.md | 540 +++++++ node_modules/undici/docs/api/MockClient.md | 77 + node_modules/undici/docs/api/MockErrors.md | 12 + node_modules/undici/docs/api/MockPool.md | 547 +++++++ node_modules/undici/docs/api/Pool.md | 84 ++ node_modules/undici/docs/api/PoolStats.md | 35 + node_modules/undici/docs/api/ProxyAgent.md | 126 ++ node_modules/undici/docs/api/RetryHandler.md | 108 ++ node_modules/undici/docs/api/WebSocket.md | 43 + node_modules/undici/docs/api/api-lifecycle.md | 62 + 
.../undici/docs/assets/lifecycle-diagram.png | Bin 0 -> 47090 bytes .../docs/best-practices/client-certificate.md | 64 + .../docs/best-practices/mocking-request.md | 136 ++ .../undici/docs/best-practices/proxy.md | 127 ++ .../docs/best-practices/writing-tests.md | 20 + node_modules/undici/index-fetch.js | 15 + node_modules/undici/index.d.ts | 3 + node_modules/undici/index.js | 167 +++ node_modules/undici/package.json | 167 +++ node_modules/undici/types/README.md | 6 + node_modules/undici/types/agent.d.ts | 31 + node_modules/undici/types/api.d.ts | 43 + node_modules/undici/types/balanced-pool.d.ts | 18 + node_modules/undici/types/cache.d.ts | 36 + node_modules/undici/types/client.d.ts | 97 ++ node_modules/undici/types/connector.d.ts | 34 + node_modules/undici/types/content-type.d.ts | 21 + node_modules/undici/types/cookies.d.ts | 28 + .../undici/types/diagnostics-channel.d.ts | 67 + node_modules/undici/types/dispatcher.d.ts | 241 +++ node_modules/undici/types/errors.d.ts | 128 ++ node_modules/undici/types/fetch.d.ts | 209 +++ node_modules/undici/types/file.d.ts | 39 + node_modules/undici/types/filereader.d.ts | 54 + node_modules/undici/types/formdata.d.ts | 108 ++ .../undici/types/global-dispatcher.d.ts | 9 + node_modules/undici/types/global-origin.d.ts | 7 + node_modules/undici/types/handlers.d.ts | 9 + node_modules/undici/types/header.d.ts | 4 + node_modules/undici/types/index.d.ts | 65 + node_modules/undici/types/interceptors.d.ts | 5 + node_modules/undici/types/mock-agent.d.ts | 50 + node_modules/undici/types/mock-client.d.ts | 25 + node_modules/undici/types/mock-errors.d.ts | 12 + .../undici/types/mock-interceptor.d.ts | 93 ++ node_modules/undici/types/mock-pool.d.ts | 25 + node_modules/undici/types/patch.d.ts | 71 + node_modules/undici/types/pool-stats.d.ts | 19 + node_modules/undici/types/pool.d.ts | 28 + node_modules/undici/types/proxy-agent.d.ts | 30 + node_modules/undici/types/readable.d.ts | 61 + node_modules/undici/types/retry-handler.d.ts | 116 ++ node_modules/undici/types/webidl.d.ts | 220 +++ node_modules/undici/types/websocket.d.ts | 131 ++ package-lock.json | 66 + package.json | 15 + progress-api/.gitignore | 99 ++ progress-api/src/functions/AdminProgress.js | 21 +- progress-api/src/functions/function.json | 17 + src/index.js | 5 + 170 files changed, 16251 insertions(+), 7 deletions(-) create mode 100644 .funcignore create mode 100644 .github/workflows/deploy-progress-api.yml create mode 100644 host.json create mode 100644 local.settings.json create mode 100644 node_modules/.package-lock.json create mode 100644 node_modules/@azure/functions/LICENSE create mode 100644 node_modules/@azure/functions/README.md create mode 100644 node_modules/@azure/functions/package.json create mode 100644 node_modules/@azure/functions/src/InvocationContext.ts create mode 100644 node_modules/@azure/functions/src/InvocationModel.ts create mode 100644 node_modules/@azure/functions/src/ProgrammingModel.ts create mode 100644 node_modules/@azure/functions/src/addBindingName.ts create mode 100644 node_modules/@azure/functions/src/app.ts create mode 100644 node_modules/@azure/functions/src/constants.ts create mode 100644 node_modules/@azure/functions/src/converters/fromRpcBindings.ts create mode 100644 node_modules/@azure/functions/src/converters/fromRpcContext.ts create mode 100644 node_modules/@azure/functions/src/converters/fromRpcNullable.ts create mode 100644 node_modules/@azure/functions/src/converters/fromRpcTriggerMetadata.ts create mode 100644 
node_modules/@azure/functions/src/converters/fromRpcTypedData.ts create mode 100644 node_modules/@azure/functions/src/converters/toCamelCase.ts create mode 100644 node_modules/@azure/functions/src/converters/toCoreFunctionMetadata.ts create mode 100644 node_modules/@azure/functions/src/converters/toMcpToolTriggerOptionsToRpc.ts create mode 100644 node_modules/@azure/functions/src/converters/toRpcDuration.ts create mode 100644 node_modules/@azure/functions/src/converters/toRpcHttp.ts create mode 100644 node_modules/@azure/functions/src/converters/toRpcHttpCookie.ts create mode 100644 node_modules/@azure/functions/src/converters/toRpcNullable.ts create mode 100644 node_modules/@azure/functions/src/converters/toRpcTypedData.ts create mode 100644 node_modules/@azure/functions/src/errors.ts create mode 100644 node_modules/@azure/functions/src/hooks/AppStartContext.ts create mode 100644 node_modules/@azure/functions/src/hooks/AppTerminateContext.ts create mode 100644 node_modules/@azure/functions/src/hooks/HookContext.ts create mode 100644 node_modules/@azure/functions/src/hooks/InvocationHookContext.ts create mode 100644 node_modules/@azure/functions/src/hooks/LogHookContext.ts create mode 100644 node_modules/@azure/functions/src/hooks/PostInvocationContext.ts create mode 100644 node_modules/@azure/functions/src/hooks/PreInvocationContext.ts create mode 100644 node_modules/@azure/functions/src/hooks/registerHook.ts create mode 100644 node_modules/@azure/functions/src/http/HttpRequest.ts create mode 100644 node_modules/@azure/functions/src/http/HttpResponse.ts create mode 100644 node_modules/@azure/functions/src/http/extractHttpUserFromHeaders.ts create mode 100644 node_modules/@azure/functions/src/http/httpProxy.ts create mode 100644 node_modules/@azure/functions/src/index.ts create mode 100644 node_modules/@azure/functions/src/input.ts create mode 100644 node_modules/@azure/functions/src/output.ts create mode 100644 node_modules/@azure/functions/src/setup.ts create mode 100644 node_modules/@azure/functions/src/trigger.ts create mode 100644 node_modules/@azure/functions/src/utils/Disposable.ts create mode 100644 node_modules/@azure/functions/src/utils/fallbackLogHandler.ts create mode 100644 node_modules/@azure/functions/src/utils/getRandomHexString.ts create mode 100644 node_modules/@azure/functions/src/utils/isTrigger.ts create mode 100644 node_modules/@azure/functions/src/utils/nonNull.ts create mode 100644 node_modules/@azure/functions/src/utils/tryGetCoreApiLazy.ts create mode 100644 node_modules/@azure/functions/src/utils/util.ts create mode 100644 node_modules/@azure/functions/src/utils/workerSystemLog.ts create mode 100644 node_modules/@azure/functions/types/InvocationContext.d.ts create mode 100644 node_modules/@azure/functions/types/app.d.ts create mode 100644 node_modules/@azure/functions/types/cosmosDB.d.ts create mode 100644 node_modules/@azure/functions/types/cosmosDB.v3.d.ts create mode 100644 node_modules/@azure/functions/types/cosmosDB.v4.d.ts create mode 100644 node_modules/@azure/functions/types/eventGrid.d.ts create mode 100644 node_modules/@azure/functions/types/eventHub.d.ts create mode 100644 node_modules/@azure/functions/types/generic.d.ts create mode 100644 node_modules/@azure/functions/types/hooks/HookContext.d.ts create mode 100644 node_modules/@azure/functions/types/hooks/appHooks.d.ts create mode 100644 node_modules/@azure/functions/types/hooks/invocationHooks.d.ts create mode 100644 node_modules/@azure/functions/types/hooks/logHooks.d.ts create mode 100644 
node_modules/@azure/functions/types/hooks/registerHook.d.ts create mode 100644 node_modules/@azure/functions/types/http.d.ts create mode 100644 node_modules/@azure/functions/types/index.d.ts create mode 100644 node_modules/@azure/functions/types/input.d.ts create mode 100644 node_modules/@azure/functions/types/mcpTool.d.ts create mode 100644 node_modules/@azure/functions/types/mySql.d.ts create mode 100644 node_modules/@azure/functions/types/output.d.ts create mode 100644 node_modules/@azure/functions/types/serviceBus.d.ts create mode 100644 node_modules/@azure/functions/types/setup.d.ts create mode 100644 node_modules/@azure/functions/types/sql.d.ts create mode 100644 node_modules/@azure/functions/types/storage.d.ts create mode 100644 node_modules/@azure/functions/types/table.d.ts create mode 100644 node_modules/@azure/functions/types/timer.d.ts create mode 100644 node_modules/@azure/functions/types/trigger.d.ts create mode 100644 node_modules/@azure/functions/types/warmup.d.ts create mode 100644 node_modules/@azure/functions/types/webpubsub.d.ts create mode 100644 node_modules/@fastify/busboy/LICENSE create mode 100644 node_modules/@fastify/busboy/README.md create mode 100644 node_modules/@fastify/busboy/deps/dicer/LICENSE create mode 100644 node_modules/@fastify/busboy/deps/streamsearch/sbmh.js create mode 100644 node_modules/@fastify/busboy/package.json create mode 100644 node_modules/cookie/LICENSE create mode 100644 node_modules/cookie/README.md create mode 100644 node_modules/cookie/SECURITY.md create mode 100644 node_modules/cookie/index.js create mode 100644 node_modules/cookie/package.json create mode 100644 node_modules/long/LICENSE create mode 100644 node_modules/long/README.md create mode 100644 node_modules/long/index.js create mode 100644 node_modules/long/package.json create mode 100644 node_modules/long/src/long.js create mode 100644 node_modules/undici/LICENSE create mode 100644 node_modules/undici/README.md create mode 100644 node_modules/undici/docs/api/Agent.md create mode 100644 node_modules/undici/docs/api/BalancedPool.md create mode 100644 node_modules/undici/docs/api/CacheStorage.md create mode 100644 node_modules/undici/docs/api/Client.md create mode 100644 node_modules/undici/docs/api/Connector.md create mode 100644 node_modules/undici/docs/api/ContentType.md create mode 100644 node_modules/undici/docs/api/Cookies.md create mode 100644 node_modules/undici/docs/api/DiagnosticsChannel.md create mode 100644 node_modules/undici/docs/api/DispatchInterceptor.md create mode 100644 node_modules/undici/docs/api/Dispatcher.md create mode 100644 node_modules/undici/docs/api/Errors.md create mode 100644 node_modules/undici/docs/api/Fetch.md create mode 100644 node_modules/undici/docs/api/MockAgent.md create mode 100644 node_modules/undici/docs/api/MockClient.md create mode 100644 node_modules/undici/docs/api/MockErrors.md create mode 100644 node_modules/undici/docs/api/MockPool.md create mode 100644 node_modules/undici/docs/api/Pool.md create mode 100644 node_modules/undici/docs/api/PoolStats.md create mode 100644 node_modules/undici/docs/api/ProxyAgent.md create mode 100644 node_modules/undici/docs/api/RetryHandler.md create mode 100644 node_modules/undici/docs/api/WebSocket.md create mode 100644 node_modules/undici/docs/api/api-lifecycle.md create mode 100644 node_modules/undici/docs/assets/lifecycle-diagram.png create mode 100644 node_modules/undici/docs/best-practices/client-certificate.md create mode 100644 node_modules/undici/docs/best-practices/mocking-request.md 
create mode 100644 node_modules/undici/docs/best-practices/proxy.md
 create mode 100644 node_modules/undici/docs/best-practices/writing-tests.md
 create mode 100644 node_modules/undici/index-fetch.js
 create mode 100644 node_modules/undici/index.d.ts
 create mode 100644 node_modules/undici/index.js
 create mode 100644 node_modules/undici/package.json
 create mode 100644 node_modules/undici/types/README.md
 create mode 100644 node_modules/undici/types/agent.d.ts
 create mode 100644 node_modules/undici/types/api.d.ts
 create mode 100644 node_modules/undici/types/balanced-pool.d.ts
 create mode 100644 node_modules/undici/types/cache.d.ts
 create mode 100644 node_modules/undici/types/client.d.ts
 create mode 100644 node_modules/undici/types/connector.d.ts
 create mode 100644 node_modules/undici/types/content-type.d.ts
 create mode 100644 node_modules/undici/types/cookies.d.ts
 create mode 100644 node_modules/undici/types/diagnostics-channel.d.ts
 create mode 100644 node_modules/undici/types/dispatcher.d.ts
 create mode 100644 node_modules/undici/types/errors.d.ts
 create mode 100644 node_modules/undici/types/fetch.d.ts
 create mode 100644 node_modules/undici/types/file.d.ts
 create mode 100644 node_modules/undici/types/filereader.d.ts
 create mode 100644 node_modules/undici/types/formdata.d.ts
 create mode 100644 node_modules/undici/types/global-dispatcher.d.ts
 create mode 100644 node_modules/undici/types/global-origin.d.ts
 create mode 100644 node_modules/undici/types/handlers.d.ts
 create mode 100644 node_modules/undici/types/header.d.ts
 create mode 100644 node_modules/undici/types/index.d.ts
 create mode 100644 node_modules/undici/types/interceptors.d.ts
 create mode 100644 node_modules/undici/types/mock-agent.d.ts
 create mode 100644 node_modules/undici/types/mock-client.d.ts
 create mode 100644 node_modules/undici/types/mock-errors.d.ts
 create mode 100644 node_modules/undici/types/mock-interceptor.d.ts
 create mode 100644 node_modules/undici/types/mock-pool.d.ts
 create mode 100644 node_modules/undici/types/patch.d.ts
 create mode 100644 node_modules/undici/types/pool-stats.d.ts
 create mode 100644 node_modules/undici/types/pool.d.ts
 create mode 100644 node_modules/undici/types/proxy-agent.d.ts
 create mode 100644 node_modules/undici/types/readable.d.ts
 create mode 100644 node_modules/undici/types/retry-handler.d.ts
 create mode 100644 node_modules/undici/types/webidl.d.ts
 create mode 100644 node_modules/undici/types/websocket.d.ts
 create mode 100644 package-lock.json
 create mode 100644 package.json
 create mode 100644 progress-api/.gitignore
 create mode 100644 progress-api/src/functions/function.json
 create mode 100644 src/index.js

diff --git a/.funcignore b/.funcignore
new file mode 100644
index 00000000..d5b3b4a2
--- /dev/null
+++ b/.funcignore
@@ -0,0 +1,10 @@
+*.js.map
+*.ts
+.git*
+.vscode
+__azurite_db*__.json
+__blobstorage__
+__queuestorage__
+local.settings.json
+test
+tsconfig.json
\ No newline at end of file
diff --git a/.github/workflows/deploy-progress-api.yml b/.github/workflows/deploy-progress-api.yml
new file mode 100644
index 00000000..e91e0542
--- /dev/null
+++ b/.github/workflows/deploy-progress-api.yml
@@ -0,0 +1,50 @@
+name: Deploy Progress API to Azure Functions
+
+on:
+  push:
+    branches: [ main ]
+    paths:
+      - 'progress-api/**'
+  workflow_dispatch:
+
+env:
+  AZURE_FUNCTIONAPP_NAME: futurecoder-progress-api-cub7aje5cae5a2fa   # Your function app name
+  AZURE_FUNCTIONAPP_PACKAGE: 'progress-api'
+  NODE_VERSION: '18.x'
+
+permissions:
+  contents: read
+  id-token: write
+
+jobs:
+  build-and-deploy:
+    runs-on: ubuntu-latest
+    environment: production
+
+    steps:
+      - name: 'Checkout repository'
+        uses: actions/checkout@v4
+
+      - name: Setup Node.js
+        uses: actions/setup-node@v4
+        with:
+          node-version: ${{ env.NODE_VERSION }}
+          cache: 'npm'
+
+      - name: Install dependencies
+        working-directory: ./progress-api
+        run: npm install
+
+      - name: 'Run npm build'
+        working-directory: ./progress-api
+        run: npm run build --if-present
+
+      - name: 'Run Azure Functions Action'
+        uses: Azure/functions-action@v1
+        id: fa
+        with:
+          app-name: ${{ env.AZURE_FUNCTIONAPP_NAME }}
+          package: ${{ env.AZURE_FUNCTIONAPP_PACKAGE }}
+          publish-profile: ${{ secrets.AZURE_FUNCTIONAPP_PUBLISH_PROFILE }}
+          scm-do-build-during-deployment: true
+          enable-oryx-build: true
diff --git a/host.json b/host.json
new file mode 100644
index 00000000..9df91361
--- /dev/null
+++ b/host.json
@@ -0,0 +1,15 @@
+{
+  "version": "2.0",
+  "logging": {
+    "applicationInsights": {
+      "samplingSettings": {
+        "isEnabled": true,
+        "excludedTypes": "Request"
+      }
+    }
+  },
+  "extensionBundle": {
+    "id": "Microsoft.Azure.Functions.ExtensionBundle",
+    "version": "[4.*, 5.0.0)"
+  }
+}
\ No newline at end of file
diff --git a/local.settings.json b/local.settings.json
new file mode 100644
index 00000000..356253ec
--- /dev/null
+++ b/local.settings.json
@@ -0,0 +1,7 @@
+{
+  "IsEncrypted": false,
+  "Values": {
+    "AzureWebJobsStorage": "",
+    "FUNCTIONS_WORKER_RUNTIME": "node"
+  }
+}
\ No newline at end of file
diff --git a/node_modules/.package-lock.json b/node_modules/.package-lock.json
new file mode 100644
index 00000000..31c6fcd5
--- /dev/null
+++ b/node_modules/.package-lock.json
@@ -0,0 +1,58 @@
+{
+  "name": "futurecoder",
+  "version": "1.0.0",
+  "lockfileVersion": 3,
+  "requires": true,
+  "packages": {
+    "node_modules/@azure/functions": {
+      "version": "4.8.0",
+      "resolved": "https://registry.npmjs.org/@azure/functions/-/functions-4.8.0.tgz",
+      "integrity": "sha512-LNtl3xZNE40vE7+SIST+GYQX5cnnI1M65fXPi26l9XCdPakuQrz54lHv+qQQt1GG5JbqLfQk75iM7A6Y9O+2dQ==",
+      "license": "MIT",
+      "dependencies": {
+        "cookie": "^0.7.0",
+        "long": "^4.0.0",
+        "undici": "^5.29.0"
+      },
+      "engines": {
+        "node": ">=18.0"
+      }
+    },
+    "node_modules/@fastify/busboy": {
+      "version": "2.1.1",
+      "resolved": "https://registry.npmjs.org/@fastify/busboy/-/busboy-2.1.1.tgz",
+      "integrity": "sha512-vBZP4NlzfOlerQTnba4aqZoMhE/a9HY7HRqoOPaETQcSQuWEIyZMHGfVu6w9wGtGK5fED5qRs2DteVCjOH60sA==",
+      "license": "MIT",
+      "engines": {
+        "node": ">=14"
+      }
+    },
+    "node_modules/cookie": {
+      "version": "0.7.2",
+      "resolved": "https://registry.npmjs.org/cookie/-/cookie-0.7.2.tgz",
+      "integrity": "sha512-yki5XnKuf750l50uGTllt6kKILY4nQ1eNIQatoXEByZ5dWgnKqbnqmTrBE5B4N7lrMJKQ2ytWMiTO2o0v6Ew/w==",
+      "license": "MIT",
+      "engines": {
+        "node": ">= 0.6"
+      }
+    },
+    "node_modules/long": {
+      "version": "4.0.0",
+      "resolved": "https://registry.npmjs.org/long/-/long-4.0.0.tgz",
+      "integrity": "sha512-XsP+KhQif4bjX1kbuSiySJFNAehNxgLb6hPRGJ9QsUr8ajHkuXGdrHmFUTUUXhDwVX2R5bY4JNZEwbUiMhV+MA==",
+      "license": "Apache-2.0"
+    },
+    "node_modules/undici": {
+      "version": "5.29.0",
+      "resolved": "https://registry.npmjs.org/undici/-/undici-5.29.0.tgz",
+      "integrity": "sha512-raqeBD6NQK4SkWhQzeYKd1KmIG6dllBOTt55Rmkt4HtI9mwdWtJljnrXjAFUBLTSN67HWrOIZ3EPF4kjUw80Bg==",
+      "license": "MIT",
+      "dependencies": {
+        "@fastify/busboy": "^2.0.0"
+      },
+      "engines": {
+        "node": ">=14.0"
+      }
+    }
+  }
+}
diff --git a/node_modules/@azure/functions/LICENSE b/node_modules/@azure/functions/LICENSE
new file mode 100644
index 00000000..4f0d38ca
--- /dev/null
+++ b/node_modules/@azure/functions/LICENSE
@@ -0,0 +1,21 @@
+    MIT License
+
+    Copyright (c) .NET Foundation. All rights reserved.
+
+    Permission is hereby granted, free of charge, to any person obtaining a copy
+    of this software and associated documentation files (the "Software"), to deal
+    in the Software without restriction, including without limitation the rights
+    to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
+    copies of the Software, and to permit persons to whom the Software is
+    furnished to do so, subject to the following conditions:
+
+    The above copyright notice and this permission notice shall be included in all
+    copies or substantial portions of the Software.
+
+    THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
+    IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
+    FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
+    AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
+    LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
+    OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
+    SOFTWARE
diff --git a/node_modules/@azure/functions/README.md b/node_modules/@azure/functions/README.md
new file mode 100644
index 00000000..4d666e65
--- /dev/null
+++ b/node_modules/@azure/functions/README.md
@@ -0,0 +1,67 @@
+# Azure Functions Node.js Programming Model
+
+|Branch|Status|Support level|Node.js Versions|
+|---|---|---|---|
+|v4.x (default)|[![Build Status](https://img.shields.io/azure-devops/build/azfunc/public/514/v4.x)](https://azfunc.visualstudio.com/public/_build/latest?definitionId=514&branchName=v4.x) [![Test Status](https://img.shields.io/azure-devops/tests/azfunc/public/514/v4.x?compact_message)](https://azfunc.visualstudio.com/public/_build/latest?definitionId=514&branchName=v4.x)|GA|20, 18|
+|v3.x|[![Build Status](https://img.shields.io/azure-devops/build/azfunc/public/514/v3.x)](https://azfunc.visualstudio.com/public/_build/latest?definitionId=514&branchName=v3.x) [![Test Status](https://img.shields.io/azure-devops/tests/azfunc/public/514/v3.x?compact_message)](https://azfunc.visualstudio.com/public/_build/latest?definitionId=514&branchName=v3.x)|GA|20, 18|
+
+## Install
+
+```bash
+npm install @azure/functions
+```
+
+## Documentation
+
+- [Azure Functions JavaScript Developer Guide](https://learn.microsoft.com/azure/azure-functions/functions-reference-node?pivots=nodejs-model-v4)
+- [Upgrade guide from v3 to v4](https://learn.microsoft.com/azure/azure-functions/functions-node-upgrade-v4)
+- [Create your first TypeScript function](https://docs.microsoft.com/azure/azure-functions/create-first-function-vs-code-typescript?pivots=nodejs-model-v4)
+- [Create your first JavaScript function](https://docs.microsoft.com/azure/azure-functions/create-first-function-vs-code-node?pivots=nodejs-model-v4)
+
+## Considerations
+
+- The Node.js "programming model" shouldn't be confused with the Azure Functions "runtime".
+    - _**Programming model**_: Defines how you author your code and is specific to JavaScript and TypeScript.
+    - _**Runtime**_: Defines underlying behavior of Azure Functions and is shared across all languages.
+- The programming model version is strictly tied to the version of the [`@azure/functions`](https://www.npmjs.com/package/@azure/functions) npm package, and is versioned independently of the [runtime](https://learn.microsoft.com/azure/azure-functions/functions-versions?pivots=programming-language-javascript). Both the runtime and the programming model use "4" as their latest major version, but that is purely a coincidence.
+- You can't mix the v3 and v4 programming models in the same function app. As soon as you register one v4 function in your app, any v3 functions registered in _function.json_ files are ignored.
+
+## Usage
+
+### TypeScript
+
+```typescript
+import { app, HttpRequest, HttpResponseInit, InvocationContext } from "@azure/functions";
+
+export async function httpTrigger1(request: HttpRequest, context: InvocationContext): Promise<HttpResponseInit> {
+    context.log(`Http function processed request for url "${request.url}"`);
+
+    const name = request.query.get('name') || await request.text() || 'world';
+
+    return { body: `Hello, ${name}!` };
+};
+
+app.http('httpTrigger1', {
+    methods: ['GET', 'POST'],
+    authLevel: 'anonymous',
+    handler: httpTrigger1
+});
+```
+
+### JavaScript
+
+```javascript
+const { app } = require('@azure/functions');
+
+app.http('httpTrigger1', {
+    methods: ['GET', 'POST'],
+    authLevel: 'anonymous',
+    handler: async (request, context) => {
+        context.log(`Http function processed request for url "${request.url}"`);
+
+        const name = request.query.get('name') || await request.text() || 'world';
+
+        return { body: `Hello, ${name}!` };
+    }
+});
+```
diff --git a/node_modules/@azure/functions/package.json b/node_modules/@azure/functions/package.json
new file mode 100644
index 00000000..e0f57eed
--- /dev/null
+++ b/node_modules/@azure/functions/package.json
@@ -0,0 +1,86 @@
+{
+    "name": "@azure/functions",
+    "version": "4.8.0",
+    "description": "Microsoft Azure Functions NodeJS Framework",
+    "keywords": [
+        "azure",
+        "azure-functions",
+        "serverless",
+        "typescript"
+    ],
+    "author": "Microsoft",
+    "license": "MIT",
+    "homepage": "https://github.com/Azure/azure-functions-nodejs-library",
+    "repository": {
+        "type": "git",
+        "url": "https://github.com/Azure/azure-functions-nodejs-library.git"
+    },
+    "bugs": {
+        "url": "https://github.com/Azure/azure-functions-nodejs-library/issues"
+    },
+    "main": "./dist/azure-functions.js",
+    "types": "types/index.d.ts",
+    "files": [
+        "dist/",
+        "src/",
+        "types/",
+        "LICENSE",
+        "README.md"
+    ],
+    "engines": {
+        "node": ">=18.0"
+    },
+    "scripts": {
+        "build": "webpack --mode development",
+        "minify": "webpack --mode production",
+        "test": "ts-node ./test/index.ts",
+        "format": "prettier . --write",
+        "lint": "eslint . --fix",
--fix", + "updateVersion": "ts-node ./scripts/updateVersion.ts", + "validateRelease": "ts-node ./scripts/validateRelease.ts", + "watch": "webpack --watch --mode development" + }, + "dependencies": { + "cookie": "^0.7.0", + "long": "^4.0.0", + "undici": "^5.29.0" + }, + "devDependencies": { + "@types/chai": "^4.2.22", + "@types/chai-as-promised": "^7.1.5", + "@types/cookie": "^0.6.0", + "@types/fs-extra": "^9.0.13", + "@types/long": "^4.0.2", + "@types/minimist": "^1.2.2", + "@types/mocha": "^9.1.1", + "@types/node": "^18.0.0", + "@types/semver": "^7.3.9", + "@typescript-eslint/eslint-plugin": "^5.12.1", + "@typescript-eslint/parser": "^5.12.1", + "chai": "^4.2.0", + "chai-as-promised": "^7.1.1", + "eslint": "^7.32.0", + "eslint-config-prettier": "^8.3.0", + "eslint-plugin-deprecation": "^1.3.2", + "eslint-plugin-header": "^3.1.1", + "eslint-plugin-import": "^2.29.0", + "eslint-plugin-prettier": "^4.0.0", + "eslint-webpack-plugin": "^3.2.0", + "eslint-plugin-simple-import-sort": "^10.0.0", + "fork-ts-checker-webpack-plugin": "^7.2.13", + "fs-extra": "^10.0.1", + "globby": "^11.0.0", + "minimist": "^1.2.6", + "mocha": "^11.1.0", + "mocha-junit-reporter": "^2.0.2", + "mocha-multi-reporters": "^1.5.1", + "prettier": "^2.4.1", + "semver": "^7.3.5", + "ts-loader": "^9.3.1", + "ts-node": "^3.3.0", + "typescript": "^4.5.5", + "typescript4": "npm:typescript@~4.0.0", + "webpack": "^5.74.0", + "webpack-cli": "^4.10.0" + } +} diff --git a/node_modules/@azure/functions/src/InvocationContext.ts b/node_modules/@azure/functions/src/InvocationContext.ts new file mode 100644 index 00000000..56ce2417 --- /dev/null +++ b/node_modules/@azure/functions/src/InvocationContext.ts @@ -0,0 +1,95 @@ +// Copyright (c) .NET Foundation. All rights reserved. +// Licensed under the MIT License. 
+
+import * as types from '@azure/functions';
+import {
+    EffectiveFunctionOptions,
+    InvocationContextInit,
+    LogHandler,
+    RetryContext,
+    TraceContext,
+    TriggerMetadata,
+} from '@azure/functions';
+import { fallbackLogHandler } from './utils/fallbackLogHandler';
+
+export class InvocationContext implements types.InvocationContext {
+    invocationId: string;
+    functionName: string;
+    extraInputs: InvocationContextExtraInputs;
+    extraOutputs: InvocationContextExtraOutputs;
+    retryContext?: RetryContext;
+    traceContext?: TraceContext;
+    triggerMetadata?: TriggerMetadata;
+    options: EffectiveFunctionOptions;
+    #userLogHandler: LogHandler;
+
+    constructor(init?: InvocationContextInit) {
+        init = init || {};
+        const fallbackString = 'unknown';
+        this.invocationId = init.invocationId || fallbackString;
+        this.functionName = init.functionName || fallbackString;
+        this.extraInputs = new InvocationContextExtraInputs();
+        this.extraOutputs = new InvocationContextExtraOutputs();
+        this.retryContext = init.retryContext;
+        this.traceContext = init.traceContext;
+        this.triggerMetadata = init.triggerMetadata;
+        this.options = {
+            trigger: init.options?.trigger || {
+                name: fallbackString,
+                type: fallbackString,
+            },
+            return: init.options?.return,
+            extraInputs: init.options?.extraInputs || [],
+            extraOutputs: init.options?.extraOutputs || [],
+        };
+        this.#userLogHandler = init.logHandler || fallbackLogHandler;
+    }
+
+    log(...args: unknown[]): void {
+        this.#userLogHandler('information', ...args);
+    }
+
+    trace(...args: unknown[]): void {
+        this.#userLogHandler('trace', ...args);
+    }
+
+    debug(...args: unknown[]): void {
+        this.#userLogHandler('debug', ...args);
+    }
+
+    info(...args: unknown[]): void {
+        this.#userLogHandler('information', ...args);
+    }
+
+    warn(...args: unknown[]): void {
+        this.#userLogHandler('warning', ...args);
+    }
+
+    error(...args: unknown[]): void {
+        this.#userLogHandler('error', ...args);
+    }
+}
+
+class InvocationContextExtraInputs implements types.InvocationContextExtraInputs {
+    #inputs: Record<string, unknown> = {};
+    get(inputOrName: types.FunctionInput | string): any {
+        const name = typeof inputOrName === 'string' ? inputOrName : inputOrName.name;
+        return this.#inputs[name];
+    }
+    set(inputOrName: types.FunctionInput | string, value: unknown): void {
+        const name = typeof inputOrName === 'string' ? inputOrName : inputOrName.name;
+        this.#inputs[name] = value;
+    }
+}
+
+class InvocationContextExtraOutputs implements types.InvocationContextExtraOutputs {
+    #outputs: Record<string, unknown> = {};
+    get(outputOrName: types.FunctionOutput | string): unknown {
+        const name = typeof outputOrName === 'string' ? outputOrName : outputOrName.name;
+        return this.#outputs[name];
+    }
+    set(outputOrName: types.FunctionOutput | string, value: unknown): void {
+        const name = typeof outputOrName === 'string' ? outputOrName : outputOrName.name;
+        this.#outputs[name] = value;
+    }
+}
diff --git a/node_modules/@azure/functions/src/InvocationModel.ts b/node_modules/@azure/functions/src/InvocationModel.ts
new file mode 100644
index 00000000..d1958aed
--- /dev/null
+++ b/node_modules/@azure/functions/src/InvocationModel.ts
@@ -0,0 +1,176 @@
+// Copyright (c) .NET Foundation. All rights reserved.
+// Licensed under the MIT License.
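+//
+// Overview: the invocation model bridges the worker's RPC layer and the v4
+// programming model. `getArguments` converts RPC input bindings into handler
+// arguments, `invokeFunction` awaits the user handler, and `getResponse`
+// converts the result plus any `extraOutputs` back into RPC typed data.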
+
+import * as coreTypes from '@azure/functions-core';
+import {
+    CoreInvocationContext,
+    InvocationArguments,
+    RpcBindingInfo,
+    RpcInvocationResponse,
+    RpcLogCategory,
+    RpcLogLevel,
+    RpcTypedData,
+} from '@azure/functions-core';
+import { format } from 'util';
+import { returnBindingKey } from './constants';
+import { fromRpcBindings } from './converters/fromRpcBindings';
+import { fromRpcRetryContext, fromRpcTraceContext } from './converters/fromRpcContext';
+import { fromRpcTriggerMetadata } from './converters/fromRpcTriggerMetadata';
+import { fromRpcTypedData } from './converters/fromRpcTypedData';
+import { toCamelCaseValue } from './converters/toCamelCase';
+import { toRpcHttp } from './converters/toRpcHttp';
+import { toRpcTypedData } from './converters/toRpcTypedData';
+import { AzFuncSystemError } from './errors';
+import { waitForProxyRequest } from './http/httpProxy';
+import { createStreamRequest } from './http/HttpRequest';
+import { InvocationContext } from './InvocationContext';
+import { enableHttpStream } from './setup';
+import { isHttpTrigger, isTimerTrigger, isTrigger } from './utils/isTrigger';
+import { isDefined, nonNullProp, nonNullValue } from './utils/nonNull';
+
+export class InvocationModel implements coreTypes.InvocationModel {
+    #isDone = false;
+    #coreCtx: CoreInvocationContext;
+    #functionName: string;
+    #bindings: Record<string, RpcBindingInfo>;
+    #triggerType: string;
+
+    constructor(coreCtx: CoreInvocationContext) {
+        this.#coreCtx = coreCtx;
+        this.#functionName = nonNullProp(coreCtx.metadata, 'name');
+        this.#bindings = nonNullProp(coreCtx.metadata, 'bindings');
+        const triggerBinding = nonNullValue(
+            Object.values(this.#bindings).find((b) => isTrigger(b.type)),
+            'triggerBinding'
+        );
+        this.#triggerType = nonNullProp(triggerBinding, 'type');
+    }
+
+    // eslint-disable-next-line @typescript-eslint/require-await
+    async getArguments(): Promise<InvocationArguments> {
+        const req = this.#coreCtx.request;
+
+        const context = new InvocationContext({
+            invocationId: nonNullProp(this.#coreCtx, 'invocationId'),
+            functionName: this.#functionName,
+            logHandler: (level: RpcLogLevel, ...args: unknown[]) => this.#userLog(level, ...args),
+            retryContext: fromRpcRetryContext(req.retryContext),
+            traceContext: fromRpcTraceContext(req.traceContext),
+            triggerMetadata: fromRpcTriggerMetadata(req.triggerMetadata, this.#triggerType),
+            options: fromRpcBindings(this.#bindings),
+        });
+
+        const inputs: unknown[] = [];
+        if (req.inputData) {
+            for (const binding of req.inputData) {
+                const bindingName = nonNullProp(binding, 'name');
+
+                const rpcBinding = this.#bindings[bindingName];
+                if (!rpcBinding) {
+                    throw new AzFuncSystemError(
+                        `Failed to find binding "${bindingName}" in bindings "${Object.keys(this.#bindings).join(
+                            ', '
+                        )}".`
+                    );
+                }
+                const bindingType = rpcBinding.type;
+
+                let input: unknown;
+                if (isHttpTrigger(bindingType) && enableHttpStream) {
+                    const proxyRequest = await waitForProxyRequest(this.#coreCtx.invocationId);
+                    input = createStreamRequest(proxyRequest, nonNullProp(req, 'triggerMetadata'));
+                } else {
+                    input = fromRpcTypedData(binding.data);
+                }
+
+                if (isTimerTrigger(bindingType)) {
+                    input = toCamelCaseValue(input);
+                }
+
+                if (isTrigger(bindingType)) {
+                    inputs.push(input);
+                } else {
+                    context.extraInputs.set(bindingName, input);
+                }
+            }
+        }
+
+        return { context, inputs };
+    }
+
+    async invokeFunction(
+        context: InvocationContext,
+        inputs: unknown[],
+        handler: coreTypes.FunctionCallback
+    ): Promise<unknown> {
+        try {
+            return await Promise.resolve(handler(...inputs, context));
+        } finally {
+            this.#isDone = true;
+        }
+    }
+
+    async getResponse(context: InvocationContext, result: unknown): Promise<RpcInvocationResponse> {
+        const response: RpcInvocationResponse = { invocationId: this.#coreCtx.invocationId };
+
+        response.outputData = [];
+        let usedReturnValue = false;
+        for (const [name, binding] of Object.entries(this.#bindings)) {
+            if (binding.direction === 'out') {
+                if (name === returnBindingKey) {
+                    response.returnValue = await this.#convertOutput(context.invocationId, binding, result);
+                    usedReturnValue = true;
+                } else {
+                    const outputValue = await this.#convertOutput(
+                        context.invocationId,
+                        binding,
+                        context.extraOutputs.get(name)
+                    );
+                    if (isDefined(outputValue)) {
+                        response.outputData.push({ name, data: outputValue });
+                    }
+                }
+            }
+        }
+
+        // This allows the return value of non-HTTP triggered functions to be passed back
+        // to the host, even if no explicit output binding is set. In most cases, this is ignored,
+        // but e.g., Durable uses this to pass orchestrator state back to the Durable extension, w/o
+        // an explicit output binding. See here for more details: https://github.com/Azure/azure-functions-nodejs-library/pull/25
+        if (!usedReturnValue && !isHttpTrigger(this.#triggerType)) {
+            response.returnValue = toRpcTypedData(result);
+        }
+
+        return response;
+    }
+
+    async #convertOutput(
+        invocationId: string,
+        binding: RpcBindingInfo,
+        value: unknown
+    ): Promise<RpcTypedData | null | undefined> {
+        if (binding.type?.toLowerCase() === 'http') {
+            return toRpcHttp(invocationId, value);
+        } else {
+            return toRpcTypedData(value);
+        }
+    }
+
+    #log(level: RpcLogLevel, logCategory: RpcLogCategory, ...args: unknown[]): void {
+        this.#coreCtx.log(level, logCategory, format(...args));
+    }
+
+    #systemLog(level: RpcLogLevel, ...args: unknown[]) {
+        this.#log(level, 'system', ...args);
+    }
+
+    #userLog(level: RpcLogLevel, ...args: unknown[]): void {
+        if (this.#isDone && this.#coreCtx.state !== 'postInvocationHooks') {
+            let badAsyncMsg =
+                "Warning: Unexpected call to 'log' on the context object after function execution has completed. Please check for asynchronous calls that are not awaited. ";
+            badAsyncMsg += `Function name: ${this.#functionName}. Invocation Id: ${this.#coreCtx.invocationId}.`;
+            this.#systemLog('warning', badAsyncMsg);
+        }
+        this.#log(level, 'user', ...args);
+    }
+}
diff --git a/node_modules/@azure/functions/src/ProgrammingModel.ts b/node_modules/@azure/functions/src/ProgrammingModel.ts
new file mode 100644
index 00000000..009ed92e
--- /dev/null
+++ b/node_modules/@azure/functions/src/ProgrammingModel.ts
@@ -0,0 +1,31 @@
+// Copyright (c) .NET Foundation. All rights reserved.
+// Licensed under the MIT License.
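+//
+// Overview: the entry point handed to the Functions host. It creates an
+// InvocationModel per invocation and, while negotiating capabilities,
+// starts the local HTTP proxy if HTTP streaming was enabled via `setup`.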
+
+import * as coreTypes from '@azure/functions-core';
+import { CoreInvocationContext, WorkerCapabilities } from '@azure/functions-core';
+import { version } from './constants';
+import { setupHttpProxy } from './http/httpProxy';
+import { InvocationModel } from './InvocationModel';
+import { capabilities as libraryCapabilities, enableHttpStream, lockSetup } from './setup';
+
+export class ProgrammingModel implements coreTypes.ProgrammingModel {
+    name = '@azure/functions';
+    version = version;
+
+    getInvocationModel(coreCtx: CoreInvocationContext): InvocationModel {
+        return new InvocationModel(coreCtx);
+    }
+
+    async getCapabilities(workerCapabilities: WorkerCapabilities): Promise<WorkerCapabilities> {
+        lockSetup();
+
+        if (enableHttpStream) {
+            const httpUri = await setupHttpProxy();
+            workerCapabilities.HttpUri = httpUri;
+        }
+
+        Object.assign(workerCapabilities, libraryCapabilities);
+
+        return workerCapabilities;
+    }
+}
diff --git a/node_modules/@azure/functions/src/addBindingName.ts b/node_modules/@azure/functions/src/addBindingName.ts
new file mode 100644
index 00000000..3ac0f952
--- /dev/null
+++ b/node_modules/@azure/functions/src/addBindingName.ts
@@ -0,0 +1,27 @@
+// Copyright (c) .NET Foundation. All rights reserved.
+// Licensed under the MIT License.
+
+import { getStringHash } from './utils/getRandomHexString';
+
+/**
+ * If the host spawns multiple workers, it expects the metadata (including binding name) to be the same across workers.
+ * That means we need to generate binding names in a deterministic fashion, so we'll do that using a string hash of the binding data
+ * A few considerations:
+ * 1. We will include the binding type in the name to make it more readable
+ * 2. Users can manually specify the name themselves and we will respect that
+ * 3. The only time the hash should cause a conflict is if a single function has duplicate bindings. Not sure why someone would do that, but we will throw an error at function registration time
+ * More info here: https://github.com/Azure/azure-functions-nodejs-worker/issues/638
+ */
+export function addBindingName<T extends { type: string; name?: string }>(
+    binding: T,
+    suffix: string
+): T & { name: string } {
+    if (!binding.name) {
+        let bindingType = binding.type;
+        if (!bindingType.toLowerCase().endsWith(suffix.toLowerCase())) {
+            bindingType += suffix;
+        }
+        binding.name = bindingType + getStringHash(JSON.stringify(binding));
+    }
+    return binding;
+}
diff --git a/node_modules/@azure/functions/src/app.ts b/node_modules/@azure/functions/src/app.ts
new file mode 100644
index 00000000..b7818459
--- /dev/null
+++ b/node_modules/@azure/functions/src/app.ts
@@ -0,0 +1,173 @@
+// Copyright (c) .NET Foundation. All rights reserved.
+// Licensed under the MIT License.
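+//
+// Overview: the `app.*` registration helpers. Each trigger-specific helper
+// (get/post/timer/storageBlob/...) builds a trigger object from its options
+// and funnels into `generic()`, which registers the function with the core
+// API. For example, hypothetical app code (not part of this file):
+//
+//     const { app } = require('@azure/functions');
+//     app.get('ping', async (request, context) => ({ body: 'pong' }));
+//
+// registers an HTTP function named "ping" that only accepts GET requests.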
+
+import {
+    CosmosDBFunctionOptions,
+    EventGridFunctionOptions,
+    EventHubFunctionOptions,
+    FunctionTrigger,
+    GenericFunctionOptions,
+    HttpFunctionOptions,
+    HttpHandler,
+    HttpMethod,
+    HttpMethodFunctionOptions,
+    McpToolFunctionOptions,
+    MySqlFunctionOptions,
+    ServiceBusQueueFunctionOptions,
+    ServiceBusTopicFunctionOptions,
+    SqlFunctionOptions,
+    StorageBlobFunctionOptions,
+    StorageQueueFunctionOptions,
+    TimerFunctionOptions,
+    WarmupFunctionOptions,
+    WebPubSubFunctionOptions,
+} from '@azure/functions';
+import { FunctionCallback } from '@azure/functions-core';
+import { toCoreFunctionMetadata } from './converters/toCoreFunctionMetadata';
+import * as output from './output';
+import { ProgrammingModel } from './ProgrammingModel';
+import * as trigger from './trigger';
+import { tryGetCoreApiLazy } from './utils/tryGetCoreApiLazy';
+
+export * as hook from './hooks/registerHook';
+export { setup } from './setup';
+
+let hasSetModel = false;
+function setProgrammingModel() {
+    const coreApi = tryGetCoreApiLazy();
+    if (!coreApi) {
+        console.warn(
+            'WARNING: Failed to detect the Azure Functions runtime. Switching "@azure/functions" package to test mode - not all features are supported.'
+        );
+    } else {
+        coreApi.setProgrammingModel(new ProgrammingModel());
+    }
+    hasSetModel = true;
+}
+
+function convertToHttpOptions(
+    optionsOrHandler: HttpFunctionOptions | HttpHandler,
+    method: HttpMethod
+): HttpFunctionOptions {
+    const options: HttpFunctionOptions =
+        typeof optionsOrHandler === 'function' ? { handler: optionsOrHandler } : optionsOrHandler;
+    options.methods = [method];
+    return options;
+}
+
+function convertToGenericOptions<T extends Record<string, unknown> & Partial<GenericFunctionOptions>>(
+    options: T,
+    triggerMethod: (
+        o: Omit<T, keyof GenericFunctionOptions>
+    ) => FunctionTrigger
+): GenericFunctionOptions {
+    const { handler, return: ret, trigger, extraInputs, extraOutputs, retry, ...triggerOptions } = options;
+    return {
+        trigger: trigger ?? triggerMethod(triggerOptions),
+        return: ret,
+        retry,
+        extraInputs,
+        extraOutputs,
+        handler,
+    };
+}
+
+export function get(name: string, optionsOrHandler: HttpMethodFunctionOptions | HttpHandler): void {
+    http(name, convertToHttpOptions(optionsOrHandler, 'GET'));
+}
+
+export function put(name: string, optionsOrHandler: HttpMethodFunctionOptions | HttpHandler): void {
+    http(name, convertToHttpOptions(optionsOrHandler, 'PUT'));
+}
+
+export function post(name: string, optionsOrHandler: HttpMethodFunctionOptions | HttpHandler): void {
+    http(name, convertToHttpOptions(optionsOrHandler, 'POST'));
+}
+
+export function patch(name: string, optionsOrHandler: HttpMethodFunctionOptions | HttpHandler): void {
+    http(name, convertToHttpOptions(optionsOrHandler, 'PATCH'));
+}
+
+export function deleteRequest(name: string, optionsOrHandler: HttpMethodFunctionOptions | HttpHandler): void {
+    http(name, convertToHttpOptions(optionsOrHandler, 'DELETE'));
+}
+
+export function http(name: string, options: HttpFunctionOptions): void {
+    options.return ||= output.http({});
+    generic(name, convertToGenericOptions(options, trigger.http));
+}
+
+export function timer(name: string, options: TimerFunctionOptions): void {
+    generic(name, convertToGenericOptions(options, trigger.timer));
+}
+
+export function storageBlob(name: string, options: StorageBlobFunctionOptions): void {
+    generic(name, convertToGenericOptions(options, trigger.storageBlob));
+}
+
+export function storageQueue(name: string, options: StorageQueueFunctionOptions): void {
+    generic(name, convertToGenericOptions(options, trigger.storageQueue));
+}
+
+export function serviceBusQueue(name: string, options: ServiceBusQueueFunctionOptions): void {
+    generic(name, convertToGenericOptions(options, trigger.serviceBusQueue));
+}
+
+export function serviceBusTopic(name: string, options: ServiceBusTopicFunctionOptions): void {
+    generic(name, convertToGenericOptions(options, trigger.serviceBusTopic));
+}
+
+export function eventHub(name: string, options: EventHubFunctionOptions): void {
+    generic(name, convertToGenericOptions(options, trigger.eventHub));
+}
+
+export function eventGrid(name: string, options: EventGridFunctionOptions): void {
+    generic(name, convertToGenericOptions(options, trigger.eventGrid));
+}
+
+export function cosmosDB(name: string, options: CosmosDBFunctionOptions): void {
+    // eslint-disable-next-line @typescript-eslint/no-unsafe-argument
+    generic(name, convertToGenericOptions(options, trigger.cosmosDB));
+}
+
+export function warmup(name: string, options: WarmupFunctionOptions): void {
+    generic(name, convertToGenericOptions(options, trigger.warmup));
+}
+
+export function sql(name: string, options: SqlFunctionOptions): void {
+    generic(name, convertToGenericOptions(options, trigger.sql));
+}
+
+export function mySql(name: string, options: MySqlFunctionOptions): void {
+    generic(name, convertToGenericOptions(options, trigger.mySql));
+}
+
+export function webPubSub(name: string, options: WebPubSubFunctionOptions): void {
+    generic(name, convertToGenericOptions(options, trigger.webPubSub));
+}
+
+/**
+ * Registers an MCP Tool function in your app.
+ * This function is triggered by MCP Tool events and allows you to define the behavior of the function.
+ *
+ * @param name - The name of the function. This must be unique within your app and is primarily used for tracking purposes.
+ * @param options - Configuration options for the MCP Tool function, including the handler and trigger-specific settings.
+ */
+export function mcpTool(name: string, options: McpToolFunctionOptions): void {
+    generic(name, convertToGenericOptions(options, trigger.mcpTool));
+}
+
+export function generic(name: string, options: GenericFunctionOptions): void {
+    if (!hasSetModel) {
+        setProgrammingModel();
+    }
+
+    const coreApi = tryGetCoreApiLazy();
+    if (!coreApi) {
+        console.warn(
+            `WARNING: Skipping call to register function "${name}" because the "@azure/functions" package is in test mode.`
+        );
+    } else {
+        coreApi.registerFunction(toCoreFunctionMetadata(name, options), options.handler);
+    }
+}
diff --git a/node_modules/@azure/functions/src/constants.ts b/node_modules/@azure/functions/src/constants.ts
new file mode 100644
index 00000000..3b05c2d4
--- /dev/null
+++ b/node_modules/@azure/functions/src/constants.ts
@@ -0,0 +1,6 @@
+// Copyright (c) .NET Foundation. All rights reserved.
+// Licensed under the MIT License.
+
+export const version = '4.8.0';
+
+export const returnBindingKey = '$return';
diff --git a/node_modules/@azure/functions/src/converters/fromRpcBindings.ts b/node_modules/@azure/functions/src/converters/fromRpcBindings.ts
new file mode 100644
index 00000000..abdaf07b
--- /dev/null
+++ b/node_modules/@azure/functions/src/converters/fromRpcBindings.ts
@@ -0,0 +1,40 @@
+// Copyright (c) .NET Foundation. All rights reserved.
+// Licensed under the MIT License.
+
+import { EffectiveFunctionOptions, FunctionInput, FunctionOutput, FunctionTrigger } from '@azure/functions';
+import { RpcBindingInfo } from '@azure/functions-core';
+import { returnBindingKey } from '../constants';
+import { isTrigger } from '../utils/isTrigger';
+import { nonNullProp, nonNullValue } from '../utils/nonNull';
+
+export function fromRpcBindings(bindings: Record<string, RpcBindingInfo> | null | undefined): EffectiveFunctionOptions {
+    let trigger: FunctionTrigger | undefined;
+    let returnBinding: FunctionOutput | undefined;
+    const extraInputs: FunctionInput[] = [];
+    const extraOutputs: FunctionOutput[] = [];
+    for (const [name, binding] of Object.entries(nonNullValue(bindings, 'bindings'))) {
+        if (isTrigger(binding.type)) {
+            trigger = fromRpcBinding(name, binding);
+        } else if (name === returnBindingKey) {
+            returnBinding = fromRpcBinding(name, binding);
+        } else if (binding.direction === 'in') {
+            extraInputs.push(fromRpcBinding(name, binding));
+        } else if (binding.direction === 'out') {
+            extraOutputs.push(fromRpcBinding(name, binding));
+        }
+    }
+    return {
+        trigger: nonNullValue(trigger, 'trigger'),
+        return: returnBinding,
+        extraInputs,
+        extraOutputs,
+    };
+}
+
+function fromRpcBinding(name: string, binding: RpcBindingInfo): FunctionTrigger | FunctionInput | FunctionOutput {
+    return {
+        ...binding,
+        type: nonNullProp(binding, 'type'),
+        name,
+    };
+}
diff --git a/node_modules/@azure/functions/src/converters/fromRpcContext.ts b/node_modules/@azure/functions/src/converters/fromRpcContext.ts
new file mode 100644
index 00000000..71f84952
--- /dev/null
+++ b/node_modules/@azure/functions/src/converters/fromRpcContext.ts
@@ -0,0 +1,43 @@
+// Copyright (c) .NET Foundation. All rights reserved.
+// Licensed under the MIT License.
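+//
+// Overview: converters from RPC wire types to the public RetryContext and
+// TraceContext shapes; optional fields are only copied when defined.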
+
+import { Exception, RetryContext, TraceContext } from '@azure/functions';
+import { RpcException, RpcRetryContext, RpcTraceContext } from '@azure/functions-core';
+import { copyPropIfDefined, nonNullProp } from '../utils/nonNull';
+
+export function fromRpcRetryContext(retryContext: RpcRetryContext | null | undefined): RetryContext | undefined {
+    if (!retryContext) {
+        return undefined;
+    } else {
+        const result: RetryContext = {
+            retryCount: nonNullProp(retryContext, 'retryCount'),
+            maxRetryCount: nonNullProp(retryContext, 'maxRetryCount'),
+        };
+        if (retryContext.exception) {
+            result.exception = fromRpcException(retryContext.exception);
+        }
+        return result;
+    }
+}
+
+function fromRpcException(exception: RpcException): Exception {
+    const result: Exception = {};
+    copyPropIfDefined(exception, result, 'message');
+    copyPropIfDefined(exception, result, 'source');
+    copyPropIfDefined(exception, result, 'stackTrace');
+    return result;
+}
+
+export function fromRpcTraceContext(traceContext: RpcTraceContext | null | undefined): TraceContext | undefined {
+    if (!traceContext) {
+        return undefined;
+    } else {
+        const result: TraceContext = {};
+        copyPropIfDefined(traceContext, result, 'traceParent');
+        copyPropIfDefined(traceContext, result, 'traceState');
+        if (traceContext.attributes) {
+            result.attributes = traceContext.attributes;
+        }
+        return result;
+    }
+}
diff --git a/node_modules/@azure/functions/src/converters/fromRpcNullable.ts b/node_modules/@azure/functions/src/converters/fromRpcNullable.ts
new file mode 100644
index 00000000..613be930
--- /dev/null
+++ b/node_modules/@azure/functions/src/converters/fromRpcNullable.ts
@@ -0,0 +1,19 @@
+// Copyright (c) .NET Foundation. All rights reserved.
+// Licensed under the MIT License.
+
+import { RpcNullableString } from '@azure/functions-core';
+
+export function fromNullableMapping(
+    nullableMapping: Record<string, RpcNullableString> | null | undefined,
+    originalMapping?: Record<string, string> | null
+): Record<string, string> {
+    let converted: Record<string, string> = {};
+    if (nullableMapping && Object.keys(nullableMapping).length > 0) {
+        for (const key in nullableMapping) {
+            converted[key] = nullableMapping[key]?.value || '';
+        }
+    } else if (originalMapping && Object.keys(originalMapping).length > 0) {
+        converted = originalMapping;
+    }
+    return converted;
+}
diff --git a/node_modules/@azure/functions/src/converters/fromRpcTriggerMetadata.ts b/node_modules/@azure/functions/src/converters/fromRpcTriggerMetadata.ts
new file mode 100644
index 00000000..4e5efd1c
--- /dev/null
+++ b/node_modules/@azure/functions/src/converters/fromRpcTriggerMetadata.ts
@@ -0,0 +1,27 @@
+// Copyright (c) .NET Foundation. All rights reserved.
+// Licensed under the MIT License.
+
+import { TriggerMetadata } from '@azure/functions';
+import { RpcTypedData } from '@azure/functions-core';
+import { isHttpTrigger, isTimerTrigger } from '../utils/isTrigger';
+import { fromRpcTypedData } from './fromRpcTypedData';
+import { toCamelCaseKey, toCamelCaseValue } from './toCamelCase';
+
+export function fromRpcTriggerMetadata(
+    triggerMetadata: Record<string, RpcTypedData> | null | undefined,
+    triggerType: string
+): TriggerMetadata | undefined {
+    // For http and timer triggers, we will avoid using `triggerMetadata` for a few reasons:
+    // 1. It uses `toCamelCase` methods, which can lead to weird casing bugs
+    // 2. It's generally a large medley of properties that is difficult for us to document/type
+    // 3. We can represent that information on the request & timer objects instead
+    if (!triggerMetadata || isHttpTrigger(triggerType) || isTimerTrigger(triggerType)) {
+        return undefined;
+    } else {
+        const result: TriggerMetadata = {};
+        for (const [key, value] of Object.entries(triggerMetadata)) {
+            result[toCamelCaseKey(key)] = toCamelCaseValue(fromRpcTypedData(value));
+        }
+        return result;
+    }
+}
diff --git a/node_modules/@azure/functions/src/converters/fromRpcTypedData.ts b/node_modules/@azure/functions/src/converters/fromRpcTypedData.ts
new file mode 100644
index 00000000..a8ed9631
--- /dev/null
+++ b/node_modules/@azure/functions/src/converters/fromRpcTypedData.ts
@@ -0,0 +1,44 @@
+// Copyright (c) .NET Foundation. All rights reserved.
+// Licensed under the MIT License.
+
+import { RpcTypedData } from '@azure/functions-core';
+import { HttpRequest } from '../http/HttpRequest';
+import { isDefined } from '../utils/nonNull';
+
+export function fromRpcTypedData(data: RpcTypedData | null | undefined): unknown {
+    if (!data) {
+        return undefined;
+    } else if (isDefined(data.string)) {
+        return tryJsonParse(data.string);
+    } else if (isDefined(data.json)) {
+        return JSON.parse(data.json);
+    } else if (isDefined(data.bytes)) {
+        return Buffer.from(data.bytes);
+    } else if (isDefined(data.stream)) {
+        return Buffer.from(data.stream);
+    } else if (isDefined(data.http)) {
+        return new HttpRequest(data.http);
+    } else if (isDefined(data.int)) {
+        return data.int;
+    } else if (isDefined(data.double)) {
+        return data.double;
+    } else if (data.collectionBytes && isDefined(data.collectionBytes.bytes)) {
+        return data.collectionBytes.bytes.map((d) => Buffer.from(d));
+    } else if (data.collectionString && isDefined(data.collectionString.string)) {
+        return data.collectionString.string.map(tryJsonParse);
+    } else if (data.collectionDouble && isDefined(data.collectionDouble.double)) {
+        return data.collectionDouble.double;
+    } else if (data.collectionSint64 && isDefined(data.collectionSint64.sint64)) {
+        return data.collectionSint64.sint64;
+    } else {
+        return undefined;
+    }
+}
+
+function tryJsonParse(data: string): unknown {
+    try {
+        return JSON.parse(data);
+    } catch {
+        return data;
+    }
+}
diff --git a/node_modules/@azure/functions/src/converters/toCamelCase.ts b/node_modules/@azure/functions/src/converters/toCamelCase.ts
new file mode 100644
index 00000000..20e2b6a5
--- /dev/null
+++ b/node_modules/@azure/functions/src/converters/toCamelCase.ts
@@ -0,0 +1,20 @@
+// Copyright (c) .NET Foundation. All rights reserved.
+// Licensed under the MIT License.
+
+export function toCamelCaseValue(data: unknown): unknown {
+    if (typeof data !== 'object' || data === null) {
+        return data;
+    } else if (Array.isArray(data)) {
+        return data.map(toCamelCaseValue);
+    } else {
+        const result: Record<string, unknown> = {};
+        for (const [key, value] of Object.entries(data)) {
+            result[toCamelCaseKey(key)] = toCamelCaseValue(value);
+        }
+        return result;
+    }
+}
+
+export function toCamelCaseKey(key: string): string {
+    return key.charAt(0).toLowerCase() + key.slice(1);
+}
diff --git a/node_modules/@azure/functions/src/converters/toCoreFunctionMetadata.ts b/node_modules/@azure/functions/src/converters/toCoreFunctionMetadata.ts
new file mode 100644
index 00000000..390c9eaf
--- /dev/null
+++ b/node_modules/@azure/functions/src/converters/toCoreFunctionMetadata.ts
@@ -0,0 +1,76 @@
+// Copyright (c) .NET Foundation. All rights reserved.
+// Licensed under the MIT License.
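+//
+// Overview: flattens a function's trigger, extra inputs/outputs, and the
+// optional `$return` binding into the metadata record the host expects,
+// rejecting duplicate binding names and converting retry intervals into
+// RPC durations.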
+
+import { ExponentialBackoffRetryOptions, FixedDelayRetryOptions, GenericFunctionOptions } from '@azure/functions';
+import * as coreTypes from '@azure/functions-core';
+import { returnBindingKey } from '../constants';
+import { AzFuncSystemError } from '../errors';
+import { isTrigger } from '../utils/isTrigger';
+import { toRpcDuration } from './toRpcDuration';
+
+export function toCoreFunctionMetadata(name: string, options: GenericFunctionOptions): coreTypes.FunctionMetadata {
+    const bindings: Record<string, coreTypes.RpcBindingInfo> = {};
+    const bindingNames: string[] = [];
+
+    const trigger = options.trigger;
+    bindings[trigger.name] = {
+        ...trigger,
+        direction: 'in',
+        type: isTrigger(trigger.type) ? trigger.type : trigger.type + 'Trigger',
+    };
+    bindingNames.push(trigger.name);
+
+    if (options.extraInputs) {
+        for (const input of options.extraInputs) {
+            bindings[input.name] = {
+                ...input,
+                direction: 'in',
+            };
+            bindingNames.push(input.name);
+        }
+    }
+
+    if (options.return) {
+        bindings[returnBindingKey] = {
+            ...options.return,
+            direction: 'out',
+        };
+        bindingNames.push(returnBindingKey);
+    }
+
+    if (options.extraOutputs) {
+        for (const output of options.extraOutputs) {
+            bindings[output.name] = {
+                ...output,
+                direction: 'out',
+            };
+            bindingNames.push(output.name);
+        }
+    }
+
+    const dupeBindings = bindingNames.filter((v, i) => bindingNames.indexOf(v) !== i);
+    if (dupeBindings.length > 0) {
+        throw new AzFuncSystemError(
+            `Duplicate bindings found for function "${name}". Remove a duplicate binding or manually specify the "name" property to make it unique.`
+        );
+    }
+
+    let retryOptions: coreTypes.RpcRetryOptions | undefined;
+    if (options.retry) {
+        retryOptions = {
+            ...options.retry,
+            retryStrategy: options.retry.strategy,
+            delayInterval: toRpcDuration((<FixedDelayRetryOptions>options.retry).delayInterval, 'retry.delayInterval'),
+            maximumInterval: toRpcDuration(
+                (<ExponentialBackoffRetryOptions>options.retry).maximumInterval,
+                'retry.maximumInterval'
+            ),
+            minimumInterval: toRpcDuration(
+                (<ExponentialBackoffRetryOptions>options.retry).minimumInterval,
+                'retry.minimumInterval'
+            ),
+        };
+    }
+
+    return { name, bindings, retryOptions };
+}
diff --git a/node_modules/@azure/functions/src/converters/toMcpToolTriggerOptionsToRpc.ts b/node_modules/@azure/functions/src/converters/toMcpToolTriggerOptionsToRpc.ts
new file mode 100644
index 00000000..fe6cf1ee
--- /dev/null
+++ b/node_modules/@azure/functions/src/converters/toMcpToolTriggerOptionsToRpc.ts
@@ -0,0 +1,149 @@
+// Copyright (c) .NET Foundation. All rights reserved.
+// Licensed under the MIT License.
+
+import { McpToolProperty, McpToolTriggerOptions, McpToolTriggerOptionsToRpc } from '../../types';
+
+// Copyright (c) .NET Foundation. All rights reserved.
+// Licensed under the MIT License.
+
+/**
+ * Converts an McpToolTriggerOptions object to an McpToolTriggerOptionsToRpc object.
+ *
+ * @param mcpToolTriggerOptions - The input options to be converted.
+ * @returns The converted McpToolTriggerOptionsToRpc object.
+ */
+export function converToMcpToolTriggerOptionsToRpc(
+    mcpToolTriggerOptions: McpToolTriggerOptions
+): McpToolTriggerOptionsToRpc {
+    // Base object for the return value
+    const baseResult = {
+        toolName: mcpToolTriggerOptions.toolName,
+        description: mcpToolTriggerOptions.description,
+    };
+
+    // Check for null or undefined toolProperties
+    if (!mcpToolTriggerOptions?.toolProperties) {
+        return {
+            ...baseResult,
+            toolProperties: JSON.stringify([]), // Default to an empty array
+        };
+    }
+
+    // Check if toolProperties is an array of McpToolProperty objects
+    if (Array.isArray(mcpToolTriggerOptions.toolProperties)) {
+        const isValid = mcpToolTriggerOptions.toolProperties.every(isMcpToolProperty);
+        if (isValid) {
+            return {
+                ...baseResult,
+                toolProperties: JSON.stringify(mcpToolTriggerOptions.toolProperties),
+            };
+        } else {
+            throw new Error(
+                'Invalid toolProperties: Array contains invalid McpToolProperty, please validate the parameters.'
+            );
+        }
+    }
+
+    // Handle cases where toolProperties is an object (e.g., Zod schema)
+    if (typeof mcpToolTriggerOptions.toolProperties === 'object') {
+        // Define the type of the ZodObject shape and ZodPropertyDef
+        type ZodPropertyDef = {
+            description?: string;
+            typeName: string;
+        };
+        type ZodObjectShape = Record<string, { _def: ZodPropertyDef }>;
+
+        // Define the type of the toolProperties object
+        type ToolProperties =
+            | {
+                  _def?: {
+                      typeName?: string;
+                  };
+                  shape?: ZodObjectShape;
+              }
+            | Record<string, unknown>;
+
+        let isZodObject = false;
+
+        const toolProperties = mcpToolTriggerOptions.toolProperties as ToolProperties;
+
+        // Check if the object is a ZodObject
+        if ((toolProperties?._def as { typeName?: string })?.typeName === 'ZodObject') {
+            isZodObject = true;
+        }
+
+        // Check if shape is a valid ZodObject shape
+        const shape: ZodObjectShape | Record<string, unknown> = isZodObject
+            ? (toolProperties as { shape: ZodObjectShape }).shape
+            : toolProperties;
+
+        // Extract properties from the ZodObject shape
+        const result = Object.keys(shape).map((propertyName) => {
+            const property = shape[propertyName] as { _def: ZodPropertyDef };
+            const description = property?._def?.description || '';
+            const propertyType = getPropertyType(property?._def?.typeName?.toLowerCase() || 'unknown'); // Extract type name or default to "unknown"
+
+            return {
+                propertyName,
+                propertyType,
+                description,
+            };
+        });
+
+        return {
+            ...baseResult,
+            toolProperties: JSON.stringify(result),
+        };
+    }
+    // Handle cases where toolProperties is not an array
+    throw new Error('Invalid toolProperties: Expected an array of McpToolProperty objects or zod objects.');
+}
+
+// Helper function to infer property type from zod schema
+function getPropertyType(zodType: string): string {
+    switch (zodType) {
+        case 'zodnumber':
+            return 'number';
+        case 'zodstring':
+            return 'string';
+        case 'zodboolean':
+            return 'boolean';
+        case 'zodarray':
+            return 'array';
+        case 'zodobject':
+            return 'object';
+        case 'zodbigint':
+            return 'long';
+        case 'zoddate':
+            return 'DateTime';
+        case 'zodtuple':
+            return 'Tuple';
+        default:
+            console.warn(`Unknown zod type: ${zodType}`);
+            return 'unknown';
+    }
+}
+
+/**
+ * Type guard to check if a given object is of type McpToolProperty.
+ *
+ * @param property - The object to check.
+ * @returns True if the object is of type McpToolProperty, otherwise false.
+ *
+ * This function ensures that the object:
+ * - Is not null and is of type 'object'.
+ * - Contains the required properties: 'propertyName', 'propertyValue', and 'description'.
+ * - Each of these properties is of the correct type (string).
+ */
+function isMcpToolProperty(property: unknown): property is McpToolProperty {
+    return (
+        typeof property === 'object' &&
+        property !== null &&
+        'propertyName' in property &&
+        'propertyType' in property &&
+        'description' in property &&
+        typeof (property as McpToolProperty).propertyName === 'string' &&
+        typeof (property as McpToolProperty).propertyType === 'string' &&
+        typeof (property as McpToolProperty).description === 'string'
+    );
+}
diff --git a/node_modules/@azure/functions/src/converters/toRpcDuration.ts b/node_modules/@azure/functions/src/converters/toRpcDuration.ts
new file mode 100644
index 00000000..7e03c1f3
--- /dev/null
+++ b/node_modules/@azure/functions/src/converters/toRpcDuration.ts
@@ -0,0 +1,36 @@
+// Copyright (c) .NET Foundation. All rights reserved.
+// Licensed under the MIT License.
+
+import { RpcDuration } from '@azure/functions-core';
+import { Duration } from '../../types';
+import { AzFuncSystemError } from '../errors';
+import { isDefined } from '../utils/nonNull';
+
+export function toRpcDuration(dateTime: Duration | number | undefined, propertyName: string): RpcDuration | undefined {
+    if (isDefined(dateTime)) {
+        try {
+            let timeInMilliseconds: number | undefined;
+            if (typeof dateTime === 'object') {
+                const minutes = (dateTime.minutes || 0) + (dateTime.hours || 0) * 60;
+                const seconds = (dateTime.seconds || 0) + minutes * 60;
+                timeInMilliseconds = (dateTime.milliseconds || 0) + seconds * 1000;
+            } else if (typeof dateTime === 'number') {
+                timeInMilliseconds = dateTime;
+            }
+
+            if (isDefined(timeInMilliseconds) && timeInMilliseconds >= 0) {
+                return {
+                    seconds: Math.round(timeInMilliseconds / 1000),
+                };
+            }
+        } catch {
+            // fall through
+        }
+
+        throw new AzFuncSystemError(
+            `A 'number' or 'Duration' object was expected instead of a '${typeof dateTime}'. Cannot parse value of '${propertyName}'.`
+        );
+    }
+
+    return undefined;
+}
diff --git a/node_modules/@azure/functions/src/converters/toRpcHttp.ts b/node_modules/@azure/functions/src/converters/toRpcHttp.ts
new file mode 100644
index 00000000..c314f17a
--- /dev/null
+++ b/node_modules/@azure/functions/src/converters/toRpcHttp.ts
@@ -0,0 +1,47 @@
+// Copyright (c) .NET Foundation. All rights reserved.
+// Licensed under the MIT License.
+
+import { RpcHttpData, RpcTypedData } from '@azure/functions-core';
+import { AzFuncSystemError } from '../errors';
+import { sendProxyResponse } from '../http/httpProxy';
+import { HttpResponse } from '../http/HttpResponse';
+import { enableHttpStream } from '../setup';
+import { toRpcHttpCookie } from './toRpcHttpCookie';
+import { toRpcTypedData } from './toRpcTypedData';
+
+export async function toRpcHttp(invocationId: string, data: unknown): Promise<RpcTypedData | null | undefined> {
+    if (data === null || data === undefined) {
+        return data;
+    } else if (typeof data !== 'object') {
+        throw new AzFuncSystemError(
+            'The HTTP response must be an object with optional properties "body", "status", "headers", and "cookies".'
+        );
+    }
+
+    const response = data instanceof HttpResponse ?
data : new HttpResponse(data); + if (enableHttpStream) { + // send http data over http proxy instead of rpc + await sendProxyResponse(invocationId, response); + return; + } + + const rpcResponse: RpcHttpData = {}; + rpcResponse.statusCode = response.status.toString(); + + rpcResponse.headers = {}; + for (const [key, value] of response.headers.entries()) { + rpcResponse.headers[key] = value; + } + + rpcResponse.cookies = []; + for (const cookie of response.cookies) { + rpcResponse.cookies.push(toRpcHttpCookie(cookie)); + } + + rpcResponse.enableContentNegotiation = response.enableContentNegotiation; + + const bodyBytes = await response.arrayBuffer(); + rpcResponse.body = toRpcTypedData(bodyBytes); + + return { http: rpcResponse }; +} diff --git a/node_modules/@azure/functions/src/converters/toRpcHttpCookie.ts b/node_modules/@azure/functions/src/converters/toRpcHttpCookie.ts new file mode 100644 index 00000000..e3532434 --- /dev/null +++ b/node_modules/@azure/functions/src/converters/toRpcHttpCookie.ts @@ -0,0 +1,39 @@ +// Copyright (c) .NET Foundation. All rights reserved. +// Licensed under the MIT License. + +import { Cookie } from '@azure/functions'; +import { RpcHttpCookie, RpcHttpCookieSameSite } from '@azure/functions-core'; +import { toNullableBool, toNullableDouble, toNullableString, toNullableTimestamp, toRpcString } from './toRpcNullable'; + +/** + * From RFC specifications for 'Set-Cookie' response header: https://www.rfc-editor.org/rfc/rfc6265.txt + * @param inputCookie + */ +export function toRpcHttpCookie(inputCookie: Cookie): RpcHttpCookie { + // Resolve RpcHttpCookie.SameSite enum, a one-off + let rpcSameSite: RpcHttpCookieSameSite = 'none'; + if (inputCookie && inputCookie.sameSite) { + const sameSite = inputCookie.sameSite.toLocaleLowerCase(); + if (sameSite === 'lax') { + rpcSameSite = 'lax'; + } else if (sameSite === 'strict') { + rpcSameSite = 'strict'; + } else if (sameSite === 'none') { + rpcSameSite = 'explicitNone'; + } + } + + const rpcCookie: RpcHttpCookie = { + name: inputCookie && toRpcString(inputCookie.name, 'cookie.name'), + value: inputCookie && toRpcString(inputCookie.value, 'cookie.value'), + domain: toNullableString(inputCookie && inputCookie.domain, 'cookie.domain'), + path: toNullableString(inputCookie && inputCookie.path, 'cookie.path'), + expires: toNullableTimestamp(inputCookie && inputCookie.expires, 'cookie.expires'), + secure: toNullableBool(inputCookie && inputCookie.secure, 'cookie.secure'), + httpOnly: toNullableBool(inputCookie && inputCookie.httpOnly, 'cookie.httpOnly'), + sameSite: rpcSameSite, + maxAge: toNullableDouble(inputCookie && inputCookie.maxAge, 'cookie.maxAge'), + }; + + return rpcCookie; +} diff --git a/node_modules/@azure/functions/src/converters/toRpcNullable.ts b/node_modules/@azure/functions/src/converters/toRpcNullable.ts new file mode 100644 index 00000000..56375e80 --- /dev/null +++ b/node_modules/@azure/functions/src/converters/toRpcNullable.ts @@ -0,0 +1,132 @@ +// Copyright (c) .NET Foundation. All rights reserved. +// Licensed under the MIT License. + +import { RpcNullableBool, RpcNullableDouble, RpcNullableString, RpcNullableTimestamp } from '@azure/functions-core'; +import { AzFuncSystemError } from '../errors'; +import { isDefined } from '../utils/nonNull'; + +/** + * Converts boolean input to an 'INullableBool' to be sent through the RPC layer. + * Input that is not a boolean but is also not null or undefined logs a function app level warning. 
+ * @param nullable Input to be converted to an INullableBool if it is a valid boolean + * @param propertyName The name of the property that the caller will assign the output to. Used for debugging. + */ +export function toNullableBool(nullable: boolean | undefined, propertyName: string): undefined | RpcNullableBool { + if (typeof nullable === 'boolean') { + return { + value: nullable, + }; + } + + if (isDefined(nullable)) { + throw new AzFuncSystemError( + `A 'boolean' type was expected instead of a '${typeof nullable}' type. Cannot parse value of '${propertyName}'.` + ); + } + + return undefined; +} + +/** + * Converts number or string that parses to a number to an 'INullableDouble' to be sent through the RPC layer. + * Input that is not a valid number but is also not null or undefined logs a function app level warning. + * @param nullable Input to be converted to an INullableDouble if it is a valid number + * @param propertyName The name of the property that the caller will assign the output to. Used for debugging. + */ +export function toNullableDouble( + nullable: number | string | undefined, + propertyName: string +): undefined | RpcNullableDouble { + if (typeof nullable === 'number') { + return { + value: nullable, + }; + } else if (typeof nullable === 'string') { + if (!isNaN(Number(nullable))) { + const parsedNumber = parseFloat(nullable); + return { + value: parsedNumber, + }; + } + } + + if (isDefined(nullable)) { + throw new AzFuncSystemError( + `A 'number' type was expected instead of a '${typeof nullable}' type. Cannot parse value of '${propertyName}'.` + ); + } + + return undefined; +} + +/** + * Converts string input to an 'INullableString' to be sent through the RPC layer. + * Input that is not a string but is also not null or undefined logs a function app level warning. + * @param nullable Input to be converted to an INullableString if it is a valid string + * @param propertyName The name of the property that the caller will assign the output to. Used for debugging. + */ +export function toRpcString(nullable: string | undefined, propertyName: string): string { + if (typeof nullable === 'string') { + return nullable; + } + + if (isDefined(nullable)) { + throw new AzFuncSystemError( + `A 'string' type was expected instead of a '${typeof nullable}' type. Cannot parse value of '${propertyName}'.` + ); + } + + return ''; +} + +/** + * Converts string input to an 'INullableString' to be sent through the RPC layer. + * Input that is not a string but is also not null or undefined logs a function app level warning. + * @param nullable Input to be converted to an INullableString if it is a valid string + * @param propertyName The name of the property that the caller will assign the output to. Used for debugging. + */ +export function toNullableString(nullable: string | undefined, propertyName: string): undefined | RpcNullableString { + if (typeof nullable === 'string') { + return { + value: nullable, + }; + } + + if (isDefined(nullable)) { + throw new AzFuncSystemError( + `A 'string' type was expected instead of a '${typeof nullable}' type. Cannot parse value of '${propertyName}'.` + ); + } + + return undefined; +} + +/** + * Converts Date or number input to an 'INullableTimestamp' to be sent through the RPC layer. + * Input that is not a Date or number but is also not null or undefined logs a function app level warning. 
+ * @param nullable Input to be converted to an INullableTimestamp if it is valid input + * @param propertyName The name of the property that the caller will assign the output to. Used for debugging. + */ +export function toNullableTimestamp( + dateTime: Date | number | undefined, + propertyName: string +): RpcNullableTimestamp | undefined { + if (isDefined(dateTime)) { + try { + const timeInMilliseconds = typeof dateTime === 'number' ? dateTime : dateTime.getTime(); + + if (timeInMilliseconds && timeInMilliseconds >= 0) { + return { + value: { + seconds: Math.round(timeInMilliseconds / 1000), + }, + }; + } + } catch { + throw new AzFuncSystemError( + `A 'number' or 'Date' input was expected instead of a '${typeof dateTime}'. Cannot parse value of '${propertyName}'.` + ); + } + } + return undefined; +} diff --git a/node_modules/@azure/functions/src/converters/toRpcTypedData.ts b/node_modules/@azure/functions/src/converters/toRpcTypedData.ts new file mode 100644 index 00000000..bd4f83fc --- /dev/null +++ b/node_modules/@azure/functions/src/converters/toRpcTypedData.ts @@ -0,0 +1,28 @@ +// Copyright (c) .NET Foundation. All rights reserved. +// Licensed under the MIT License. + +import { RpcTypedData } from '@azure/functions-core'; + +export function toRpcTypedData(data: unknown): RpcTypedData | null | undefined { + if (data === null || data === undefined) { + return data; + } else if (typeof data === 'string') { + return { string: data }; + } else if (Buffer.isBuffer(data)) { + return { bytes: data }; + } else if (ArrayBuffer.isView(data)) { + const bytes = new Uint8Array(data.buffer, data.byteOffset, data.byteLength); + return { bytes: bytes }; + } else if (data instanceof ArrayBuffer) { + const bytes = new Uint8Array(data); + return { bytes: bytes }; + } else if (typeof data === 'number') { + if (Number.isInteger(data)) { + return { int: data }; + } else { + return { double: data }; + } + } else { + return { json: JSON.stringify(data) }; + } +} diff --git a/node_modules/@azure/functions/src/errors.ts b/node_modules/@azure/functions/src/errors.ts new file mode 100644 index 00000000..7f8192a4 --- /dev/null +++ b/node_modules/@azure/functions/src/errors.ts @@ -0,0 +1,69 @@ +// Copyright (c) .NET Foundation. All rights reserved. +// Licensed under the MIT License. 
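// [Editorial example] A minimal usage sketch of the error helpers defined in this
// file (ensureErrorType, trySetErrorMessage); it is not part of the library source
// and the thrown value is hypothetical.
//
//     try {
//         throw 'connection reset';                    // a non-Error throw
//     } catch (err) {
//         const error = ensureErrorType(err);          // normalized to Error('connection reset')
//         trySetErrorMessage(error, `retry failed: ${error.message}`);
//         console.error(error.stack ?? error.message);
//     }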
+
+export interface AzFuncError {
+    /**
+     * System errors can be tracked in our telemetry
+     * User errors cannot be tracked in our telemetry because they could have user information (users can still track it themselves in their app insights resource)
+     */
+    isAzureFunctionsSystemError: boolean;
+}
+
+export interface ValidatedError extends Error, Partial<AzFuncError> {
+    /**
+     * Use `trySetErrorMessage` to set the error message
+     */
+    readonly message: string;
+}
+
+export class AzFuncSystemError extends Error {
+    isAzureFunctionsSystemError = true;
+}
+
+export class AzFuncTypeError extends TypeError implements AzFuncError {
+    isAzureFunctionsSystemError = true;
+}
+
+export class AzFuncRangeError extends RangeError implements AzFuncError {
+    isAzureFunctionsSystemError = true;
+}
+
+export class ReadOnlyError extends AzFuncTypeError {
+    constructor(propertyName: string) {
+        super(`Cannot assign to read only property '${propertyName}'`);
+    }
+}
+
+export function ensureErrorType(err: unknown): ValidatedError {
+    if (err instanceof Error) {
+        return err;
+    } else {
+        let message: string;
+        if (err === undefined || err === null) {
+            message = 'Unknown error';
+        } else if (typeof err === 'string') {
+            message = err;
+        } else if (typeof err === 'object') {
+            message = JSON.stringify(err);
+        } else {
+            message = String(err);
+        }
+        return new Error(message);
+    }
+}
+
+export function trySetErrorMessage(err: Error, message: string): void {
+    try {
+        err.message = message;
+    } catch {
+        // If we can't set the message, we'll keep the error as is
+    }
+}
+
+/**
+ * This is mostly for callbacks where `null` or `undefined` indicates there is no error
+ * By contrast, anything thrown/caught is assumed to be an error regardless of what it is
+ */
+export function isError(err: unknown): boolean {
+    return err !== null && err !== undefined;
+}
diff --git a/node_modules/@azure/functions/src/hooks/AppStartContext.ts b/node_modules/@azure/functions/src/hooks/AppStartContext.ts
new file mode 100644
index 00000000..dda2b77c
--- /dev/null
+++ b/node_modules/@azure/functions/src/hooks/AppStartContext.ts
@@ -0,0 +1,7 @@
+// Copyright (c) .NET Foundation. All rights reserved.
+// Licensed under the MIT License.
+
+import * as types from '@azure/functions';
+import { HookContext } from './HookContext';
+
+export class AppStartContext extends HookContext implements types.AppStartContext {}
diff --git a/node_modules/@azure/functions/src/hooks/AppTerminateContext.ts b/node_modules/@azure/functions/src/hooks/AppTerminateContext.ts
new file mode 100644
index 00000000..84e3694f
--- /dev/null
+++ b/node_modules/@azure/functions/src/hooks/AppTerminateContext.ts
@@ -0,0 +1,7 @@
+// Copyright (c) .NET Foundation. All rights reserved.
+// Licensed under the MIT License.
+
+import * as types from '@azure/functions';
+import { HookContext } from './HookContext';
+
+export class AppTerminateContext extends HookContext implements types.AppTerminateContext {}
diff --git a/node_modules/@azure/functions/src/hooks/HookContext.ts b/node_modules/@azure/functions/src/hooks/HookContext.ts
new file mode 100644
index 00000000..c2794cad
--- /dev/null
+++ b/node_modules/@azure/functions/src/hooks/HookContext.ts
@@ -0,0 +1,23 @@
+// Copyright (c) .NET Foundation. All rights reserved.
+// Licensed under the MIT License.
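// [Editorial example] An illustrative sketch, not library source, of the typical use
// of `hookData` on the HookContext class below: stash state in a pre-invocation hook
// and read it back post-invocation. Registration goes through the public `app.hook.*`
// API (see hooks/registerHook.ts later in this diff); the property name `startedAt`
// is hypothetical.
//
//     import { app } from '@azure/functions';
//
//     app.hook.preInvocation((context) => {
//         context.hookData.startedAt = Date.now();
//     });
//
//     app.hook.postInvocation((context) => {
//         const startedAt = context.hookData.startedAt as number;
//         context.invocationContext.info(`invocation took ${Date.now() - startedAt} ms`);
//     });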
+
+import * as types from '@azure/functions';
+import { ReadOnlyError } from '../errors';
+import { nonNullProp } from '../utils/nonNull';
+
+export class HookContext implements types.HookContext {
+    #init: types.HookContextInit;
+
+    constructor(init?: types.HookContextInit) {
+        this.#init = init ?? {};
+        this.#init.hookData ??= {};
+    }
+
+    get hookData(): Record<string, unknown> {
+        return nonNullProp(this.#init, 'hookData');
+    }
+
+    set hookData(_value: unknown) {
+        throw new ReadOnlyError('hookData');
+    }
+}
diff --git a/node_modules/@azure/functions/src/hooks/InvocationHookContext.ts b/node_modules/@azure/functions/src/hooks/InvocationHookContext.ts
new file mode 100644
index 00000000..13454d1e
--- /dev/null
+++ b/node_modules/@azure/functions/src/hooks/InvocationHookContext.ts
@@ -0,0 +1,35 @@
+// Copyright (c) .NET Foundation. All rights reserved.
+// Licensed under the MIT License.
+
+import * as types from '@azure/functions';
+import { ReadOnlyError } from '../errors';
+import { InvocationContext } from '../InvocationContext';
+import { nonNullProp } from '../utils/nonNull';
+import { HookContext } from './HookContext';
+
+export class InvocationHookContext extends HookContext implements types.InvocationHookContext {
+    #init: types.InvocationHookContextInit;
+
+    constructor(init?: types.InvocationHookContextInit) {
+        super(init);
+        this.#init = init ?? {};
+        this.#init.inputs ??= [];
+        this.#init.invocationContext ??= new InvocationContext();
+    }
+
+    get invocationContext(): types.InvocationContext {
+        return nonNullProp(this.#init, 'invocationContext');
+    }
+
+    set invocationContext(_value: types.InvocationContext) {
+        throw new ReadOnlyError('invocationContext');
+    }
+
+    get inputs(): unknown[] {
+        return nonNullProp(this.#init, 'inputs');
+    }
+
+    set inputs(value: unknown[]) {
+        this.#init.inputs = value;
+    }
+}
diff --git a/node_modules/@azure/functions/src/hooks/LogHookContext.ts b/node_modules/@azure/functions/src/hooks/LogHookContext.ts
new file mode 100644
index 00000000..1f5e2ef5
--- /dev/null
+++ b/node_modules/@azure/functions/src/hooks/LogHookContext.ts
@@ -0,0 +1,51 @@
+// Copyright (c) .NET Foundation. All rights reserved.
+// Licensed under the MIT License.
+
+import * as types from '@azure/functions';
+import { ReadOnlyError } from '../errors';
+import { nonNullProp } from '../utils/nonNull';
+import { HookContext } from './HookContext';
+
+export class LogHookContext extends HookContext implements types.LogHookContext {
+    #init: types.LogHookContextInit;
+
+    constructor(init?: types.LogHookContextInit) {
+        super(init);
+        this.#init = init ??
{}; + this.#init.level ??= 'information'; + this.#init.message ??= 'unknown'; + this.#init.category ??= 'user'; + } + + get level(): types.LogLevel { + return nonNullProp(this.#init, 'level'); + } + + set level(value: types.LogLevel) { + this.#init.level = value; + } + + get message(): string { + return nonNullProp(this.#init, 'message'); + } + + set message(value: string) { + this.#init.message = value; + } + + get category(): types.LogCategory { + return nonNullProp(this.#init, 'category'); + } + + set category(_value: types.LogCategory) { + throw new ReadOnlyError('category'); + } + + get invocationContext(): types.InvocationContext | undefined { + return this.#init.invocationContext; + } + + set invocationContext(_value: types.InvocationContext | undefined) { + throw new ReadOnlyError('invocationContext'); + } +} diff --git a/node_modules/@azure/functions/src/hooks/PostInvocationContext.ts b/node_modules/@azure/functions/src/hooks/PostInvocationContext.ts new file mode 100644 index 00000000..889532fc --- /dev/null +++ b/node_modules/@azure/functions/src/hooks/PostInvocationContext.ts @@ -0,0 +1,30 @@ +// Copyright (c) .NET Foundation. All rights reserved. +// Licensed under the MIT License. + +import * as types from '@azure/functions'; +import { InvocationHookContext } from './InvocationHookContext'; + +export class PostInvocationContext extends InvocationHookContext implements types.PostInvocationContext { + #init: types.PostInvocationContextInit; + + constructor(init?: types.PostInvocationContextInit) { + super(init); + this.#init = init ?? {}; + } + + get result(): unknown { + return this.#init.result; + } + + set result(value: unknown) { + this.#init.result = value; + } + + get error(): unknown { + return this.#init.error; + } + + set error(value: unknown) { + this.#init.error = value; + } +} diff --git a/node_modules/@azure/functions/src/hooks/PreInvocationContext.ts b/node_modules/@azure/functions/src/hooks/PreInvocationContext.ts new file mode 100644 index 00000000..5a7f0be5 --- /dev/null +++ b/node_modules/@azure/functions/src/hooks/PreInvocationContext.ts @@ -0,0 +1,24 @@ +// Copyright (c) .NET Foundation. All rights reserved. +// Licensed under the MIT License. + +import * as types from '@azure/functions'; +import { nonNullProp } from '../utils/nonNull'; +import { InvocationHookContext } from './InvocationHookContext'; + +export class PreInvocationContext extends InvocationHookContext implements types.PreInvocationContext { + #init: types.PreInvocationContextInit; + + constructor(init?: types.PreInvocationContextInit) { + super(init); + this.#init = init ?? {}; + this.#init.functionCallback ??= () => {}; + } + + get functionHandler(): types.FunctionHandler { + return nonNullProp(this.#init, 'functionCallback'); + } + + set functionHandler(value: types.FunctionHandler) { + this.#init.functionCallback = value; + } +} diff --git a/node_modules/@azure/functions/src/hooks/registerHook.ts b/node_modules/@azure/functions/src/hooks/registerHook.ts new file mode 100644 index 00000000..104b3ae8 --- /dev/null +++ b/node_modules/@azure/functions/src/hooks/registerHook.ts @@ -0,0 +1,74 @@ +// Copyright (c) .NET Foundation. All rights reserved. +// Licensed under the MIT License. 
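// [Editorial example] An illustrative sketch, not library source, of registering
// lifecycle hooks with the functions defined in this file (exposed publicly as
// `app.hook.*`). Each registration returns a Disposable that unregisters the hook;
// the log messages are placeholders.
//
//     import { app } from '@azure/functions';
//
//     const onStart = app.hook.appStart(() => {
//         console.log('app starting');
//     });
//
//     app.hook.appTerminate(() => {
//         console.log('app shutting down');
//     });
//
//     onStart.dispose(); // unregister the appStart hook when it is no longer needed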
+ +import { + AppStartHandler, + AppTerminateHandler, + LogHookHandler, + PostInvocationHandler, + PreInvocationHandler, +} from '@azure/functions'; +import * as coreTypes from '@azure/functions-core'; +import { AzFuncSystemError, ensureErrorType } from '../errors'; +import { Disposable } from '../utils/Disposable'; +import { tryGetCoreApiLazy } from '../utils/tryGetCoreApiLazy'; +import { AppStartContext } from './AppStartContext'; +import { AppTerminateContext } from './AppTerminateContext'; +import { LogHookContext } from './LogHookContext'; +import { PostInvocationContext } from './PostInvocationContext'; +import { PreInvocationContext } from './PreInvocationContext'; + +function registerHook(hookName: string, callback: coreTypes.HookCallback): coreTypes.Disposable { + const coreApi = tryGetCoreApiLazy(); + if (!coreApi) { + console.warn( + `WARNING: Skipping call to register ${hookName} hook because the "@azure/functions" package is in test mode.` + ); + return new Disposable(() => { + console.warn( + `WARNING: Skipping call to dispose ${hookName} hook because the "@azure/functions" package is in test mode.` + ); + }); + } else { + return coreApi.registerHook(hookName, callback); + } +} + +export function appStart(handler: AppStartHandler): Disposable { + return registerHook('appStart', (coreContext) => { + return handler(new AppStartContext(coreContext)); + }); +} + +export function appTerminate(handler: AppTerminateHandler): Disposable { + return registerHook('appTerminate', (coreContext) => { + return handler(new AppTerminateContext(coreContext)); + }); +} + +export function preInvocation(handler: PreInvocationHandler): Disposable { + return registerHook('preInvocation', (coreContext) => { + return handler(new PreInvocationContext(coreContext)); + }); +} + +export function postInvocation(handler: PostInvocationHandler): Disposable { + return registerHook('postInvocation', (coreContext) => { + return handler(new PostInvocationContext(coreContext)); + }); +} + +export function log(handler: LogHookHandler): Disposable { + try { + return registerHook('log', (coreContext) => { + return handler(new LogHookContext(coreContext)); + }); + } catch (err) { + const error = ensureErrorType(err); + if (error.name === 'RangeError' && error.isAzureFunctionsSystemError) { + throw new AzFuncSystemError(`Log hooks require Azure Functions Host v4.34 or higher.`); + } else { + throw err; + } + } +} diff --git a/node_modules/@azure/functions/src/http/HttpRequest.ts b/node_modules/@azure/functions/src/http/HttpRequest.ts new file mode 100644 index 00000000..a775a183 --- /dev/null +++ b/node_modules/@azure/functions/src/http/HttpRequest.ts @@ -0,0 +1,165 @@ +// Copyright (c) .NET Foundation. All rights reserved. +// Licensed under the MIT License. 
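// [Editorial example] An illustrative sketch, not library source, of consuming the
// HttpRequest class below from an HTTP-triggered function registered with the public
// `app.http` API. The route, query parameter, and body shape are hypothetical.
//
//     import { app, HttpRequest, HttpResponseInit, InvocationContext } from '@azure/functions';
//
//     app.http('greet', {
//         methods: ['POST'],
//         route: 'greet/{audience}',
//         handler: async (request: HttpRequest, context: InvocationContext): Promise<HttpResponseInit> => {
//             const audience = request.params.audience;          // route parameter
//             const loud = request.query.get('loud') === 'true'; // query string
//             const body = (await request.json()) as { name?: string };
//             const greeting = `Hello ${body.name ?? 'stranger'} and ${audience}!`;
//             return { body: loud ? greeting.toUpperCase() : greeting };
//         },
//     });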
+
+import * as types from '@azure/functions';
+import { HttpRequestParams, HttpRequestUser } from '@azure/functions';
+import { RpcHttpData, RpcTypedData } from '@azure/functions-core';
+import { Blob } from 'buffer';
+import { IncomingMessage } from 'http';
+import * as stream from 'stream';
+import { ReadableStream } from 'stream/web';
+import { FormData, Headers, HeadersInit, Request as uRequest } from 'undici';
+import { URLSearchParams } from 'url';
+import { fromNullableMapping } from '../converters/fromRpcNullable';
+import { fromRpcTypedData } from '../converters/fromRpcTypedData';
+import { AzFuncSystemError } from '../errors';
+import { isDefined, nonNullProp } from '../utils/nonNull';
+import { extractHttpUserFromHeaders } from './extractHttpUserFromHeaders';
+
+interface InternalHttpRequestInit extends RpcHttpData {
+    undiciRequest?: uRequest;
+}
+
+export class HttpRequest implements types.HttpRequest {
+    readonly query: URLSearchParams;
+    readonly params: HttpRequestParams;
+
+    #cachedUser?: HttpRequestUser | null;
+    #uReq: uRequest;
+    #init: InternalHttpRequestInit;
+
+    constructor(init: InternalHttpRequestInit) {
+        this.#init = init;
+
+        let uReq = init.undiciRequest;
+        if (!uReq) {
+            const url = nonNullProp(init, 'url');
+
+            let body: Buffer | string | undefined;
+            if (init.body?.bytes) {
+                body = Buffer.from(init.body?.bytes);
+            } else if (init.body?.string) {
+                body = init.body.string;
+            }
+
+            uReq = new uRequest(url, {
+                body,
+                method: nonNullProp(init, 'method'),
+                headers: fromNullableMapping(init.nullableHeaders, init.headers),
+            });
+        }
+        this.#uReq = uReq;
+
+        if (init.nullableQuery || init.query) {
+            this.query = new URLSearchParams(fromNullableMapping(init.nullableQuery, init.query));
+        } else {
+            this.query = new URL(this.#uReq.url).searchParams;
+        }
+
+        this.params = fromNullableMapping(init.nullableParams, init.params);
+    }
+
+    get url(): string {
+        return this.#uReq.url;
+    }
+
+    get method(): string {
+        return this.#uReq.method;
+    }
+
+    get headers(): Headers {
+        return this.#uReq.headers;
+    }
+
+    get user(): HttpRequestUser | null {
+        if (this.#cachedUser === undefined) {
+            this.#cachedUser = extractHttpUserFromHeaders(this.headers);
+        }
+
+        return this.#cachedUser;
+    }
+
+    get body(): ReadableStream | null {
+        return this.#uReq.body;
+    }
+
+    get bodyUsed(): boolean {
+        return this.#uReq.bodyUsed;
+    }
+
+    async arrayBuffer(): Promise<ArrayBuffer> {
+        return this.#uReq.arrayBuffer();
+    }
+
+    async blob(): Promise<Blob> {
+        return this.#uReq.blob();
+    }
+
+    async formData(): Promise<FormData> {
+        return this.#uReq.formData();
+    }
+
+    async json(): Promise<unknown> {
+        return this.#uReq.json();
+    }
+
+    async text(): Promise<string> {
+        return this.#uReq.text();
+    }
+
+    clone(): HttpRequest {
+        const newInit = structuredClone(this.#init);
+        newInit.undiciRequest = this.#uReq.clone();
+        return new HttpRequest(newInit);
+    }
+}
+
+export function createStreamRequest(
+    proxyReq: IncomingMessage,
+    triggerMetadata: Record<string, RpcTypedData>
+): HttpRequest {
+    const hostHeaderName = 'x-forwarded-host';
+    const protoHeaderName = 'x-forwarded-proto';
+    const host = proxyReq.headers[hostHeaderName];
+    const proto = proxyReq.headers[protoHeaderName];
+    if (typeof host !== 'string' || typeof proto !== 'string') {
+        throw new AzFuncSystemError(`Expected headers "${hostHeaderName}" and "${protoHeaderName}" to be set.`);
+    }
+    const url = `${proto}://${host}${nonNullProp(proxyReq, 'url')}`;
+
+    let body: stream.Readable | undefined;
+    const lowerMethod = proxyReq.method?.toLowerCase();
+    if (lowerMethod !== 'get' && lowerMethod !== 'head') {
+        body = proxyReq;
+    }
+
+    // Get headers and params from trigger metadata
+    // See here for more info: https://github.com/Azure/azure-functions-host/issues/9840
+    // NOTE: We ignore query info because it has this bug: https://github.com/Azure/azure-functions-nodejs-library/issues/168
+    const { Query: rpcQueryIgnored, Headers: rpcHeaders, ...rpcParams } = triggerMetadata;
+
+    let headers: HeadersInit | undefined;
+    const headersData = fromRpcTypedData(rpcHeaders);
+    if (typeof headersData === 'object' && isDefined(headersData)) {
+        headers = <HeadersInit>headersData;
+    }
+
+    const uReq = new uRequest(url, {
+        body,
+        duplex: 'half',
+        method: nonNullProp(proxyReq, 'method'),
+        headers,
+    });
+
+    const params: Record<string, string> = {};
+    for (const [key, rpcValue] of Object.entries(rpcParams)) {
+        if (isDefined(rpcValue.string)) {
+            params[key] = rpcValue.string;
+        }
+    }
+
+    return new HttpRequest({
+        undiciRequest: uReq,
+        params,
+    });
+}
diff --git a/node_modules/@azure/functions/src/http/HttpResponse.ts b/node_modules/@azure/functions/src/http/HttpResponse.ts
new file mode 100644
index 00000000..6a2282de
--- /dev/null
+++ b/node_modules/@azure/functions/src/http/HttpResponse.ts
@@ -0,0 +1,82 @@
+// Copyright (c) .NET Foundation. All rights reserved.
+// Licensed under the MIT License.
+
+import * as types from '@azure/functions';
+import { HttpResponseInit } from '@azure/functions';
+import { Blob } from 'buffer';
+import { ReadableStream } from 'stream/web';
+import { FormData, Headers, Response as uResponse, ResponseInit as uResponseInit } from 'undici';
+import { isDefined } from '../utils/nonNull';
+
+interface InternalHttpResponseInit extends HttpResponseInit {
+    undiciResponse?: uResponse;
+}
+
+export class HttpResponse implements types.HttpResponse {
+    readonly cookies: types.Cookie[];
+    readonly enableContentNegotiation: boolean;
+
+    #uRes: uResponse;
+    #init: InternalHttpResponseInit;
+
+    constructor(init?: InternalHttpResponseInit) {
+        init ??= {};
+        this.#init = init;
+
+        if (init.undiciResponse) {
+            this.#uRes = init.undiciResponse;
+        } else {
+            const uResInit: uResponseInit = { status: init.status, headers: init.headers };
+            if (isDefined(init.jsonBody)) {
+                this.#uRes = uResponse.json(init.jsonBody, uResInit);
+            } else {
+                this.#uRes = new uResponse(init.body, uResInit);
+            }
+        }
+
+        this.cookies = init.cookies ?? [];
+        this.enableContentNegotiation = !!init.enableContentNegotiation;
+    }
+
+    get status(): number {
+        return this.#uRes.status;
+    }
+
+    get headers(): Headers {
+        return this.#uRes.headers;
+    }
+
+    get body(): ReadableStream | null {
+        return this.#uRes.body;
+    }
+
+    get bodyUsed(): boolean {
+        return this.#uRes.bodyUsed;
+    }
+
+    async arrayBuffer(): Promise<ArrayBuffer> {
+        return this.#uRes.arrayBuffer();
+    }
+
+    async blob(): Promise<Blob> {
+        return this.#uRes.blob();
+    }
+
+    async formData(): Promise<FormData> {
+        return this.#uRes.formData();
+    }
+
+    async json(): Promise<unknown> {
+        return this.#uRes.json();
+    }
+
+    async text(): Promise<string> {
+        return this.#uRes.text();
+    }
+
+    clone(): HttpResponse {
+        const newInit = structuredClone(this.#init);
+        newInit.undiciResponse = this.#uRes.clone();
+        return new HttpResponse(newInit);
+    }
+}
diff --git a/node_modules/@azure/functions/src/http/extractHttpUserFromHeaders.ts b/node_modules/@azure/functions/src/http/extractHttpUserFromHeaders.ts
new file mode 100644
index 00000000..a2b24a22
--- /dev/null
+++ b/node_modules/@azure/functions/src/http/extractHttpUserFromHeaders.ts
@@ -0,0 +1,38 @@
+// Copyright (c) .NET Foundation. All rights reserved.
+// Licensed under the MIT License.
+
+import { HttpRequestUser } from '@azure/functions';
+import { Headers } from 'undici';
+import { nonNullValue } from '../utils/nonNull';
+
+/* grandfathered in. Should fix when possible */
+/* eslint-disable @typescript-eslint/no-unsafe-assignment, @typescript-eslint/no-unsafe-argument, @typescript-eslint/no-unsafe-member-access */
+
+export function extractHttpUserFromHeaders(headers: Headers): HttpRequestUser | null {
+    let user: HttpRequestUser | null = null;
+
+    const clientPrincipal = headers.get('x-ms-client-principal');
+    if (clientPrincipal) {
+        const claimsPrincipalData = JSON.parse(Buffer.from(clientPrincipal, 'base64').toString('utf-8'));
+
+        if (claimsPrincipalData['identityProvider']) {
+            user = {
+                type: 'StaticWebApps',
+                id: claimsPrincipalData['userId'],
+                username: claimsPrincipalData['userDetails'],
+                identityProvider: claimsPrincipalData['identityProvider'],
+                claimsPrincipalData,
+            };
+        } else {
+            user = {
+                type: 'AppService',
+                id: nonNullValue(headers.get('x-ms-client-principal-id'), 'user-id'),
+                username: nonNullValue(headers.get('x-ms-client-principal-name'), 'user-name'),
+                identityProvider: nonNullValue(headers.get('x-ms-client-principal-idp'), 'user-idp'),
+                claimsPrincipalData,
+            };
+        }
+    }
+
+    return user;
+}
diff --git a/node_modules/@azure/functions/src/http/httpProxy.ts b/node_modules/@azure/functions/src/http/httpProxy.ts
new file mode 100644
index 00000000..b1c683b2
--- /dev/null
+++ b/node_modules/@azure/functions/src/http/httpProxy.ts
@@ -0,0 +1,173 @@
+// Copyright (c) .NET Foundation. All rights reserved.
+// Licensed under the MIT License.
+
+import { serialize as serializeCookie } from 'cookie';
+import { EventEmitter } from 'events';
+import * as http from 'http';
+import * as net from 'net';
+import { AzFuncSystemError, ensureErrorType } from '../errors';
+import { nonNullProp } from '../utils/nonNull';
+import { workerSystemLog } from '../utils/workerSystemLog';
+import { HttpResponse } from './HttpResponse';
+
+const requests: Record<string, http.IncomingMessage> = {};
+const responses: Record<string, http.ServerResponse> = {};
+const minPort = 55000;
+const maxPort = 55025;
+
+const invocRequestEmitter = new EventEmitter();
+
+export async function waitForProxyRequest(invocationId: string): Promise<http.IncomingMessage> {
+    return new Promise((resolve, _reject) => {
+        const req = requests[invocationId];
+        if (req) {
+            resolve(req);
+            delete requests[invocationId];
+        } else {
+            invocRequestEmitter.once(invocationId, () => {
+                const req = requests[invocationId];
+                if (req) {
+                    resolve(req);
+                    delete requests[invocationId];
+                }
+            });
+        }
+    });
+}
+
+const invocationIdHeader = 'x-ms-invocation-id';
+export async function sendProxyResponse(invocationId: string, userRes: HttpResponse): Promise<void> {
+    const proxyRes = nonNullProp(responses, invocationId);
+    delete responses[invocationId];
+    for (const [key, val] of userRes.headers.entries()) {
+        proxyRes.setHeader(key, val);
+    }
+    proxyRes.setHeader(invocationIdHeader, invocationId);
+    proxyRes.statusCode = userRes.status;
+
+    if (userRes.cookies.length > 0) {
+        setCookies(userRes, proxyRes);
+    }
+
+    if (userRes.body) {
+        for await (const chunk of userRes.body.values()) {
+            proxyRes.write(chunk);
+        }
+    }
+    proxyRes.end();
+}
+
+function setCookies(userRes: HttpResponse, proxyRes: http.ServerResponse): void {
+    const serializedCookies: string[] = userRes.cookies.map((c) => {
+        let sameSite: true | false | 'lax' | 'strict' | 'none' | undefined;
+        switch (c.sameSite) {
+            case 'Lax':
+                sameSite = 'lax';
+                break;
+            case 'None':
+                sameSite = 'none';
+                break;
+            case 'Strict':
+                sameSite = 'strict';
+                break;
+            default:
+                sameSite = c.sameSite;
+        }
+        return serializeCookie(c.name, c.value, {
+            domain: c.domain,
+            path: c.path,
+            expires: typeof c.expires === 'number' ? new Date(c.expires) : c.expires,
+            secure: c.secure,
+            httpOnly: c.httpOnly,
+            sameSite: sameSite,
+            maxAge: c.maxAge,
+        });
+    });
+    proxyRes.setHeader('Set-Cookie', serializedCookies);
+}
+
+export async function setupHttpProxy(): Promise<string> {
+    return new Promise((resolve, reject) => {
+        const server = http.createServer();
+
+        server.on('request', (req, res) => {
+            const invocationId = req.headers[invocationIdHeader];
+            if (typeof invocationId === 'string') {
+                requests[invocationId] = req;
+                responses[invocationId] = res;
+                invocRequestEmitter.emit(invocationId);
+            } else {
+                workerSystemLog('error', `Http proxy request missing header ${invocationIdHeader}`);
+            }
+        });
+
+        server.on('error', (err) => {
+            err = ensureErrorType(err);
+            workerSystemLog('error', `Http proxy error: ${err.stack || err.message}`);
+        });
+
+        server.listen(() => {
+            const address = server.address();
+            // Valid address has been created
+            if (address !== null && typeof address === 'object') {
+                if (address.port === 0) {
+                    // Auto-assigned port is 0, find and bind to an open port
+                    workerSystemLog('debug', `Port 0 assigned. Finding open port.`);
+                    findOpenPort((openPort: number) => {
+                        // Close the server and re-listen on the found open port
+                        server.close();
+                        server.listen(openPort, () => {
+                            workerSystemLog('debug', `Server is now listening on found open port: ${openPort}`);
+                        });
+                        resolve(`http://localhost:${openPort}/`);
+                    });
+                } else {
+                    // Auto-assigned port is not 0
+                    workerSystemLog('debug', `Auto-assigned port is valid. Port: ${address.port}`);
+                    resolve(`http://localhost:${address.port}/`);
+                }
+            } else {
+                reject(new AzFuncSystemError('Unexpected server address during http proxy setup'));
+            }
+        });
+
+        server.on('close', () => {
+            workerSystemLog('information', 'Http proxy closing');
+        });
+    });
+}
+
+// Function to find an open port starting from a specified port
+function findOpenPort(callback: (port: number) => void): void {
+    const server = net.createServer();
+
+    function tryPort(port: number) {
+        if (port > maxPort) {
+            // If we've reached the maximum port, throw an error
+            throw new AzFuncSystemError(
+                `No available ports found between ${minPort} and ${maxPort}. To enable HTTP streaming, please open a port in this range.`
+            );
+        }
+
+        server.once('error', () => {
+            // If the port is unavailable, increment and try the next one
+            tryPort(port + 1);
+        });
+
+        // If the port is available, return it
+        server.once('listening', () => {
+            const address = server.address();
+            if (address !== null && typeof address === 'object') {
+                port = address.port;
+                server.close();
+                callback(port);
+            }
+        });
+
+        // Try binding to the given port
+        server.listen(port);
+    }
+
+    // Start trying from the specified starting port
+    tryPort(minPort);
}
diff --git a/node_modules/@azure/functions/src/index.ts b/node_modules/@azure/functions/src/index.ts
new file mode 100644
index 00000000..0b2409f7
--- /dev/null
+++ b/node_modules/@azure/functions/src/index.ts
@@ -0,0 +1,28 @@
+// Copyright (c) .NET Foundation. All rights reserved.
+// Licensed under the MIT License.
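// [Editorial example] An illustrative sketch, not library source, of the `user`
// getter backed by extractHttpUserFromHeaders above: when Azure Static Web Apps or
// App Service auth injects the x-ms-client-principal headers, `request.user` is
// populated, otherwise it is null. The function name is hypothetical.
//
//     import { app } from '@azure/functions';
//
//     app.http('whoami', {
//         authLevel: 'anonymous',
//         handler: async (request) => {
//             const user = request.user; // null when no principal headers are present
//             if (!user) {
//                 return { status: 401, body: 'not signed in' };
//             }
//             return { jsonBody: { id: user.id, name: user.username, idp: user.identityProvider } };
//         },
//     });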
+
+export * as app from './app';
+export { AppStartContext } from './hooks/AppStartContext';
+export { AppTerminateContext } from './hooks/AppTerminateContext';
+export { HookContext } from './hooks/HookContext';
+export { InvocationHookContext } from './hooks/InvocationHookContext';
+export { LogHookContext } from './hooks/LogHookContext';
+export { PostInvocationContext } from './hooks/PostInvocationContext';
+export { PreInvocationContext } from './hooks/PreInvocationContext';
+export { HttpRequest } from './http/HttpRequest';
+export { HttpResponse } from './http/HttpResponse';
+export * as input from './input';
+export { InvocationContext } from './InvocationContext';
+export * as output from './output';
+export * as trigger from './trigger';
+export { Disposable } from './utils/Disposable';
+
+export enum SqlChangeOperation {
+    Insert = 0,
+    Update = 1,
+    Delete = 2,
+}
+
+export enum MySqlChangeOperation {
+    Update = 0,
+}
diff --git a/node_modules/@azure/functions/src/input.ts b/node_modules/@azure/functions/src/input.ts
new file mode 100644
index 00000000..3e238bec
--- /dev/null
+++ b/node_modules/@azure/functions/src/input.ts
@@ -0,0 +1,79 @@
+// Copyright (c) .NET Foundation. All rights reserved.
+// Licensed under the MIT License.
+
+import {
+    CosmosDBInput,
+    CosmosDBInputOptions,
+    FunctionInput,
+    GenericInputOptions,
+    MySqlInput,
+    MySqlInputOptions,
+    SqlInput,
+    SqlInputOptions,
+    StorageBlobInput,
+    StorageBlobInputOptions,
+    TableInput,
+    TableInputOptions,
+    WebPubSubConnectionInput,
+    WebPubSubConnectionInputOptions,
+    WebPubSubContextInput,
+    WebPubSubContextInputOptions,
+} from '@azure/functions';
+import { addBindingName } from './addBindingName';
+
+export function storageBlob(options: StorageBlobInputOptions): StorageBlobInput {
+    return addInputBindingName({
+        ...options,
+        type: 'blob',
+    });
+}
+
+export function table(options: TableInputOptions): TableInput {
+    return addInputBindingName({
+        ...options,
+        type: 'table',
+    });
+}
+
+export function cosmosDB(options: CosmosDBInputOptions): CosmosDBInput {
+    return addInputBindingName({
+        ...options,
+        type: 'cosmosDB',
+    });
+}
+
+export function sql(options: SqlInputOptions): SqlInput {
+    return addInputBindingName({
+        ...options,
+        type: 'sql',
+    });
+}
+
+export function mySql(options: MySqlInputOptions): MySqlInput {
+    return addInputBindingName({
+        ...options,
+        type: 'mysql',
+    });
+}
+
+export function webPubSubConnection(options: WebPubSubConnectionInputOptions): WebPubSubConnectionInput {
+    return addInputBindingName({
+        ...options,
+        type: 'webPubSubConnection',
+    });
+}
+
+export function webPubSubContext(options: WebPubSubContextInputOptions): WebPubSubContextInput {
+    return addInputBindingName({
+        ...options,
+        type: 'webPubSubContext',
+    });
+}
+
+export function generic(options: GenericInputOptions): FunctionInput {
+    return addInputBindingName(options);
+}
+
+function addInputBindingName<T extends { type: string; name?: string }>(binding: T): T & { name: string } {
+    return addBindingName(binding, 'Input');
+}
diff --git a/node_modules/@azure/functions/src/output.ts b/node_modules/@azure/functions/src/output.ts
new file mode 100644
index 00000000..f79fb5d1
--- /dev/null
+++ b/node_modules/@azure/functions/src/output.ts
@@ -0,0 +1,124 @@
+// Copyright (c) .NET Foundation. All rights reserved.
+// Licensed under the MIT License.
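// [Editorial example] An illustrative sketch, not library source, wiring the input
// helpers above and the output helpers below into one function via
// extraInputs/extraOutputs. The blob path, queue name, and connection setting are
// hypothetical.
//
//     import { app, input, output } from '@azure/functions';
//
//     const configBlob = input.storageBlob({ path: 'config/settings.json', connection: 'AzureWebJobsStorage' });
//     const auditQueue = output.storageQueue({ queueName: 'audit', connection: 'AzureWebJobsStorage' });
//
//     app.http('process', {
//         extraInputs: [configBlob],
//         extraOutputs: [auditQueue],
//         handler: async (request, context) => {
//             const settings = context.extraInputs.get(configBlob);   // secondary input
//             context.extraOutputs.set(auditQueue, { url: request.url, at: Date.now() });
//             return { body: 'processed' };
//         },
//     });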
+
+import {
+    CosmosDBOutput,
+    CosmosDBOutputOptions,
+    EventGridOutput,
+    EventGridOutputOptions,
+    EventHubOutput,
+    EventHubOutputOptions,
+    FunctionOutput,
+    GenericOutputOptions,
+    HttpOutput,
+    HttpOutputOptions,
+    MySqlOutput,
+    MySqlOutputOptions,
+    ServiceBusQueueOutput,
+    ServiceBusQueueOutputOptions,
+    ServiceBusTopicOutput,
+    ServiceBusTopicOutputOptions,
+    SqlOutput,
+    SqlOutputOptions,
+    StorageBlobOutput,
+    StorageBlobOutputOptions,
+    StorageQueueOutput,
+    StorageQueueOutputOptions,
+    TableOutput,
+    TableOutputOptions,
+    WebPubSubOutput,
+    WebPubSubOutputOptions,
+} from '@azure/functions';
+import { addBindingName } from './addBindingName';
+
+export function http(options: HttpOutputOptions): HttpOutput {
+    return addOutputBindingName({
+        ...options,
+        type: 'http',
+    });
+}
+
+export function storageBlob(options: StorageBlobOutputOptions): StorageBlobOutput {
+    return addOutputBindingName({
+        ...options,
+        type: 'blob',
+    });
+}
+
+export function table(options: TableOutputOptions): TableOutput {
+    return addOutputBindingName({
+        ...options,
+        type: 'table',
+    });
+}
+
+export function storageQueue(options: StorageQueueOutputOptions): StorageQueueOutput {
+    return addOutputBindingName({
+        ...options,
+        type: 'queue',
+    });
+}
+
+export function serviceBusQueue(options: ServiceBusQueueOutputOptions): ServiceBusQueueOutput {
+    return addOutputBindingName({
+        ...options,
+        type: 'serviceBus',
+    });
+}
+
+export function serviceBusTopic(options: ServiceBusTopicOutputOptions): ServiceBusTopicOutput {
+    return addOutputBindingName({
+        ...options,
+        type: 'serviceBus',
+    });
+}
+
+export function eventHub(options: EventHubOutputOptions): EventHubOutput {
+    return addOutputBindingName({
+        ...options,
+        type: 'eventHub',
+    });
+}
+
+export function eventGrid(options: EventGridOutputOptions): EventGridOutput {
+    return addOutputBindingName({
+        ...options,
+        type: 'eventGrid',
+    });
+}
+
+export function cosmosDB(options: CosmosDBOutputOptions): CosmosDBOutput {
+    return addOutputBindingName({
+        ...options,
+        type: 'cosmosDB',
+    });
+}
+
+export function sql(options: SqlOutputOptions): SqlOutput {
+    return addOutputBindingName({
+        ...options,
+        type: 'sql',
+    });
+}
+
+export function mySql(options: MySqlOutputOptions): MySqlOutput {
+    return addOutputBindingName({
+        ...options,
+        type: 'mysql',
+    });
+}
+
+export function webPubSub(options: WebPubSubOutputOptions): WebPubSubOutput {
+    return addOutputBindingName({
+        ...options,
+        type: 'webPubSub',
+    });
+}
+
+export function generic(options: GenericOutputOptions): FunctionOutput {
+    return addOutputBindingName(options);
+}
+
+function addOutputBindingName<T extends { type: string; name?: string }>(binding: T): T & { name: string } {
+    return addBindingName(binding, 'Output');
+}
diff --git a/node_modules/@azure/functions/src/setup.ts b/node_modules/@azure/functions/src/setup.ts
new file mode 100644
index 00000000..d6b54c34
--- /dev/null
+++ b/node_modules/@azure/functions/src/setup.ts
@@ -0,0 +1,49 @@
+// Copyright (c) .NET Foundation. All rights reserved.
+// Licensed under the MIT License.
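// [Editorial example] An illustrative sketch, not library source, of the setup()
// entry point defined below, exposed publicly as `app.setup`. It must run before app
// startup finishes (lockSetup is called by the worker at startup). The capability
// name shown is hypothetical; enableHttpStream requires Functions Host v4.28+.
//
//     import { app } from '@azure/functions';
//
//     app.setup({
//         enableHttpStream: true,
//         capabilities: { SomeHostCapability: true }, // coerced to the string 'true'
//     });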
+
+import { SetupOptions } from '../types';
+import { AzFuncSystemError } from './errors';
+import { isDefined } from './utils/nonNull';
+import { tryGetCoreApiLazy } from './utils/tryGetCoreApiLazy';
+import { workerSystemLog } from './utils/workerSystemLog';
+
+let setupLocked = false;
+export function lockSetup(): void {
+    setupLocked = true;
+}
+
+export let enableHttpStream = false;
+export const capabilities: Record<string, string> = {};
+
+export function setup(opts: SetupOptions): void {
+    if (setupLocked) {
+        throw new AzFuncSystemError("Setup options can't be changed after app startup has finished.");
+    }
+
+    if (opts.enableHttpStream) {
+        // NOTE: coreApi.log was coincidentally added the same time as http streaming,
+        // so we can use that to validate the host version instead of messing with semver parsing
+        const coreApi = tryGetCoreApiLazy();
+        if (coreApi && !coreApi.log) {
+            throw new AzFuncSystemError(`HTTP streaming requires Azure Functions Host v4.28 or higher.`);
+        }
+    }
+
+    if (isDefined(opts.enableHttpStream)) {
+        enableHttpStream = opts.enableHttpStream;
+    }
+
+    if (opts.capabilities) {
+        for (let [key, val] of Object.entries(opts.capabilities)) {
+            if (isDefined(val)) {
+                val = String(val);
+                workerSystemLog('debug', `Capability ${key} set to ${val}.`);
+                capabilities[key] = val;
+            }
+        }
+    }
+
+    if (enableHttpStream) {
+        workerSystemLog('debug', `HTTP streaming enabled.`);
+    }
+}
diff --git a/node_modules/@azure/functions/src/trigger.ts b/node_modules/@azure/functions/src/trigger.ts
new file mode 100644
index 00000000..564781e1
--- /dev/null
+++ b/node_modules/@azure/functions/src/trigger.ts
@@ -0,0 +1,152 @@
+// Copyright (c) .NET Foundation. All rights reserved.
+// Licensed under the MIT License.
+
+import {
+    CosmosDBTrigger,
+    CosmosDBTriggerOptions,
+    EventGridTrigger,
+    EventGridTriggerOptions,
+    EventHubTrigger,
+    EventHubTriggerOptions,
+    FunctionTrigger,
+    GenericTriggerOptions,
+    HttpTrigger,
+    HttpTriggerOptions,
+    McpToolTrigger,
+    McpToolTriggerOptions,
+    MySqlTrigger,
+    MySqlTriggerOptions,
+    ServiceBusQueueTrigger,
+    ServiceBusQueueTriggerOptions,
+    ServiceBusTopicTrigger,
+    ServiceBusTopicTriggerOptions,
+    SqlTrigger,
+    SqlTriggerOptions,
+    StorageBlobTrigger,
+    StorageBlobTriggerOptions,
+    StorageQueueTrigger,
+    StorageQueueTriggerOptions,
+    TimerTrigger,
+    TimerTriggerOptions,
+    WarmupTrigger,
+    WarmupTriggerOptions,
+    WebPubSubTrigger,
+    WebPubSubTriggerOptions,
+} from '@azure/functions';
+import { addBindingName } from './addBindingName';
+import { converToMcpToolTriggerOptionsToRpc } from './converters/toMcpToolTriggerOptionsToRpc';
+
+export function http(options: HttpTriggerOptions): HttpTrigger {
+    return addTriggerBindingName({
+        ...options,
+        authLevel: options.authLevel || 'anonymous',
+        methods: options.methods || ['GET', 'POST'],
+        type: 'httpTrigger',
+    });
+}
+
+export function timer(options: TimerTriggerOptions): TimerTrigger {
+    return addTriggerBindingName({
+        ...options,
+        type: 'timerTrigger',
+    });
+}
+
+export function storageBlob(options: StorageBlobTriggerOptions): StorageBlobTrigger {
+    return addTriggerBindingName({
+        ...options,
+        type: 'blobTrigger',
+    });
+}
+
+export function storageQueue(options: StorageQueueTriggerOptions): StorageQueueTrigger {
+    return addTriggerBindingName({
+        ...options,
+        type: 'queueTrigger',
+    });
+}
+
+export function serviceBusQueue(options: ServiceBusQueueTriggerOptions): ServiceBusQueueTrigger {
+    return addTriggerBindingName({
+        ...options,
+        type: 'serviceBusTrigger',
+    });
+}
+
+export function serviceBusTopic(options: ServiceBusTopicTriggerOptions): ServiceBusTopicTrigger {
+    return addTriggerBindingName({
+        ...options,
+        type: 'serviceBusTrigger',
+    });
+}
+
+export function eventHub(options: EventHubTriggerOptions): EventHubTrigger {
+    return addTriggerBindingName({
+        ...options,
+        type: 'eventHubTrigger',
+    });
+}
+
+export function eventGrid(options: EventGridTriggerOptions): EventGridTrigger {
+    return addTriggerBindingName({
+        ...options,
+        type: 'eventGridTrigger',
+    });
+}
+
+export function cosmosDB(options: CosmosDBTriggerOptions): CosmosDBTrigger {
+    return addTriggerBindingName({
+        ...options,
+        type: 'cosmosDBTrigger',
+    });
+}
+
+export function warmup(options: WarmupTriggerOptions): WarmupTrigger {
+    return addTriggerBindingName({
+        ...options,
+        type: 'warmupTrigger',
+    });
+}
+
+export function sql(options: SqlTriggerOptions): SqlTrigger {
+    return addTriggerBindingName({
+        ...options,
+        type: 'sqlTrigger',
+    });
+}
+
+export function mySql(options: MySqlTriggerOptions): MySqlTrigger {
+    return addTriggerBindingName({
+        ...options,
+        type: 'mysqlTrigger',
+    });
+}
+
+export function webPubSub(options: WebPubSubTriggerOptions): WebPubSubTrigger {
+    return addTriggerBindingName({
+        ...options,
+        type: 'webPubSubTrigger',
+    });
+}
+
+/**
+ * Creates an MCP Tool trigger configuration.
+ * This function is used to define an MCP Tool trigger for an Azure Function.
+ *
+ * @param options - The configuration options for the MCP Tool trigger, including tool-specific metadata.
+ * @returns An MCP Tool trigger object with the specified configuration.
+ */
+export function mcpTool(options: McpToolTriggerOptions): McpToolTrigger {
+    return addTriggerBindingName({
+        ...converToMcpToolTriggerOptionsToRpc(options),
+        type: 'mcpToolTrigger',
+    });
+}
+
+export function generic(options: GenericTriggerOptions): FunctionTrigger {
+    return addTriggerBindingName(options);
+}
+
+function addTriggerBindingName<T extends { type: string; name?: string }>(binding: T): T & { name: string } {
+    return addBindingName(binding, 'Trigger');
+}
diff --git a/node_modules/@azure/functions/src/utils/Disposable.ts b/node_modules/@azure/functions/src/utils/Disposable.ts
new file mode 100644
index 00000000..913a080b
--- /dev/null
+++ b/node_modules/@azure/functions/src/utils/Disposable.ts
@@ -0,0 +1,35 @@
+// Copyright (c) .NET Foundation. All rights reserved.
+// Licensed under the MIT License.
+
+/**
+ * Based off of VS Code
+ * https://github.com/microsoft/vscode/blob/7bed4ce3e9f5059b5fc638c348f064edabcce5d2/src/vs/workbench/api/common/extHostTypes.ts#L65
+ */
+export class Disposable {
+    static from(...inDisposables: { dispose(): any }[]): Disposable {
+        let disposables: ReadonlyArray<{ dispose(): any }> | undefined = inDisposables;
+        return new Disposable(function () {
+            if (disposables) {
+                for (const disposable of disposables) {
+                    if (disposable && typeof disposable.dispose === 'function') {
+                        disposable.dispose();
+                    }
+                }
+                disposables = undefined;
+            }
+        });
+    }
+
+    #callOnDispose?: () => any;
+
+    constructor(callOnDispose: () => any) {
+        this.#callOnDispose = callOnDispose;
+    }
+
+    dispose(): any {
+        if (typeof this.#callOnDispose === 'function') {
+            this.#callOnDispose();
+            this.#callOnDispose = undefined;
+        }
+    }
+}
diff --git a/node_modules/@azure/functions/src/utils/fallbackLogHandler.ts b/node_modules/@azure/functions/src/utils/fallbackLogHandler.ts
new file mode 100644
index 00000000..1c2d9669
--- /dev/null
+++ b/node_modules/@azure/functions/src/utils/fallbackLogHandler.ts
@@ -0,0 +1,27 @@
+// Copyright (c) .NET Foundation. All rights reserved.
+// Licensed under the MIT License.
+
+import * as types from '@azure/functions';
+
+export function fallbackLogHandler(level: types.LogLevel, ...args: unknown[]): void {
+    switch (level) {
+        case 'trace':
+            console.trace(...args);
+            break;
+        case 'debug':
+            console.debug(...args);
+            break;
+        case 'information':
+            console.info(...args);
+            break;
+        case 'warning':
+            console.warn(...args);
+            break;
+        case 'critical':
+        case 'error':
+            console.error(...args);
+            break;
+        default:
+            console.log(...args);
+    }
+}
diff --git a/node_modules/@azure/functions/src/utils/getRandomHexString.ts b/node_modules/@azure/functions/src/utils/getRandomHexString.ts
new file mode 100644
index 00000000..5f9476cf
--- /dev/null
+++ b/node_modules/@azure/functions/src/utils/getRandomHexString.ts
@@ -0,0 +1,13 @@
+// Copyright (c) .NET Foundation. All rights reserved.
+// Licensed under the MIT License.
+
+import * as crypto from 'crypto';
+
+export function getRandomHexString(length = 10): string {
+    const buffer: Buffer = crypto.randomBytes(Math.ceil(length / 2));
+    return buffer.toString('hex').slice(0, length);
+}
+
+export function getStringHash(data: string, length = 10): string {
+    return crypto.createHash('sha256').update(data).digest('hex').slice(0, length);
+}
diff --git a/node_modules/@azure/functions/src/utils/isTrigger.ts b/node_modules/@azure/functions/src/utils/isTrigger.ts
new file mode 100644
index 00000000..b58aceeb
--- /dev/null
+++ b/node_modules/@azure/functions/src/utils/isTrigger.ts
@@ -0,0 +1,14 @@
+// Copyright (c) .NET Foundation. All rights reserved.
+// Licensed under the MIT License.
+
+export function isTrigger(typeName: string | undefined | null): boolean {
+    return !!typeName && /trigger$/i.test(typeName);
+}
+
+export function isHttpTrigger(typeName: string | undefined | null): boolean {
+    return typeName?.toLowerCase() === 'httptrigger';
+}
+
+export function isTimerTrigger(typeName: string | undefined | null): boolean {
+    return typeName?.toLowerCase() === 'timertrigger';
+}
diff --git a/node_modules/@azure/functions/src/utils/nonNull.ts b/node_modules/@azure/functions/src/utils/nonNull.ts
new file mode 100644
index 00000000..336b317a
--- /dev/null
+++ b/node_modules/@azure/functions/src/utils/nonNull.ts
@@ -0,0 +1,40 @@
+// Copyright (c) .NET Foundation. All rights reserved.
+// Licensed under the MIT License.
+
+import { AzFuncSystemError } from '../errors';
+
+/**
+ * Retrieves a property by name from an object and checks that it's not null and not undefined. It is strongly typed
+ * for the property and will give a compile error if the given name is not a property of the source.
+ */
+export function nonNullProp<TSource, TKey extends keyof TSource>(
+    source: TSource,
+    name: TKey
+): NonNullable<TSource[TKey]> {
+    const value: NonNullable<TSource[TKey]> = <NonNullable<TSource[TKey]>>source[name];
+    return nonNullValue(value, <string>name);
+}
+
+/**
+ * Validates that a given value is not null and not undefined.
+ */
+export function nonNullValue<T>(value: T | undefined | null, propertyNameOrMessage?: string): T {
+    if (value === null || value === undefined) {
+        throw new AzFuncSystemError(
+            'Internal error: Expected value to be neither null nor undefined' +
+                (propertyNameOrMessage ? `: ${propertyNameOrMessage}` : '')
+        );
+    }
+
+    return value;
+}
+
+export function copyPropIfDefined<TData, TKey extends keyof TData>(source: TData, destination: TData, key: TKey): void {
+    if (source[key] !== null && source[key] !== undefined) {
+        destination[key] = source[key];
+    }
+}
+
+export function isDefined<T>(data: T | undefined | null): data is T {
+    return data !== null && data !== undefined;
+}
diff --git a/node_modules/@azure/functions/src/utils/tryGetCoreApiLazy.ts b/node_modules/@azure/functions/src/utils/tryGetCoreApiLazy.ts
new file mode 100644
index 00000000..b42c5238
--- /dev/null
+++ b/node_modules/@azure/functions/src/utils/tryGetCoreApiLazy.ts
@@ -0,0 +1,17 @@
+// Copyright (c) .NET Foundation. All rights reserved.
+// Licensed under the MIT License.
+
+import * as coreTypes from '@azure/functions-core';
+
+let coreApi: typeof coreTypes | undefined | null;
+export function tryGetCoreApiLazy(): typeof coreTypes | null {
+    if (coreApi === undefined) {
+        try {
+            // eslint-disable-next-line @typescript-eslint/no-var-requires
+            coreApi = require('@azure/functions-core');
+        } catch {
+            coreApi = null;
+        }
+    }
+    return coreApi;
+}
diff --git a/node_modules/@azure/functions/src/utils/util.ts b/node_modules/@azure/functions/src/utils/util.ts
new file mode 100644
index 00000000..e870de86
--- /dev/null
+++ b/node_modules/@azure/functions/src/utils/util.ts
@@ -0,0 +1,6 @@
+// Copyright (c) .NET Foundation. All rights reserved.
+// Licensed under the MIT License.
+
+export function isEnvironmentVariableSet(val: string | boolean | number | undefined | null): boolean {
+    return !/^(false|0)?$/i.test(val === undefined || val === null ? '' : String(val));
+}
diff --git a/node_modules/@azure/functions/src/utils/workerSystemLog.ts b/node_modules/@azure/functions/src/utils/workerSystemLog.ts
new file mode 100644
index 00000000..0011b12c
--- /dev/null
+++ b/node_modules/@azure/functions/src/utils/workerSystemLog.ts
@@ -0,0 +1,17 @@
+// Copyright (c) .NET Foundation. All rights reserved.
+// Licensed under the MIT License.
+
+import * as types from '@azure/functions';
+import { format } from 'util';
+import { fallbackLogHandler } from './fallbackLogHandler';
+import { tryGetCoreApiLazy } from './tryGetCoreApiLazy';
+
+export function workerSystemLog(level: types.LogLevel, ...args: unknown[]): void {
+    const coreApi = tryGetCoreApiLazy();
+    // NOTE: coreApi.log doesn't exist on older versions of the worker
+    if (coreApi && coreApi.log) {
+        coreApi.log(level, 'system', format(...args));
+    } else {
+        fallbackLogHandler(level, ...args);
+    }
+}
diff --git a/node_modules/@azure/functions/types/InvocationContext.d.ts b/node_modules/@azure/functions/types/InvocationContext.d.ts
new file mode 100644
index 00000000..8815bdf3
--- /dev/null
+++ b/node_modules/@azure/functions/types/InvocationContext.d.ts
@@ -0,0 +1,366 @@
+// Copyright (c) .NET Foundation. All rights reserved.
+// Licensed under the MIT License.
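// [Editorial example] An illustrative sketch, not library source, of the nonNull
// utilities defined in src/utils/nonNull.ts above. The import path and `Init` shape
// are hypothetical.
//
//     import { isDefined, nonNullProp, nonNullValue } from './utils/nonNull';
//
//     interface Init { url?: string; retries?: number }
//     const init: Init = { url: 'http://localhost:7071', retries: 0 };
//
//     const url = nonNullProp(init, 'url');                   // throws if url is null/undefined
//     const retries = nonNullValue(init.retries, 'retries');  // 0 is kept: only null/undefined throw
//     if (isDefined(init.retries)) {
//         console.log(url, retries);
//     }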
+ +import { CosmosDBInput, CosmosDBOutput } from './cosmosDB'; +import { EventGridOutput, EventGridPartialEvent } from './eventGrid'; +import { EventHubOutput } from './eventHub'; +import { HttpOutput, HttpResponse } from './http'; +import { FunctionInput, FunctionOutput, FunctionTrigger, LogLevel } from './index'; +import { MySqlInput, MySqlOutput } from './mySql'; +import { ServiceBusQueueOutput, ServiceBusTopicOutput } from './serviceBus'; +import { SqlInput, SqlOutput } from './sql'; +import { StorageBlobInput, StorageBlobOutput, StorageQueueOutput } from './storage'; +import { TableInput, TableOutput } from './table'; +import { WebPubSubOutput } from './webpubsub'; + +/** + * Contains metadata and helper methods specific to this invocation + */ +export declare class InvocationContext { + /** + * For testing purposes only. This will always be constructed for you when run in the context of the Azure Functions runtime + */ + constructor(init?: InvocationContextInit); + + /** + * A unique guid specific to this invocation + */ + invocationId: string; + + /** + * The name of the function that is being invoked + */ + functionName: string; + + /** + * An object used to get secondary inputs + */ + extraInputs: InvocationContextExtraInputs; + + /** + * An object used to set secondary outputs + */ + extraOutputs: InvocationContextExtraOutputs; + + /** + * The recommended way to log data during invocation. + * Similar to Node.js's `console.log`, but has integration with Azure features like application insights + * Uses the 'information' log level + */ + log(...args: any[]): void; + + /** + * The recommended way to log trace data (level 0) during invocation. + * Similar to Node.js's `console.trace`, but has integration with Azure features like application insights + */ + trace(...args: any[]): void; + + /** + * The recommended way to log debug data (level 1) during invocation. + * Similar to Node.js's `console.debug`, but has integration with Azure features like application insights + */ + debug(...args: any[]): void; + + /** + * The recommended way to log information data (level 2) during invocation. + * Similar to Node.js's `console.info`, but has integration with Azure features like application insights + */ + info(...args: any[]): void; + + /** + * The recommended way to log warning data (level 3) during invocation. + * Similar to Node.js's `console.warn`, but has integration with Azure features like application insights + */ + warn(...args: any[]): void; + + /** + * The recommended way to log error data (level 4) during invocation. 
+ * Similar to Node.js's `console.error`, but has integration with Azure features like application insights + */ + error(...args: any[]): void; + + /** + * The retry context of the current function execution if the retry policy is defined + */ + retryContext?: RetryContext; + + /** + * TraceContext information to enable distributed tracing scenarios + */ + traceContext?: TraceContext; + + /** + * Metadata about the trigger or undefined if the metadata is already represented elsewhere + * For example, this will be undefined for http and timer triggers because you can find that information on the request & timer object instead + */ + triggerMetadata?: TriggerMetadata; + + /** + * The options used when registering the function + * NOTE: This value may differ slightly from the original because it has been validated and defaults may have been explicitly added + */ + options: EffectiveFunctionOptions; +} + +/** + * An object used to get secondary inputs + */ +export interface InvocationContextExtraInputs { + /** + * Get a secondary storage blob entry input for this invocation + * @input the configuration object for this storage blob input + */ + get(input: StorageBlobInput): unknown; + + /** + * Get a secondary table input for this invocation + * @input the configuration object for this table input + */ + get(input: TableInput): unknown; + + /** + * Get a secondary Cosmos DB documents input for this invocation + * @input the configuration object for this Cosmos DB input + */ + get(input: CosmosDBInput): unknown; + + /** + * Get a secondary SQL items input for this invocation + * @input the configuration object for this SQL input + */ + get(input: SqlInput): unknown; + + /** + * Get a secondary MySql items input for this invocation + * @input the configuration object for this MySql input + */ + get(input: MySqlInput): unknown; + + /** + * Get a secondary generic input for this invocation + * @inputOrName the configuration object or name for this input + */ + get(inputOrName: FunctionInput | string): unknown; + + /** + * Set a secondary generic input for this invocation + * @inputOrName the configuration object or name for this input + * @value the input value + */ + set(inputOrName: FunctionInput | string, value: unknown): void; +} + +/** + * An object used to set secondary outputs + */ +export interface InvocationContextExtraOutputs { + /** + * Set a secondary http response output for this invocation + * @output the configuration object for this http output + * @response the http response output value + */ + set(output: HttpOutput, response: HttpResponse): void; + + /** + * Set a secondary storage blob entry output for this invocation + * @output the configuration object for this storage blob output + * @blob the blob output value + */ + set(output: StorageBlobOutput, blob: unknown): void; + + /** + * Set a secondary table output for this invocation + * @output the configuration object for this table output + * @tableEntity the table output value + */ + set(output: TableOutput, tableEntity: unknown): void; + + /** + * Set a secondary storage queue entry output for this invocation + * @output the configuration object for this storage queue output + * @queueItem the queue entry output value + */ + set(output: StorageQueueOutput, queueItem: unknown): void; + + /** + * Set a secondary Cosmos DB documents output for this invocation + * @output the configuration object for this Cosmos DB output + * @documents the output document(s) value + */ + set(output: CosmosDBOutput, documents: unknown): void; + 
+    /**
+     * Set a secondary SQL items output for this invocation
+     * @output the configuration object for this SQL output
+     * @documents the output item(s) value
+     */
+    set(output: SqlOutput, items: unknown): void;
+
+    /**
+     * Set a secondary Service Bus queue output for this invocation
+     * @output the configuration object for this Service Bus output
+     * @message the output message(s) value
+     */
+    set(output: ServiceBusQueueOutput, messages: unknown): void;
+
+    /**
+     * Set a secondary Service Bus topic output for this invocation
+     * @output the configuration object for this Service Bus output
+     * @message the output message(s) value
+     */
+    set(output: ServiceBusTopicOutput, messages: unknown): void;
+
+    /**
+     * Set a secondary Event Hub output for this invocation
+     * @output the configuration object for this EventHub output
+     * @message the output message(s) value
+     */
+    set(output: EventHubOutput, messages: unknown): void;
+
+    /**
+     * Set a secondary Event Grid output for this invocation
+     * @output the configuration object for this Event Grid output
+     * @message the output event(s) value
+     */
+    set(output: EventGridOutput, events: EventGridPartialEvent | EventGridPartialEvent[]): void;
+
+    /**
+     * Set a secondary MySql items output for this invocation
+     * @output the configuration object for this MySql output
+     * @documents the output item(s) value
+     */
+    set(output: MySqlOutput, items: unknown): void;
+
+    /**
+     * Set a secondary Web PubSub output for this invocation
+     * @output the configuration object for this Web PubSub output
+     * @message the output message(s) value
+     */
+    set(output: WebPubSubOutput, messages: unknown): void;
+
+    /**
+     * Set a secondary generic output for this invocation
+     * @outputOrName the configuration object or name for this output
+     * @value the output value
+     */
+    set(outputOrName: FunctionOutput | string, value: unknown): void;
+
+    /**
+     * Get a secondary generic output for this invocation
+     * @outputOrName the configuration object or name for this output
+     */
+    get(outputOrName: FunctionOutput | string): unknown;
+}
+
+/**
+ * Metadata related to the input that triggered your function
+ */
+export type TriggerMetadata = Record<string, unknown>;
+
+export interface RetryContext {
+    /**
+     * Current retry count of the function executions.
+     */
+    retryCount: number;
+
+    /**
+     * Max retry count is the maximum number of times an execution is retried before eventual failure. A value of -1 means to retry indefinitely.
+     */
+    maxRetryCount: number;
+
+    /**
+     * Exception that caused the retry
+     */
+    exception?: Exception;
+}
+
+export interface Exception {
+    source?: string;
+
+    stackTrace?: string;
+
+    message?: string;
+}
+
+/**
+ * TraceContext information to enable distributed tracing scenarios
+ */
+export interface TraceContext {
+    /**
+     * Describes the position of the incoming request in its trace graph in a portable, fixed-length format
+     */
+    traceParent?: string | undefined;
+
+    /**
+     * Extends traceparent with vendor-specific data
+     */
+    traceState?: string | undefined;
+
+    /**
+     * Holds additional properties being sent as part of request telemetry
+     */
+    attributes?: Record<string, string>;
+}
+
+/**
+ * The options used when registering the function, as passed to a specific invocation
+ * NOTE: This value may differ slightly from the original because it has been validated and defaults may have been explicitly added
+ */
+export interface EffectiveFunctionOptions {
+    /**
+     * Configuration for the primary input to the function, aka the reason it will be triggered
+     * This is the only input that is passed as an argument to the function handler during invocation
+     */
+    trigger: FunctionTrigger;
+
+    /**
+     * Configuration for the optional primary output of the function
+     * This is the main output that you should set as the return value of the function handler during invocation
+     */
+    return?: FunctionOutput;
+
+    /**
+     * Configuration for an optional set of secondary inputs
+     * During invocation, get these values with `context.extraInputs.get()`
+     */
+    extraInputs: FunctionInput[];
+
+    /**
+     * Configuration for an optional set of secondary outputs
+     * During invocation, set these values with `context.extraOutputs.set()`
+     */
+    extraOutputs: FunctionOutput[];
+}
+
+/**
+ * For testing purposes only. This will always be constructed for you when run in the context of the Azure Functions runtime
+ */
+export interface InvocationContextInit {
+    /**
+     * Defaults to 'unknown' if not specified
+     */
+    invocationId?: string;
+
+    /**
+     * Defaults to 'unknown' if not specified
+     */
+    functionName?: string;
+
+    /**
+     * Defaults to Node.js console if not specified
+     */
+    logHandler?: LogHandler;
+
+    traceContext?: TraceContext;
+
+    retryContext?: RetryContext;
+
+    triggerMetadata?: TriggerMetadata;
+
+    /**
+     * Defaults to a trigger with 'unknown' type and name if not specified
+     */
+    options?: Partial<EffectiveFunctionOptions>;
+}
+
+export type LogHandler = (level: LogLevel, ...args: unknown[]) => void;
diff --git a/node_modules/@azure/functions/types/app.d.ts b/node_modules/@azure/functions/types/app.d.ts
new file mode 100644
index 00000000..db3e059d
--- /dev/null
+++ b/node_modules/@azure/functions/types/app.d.ts
@@ -0,0 +1,202 @@
+// Copyright (c) .NET Foundation. All rights reserved.
+// Licensed under the MIT License.
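A minimal handler sketch showing the `InvocationContext` surface declared above, assuming the v4 `@azure/functions` programming model (the queue name, blob path, and function name are illustrative):

```ts
import { app, input, InvocationContext } from '@azure/functions';

// Secondary blob input, read inside the handler via context.extraInputs.get()
const blobInput = input.storageBlob({
    path: 'samples-workitems/{queueTrigger}',
    connection: 'AzureWebJobsStorage',
});

app.storageQueue('processWorkItem', {
    queueName: 'workitems',
    connection: 'AzureWebJobsStorage',
    extraInputs: [blobInput],
    handler: (queueItem: unknown, context: InvocationContext) => {
        context.log(`Invocation ${context.invocationId} got item:`, queueItem);
        const blobContent = context.extraInputs.get(blobInput); // secondary input
        context.log('Matching blob:', blobContent);
    },
});
```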
+ +import { CosmosDBFunctionOptions } from './cosmosDB'; +import { EventGridFunctionOptions } from './eventGrid'; +import { EventHubFunctionOptions } from './eventHub'; +import { GenericFunctionOptions } from './generic'; +import { HttpFunctionOptions, HttpHandler, HttpMethodFunctionOptions } from './http'; +import { McpToolFunctionOptions } from './mcpTool'; +import { MySqlFunctionOptions } from './mySql'; +import { ServiceBusQueueFunctionOptions, ServiceBusTopicFunctionOptions } from './serviceBus'; +import { SetupOptions } from './setup'; +import { SqlFunctionOptions } from './sql'; +import { StorageBlobFunctionOptions, StorageQueueFunctionOptions } from './storage'; +import { TimerFunctionOptions } from './timer'; +import { WarmupFunctionOptions } from './warmup'; +import { WebPubSubFunctionOptions } from './webpubsub'; + +/** + * Optional method to configure the behavior of your app. + * This can only be done during app startup, before invocations occur. + * If called multiple times, options will be merged with the previous options specified. + */ +export declare function setup(options: SetupOptions): void; + +/** + * Registers an http function in your app that will be triggered by making a request to the function url + * @param name The name of the function. This will be the route unless a route is explicitly configured in the `HttpTriggerOptions` + * @param options Configuration options describing the inputs, outputs, and handler for this function + */ +export function http(name: string, options: HttpFunctionOptions): void; + +/** + * Registers an http function in your app that will be triggered by making a 'GET' request to the function url + * @param name The name of the function. This will be the route unless a route is explicitly configured in the `HttpTriggerOptions` + * @param handler The handler for this function + */ +export function get(name: string, handler: HttpHandler): void; + +/** + * Registers an http function in your app that will be triggered by making a 'GET' request to the function url + * @param name The name of the function. This will be the route unless a route is explicitly configured in the `HttpTriggerOptions` + * @param options Configuration options describing the inputs, outputs, and handler for this function + */ +export function get(name: string, options: HttpMethodFunctionOptions): void; + +/** + * Registers an http function in your app that will be triggered by making a 'PUT' request to the function url + * @param name The name of the function. This will be the route unless a route is explicitly configured in the `HttpTriggerOptions` + * @param handler The handler for this function + */ +export function put(name: string, handler: HttpHandler): void; + +/** + * Registers an http function in your app that will be triggered by making a 'PUT' request to the function url + * @param name The name of the function. This will be the route unless a route is explicitly configured in the `HttpTriggerOptions` + * @param options Configuration options describing the inputs, outputs, and handler for this function + */ +export function put(name: string, options: HttpMethodFunctionOptions): void; + +/** + * Registers an http function in your app that will be triggered by making a 'POST' request to the function url + * @param name The name of the function. 
This will be the route unless a route is explicitly configured in the `HttpTriggerOptions` + * @param handler The handler for this function + */ +export function post(name: string, handler: HttpHandler): void; + +/** + * Registers an http function in your app that will be triggered by making a 'POST' request to the function url + * @param name The name of the function. This will be the route unless a route is explicitly configured in the `HttpTriggerOptions` + * @param options Configuration options describing the inputs, outputs, and handler for this function + */ +export function post(name: string, options: HttpMethodFunctionOptions): void; + +/** + * Registers an http function in your app that will be triggered by making a 'PATCH' request to the function url + * @param name The name of the function. This will be the route unless a route is explicitly configured in the `HttpTriggerOptions` + * @param handler The handler for this function + */ +export function patch(name: string, handler: HttpHandler): void; + +/** + * Registers an http function in your app that will be triggered by making a 'PATCH' request to the function url + * @param name The name of the function. This will be the route unless a route is explicitly configured in the `HttpTriggerOptions` + * @param options Configuration options describing the inputs, outputs, and handler for this function + */ +export function patch(name: string, options: HttpMethodFunctionOptions): void; + +/** + * Registers an http function in your app that will be triggered by making a 'DELETE' request to the function url + * @param name The name of the function. This will be the route unless a route is explicitly configured in the `HttpTriggerOptions` + * @param handler The handler for this function + */ +export function deleteRequest(name: string, handler: HttpHandler): void; + +/** + * Registers an http function in your app that will be triggered by making a 'DELETE' request to the function url + * @param name The name of the function. This will be the route unless a route is explicitly configured in the `HttpTriggerOptions` + * @param options Configuration options describing the inputs, outputs, and handler for this function + */ +export function deleteRequest(name: string, options: HttpMethodFunctionOptions): void; + +/** + * Registers a timer function in your app that will be triggered on a schedule + * @param name The name of the function. The name must be unique within your app and will mostly be used for your own tracking purposes + * @param options Configuration options describing the inputs, outputs, and handler for this function + */ +export function timer(name: string, options: TimerFunctionOptions): void; + +/** + * Registers a function in your app that will be triggered whenever an item is added to a storage blob path + * @param name The name of the function. The name must be unique within your app and will mostly be used for your own tracking purposes + * @param options Configuration options describing the inputs, outputs, and handler for this function + */ +export function storageBlob(name: string, options: StorageBlobFunctionOptions): void; + +/** + * Registers a function in your app that will be triggered whenever an item is added to a storage queue + * @param name The name of the function. 
The name must be unique within your app and will mostly be used for your own tracking purposes + * @param options Configuration options describing the inputs, outputs, and handler for this function + */ +export function storageQueue(name: string, options: StorageQueueFunctionOptions): void; + +/** + * Registers a function in your app that will be triggered whenever a message is added to a service bus queue + * @param name The name of the function. The name must be unique within your app and will mostly be used for your own tracking purposes + * @param options Configuration options describing the inputs, outputs, and handler for this function + */ +export function serviceBusQueue(name: string, options: ServiceBusQueueFunctionOptions): void; + +/** + * Registers a function in your app that will be triggered whenever a message is added to a service bus topic + * @param name The name of the function. The name must be unique within your app and will mostly be used for your own tracking purposes + * @param options Configuration options describing the inputs, outputs, and handler for this function + */ +export function serviceBusTopic(name: string, options: ServiceBusTopicFunctionOptions): void; + +/** + * Registers a function in your app that will be triggered whenever a message is added to an event hub + * @param name The name of the function. The name must be unique within your app and will mostly be used for your own tracking purposes + * @param options Configuration options describing the inputs, outputs, and handler for this function + */ +export function eventHub(name: string, options: EventHubFunctionOptions): void; + +/** + * Registers a function in your app that will be triggered whenever an event is sent by an event grid source + * @param name The name of the function. The name must be unique within your app and will mostly be used for your own tracking purposes + * @param options Configuration options describing the inputs, outputs, and handler for this function + */ +export function eventGrid(name: string, options: EventGridFunctionOptions): void; + +/** + * Registers a Cosmos DB function in your app that will be triggered whenever inserts and updates occur (not deletions) + * @param name The name of the function. The name must be unique within your app and will mostly be used for your own tracking purposes + * @param options Configuration options describing the inputs, outputs, and handler for this function + */ +export function cosmosDB(name: string, options: CosmosDBFunctionOptions): void; + +/** + * Registers a function in your app that will be triggered when an instance is added to scale a running function app. + * The warmup trigger is only called during scale-out operations, not during restarts or other non-scale startups. + * Make sure your logic can load all required dependencies without relying on the warmup trigger. + * Lazy loading is a good pattern to achieve this goal. + * The warmup trigger isn't available to apps running on the Consumption plan. + * For more information, please see the [Azure Functions warmup trigger documentation](https://learn.microsoft.com/azure/azure-functions/functions-bindings-warmup?tabs=isolated-process&pivots=programming-language-javascript). + * @param name The name of the function. 
The name must be unique within your app and will mostly be used for your own tracking purposes + * @param options Configuration options describing the inputs, outputs, and handler for this function + */ +export function warmup(name: string, options: WarmupFunctionOptions): void; + +/** + * Registers a SQL function in your app that will be triggered when a row is created, updated, or deleted + * @param name The name of the function. The name must be unique within your app and will mostly be used for your own tracking purposes + * @param options Configuration options describing the inputs, outputs, and handler for this function + */ +export function sql(name: string, options: SqlFunctionOptions): void; + +/** + * Registers a MySql function in your app that will be triggered when a row is created or updated + * @param name The name of the function. The name must be unique within your app and will mostly be used for your own tracking purposes + * @param options Configuration options describing the inputs, outputs, and handler for this function + */ +export function mySql(name: string, options: MySqlFunctionOptions): void; + +/** + * Registers a generic function in your app that will be triggered based on the type specified in `options.trigger.type` + * Use this method if your desired trigger type does not already have its own method + * @param name The name of the function. The name must be unique within your app and will mostly be used for your own tracking purposes + * @param options Configuration options describing the inputs, outputs, and handler for this function + */ +export function generic(name: string, options: GenericFunctionOptions): void; + +/** + * Registers a WebPubSub function in your app that will be triggered by WebPubSub events + * @param name The name of the function. The name must be unique within your app and will mostly be used for your own tracking purposes + * @param options Configuration options describing the inputs, outputs, and handler for this function + */ +export function webPubSub(name: string, options: WebPubSubFunctionOptions): void; + +export function mcpTool(name: string, options: McpToolFunctionOptions): void; + +export * as hook from './hooks/registerHook'; diff --git a/node_modules/@azure/functions/types/cosmosDB.d.ts b/node_modules/@azure/functions/types/cosmosDB.d.ts new file mode 100644 index 00000000..8aecad9c --- /dev/null +++ b/node_modules/@azure/functions/types/cosmosDB.d.ts @@ -0,0 +1,36 @@ +// Copyright (c) .NET Foundation. All rights reserved. +// Licensed under the MIT License. 
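The registration surface declared in app.d.ts is easiest to see with a short sketch (function names, route, and the NCRONTAB schedule are illustrative; this assumes the v4 programming model):

```ts
import { app, HttpResponseInit, InvocationContext, Timer } from '@azure/functions';

// HTTP: app.get is shorthand for app.http with methods restricted to 'GET'
app.get('ping', async (): Promise<HttpResponseInit> => {
    return { body: 'pong' };
});

// Timer: NCRONTAB expression, here every day at 02:00
app.timer('nightlyCleanup', {
    schedule: '0 0 2 * * *',
    handler: (timer: Timer, context: InvocationContext) => {
        context.log(`Timer fired, past due: ${timer.isPastDue}`);
    },
});
```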
+
+import {
+    CosmosDBv3FunctionOptions,
+    CosmosDBv3Handler,
+    CosmosDBv3Input,
+    CosmosDBv3InputOptions,
+    CosmosDBv3Output,
+    CosmosDBv3OutputOptions,
+    CosmosDBv3Trigger,
+    CosmosDBv3TriggerOptions,
+} from './cosmosDB.v3';
+import {
+    CosmosDBv4FunctionOptions,
+    CosmosDBv4Handler,
+    CosmosDBv4Input,
+    CosmosDBv4InputOptions,
+    CosmosDBv4Output,
+    CosmosDBv4OutputOptions,
+    CosmosDBv4Trigger,
+    CosmosDBv4TriggerOptions,
+} from './cosmosDB.v4';
+
+export type CosmosDBHandler = CosmosDBv3Handler | CosmosDBv4Handler;
+
+export type CosmosDBFunctionOptions = CosmosDBv3FunctionOptions | CosmosDBv4FunctionOptions;
+
+export type CosmosDBInputOptions = CosmosDBv3InputOptions | CosmosDBv4InputOptions;
+export type CosmosDBInput = CosmosDBv3Input | CosmosDBv4Input;
+
+export type CosmosDBTriggerOptions = CosmosDBv3TriggerOptions | CosmosDBv4TriggerOptions;
+export type CosmosDBTrigger = CosmosDBv3Trigger | CosmosDBv4Trigger;
+
+export type CosmosDBOutputOptions = CosmosDBv3OutputOptions | CosmosDBv4OutputOptions;
+export type CosmosDBOutput = CosmosDBv3Output | CosmosDBv4Output;
diff --git a/node_modules/@azure/functions/types/cosmosDB.v3.d.ts b/node_modules/@azure/functions/types/cosmosDB.v3.d.ts
new file mode 100644
index 00000000..4fec1f16
--- /dev/null
+++ b/node_modules/@azure/functions/types/cosmosDB.v3.d.ts
@@ -0,0 +1,216 @@
+// Copyright (c) .NET Foundation. All rights reserved.
+// Licensed under the MIT License.
+
+import { FunctionInput, FunctionOptions, FunctionOutput, FunctionResult, FunctionTrigger, RetryOptions } from './index';
+import { InvocationContext } from './InvocationContext';
+
+export type CosmosDBv3Handler = (documents: unknown[], context: InvocationContext) => FunctionResult;
+
+export interface CosmosDBv3FunctionOptions extends CosmosDBv3TriggerOptions, Partial<FunctionOptions> {
+    handler: CosmosDBv3Handler;
+
+    trigger?: CosmosDBv3Trigger;
+
+    /**
+     * An optional retry policy to rerun a failed execution until either successful completion occurs or the maximum number of retries is reached.
+     * Learn more [here](https://learn.microsoft.com/azure/azure-functions/functions-bindings-error-pages)
+     */
+    retry?: RetryOptions;
+}
+
+export interface CosmosDBv3InputOptions {
+    /**
+     * An app setting (or environment variable) with the Cosmos DB connection string
+     */
+    connectionStringSetting: string;
+
+    /**
+     * The name of the Azure Cosmos DB database with the collection being monitored
+     */
+    databaseName: string;
+
+    /**
+     * The name of the collection being monitored
+     */
+    collectionName: string;
+
+    /**
+     * Specifies the partition key value for the lookup. May include binding parameters. It is required for lookups in partitioned collections
+     */
+    partitionKey?: string;
+
+    /**
+     * The ID of the document to retrieve. This property supports [binding expressions](https://docs.microsoft.com/azure/azure-functions/functions-bindings-expressions-patterns).
+     * Don't set both the id and sqlQuery properties. If you don't set either one, the entire collection is retrieved.
+     */
+    id?: string;
+
+    /**
+     * An Azure Cosmos DB SQL query used for retrieving multiple documents. The property supports runtime bindings, as in this example:
+     * `SELECT * FROM c where c.departmentId = {departmentId}`
+     * Don't set both the id and sqlQuery properties. If you don't set either one, the entire collection is retrieved.
+     */
+    sqlQuery?: string;
+
+    /**
+     * Defines preferred locations (regions) for geo-replicated database accounts in the Azure Cosmos DB service.
+     * Values should be comma-separated.
For example, East US,South Central US,North Europe + */ + preferredLocations?: string; +} +export type CosmosDBv3Input = FunctionInput & CosmosDBv3InputOptions; + +export interface CosmosDBv3TriggerOptions { + /** + * An app setting (or environment variable) with the Cosmos DB connection string + */ + connectionStringSetting: string; + + /** + * The name of the Azure Cosmos DB database with the collection being monitored + */ + databaseName: string; + + /** + * The name of the collection being monitored + */ + collectionName: string; + + /** + * The name of an app setting that contains the connection string to the service which holds the lease collection. + * If not set it will connect to the service defined by `connectionStringSetting` + */ + leaseConnectionStringSetting?: string; + + /** + * The name of the database that holds the collection to store leases. If not set, it will use the value of `databaseName` + */ + leaseDatabaseName?: string; + + /** + * The name of the collection to store leases. If not set, it will use "leases" + */ + leaseCollectionName?: string; + + /** + * Checks for existence and automatically creates the leases collection. Default is `false` + */ + createLeaseCollectionIfNotExists?: boolean; + + /** + * When `createLeaseCollectionIfNotExists` is set to `true`, defines the amount of Request Units to assign to the created lease collection + */ + leaseCollectionThroughput?: number; + + /** + * When set, the value is added as a prefix to the leases created in the Lease collection for this function. + * Using a prefix allows two separate Azure Functions to share the same Lease collection by using different prefixes. + */ + leaseCollectionPrefix?: string; + + /** + * The time (in milliseconds) for the delay between polling a partition for new changes on the feed, after all current changes are drained. + * Default is 5,000 milliseconds, or 5 seconds. + */ + feedPollDelay?: number; + + /** + * When set, it defines, in milliseconds, the interval to kick off a task to compute if partitions are distributed evenly among known host instances. + * Default is 13000 (13 seconds). + */ + leaseAcquireInterval?: number; + + /** + * When set, it defines, in milliseconds, the interval for which the lease is taken on a lease representing a partition. + * If the lease is not renewed within this interval, it will cause it to expire and ownership of the partition will move to another instance. + * Default is 60000 (60 seconds). + */ + leaseExpirationInterval?: number; + + /** + * When set, it defines, in milliseconds, the renew interval for all leases for partitions currently held by an instance. + * Default is 17000 (17 seconds). + */ + leaseRenewInterval?: number; + + /** + * When set, it defines, in milliseconds, the interval between lease checkpoints. Default is always after each Function call. + */ + checkpointInterval?: number; + + /** + * Customizes the amount of documents between lease checkpoints. Default is after every function call. + */ + checkpointDocumentCount?: number; + + /** + * When set, this property sets the maximum number of items received per Function call. + * If operations in the monitored container are performed through stored procedures, transaction scope is preserved when reading items from the change feed. + * As a result, the number of items received could be higher than the specified value so that the items changed by the same transaction are returned as part of one atomic batch. 
+     */
+    maxItemsPerInvocation?: number;
+
+    /**
+     * This option tells the Trigger to read changes from the beginning of the container's change history instead of starting at the current time.
+     * Reading from the beginning only works the first time the trigger starts, as in subsequent runs, the checkpoints are already stored.
+     * Setting this option to true when there are leases already created has no effect.
+     */
+    startFromBeginning?: boolean;
+
+    /**
+     * Defines preferred locations (regions) for geo-replicated database accounts in the Azure Cosmos DB service.
+     * Values should be comma-separated. For example, East US,South Central US,North Europe
+     */
+    preferredLocations?: string;
+
+    /**
+     * Enables multi-region accounts for writing to the leases collection.
+     */
+    useMultipleWriteLocations?: boolean;
+}
+export type CosmosDBv3Trigger = FunctionTrigger & CosmosDBv3TriggerOptions;
+
+export interface CosmosDBv3OutputOptions {
+    /**
+     * An app setting (or environment variable) with the Cosmos DB connection string
+     */
+    connectionStringSetting: string;
+
+    /**
+     * The name of the Azure Cosmos DB database with the collection being monitored
+     */
+    databaseName: string;
+
+    /**
+     * The name of the collection being monitored
+     */
+    collectionName: string;
+
+    /**
+     * A boolean value to indicate whether the collection is created when it doesn't exist.
+     * The default is false because new collections are created with reserved throughput, which has cost implications. For more information, see the [pricing page](https://azure.microsoft.com/pricing/details/cosmos-db/).
+     */
+    createIfNotExists?: boolean;
+
+    /**
+     * When `createIfNotExists` is true, it defines the partition key path for the created collection. May include binding parameters.
+     */
+    partitionKey?: string;
+
+    /**
+     * When createIfNotExists is true, it defines the [throughput](https://docs.microsoft.com/azure/cosmos-db/set-throughput) of the created collection
+     */
+    collectionThroughput?: number;
+
+    /**
+     * Defines preferred locations (regions) for geo-replicated database accounts in the Azure Cosmos DB service.
+     * Values should be comma-separated. For example, East US,South Central US,North Europe
+     */
+    preferredLocations?: string;
+
+    /**
+     * When set to true along with preferredLocations, supports multi-region writes in the Azure Cosmos DB service.
+     */
+    useMultipleWriteLocations?: boolean;
+}
+export type CosmosDBv3Output = FunctionOutput & CosmosDBv3OutputOptions;
diff --git a/node_modules/@azure/functions/types/cosmosDB.v4.d.ts b/node_modules/@azure/functions/types/cosmosDB.v4.d.ts
new file mode 100644
index 00000000..f62162c5
--- /dev/null
+++ b/node_modules/@azure/functions/types/cosmosDB.v4.d.ts
@@ -0,0 +1,203 @@
+// Copyright (c) .NET Foundation. All rights reserved.
+// Licensed under the MIT License.
+
+import { FunctionInput, FunctionOptions, FunctionOutput, FunctionResult, FunctionTrigger, RetryOptions } from './index';
+import { InvocationContext } from './InvocationContext';
+
+export type CosmosDBv4Handler = (documents: unknown[], context: InvocationContext) => FunctionResult;
+
+export interface CosmosDBv4FunctionOptions extends CosmosDBv4TriggerOptions, Partial<FunctionOptions> {
+    handler: CosmosDBv4Handler;
+
+    trigger?: CosmosDBv4Trigger;
+
+    /**
+     * An optional retry policy to rerun a failed execution until either successful completion occurs or the maximum number of retries is reached.
+ * Learn more [here](https://learn.microsoft.com/azure/azure-functions/functions-bindings-error-pages) + */ + retry?: RetryOptions; +} + +export interface CosmosDBv4InputOptions { + /** + * An app setting (or environment variable) with the Cosmos DB connection string + */ + connection: string; + + /** + * The name of the Azure Cosmos DB database with the container being monitored + */ + databaseName: string; + + /** + * The name of the container being monitored + */ + containerName: string; + + /** + * Specifies the partition key value for the lookup. May include binding parameters. It is required for lookups in partitioned containers + */ + partitionKey?: string; + + /** + * The ID of the document to retrieve. This property supports [binding expressions](https://docs.microsoft.com/azure/azure-functions/functions-bindings-expressions-patterns). + * Don't set both the id and sqlQuery properties. If you don't set either one, the entire container is retrieved. + */ + id?: string; + + /** + * An Azure Cosmos DB SQL query used for retrieving multiple documents. The property supports runtime bindings, as in this example: + * `SELECT * FROM c where c.departmentId = {departmentId}` + * Don't set both the id and sqlQuery properties. If you don't set either one, the entire container is retrieved. + */ + sqlQuery?: string; + + /** + * Defines preferred locations (regions) for geo-replicated database accounts in the Azure Cosmos DB service. + * Values should be comma-separated. For example, East US,South Central US,North Europe + */ + preferredLocations?: string; +} +export type CosmosDBv4Input = FunctionInput & CosmosDBv4InputOptions; + +export interface CosmosDBv4TriggerOptions { + /** + * An app setting (or environment variable) with the Cosmos DB connection string + */ + connection: string; + + /** + * The name of the Azure Cosmos DB database with the container being monitored + */ + databaseName: string; + + /** + * The name of the container being monitored + */ + containerName: string; + + /** + * The name of an app setting that contains the connection string to the service which holds the lease container. + * If not set it will connect to the service defined by `connection` + */ + leaseConnection?: string; + + /** + * The name of the database that holds the container to store leases. If not set, it will use the value of `databaseName` + */ + leaseDatabaseName?: string; + + /** + * The name of the container to store leases. If not set, it will use "leases" + */ + leaseContainerName?: string; + + /** + * Checks for existence and automatically creates the leases container. Default is `false` + */ + createLeaseContainerIfNotExists?: boolean; + + /** + * When `createLeaseContainerIfNotExists` is set to `true`, defines the amount of Request Units to assign to the created lease container + */ + leasesContainerThroughput?: number; + + /** + * When set, the value is added as a prefix to the leases created in the Lease container for this function. + * Using a prefix allows two separate Azure Functions to share the same Lease container by using different prefixes. + */ + leaseContainerPrefix?: string; + + /** + * The time (in milliseconds) for the delay between polling a partition for new changes on the feed, after all current changes are drained. + * Default is 5,000 milliseconds, or 5 seconds. + */ + feedPollDelay?: number; + + /** + * When set, it defines, in milliseconds, the interval to kick off a task to compute if partitions are distributed evenly among known host instances. 
+ * Default is 13000 (13 seconds). + */ + leaseAcquireInterval?: number; + + /** + * When set, it defines, in milliseconds, the interval for which the lease is taken on a lease representing a partition. + * If the lease is not renewed within this interval, it will cause it to expire and ownership of the partition will move to another instance. + * Default is 60000 (60 seconds). + */ + leaseExpirationInterval?: number; + + /** + * When set, it defines, in milliseconds, the renew interval for all leases for partitions currently held by an instance. + * Default is 17000 (17 seconds). + */ + leaseRenewInterval?: number; + + /** + * When set, this property sets the maximum number of items received per Function call. + * If operations in the monitored container are performed through stored procedures, transaction scope is preserved when reading items from the change feed. + * As a result, the number of items received could be higher than the specified value so that the items changed by the same transaction are returned as part of one atomic batch. + */ + maxItemsPerInvocation?: number; + + /** + * This option tells the Trigger to read changes from the beginning of the container's change history instead of starting at the current time. + * Reading from the beginning only works the first time the trigger starts, as in subsequent runs, the checkpoints are already stored. + * Setting this option to true when there are leases already created has no effect. + */ + startFromBeginning?: boolean; + + /** + * Gets or sets the date and time from which to initialize the change feed read operation. + * The recommended format is ISO 8601 with the UTC designator, such as 2021-02-16T14:19:29Z. + * This is only used to set the initial trigger state. After the trigger has a lease state, changing this value has no effect. + */ + startFromTime?: string; + + /** + * Defines preferred locations (regions) for geo-replicated database accounts in the Azure Cosmos DB service. + * Values should be comma-separated. For example, East US,South Central US,North Europe + */ + preferredLocations?: string; +} +export type CosmosDBv4Trigger = FunctionTrigger & CosmosDBv4TriggerOptions; + +export interface CosmosDBv4OutputOptions { + /** + * An app setting (or environment variable) with the Cosmos DB connection string + */ + connection: string; + + /** + * The name of the Azure Cosmos DB database with the collection being monitored + */ + databaseName: string; + + /** + * The name of the collection being monitored + */ + containerName: string; + + /** + * A boolean value to indicate whether the collection is created when it doesn't exist. + * The default is false because new collections are created with reserved throughput, which has cost implications. For more information, see the [pricing page](https://azure.microsoft.com/pricing/details/cosmos-db/). + */ + createIfNotExists?: boolean; + + /** + * When `createIfNotExists` is true, it defines the partition key path for the created collection. May include binding parameters. + */ + partitionKey?: string; + + /** + * When createIfNotExists is true, it defines the [throughput](https://docs.microsoft.com/azure/cosmos-db/set-throughput) of the created collection + */ + containerThroughput?: number; + + /** + * Defines preferred locations (regions) for geo-replicated database accounts in the Azure Cosmos DB service. + * Values should be comma-separated. 
For example, East US,South Central US,North Europe
+     */
+    preferredLocations?: string;
+}
+export type CosmosDBv4Output = FunctionOutput & CosmosDBv4OutputOptions;
diff --git a/node_modules/@azure/functions/types/eventGrid.d.ts b/node_modules/@azure/functions/types/eventGrid.d.ts
new file mode 100644
index 00000000..039bc62e
--- /dev/null
+++ b/node_modules/@azure/functions/types/eventGrid.d.ts
@@ -0,0 +1,109 @@
+// Copyright (c) .NET Foundation. All rights reserved.
+// Licensed under the MIT License.
+
+import { FunctionOptions, FunctionOutput, FunctionResult, FunctionTrigger } from './index';
+import { InvocationContext } from './InvocationContext';
+
+export type EventGridHandler = (event: EventGridEvent, context: InvocationContext) => FunctionResult;
+
+export interface EventGridFunctionOptions extends EventGridTriggerOptions, Partial<FunctionOptions> {
+    handler: EventGridHandler;
+
+    trigger?: EventGridTrigger;
+}
+
+/**
+ * At this point in time there are no event grid trigger-specific options
+ */
+export interface EventGridTriggerOptions {}
+export type EventGridTrigger = FunctionTrigger & EventGridTriggerOptions;
+
+export interface EventGridOutputKeyOptions {
+    /**
+     * An app setting (or environment variable) that contains the URI for the custom topic
+     */
+    topicEndpointUri: string;
+
+    /**
+     * An app setting (or environment variable) that contains an access key for the custom topic
+     */
+    topicKeySetting: string;
+}
+export interface EventGridOutputConnectionOptions {
+    /**
+     * The value of the common prefix for the app setting that contains the `topicEndpointUri`.
+     * When setting the `connection` property, the `topicEndpointUri` and `topicKeySetting` properties should NOT be set.
+     */
+    connection: string;
+}
+export type EventGridOutputOptions = EventGridOutputKeyOptions | EventGridOutputConnectionOptions;
+export type EventGridOutput = FunctionOutput & EventGridOutputOptions;
+
+/**
+ * [Link to docs and examples](https://docs.microsoft.com/azure/event-grid/event-schema)
+ * This "partial" interface is meant to be used when creating an event yourself and allows some properties to be left out
+ */
+export interface EventGridPartialEvent {
+    /**
+     * Full resource path to the event source. This field isn't writeable. Event Grid provides this value
+     * If included, must match the Event Grid topic Azure Resource Manager ID exactly. If not included, Event Grid will stamp onto the event.
+     */
+    topic?: string;
+
+    /**
+     * Publisher-defined path to the event subject
+     */
+    subject: string;
+
+    /**
+     * One of the registered event types for this event source
+     */
+    eventType: string;
+
+    /**
+     * The time the event is generated based on the provider's UTC time
+     */
+    eventTime: string;
+
+    /**
+     * Unique identifier for the event
+     */
+    id: string;
+
+    /**
+     * Event data specific to the resource provider
+     */
+    data?: Record<string, unknown>;
+
+    /**
+     * The schema version of the data object. The publisher defines the schema version.
+     * If not included, will be stamped with an empty value
+     */
+    dataVersion?: string;
+
+    /**
+     * The schema version of the event metadata. Event Grid defines the schema of the top-level properties. Event Grid provides this value.
+     * If included, must match the Event Grid Schema `metadataVersion` exactly (currently, only 1). If not included, Event Grid will stamp onto the event.
+     */
+    metadataVersion?: string;
+}
+
+/**
+ * [Link to docs and examples](https://docs.microsoft.com/azure/event-grid/event-schema)
+ */
+export interface EventGridEvent extends EventGridPartialEvent {
+    /**
+     * Full resource path to the event source. This field isn't writeable. Event Grid provides this value
+     */
+    topic: string;
+
+    /**
+     * The schema version of the data object. The publisher defines the schema version.
+     */
+    dataVersion: string;
+
+    /**
+     * The schema version of the event metadata. Event Grid defines the schema of the top-level properties. Event Grid provides this value.
+     */
+    metadataVersion: string;
+}
diff --git a/node_modules/@azure/functions/types/eventHub.d.ts b/node_modules/@azure/functions/types/eventHub.d.ts
new file mode 100644
index 00000000..2e1ddcb7
--- /dev/null
+++ b/node_modules/@azure/functions/types/eventHub.d.ts
@@ -0,0 +1,55 @@
+// Copyright (c) .NET Foundation. All rights reserved.
+// Licensed under the MIT License.
+
+import { FunctionOptions, FunctionOutput, FunctionResult, FunctionTrigger, RetryOptions } from './index';
+import { InvocationContext } from './InvocationContext';
+
+export type EventHubHandler = (messages: unknown, context: InvocationContext) => FunctionResult;
+
+export interface EventHubFunctionOptions extends EventHubTriggerOptions, Partial<FunctionOptions> {
+    handler: EventHubHandler;
+
+    trigger?: EventHubTrigger;
+
+    /**
+     * An optional retry policy to rerun a failed execution until either successful completion occurs or the maximum number of retries is reached.
+     * Learn more [here](https://learn.microsoft.com/azure/azure-functions/functions-bindings-error-pages)
+     */
+    retry?: RetryOptions;
+}
+
+export interface EventHubTriggerOptions {
+    /**
+     * An app setting (or environment variable) with the event hub connection string
+     */
+    connection: string;
+
+    /**
+     * The name of the event hub. When the event hub name is also present in the connection string, that value overrides this property at runtime.
+     */
+    eventHubName: string;
+
+    /**
+     * Set to `many` in order to enable batching. If omitted or set to `one`, a single message is passed to the function.
+     */
+    cardinality?: 'many' | 'one';
+
+    /**
+     * An optional property that sets the [consumer group](https://docs.microsoft.com/azure/event-hubs/event-hubs-features#event-consumers) used to subscribe to events in the hub. If omitted, the `$Default` consumer group is used.
+     */
+    consumerGroup?: string;
+}
+export type EventHubTrigger = FunctionTrigger & EventHubTriggerOptions;
+
+export interface EventHubOutputOptions {
+    /**
+     * An app setting (or environment variable) with the event hub connection string
+     */
+    connection: string;
+
+    /**
+     * The name of the event hub. When the event hub name is also present in the connection string, that value overrides this property at runtime.
+     */
+    eventHubName: string;
+}
+export type EventHubOutput = FunctionOutput & EventHubOutputOptions;
diff --git a/node_modules/@azure/functions/types/generic.d.ts b/node_modules/@azure/functions/types/generic.d.ts
new file mode 100644
index 00000000..faa1b420
--- /dev/null
+++ b/node_modules/@azure/functions/types/generic.d.ts
@@ -0,0 +1,24 @@
+// Copyright (c) .NET Foundation. All rights reserved.
+// Licensed under the MIT License.
+
+import { FunctionOptions, RetryOptions } from './index';
+
+export interface GenericFunctionOptions extends FunctionOptions {
+    /**
+     * An optional retry policy to rerun a failed execution until either successful completion occurs or the maximum number of retries is reached.
+     * Learn more [here](https://learn.microsoft.com/azure/azure-functions/functions-bindings-error-pages)
+     */
+    retry?: RetryOptions;
+}
+
+export interface GenericTriggerOptions extends Record<string, unknown> {
+    type: string;
+}
+
+export interface GenericInputOptions extends Record<string, unknown> {
+    type: string;
+}
+
+export interface GenericOutputOptions extends Record<string, unknown> {
+    type: string;
+}
diff --git a/node_modules/@azure/functions/types/hooks/HookContext.d.ts b/node_modules/@azure/functions/types/hooks/HookContext.d.ts
new file mode 100644
index 00000000..9ef8e664
--- /dev/null
+++ b/node_modules/@azure/functions/types/hooks/HookContext.d.ts
@@ -0,0 +1,27 @@
+// Copyright (c) .NET Foundation. All rights reserved.
+// Licensed under the MIT License.
+
+/**
+ * Base class for all hook context objects
+ */
+export declare class HookContext {
+    /**
+     * For testing purposes only. This will always be constructed for you when run in the context of the Azure Functions runtime
+     */
+    constructor(init?: HookContextInit);
+
+    /**
+     * The recommended place to store and share data between hooks in the same scope (app-level vs invocation-level).
+     * You should use a unique property name so that it doesn't conflict with other hooks' data.
+     * This object is readonly. You may modify it, but attempting to overwrite it will throw an error
+     */
+    readonly hookData: Record<string, unknown>;
+}
+
+/**
+ * Base interface for objects passed to HookContext constructors.
+ * For testing purposes only.
+ */
+export interface HookContextInit {
+    hookData?: Record<string, unknown>;
+}
diff --git a/node_modules/@azure/functions/types/hooks/appHooks.d.ts b/node_modules/@azure/functions/types/hooks/appHooks.d.ts
new file mode 100644
index 00000000..a4e51857
--- /dev/null
+++ b/node_modules/@azure/functions/types/hooks/appHooks.d.ts
@@ -0,0 +1,46 @@
+// Copyright (c) .NET Foundation. All rights reserved.
+// Licensed under the MIT License.
+
+import { HookContext, HookContextInit } from './HookContext';
+
+/**
+ * Handler for app start hooks
+ */
+export type AppStartHandler = (context: AppStartContext) => void | Promise<void>;
+
+/**
+ * Context on a function app during app startup.
+ */
+export declare class AppStartContext extends HookContext {
+    /**
+     * For testing purposes only. This will always be constructed for you when run in the context of the Azure Functions runtime
+     */
+    constructor(init?: AppStartContextInit);
+}
+
+/**
+ * Handler for app terminate hooks
+ */
+export type AppTerminateHandler = (context: AppTerminateContext) => void | Promise<void>;
+
+/**
+ * Context on a function app during app termination.
+ */
+export declare class AppTerminateContext extends HookContext {
+    /**
+     * For testing purposes only. This will always be constructed for you when run in the context of the Azure Functions runtime
+     */
+    constructor(init?: AppTerminateContextInit);
+}
+
+/**
+ * Object passed to AppStartContext constructors.
+ * For testing purposes only
+ */
+export interface AppStartContextInit extends HookContextInit {}
+
+/**
+ * Object passed to AppTerminateContext constructors.
+ * For testing purposes only
+ */
+export interface AppTerminateContextInit extends HookContextInit {}
diff --git a/node_modules/@azure/functions/types/hooks/invocationHooks.d.ts b/node_modules/@azure/functions/types/hooks/invocationHooks.d.ts
new file mode 100644
index 00000000..03aafd3d
--- /dev/null
+++ b/node_modules/@azure/functions/types/hooks/invocationHooks.d.ts
@@ -0,0 +1,106 @@
+// Copyright (c) .NET Foundation. All rights reserved.
+// Licensed under the MIT License.
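A small sketch of the app-level hooks above, registered via `app.hook` (the `myPluginStartedAt` key is an arbitrary example; `hookData` is shared between hooks in the same scope, so a unique key avoids clashes):

```ts
import { app } from '@azure/functions';

app.hook.appStart((context) => {
    // Use a unique key so other hooks' data is not clobbered
    context.hookData.myPluginStartedAt = Date.now();
});

app.hook.appTerminate((context) => {
    const startedAt = context.hookData.myPluginStartedAt as number;
    console.log(`App lived for ${Date.now() - startedAt} ms`);
});
```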
+
+import { FunctionHandler } from '../index';
+import { InvocationContext } from '../InvocationContext';
+import { HookContext, HookContextInit } from './HookContext';
+
+/**
+ * Handler for pre-invocation hooks.
+ */
+export type PreInvocationHandler = (context: PreInvocationContext) => void | Promise<void>;
+
+/**
+ * Context on a function before it executes.
+ */
+export declare class PreInvocationContext extends InvocationHookContext {
+    /**
+     * For testing purposes only. This will always be constructed for you when run in the context of the Azure Functions runtime
+     */
+    constructor(init?: PreInvocationContextInit);
+
+    /**
+     * The arguments passed to this specific invocation.
+     * Changes to this array _will_ affect the inputs passed to your function
+     */
+    inputs: unknown[];
+
+    /**
+     * The function handler for this specific invocation. Changes to this value _will_ affect the function itself
+     */
+    functionHandler: FunctionHandler;
+}
+
+/**
+ * Handler for post-invocation hooks
+ */
+export type PostInvocationHandler = (context: PostInvocationContext) => void | Promise<void>;
+
+/**
+ * Context on a function after it executes.
+ */
+export declare class PostInvocationContext extends InvocationHookContext {
+    /**
+     * For testing purposes only. This will always be constructed for you when run in the context of the Azure Functions runtime
+     */
+    constructor(init?: PostInvocationContextInit);
+
+    /**
+     * The arguments passed to this specific invocation.
+     */
+    inputs: unknown[];
+
+    /**
+     * The result of the function. Changes to this value _will_ affect the overall result of the function
+     */
+    result: unknown;
+
+    /**
+     * The error thrown by the function, or null/undefined if there is no error. Changes to this value _will_ affect the overall result of the function
+     */
+    error: unknown;
+}
+
+/**
+ * Base class for all invocation hook context objects
+ */
+export declare class InvocationHookContext extends HookContext {
+    /**
+     * For testing purposes only. This will always be constructed for you when run in the context of the Azure Functions runtime
+     */
+    constructor(init?: InvocationHookContextInit);
+
+    /**
+     * The context object passed to the function.
+     * This object is readonly. You may modify it, but attempting to overwrite it will throw an error
+     */
+    readonly invocationContext: InvocationContext;
+}
+
+/**
+ * Object passed to InvocationHookContext constructors.
+ * For testing purposes only
+ */
+export interface InvocationHookContextInit extends HookContextInit {
+    inputs?: unknown[];
+
+    invocationContext?: InvocationContext;
+}
+
+/**
+ * Object passed to PreInvocationContext constructors.
+ * For testing purposes only
+ */
+export interface PreInvocationContextInit extends InvocationHookContextInit {
+    functionCallback?: FunctionHandler;
+}
+
+/**
+ * Object passed to PostInvocationContext constructors.
+ * For testing purposes only
+ */
+export interface PostInvocationContextInit extends InvocationHookContextInit {
+    result?: unknown;
+
+    error?: unknown;
+}
diff --git a/node_modules/@azure/functions/types/hooks/logHooks.d.ts b/node_modules/@azure/functions/types/hooks/logHooks.d.ts
new file mode 100644
index 00000000..a223c598
--- /dev/null
+++ b/node_modules/@azure/functions/types/hooks/logHooks.d.ts
@@ -0,0 +1,58 @@
+// Copyright (c) .NET Foundation. All rights reserved.
+// Licensed under the MIT License.
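And a sketch pairing the pre/post invocation hooks declared above to time each invocation (the `start` key is illustrative; invocation-level `hookData` is shared between the hooks of a single invocation):

```ts
import { app } from '@azure/functions';

app.hook.preInvocation((context) => {
    context.hookData.start = Date.now();
});

app.hook.postInvocation((context) => {
    const elapsed = Date.now() - (context.hookData.start as number);
    context.invocationContext.log(`Handler finished in ${elapsed} ms`);
});
```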
+ +import { LogLevel } from '../index'; +import { InvocationContext } from '../InvocationContext'; +import { HookContext, HookContextInit } from './HookContext'; + +/** + * Handler for log hooks. + */ +export type LogHookHandler = (context: LogHookContext) => void; + +/** + * Context on a log + */ +export declare class LogHookContext extends HookContext { + /** + * For testing purposes only. This will always be constructed for you when run in the context of the Azure Functions runtime + */ + constructor(init?: LogHookContextInit); + + /** + * If the log occurs during a function execution, the context object passed to the function handler. + * Otherwise, undefined. + */ + readonly invocationContext: InvocationContext | undefined; + + /** + * 'system' if the log is generated by Azure Functions, 'user' if the log is generated by your own app. + */ + readonly category: LogCategory; + + /** + * Changes to this value _will_ affect the resulting log, but only for user-generated logs. + */ + level: LogLevel; + + /** + * Changes to this value _will_ affect the resulting log, but only for user-generated logs. + */ + message: string; +} + +/** + * Object passed to LogHookContext constructors. + * For testing purposes only + */ +export interface LogHookContextInit extends HookContextInit { + invocationContext?: InvocationContext; + + level?: LogLevel; + + category?: LogCategory; + + message?: string; +} + +export type LogCategory = 'user' | 'system' | 'customMetric'; diff --git a/node_modules/@azure/functions/types/hooks/registerHook.d.ts b/node_modules/@azure/functions/types/hooks/registerHook.d.ts new file mode 100644 index 00000000..c736687b --- /dev/null +++ b/node_modules/@azure/functions/types/hooks/registerHook.d.ts @@ -0,0 +1,50 @@ +// Copyright (c) .NET Foundation. All rights reserved. +// Licensed under the MIT License. + +import { Disposable } from '../index'; +import { AppStartHandler, AppTerminateHandler } from './appHooks'; +import { PostInvocationHandler, PreInvocationHandler } from './invocationHooks'; +import { LogHookHandler } from './logHooks'; + +/** + * Register a hook to be run at the start of your application + * + * @param handler the handler for the hook + * @returns a `Disposable` object that can be used to unregister the hook + */ +export function appStart(handler: AppStartHandler): Disposable; + +/** + * Register a hook to be run during graceful shutdown of your application. + * This hook will not be executed if your application is terminated forcefully. + * Hooks have a limited time to execute during the termination grace period. + * + * @param handler the handler for the hook + * @returns a `Disposable` object that can be used to unregister the hook + */ +export function appTerminate(handler: AppTerminateHandler): Disposable; + +/** + * Register a hook to be run before a function is invoked. + * + * @param handler the handler for the hook + * @returns a `Disposable` object that can be used to unregister the hook + */ +export function preInvocation(handler: PreInvocationHandler): Disposable; + +/** + * Register a hook to be run after a function is invoked. + * + * @param handler the handler for the hook + * @returns a `Disposable` object that can be used to unregister the hook + */ +export function postInvocation(handler: PostInvocationHandler): Disposable; + +/** + * PREVIEW: Register a hook to be run for each log. + * This functionality requires Azure Functions Host v4.34+. 
+ *
+ * @param handler the handler for the hook
+ * @returns a `Disposable` object that can be used to unregister the hook
+ */
+export function log(handler: LogHookHandler): Disposable;
diff --git a/node_modules/@azure/functions/types/http.d.ts b/node_modules/@azure/functions/types/http.d.ts
new file mode 100644
index 00000000..e918ef1f
--- /dev/null
+++ b/node_modules/@azure/functions/types/http.d.ts
@@ -0,0 +1,386 @@
+// Copyright (c) .NET Foundation. All rights reserved.
+// Licensed under the MIT License.
+
+import { Blob } from 'buffer';
+import { ReadableStream } from 'stream/web';
+import { BodyInit, FormData, Headers, HeadersInit } from 'undici';
+import { URLSearchParams } from 'url';
+import { FunctionOptions, FunctionOutput, FunctionResult, FunctionTrigger } from './index';
+import { InvocationContext } from './InvocationContext';
+
+export type HttpHandler = (
+ request: HttpRequest,
+ context: InvocationContext
+) => FunctionResult<HttpResponseInit | HttpResponse>;
+
+export interface HttpFunctionOptions extends HttpTriggerOptions, Partial<FunctionOptions> {
+ handler: HttpHandler;
+
+ trigger?: HttpTrigger;
+
+ /**
+ * Configuration for the optional primary output of the function. If not set, this will default to a standard http response output
+ * This is the main output that you should set as the return value of the function handler during invocation
+ */
+ return?: FunctionOutput;
+}
+
+export type HttpMethodFunctionOptions = Omit<HttpFunctionOptions, 'methods'>;
+
+export interface HttpTriggerOptions {
+ /**
+ * The function HTTP authorization level
+ * Defaults to 'anonymous' if not specified
+ */
+ authLevel?: 'anonymous' | 'function' | 'admin';
+
+ /**
+ * An array of the http methods for this http input
+ * Defaults to ["get", "post"] if not specified
+ */
+ methods?: HttpMethod[];
+
+ /**
+ * The route for this http input. If not specified, the function name will be used
+ */
+ route?: string;
+}
+
+export interface HttpTrigger extends FunctionTrigger {
+ /**
+ * The function HTTP authorization level.
+ */
+ authLevel: 'anonymous' | 'function' | 'admin';
+
+ /**
+ * An array of the http methods for this http input
+ */
+ methods: HttpMethod[];
+
+ /**
+ * The route for this http input. If not specified, the function name will be used
+ */
+ route?: string;
+}
+
+/**
+ * At this point in time there are no http output specific options
+ */
+export interface HttpOutputOptions {}
+
+export type HttpOutput = FunctionOutput & HttpOutputOptions;
+
+/**
+ * HTTP request object. Provided to your function when using HTTP Bindings.
+ */
+export declare class HttpRequest {
+ /**
+ * For testing purposes only. This will always be constructed for you when run in the context of the Azure Functions runtime
+ */
+ constructor(httpRequestInit: HttpRequestInit);
+
+ /**
+ * HTTP request method used to invoke this function.
+ */
+ readonly method: string;
+
+ /**
+ * Request URL.
+ */
+ readonly url: string;
+
+ /**
+ * HTTP request headers.
+ */
+ readonly headers: Headers;
+
+ /**
+ * Query string parameter keys and values from the URL.
+ */
+ readonly query: URLSearchParams;
+
+ /**
+ * Route parameter keys and values.
+ */
+ readonly params: HttpRequestParams;
+
+ /**
+ * Object representing logged-in user, either through
+ * AppService/Functions authentication, or SWA Authentication.
+ * null when no such user is logged in.
+ */
+ readonly user: HttpRequestUser | null;
+
+ /**
+ * Returns the body as a ReadableStream
+ */
+ readonly body: ReadableStream | null;
+
+ /**
+ * Returns whether the body has been read from
+ */
+ readonly bodyUsed: boolean;
+
+ /**
+ * Returns a promise fulfilled with the body as an ArrayBuffer
+ */
+ readonly arrayBuffer: () => Promise<ArrayBuffer>;
+
+ /**
+ * Returns a promise fulfilled with the body as a Blob
+ */
+ readonly blob: () => Promise<Blob>;
+
+ /**
+ * Returns a promise fulfilled with the body as FormData
+ */
+ readonly formData: () => Promise<FormData>;
+
+ /**
+ * Returns a promise fulfilled with the body parsed as JSON
+ */
+ readonly json: () => Promise<unknown>;
+
+ /**
+ * Returns a promise fulfilled with the body as a string
+ */
+ readonly text: () => Promise<string>;
+
+ /**
+ * Creates a copy of the request object, with special handling of the body.
+ * [Learn more here](https://developer.mozilla.org/docs/Web/API/Request/clone)
+ */
+ readonly clone: () => HttpRequest;
+}
+
+/**
+ * Route parameter keys and values.
+ */
+export type HttpRequestParams = Record<string, string>;
+
+/**
+ * Object representing logged-in user, either through
+ * AppService/Functions authentication, or SWA Authentication
+ */
+export interface HttpRequestUser {
+ /**
+ * Type of authentication, either AppService or StaticWebApps
+ */
+ type: HttpRequestUserType;
+
+ /**
+ * unique user GUID
+ */
+ id: string;
+
+ /**
+ * unique username
+ */
+ username: string;
+
+ /**
+ * provider of authentication service
+ */
+ identityProvider: string;
+
+ /**
+ * Extra authentication information, dependent on auth type
+ * and auth provider
+ */
+ claimsPrincipalData: Record<string, unknown>;
+}
+
+/**
+ * Possible values for an HTTP request method.
+ */
+export type HttpMethod = 'GET' | 'POST' | 'DELETE' | 'HEAD' | 'PATCH' | 'PUT' | 'OPTIONS' | 'TRACE' | 'CONNECT';
+
+/**
+ * Possible values for an HTTP Request user type
+ */
+export type HttpRequestUserType = 'AppService' | 'StaticWebApps';
+
+export interface HttpResponseInit {
+ /**
+ * HTTP response body
+ */
+ body?: BodyInit;
+
+ /**
+ * A JSON-serializable HTTP Response body.
+ * If set, the `HttpResponseInit.body` property will be ignored in favor of this property
+ */
+ jsonBody?: any;
+
+ /**
+ * HTTP response status code
+ * @default 200
+ */
+ status?: number;
+
+ /**
+ * HTTP response headers
+ */
+ headers?: HeadersInit;
+
+ /**
+ * HTTP response cookies
+ */
+ cookies?: Cookie[];
+
+ /**
+ * Enable content negotiation of response body if true
+ * If false, treat response body as raw
+ * @default false
+ */
+ enableContentNegotiation?: boolean;
+}
+
+/**
+ * HTTP response class
+ */
+export declare class HttpResponse {
+ constructor(responseInit?: HttpResponseInit);
+
+ /**
+ * HTTP response status code
+ * @default 200
+ */
+ readonly status: number;
+
+ /**
+ * HTTP response headers.
+ */
+ readonly headers: Headers;
+
+ /**
+ * HTTP response cookies
+ */
+ readonly cookies: Cookie[];
+
+ /**
+ * Enable content negotiation of response body if true
+ * If false, treat response body as raw
+ * @default false
+ */
+ readonly enableContentNegotiation: boolean;
+
+ /**
+ * Returns the body as a ReadableStream
+ */
+ readonly body: ReadableStream | null;
+
+ /**
+ * Returns whether the body has been read from
+ */
+ readonly bodyUsed: boolean;
+
+ /**
+ * Returns a promise fulfilled with the body as an ArrayBuffer
+ */
+ readonly arrayBuffer: () => Promise<ArrayBuffer>;
+
+ /**
+ * Returns a promise fulfilled with the body as a Blob
+ */
+ readonly blob: () => Promise<Blob>;
+
+ /**
+ * Returns a promise fulfilled with the body as FormData
+ */
+ readonly formData: () => Promise<FormData>;
+
+ /**
+ * Returns a promise fulfilled with the body parsed as JSON
+ */
+ readonly json: () => Promise<unknown>;
+
+ /**
+ * Returns a promise fulfilled with the body as a string
+ */
+ readonly text: () => Promise<string>;
+
+ /**
+ * Creates a copy of the response object, with special handling of the body.
+ * [Learn more here](https://developer.mozilla.org/docs/Web/API/Response/clone)
+ */
+ readonly clone: () => HttpResponse;
+}
+
+/**
+ * Http response cookie object to "Set-Cookie"
+ */
+export interface Cookie {
+ name: string;
+
+ value: string;
+
+ /**
+ * Specifies allowed hosts to receive the cookie
+ */
+ domain?: string;
+
+ /**
+ * Specifies URL path that must exist in the requested URL
+ */
+ path?: string;
+
+ /**
+ * NOTE: It is generally recommended that you use maxAge over expires.
+ * Sets the cookie to expire at a specific date instead of when the client closes.
+ * This can be a Javascript Date or Unix time in milliseconds.
+ */
+ expires?: Date | number;
+
+ /**
+ * Sets the cookie to only be sent with an encrypted request
+ */
+ secure?: boolean;
+
+ /**
+ * Sets the cookie to be inaccessible to JavaScript's Document.cookie API
+ */
+ httpOnly?: boolean;
+
+ /**
+ * Can restrict the cookie to not be sent with cross-site requests
+ */
+ sameSite?: 'Strict' | 'Lax' | 'None' | undefined;
+
+ /**
+ * Number of seconds until the cookie expires. A zero or negative number will expire the cookie immediately.
+ */
+ maxAge?: number;
+}
+
+/**
+ * For testing purposes only. This will always be constructed for you when run in the context of the Azure Functions runtime
+ */
+export interface HttpRequestInit {
+ method?: string;
+
+ url?: string;
+
+ body?: HttpRequestBodyInit;
+
+ headers?: Record<string, string>;
+
+ query?: Record<string, string>;
+
+ params?: Record<string, string>;
+}
+
+/**
+ * For testing purposes only. This will always be constructed for you when run in the context of the Azure Functions runtime
+ */
+export interface HttpRequestBodyInit {
+ /**
+ * The body as a buffer. You only need to specify one of the `bytes` or `string` properties
+ */
+ bytes?: Uint8Array;
+
+ /**
+ * The body as a string. You only need to specify one of the `bytes` or `string` properties
+ */
+ string?: string;
+}
diff --git a/node_modules/@azure/functions/types/index.d.ts b/node_modules/@azure/functions/types/index.d.ts
new file mode 100644
index 00000000..314c4c16
--- /dev/null
+++ b/node_modules/@azure/functions/types/index.d.ts
@@ -0,0 +1,206 @@
+// Copyright (c) .NET Foundation. All rights reserved.
+// Licensed under the MIT License.
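+
+// A quick usage sketch of the HTTP types above (illustrative only; assumes the
+// `app` namespace exported below is used to register the function):
+//
+//   import { app, HttpRequest, HttpResponseInit, InvocationContext } from '@azure/functions';
+//
+//   app.http('hello', {
+//       methods: ['GET'],
+//       authLevel: 'anonymous',
+//       handler: async (request: HttpRequest, context: InvocationContext): Promise<HttpResponseInit> => {
+//           // `query` is a URLSearchParams; `jsonBody` takes precedence over `body`.
+//           const name = request.query.get('name') ?? 'world';
+//           return { status: 200, jsonBody: { greeting: `Hello, ${name}!` } };
+//       },
+//   });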
+
+import { InvocationContext } from './InvocationContext';
+
+export * as app from './app';
+export * from './cosmosDB';
+export * from './cosmosDB.v3';
+export * from './cosmosDB.v4';
+export * from './eventGrid';
+export * from './eventHub';
+export * from './generic';
+export * from './hooks/appHooks';
+export * from './hooks/HookContext';
+export * from './hooks/invocationHooks';
+export * from './hooks/logHooks';
+export * from './http';
+export * as input from './input';
+export * from './InvocationContext';
+export * from './mcpTool';
+export * from './mySql';
+export * as output from './output';
+export * from './serviceBus';
+export * from './setup';
+export * from './sql';
+export * from './storage';
+export * from './table';
+export * from './timer';
+export * as trigger from './trigger';
+export * from './warmup';
+export * from './webpubsub';
+
+/**
+ * Void if no `return` output is registered
+ * Otherwise, the registered `return` output
+ */
+export type FunctionResult<T = unknown> = T | Promise<T>;
+
+export type FunctionHandler = (triggerInput: any, context: InvocationContext) => FunctionResult;
+
+/**
+ * Configures the inputs, outputs, and handler for an Azure Function
+ */
+export interface FunctionOptions {
+ /**
+ * The code that will be executed when your function is triggered
+ */
+ handler: FunctionHandler;
+
+ /**
+ * Configuration for the primary input to the function, aka the reason it will be triggered
+ * This is the only input that is passed as an argument to the function handler during invocation
+ */
+ trigger: FunctionTrigger;
+
+ /**
+ * Configuration for the optional primary output of the function
+ * This is the main output that you should set as the return value of the function handler during invocation
+ */
+ return?: FunctionOutput;
+
+ /**
+ * Configuration for an optional set of secondary inputs
+ * During invocation, get these values with `context.extraInputs.get()`
+ */
+ extraInputs?: FunctionInput[];
+
+ /**
+ * Configuration for an optional set of secondary outputs
+ * During invocation, set these values with `context.extraOutputs.set()`
+ */
+ extraOutputs?: FunctionOutput[];
+}
+
+/**
+ * Full configuration for the primary input to a function
+ */
+export interface FunctionTrigger extends Record<string, unknown> {
+ /**
+ * The type for this trigger ('httpTrigger', 'timerTrigger', etc.)
+ * If using the `trigger` namespace to create this object, the type will be set for you
+ */
+ type: string;
+
+ /**
+ * Must be unique within this function.
+ * If using the `trigger` namespace to create this object, the name will be auto-generated for you
+ */
+ name: string;
+}
+
+/**
+ * Full configuration for the secondary input to a function ("trigger" is the primary input)
+ * NOTE: Not all triggers can be used as secondary inputs
+ */
+export interface FunctionInput extends Record<string, unknown> {
+ /**
+ * The type for this input ('blob', 'cosmosDB', etc.)
+ * If using the `input` namespace to create this object, the type will be set for you
+ */
+ type: string;
+
+ /**
+ * Must be unique within this function.
+ * If using the `input` namespace to create this object, the name will be auto-generated for you
+ */
+ name: string;
+}
+
+/**
+ * Full configuration for the output to a function
+ */
+export interface FunctionOutput extends Record<string, unknown> {
+ /**
+ * The type for this output ('http', 'blob', 'queue', etc.)
+ * If using the `output` namespace to create this object, the type will be set for you
+ */
+ type: string;
+
+ /**
+ * Must be unique within this function.
+ * If using the `output` namespace to create this object, the name will be auto-generated for you + */ + name: string; +} + +export type RetryOptions = FixedDelayRetryOptions | ExponentialBackoffRetryOptions; + +export interface FixedDelayRetryOptions { + /** + * A specified amount of time is allowed to elapse between each retry. + */ + strategy: 'fixedDelay'; + + /** + * The maximum number of retries allowed per function execution. -1 means to retry indefinitely. + */ + maxRetryCount: number; + + /** + * The delay that's used between retries. + * This can be a number in milliseconds or a Duration object + */ + delayInterval: Duration | number; +} + +export interface ExponentialBackoffRetryOptions { + /** + * The first retry waits for the minimum delay. On subsequent retries, time is added exponentially to + * the initial duration for each retry, until the maximum delay is reached. Exponential back-off adds + * some small randomization to delays to stagger retries in high-throughput scenarios. + */ + strategy: 'exponentialBackoff'; + + /** + * The maximum number of retries allowed per function execution. -1 means to retry indefinitely. + */ + maxRetryCount: number; + + /** + * The minimum retry delay. + * This can be a number in milliseconds, or a Duration object + */ + minimumInterval: Duration | number; + + /** + * The maximum retry delay. + * This can be a number in milliseconds, or a Duration object + */ + maximumInterval: Duration | number; +} + +export interface Duration { + hours?: number; + minutes?: number; + seconds?: number; + milliseconds?: number; +} + +/** + * Represents a type which can release resources, such as event listening or a timer. + */ +export declare class Disposable { + /** + * Combine many disposable-likes into one. You can use this method when having objects with a dispose function which aren't instances of `Disposable`. + * + * @param disposableLikes Objects that have at least a `dispose`-function member. Note that asynchronous dispose-functions aren't awaited. + * @return Returns a new disposable which, upon dispose, will dispose all provided disposables. + */ + static from(...disposableLikes: { dispose: () => any }[]): Disposable; + + /** + * Creates a new disposable that calls the provided function on dispose. + * *Note* that an asynchronous function is not awaited. + * + * @param callOnDispose Function that disposes something. + */ + constructor(callOnDispose: () => any); + + /** + * Dispose this object. + */ + dispose(): any; +} + +export type LogLevel = 'trace' | 'debug' | 'information' | 'warning' | 'error' | 'critical' | 'none'; diff --git a/node_modules/@azure/functions/types/input.d.ts b/node_modules/@azure/functions/types/input.d.ts new file mode 100644 index 00000000..52d8c680 --- /dev/null +++ b/node_modules/@azure/functions/types/input.d.ts @@ -0,0 +1,57 @@ +// Copyright (c) .NET Foundation. All rights reserved. +// Licensed under the MIT License. 
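+
+// A usage sketch for the input helpers below (illustrative only): a blob
+// secondary input read during invocation via `context.extraInputs.get()`.
+//
+//   import { app, input } from '@azure/functions';
+//
+//   const blobInput = input.storageBlob({
+//       path: 'samples-workitems/sample.json', // assumed container/blob path
+//       connection: 'AzureWebJobsStorage',     // assumed app setting name
+//   });
+//
+//   app.http('readBlob', {
+//       extraInputs: [blobInput],
+//       handler: async (request, context) => {
+//           return { jsonBody: context.extraInputs.get(blobInput) };
+//       },
+//   });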
+ +import { CosmosDBInput, CosmosDBInputOptions } from './cosmosDB'; +import { GenericInputOptions } from './generic'; +import { FunctionInput } from './index'; +import { SqlInput, SqlInputOptions } from './sql'; +import { StorageBlobInput, StorageBlobInputOptions } from './storage'; +import { TableInput, TableInputOptions } from './table'; +import { MySqlInput, MySqlInputOptions } from './mySql'; +import { + WebPubSubConnectionInput, + WebPubSubConnectionInputOptions, + WebPubSubContextInput, + WebPubSubContextInputOptions, +} from './webpubsub'; + +/** + * [Link to docs and examples](https://docs.microsoft.com/azure/azure-functions/functions-bindings-storage-blob-input?pivots=programming-language-javascript) + */ +export function storageBlob(options: StorageBlobInputOptions): StorageBlobInput; + +/** + * [Link to docs and examples](https://docs.microsoft.com/azure/azure-functions/functions-bindings-storage-table-input?pivots=programming-language-javascript) + */ +export function table(options: TableInputOptions): TableInput; + +/** + * [Link to docs and examples](https://docs.microsoft.com/azure/azure-functions/functions-bindings-cosmosdb-v2-input?pivots=programming-language-javascript) + */ +export function cosmosDB(options: CosmosDBInputOptions): CosmosDBInput; + +/** + * [Link to docs and examples](https://docs.microsoft.com/azure/azure-functions/functions-bindings-azure-sql-input?pivots=programming-language-javascript) + */ +export function sql(options: SqlInputOptions): SqlInput; + +/** + * [Link to docs and examples](https://learn.microsoft.com/en-us/azure/azure-functions/functions-bindings-azure-mysql-input?pivots=programming-language-javascript) + */ +export function mySql(options: MySqlInputOptions): MySqlInput; + +/** + * [Link to docs and examples](https://docs.microsoft.com/azure/azure-functions/functions-bindings-web-pubsub-input?pivots=programming-language-javascript) + */ +export function webPubSubConnection(options: WebPubSubConnectionInputOptions): WebPubSubConnectionInput; + +/** + * [Link to docs and examples](https://docs.microsoft.com/azure/azure-functions/functions-bindings-web-pubsub-input?pivots=programming-language-javascript) + */ +export function webPubSubContext(options: WebPubSubContextInputOptions): WebPubSubContextInput; + +/** + * A generic option that can be used for any input type + * Use this method if your desired input type does not already have its own method + */ +export function generic(options: GenericInputOptions): FunctionInput; diff --git a/node_modules/@azure/functions/types/mcpTool.d.ts b/node_modules/@azure/functions/types/mcpTool.d.ts new file mode 100644 index 00000000..2a67bd24 --- /dev/null +++ b/node_modules/@azure/functions/types/mcpTool.d.ts @@ -0,0 +1,107 @@ +// Copyright (c) .NET Foundation. All rights reserved. +// Licensed under the MIT License. + +import { FunctionOptions, FunctionResult, FunctionTrigger } from './index'; +import { InvocationContext } from './InvocationContext'; + +/** + * A handler function for MCP Tool triggers. + * + * @param messages - The messages or data received by the trigger. + * @param context - The invocation context for the function. + * @returns A result that can be a promise or a synchronous value. + */ +export type McpToolTriggerHandler = (messages: unknown, context: InvocationContext) => FunctionResult; + +/** + * Configuration options for an MCP Tool function. + * This includes trigger-specific options and general function options. 
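+ *
+ * A registration sketch (an assumption for illustration: an `app.mcpTool`
+ * helper that mirrors the other binding registration helpers):
+ *
+ * ```typescript
+ * import { app } from '@azure/functions';
+ *
+ * app.mcpTool('getSnippet', {
+ *     toolName: 'getSnippet',
+ *     description: 'Returns a saved code snippet by name',
+ *     toolProperties: [
+ *         // Hypothetical property; the shape follows McpToolProperty below
+ *         { propertyName: 'name', propertyType: 'string', description: 'Snippet name' },
+ *     ],
+ *     handler: (messages, context) => {
+ *         context.log('Tool invoked with:', messages);
+ *         return 'console.log("hello");';
+ *     },
+ * });
+ * ```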
+ */
+export interface McpToolFunctionOptions extends McpToolTriggerOptions, Partial<FunctionOptions> {
+ /**
+ * The handler function to execute when the trigger is invoked.
+ */
+ handler: McpToolTriggerHandler;
+
+ /**
+ * The trigger configuration for the MCP Tool.
+ */
+ trigger?: McpToolTrigger;
+}
+
+/**
+ * Configuration options for an MCP Tool trigger.
+ * These options define the behavior and metadata for the trigger.
+ */
+export interface McpToolTriggerOptions {
+ /**
+ * The name of the tool associated with the trigger.
+ * This is typically an app setting or environment variable.
+ */
+ toolName: string;
+
+ /**
+ * A description of the tool or trigger.
+ * This provides additional context about the trigger's purpose.
+ */
+ description: string;
+
+ /**
+ * Additional properties or metadata for the tool.
+ * This is a dictionary of key-value pairs that can be used to configure the trigger.
+ */
+ toolProperties?: any | McpToolProperty[];
+}
+
+/**
+ * Configuration options for an MCP Tool trigger.
+ * These options define the behavior and metadata for the trigger.
+ */
+export interface McpToolTriggerOptionsToRpc {
+ /**
+ * The name of the tool associated with the trigger.
+ * This is typically an app setting or environment variable.
+ */
+ toolName: string;
+
+ /**
+ * A description of the tool or trigger.
+ * This provides additional context about the trigger's purpose.
+ */
+ description: string;
+
+ /**
+ * Additional properties or metadata for the tool.
+ * This is a dictionary of key-value pairs that can be used to configure the trigger.
+ */
+ toolProperties?: string;
+}
+
+/**
+ * Represents an MCP Tool trigger, combining base function trigger options
+ * with MCP Tool-specific trigger options.
+ */
+export type McpToolTrigger = FunctionTrigger & McpToolTriggerOptionsToRpc;
+
+export interface McpToolProperty {
+ /**
+ * The name of the property.
+ */
+ propertyName: string;
+
+ /**
+ * The type of the property.
+ */
+ propertyType: string;
+
+ /**
+ * A description of the property.
+ * This provides additional context about the purpose or usage of the property.
+ */
+ description: string;
+
+ /**
+ * Indicates whether the property is required.
+ */
+ required?: boolean;
+}
diff --git a/node_modules/@azure/functions/types/mySql.d.ts b/node_modules/@azure/functions/types/mySql.d.ts
new file mode 100644
index 00000000..82eb69db
--- /dev/null
+++ b/node_modules/@azure/functions/types/mySql.d.ts
@@ -0,0 +1,73 @@
+// Copyright (c) .NET Foundation. All rights reserved.
+// Licensed under the MIT License.
+
+import { FunctionInput, FunctionOptions, FunctionOutput, FunctionResult, FunctionTrigger } from './index';
+import { InvocationContext } from './InvocationContext';
+
+export type MySqlHandler = (changes: MySqlChange[], context: InvocationContext) => FunctionResult;
+
+export interface MySqlFunctionOptions extends MySqlTriggerOptions, Partial<FunctionOptions> {
+ handler: MySqlHandler;
+
+ trigger?: MySqlTrigger;
+}
+
+export interface MySqlTriggerOptions {
+ /**
+ * The name of the table monitored by the trigger.
+ */ + tableName: string; + + /** + * An app setting (or environment variable) with the connection string for the database containing the table monitored for changes + */ + connectionStringSetting: string; +} +export type MySqlTrigger = FunctionTrigger & MySqlTriggerOptions; + +export interface MySqlChange { + Item: unknown; + Operation: MySqlChangeOperation; +} + +export enum MySqlChangeOperation { + Update = 0, +} + +export interface MySqlInputOptions { + /** + * The Transact-SQL query command or name of the stored procedure executed by the binding. + */ + commandText: string; + + /** + * The command type value + */ + commandType: 'Text' | 'StoredProcedure'; + + /** + * An app setting (or environment variable) with the connection string for the database against which the query or stored procedure is being executed + */ + connectionStringSetting: string; + + /** + * Zero or more parameter values passed to the command during execution as a single string. + * Must follow the format @param1=param1,@param2=param2. + * Neither the parameter name nor the parameter value can contain a comma (,) or an equals sign (=). + */ + parameters?: string; +} +export type MySqlInput = FunctionInput & MySqlInputOptions; + +export interface MySqlOutputOptions { + /** + * The name of the table being written to by the binding. + */ + commandText: string; + + /** + * An app setting (or environment variable) with the connection string for the database to which data is being written + */ + connectionStringSetting: string; +} +export type MySqlOutput = FunctionOutput & MySqlOutputOptions; diff --git a/node_modules/@azure/functions/types/output.d.ts b/node_modules/@azure/functions/types/output.d.ts new file mode 100644 index 00000000..b9d9d83a --- /dev/null +++ b/node_modules/@azure/functions/types/output.d.ts @@ -0,0 +1,86 @@ +// Copyright (c) .NET Foundation. All rights reserved. +// Licensed under the MIT License. 
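+
+// A usage sketch for the output helpers below (illustrative only): a storage
+// queue secondary output set during invocation via `context.extraOutputs.set()`.
+//
+//   import { app, output } from '@azure/functions';
+//
+//   const queueOutput = output.storageQueue({
+//       queueName: 'outqueue',             // assumed queue name
+//       connection: 'AzureWebJobsStorage', // assumed app setting name
+//   });
+//
+//   app.http('enqueue', {
+//       methods: ['POST'],
+//       extraOutputs: [queueOutput],
+//       handler: async (request, context) => {
+//           context.extraOutputs.set(queueOutput, await request.text());
+//           return { status: 202 };
+//       },
+//   });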
+ +import { CosmosDBOutput, CosmosDBOutputOptions } from './cosmosDB'; +import { EventGridOutput, EventGridOutputOptions } from './eventGrid'; +import { EventHubOutput, EventHubOutputOptions } from './eventHub'; +import { GenericOutputOptions } from './generic'; +import { HttpOutput, HttpOutputOptions } from './http'; +import { FunctionOutput } from './index'; +import { + ServiceBusQueueOutput, + ServiceBusQueueOutputOptions, + ServiceBusTopicOutput, + ServiceBusTopicOutputOptions, +} from './serviceBus'; +import { SqlOutput, SqlOutputOptions } from './sql'; +import { StorageBlobOutput, StorageBlobOutputOptions, StorageQueueOutput, StorageQueueOutputOptions } from './storage'; +import { TableOutput, TableOutputOptions } from './table'; +import { MySqlOutput, MySqlOutputOptions } from './mySql'; +import { WebPubSubOutput, WebPubSubOutputOptions } from './webpubsub'; + +/** + * [Link to docs and examples](https://docs.microsoft.com/azure/azure-functions/functions-bindings-http-webhook-output?&pivots=programming-language-javascript) + */ +export function http(options: HttpOutputOptions): HttpOutput; + +/** + * [Link to docs and examples](https://docs.microsoft.com/azure/azure-functions/functions-bindings-storage-blob-output?pivots=programming-language-javascript) + */ +export function storageBlob(options: StorageBlobOutputOptions): StorageBlobOutput; + +/** + * [Link to docs and examples](https://docs.microsoft.com/azure/azure-functions/functions-bindings-storage-table-output?pivots=programming-language-javascript) + */ +export function table(options: TableOutputOptions): TableOutput; + +/** + * [Link to docs and examples](https://docs.microsoft.com/azure/azure-functions/functions-bindings-storage-queue-output?pivots=programming-language-javascript) + */ +export function storageQueue(options: StorageQueueOutputOptions): StorageQueueOutput; + +/** + * [Link to docs and examples](https://docs.microsoft.com/azure/azure-functions/functions-bindings-service-bus-output?pivots=programming-language-javascript) + */ +export function serviceBusQueue(options: ServiceBusQueueOutputOptions): ServiceBusQueueOutput; + +/** + * [Link to docs and examples](https://docs.microsoft.com/azure/azure-functions/functions-bindings-service-bus-output?pivots=programming-language-javascript) + */ +export function serviceBusTopic(options: ServiceBusTopicOutputOptions): ServiceBusTopicOutput; + +/** + * [Link to docs and examples](https://docs.microsoft.com/azure/azure-functions/functions-bindings-event-hubs-output?pivots=programming-language-javascript) + */ +export function eventHub(options: EventHubOutputOptions): EventHubOutput; + +/** + * [Link to docs and examples](https://docs.microsoft.com/azure/azure-functions/functions-bindings-event-grid-output?pivots=programming-language-javascript) + */ +export function eventGrid(options: EventGridOutputOptions): EventGridOutput; + +/** + * [Link to docs and examples](https://docs.microsoft.com/azure/azure-functions/functions-bindings-cosmosdb-v2-output?pivots=programming-language-javascript) + */ +export function cosmosDB(options: CosmosDBOutputOptions): CosmosDBOutput; + +/** + * [Link to docs and examples](https://docs.microsoft.com/azure/azure-functions/functions-bindings-azure-sql-output?pivots=programming-language-javascript) + */ +export function sql(options: SqlOutputOptions): SqlOutput; + +/** + * [Link to docs and examples](https://learn.microsoft.com/en-us/azure/azure-functions/functions-bindings-azure-mysql-output?pivots=programming-language-javascript) + */ 
+export function mySql(options: MySqlOutputOptions): MySqlOutput;
+
+/**
+ * [Link to docs and examples](https://docs.microsoft.com/azure/azure-functions/functions-bindings-web-pubsub-output?pivots=programming-language-javascript)
+ */
+export function webPubSub(options: WebPubSubOutputOptions): WebPubSubOutput;
+
+/**
+ * A generic option that can be used for any output type
+ * Use this method if your desired output type does not already have its own method
+ */
+export function generic(options: GenericOutputOptions): FunctionOutput;
diff --git a/node_modules/@azure/functions/types/serviceBus.d.ts b/node_modules/@azure/functions/types/serviceBus.d.ts
new file mode 100644
index 00000000..0a45d7ab
--- /dev/null
+++ b/node_modules/@azure/functions/types/serviceBus.d.ts
@@ -0,0 +1,98 @@
+// Copyright (c) .NET Foundation. All rights reserved.
+// Licensed under the MIT License.
+
+import { FunctionOptions, FunctionOutput, FunctionResult, FunctionTrigger } from './index';
+import { InvocationContext } from './InvocationContext';
+
+export type ServiceBusQueueHandler = (messages: unknown, context: InvocationContext) => FunctionResult;
+
+export interface ServiceBusQueueFunctionOptions extends ServiceBusQueueTriggerOptions, Partial<FunctionOptions> {
+ handler: ServiceBusQueueHandler;
+
+ trigger?: ServiceBusQueueTrigger;
+}
+
+export interface ServiceBusQueueTriggerOptions {
+ /**
+ * An app setting (or environment variable) with the service bus connection string
+ */
+ connection: string;
+
+ /**
+ * The name of the queue to monitor
+ */
+ queueName: string;
+
+ /**
+ * `true` if connecting to a [session-aware](https://docs.microsoft.com/azure/service-bus-messaging/message-sessions) queue. Default is `false`
+ */
+ isSessionsEnabled?: boolean;
+
+ /**
+ * Set to `many` in order to enable batching. If omitted or set to `one`, a single message is passed to the function.
+ */
+ cardinality?: 'many' | 'one';
+}
+export type ServiceBusQueueTrigger = FunctionTrigger & ServiceBusQueueTriggerOptions;
+
+export interface ServiceBusQueueOutputOptions {
+ /**
+ * An app setting (or environment variable) with the service bus connection string
+ */
+ connection: string;
+
+ /**
+ * The name of the queue to which messages are sent
+ */
+ queueName: string;
+}
+export type ServiceBusQueueOutput = FunctionOutput & ServiceBusQueueOutputOptions;
+
+export type ServiceBusTopicHandler = (message: unknown, context: InvocationContext) => FunctionResult;
+
+export interface ServiceBusTopicFunctionOptions extends ServiceBusTopicTriggerOptions, Partial<FunctionOptions> {
+ handler: ServiceBusTopicHandler;
+
+ trigger?: ServiceBusTopicTrigger;
+}
+
+export interface ServiceBusTopicTriggerOptions {
+ /**
+ * An app setting (or environment variable) with the service bus connection string
+ */
+ connection: string;
+
+ /**
+ * The name of the topic to monitor
+ */
+ topicName: string;
+
+ /**
+ * The name of the subscription to monitor
+ */
+ subscriptionName: string;
+
+ /**
+ * `true` if connecting to a [session-aware](https://docs.microsoft.com/azure/service-bus-messaging/message-sessions) subscription. Default is `false`
+ */
+ isSessionsEnabled?: boolean;
+
+ /**
+ * Set to `many` in order to enable batching. If omitted or set to `one`, a single message is passed to the function.
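+ * With `'many'`, the handler's first argument is an array of messages rather than a single message.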
+ */
+ cardinality?: 'many' | 'one';
+}
+export type ServiceBusTopicTrigger = FunctionTrigger & ServiceBusTopicTriggerOptions;
+
+export interface ServiceBusTopicOutputOptions {
+ /**
+ * An app setting (or environment variable) with the service bus connection string
+ */
+ connection: string;
+
+ /**
+ * The name of the topic to which messages are sent
+ */
+ topicName: string;
+}
+export type ServiceBusTopicOutput = FunctionOutput & ServiceBusTopicOutputOptions;
diff --git a/node_modules/@azure/functions/types/setup.d.ts b/node_modules/@azure/functions/types/setup.d.ts
new file mode 100644
index 00000000..92af251d
--- /dev/null
+++ b/node_modules/@azure/functions/types/setup.d.ts
@@ -0,0 +1,16 @@
+// Copyright (c) .NET Foundation. All rights reserved.
+// Licensed under the MIT License.
+
+export interface SetupOptions {
+ /**
+ * Stream http requests and responses instead of loading entire body in memory.
+ * [Learn more here](https://aka.ms/AzFuncNodeHttpStreams)
+ */
+ enableHttpStream?: boolean;
+
+ /**
+ * Dictionary of Node.js worker capabilities.
+ * This will be merged with existing capabilities specified by the Node.js worker and library.
+ */
+ capabilities?: Record<string, unknown>;
+}
diff --git a/node_modules/@azure/functions/types/sql.d.ts b/node_modules/@azure/functions/types/sql.d.ts
new file mode 100644
index 00000000..5614d022
--- /dev/null
+++ b/node_modules/@azure/functions/types/sql.d.ts
@@ -0,0 +1,75 @@
+// Copyright (c) .NET Foundation. All rights reserved.
+// Licensed under the MIT License.
+
+import { FunctionInput, FunctionOptions, FunctionOutput, FunctionResult, FunctionTrigger } from './index';
+import { InvocationContext } from './InvocationContext';
+
+export type SqlHandler = (changes: SqlChange[], context: InvocationContext) => FunctionResult;
+
+export interface SqlFunctionOptions extends SqlTriggerOptions, Partial<FunctionOptions> {
+ handler: SqlHandler;
+
+ trigger?: SqlTrigger;
+}
+
+export interface SqlTriggerOptions {
+ /**
+ * The name of the table monitored by the trigger.
+ */
+ tableName: string;
+
+ /**
+ * An app setting (or environment variable) with the connection string for the database containing the table monitored for changes
+ */
+ connectionStringSetting: string;
+}
+export type SqlTrigger = FunctionTrigger & SqlTriggerOptions;
+
+export interface SqlChange {
+ Item: unknown;
+ Operation: SqlChangeOperation;
+}
+
+export enum SqlChangeOperation {
+ Insert = 0,
+ Update = 1,
+ Delete = 2,
+}
+
+export interface SqlInputOptions {
+ /**
+ * The Transact-SQL query command or name of the stored procedure executed by the binding.
+ */
+ commandText: string;
+
+ /**
+ * The command type value
+ */
+ commandType: 'Text' | 'StoredProcedure';
+
+ /**
+ * An app setting (or environment variable) with the connection string for the database against which the query or stored procedure is being executed
+ */
+ connectionStringSetting: string;
+
+ /**
+ * Zero or more parameter values passed to the command during execution as a single string.
+ * Must follow the format @param1=param1,@param2=param2.
+ * Neither the parameter name nor the parameter value can contain a comma (,) or an equals sign (=).
+ */
+ parameters?: string;
+}
+export type SqlInput = FunctionInput & SqlInputOptions;
+
+export interface SqlOutputOptions {
+ /**
+ * The name of the table being written to by the binding.
+ */
+ commandText: string;
+
+ /**
+ * An app setting (or environment variable) with the connection string for the database to which data is being written
+ */
+ connectionStringSetting: string;
+}
+export type SqlOutput = FunctionOutput & SqlOutputOptions;
diff --git a/node_modules/@azure/functions/types/storage.d.ts b/node_modules/@azure/functions/types/storage.d.ts
new file mode 100644
index 00000000..2ed33ba9
--- /dev/null
+++ b/node_modules/@azure/functions/types/storage.d.ts
@@ -0,0 +1,66 @@
+// Copyright (c) .NET Foundation. All rights reserved.
+// Licensed under the MIT License.
+
+import { FunctionInput, FunctionOptions, FunctionOutput, FunctionResult, FunctionTrigger } from './index';
+import { InvocationContext } from './InvocationContext';
+
+export type StorageBlobHandler = (blob: unknown, context: InvocationContext) => FunctionResult;
+export type StorageQueueHandler = (queueEntry: unknown, context: InvocationContext) => FunctionResult;
+
+export interface StorageBlobFunctionOptions extends StorageBlobTriggerOptions, Partial<FunctionOptions> {
+ handler: StorageBlobHandler;
+
+ trigger?: StorageBlobTrigger;
+}
+
+export interface StorageQueueFunctionOptions extends StorageQueueTriggerOptions, Partial<FunctionOptions> {
+ handler: StorageQueueHandler;
+
+ trigger?: StorageQueueTrigger;
+}
+
+export interface StorageBlobOptions {
+ /**
+ * The path to the blob container, for example "samples-workitems/{name}"
+ */
+ path: string;
+
+ /**
+ * An app setting (or environment variable) with the storage connection string to be used by this blob input or output
+ */
+ connection: string;
+}
+
+export interface StorageQueueOptions {
+ /**
+ * The queue name
+ */
+ queueName: string;
+
+ /**
+ * An app setting (or environment variable) with the storage connection string to be used by this queue input or output
+ */
+ connection: string;
+}
+
+export interface StorageBlobTriggerOptions extends StorageBlobOptions {
+ /**
+ * The source of the triggering event.
+ * Use `EventGrid` for an Event Grid-based blob trigger, which provides much lower latency.
+ * The default is `LogsAndContainerScan`, which uses the standard polling mechanism to detect changes in the container.
+ */
+ source?: 'EventGrid' | 'LogsAndContainerScan';
+}
+export type StorageBlobTrigger = FunctionTrigger & StorageBlobTriggerOptions;
+
+export type StorageBlobInputOptions = StorageBlobOptions;
+export type StorageBlobInput = FunctionInput & StorageBlobInputOptions;
+
+export type StorageBlobOutputOptions = StorageBlobOptions;
+export type StorageBlobOutput = FunctionOutput & StorageBlobOutputOptions;
+
+export type StorageQueueTriggerOptions = StorageQueueOptions;
+export type StorageQueueTrigger = FunctionTrigger & StorageQueueTriggerOptions;
+
+export type StorageQueueOutputOptions = StorageQueueOptions;
+export type StorageQueueOutput = FunctionOutput & StorageQueueOutputOptions;
diff --git a/node_modules/@azure/functions/types/table.d.ts b/node_modules/@azure/functions/types/table.d.ts
new file mode 100644
index 00000000..8e0c83e5
--- /dev/null
+++ b/node_modules/@azure/functions/types/table.d.ts
@@ -0,0 +1,60 @@
+// Copyright (c) .NET Foundation. All rights reserved.
+// Licensed under the MIT License.
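+
+// A usage sketch for the table bindings below (illustrative only): reading up
+// to ten entities from one partition as a secondary input. Note that `take`
+// and `filter` can't be combined with `rowKey`.
+//
+//   import { app, input } from '@azure/functions';
+//
+//   const productsInput = input.table({
+//       tableName: 'Products',        // assumed table name
+//       partitionKey: 'electronics',  // assumed partition key
+//       take: 10,
+//       connection: 'AzureWebJobsStorage',
+//   });
+//
+//   app.http('listProducts', {
+//       extraInputs: [productsInput],
+//       handler: async (request, context) => {
+//           return { jsonBody: context.extraInputs.get(productsInput) };
+//       },
+//   });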
+
+import { FunctionInput, FunctionOutput } from './index';
+
+export interface TableOutputOptions {
+ /**
+ * The table name
+ */
+ tableName: string;
+
+ /**
+ * An app setting (or environment variable) with the storage connection string to be used by this table output
+ */
+ connection: string;
+
+ /**
+ * The partition key of the table entity to write.
+ */
+ partitionKey?: string;
+
+ /**
+ * The row key of the table entity to write.
+ */
+ rowKey?: string;
+}
+export type TableOutput = FunctionOutput & TableOutputOptions;
+
+export interface TableInputOptions {
+ /**
+ * The table name
+ */
+ tableName: string;
+
+ /**
+ * An app setting (or environment variable) with the storage connection string to be used by this table input
+ */
+ connection: string;
+
+ /**
+ * The partition key of the table entity to read.
+ */
+ partitionKey?: string;
+
+ /**
+ * The row key of the table entity to read. Can't be used with `take` or `filter`.
+ */
+ rowKey?: string;
+
+ /**
+ * The maximum number of entities to return. Can't be used with `rowKey`
+ */
+ take?: number;
+
+ /**
+ * An OData filter expression for the entities to return from the table. Can't be used with `rowKey`.
+ */
+ filter?: string;
+}
+export type TableInput = FunctionInput & TableInputOptions;
diff --git a/node_modules/@azure/functions/types/timer.d.ts b/node_modules/@azure/functions/types/timer.d.ts
new file mode 100644
index 00000000..aa2d4c0c
--- /dev/null
+++ b/node_modules/@azure/functions/types/timer.d.ts
@@ -0,0 +1,70 @@
+// Copyright (c) .NET Foundation. All rights reserved.
+// Licensed under the MIT License.
+
+import { FunctionOptions, FunctionResult, FunctionTrigger, RetryOptions } from './index';
+import { InvocationContext } from './InvocationContext';
+
+export type TimerHandler = (myTimer: Timer, context: InvocationContext) => FunctionResult;
+
+export interface TimerFunctionOptions extends TimerTriggerOptions, Partial<FunctionOptions> {
+ handler: TimerHandler;
+
+ trigger?: TimerTrigger;
+
+ /**
+ * An optional retry policy to rerun a failed execution until either successful completion occurs or the maximum number of retries is reached.
+ * Learn more [here](https://learn.microsoft.com/azure/azure-functions/functions-bindings-error-pages)
+ */
+ retry?: RetryOptions;
+}
+
+export interface TimerTriggerOptions {
+ /**
+ * A [cron expression](https://docs.microsoft.com/azure/azure-functions/functions-bindings-timer?pivots=programming-language-javascript#ncrontab-expressions) of the format '{second} {minute} {hour} {day} {month} {day of week}' to specify the schedule
+ */
+ schedule: string;
+
+ /**
+ * If `true`, the function is invoked when the runtime starts.
+ * For example, the runtime starts when the function app wakes up after going idle due to inactivity, when the function app restarts due to function changes, and when the function app scales out.
+ * _Use with caution_. runOnStartup should rarely if ever be set to `true`, especially in production.
+ */
+ runOnStartup?: boolean;
+
+ /**
+ * When true, schedule will be persisted to aid in maintaining the correct schedule even through restarts. Defaults to true for schedules with interval >= 1 minute
+ */
+ useMonitor?: boolean;
+}
+
+export type TimerTrigger = FunctionTrigger & TimerTriggerOptions;
+
+/**
+ * Timer schedule information. Provided to your function when using a timer binding.
+ */
+export interface Timer {
+ /**
+ * Whether this timer invocation is due to a missed schedule occurrence.
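+ * For example, a handler might skip heavy work when it is running late
+ * (a sketch; `warn` is one of the log methods on `InvocationContext`):
+ * `if (myTimer.isPastDue) { context.warn('Timer is past due; skipping'); return; }`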
+ */
+ isPastDue: boolean;
+ schedule: {
+ /**
+ * Whether intervals between invocations should account for DST.
+ */
+ adjustForDST: boolean;
+ };
+ scheduleStatus: {
+ /**
+ * The last recorded schedule occurrence. Date ISO string.
+ */
+ last: string;
+ /**
+ * The expected next schedule occurrence. Date ISO string.
+ */
+ next: string;
+ /**
+ * The last time this record was updated. This is used to re-calculate `next` with the current schedule after a host restart. Date ISO string.
+ */
+ lastUpdated: string;
+ };
+}
diff --git a/node_modules/@azure/functions/types/trigger.d.ts b/node_modules/@azure/functions/types/trigger.d.ts
new file mode 100644
index 00000000..e8105d29
--- /dev/null
+++ b/node_modules/@azure/functions/types/trigger.d.ts
@@ -0,0 +1,103 @@
+// Copyright (c) .NET Foundation. All rights reserved.
+// Licensed under the MIT License.
+
+import { CosmosDBTrigger, CosmosDBTriggerOptions } from './cosmosDB';
+import { EventGridTrigger, EventGridTriggerOptions } from './eventGrid';
+import { EventHubTrigger, EventHubTriggerOptions } from './eventHub';
+import { GenericTriggerOptions } from './generic';
+import { HttpTrigger, HttpTriggerOptions } from './http';
+import { FunctionTrigger } from './index';
+import { McpToolFunctionOptions, McpToolTrigger } from './mcpTool';
+import { MySqlTrigger, MySqlTriggerOptions } from './mySql';
+import {
+ ServiceBusQueueTrigger,
+ ServiceBusQueueTriggerOptions,
+ ServiceBusTopicTrigger,
+ ServiceBusTopicTriggerOptions,
+} from './serviceBus';
+import { SqlTrigger, SqlTriggerOptions } from './sql';
+import {
+ StorageBlobTrigger,
+ StorageBlobTriggerOptions,
+ StorageQueueTrigger,
+ StorageQueueTriggerOptions,
+} from './storage';
+import { TimerTrigger, TimerTriggerOptions } from './timer';
+import { WarmupTrigger, WarmupTriggerOptions } from './warmup';
+import { WebPubSubTrigger, WebPubSubTriggerOptions } from './webpubsub';
+
+/**
+ * [Link to docs and examples](https://docs.microsoft.com/azure/azure-functions/functions-bindings-http-webhook-trigger?&pivots=programming-language-javascript)
+ */
+export function http(options: HttpTriggerOptions): HttpTrigger;
+
+/**
+ * [Link to docs and examples](https://docs.microsoft.com/azure/azure-functions/functions-bindings-timer?pivots=programming-language-javascript)
+ */
+export function timer(options: TimerTriggerOptions): TimerTrigger;
+
+/**
+ * [Link to docs and examples](https://docs.microsoft.com/azure/azure-functions/functions-bindings-storage-blob-trigger?pivots=programming-language-javascript)
+ */
+export function storageBlob(options: StorageBlobTriggerOptions): StorageBlobTrigger;
+
+/**
+ * [Link to docs and examples](https://docs.microsoft.com/azure/azure-functions/functions-bindings-storage-queue-trigger?pivots=programming-language-javascript)
+ */
+export function storageQueue(options: StorageQueueTriggerOptions): StorageQueueTrigger;
+
+/**
+ * [Link to docs and examples](https://docs.microsoft.com/azure/azure-functions/functions-bindings-service-bus-trigger?pivots=programming-language-javascript)
+ */
+export function serviceBusQueue(options: ServiceBusQueueTriggerOptions): ServiceBusQueueTrigger;
+
+/**
+ * [Link to docs and examples](https://docs.microsoft.com/azure/azure-functions/functions-bindings-service-bus-trigger?pivots=programming-language-javascript)
+ */
+export function serviceBusTopic(options: ServiceBusTopicTriggerOptions): ServiceBusTopicTrigger;
+
+/**
+ * [Link to docs and examples](https://docs.microsoft.com/azure/azure-functions/functions-bindings-event-hubs-trigger?pivots=programming-language-javascript)
+ */
+export function eventHub(options: EventHubTriggerOptions): EventHubTrigger;
+
+/**
+ * [Link to docs and examples](https://docs.microsoft.com/azure/azure-functions/functions-bindings-event-grid-trigger?pivots=programming-language-javascript)
+ */
+export function eventGrid(options: EventGridTriggerOptions): EventGridTrigger;
+
+/**
+ * [Link to docs and examples](https://docs.microsoft.com/azure/azure-functions/functions-bindings-cosmosdb-v2-trigger?pivots=programming-language-javascript)
+ */
+export function cosmosDB(options: CosmosDBTriggerOptions): CosmosDBTrigger;
+
+/**
+ * [Link to docs and examples](https://learn.microsoft.com/azure/azure-functions/functions-bindings-warmup?tabs=isolated-process&pivots=programming-language-javascript)
+ */
+export function warmup(options: WarmupTriggerOptions): WarmupTrigger;
+
+/**
+ * [Link to docs and examples](https://docs.microsoft.com/azure/azure-functions/functions-bindings-azure-sql-trigger?pivots=programming-language-javascript)
+ */
+export function sql(options: SqlTriggerOptions): SqlTrigger;
+
+/**
+ * [Link to docs and examples](https://learn.microsoft.com/en-us/azure/azure-functions/functions-bindings-azure-mysql-trigger?pivots=programming-language-javascript)
+ */
+export function mySql(options: MySqlTriggerOptions): MySqlTrigger;
+
+/**
+ * [Link to docs and examples](https://docs.microsoft.com/azure/azure-functions/functions-bindings-web-pubsub-trigger?pivots=programming-language-javascript)
+ */
+export function webPubSub(options: WebPubSubTriggerOptions): WebPubSubTrigger;
+
+/**
+ * [Link to docs and examples](//TODO Add link to docs and examples)
+ */
+export function mcpTool(options: McpToolFunctionOptions): McpToolTrigger;
+
+/**
+ * A generic option that can be used for any trigger type
+ * Use this method if your desired trigger type does not already have its own method
+ */
+export function generic(options: GenericTriggerOptions): FunctionTrigger;
diff --git a/node_modules/@azure/functions/types/warmup.d.ts b/node_modules/@azure/functions/types/warmup.d.ts
new file mode 100644
index 00000000..10a3ca8b
--- /dev/null
+++ b/node_modules/@azure/functions/types/warmup.d.ts
@@ -0,0 +1,17 @@
+// Copyright (c) .NET Foundation. All rights reserved.
+// Licensed under the MIT License.
+
+import { FunctionOptions, FunctionResult, FunctionTrigger } from './index';
+import { InvocationContext } from './InvocationContext';
+
+export interface WarmupContext {}
+export type WarmupHandler = (warmupContext: WarmupContext, context: InvocationContext) => FunctionResult;
+
+export interface WarmupFunctionOptions extends WarmupTriggerOptions, Partial<FunctionOptions> {
+ handler: WarmupHandler;
+
+ trigger?: WarmupTrigger;
+}
+
+export interface WarmupTriggerOptions {}
+export type WarmupTrigger = FunctionTrigger & WarmupTriggerOptions;
diff --git a/node_modules/@azure/functions/types/webpubsub.d.ts b/node_modules/@azure/functions/types/webpubsub.d.ts
new file mode 100644
index 00000000..e86ac46f
--- /dev/null
+++ b/node_modules/@azure/functions/types/webpubsub.d.ts
@@ -0,0 +1,124 @@
+// Copyright (c) .NET Foundation. All rights reserved.
+// Licensed under the MIT License.
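+
+// A usage sketch for the Web PubSub trigger below (illustrative only; the
+// option names follow the declarations in this file, registered here through
+// the generic helper):
+//
+//   import { app, trigger } from '@azure/functions';
+//
+//   app.generic('onConnected', {
+//       trigger: trigger.webPubSub({
+//           name: 'request',   // variable name for the event data
+//           hub: 'chat',       // assumed hub name
+//           eventType: 'system',
+//           eventName: 'connected',
+//       }),
+//       handler: (message, context) => {
+//           context.log('A client connected to the chat hub');
+//       },
+//   });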
+
+import { FunctionInput, FunctionOptions, FunctionOutput, FunctionResult, FunctionTrigger } from './index';
+import { InvocationContext } from './InvocationContext';
+
+export type WebPubSubHandler = (message: unknown, context: InvocationContext) => FunctionResult;
+
+export interface WebPubSubFunctionOptions extends WebPubSubTriggerOptions, Partial<FunctionOptions> {
+ handler: WebPubSubHandler;
+
+ trigger?: WebPubSubTrigger;
+}
+
+export interface WebPubSubTriggerOptions {
+ /**
+ * Required - The variable name used in function code for the parameter that receives the event data
+ */
+ name: string;
+
+ /**
+ * Required - The name of the hub to which the function is bound
+ */
+ hub: string;
+
+ /**
+ * Required - The type of event to which the function should respond
+ * Must be either 'user' or 'system'
+ */
+ eventType: 'user' | 'system';
+
+ /**
+ * Required - The name of the event to which the function should respond
+ * For system event type: 'connect', 'connected', or 'disconnected'
+ * For user-defined subprotocols: 'message'
+ * For system supported subprotocol json.webpubsub.azure.v1: user-defined event name
+ */
+ eventName: string;
+
+ /**
+ * Optional - Specifies which client protocol can trigger the Web PubSub trigger functions
+ * Default is 'all'
+ */
+ clientProtocols?: 'all' | 'webPubSub' | 'mqtt';
+
+ /**
+ * Optional - The name of an app setting or setting collection that specifies the upstream Azure Web PubSub service
+ * Used for signature validation
+ * Defaults to "WebPubSubConnectionString" if not specified
+ * Set to null to disable validation
+ */
+ connection?: string | null;
+}
+
+export type WebPubSubTrigger = FunctionTrigger & WebPubSubTriggerOptions;
+
+export interface WebPubSubConnectionInputOptions {
+ /**
+ * Required - Variable name used in function code for input connection binding object.
+ */
+ name: string;
+
+ /**
+ * Required - The name of the Web PubSub hub for the function to be triggered.
+ * Can be set in the attribute (higher priority) or in app settings as a global value.
+ */
+ hub: string;
+
+ /**
+ * Optional - The value of the user identifier claim to be set in the access key token.
+ */
+ userId?: string;
+
+ /**
+ * Optional - The client protocol type.
+ * Valid values are 'default' and 'mqtt'.
+ * For MQTT clients, you must set it to 'mqtt'.
+ * For other clients, you can omit the property or set it to 'default'.
+ */
+ clientProtocol?: 'default' | 'mqtt';
+
+ /**
+ * Optional - The name of the app setting that contains the Web PubSub Service connection string.
+ * Defaults to "WebPubSubConnectionString".
+ */
+ connection?: string;
+}
+export type WebPubSubConnectionInput = FunctionInput & WebPubSubConnectionInputOptions;
+
+export interface WebPubSubContextInputOptions {
+ /**
+ * Required - Variable name used in function code for input Web PubSub request.
+ */
+ name: string;
+
+ /**
+ * Optional - The name of an app setting or setting collection that specifies the upstream Azure Web PubSub service.
+ * The value is used for Abuse Protection and Signature validation.
+ * The value is auto resolved with "WebPubSubConnectionString" by default.
+ * Null means the validation isn't needed and always succeeds.
+ */
+ connection?: string;
+}
+export type WebPubSubContextInput = FunctionInput & WebPubSubContextInputOptions;
+
+export interface WebPubSubOutputOptions {
+ /**
+ * Required - Variable name used in function code for output binding object.
+ */
+ name: string;
+
+ /**
+ * Required - The name of the hub to which the function is bound.
+ * Can be set in the attribute (higher priority) or in app settings as a global value. + */ + hub: string; + + /** + * Optional - The name of the app setting that contains the Web PubSub Service connection string. + * Defaults to "WebPubSubConnectionString". + */ + connection?: string; +} +export type WebPubSubOutput = FunctionOutput & WebPubSubOutputOptions; diff --git a/node_modules/@fastify/busboy/LICENSE b/node_modules/@fastify/busboy/LICENSE new file mode 100644 index 00000000..290762e9 --- /dev/null +++ b/node_modules/@fastify/busboy/LICENSE @@ -0,0 +1,19 @@ +Copyright Brian White. All rights reserved. + +Permission is hereby granted, free of charge, to any person obtaining a copy +of this software and associated documentation files (the "Software"), to +deal in the Software without restriction, including without limitation the +rights to use, copy, modify, merge, publish, distribute, sublicense, and/or +sell copies of the Software, and to permit persons to whom the Software is +furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in +all copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING +FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS +IN THE SOFTWARE. \ No newline at end of file diff --git a/node_modules/@fastify/busboy/README.md b/node_modules/@fastify/busboy/README.md new file mode 100644 index 00000000..ece3cc8a --- /dev/null +++ b/node_modules/@fastify/busboy/README.md @@ -0,0 +1,271 @@ +# busboy + +
+ +[![Build Status](https://github.com/fastify/busboy/actions/workflows/ci.yml/badge.svg)](https://github.com/fastify/busboy/actions) +[![Coverage Status](https://coveralls.io/repos/fastify/busboy/badge.svg?branch=master)](https://coveralls.io/r/fastify/busboy?branch=master) +[![js-standard-style](https://img.shields.io/badge/code%20style-standard-brightgreen.svg?style=flat)](https://standardjs.com/) +[![Security Responsible Disclosure](https://img.shields.io/badge/Security-Responsible%20Disclosure-yellow.svg)](https://github.com/fastify/.github/blob/main/SECURITY.md) + +
+ +
+ +[![NPM version](https://img.shields.io/npm/v/@fastify/busboy.svg?style=flat)](https://www.npmjs.com/package/@fastify/busboy) +[![NPM downloads](https://img.shields.io/npm/dm/@fastify/busboy.svg?style=flat)](https://www.npmjs.com/package/@fastify/busboy) + +
+
+Description
+===========
+
+A Node.js module for parsing incoming HTML form data.
+
+This is an officially supported fork of the amazing library [originally created](https://github.com/mscdex/busboy) by Brian White,
+maintained by the [fastify](https://github.com/fastify/) organization and aimed at addressing long-standing issues with it.
+
+Benchmark (Mean time for 500 Kb payload, 2000 cycles, 1000 cycle warmup):
+
+| Library | Version | Mean time in nanoseconds (less is better) |
+|-----------------------|---------|-------------------------------------------|
+| busboy | 0.3.1 | `340114` |
+| @fastify/busboy | 1.0.0 | `270984` |
+
+[Changelog](https://github.com/fastify/busboy/blob/master/CHANGELOG.md) since busboy 0.3.1.
+
+Requirements
+============
+
+* [Node.js](http://nodejs.org/) 10+
+
+
+Install
+=======
+
+ npm i @fastify/busboy
+
+
+Examples
+========
+
+* Parsing (multipart) with default options:
+
+```javascript
+const http = require('node:http');
+const { inspect } = require('node:util');
+const Busboy = require('@fastify/busboy');
+
+http.createServer((req, res) => {
+ if (req.method === 'POST') {
+ const busboy = new Busboy({ headers: req.headers });
+ busboy.on('file', (fieldname, file, filename, encoding, mimetype) => {
+ console.log(`File [${fieldname}]: filename: ${filename}, encoding: ${encoding}, mimetype: ${mimetype}`);
+ file.on('data', data => {
+ console.log(`File [${fieldname}] got ${data.length} bytes`);
+ });
+ file.on('end', () => {
+ console.log(`File [${fieldname}] Finished`);
+ });
+ });
+ busboy.on('field', (fieldname, val, fieldnameTruncated, valTruncated, encoding, mimetype) => {
+ console.log(`Field [${fieldname}]: value: ${inspect(val)}`);
+ });
+ busboy.on('finish', () => {
+ console.log('Done parsing form!');
+ res.writeHead(303, { Connection: 'close', Location: '/' });
+ res.end();
+ });
+ req.pipe(busboy);
+ } else if (req.method === 'GET') {
+ res.writeHead(200, { Connection: 'close' });
+ res.end(`
+ <html>
+ <head></head>
+ <body>
+ <form method="POST" enctype="multipart/form-data">
+ <input type="file" name="filefield"><br />
+ <input type="text" name="textfield"><br />
+ <input type="submit">
+ </form>
+ </body>
+ </html>
+ `);
+ }
+}).listen(8000, () => {
+ console.log('Listening for requests');
+});
+
+// Example output, using http://nodejs.org/images/ryan-speaker.jpg as the file:
+//
+// Listening for requests
+// File [filefield]: filename: ryan-speaker.jpg, encoding: binary
+// File [filefield] got 11971 bytes
+// Field [textfield]: value: 'testing! :-)'
+// File [filefield] Finished
+// Done parsing form!
+```
+
+* Save all incoming files to disk:
+
+```javascript
+const http = require('node:http');
+const path = require('node:path');
+const os = require('node:os');
+const fs = require('node:fs');
+
+const Busboy = require('@fastify/busboy');
+
+http.createServer(function(req, res) {
+ if (req.method === 'POST') {
+ const busboy = new Busboy({ headers: req.headers });
+ busboy.on('file', function(fieldname, file, filename, encoding, mimetype) {
+ var saveTo = path.join(os.tmpdir(), path.basename(fieldname));
+ file.pipe(fs.createWriteStream(saveTo));
+ });
+ busboy.on('finish', function() {
+ res.writeHead(200, { 'Connection': 'close' });
+ res.end("That's all folks!");
+ });
+ return req.pipe(busboy);
+ }
+ res.writeHead(404);
+ res.end();
+}).listen(8000, function() {
+ console.log('Listening for requests');
+});
+```
+
+* Parsing (urlencoded) with default options:
+
+```javascript
+const http = require('node:http');
+const { inspect } = require('node:util');
+
+const Busboy = require('@fastify/busboy');
+
+http.createServer(function(req, res) {
+ if (req.method === 'POST') {
+ const busboy = new Busboy({ headers: req.headers });
+ busboy.on('file', function(fieldname, file, filename, encoding, mimetype) {
+ console.log('File [' + fieldname + ']: filename: ' + filename);
+ file.on('data', function(data) {
+ console.log('File [' + fieldname + '] got ' + data.length + ' bytes');
+ });
+ file.on('end', function() {
+ console.log('File [' + fieldname + '] Finished');
+ });
+ });
+ busboy.on('field', function(fieldname, val, fieldnameTruncated, valTruncated) {
+ console.log('Field [' + fieldname + ']: value: ' + inspect(val));
+ });
+ busboy.on('finish', function() {
+ console.log('Done parsing form!');
+ res.writeHead(303, { Connection: 'close', Location: '/' });
+ res.end();
+ });
+ req.pipe(busboy);
+ } else if (req.method === 'GET') {
+ res.writeHead(200, { Connection: 'close' });
+ res.end('<html><head></head><body>\
\ +
\ +
\ + Node.js rules!
\ + \ +
+            </body></html>');
+  }
+}).listen(8000, function() {
+  console.log('Listening for requests');
+});
+
+// Example output:
+//
+// Listening for requests
+// Field [textfield]: value: 'testing! :-)'
+// Field [selectfield]: value: '9001'
+// Field [checkfield]: value: 'on'
+// Done parsing form!
+```
+
+
+API
+===
+
+_Busboy_ is a _Writable_ stream.
+
+Busboy (special) events
+-----------------------
+
+* **file**(< _string_ >fieldname, < _ReadableStream_ >stream, < _string_ >filename, < _string_ >transferEncoding, < _string_ >mimeType) - Emitted for each new file form field found. `transferEncoding` contains the 'Content-Transfer-Encoding' value for the file stream. `mimeType` contains the 'Content-Type' value for the file stream.
+  * Note: if you listen for this event, you should always handle the `stream`, whether or not you care about the file contents (e.g. you can simply call `stream.resume();` to discard the contents); otherwise the 'finish' event will never fire on the Busboy instance. However, if you don't care about **any** incoming files, you can simply omit the 'file' listener entirely and any/all files will be automatically and safely discarded (these discarded files do still count towards the `files` and `parts` limits).
+  * If a configured file size limit was reached, `stream` will both have a boolean property `truncated` (best checked at the end of the stream) and emit a 'limit' event to notify you when this happens.
+  * The property `bytesRead` informs about the number of bytes that have been read so far.
+
+* **field**(< _string_ >fieldname, < _string_ >value, < _boolean_ >fieldnameTruncated, < _boolean_ >valueTruncated, < _string_ >transferEncoding, < _string_ >mimeType) - Emitted for each new non-file field found.
+
+* **partsLimit**() - Emitted when the specified `parts` limit has been reached. No more 'file' or 'field' events will be emitted.
+
+* **filesLimit**() - Emitted when the specified `files` limit has been reached. No more 'file' events will be emitted.
+
+* **fieldsLimit**() - Emitted when the specified `fields` limit has been reached. No more 'field' events will be emitted.
+
+
+Busboy methods
+--------------
+
+* **(constructor)**(< _object_ >config) - Creates and returns a new Busboy instance.
+
+  * The constructor takes the following valid `config` settings:
+
+    * **headers** - _object_ - These are the HTTP headers of the incoming request, which are used by individual parsers.
+
+    * **autoDestroy** - _boolean_ - Whether this stream should automatically call .destroy() on itself after ending. (Default: false).
+
+    * **highWaterMark** - _integer_ - highWaterMark to use for this Busboy instance (Default: WritableStream default).
+
+    * **fileHwm** - _integer_ - highWaterMark to use for file streams (Default: ReadableStream default).
+
+    * **defCharset** - _string_ - Default character set to use when one isn't defined (Default: 'utf8').
+
+    * **preservePath** - _boolean_ - Whether paths in the multipart 'filename' field shall be preserved. (Default: false).
+
+    * **isPartAFile** - __function__ - Use this function to override the default file detection functionality. It has the following parameters:
+
+      * fieldName - __string__ The name of the field.
+
+      * contentType - __string__ The content-type of the part, e.g. `text/plain`, `image/jpeg`, `application/octet-stream`.
+
+      * fileName - __string__ The name of a file supplied by the part.
+
+      (Default: `(fieldName, contentType, fileName) => (contentType === 'application/octet-stream' || fileName !== undefined)`)
+
+    * **limits** - _object_ - Various limits on incoming data. Valid properties are:
+
+      * **fieldNameSize** - _integer_ - Max field name size (in bytes) (Default: 100 bytes).
+
+      * **fieldSize** - _integer_ - Max field value size (in bytes) (Default: 1 MiB, which is 1024 x 1024 bytes).
+
+      * **fields** - _integer_ - Max number of non-file fields (Default: Infinity).
+
+      * **fileSize** - _integer_ - For multipart forms, the max file size (in bytes) (Default: Infinity).
+
+      * **files** - _integer_ - For multipart forms, the max number of file fields (Default: Infinity).
+
+      * **parts** - _integer_ - For multipart forms, the max number of parts (fields + files) (Default: Infinity).
+
+      * **headerPairs** - _integer_ - For multipart forms, the max number of header key=>value pairs to parse (Default: 2000).
+
+      * **headerSize** - _integer_ - For multipart forms, the max size of a multipart header (Default: 81920).
+
+  * The constructor can throw the following errors:
+
+    * **Busboy expected an options-Object.** - Busboy expected an Object as its first parameter.
+
+    * **Busboy expected an options-Object with headers-attribute.** - The first parameter is missing a headers property.
+
+    * **Limit $limit is not a valid number** - Busboy expected the given limit to be of type number. Busboy throws this error rather than silently falling back to the Busboy defaults, which could otherwise create a security issue. A common source of this error is using environment variables directly without converting them to type number.
+
+    * **Unsupported Content-Type.** - The `Content-Type` isn't one Busboy can parse.
+
+    * **Missing Content-Type-header.** - The provided headers don't include `Content-Type` at all.
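+
+As a worked illustration of the settings above, here is a minimal sketch that combines `limits` with the 'file' and 'limit' events (the limit values, the 'ok' response, and port 8000 are illustrative assumptions, not part of the API):
+
+```javascript
+const http = require('node:http');
+
+const Busboy = require('@fastify/busboy');
+
+http.createServer((req, res) => {
+  if (req.method !== 'POST') {
+    res.writeHead(404);
+    return res.end();
+  }
+  const busboy = new Busboy({
+    headers: req.headers,
+    limits: {
+      fileSize: 1024 * 1024, // files larger than 1 MiB are truncated
+      files: 2,              // at most two file fields
+      fields: 10             // at most ten non-file fields
+    }
+  });
+  busboy.on('file', (fieldname, file) => {
+    // Always consume the stream, otherwise 'finish' never fires.
+    file.resume();
+    file.on('limit', () => {
+      console.log(`File [${fieldname}] hit the fileSize limit`);
+    });
+    file.on('end', () => {
+      // `truncated` is best checked once the stream has ended.
+      console.log(`File [${fieldname}] truncated: ${file.truncated}, bytes read: ${file.bytesRead}`);
+    });
+  });
+  busboy.on('filesLimit', () => {
+    console.log('files limit reached; further file fields are discarded');
+  });
+  busboy.on('finish', () => {
+    res.writeHead(200, { Connection: 'close' });
+    res.end('ok');
+  });
+  req.pipe(busboy);
+}).listen(8000);
+```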
diff --git a/node_modules/@fastify/busboy/deps/dicer/LICENSE b/node_modules/@fastify/busboy/deps/dicer/LICENSE
new file mode 100644
index 00000000..290762e9
--- /dev/null
+++ b/node_modules/@fastify/busboy/deps/dicer/LICENSE
@@ -0,0 +1,19 @@
+Copyright Brian White. All rights reserved.
+
+Permission is hereby granted, free of charge, to any person obtaining a copy
+of this software and associated documentation files (the "Software"), to
+deal in the Software without restriction, including without limitation the
+rights to use, copy, modify, merge, publish, distribute, sublicense, and/or
+sell copies of the Software, and to permit persons to whom the Software is
+furnished to do so, subject to the following conditions:
+
+The above copyright notice and this permission notice shall be included in
+all copies or substantial portions of the Software.
+
+THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
+IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
+FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
+AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
+LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
+FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS
+IN THE SOFTWARE.
\ No newline at end of file
diff --git a/node_modules/@fastify/busboy/deps/streamsearch/sbmh.js b/node_modules/@fastify/busboy/deps/streamsearch/sbmh.js
new file mode 100644
index 00000000..b90c0e86
--- /dev/null
+++ b/node_modules/@fastify/busboy/deps/streamsearch/sbmh.js
@@ -0,0 +1,228 @@
+'use strict'
+
+/**
+ * Copyright Brian White. All rights reserved.
+ * + * @see https://github.com/mscdex/streamsearch + * + * Permission is hereby granted, free of charge, to any person obtaining a copy + * of this software and associated documentation files (the "Software"), to + * deal in the Software without restriction, including without limitation the + * rights to use, copy, modify, merge, publish, distribute, sublicense, and/or + * sell copies of the Software, and to permit persons to whom the Software is + * furnished to do so, subject to the following conditions: + * + * The above copyright notice and this permission notice shall be included in + * all copies or substantial portions of the Software. + * + * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR + * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, + * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE + * AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER + * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING + * FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS + * IN THE SOFTWARE. + * + * Based heavily on the Streaming Boyer-Moore-Horspool C++ implementation + * by Hongli Lai at: https://github.com/FooBarWidget/boyer-moore-horspool + */ +const EventEmitter = require('node:events').EventEmitter +const inherits = require('node:util').inherits + +function SBMH (needle) { + if (typeof needle === 'string') { + needle = Buffer.from(needle) + } + + if (!Buffer.isBuffer(needle)) { + throw new TypeError('The needle has to be a String or a Buffer.') + } + + const needleLength = needle.length + + if (needleLength === 0) { + throw new Error('The needle cannot be an empty String/Buffer.') + } + + if (needleLength > 256) { + throw new Error('The needle cannot have a length bigger than 256.') + } + + this.maxMatches = Infinity + this.matches = 0 + + this._occ = new Array(256) + .fill(needleLength) // Initialize occurrence table. + this._lookbehind_size = 0 + this._needle = needle + this._bufpos = 0 + + this._lookbehind = Buffer.alloc(needleLength) + + // Populate occurrence table with analysis of the needle, + // ignoring last letter. + for (var i = 0; i < needleLength - 1; ++i) { // eslint-disable-line no-var + this._occ[needle[i]] = needleLength - 1 - i + } +} +inherits(SBMH, EventEmitter) + +SBMH.prototype.reset = function () { + this._lookbehind_size = 0 + this.matches = 0 + this._bufpos = 0 +} + +SBMH.prototype.push = function (chunk, pos) { + if (!Buffer.isBuffer(chunk)) { + chunk = Buffer.from(chunk, 'binary') + } + const chlen = chunk.length + this._bufpos = pos || 0 + let r + while (r !== chlen && this.matches < this.maxMatches) { r = this._sbmh_feed(chunk) } + return r +} + +SBMH.prototype._sbmh_feed = function (data) { + const len = data.length + const needle = this._needle + const needleLength = needle.length + const lastNeedleChar = needle[needleLength - 1] + + // Positive: points to a position in `data` + // pos == 3 points to data[3] + // Negative: points to a position in the lookbehind buffer + // pos == -2 points to lookbehind[lookbehind_size - 2] + let pos = -this._lookbehind_size + let ch + + if (pos < 0) { + // Lookbehind buffer is not empty. Perform Boyer-Moore-Horspool + // search with character lookup code that considers both the + // lookbehind buffer and the current round's haystack data. + // + // Loop until + // there is a match. + // or until + // we've moved past the position that requires the + // lookbehind buffer. 
In this case we switch to the + // optimized loop. + // or until + // the character to look at lies outside the haystack. + while (pos < 0 && pos <= len - needleLength) { + ch = this._sbmh_lookup_char(data, pos + needleLength - 1) + + if ( + ch === lastNeedleChar && + this._sbmh_memcmp(data, pos, needleLength - 1) + ) { + this._lookbehind_size = 0 + ++this.matches + this.emit('info', true) + + return (this._bufpos = pos + needleLength) + } + pos += this._occ[ch] + } + + // No match. + + if (pos < 0) { + // There's too few data for Boyer-Moore-Horspool to run, + // so let's use a different algorithm to skip as much as + // we can. + // Forward pos until + // the trailing part of lookbehind + data + // looks like the beginning of the needle + // or until + // pos == 0 + while (pos < 0 && !this._sbmh_memcmp(data, pos, len - pos)) { ++pos } + } + + if (pos >= 0) { + // Discard lookbehind buffer. + this.emit('info', false, this._lookbehind, 0, this._lookbehind_size) + this._lookbehind_size = 0 + } else { + // Cut off part of the lookbehind buffer that has + // been processed and append the entire haystack + // into it. + const bytesToCutOff = this._lookbehind_size + pos + if (bytesToCutOff > 0) { + // The cut off data is guaranteed not to contain the needle. + this.emit('info', false, this._lookbehind, 0, bytesToCutOff) + } + + this._lookbehind.copy(this._lookbehind, 0, bytesToCutOff, + this._lookbehind_size - bytesToCutOff) + this._lookbehind_size -= bytesToCutOff + + data.copy(this._lookbehind, this._lookbehind_size) + this._lookbehind_size += len + + this._bufpos = len + return len + } + } + + pos += (pos >= 0) * this._bufpos + + // Lookbehind buffer is now empty. We only need to check if the + // needle is in the haystack. + if (data.indexOf(needle, pos) !== -1) { + pos = data.indexOf(needle, pos) + ++this.matches + if (pos > 0) { this.emit('info', true, data, this._bufpos, pos) } else { this.emit('info', true) } + + return (this._bufpos = pos + needleLength) + } else { + pos = len - needleLength + } + + // There was no match. If there's trailing haystack data that we cannot + // match yet using the Boyer-Moore-Horspool algorithm (because the trailing + // data is less than the needle size) then match using a modified + // algorithm that starts matching from the beginning instead of the end. + // Whatever trailing data is left after running this algorithm is added to + // the lookbehind buffer. + while ( + pos < len && + ( + data[pos] !== needle[0] || + ( + (Buffer.compare( + data.subarray(pos, pos + len - pos), + needle.subarray(0, len - pos) + ) !== 0) + ) + ) + ) { + ++pos + } + if (pos < len) { + data.copy(this._lookbehind, 0, pos, pos + (len - pos)) + this._lookbehind_size = len - pos + } + + // Everything until pos is guaranteed not to contain needle data. + if (pos > 0) { this.emit('info', false, data, this._bufpos, pos < len ? pos : len) } + + this._bufpos = len + return len +} + +SBMH.prototype._sbmh_lookup_char = function (data, pos) { + return (pos < 0) + ? 
this._lookbehind[this._lookbehind_size + pos] + : data[pos] +} + +SBMH.prototype._sbmh_memcmp = function (data, pos, len) { + for (var i = 0; i < len; ++i) { // eslint-disable-line no-var + if (this._sbmh_lookup_char(data, pos + i) !== this._needle[i]) { return false } + } + return true +} + +module.exports = SBMH diff --git a/node_modules/@fastify/busboy/package.json b/node_modules/@fastify/busboy/package.json new file mode 100644 index 00000000..83693acb --- /dev/null +++ b/node_modules/@fastify/busboy/package.json @@ -0,0 +1,86 @@ +{ + "name": "@fastify/busboy", + "version": "2.1.1", + "private": false, + "author": "Brian White ", + "contributors": [ + { + "name": "Igor Savin", + "email": "kibertoad@gmail.com", + "url": "https://github.com/kibertoad" + }, + { + "name": "Aras Abbasi", + "email": "aras.abbasi@gmail.com", + "url": "https://github.com/uzlopak" + } + ], + "description": "A streaming parser for HTML form data for node.js", + "main": "lib/main", + "type": "commonjs", + "types": "lib/main.d.ts", + "scripts": { + "bench:busboy": "cd benchmarks && npm install && npm run benchmark-fastify", + "bench:dicer": "node bench/dicer/dicer-bench-multipart-parser.js", + "coveralls": "nyc report --reporter=lcov", + "lint": "npm run lint:standard", + "lint:everything": "npm run lint && npm run test:types", + "lint:fix": "standard --fix", + "lint:standard": "standard --verbose | snazzy", + "test:mocha": "tap", + "test:types": "tsd", + "test:coverage": "nyc npm run test", + "test": "npm run test:mocha" + }, + "engines": { + "node": ">=14" + }, + "devDependencies": { + "@types/node": "^20.1.0", + "busboy": "^1.0.0", + "photofinish": "^1.8.0", + "snazzy": "^9.0.0", + "standard": "^17.0.0", + "tap": "^16.3.8", + "tinybench": "^2.5.1", + "tsd": "^0.30.0", + "typescript": "^5.0.2" + }, + "keywords": [ + "uploads", + "forms", + "multipart", + "form-data" + ], + "license": "MIT", + "repository": { + "type": "git", + "url": "git+https://github.com/fastify/busboy.git" + }, + "tsd": { + "directory": "test/types", + "compilerOptions": { + "esModuleInterop": false, + "module": "commonjs", + "target": "ES2017" + } + }, + "standard": { + "globals": [ + "describe", + "it" + ], + "ignore": [ + "bench" + ] + }, + "files": [ + "README.md", + "LICENSE", + "lib/*", + "deps/encoding/*", + "deps/dicer/lib", + "deps/streamsearch/", + "deps/dicer/LICENSE" + ] +} diff --git a/node_modules/cookie/LICENSE b/node_modules/cookie/LICENSE new file mode 100644 index 00000000..058b6b4e --- /dev/null +++ b/node_modules/cookie/LICENSE @@ -0,0 +1,24 @@ +(The MIT License) + +Copyright (c) 2012-2014 Roman Shtylman +Copyright (c) 2015 Douglas Christopher Wilson + +Permission is hereby granted, free of charge, to any person obtaining +a copy of this software and associated documentation files (the +'Software'), to deal in the Software without restriction, including +without limitation the rights to use, copy, modify, merge, publish, +distribute, sublicense, and/or sell copies of the Software, and to +permit persons to whom the Software is furnished to do so, subject to +the following conditions: + +The above copyright notice and this permission notice shall be +included in all copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED 'AS IS', WITHOUT WARRANTY OF ANY KIND, +EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF +MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. 
+IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY
+CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT,
+TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE
+SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
+
diff --git a/node_modules/cookie/README.md b/node_modules/cookie/README.md
new file mode 100644
index 00000000..71fdac11
--- /dev/null
+++ b/node_modules/cookie/README.md
@@ -0,0 +1,317 @@
+# cookie
+
+[![NPM Version][npm-version-image]][npm-url]
+[![NPM Downloads][npm-downloads-image]][npm-url]
+[![Node.js Version][node-image]][node-url]
+[![Build Status][ci-image]][ci-url]
+[![Coverage Status][coveralls-image]][coveralls-url]
+
+Basic HTTP cookie parser and serializer for HTTP servers.
+
+## Installation
+
+This is a [Node.js](https://nodejs.org/en/) module available through the
+[npm registry](https://www.npmjs.com/). Installation is done using the
+[`npm install` command](https://docs.npmjs.com/getting-started/installing-npm-packages-locally):
+
+```sh
+$ npm install cookie
+```
+
+## API
+
+```js
+var cookie = require('cookie');
+```
+
+### cookie.parse(str, options)
+
+Parse an HTTP `Cookie` header string and return an object of all cookie name-value pairs.
+The `str` argument is the string representing a `Cookie` header value and `options` is an
+optional object containing additional parsing options.
+
+```js
+var cookies = cookie.parse('foo=bar; equation=E%3Dmc%5E2');
+// { foo: 'bar', equation: 'E=mc^2' }
+```
+
+#### Options
+
+`cookie.parse` accepts these properties in the options object.
+
+##### decode
+
+Specifies a function that will be used to decode a cookie's value. Since the value of a cookie
+has a limited character set (and must be a simple string), this function can be used to decode
+a previously-encoded cookie value into a JavaScript string or other object.
+
+The default function is the global `decodeURIComponent`, which will decode any URL-encoded
+sequences into their byte representations.
+
+**note** if an error is thrown from this function, the original, non-decoded cookie value will
+be returned as the cookie's value.
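+
+For illustration, here is a minimal sketch of a custom `decode` (the base64 scheme is an
+assumed application convention, not something this module prescribes):
+
+```js
+var cookie = require('cookie');
+
+// Assume the application previously stored this value base64-encoded.
+var header = 'session=dXNlcjQy';
+
+var cookies = cookie.parse(header, {
+  decode: function (value) {
+    // Called for each cookie value; if it throws, the raw value is kept.
+    return Buffer.from(value, 'base64').toString('utf8');
+  }
+});
+
+console.log(cookies.session); // 'user42'
+```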
+
+### cookie.serialize(name, value, options)
+
+Serialize a cookie name-value pair into a `Set-Cookie` header string. The `name` argument is the
+name for the cookie, the `value` argument is the value to set the cookie to, and the `options`
+argument is an optional object containing additional serialization options.
+
+```js
+var setCookie = cookie.serialize('foo', 'bar');
+// foo=bar
+```
+
+#### Options
+
+`cookie.serialize` accepts these properties in the options object.
+
+##### domain
+
+Specifies the value for the [`Domain` `Set-Cookie` attribute][rfc-6265-5.2.3]. By default, no
+domain is set, and most clients will consider the cookie to apply to only the current domain.
+
+##### encode
+
+Specifies a function that will be used to encode a cookie's value. Since the value of a cookie
+has a limited character set (and must be a simple string), this function can be used to encode
+a value into a string suited for a cookie's value.
+
+The default function is the global `encodeURIComponent`, which will encode a JavaScript string
+into UTF-8 byte sequences and then URL-encode any that fall outside of the cookie range.
+
+##### expires
+
+Specifies the `Date` object to be the value for the [`Expires` `Set-Cookie` attribute][rfc-6265-5.2.1].
+By default, no expiration is set, and most clients will consider this a "non-persistent cookie" and
+will delete it on a condition like exiting a web browser application.
+
+**note** the [cookie storage model specification][rfc-6265-5.3] states that if both `expires` and
+`maxAge` are set, then `maxAge` takes precedence, but it is possible that not all clients obey this,
+so if both are set, they should point to the same date and time.
+
+##### httpOnly
+
+Specifies the `boolean` value for the [`HttpOnly` `Set-Cookie` attribute][rfc-6265-5.2.6]. When truthy,
+the `HttpOnly` attribute is set, otherwise it is not. By default, the `HttpOnly` attribute is not set.
+
+**note** be careful when setting this to `true`, as compliant clients will not allow client-side
+JavaScript to see the cookie in `document.cookie`.
+
+##### maxAge
+
+Specifies the `number` (in seconds) to be the value for the [`Max-Age` `Set-Cookie` attribute][rfc-6265-5.2.2].
+The given number will be converted to an integer by rounding down. By default, no maximum age is set.
+
+**note** the [cookie storage model specification][rfc-6265-5.3] states that if both `expires` and
+`maxAge` are set, then `maxAge` takes precedence, but it is possible that not all clients obey this,
+so if both are set, they should point to the same date and time.
+
+##### partitioned
+
+Specifies the `boolean` value for the [`Partitioned` `Set-Cookie`](rfc-cutler-httpbis-partitioned-cookies)
+attribute. When truthy, the `Partitioned` attribute is set, otherwise it is not. By default, the
+`Partitioned` attribute is not set.
+
+**note** This is an attribute that has not yet been fully standardized, and may change in the future.
+This also means many clients may ignore this attribute until they understand it.
+
+More information about it can be found in [the proposal](https://github.com/privacycg/CHIPS).
+
+##### path
+
+Specifies the value for the [`Path` `Set-Cookie` attribute][rfc-6265-5.2.4]. By default, the path
+is considered the ["default path"][rfc-6265-5.1.4].
+
+##### priority
+
+Specifies the `string` to be the value for the [`Priority` `Set-Cookie` attribute][rfc-west-cookie-priority-00-4.1].
+
+  - `'low'` will set the `Priority` attribute to `Low`.
+  - `'medium'` will set the `Priority` attribute to `Medium`, the default priority when not set.
+  - `'high'` will set the `Priority` attribute to `High`.
+
+More information about the different priority levels can be found in
+[the specification][rfc-west-cookie-priority-00-4.1].
+
+**note** This is an attribute that has not yet been fully standardized, and may change in the future.
+This also means many clients may ignore this attribute until they understand it.
+
+##### sameSite
+
+Specifies the `boolean` or `string` to be the value for the [`SameSite` `Set-Cookie` attribute][rfc-6265bis-09-5.4.7].
+
+  - `true` will set the `SameSite` attribute to `Strict` for strict same site enforcement.
+  - `false` will not set the `SameSite` attribute.
+  - `'lax'` will set the `SameSite` attribute to `Lax` for lax same site enforcement.
+  - `'none'` will set the `SameSite` attribute to `None` for an explicit cross-site cookie.
+  - `'strict'` will set the `SameSite` attribute to `Strict` for strict same site enforcement.
+
+More information about the different enforcement levels can be found in
+[the specification][rfc-6265bis-09-5.4.7].
+
+**note** This is an attribute that has not yet been fully standardized, and may change in the future.
+This also means many clients may ignore this attribute until they understand it.
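+
+A short sketch combining several of the options above (the attribute choices are
+illustrative, not recommendations for any particular application):
+
+```js
+var cookie = require('cookie');
+
+var setCookie = cookie.serialize('session', 'abc123', {
+  httpOnly: true,   // not visible to client-side JavaScript
+  maxAge: 60 * 60,  // one hour
+  path: '/',
+  sameSite: 'lax',
+  secure: true      // only sent over HTTPS
+});
+
+console.log(setCookie);
+// session=abc123; Max-Age=3600; Path=/; HttpOnly; Secure; SameSite=Lax
+```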
+
+##### secure
+
+Specifies the `boolean` value for the [`Secure` `Set-Cookie` attribute][rfc-6265-5.2.5]. When truthy,
+the `Secure` attribute is set, otherwise it is not. By default, the `Secure` attribute is not set.
+
+**note** be careful when setting this to `true`, as compliant clients will not send the cookie back to
+the server in the future if the browser does not have an HTTPS connection.
+
+## Example
+
+The following example uses this module in conjunction with the Node.js core HTTP server
+to prompt a user for their name and display it back on future visits.
+
+```js
+var cookie = require('cookie');
+var escapeHtml = require('escape-html');
+var http = require('http');
+var url = require('url');
+
+function onRequest(req, res) {
+  // Parse the query string
+  var query = url.parse(req.url, true, true).query;
+
+  if (query && query.name) {
+    // Set a new cookie with the name
+    res.setHeader('Set-Cookie', cookie.serialize('name', String(query.name), {
+      httpOnly: true,
+      maxAge: 60 * 60 * 24 * 7 // 1 week
+    }));
+
+    // Redirect back after setting cookie
+    res.statusCode = 302;
+    res.setHeader('Location', req.headers.referer || '/');
+    res.end();
+    return;
+  }
+
+  // Parse the cookies on the request
+  var cookies = cookie.parse(req.headers.cookie || '');
+
+  // Get the visitor name set in the cookie
+  var name = cookies.name;
+
+  res.setHeader('Content-Type', 'text/html; charset=UTF-8');
+
+  if (name) {
+    res.write('<p>Welcome back, <b>' + escapeHtml(name) + '</b>!</p>');
+  } else {
+    res.write('<p>Hello, new visitor!</p>');
+  }
+
+  res.write('<form method="GET">');
+  res.write('<input placeholder="enter your name" name="name"> <input type="submit" value="Set Name">');
+  res.end('</form>\
'); +} + +http.createServer(onRequest).listen(3000); +``` + +## Testing + +```sh +$ npm test +``` + +## Benchmark + +``` +$ npm run bench + +> cookie@0.5.0 bench +> node benchmark/index.js + + node@18.18.2 + acorn@8.10.0 + ada@2.6.0 + ares@1.19.1 + brotli@1.0.9 + cldr@43.1 + icu@73.2 + llhttp@6.0.11 + modules@108 + napi@9 + nghttp2@1.57.0 + nghttp3@0.7.0 + ngtcp2@0.8.1 + openssl@3.0.10+quic + simdutf@3.2.14 + tz@2023c + undici@5.26.3 + unicode@15.0 + uv@1.44.2 + uvwasi@0.0.18 + v8@10.2.154.26-node.26 + zlib@1.2.13.1-motley + +> node benchmark/parse-top.js + + cookie.parse - top sites + + 14 tests completed. + + parse accounts.google.com x 2,588,913 ops/sec ±0.74% (186 runs sampled) + parse apple.com x 2,370,002 ops/sec ±0.69% (186 runs sampled) + parse cloudflare.com x 2,213,102 ops/sec ±0.88% (188 runs sampled) + parse docs.google.com x 2,194,157 ops/sec ±1.03% (184 runs sampled) + parse drive.google.com x 2,265,084 ops/sec ±0.79% (187 runs sampled) + parse en.wikipedia.org x 457,099 ops/sec ±0.81% (186 runs sampled) + parse linkedin.com x 504,407 ops/sec ±0.89% (186 runs sampled) + parse maps.google.com x 1,230,959 ops/sec ±0.98% (186 runs sampled) + parse microsoft.com x 926,294 ops/sec ±0.88% (184 runs sampled) + parse play.google.com x 2,311,338 ops/sec ±0.83% (185 runs sampled) + parse support.google.com x 1,508,850 ops/sec ±0.86% (186 runs sampled) + parse www.google.com x 1,022,582 ops/sec ±1.32% (182 runs sampled) + parse youtu.be x 332,136 ops/sec ±1.02% (185 runs sampled) + parse youtube.com x 323,833 ops/sec ±0.77% (183 runs sampled) + +> node benchmark/parse.js + + cookie.parse - generic + + 6 tests completed. + + simple x 3,214,032 ops/sec ±1.61% (183 runs sampled) + decode x 587,237 ops/sec ±1.16% (187 runs sampled) + unquote x 2,954,618 ops/sec ±1.35% (183 runs sampled) + duplicates x 857,008 ops/sec ±0.89% (187 runs sampled) + 10 cookies x 292,133 ops/sec ±0.89% (187 runs sampled) + 100 cookies x 22,610 ops/sec ±0.68% (187 runs sampled) +``` + +## References + +- [RFC 6265: HTTP State Management Mechanism][rfc-6265] +- [Same-site Cookies][rfc-6265bis-09-5.4.7] + +[rfc-cutler-httpbis-partitioned-cookies]: https://tools.ietf.org/html/draft-cutler-httpbis-partitioned-cookies/ +[rfc-west-cookie-priority-00-4.1]: https://tools.ietf.org/html/draft-west-cookie-priority-00#section-4.1 +[rfc-6265bis-09-5.4.7]: https://tools.ietf.org/html/draft-ietf-httpbis-rfc6265bis-09#section-5.4.7 +[rfc-6265]: https://tools.ietf.org/html/rfc6265 +[rfc-6265-5.1.4]: https://tools.ietf.org/html/rfc6265#section-5.1.4 +[rfc-6265-5.2.1]: https://tools.ietf.org/html/rfc6265#section-5.2.1 +[rfc-6265-5.2.2]: https://tools.ietf.org/html/rfc6265#section-5.2.2 +[rfc-6265-5.2.3]: https://tools.ietf.org/html/rfc6265#section-5.2.3 +[rfc-6265-5.2.4]: https://tools.ietf.org/html/rfc6265#section-5.2.4 +[rfc-6265-5.2.5]: https://tools.ietf.org/html/rfc6265#section-5.2.5 +[rfc-6265-5.2.6]: https://tools.ietf.org/html/rfc6265#section-5.2.6 +[rfc-6265-5.3]: https://tools.ietf.org/html/rfc6265#section-5.3 + +## License + +[MIT](LICENSE) + +[ci-image]: https://badgen.net/github/checks/jshttp/cookie/master?label=ci +[ci-url]: https://github.com/jshttp/cookie/actions/workflows/ci.yml +[coveralls-image]: https://badgen.net/coveralls/c/github/jshttp/cookie/master +[coveralls-url]: https://coveralls.io/r/jshttp/cookie?branch=master +[node-image]: https://badgen.net/npm/node/cookie +[node-url]: https://nodejs.org/en/download +[npm-downloads-image]: https://badgen.net/npm/dm/cookie +[npm-url]: https://npmjs.org/package/cookie 
+[npm-version-image]: https://badgen.net/npm/v/cookie diff --git a/node_modules/cookie/SECURITY.md b/node_modules/cookie/SECURITY.md new file mode 100644 index 00000000..fd4a6c53 --- /dev/null +++ b/node_modules/cookie/SECURITY.md @@ -0,0 +1,25 @@ +# Security Policies and Procedures + +## Reporting a Bug + +The `cookie` team and community take all security bugs seriously. Thank +you for improving the security of the project. We appreciate your efforts and +responsible disclosure and will make every effort to acknowledge your +contributions. + +Report security bugs by emailing the current owner(s) of `cookie`. This +information can be found in the npm registry using the command +`npm owner ls cookie`. +If unsure or unable to get the information from the above, open an issue +in the [project issue tracker](https://github.com/jshttp/cookie/issues) +asking for the current contact information. + +To ensure the timely response to your report, please ensure that the entirety +of the report is contained within the email body and not solely behind a web +link or an attachment. + +At least one owner will acknowledge your email within 48 hours, and will send a +more detailed response within 48 hours indicating the next steps in handling +your report. After the initial reply to your report, the owners will +endeavor to keep you informed of the progress towards a fix and full +announcement, and may ask for additional information or guidance. diff --git a/node_modules/cookie/index.js b/node_modules/cookie/index.js new file mode 100644 index 00000000..acd5acd6 --- /dev/null +++ b/node_modules/cookie/index.js @@ -0,0 +1,335 @@ +/*! + * cookie + * Copyright(c) 2012-2014 Roman Shtylman + * Copyright(c) 2015 Douglas Christopher Wilson + * MIT Licensed + */ + +'use strict'; + +/** + * Module exports. + * @public + */ + +exports.parse = parse; +exports.serialize = serialize; + +/** + * Module variables. + * @private + */ + +var __toString = Object.prototype.toString +var __hasOwnProperty = Object.prototype.hasOwnProperty + +/** + * RegExp to match cookie-name in RFC 6265 sec 4.1.1 + * This refers out to the obsoleted definition of token in RFC 2616 sec 2.2 + * which has been replaced by the token definition in RFC 7230 appendix B. + * + * cookie-name = token + * token = 1*tchar + * tchar = "!" / "#" / "$" / "%" / "&" / "'" / + * "*" / "+" / "-" / "." / "^" / "_" / + * "`" / "|" / "~" / DIGIT / ALPHA + */ + +var cookieNameRegExp = /^[!#$%&'*+\-.^_`|~0-9A-Za-z]+$/; + +/** + * RegExp to match cookie-value in RFC 6265 sec 4.1.1 + * + * cookie-value = *cookie-octet / ( DQUOTE *cookie-octet DQUOTE ) + * cookie-octet = %x21 / %x23-2B / %x2D-3A / %x3C-5B / %x5D-7E + * ; US-ASCII characters excluding CTLs, + * ; whitespace DQUOTE, comma, semicolon, + * ; and backslash + */ + +var cookieValueRegExp = /^("?)[\u0021\u0023-\u002B\u002D-\u003A\u003C-\u005B\u005D-\u007E]*\1$/; + +/** + * RegExp to match domain-value in RFC 6265 sec 4.1.1 + * + * domain-value = + * ; defined in [RFC1034], Section 3.5, as + * ; enhanced by [RFC1123], Section 2.1 + * =