diff --git a/.funcignore b/.funcignore new file mode 100644 index 00000000..d5b3b4a2 --- /dev/null +++ b/.funcignore @@ -0,0 +1,10 @@ +*.js.map +*.ts +.git* +.vscode +__azurite_db*__.json +__blobstorage__ +__queuestorage__ +local.settings.json +test +tsconfig.json \ No newline at end of file diff --git a/.github/workflows/deploy-progress-api.yml b/.github/workflows/deploy-progress-api.yml new file mode 100644 index 00000000..58fa0270 --- /dev/null +++ b/.github/workflows/deploy-progress-api.yml @@ -0,0 +1,72 @@ +name: Deploy Progress API to Azure Functions + +on: + push: + branches: [ main ] + paths: + - 'progress-api/**' + workflow_dispatch: + +env: + AZURE_FUNCTIONAPP_NAME: futurecoder-progress-api + AZURE_FUNCTIONAPP_PACKAGE: 'progress-api' + AZURE_RESOURCE_GROUP: MSAN-RG-Training + NODE_VERSION: '18.x' + +jobs: + build-and-deploy: + runs-on: ubuntu-latest + environment: production + permissions: + contents: read + id-token: write + + steps: + - name: 'Checkout repository' + uses: actions/checkout@v3 + + - name: Setup Node.js + uses: actions/setup-node@v3 + with: + node-version: ${{ env.NODE_VERSION }} + cache: 'npm' + + - name: Install dependencies + working-directory: ./progress-api + run: npm ci + + - name: Build the project + working-directory: ./progress-api + run: | + rm -rf dist deployment functionapp.zip + npm run build + mkdir -p deployment + cp -R dist/* deployment/ + cp host.json deployment/ + cp package*.json deployment/ + pushd deployment + npm ci --omit=dev + zip -r ../functionapp.zip . 
+ popd + ls -la functionapp.zip + + - name: 'Azure login' + uses: azure/login@v2 + with: + client-id: ${{ secrets.AZURE_CLIENT_ID }} + tenant-id: ${{ secrets.AZURE_TENANT_ID }} + subscription-id: ${{ secrets.AZURE_SUBSCRIPTION_ID }} + + - name: 'Deploy package via CLI' + run: | + az functionapp deployment source config-zip \ + --name ${{ env.AZURE_FUNCTIONAPP_NAME }} \ + --resource-group ${{ env.AZURE_RESOURCE_GROUP }} \ + --src ./progress-api/functionapp.zip + + - name: 'Ensure run-from-package' + run: | + az functionapp config appsettings set \ + --name ${{ env.AZURE_FUNCTIONAPP_NAME }} \ + --resource-group ${{ env.AZURE_RESOURCE_GROUP }} \ + --settings WEBSITE_RUN_FROM_PACKAGE=1 diff --git a/.github/workflows/deploy.yml b/.github/workflows/deploy.yml new file mode 100644 index 00000000..8b048ab8 --- /dev/null +++ b/.github/workflows/deploy.yml @@ -0,0 +1,117 @@ +name: Build & Deploy container to Azure Web App (OIDC, GHCR) + +on: + push: + branches: [ main ] + workflow_dispatch: + +permissions: + contents: read + packages: write + id-token: write + +env: + REGISTRY: ghcr.io + IMAGE_NAME: futurecoder + APP_NAME: PythonCoding + RESOURCE_GROUP: MSAN-RG-Training + FUNCTION_APP_NAME: futurecoder-progress-api + WEBSITES_PORT: "80" + DOCKER_BUILDKIT: "1" + BUILDKIT_PROGRESS: plain + +jobs: + build-and-deploy: + runs-on: ubuntu-latest + + steps: + - name: Checkout + uses: actions/checkout@v4 + + - name: Compute image tags + id: meta + run: | + OWNER_LC=$(echo "${{ github.repository_owner }}" | tr '[:upper:]' '[:lower:]') + echo "owner=${OWNER_LC}" >> $GITHUB_OUTPUT + echo "tag_sha=${{ env.REGISTRY }}/${OWNER_LC}/${{ env.IMAGE_NAME }}:${{ github.sha }}" >> $GITHUB_OUTPUT + echo "tag_latest=${{ env.REGISTRY }}/${OWNER_LC}/${{ env.IMAGE_NAME }}:latest" >> $GITHUB_OUTPUT + + - name: Set up Buildx + uses: docker/setup-buildx-action@v3 + + - name: Log in to GHCR (push) + uses: docker/login-action@v3 + with: + registry: ${{ env.REGISTRY }} + username: ${{ github.actor }} + 
password: ${{ secrets.GITHUB_TOKEN }} + + - name: Build & push image to GHCR + uses: docker/build-push-action@v6 + with: + context: . + push: true + build-args: | + REACT_APP_PROGRESS_API_BASE=${{ secrets.REACT_APP_PROGRESS_API_BASE }} + REACT_APP_PROGRESS_API_KEY=${{ secrets.REACT_APP_PROGRESS_API_KEY }} + REACT_APP_ADMIN_EMAILS=${{ secrets.REACT_APP_ADMIN_EMAILS }} + tags: | + ${{ steps.meta.outputs.tag_sha }} + ${{ steps.meta.outputs.tag_latest }} + + - name: Azure login (OIDC) + uses: azure/login@v2 + with: + client-id: ${{ secrets.AZURE_CLIENT_ID }} + tenant-id: ${{ secrets.AZURE_TENANT_ID }} + subscription-id: ${{ secrets.AZURE_SUBSCRIPTION_ID }} + + - name: Set web app settings + run: | + az webapp config appsettings set \ + --name "${{ env.APP_NAME }}" \ + --resource-group "${{ env.RESOURCE_GROUP }}" \ + --settings WEBSITES_PORT=${{ env.WEBSITES_PORT }} \ + REACT_APP_PROGRESS_API_BASE=${{ secrets.REACT_APP_PROGRESS_API_BASE }} \ + REACT_APP_PROGRESS_API_KEY=${{ secrets.REACT_APP_PROGRESS_API_KEY }} \ + REACT_APP_ADMIN_EMAILS=${{ secrets.REACT_APP_ADMIN_EMAILS }} + + - name: Configure Function App settings + run: | + az functionapp config appsettings set \ + --name "${{ env.FUNCTION_APP_NAME }}" \ + --resource-group "${{ env.RESOURCE_GROUP }}" \ + --settings "CosmosConnection=${{ secrets.COSMOS_CONNECTION_STRING }}" \ + "CORS_ALLOWED_ORIGIN=${{ secrets.FUNCTION_CORS_ALLOWED_ORIGIN }}" \ + "ADMIN_EMAIL_ALLOWLIST=${{ secrets.FUNCTION_ADMIN_EMAIL_ALLOWLIST }}" + + - name: Determine if GHCR is private + id: ghcr + shell: bash + env: + GHCR_READ_TOKEN: ${{ secrets.GHCR_READ_TOKEN }} + run: | + if [ -n "${GHCR_READ_TOKEN}" ]; then + echo "use_auth=true" >> $GITHUB_OUTPUT + else + echo "use_auth=false" >> $GITHUB_OUTPUT + fi + + - name: Configure container (public GHCR) + if: ${{ steps.ghcr.outputs.use_auth == 'false' }} + run: | + az webapp config container set \ + --name "${{ env.APP_NAME }}" \ + --resource-group "${{ env.RESOURCE_GROUP }}" \ + 
--docker-custom-image-name "${{ steps.meta.outputs.tag_sha }}" + + - name: Configure container (private GHCR) + if: ${{ steps.ghcr.outputs.use_auth == 'true' }} + run: | + az webapp config container set \ + --name "${{ env.APP_NAME }}" \ + --resource-group "${{ env.RESOURCE_GROUP }}" \ + --docker-custom-image-name "${{ steps.meta.outputs.tag_sha }}" \ + --docker-registry-server-url "https://${{ env.REGISTRY }}" \ + --docker-registry-server-user "${{ github.actor }}" \ + --docker-registry-server-password "${{ secrets.GHCR_READ_TOKEN }}" diff --git a/.github/workflows/workflow.yml b/.github/workflows/workflow.yml deleted file mode 100644 index 9d39f37f..00000000 --- a/.github/workflows/workflow.yml +++ /dev/null @@ -1,52 +0,0 @@ -name: CI -on: - push: - branches: - - main - pull_request: - branches: - - main - -jobs: - test: - runs-on: ubuntu-latest - steps: - - uses: actions/checkout@v5 - - name: Set up Python - uses: actions/setup-python@v6 - with: - python-version: '3.12.1' - - name: Set up Node - uses: actions/setup-node@v5 - with: - node-version: '22.17.0' - - name: Install chromedriver - uses: nanasess/setup-chromedriver@master - - name: Install Python and JS dependencies - run: ./scripts/install_deps.sh - - name: Build - id: build - env: - REACT_APP_SENTRY_DSN: https://37b1f01452b54bf4a0fe88656070998f@o871617.ingest.sentry.io/5824691 - REACT_APP_USE_FIREBASE_EMULATORS: '1' - REACT_APP_FIREBASE_STAGING: '1' - FUTURECODER_LANGUAGE: en - run: ./scripts/build.sh - - name: Test - env: - FUTURECODER_LANGUAGE: en - FIREBASE_TOKEN: '1//03I37hFeN4kn3CgYIARAAGAMSNwF-L9IrUvqofZbhOkS8YMtQBhw_bu2TpWYC5MHvnaZDsWPP0KJMypXPyoxogkl8A6p2RxPJQwQ' - run: ./scripts/ci_test.sh - - name: Upload test artifacts - uses: actions/upload-artifact@v4 - if: steps.build.outcome == 'success' - with: - path: '**/test_frontend_assets/' - - name: Deploy preview - uses: FirebaseExtended/action-hosting-deploy@v0 - if: steps.build.outcome == 'success' && github.ref != 'refs/heads/main' - with: - 
repoToken: '${{ secrets.GITHUB_TOKEN }}' - firebaseServiceAccount: '${{ secrets.FIREBASE_SERVICE_ACCOUNT }}' - projectId: futurecoder-staging - channelId: ${{ github.ref }} diff --git a/Dockerfile b/Dockerfile new file mode 100644 index 00000000..2b827a34 --- /dev/null +++ b/Dockerfile @@ -0,0 +1,106 @@ +# ---------- build stage ---------- +FROM python:3.12.1-slim AS build + +ARG NODE_MAJOR=22 +ARG REACT_APP_PROGRESS_API_BASE="" +ARG REACT_APP_PROGRESS_API_KEY="" +ARG REACT_APP_ADMIN_EMAILS="" + +ENV DEBIAN_FRONTEND=noninteractive \ + PIP_NO_CACHE_DIR=1 \ + POETRY_VERSION=1.8.5 \ + # futurecoder build expects these; adjust if you want to use production Firebase etc. + FUTURECODER_LANGUAGE=en \ + REACT_APP_USE_FIREBASE_EMULATORS=1 \ + REACT_APP_FIREBASE_STAGING=1 \ + CI=false \ + REACT_APP_PROGRESS_API_BASE=${REACT_APP_PROGRESS_API_BASE} \ + REACT_APP_PROGRESS_API_KEY=${REACT_APP_PROGRESS_API_KEY} \ + REACT_APP_ADMIN_EMAILS=${REACT_APP_ADMIN_EMAILS} + +# system deps + Node.js +RUN set -eux; \ + apt-get update; \ + apt-get install -y --no-install-recommends curl ca-certificates gnupg git build-essential; \ + curl -fsSL https://deb.nodesource.com/setup_${NODE_MAJOR}.x | bash -; \ + apt-get install -y --no-install-recommends nodejs; \ + node -v && npm -v; \ + rm -rf /var/lib/apt/lists/* + +# Poetry +RUN set -eux; \ + curl -sSL https://install.python-poetry.org | python3 -; \ + ln -s /root/.local/bin/poetry /usr/local/bin/poetry; \ + poetry --version + +WORKDIR /app + +# Install Python deps first (better layer caching) +COPY pyproject.toml poetry.lock ./ +RUN set -eux; \ + poetry config virtualenvs.in-project true; \ + poetry install --no-root --no-interaction --no-ansi + +# Bring in the rest of the project +COPY . . 
+RUN chmod +x scripts/*.sh || true + +# ✅ Install frontend deps so "craco" exists +RUN npm ci --prefix frontend + +# Build the site (show each failing command clearly) +RUN set -eux; \ + poetry --version; \ + poetry install --no-root -v; \ + ./scripts/generate.sh; \ + ./scripts/build.sh + +# After build, the static site should be in dist/course +# Validate the build output early (this will fail the build if something’s missing) +# ---------- normalize & validate build output ---------- +RUN set -eux; \ + # Base structure must exist + test -d dist/course; \ + test -f dist/course/index.html; \ + test -d dist/course/pyodide; \ + \ + # Normalize python_stdlib.zip (required by runtime) + if [ -f dist/course/python_stdlib.zip ]; then \ + echo "Found stdlib at dist/course/python_stdlib.zip"; \ + elif [ -f dist/course/pyodide/python_stdlib.zip ]; then \ + echo "Found stdlib under pyodide/, normalizing to course root..."; \ + cp -f dist/course/pyodide/python_stdlib.zip dist/course/python_stdlib.zip; \ + else \ + echo "python_stdlib.zip missing (looked in course/ and course/pyodide/)"; \ + exit 1; \ + fi; \ + \ + # Optional: normalize python_core tar if the build produced one + CORE_FILE="$(find dist/course -maxdepth 2 -type f -name 'python_core*.tar' | head -n1 || true)"; \ + if [ -n "$CORE_FILE" ] && [ ! 
-f dist/course/python_core.tar ]; then \ + echo "Normalizing $CORE_FILE -> dist/course/python_core.tar"; \ + cp -f "$CORE_FILE" dist/course/python_core.tar || true; \ + fi; \ + \ + # Final assertions + test -f dist/course/python_stdlib.zip + + +# ---------- runtime stage ---------- +FROM nginx:alpine AS runtime + +# Nginx serves on 80 +EXPOSE 80 + +# Copy the built site under /usr/share/nginx/html +COPY --from=build /app/dist/course /usr/share/nginx/html/course + +# Copy robots.txt from project root into the nginx root +COPY robots.txt /usr/share/nginx/html/robots.txt + +# Optional: redirect root to /course/ +RUN printf '' \ + > /usr/share/nginx/html/index.html + +# Minimal nginx config (default works fine for static) +CMD ["nginx", "-g", "daemon off;"] diff --git a/docs/azure-progress-api.md b/docs/azure-progress-api.md new file mode 100644 index 00000000..cfbd2b1c --- /dev/null +++ b/docs/azure-progress-api.md @@ -0,0 +1,255 @@ +Azure Progress API Integration +================================ + +This frontend can persist learner progress and feed the admin dashboard entirely through Azure Functions. The sections below describe a minimal implementation you can adapt to your infrastructure. + +Environment variables +--------------------- + +Configure the React app via `.env.local` (see `frontend/.env.local.example`): + +``` +REACT_APP_PROGRESS_API_BASE=https://your-function-app.azurewebsites.net/api +REACT_APP_PROGRESS_API_KEY= +REACT_APP_ADMIN_EMAILS=admin@example.com,@company.com +``` + +Endpoint contract +----------------- + +The UI expects three endpoints under `REACT_APP_PROGRESS_API_BASE`: + +1. **GET `/users/{id}`** + Returns a JSON document shaped like: + ```json + { + "pageSlug": "Introduction", + "developerMode": false, + "editorContent": "print('hello world')", + "pagesProgress": { + "Introduction": { + "step_name": "writing_code", + "updated_at": "2025-10-24T08:00:00Z" + } + } + } + ``` + Fields can contain more data, but these keys are required. + +2. 
**PATCH `/users/{id}`** + Accepts partial updates in the same shape and merges them into storage. For example: + ```json + { + "pagesProgress/Introduction/step_name": "next_step", + "pagesProgress/Introduction/updated_at": "2025-10-24T08:05:00Z" + } + ``` + You can implement this either as a document merge or convert the flattened paths back to nested objects before writing. + +3. **GET `/admin/progress`** + Returns either an array or an object with a `users` array. Each entry needs: + ```json + { + "userId": "1234-5678", + "email": "learner@example.com", + "pagesProgress": { "...": { "step_name": "...", "updated_at": "..." } } + } + ``` + +Sample Azure Functions skeleton +------------------------------- + +Below is a TypeScript/JavaScript example using Cosmos DB bindings. Adjust names and bindings to match your resource group. + +`UsersGet/index.js` +```javascript +module.exports = async function (context, req) { + const { id } = req.params; + const user = context.bindings.userDocument || {}; + context.res = { + status: 200, + body: { + pageSlug: user.pageSlug || "loading_placeholder", + developerMode: Boolean(user.developerMode), + editorContent: user.editorContent || "", + pagesProgress: user.pagesProgress || {}, + email: user.email || null + } + }; +}; +``` + +`UsersGet/function.json` +```json +{ + "bindings": [ + { + "authLevel": "function", + "type": "httpTrigger", + "direction": "in", + "name": "req", + "methods": [ "get" ], + "route": "users/{id}" + }, + { + "type": "cosmosDB", + "direction": "in", + "name": "userDocument", + "databaseName": "futurecoder", + "containerName": "progress", + "connection": "CosmosConnection", + "partitionKey": "{id}", + "id": "{id}" + }, + { + "type": "http", + "direction": "out", + "name": "res" + } + ] +} +``` + +`UsersPatch/index.js` +```javascript +const applyPatch = (doc, updates) => { + const clone = { ...doc }; + Object.entries(updates).forEach(([path, value]) => { + const segments = path.split("/"); + let cursor = 
clone; + while (segments.length > 1) { + const key = segments.shift(); + cursor[key] = cursor[key] || {}; + cursor = cursor[key]; + } + cursor[segments[0]] = value; + }); + return clone; +}; + +module.exports = async function (context, req) { + const { id } = req.params; + const updates = req.body || {}; + const current = context.bindings.userDocument || {}; + const next = applyPatch(current, updates); + next.userId = next.userId || id; + context.bindings.updatedDocument = next; + context.res = { status: 204 }; +}; +``` + +`UsersPatch/function.json` +```json +{ + "bindings": [ + { + "authLevel": "function", + "type": "httpTrigger", + "direction": "in", + "name": "req", + "methods": [ "patch" ], + "route": "users/{id}" + }, + { + "type": "cosmosDB", + "direction": "in", + "name": "userDocument", + "databaseName": "futurecoder", + "containerName": "progress", + "connection": "CosmosConnection", + "partitionKey": "{id}", + "id": "{id}" + }, + { + "type": "cosmosDB", + "direction": "out", + "name": "updatedDocument", + "databaseName": "futurecoder", + "containerName": "progress", + "connection": "CosmosConnection", + "createIfNotExists": true, + "partitionKey": "{id}" + }, + { + "type": "http", + "direction": "out", + "name": "res" + } + ] +} +``` + +`AdminProgress/index.js` +```javascript +module.exports = async function (context, req) { + const users = context.bindings.progressDocuments || []; + context.res = { + status: 200, + body: { + users: users.map(doc => ({ + userId: doc.userId || doc.id, + email: doc.email || null, + pagesProgress: doc.pagesProgress || {} + })) + } + }; +}; +``` + +`AdminProgress/function.json` +```json +{ + "bindings": [ + { + "authLevel": "function", + "type": "httpTrigger", + "direction": "in", + "name": "req", + "methods": [ "get" ], + "route": "admin/progress" + }, + { + "type": "cosmosDB", + "direction": "in", + "name": "progressDocuments", + "databaseName": "futurecoder", + "containerName": "progress", + "connection": 
"CosmosConnection", + "sqlQuery": "SELECT * FROM c" + }, + { + "type": "http", + "direction": "out", + "name": "res" + } + ] +} +``` + +Local testing tips +------------------ + +1. Install the Azure Functions Core Tools and Cosmos DB emulator (or target your cloud instance). +2. Create `local.settings.json` alongside `host.json`: + ```json + { + "IsEncrypted": false, + "Values": { + "AzureWebJobsStorage": "UseDevelopmentStorage=true", + "FUNCTIONS_WORKER_RUNTIME": "node", + "CosmosConnection": "AccountEndpoint=https://localhost:8081/;AccountKey=local-emulator-key;" + } + } + ``` +3. Seed the `progress` container with documents matching the schema above. +4. Run `func start` and confirm the endpoints return sample data. +5. Set `REACT_APP_PROGRESS_API_BASE=http://localhost:7071/api` in `.env.local` and start the React app. + +Security considerations +----------------------- + +- Swap `authLevel` to `anonymous` and enforce Azure AD via Easy Auth if you plan to use MSAL access tokens instead of a functions key. +- Restrict `AdminProgress` to admins only (e.g. verify group claims in Easy Auth headers or add your own JWT validation layer). +- Sanitize inputs when you expand the schema; the skeleton above blindly merges update paths, so refine it for production. + +Once these endpoints are live, the frontend will automatically persist learner progress and render the admin dashboard using your Azure stack. diff --git a/frontend/.env.local.example b/frontend/.env.local.example new file mode 100644 index 00000000..e69de29b diff --git a/frontend/README.md b/frontend/README.md index 859d27a6..18ba6f02 100644 --- a/frontend/README.md +++ b/frontend/README.md @@ -1,5 +1,34 @@ This project was bootstrapped with [Create React App](https://github.com/facebook/create-react-app). +## Azure Progress API Setup + +See `../docs/azure-progress-api.md` for a full backend blueprint. 
+ +The frontend now talks to an Azure-hosted progress service instead of Firebase when the following environment variables are present (see `.env.local.example`): + +``` +REACT_APP_PROGRESS_API_BASE=https://.azurewebsites.net/api +REACT_APP_PROGRESS_API_KEY= +REACT_APP_ADMIN_EMAILS=comma,separated,list,@or-domains +``` + +1. Copy `.env.local.example` to `.env.local` and fill in the values for your environment. +2. Restart the React dev server after editing env files; Create React App only reads them on startup. +3. The frontend expects the Azure API to expose: + - `GET /users/{id}` → returns a document containing at least `pagesProgress`, `pageSlug`, `developerMode`, `editorContent`. + - `PATCH /users/{id}` → merges the posted JSON into the stored user document. + - `GET /admin/progress` → returns either an array or object with `users` array; each entry should expose `userId`/`email` and `pagesProgress`. + +If these variables are omitted the app will fall back to the legacy Firebase implementation (or local storage only if Firebase is disabled). + +### Quick local test workflow + +1. Run the Azure Functions progress API locally with `REACT_APP_PROGRESS_API_BASE` pointing to the emulator URL (for example `http://localhost:7071/api`). +2. Populate your backing data store (e.g. Cosmos DB or the emulator) with sample progress documents. +3. From `frontend/`, run `npm install` and `npm start`. +4. Sign in as a learner and navigate course content to confirm progress updates. +5. Open `#admin` (or use the Admin Dashboard link) with an admin user to verify cross-user summaries. + ## Available Scripts In the project directory, you can run: diff --git a/frontend/public/index.html b/frontend/public/index.html index fc29ecb9..04ad7066 100644 --- a/frontend/public/index.html +++ b/frontend/public/index.html @@ -24,7 +24,7 @@ work correctly both with client-side routing and a non-root public URL. Learn how to configure a non-root public URL by running `npm run build`. 
--> - futurecoder + Python Training diff --git a/frontend/src/App.js b/frontend/src/App.js index 51131bb7..111c785f 100644 --- a/frontend/src/App.js +++ b/frontend/src/App.js @@ -54,10 +54,39 @@ import {interrupt, runCode, terminalRef} from "./RunCode"; import firebase from "firebase/app"; import {TableOfContents} from "./TableOfContents"; import HeaderLoginInfo from "./components/HeaderLoginInfo"; +import AdminDashboard from "./components/AdminDashboard"; import terms from "./terms.json" import _ from "lodash"; import {otherVisibleLanguages} from "./languages"; +const rawAdminEmailRules = (process.env.REACT_APP_ADMIN_EMAILS || "") + .split(",") + .map(rule => rule.trim().toLowerCase()) + .filter(Boolean); + +const matchesAdminEmail = (email) => { + if (!email) { + return false; + } + const normalised = email.toLowerCase(); + return rawAdminEmailRules.some(rule => { + if (rule.startsWith("@")) { + return normalised.endsWith(rule); + } + return normalised === rule; + }); +}; + +const isAdminUser = (user) => { + if (!user) { + return false; + } + if (user.developerMode) { + return true; + } + return matchesAdminEmail(user.email || ""); +}; + const EditorButtons = ( { @@ -427,12 +456,34 @@ const CourseText = ( class AppComponent extends React.Component { render() { - if (this.props.route === "toc") { + const {route, user, pages} = this.props; + const admin = isAdminUser(user); + if (route === "toc") { return } + if (route === "admin") { + return
+ + + {admin + ? + : ( +
+

Admin access required

+

+ You need to be marked as an admin to view this dashboard. + Add your email address to REACT_APP_ADMIN_EMAILS or enable developer mode in settings. +

+
+ ) + } +
+
+ } + return
- + @@ -440,14 +491,18 @@ class AppComponent extends React.Component { } } -function NavBar({user}) { +function NavBar({user, isAdmin}) { return
-const MenuPopup = ({user}) => +const MenuPopup = ({user, isAdmin}) =>

+ {isAdmin && +

+ close()} + > + Admin dashboard + +

+ } { otherVisibleLanguages.map(lang => diff --git a/frontend/src/book/store.js b/frontend/src/book/store.js index b2feeeae..f696884e 100644 --- a/frontend/src/book/store.js +++ b/frontend/src/book/store.js @@ -13,6 +13,7 @@ import {wrapAsync} from "../frontendlib/sentry"; import pRetry from 'p-retry'; import localforage from "localforage"; import {languageConfig} from "../languages"; +import {fetchUserProgress, patchUserProgress, progressApiAvailable} from "../services/progressApi"; export const disableFirebase = !!process.env.REACT_APP_DISABLE_FIREBASE; export const disableLogin = disableFirebase || !!process.env.REACT_APP_DISABLE_LOGIN; @@ -75,6 +76,7 @@ const initialState = { pagesProgress: { loading_placeholder: { step_name: "loading_placeholder", + updated_at: null, } }, pageSlug: "loading_placeholder", @@ -109,6 +111,8 @@ const initialState = { const {reducer, makeAction, setState, localState, statePush} = redact('book', initialState, {dispatched: true}); +let progressApiUserId = null; + export {reducer as bookReducer, setState as bookSetState, localState as bookState, statePush as bookStatePush}; const isLoaded = (state) => state.user.uid && state.pageSlugsList.length > 1 @@ -150,7 +154,7 @@ const afterSetPage = (pageSlug, state = localState) => { window.location.hash = pageSlug; } -export const specialHash = (hash) => ["toc", "ide", "question"].includes(hash); +export const specialHash = (hash) => ["toc", "ide", "question", "admin"].includes(hash); export const navigate = () => { const hash = window.location.hash.substring(1); @@ -182,7 +186,9 @@ export const moveStep = (delta) => { if (delta > 0) { animateStep(stepIndex); } - setUserStateAndDatabase(["pagesProgress", localState.user.pageSlug, "step_name"], step.name); + const progressPath = ["pagesProgress", localState.user.pageSlug]; + setUserStateAndDatabase([...progressPath, "step_name"], step.name); + setUserStateAndDatabase([...progressPath, "updated_at"], new Date().toISOString()); 
setState("assistant", initialState.assistant); }; @@ -259,13 +265,25 @@ if (!disableFirebase) { } export const updateUserData = async (user) => { - Sentry.setUser({id: user.uid}); - const userData = await databaseRequest("GET"); + const identifier = user.uid || user.email; + if (identifier) { + Sentry.setUser({id: identifier}); + } + let userData = {}; + if (progressApiAvailable) { + try { + userData = await fetchUserProgress(identifier) || {}; + } catch (error) { + console.error("Failed to load user progress from Azure API", error); + } + } else { + userData = await databaseRequest("GET"); + } // loadUser should be called on the local store data first // for proper merging with the firebase user data in loadUserAndPages await loadUserFromLocalStorePromise; loadUser({ - uid: user.uid, + uid: user.uid || identifier, email: user.email, ...userData, }); @@ -280,7 +298,7 @@ const loadUserFromLocalStorePromise = localStore.getItem("user").then(user => { }); export const databaseRequest = wrapAsync(async function databaseRequest(method, data={}, endpoint="users") { - if (disableFirebase) { + if (progressApiAvailable || disableFirebase) { return; } const currentUser = firebase.auth().currentUser; @@ -300,7 +318,29 @@ export const databaseRequest = wrapAsync(async function databaseRequest(method, return response.data; }); -export const updateDatabase = (updates) => { +const resolveUserId = (override) => { + if (typeof override === "string") { + return override; + } + if (override && typeof override === "object") { + return override.userId || override.uid || override.email || null; + } + return null; +}; + +export const updateDatabase = (updates, userContext) => { + if (progressApiAvailable) { + const explicit = resolveUserId(userContext); + const userId = explicit + || progressApiUserId + || resolveUserId(localState.user); + if (!userId) { + return Promise.resolve(); + } + return patchUserProgress(userId, updates).catch(error => { + console.error("Failed to update progress 
via Azure API", error, updates); + }); + } return databaseRequest("PATCH", updates); } @@ -343,8 +383,10 @@ const loadUserAndPages = (state, previousUser = {}) => { pagesProgress = {...(pagesProgress || {})}; pageSlugsList.forEach(slug => { - const steps = pages[slug].steps; - let step_name = pagesProgress[slug]?.step_name || steps[0].name; + const steps = pages[slug].steps || []; + const currentProgress = pagesProgress[slug] || {}; + let step_name = currentProgress.step_name || steps[0]?.name || ""; + let updated_at = currentProgress.updated_at ?? null; const progress = previousUser.pagesProgress?.[slug]; if (progress) { const findStepIndex = (name) => _.find(steps, {name})?.index || 0 @@ -353,16 +395,24 @@ const loadUserAndPages = (state, previousUser = {}) => { if (previousIndex > currentIndex) { step_name = progress.step_name; updates[`pagesProgress/${slug}/step_name`] = step_name; + if (progress.updated_at) { + updates[`pagesProgress/${slug}/updated_at`] = progress.updated_at; + updated_at = progress.updated_at; + } + } else if (!updated_at && progress.updated_at) { + updated_at = progress.updated_at; } } - pagesProgress[slug] = {step_name}; + pagesProgress[slug] = {...currentProgress, step_name, updated_at}; }); migrateUserState(pages, pagesProgress, updates); - updateDatabase(updates); + const userId = state.user.uid || state.user.userId || state.user.email || previousUser.uid || previousUser.userId || previousUser.email || null; + updateDatabase(updates, userId); state = {...state, user: {...state.user, pagesProgress, pageSlug, developerMode}}; + progressApiUserId = state.user.uid || state.user.userId || state.user.email || null; if (!specialHash(hash)) { afterSetPage(pageSlug, state); } @@ -375,12 +425,18 @@ const loadUserAndPages = (state, previousUser = {}) => { function migrateUserState(pages, pagesProgress, updates) { const oldSlug = "GettingElementsAtPosition"; const newSlug = "GettingElementsAtPositionExercises"; - const {step_name} = 
pagesProgress[oldSlug]; + const oldProgress = pagesProgress[oldSlug] || {}; + const newProgress = pagesProgress[newSlug] || {}; + const {step_name} = oldProgress; if (!pages[oldSlug].step_names.includes(step_name)) { - pagesProgress[oldSlug] = {step_name: "final_text"}; - pagesProgress[newSlug] = {step_name}; + const updated_at = oldProgress.updated_at ?? null; + pagesProgress[oldSlug] = {...oldProgress, step_name: "final_text"}; + pagesProgress[newSlug] = {...newProgress, step_name, updated_at}; updates[`pagesProgress/${oldSlug}/step_name`] = "final_text"; updates[`pagesProgress/${newSlug}/step_name`] = step_name; + if (updated_at) { + updates[`pagesProgress/${newSlug}/updated_at`] = updated_at; + } } } diff --git a/frontend/src/components/AdminDashboard.jsx b/frontend/src/components/AdminDashboard.jsx new file mode 100644 index 00000000..673ee580 --- /dev/null +++ b/frontend/src/components/AdminDashboard.jsx @@ -0,0 +1,269 @@ +import React, {useEffect, useMemo, useState} from "react"; +import {fetchAdminProgress, progressApiAvailable} from "../services/progressApi"; + +const stripHtml = (html) => (html || "").replace(/<[^>]+>/g, ""); + +const formatTimestamp = (timestamp) => { + if (!timestamp) { + return "Not updated yet"; + } + const date = new Date(timestamp); + if (Number.isNaN(date.getTime())) { + return "Not updated yet"; + } + return new Intl.DateTimeFormat(undefined, {dateStyle: "medium", timeStyle: "short"}).format(date); +}; + +const statusLabels = { + completed: "Completed", + inProgress: "In progress", + notStarted: "Not started", + noSteps: "No steps defined", +}; + +const AdminDashboard = ({pages, pagesProgress, user, isAdmin}) => { + const fallbackUsers = useMemo(() => [ + { + userId: user?.uid || user?.email || "current-user", + email: user?.email || "Current learner", + pagesProgress: pagesProgress || {}, + } + ], [pagesProgress, user]); + const [availableUsers, setAvailableUsers] = useState(fallbackUsers); + const [activeUserId, setActiveUserId] 
= useState(fallbackUsers[0]?.userId || ""); + const [loading, setLoading] = useState(progressApiAvailable && isAdmin); + const [error, setError] = useState(null); + + useEffect(() => { + if (!progressApiAvailable) { + setAvailableUsers(fallbackUsers); + setActiveUserId(fallbackUsers[0]?.userId || ""); + } + }, [fallbackUsers, progressApiAvailable]); + + useEffect(() => { + if (!progressApiAvailable || !isAdmin) { + setLoading(false); + return; + } + let cancelled = false; + const load = async () => { + setLoading(true); + setError(null); + try { + const response = await fetchAdminProgress(); + const items = Array.isArray(response?.users) ? response.users : Array.isArray(response) ? response : []; + const mapped = items + .map(entry => ({ + userId: entry.userId || entry.uid || entry.id || entry.email, + email: entry.email || entry.userEmail || entry.user?.email || `(user ${entry.userId || entry.uid || entry.id || entry.email || "unknown"})`, + pagesProgress: entry.pagesProgress || entry.progress || {}, + })) + .filter(item => item.userId); + if (!cancelled && mapped.length) { + setAvailableUsers(mapped); + setActiveUserId(prev => mapped.some(item => item.userId === prev) ? prev : mapped[0].userId); + } + } catch (err) { + if (!cancelled) { + console.error("Failed to load admin progress from Azure API", err); + setError("Unable to load progress data from Azure right now."); + } + } finally { + if (!cancelled) { + setLoading(false); + } + } + }; + load(); + return () => { + cancelled = true; + }; + }, [isAdmin]); + + const activeUser = useMemo( + () => availableUsers.find(candidate => candidate.userId === activeUserId) || availableUsers[0] || fallbackUsers[0], + [availableUsers, activeUserId] + ); + + const summary = useMemo(() => { + const values = Object.values(pages || {}) + .filter(page => page?.slug && page.slug !== "loading_placeholder") + .sort((a, b) => (a.index ?? 0) - (b.index ?? 
0)) + .map(page => { + const steps = page.steps || []; + const progress = activeUser?.pagesProgress?.[page.slug] || {}; + const stepName = progress.step_name || steps[0]?.name || ""; + let stepIndex = steps.findIndex(step => step.name === stepName); + if (stepIndex === -1) { + stepIndex = 0; + } + const totalSteps = steps.length || 0; + const hasActivity = Boolean(progress.updated_at) || stepIndex > 0; + let statusKey = "notStarted"; + let completedSteps = Math.min(stepIndex, totalSteps); + if (!totalSteps) { + statusKey = "noSteps"; + completedSteps = 0; + } else if (hasActivity && stepIndex >= totalSteps - 1) { + statusKey = "completed"; + completedSteps = totalSteps; + } else if (hasActivity) { + statusKey = "inProgress"; + } + const percent = totalSteps ? Math.round((completedSteps / totalSteps) * 100) : 0; + const currentStepNumber = totalSteps ? Math.min(stepIndex + 1, totalSteps) : 0; + return { + slug: page.slug, + title: stripHtml(page.title) || page.slug, + totalSteps, + percent, + statusKey, + updatedAt: progress.updated_at || null, + currentStepNumber, + stepName, + }; + }); + + const totals = values.reduce((acc, row) => { + acc.percentSum += row.percent; + if (row.statusKey === "completed") { + acc.completed += 1; + } else if (row.statusKey === "inProgress") { + acc.inProgress += 1; + } else if (row.statusKey === "notStarted") { + acc.notStarted += 1; + } + return acc; + }, {percentSum: 0, completed: 0, inProgress: 0, notStarted: 0}); + + const overallPercent = values.length ? Math.round(totals.percentSum / values.length) : 0; + + return { + rows: values, + overallPercent, + counts: totals, + totalPages: values.length, + }; + }, [pages, activeUser]); + + return ( +
+

Admin Progress Dashboard

+

+ Tracking progress for {activeUser?.email || "selected learner"} across {summary.totalPages} pages. +

+ + {availableUsers.length > 1 && +
+ + +
+ } + + {loading && +
Loading progress data from Azure...
+ } + {error && +
{error}
+ } + +
+
+
+
+
Overall completion
+

{summary.overallPercent}%

+
+
+
+
+
+
+
+
+
+
Pages completed
+

{summary.counts.completed} / {summary.totalPages}

+

In progress: {summary.counts.inProgress}

+

Not started: {summary.counts.notStarted}

+
+
+
+
+ +
+ + + + + + + + + + + + {summary.rows.length === 0 + ? ( + + + + ) + : summary.rows.map(row => ( + + + + + + + + ))} + +
PageCurrent stepProgressStatusLast updated
+ Course content is still loading. Please check back shortly. +
{row.title} + {row.totalSteps + ? ( + <> +
Step {row.currentStepNumber} of {row.totalSteps}
+
{row.stepName}
+ + ) + : "No steps available"} +
+
+
+
+
{row.percent}%
+
{statusLabels[row.statusKey] || row.statusKey}{formatTimestamp(row.updatedAt)}
+
+
+ ); +}; + +export default AdminDashboard; diff --git a/frontend/src/services/progressApi.js b/frontend/src/services/progressApi.js new file mode 100644 index 00000000..b8f6951c --- /dev/null +++ b/frontend/src/services/progressApi.js @@ -0,0 +1,74 @@ +import axios from "axios"; + +const baseUrl = (process.env.REACT_APP_PROGRESS_API_BASE || "").replace(/\/$/, ""); +const apiKey = process.env.REACT_APP_PROGRESS_API_KEY; + +export const progressApiAvailable = Boolean(baseUrl); + +const safeHeaders = () => { + if (!apiKey) { + return {}; + } + return {"x-functions-key": apiKey}; +}; + +const request = async (config) => { + if (!progressApiAvailable) { + return null; + } + const finalConfig = { + ...config, + headers: { + ...safeHeaders(), + ...(config.headers || {}), + }, + timeout: 15000, + }; + const response = await axios(finalConfig); + return response.data; +}; + +export async function fetchUserProgress(userId) { + if (!progressApiAvailable || !userId) { + return null; + } + try { + return await request({ + method: "GET", + url: `${baseUrl}/users/${encodeURIComponent(userId)}`, + }); + } catch (error) { + if (error?.response?.status === 404) { + return {}; + } + throw error; + } +} + +export async function patchUserProgress(userId, updates) { + if (!progressApiAvailable || !userId || !updates || !Object.keys(updates).length) { + return null; + } + try { + return await request({ + method: "PATCH", + url: `${baseUrl}/users/${encodeURIComponent(userId)}`, + data: updates, + }); + } catch (error) { + if (error?.response?.status === 404) { + return null; + } + throw error; + } +} + +export async function fetchAdminProgress() { + if (!progressApiAvailable) { + return null; + } + return request({ + method: "GET", + url: `${baseUrl}/admin/progress`, + }); +} diff --git a/host.json b/host.json new file mode 100644 index 00000000..9df91361 --- /dev/null +++ b/host.json @@ -0,0 +1,15 @@ +{ + "version": "2.0", + "logging": { + "applicationInsights": { + "samplingSettings": { 
+ "isEnabled": true, + "excludedTypes": "Request" + } + } + }, + "extensionBundle": { + "id": "Microsoft.Azure.Functions.ExtensionBundle", + "version": "[4.*, 5.0.0)" + } +} \ No newline at end of file diff --git a/local.settings.json b/local.settings.json new file mode 100644 index 00000000..356253ec --- /dev/null +++ b/local.settings.json @@ -0,0 +1,7 @@ +{ + "IsEncrypted": false, + "Values": { + "AzureWebJobsStorage": "", + "FUNCTIONS_WORKER_RUNTIME": "node" + } +} \ No newline at end of file diff --git a/nginx.conf b/nginx.conf new file mode 100644 index 00000000..95df2baa --- /dev/null +++ b/nginx.conf @@ -0,0 +1,36 @@ +server { + listen 80; + server_name _; + + root /usr/share/nginx/html; + + # Redirect site root to /course/ + location = / { return 302 /course/; } + + # --- Serve Pyodide archives BEFORE SPA fallback --- + # Futurecoder / Pyodide sometimes request hashed CRA paths; handle both. + location ~ ^/course/(?:static/js/static/media/)?python_core\.tar.*$ { + default_type application/x-bzip2; + try_files /course/python_core.tar =404; + } + location ~ ^/course/(?:static/js/static/media/)?python_stdlib\.zip.*$ { + default_type application/zip; + try_files /course/python_stdlib.zip =404; + } + + # Static assets served as-is + location ~ ^/course/(static|pyodide|wheels|packages)/ { + try_files $uri =404; + } + location ~ ^/course/.*\.(wasm|whl|data|js|css|map|json)$ { + try_files $uri =404; + } + + # SPA fallback for the rest of /course/ + location /course/ { + try_files $uri $uri/ /course/index.html; + } + + # Simple health check + location = /healthz { return 200 "ok\n"; add_header Content-Type text/plain; } +} diff --git a/node_modules/.package-lock.json b/node_modules/.package-lock.json new file mode 100644 index 00000000..31c6fcd5 --- /dev/null +++ b/node_modules/.package-lock.json @@ -0,0 +1,58 @@ +{ + "name": "futurecoder", + "version": "1.0.0", + "lockfileVersion": 3, + "requires": true, + "packages": { + "node_modules/@azure/functions": { + 
"version": "4.8.0", + "resolved": "https://registry.npmjs.org/@azure/functions/-/functions-4.8.0.tgz", + "integrity": "sha512-LNtl3xZNE40vE7+SIST+GYQX5cnnI1M65fXPi26l9XCdPakuQrz54lHv+qQQt1GG5JbqLfQk75iM7A6Y9O+2dQ==", + "license": "MIT", + "dependencies": { + "cookie": "^0.7.0", + "long": "^4.0.0", + "undici": "^5.29.0" + }, + "engines": { + "node": ">=18.0" + } + }, + "node_modules/@fastify/busboy": { + "version": "2.1.1", + "resolved": "https://registry.npmjs.org/@fastify/busboy/-/busboy-2.1.1.tgz", + "integrity": "sha512-vBZP4NlzfOlerQTnba4aqZoMhE/a9HY7HRqoOPaETQcSQuWEIyZMHGfVu6w9wGtGK5fED5qRs2DteVCjOH60sA==", + "license": "MIT", + "engines": { + "node": ">=14" + } + }, + "node_modules/cookie": { + "version": "0.7.2", + "resolved": "https://registry.npmjs.org/cookie/-/cookie-0.7.2.tgz", + "integrity": "sha512-yki5XnKuf750l50uGTllt6kKILY4nQ1eNIQatoXEByZ5dWgnKqbnqmTrBE5B4N7lrMJKQ2ytWMiTO2o0v6Ew/w==", + "license": "MIT", + "engines": { + "node": ">= 0.6" + } + }, + "node_modules/long": { + "version": "4.0.0", + "resolved": "https://registry.npmjs.org/long/-/long-4.0.0.tgz", + "integrity": "sha512-XsP+KhQif4bjX1kbuSiySJFNAehNxgLb6hPRGJ9QsUr8ajHkuXGdrHmFUTUUXhDwVX2R5bY4JNZEwbUiMhV+MA==", + "license": "Apache-2.0" + }, + "node_modules/undici": { + "version": "5.29.0", + "resolved": "https://registry.npmjs.org/undici/-/undici-5.29.0.tgz", + "integrity": "sha512-raqeBD6NQK4SkWhQzeYKd1KmIG6dllBOTt55Rmkt4HtI9mwdWtJljnrXjAFUBLTSN67HWrOIZ3EPF4kjUw80Bg==", + "license": "MIT", + "dependencies": { + "@fastify/busboy": "^2.0.0" + }, + "engines": { + "node": ">=14.0" + } + } + } +} diff --git a/node_modules/@azure/functions/LICENSE b/node_modules/@azure/functions/LICENSE new file mode 100644 index 00000000..4f0d38ca --- /dev/null +++ b/node_modules/@azure/functions/LICENSE @@ -0,0 +1,21 @@ + MIT License + + Copyright (c) .NET Foundation. All rights reserved. 
+ + Permission is hereby granted, free of charge, to any person obtaining a copy + of this software and associated documentation files (the "Software"), to deal + in the Software without restriction, including without limitation the rights + to use, copy, modify, merge, publish, distribute, sublicense, and/or sell + copies of the Software, and to permit persons to whom the Software is + furnished to do so, subject to the following conditions: + + The above copyright notice and this permission notice shall be included in all + copies or substantial portions of the Software. + + THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR + IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, + FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE + AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER + LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, + OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE + SOFTWARE diff --git a/node_modules/@azure/functions/README.md b/node_modules/@azure/functions/README.md new file mode 100644 index 00000000..4d666e65 --- /dev/null +++ b/node_modules/@azure/functions/README.md @@ -0,0 +1,67 @@ +# Azure Functions Node.js Programming Model + +|Branch|Status|Support level|Node.js Versions| +|---|---|---|---| +|v4.x (default)|[![Build Status](https://img.shields.io/azure-devops/build/azfunc/public/514/v4.x)](https://azfunc.visualstudio.com/public/_build/latest?definitionId=514&branchName=v4.x) [![Test Status](https://img.shields.io/azure-devops/tests/azfunc/public/514/v4.x?compact_message)](https://azfunc.visualstudio.com/public/_build/latest?definitionId=514&branchName=v4.x)|GA|20, 18| +|v3.x|[![Build Status](https://img.shields.io/azure-devops/build/azfunc/public/514/v3.x)](https://azfunc.visualstudio.com/public/_build/latest?definitionId=514&branchName=v3.x) [![Test 
Status](https://img.shields.io/azure-devops/tests/azfunc/public/514/v3.x?compact_message)](https://azfunc.visualstudio.com/public/_build/latest?definitionId=514&branchName=v3.x)|GA|20, 18| + +## Install + +```bash +npm install @azure/functions +``` + +## Documentation + +- [Azure Functions JavaScript Developer Guide](https://learn.microsoft.com/azure/azure-functions/functions-reference-node?pivots=nodejs-model-v4) +- [Upgrade guide from v3 to v4](https://learn.microsoft.com/azure/azure-functions/functions-node-upgrade-v4) +- [Create your first TypeScript function](https://docs.microsoft.com/azure/azure-functions/create-first-function-vs-code-typescript?pivots=nodejs-model-v4) +- [Create your first JavaScript function](https://docs.microsoft.com/azure/azure-functions/create-first-function-vs-code-node?pivots=nodejs-model-v4) + +## Considerations + +- The Node.js "programming model" shouldn't be confused with the Azure Functions "runtime". + - _**Programming model**_: Defines how you author your code and is specific to JavaScript and TypeScript. + - _**Runtime**_: Defines underlying behavior of Azure Functions and is shared across all languages. +- The programming model version is strictly tied to the version of the [`@azure/functions`](https://www.npmjs.com/package/@azure/functions) npm package, and is versioned independently of the [runtime](https://learn.microsoft.com/azure/azure-functions/functions-versions?pivots=programming-language-javascript). Both the runtime and the programming model use "4" as their latest major version, but that is purely a coincidence. +- You can't mix the v3 and v4 programming models in the same function app. As soon as you register one v4 function in your app, any v3 functions registered in _function.json_ files are ignored. 
+ +## Usage + +### TypeScript + +```typescript +import { app, HttpRequest, HttpResponseInit, InvocationContext } from "@azure/functions"; + +export async function httpTrigger1(request: HttpRequest, context: InvocationContext): Promise { + context.log(`Http function processed request for url "${request.url}"`); + + const name = request.query.get('name') || await request.text() || 'world'; + + return { body: `Hello, ${name}!` }; +}; + +app.http('httpTrigger1', { + methods: ['GET', 'POST'], + authLevel: 'anonymous', + handler: httpTrigger1 +}); +``` + +### JavaScript + +```javascript +const { app } = require('@azure/functions'); + +app.http('httpTrigger1', { + methods: ['GET', 'POST'], + authLevel: 'anonymous', + handler: async (request, context) => { + context.log(`Http function processed request for url "${request.url}"`); + + const name = request.query.get('name') || await request.text() || 'world'; + + return { body: `Hello, ${name}!` }; + } +}); +``` diff --git a/node_modules/@azure/functions/package.json b/node_modules/@azure/functions/package.json new file mode 100644 index 00000000..e0f57eed --- /dev/null +++ b/node_modules/@azure/functions/package.json @@ -0,0 +1,86 @@ +{ + "name": "@azure/functions", + "version": "4.8.0", + "description": "Microsoft Azure Functions NodeJS Framework", + "keywords": [ + "azure", + "azure-functions", + "serverless", + "typescript" + ], + "author": "Microsoft", + "license": "MIT", + "homepage": "https://github.com/Azure/azure-functions-nodejs-library", + "repository": { + "type": "git", + "url": "https://github.com/Azure/azure-functions-nodejs-library.git" + }, + "bugs": { + "url": "https://github.com/Azure/azure-functions-nodejs-library/issues" + }, + "main": "./dist/azure-functions.js", + "types": "types/index.d.ts", + "files": [ + "dist/", + "src/", + "types/", + "LICENSE", + "README.md" + ], + "engines": { + "node": ">=18.0" + }, + "scripts": { + "build": "webpack --mode development", + "minify": 
"webpack --mode production", + "test": "ts-node ./test/index.ts", + "format": "prettier . --write", + "lint": "eslint . --fix", + "updateVersion": "ts-node ./scripts/updateVersion.ts", + "validateRelease": "ts-node ./scripts/validateRelease.ts", + "watch": "webpack --watch --mode development" + }, + "dependencies": { + "cookie": "^0.7.0", + "long": "^4.0.0", + "undici": "^5.29.0" + }, + "devDependencies": { + "@types/chai": "^4.2.22", + "@types/chai-as-promised": "^7.1.5", + "@types/cookie": "^0.6.0", + "@types/fs-extra": "^9.0.13", + "@types/long": "^4.0.2", + "@types/minimist": "^1.2.2", + "@types/mocha": "^9.1.1", + "@types/node": "^18.0.0", + "@types/semver": "^7.3.9", + "@typescript-eslint/eslint-plugin": "^5.12.1", + "@typescript-eslint/parser": "^5.12.1", + "chai": "^4.2.0", + "chai-as-promised": "^7.1.1", + "eslint": "^7.32.0", + "eslint-config-prettier": "^8.3.0", + "eslint-plugin-deprecation": "^1.3.2", + "eslint-plugin-header": "^3.1.1", + "eslint-plugin-import": "^2.29.0", + "eslint-plugin-prettier": "^4.0.0", + "eslint-webpack-plugin": "^3.2.0", + "eslint-plugin-simple-import-sort": "^10.0.0", + "fork-ts-checker-webpack-plugin": "^7.2.13", + "fs-extra": "^10.0.1", + "globby": "^11.0.0", + "minimist": "^1.2.6", + "mocha": "^11.1.0", + "mocha-junit-reporter": "^2.0.2", + "mocha-multi-reporters": "^1.5.1", + "prettier": "^2.4.1", + "semver": "^7.3.5", + "ts-loader": "^9.3.1", + "ts-node": "^3.3.0", + "typescript": "^4.5.5", + "typescript4": "npm:typescript@~4.0.0", + "webpack": "^5.74.0", + "webpack-cli": "^4.10.0" + } +} diff --git a/node_modules/@azure/functions/src/InvocationContext.ts b/node_modules/@azure/functions/src/InvocationContext.ts new file mode 100644 index 00000000..56ce2417 --- /dev/null +++ b/node_modules/@azure/functions/src/InvocationContext.ts @@ -0,0 +1,95 @@ +// Copyright (c) .NET Foundation. All rights reserved. +// Licensed under the MIT License. 
+ +import * as types from '@azure/functions'; +import { + EffectiveFunctionOptions, + InvocationContextInit, + LogHandler, + RetryContext, + TraceContext, + TriggerMetadata, +} from '@azure/functions'; +import { fallbackLogHandler } from './utils/fallbackLogHandler'; + +export class InvocationContext implements types.InvocationContext { + invocationId: string; + functionName: string; + extraInputs: InvocationContextExtraInputs; + extraOutputs: InvocationContextExtraOutputs; + retryContext?: RetryContext; + traceContext?: TraceContext; + triggerMetadata?: TriggerMetadata; + options: EffectiveFunctionOptions; + #userLogHandler: LogHandler; + + constructor(init?: InvocationContextInit) { + init = init || {}; + const fallbackString = 'unknown'; + this.invocationId = init.invocationId || fallbackString; + this.functionName = init.functionName || fallbackString; + this.extraInputs = new InvocationContextExtraInputs(); + this.extraOutputs = new InvocationContextExtraOutputs(); + this.retryContext = init.retryContext; + this.traceContext = init.traceContext; + this.triggerMetadata = init.triggerMetadata; + this.options = { + trigger: init.options?.trigger || { + name: fallbackString, + type: fallbackString, + }, + return: init.options?.return, + extraInputs: init.options?.extraInputs || [], + extraOutputs: init.options?.extraOutputs || [], + }; + this.#userLogHandler = init.logHandler || fallbackLogHandler; + } + + log(...args: unknown[]): void { + this.#userLogHandler('information', ...args); + } + + trace(...args: unknown[]): void { + this.#userLogHandler('trace', ...args); + } + + debug(...args: unknown[]): void { + this.#userLogHandler('debug', ...args); + } + + info(...args: unknown[]): void { + this.#userLogHandler('information', ...args); + } + + warn(...args: unknown[]): void { + this.#userLogHandler('warning', ...args); + } + + error(...args: unknown[]): void { + this.#userLogHandler('error', ...args); + } +} + +class InvocationContextExtraInputs implements 
types.InvocationContextExtraInputs { + #inputs: Record = {}; + get(inputOrName: types.FunctionInput | string): any { + const name = typeof inputOrName === 'string' ? inputOrName : inputOrName.name; + return this.#inputs[name]; + } + set(inputOrName: types.FunctionInput | string, value: unknown): void { + const name = typeof inputOrName === 'string' ? inputOrName : inputOrName.name; + this.#inputs[name] = value; + } +} + +class InvocationContextExtraOutputs implements types.InvocationContextExtraOutputs { + #outputs: Record = {}; + get(outputOrName: types.FunctionOutput | string): unknown { + const name = typeof outputOrName === 'string' ? outputOrName : outputOrName.name; + return this.#outputs[name]; + } + set(outputOrName: types.FunctionOutput | string, value: unknown): void { + const name = typeof outputOrName === 'string' ? outputOrName : outputOrName.name; + this.#outputs[name] = value; + } +} diff --git a/node_modules/@azure/functions/src/InvocationModel.ts b/node_modules/@azure/functions/src/InvocationModel.ts new file mode 100644 index 00000000..d1958aed --- /dev/null +++ b/node_modules/@azure/functions/src/InvocationModel.ts @@ -0,0 +1,176 @@ +// Copyright (c) .NET Foundation. All rights reserved. +// Licensed under the MIT License. 
+ +import * as coreTypes from '@azure/functions-core'; +import { + CoreInvocationContext, + InvocationArguments, + RpcBindingInfo, + RpcInvocationResponse, + RpcLogCategory, + RpcLogLevel, + RpcTypedData, +} from '@azure/functions-core'; +import { format } from 'util'; +import { returnBindingKey } from './constants'; +import { fromRpcBindings } from './converters/fromRpcBindings'; +import { fromRpcRetryContext, fromRpcTraceContext } from './converters/fromRpcContext'; +import { fromRpcTriggerMetadata } from './converters/fromRpcTriggerMetadata'; +import { fromRpcTypedData } from './converters/fromRpcTypedData'; +import { toCamelCaseValue } from './converters/toCamelCase'; +import { toRpcHttp } from './converters/toRpcHttp'; +import { toRpcTypedData } from './converters/toRpcTypedData'; +import { AzFuncSystemError } from './errors'; +import { waitForProxyRequest } from './http/httpProxy'; +import { createStreamRequest } from './http/HttpRequest'; +import { InvocationContext } from './InvocationContext'; +import { enableHttpStream } from './setup'; +import { isHttpTrigger, isTimerTrigger, isTrigger } from './utils/isTrigger'; +import { isDefined, nonNullProp, nonNullValue } from './utils/nonNull'; + +export class InvocationModel implements coreTypes.InvocationModel { + #isDone = false; + #coreCtx: CoreInvocationContext; + #functionName: string; + #bindings: Record; + #triggerType: string; + + constructor(coreCtx: CoreInvocationContext) { + this.#coreCtx = coreCtx; + this.#functionName = nonNullProp(coreCtx.metadata, 'name'); + this.#bindings = nonNullProp(coreCtx.metadata, 'bindings'); + const triggerBinding = nonNullValue( + Object.values(this.#bindings).find((b) => isTrigger(b.type)), + 'triggerBinding' + ); + this.#triggerType = nonNullProp(triggerBinding, 'type'); + } + + // eslint-disable-next-line @typescript-eslint/require-await + async getArguments(): Promise { + const req = this.#coreCtx.request; + + const context = new InvocationContext({ + invocationId: 
nonNullProp(this.#coreCtx, 'invocationId'), + functionName: this.#functionName, + logHandler: (level: RpcLogLevel, ...args: unknown[]) => this.#userLog(level, ...args), + retryContext: fromRpcRetryContext(req.retryContext), + traceContext: fromRpcTraceContext(req.traceContext), + triggerMetadata: fromRpcTriggerMetadata(req.triggerMetadata, this.#triggerType), + options: fromRpcBindings(this.#bindings), + }); + + const inputs: unknown[] = []; + if (req.inputData) { + for (const binding of req.inputData) { + const bindingName = nonNullProp(binding, 'name'); + + const rpcBinding = this.#bindings[bindingName]; + if (!rpcBinding) { + throw new AzFuncSystemError( + `Failed to find binding "${bindingName}" in bindings "${Object.keys(this.#bindings).join( + ', ' + )}".` + ); + } + const bindingType = rpcBinding.type; + + let input: unknown; + if (isHttpTrigger(bindingType) && enableHttpStream) { + const proxyRequest = await waitForProxyRequest(this.#coreCtx.invocationId); + input = createStreamRequest(proxyRequest, nonNullProp(req, 'triggerMetadata')); + } else { + input = fromRpcTypedData(binding.data); + } + + if (isTimerTrigger(bindingType)) { + input = toCamelCaseValue(input); + } + + if (isTrigger(bindingType)) { + inputs.push(input); + } else { + context.extraInputs.set(bindingName, input); + } + } + } + + return { context, inputs }; + } + + async invokeFunction( + context: InvocationContext, + inputs: unknown[], + handler: coreTypes.FunctionCallback + ): Promise { + try { + return await Promise.resolve(handler(...inputs, context)); + } finally { + this.#isDone = true; + } + } + + async getResponse(context: InvocationContext, result: unknown): Promise { + const response: RpcInvocationResponse = { invocationId: this.#coreCtx.invocationId }; + + response.outputData = []; + let usedReturnValue = false; + for (const [name, binding] of Object.entries(this.#bindings)) { + if (binding.direction === 'out') { + if (name === returnBindingKey) { + response.returnValue = await 
this.#convertOutput(context.invocationId, binding, result); + usedReturnValue = true; + } else { + const outputValue = await this.#convertOutput( + context.invocationId, + binding, + context.extraOutputs.get(name) + ); + if (isDefined(outputValue)) { + response.outputData.push({ name, data: outputValue }); + } + } + } + } + + // This allows the return value of non-HTTP triggered functions to be passed back + // to the host, even if no explicit output binding is set. In most cases, this is ignored, + // but e.g., Durable uses this to pass orchestrator state back to the Durable extension, w/o + // an explicit output binding. See here for more details: https://github.com/Azure/azure-functions-nodejs-library/pull/25 + if (!usedReturnValue && !isHttpTrigger(this.#triggerType)) { + response.returnValue = toRpcTypedData(result); + } + + return response; + } + + async #convertOutput( + invocationId: string, + binding: RpcBindingInfo, + value: unknown + ): Promise { + if (binding.type?.toLowerCase() === 'http') { + return toRpcHttp(invocationId, value); + } else { + return toRpcTypedData(value); + } + } + + #log(level: RpcLogLevel, logCategory: RpcLogCategory, ...args: unknown[]): void { + this.#coreCtx.log(level, logCategory, format(...args)); + } + + #systemLog(level: RpcLogLevel, ...args: unknown[]) { + this.#log(level, 'system', ...args); + } + + #userLog(level: RpcLogLevel, ...args: unknown[]): void { + if (this.#isDone && this.#coreCtx.state !== 'postInvocationHooks') { + let badAsyncMsg = + "Warning: Unexpected call to 'log' on the context object after function execution has completed. Please check for asynchronous calls that are not awaited. "; + badAsyncMsg += `Function name: ${this.#functionName}. 
Invocation Id: ${this.#coreCtx.invocationId}.`; + this.#systemLog('warning', badAsyncMsg); + } + this.#log(level, 'user', ...args); + } +} diff --git a/node_modules/@azure/functions/src/ProgrammingModel.ts b/node_modules/@azure/functions/src/ProgrammingModel.ts new file mode 100644 index 00000000..009ed92e --- /dev/null +++ b/node_modules/@azure/functions/src/ProgrammingModel.ts @@ -0,0 +1,31 @@ +// Copyright (c) .NET Foundation. All rights reserved. +// Licensed under the MIT License. + +import * as coreTypes from '@azure/functions-core'; +import { CoreInvocationContext, WorkerCapabilities } from '@azure/functions-core'; +import { version } from './constants'; +import { setupHttpProxy } from './http/httpProxy'; +import { InvocationModel } from './InvocationModel'; +import { capabilities as libraryCapabilities, enableHttpStream, lockSetup } from './setup'; + +export class ProgrammingModel implements coreTypes.ProgrammingModel { + name = '@azure/functions'; + version = version; + + getInvocationModel(coreCtx: CoreInvocationContext): InvocationModel { + return new InvocationModel(coreCtx); + } + + async getCapabilities(workerCapabilities: WorkerCapabilities): Promise { + lockSetup(); + + if (enableHttpStream) { + const httpUri = await setupHttpProxy(); + workerCapabilities.HttpUri = httpUri; + } + + Object.assign(workerCapabilities, libraryCapabilities); + + return workerCapabilities; + } +} diff --git a/node_modules/@azure/functions/src/addBindingName.ts b/node_modules/@azure/functions/src/addBindingName.ts new file mode 100644 index 00000000..3ac0f952 --- /dev/null +++ b/node_modules/@azure/functions/src/addBindingName.ts @@ -0,0 +1,27 @@ +// Copyright (c) .NET Foundation. All rights reserved. +// Licensed under the MIT License. + +import { getStringHash } from './utils/getRandomHexString'; + +/** + * If the host spawns multiple workers, it expects the metadata (including binding name) to be the same across workers. 
+ * That means we need to generate binding names in a deterministic fashion, so we'll do that using a string hash of the binding data + * A few considerations: + * 1. We will include the binding type in the name to make it more readable + * 2. Users can manually specify the name themselves and we will respect that + * 3. The only time the hash should cause a conflict is if a single function has duplicate bindings. Not sure why someone would do that, but we will throw an error at function registration time + * More info here: https://github.com/Azure/azure-functions-nodejs-worker/issues/638 + */ +export function addBindingName( + binding: T, + suffix: string +): T & { name: string } { + if (!binding.name) { + let bindingType = binding.type; + if (!bindingType.toLowerCase().endsWith(suffix.toLowerCase())) { + bindingType += suffix; + } + binding.name = bindingType + getStringHash(JSON.stringify(binding)); + } + return binding; +} diff --git a/node_modules/@azure/functions/src/app.ts b/node_modules/@azure/functions/src/app.ts new file mode 100644 index 00000000..b7818459 --- /dev/null +++ b/node_modules/@azure/functions/src/app.ts @@ -0,0 +1,173 @@ +// Copyright (c) .NET Foundation. All rights reserved. +// Licensed under the MIT License. 
+ +import { + CosmosDBFunctionOptions, + EventGridFunctionOptions, + EventHubFunctionOptions, + FunctionTrigger, + GenericFunctionOptions, + HttpFunctionOptions, + HttpHandler, + HttpMethod, + HttpMethodFunctionOptions, + McpToolFunctionOptions, + MySqlFunctionOptions, + ServiceBusQueueFunctionOptions, + ServiceBusTopicFunctionOptions, + SqlFunctionOptions, + StorageBlobFunctionOptions, + StorageQueueFunctionOptions, + TimerFunctionOptions, + WarmupFunctionOptions, + WebPubSubFunctionOptions, +} from '@azure/functions'; +import { FunctionCallback } from '@azure/functions-core'; +import { toCoreFunctionMetadata } from './converters/toCoreFunctionMetadata'; +import * as output from './output'; +import { ProgrammingModel } from './ProgrammingModel'; +import * as trigger from './trigger'; +import { tryGetCoreApiLazy } from './utils/tryGetCoreApiLazy'; + +export * as hook from './hooks/registerHook'; +export { setup } from './setup'; + +let hasSetModel = false; +function setProgrammingModel() { + const coreApi = tryGetCoreApiLazy(); + if (!coreApi) { + console.warn( + 'WARNING: Failed to detect the Azure Functions runtime. Switching "@azure/functions" package to test mode - not all features are supported.' + ); + } else { + coreApi.setProgrammingModel(new ProgrammingModel()); + } + hasSetModel = true; +} + +function convertToHttpOptions( + optionsOrHandler: HttpFunctionOptions | HttpHandler, + method: HttpMethod +): HttpFunctionOptions { + const options: HttpFunctionOptions = + typeof optionsOrHandler === 'function' ? { handler: optionsOrHandler } : optionsOrHandler; + options.methods = [method]; + return options; +} + +function convertToGenericOptions & Partial>( + options: T, + triggerMethod: ( + o: Omit + ) => FunctionTrigger +): GenericFunctionOptions { + const { handler, return: ret, trigger, extraInputs, extraOutputs, retry, ...triggerOptions } = options; + return { + trigger: trigger ?? 
triggerMethod(triggerOptions), + return: ret, + retry, + extraInputs, + extraOutputs, + handler, + }; +} + +export function get(name: string, optionsOrHandler: HttpMethodFunctionOptions | HttpHandler): void { + http(name, convertToHttpOptions(optionsOrHandler, 'GET')); +} + +export function put(name: string, optionsOrHandler: HttpMethodFunctionOptions | HttpHandler): void { + http(name, convertToHttpOptions(optionsOrHandler, 'PUT')); +} + +export function post(name: string, optionsOrHandler: HttpMethodFunctionOptions | HttpHandler): void { + http(name, convertToHttpOptions(optionsOrHandler, 'POST')); +} + +export function patch(name: string, optionsOrHandler: HttpMethodFunctionOptions | HttpHandler): void { + http(name, convertToHttpOptions(optionsOrHandler, 'PATCH')); +} + +export function deleteRequest(name: string, optionsOrHandler: HttpMethodFunctionOptions | HttpHandler): void { + http(name, convertToHttpOptions(optionsOrHandler, 'DELETE')); +} + +export function http(name: string, options: HttpFunctionOptions): void { + options.return ||= output.http({}); + generic(name, convertToGenericOptions(options, trigger.http)); +} + +export function timer(name: string, options: TimerFunctionOptions): void { + generic(name, convertToGenericOptions(options, trigger.timer)); +} + +export function storageBlob(name: string, options: StorageBlobFunctionOptions): void { + generic(name, convertToGenericOptions(options, trigger.storageBlob)); +} + +export function storageQueue(name: string, options: StorageQueueFunctionOptions): void { + generic(name, convertToGenericOptions(options, trigger.storageQueue)); +} + +export function serviceBusQueue(name: string, options: ServiceBusQueueFunctionOptions): void { + generic(name, convertToGenericOptions(options, trigger.serviceBusQueue)); +} + +export function serviceBusTopic(name: string, options: ServiceBusTopicFunctionOptions): void { + generic(name, convertToGenericOptions(options, trigger.serviceBusTopic)); +} + +export function 
eventHub(name: string, options: EventHubFunctionOptions): void { + generic(name, convertToGenericOptions(options, trigger.eventHub)); +} + +export function eventGrid(name: string, options: EventGridFunctionOptions): void { + generic(name, convertToGenericOptions(options, trigger.eventGrid)); +} + +export function cosmosDB(name: string, options: CosmosDBFunctionOptions): void { + // eslint-disable-next-line @typescript-eslint/no-unsafe-argument + generic(name, convertToGenericOptions(options, trigger.cosmosDB)); +} + +export function warmup(name: string, options: WarmupFunctionOptions): void { + generic(name, convertToGenericOptions(options, trigger.warmup)); +} + +export function sql(name: string, options: SqlFunctionOptions): void { + generic(name, convertToGenericOptions(options, trigger.sql)); +} + +export function mySql(name: string, options: MySqlFunctionOptions): void { + generic(name, convertToGenericOptions(options, trigger.mySql)); +} + +export function webPubSub(name: string, options: WebPubSubFunctionOptions): void { + generic(name, convertToGenericOptions(options, trigger.webPubSub)); +} + +/** + * Registers an MCP Tool function in your app. + * This function is triggered by MCP Tool events and allows you to define the behavior of the function. + * + * @param name - The name of the function. This must be unique within your app and is primarily used for tracking purposes. + * @param options - Configuration options for the MCP Tool function, including the handler and trigger-specific settings. 
+ */ +export function mcpTool(name: string, options: McpToolFunctionOptions): void { + generic(name, convertToGenericOptions(options, trigger.mcpTool)); +} + +export function generic(name: string, options: GenericFunctionOptions): void { + if (!hasSetModel) { + setProgrammingModel(); + } + + const coreApi = tryGetCoreApiLazy(); + if (!coreApi) { + console.warn( + `WARNING: Skipping call to register function "${name}" because the "@azure/functions" package is in test mode.` + ); + } else { + coreApi.registerFunction(toCoreFunctionMetadata(name, options), options.handler); + } +} diff --git a/node_modules/@azure/functions/src/constants.ts b/node_modules/@azure/functions/src/constants.ts new file mode 100644 index 00000000..3b05c2d4 --- /dev/null +++ b/node_modules/@azure/functions/src/constants.ts @@ -0,0 +1,6 @@ +// Copyright (c) .NET Foundation. All rights reserved. +// Licensed under the MIT License. + +export const version = '4.8.0'; + +export const returnBindingKey = '$return'; diff --git a/node_modules/@azure/functions/src/converters/fromRpcBindings.ts b/node_modules/@azure/functions/src/converters/fromRpcBindings.ts new file mode 100644 index 00000000..abdaf07b --- /dev/null +++ b/node_modules/@azure/functions/src/converters/fromRpcBindings.ts @@ -0,0 +1,40 @@ +// Copyright (c) .NET Foundation. All rights reserved. +// Licensed under the MIT License. 
+ +import { EffectiveFunctionOptions, FunctionInput, FunctionOutput, FunctionTrigger } from '@azure/functions'; +import { RpcBindingInfo } from '@azure/functions-core'; +import { returnBindingKey } from '../constants'; +import { isTrigger } from '../utils/isTrigger'; +import { nonNullProp, nonNullValue } from '../utils/nonNull'; + +export function fromRpcBindings(bindings: Record | null | undefined): EffectiveFunctionOptions { + let trigger: FunctionTrigger | undefined; + let returnBinding: FunctionOutput | undefined; + const extraInputs: FunctionInput[] = []; + const extraOutputs: FunctionOutput[] = []; + for (const [name, binding] of Object.entries(nonNullValue(bindings, 'bindings'))) { + if (isTrigger(binding.type)) { + trigger = fromRpcBinding(name, binding); + } else if (name === returnBindingKey) { + returnBinding = fromRpcBinding(name, binding); + } else if (binding.direction === 'in') { + extraInputs.push(fromRpcBinding(name, binding)); + } else if (binding.direction === 'out') { + extraOutputs.push(fromRpcBinding(name, binding)); + } + } + return { + trigger: nonNullValue(trigger, 'trigger'), + return: returnBinding, + extraInputs, + extraOutputs, + }; +} + +function fromRpcBinding(name: string, binding: RpcBindingInfo): FunctionTrigger | FunctionInput | FunctionOutput { + return { + ...binding, + type: nonNullProp(binding, 'type'), + name, + }; +} diff --git a/node_modules/@azure/functions/src/converters/fromRpcContext.ts b/node_modules/@azure/functions/src/converters/fromRpcContext.ts new file mode 100644 index 00000000..71f84952 --- /dev/null +++ b/node_modules/@azure/functions/src/converters/fromRpcContext.ts @@ -0,0 +1,43 @@ +// Copyright (c) .NET Foundation. All rights reserved. +// Licensed under the MIT License. 
+ +import { Exception, RetryContext, TraceContext } from '@azure/functions'; +import { RpcException, RpcRetryContext, RpcTraceContext } from '@azure/functions-core'; +import { copyPropIfDefined, nonNullProp } from '../utils/nonNull'; + +export function fromRpcRetryContext(retryContext: RpcRetryContext | null | undefined): RetryContext | undefined { + if (!retryContext) { + return undefined; + } else { + const result: RetryContext = { + retryCount: nonNullProp(retryContext, 'retryCount'), + maxRetryCount: nonNullProp(retryContext, 'maxRetryCount'), + }; + if (retryContext.exception) { + result.exception = fromRpcException(retryContext.exception); + } + return result; + } +} + +function fromRpcException(exception: RpcException): Exception { + const result: Exception = {}; + copyPropIfDefined(exception, result, 'message'); + copyPropIfDefined(exception, result, 'source'); + copyPropIfDefined(exception, result, 'stackTrace'); + return result; +} + +export function fromRpcTraceContext(traceContext: RpcTraceContext | null | undefined): TraceContext | undefined { + if (!traceContext) { + return undefined; + } else { + const result: TraceContext = {}; + copyPropIfDefined(traceContext, result, 'traceParent'); + copyPropIfDefined(traceContext, result, 'traceState'); + if (traceContext.attributes) { + result.attributes = traceContext.attributes; + } + return result; + } +} diff --git a/node_modules/@azure/functions/src/converters/fromRpcNullable.ts b/node_modules/@azure/functions/src/converters/fromRpcNullable.ts new file mode 100644 index 00000000..613be930 --- /dev/null +++ b/node_modules/@azure/functions/src/converters/fromRpcNullable.ts @@ -0,0 +1,19 @@ +// Copyright (c) .NET Foundation. All rights reserved. +// Licensed under the MIT License. 
+ +import { RpcNullableString } from '@azure/functions-core'; + +export function fromNullableMapping( + nullableMapping: Record | null | undefined, + originalMapping?: Record | null +): Record { + let converted: Record = {}; + if (nullableMapping && Object.keys(nullableMapping).length > 0) { + for (const key in nullableMapping) { + converted[key] = nullableMapping[key]?.value || ''; + } + } else if (originalMapping && Object.keys(originalMapping).length > 0) { + converted = originalMapping; + } + return converted; +} diff --git a/node_modules/@azure/functions/src/converters/fromRpcTriggerMetadata.ts b/node_modules/@azure/functions/src/converters/fromRpcTriggerMetadata.ts new file mode 100644 index 00000000..4e5efd1c --- /dev/null +++ b/node_modules/@azure/functions/src/converters/fromRpcTriggerMetadata.ts @@ -0,0 +1,27 @@ +// Copyright (c) .NET Foundation. All rights reserved. +// Licensed under the MIT License. + +import { TriggerMetadata } from '@azure/functions'; +import { RpcTypedData } from '@azure/functions-core'; +import { isHttpTrigger, isTimerTrigger } from '../utils/isTrigger'; +import { fromRpcTypedData } from './fromRpcTypedData'; +import { toCamelCaseKey, toCamelCaseValue } from './toCamelCase'; + +export function fromRpcTriggerMetadata( + triggerMetadata: Record | null | undefined, + triggerType: string +): TriggerMetadata | undefined { + // For http and timer triggers, we will avoid using `triggerMetadata` for a few reasons: + // 1. It uses `toCamelCase` methods, which can lead to weird casing bugs + // 2. It's generally a large medley of properties that is difficult for us to document/type + // 3. 
We can represent that information on the request & timer objects instead + if (!triggerMetadata || isHttpTrigger(triggerType) || isTimerTrigger(triggerType)) { + return undefined; + } else { + const result: TriggerMetadata = {}; + for (const [key, value] of Object.entries(triggerMetadata)) { + result[toCamelCaseKey(key)] = toCamelCaseValue(fromRpcTypedData(value)); + } + return result; + } +} diff --git a/node_modules/@azure/functions/src/converters/fromRpcTypedData.ts b/node_modules/@azure/functions/src/converters/fromRpcTypedData.ts new file mode 100644 index 00000000..a8ed9631 --- /dev/null +++ b/node_modules/@azure/functions/src/converters/fromRpcTypedData.ts @@ -0,0 +1,44 @@ +// Copyright (c) .NET Foundation. All rights reserved. +// Licensed under the MIT License. + +import { RpcTypedData } from '@azure/functions-core'; +import { HttpRequest } from '../http/HttpRequest'; +import { isDefined } from '../utils/nonNull'; + +export function fromRpcTypedData(data: RpcTypedData | null | undefined): unknown { + if (!data) { + return undefined; + } else if (isDefined(data.string)) { + return tryJsonParse(data.string); + } else if (isDefined(data.json)) { + return JSON.parse(data.json); + } else if (isDefined(data.bytes)) { + return Buffer.from(data.bytes); + } else if (isDefined(data.stream)) { + return Buffer.from(data.stream); + } else if (isDefined(data.http)) { + return new HttpRequest(data.http); + } else if (isDefined(data.int)) { + return data.int; + } else if (isDefined(data.double)) { + return data.double; + } else if (data.collectionBytes && isDefined(data.collectionBytes.bytes)) { + return data.collectionBytes.bytes.map((d) => Buffer.from(d)); + } else if (data.collectionString && isDefined(data.collectionString.string)) { + return data.collectionString.string.map(tryJsonParse); + } else if (data.collectionDouble && isDefined(data.collectionDouble.double)) { + return data.collectionDouble.double; + } else if (data.collectionSint64 && 
isDefined(data.collectionSint64.sint64)) { + return data.collectionSint64.sint64; + } else { + return undefined; + } +} + +function tryJsonParse(data: string): unknown { + try { + return JSON.parse(data); + } catch { + return data; + } +} diff --git a/node_modules/@azure/functions/src/converters/toCamelCase.ts b/node_modules/@azure/functions/src/converters/toCamelCase.ts new file mode 100644 index 00000000..20e2b6a5 --- /dev/null +++ b/node_modules/@azure/functions/src/converters/toCamelCase.ts @@ -0,0 +1,20 @@ +// Copyright (c) .NET Foundation. All rights reserved. +// Licensed under the MIT License. + +export function toCamelCaseValue(data: unknown): unknown { + if (typeof data !== 'object' || data === null) { + return data; + } else if (Array.isArray(data)) { + return data.map(toCamelCaseValue); + } else { + const result: Record = {}; + for (const [key, value] of Object.entries(data)) { + result[toCamelCaseKey(key)] = toCamelCaseValue(value); + } + return result; + } +} + +export function toCamelCaseKey(key: string): string { + return key.charAt(0).toLowerCase() + key.slice(1); +} diff --git a/node_modules/@azure/functions/src/converters/toCoreFunctionMetadata.ts b/node_modules/@azure/functions/src/converters/toCoreFunctionMetadata.ts new file mode 100644 index 00000000..390c9eaf --- /dev/null +++ b/node_modules/@azure/functions/src/converters/toCoreFunctionMetadata.ts @@ -0,0 +1,76 @@ +// Copyright (c) .NET Foundation. All rights reserved. +// Licensed under the MIT License. 
+ +import { ExponentialBackoffRetryOptions, FixedDelayRetryOptions, GenericFunctionOptions } from '@azure/functions'; +import * as coreTypes from '@azure/functions-core'; +import { returnBindingKey } from '../constants'; +import { AzFuncSystemError } from '../errors'; +import { isTrigger } from '../utils/isTrigger'; +import { toRpcDuration } from './toRpcDuration'; + +export function toCoreFunctionMetadata(name: string, options: GenericFunctionOptions): coreTypes.FunctionMetadata { + const bindings: Record = {}; + const bindingNames: string[] = []; + + const trigger = options.trigger; + bindings[trigger.name] = { + ...trigger, + direction: 'in', + type: isTrigger(trigger.type) ? trigger.type : trigger.type + 'Trigger', + }; + bindingNames.push(trigger.name); + + if (options.extraInputs) { + for (const input of options.extraInputs) { + bindings[input.name] = { + ...input, + direction: 'in', + }; + bindingNames.push(input.name); + } + } + + if (options.return) { + bindings[returnBindingKey] = { + ...options.return, + direction: 'out', + }; + bindingNames.push(returnBindingKey); + } + + if (options.extraOutputs) { + for (const output of options.extraOutputs) { + bindings[output.name] = { + ...output, + direction: 'out', + }; + bindingNames.push(output.name); + } + } + + const dupeBindings = bindingNames.filter((v, i) => bindingNames.indexOf(v) !== i); + if (dupeBindings.length > 0) { + throw new AzFuncSystemError( + `Duplicate bindings found for function "${name}". 
Remove a duplicate binding or manually specify the "name" property to make it unique.` + ); + } + + let retryOptions: coreTypes.RpcRetryOptions | undefined; + if (options.retry) { + retryOptions = { + ...options.retry, + retryStrategy: options.retry.strategy, + delayInterval: toRpcDuration((options.retry).delayInterval, 'retry.delayInterval'), + maximumInterval: toRpcDuration( + (options.retry).maximumInterval, + 'retry.maximumInterval' + ), + minimumInterval: toRpcDuration( + (options.retry).minimumInterval, + 'retry.minimumInterval' + ), + }; + } + + return { name, bindings, retryOptions }; +} diff --git a/node_modules/@azure/functions/src/converters/toMcpToolTriggerOptionsToRpc.ts b/node_modules/@azure/functions/src/converters/toMcpToolTriggerOptionsToRpc.ts new file mode 100644 index 00000000..fe6cf1ee --- /dev/null +++ b/node_modules/@azure/functions/src/converters/toMcpToolTriggerOptionsToRpc.ts @@ -0,0 +1,149 @@ +// Copyright (c) .NET Foundation. All rights reserved. +// Licensed under the MIT License. + +import { McpToolProperty, McpToolTriggerOptions, McpToolTriggerOptionsToRpc } from '../../types'; + +// Copyright (c) .NET Foundation. All rights reserved. +// Licensed under the MIT License. + +/** + * Converts an McpToolTriggerOptions object to an McpToolTriggerOptionsToRpc object. + * + * @param mcpToolTriggerOptions - The input options to be converted. + * @returns The converted McpToolTriggerOptionsToRpc object. 
+ */ +export function converToMcpToolTriggerOptionsToRpc( + mcpToolTriggerOptions: McpToolTriggerOptions +): McpToolTriggerOptionsToRpc { + // Base object for the return value + const baseResult = { + toolName: mcpToolTriggerOptions.toolName, + description: mcpToolTriggerOptions.description, + }; + + // Check for null or undefined toolProperties + if (!mcpToolTriggerOptions?.toolProperties) { + return { + ...baseResult, + toolProperties: JSON.stringify([]), // Default to an empty array + }; + } + + // Check if toolProperties is an array of McpToolProperty objects + if (Array.isArray(mcpToolTriggerOptions.toolProperties)) { + const isValid = mcpToolTriggerOptions.toolProperties.every(isMcpToolProperty); + if (isValid) { + return { + ...baseResult, + toolProperties: JSON.stringify(mcpToolTriggerOptions.toolProperties), + }; + } else { + throw new Error( + 'Invalid toolProperties: Array contains invalid McpToolProperty, please validate the parameters.' + ); + } + } + + // Handle cases where toolProperties is an object (e.g., Zod schema) + if (typeof mcpToolTriggerOptions.toolProperties === 'object') { + // Define the type of the ZodObject shape and ZodPropertyDef + type ZodPropertyDef = { + description?: string; + typeName: string; + }; + type ZodObjectShape = Record; + + // Define the type of the toolProperties object + type ToolProperties = + | { + _def?: { + typeName?: string; + }; + shape?: ZodObjectShape; + } + | Record; + + let isZodObject = false; + + const toolProperties = mcpToolTriggerOptions.toolProperties as ToolProperties; + + // Check if the object is a ZodObject + if ((toolProperties?._def as { typeName?: string })?.typeName === 'ZodObject') { + isZodObject = true; + } + + // Check if shape is a valid ZodObject shape + const shape: ZodObjectShape | Record = isZodObject + ? 
(toolProperties as { shape: ZodObjectShape }).shape + : toolProperties; + + // Extract properties from the ZodObject shape + const result = Object.keys(shape).map((propertyName) => { + const property = shape[propertyName] as { _def: ZodPropertyDef }; + const description = property?._def?.description || ''; + const propertyType = getPropertyType(property?._def?.typeName?.toLowerCase() || 'unknown'); // Extract type name or default to "unknown" + + return { + propertyName, + propertyType, + description, + }; + }); + + return { + ...baseResult, + toolProperties: JSON.stringify(result), + }; + } + // Handle cases where toolProperties is not an array + throw new Error('Invalid toolProperties: Expected an array of McpToolProperty objects or zod objects.'); +} + +// Helper function to infer property type from zod schema +function getPropertyType(zodType: string): string { + switch (zodType) { + case 'zodnumber': + return 'number'; + case 'zodstring': + return 'string'; + case 'zodboolean': + return 'boolean'; + case 'zodarray': + return 'array'; + case 'zodobject': + return 'object'; + case 'zodbigint': + return 'long'; + case 'zoddate': + return 'DateTime'; + case 'zodtuple': + return 'Tuple'; + default: + console.warn(`Unknown zod type: ${zodType}`); + return 'unknown'; + } +} + +/** + * Type guard to check if a given object is of type McpToolProperty. + * + * @param property - The object to check. + * @returns True if the object is of type McpToolProperty, otherwise false. + * + * This function ensures that the object: + * - Is not null and is of type 'object'. + * - Contains the required properties: 'propertyName', 'propertyType', and 'description'. + * - Each of these properties is of the correct type (string). 
+ */ +function isMcpToolProperty(property: unknown): property is McpToolProperty { + return ( + typeof property === 'object' && + property !== null && + 'propertyName' in property && + 'propertyType' in property && + 'description' in property && + typeof (property as McpToolProperty).propertyName === 'string' && + typeof (property as McpToolProperty).propertyType === 'string' && + typeof (property as McpToolProperty).description === 'string' + ); +} diff --git a/node_modules/@azure/functions/src/converters/toRpcDuration.ts b/node_modules/@azure/functions/src/converters/toRpcDuration.ts new file mode 100644 index 00000000..7e03c1f3 --- /dev/null +++ b/node_modules/@azure/functions/src/converters/toRpcDuration.ts @@ -0,0 +1,36 @@ +// Copyright (c) .NET Foundation. All rights reserved. +// Licensed under the MIT License. + +import { RpcDuration } from '@azure/functions-core'; +import { Duration } from '../../types'; +import { AzFuncSystemError } from '../errors'; +import { isDefined } from '../utils/nonNull'; + +export function toRpcDuration(dateTime: Duration | number | undefined, propertyName: string): RpcDuration | undefined { + if (isDefined(dateTime)) { + try { + let timeInMilliseconds: number | undefined; + if (typeof dateTime === 'object') { + const minutes = (dateTime.minutes || 0) + (dateTime.hours || 0) * 60; + const seconds = (dateTime.seconds || 0) + minutes * 60; + timeInMilliseconds = (dateTime.milliseconds || 0) + seconds * 1000; + } else if (typeof dateTime === 'number') { + timeInMilliseconds = dateTime; + } + + if (isDefined(timeInMilliseconds) && timeInMilliseconds >= 0) { + return { + seconds: Math.round(timeInMilliseconds / 1000), + }; + } + } catch { + // fall through + } + + throw new AzFuncSystemError( + `A 'number' or 'Duration' object was expected instead of a '${typeof dateTime}'. 
Cannot parse value of '${propertyName}'.` + ); + } + + return undefined; +} diff --git a/node_modules/@azure/functions/src/converters/toRpcHttp.ts b/node_modules/@azure/functions/src/converters/toRpcHttp.ts new file mode 100644 index 00000000..c314f17a --- /dev/null +++ b/node_modules/@azure/functions/src/converters/toRpcHttp.ts @@ -0,0 +1,47 @@ +// Copyright (c) .NET Foundation. All rights reserved. +// Licensed under the MIT License. + +import { RpcHttpData, RpcTypedData } from '@azure/functions-core'; +import { AzFuncSystemError } from '../errors'; +import { sendProxyResponse } from '../http/httpProxy'; +import { HttpResponse } from '../http/HttpResponse'; +import { enableHttpStream } from '../setup'; +import { toRpcHttpCookie } from './toRpcHttpCookie'; +import { toRpcTypedData } from './toRpcTypedData'; + +export async function toRpcHttp(invocationId: string, data: unknown): Promise { + if (data === null || data === undefined) { + return data; + } else if (typeof data !== 'object') { + throw new AzFuncSystemError( + 'The HTTP response must be an object with optional properties "body", "status", "headers", and "cookies".' + ); + } + + const response = data instanceof HttpResponse ? 
data : new HttpResponse(data); + if (enableHttpStream) { + // send http data over http proxy instead of rpc + await sendProxyResponse(invocationId, response); + return; + } + + const rpcResponse: RpcHttpData = {}; + rpcResponse.statusCode = response.status.toString(); + + rpcResponse.headers = {}; + for (const [key, value] of response.headers.entries()) { + rpcResponse.headers[key] = value; + } + + rpcResponse.cookies = []; + for (const cookie of response.cookies) { + rpcResponse.cookies.push(toRpcHttpCookie(cookie)); + } + + rpcResponse.enableContentNegotiation = response.enableContentNegotiation; + + const bodyBytes = await response.arrayBuffer(); + rpcResponse.body = toRpcTypedData(bodyBytes); + + return { http: rpcResponse }; +} diff --git a/node_modules/@azure/functions/src/converters/toRpcHttpCookie.ts b/node_modules/@azure/functions/src/converters/toRpcHttpCookie.ts new file mode 100644 index 00000000..e3532434 --- /dev/null +++ b/node_modules/@azure/functions/src/converters/toRpcHttpCookie.ts @@ -0,0 +1,39 @@ +// Copyright (c) .NET Foundation. All rights reserved. +// Licensed under the MIT License. 
+ +import { Cookie } from '@azure/functions'; +import { RpcHttpCookie, RpcHttpCookieSameSite } from '@azure/functions-core'; +import { toNullableBool, toNullableDouble, toNullableString, toNullableTimestamp, toRpcString } from './toRpcNullable'; + +/** + * From RFC specifications for 'Set-Cookie' response header: https://www.rfc-editor.org/rfc/rfc6265.txt + * @param inputCookie + */ +export function toRpcHttpCookie(inputCookie: Cookie): RpcHttpCookie { + // Resolve RpcHttpCookie.SameSite enum, a one-off + let rpcSameSite: RpcHttpCookieSameSite = 'none'; + if (inputCookie && inputCookie.sameSite) { + const sameSite = inputCookie.sameSite.toLocaleLowerCase(); + if (sameSite === 'lax') { + rpcSameSite = 'lax'; + } else if (sameSite === 'strict') { + rpcSameSite = 'strict'; + } else if (sameSite === 'none') { + rpcSameSite = 'explicitNone'; + } + } + + const rpcCookie: RpcHttpCookie = { + name: inputCookie && toRpcString(inputCookie.name, 'cookie.name'), + value: inputCookie && toRpcString(inputCookie.value, 'cookie.value'), + domain: toNullableString(inputCookie && inputCookie.domain, 'cookie.domain'), + path: toNullableString(inputCookie && inputCookie.path, 'cookie.path'), + expires: toNullableTimestamp(inputCookie && inputCookie.expires, 'cookie.expires'), + secure: toNullableBool(inputCookie && inputCookie.secure, 'cookie.secure'), + httpOnly: toNullableBool(inputCookie && inputCookie.httpOnly, 'cookie.httpOnly'), + sameSite: rpcSameSite, + maxAge: toNullableDouble(inputCookie && inputCookie.maxAge, 'cookie.maxAge'), + }; + + return rpcCookie; +} diff --git a/node_modules/@azure/functions/src/converters/toRpcNullable.ts b/node_modules/@azure/functions/src/converters/toRpcNullable.ts new file mode 100644 index 00000000..56375e80 --- /dev/null +++ b/node_modules/@azure/functions/src/converters/toRpcNullable.ts @@ -0,0 +1,132 @@ +// Copyright (c) .NET Foundation. All rights reserved. +// Licensed under the MIT License. 
+ +import { RpcNullableBool, RpcNullableDouble, RpcNullableString, RpcNullableTimestamp } from '@azure/functions-core'; +import { AzFuncSystemError } from '../errors'; +import { isDefined } from '../utils/nonNull'; + +/** + * Converts boolean input to an 'INullableBool' to be sent through the RPC layer. + * Input that is not a boolean but is also not null or undefined logs a function app level warning. + * @param nullable Input to be converted to an INullableBool if it is a valid boolean + * @param propertyName The name of the property that the caller will assign the output to. Used for debugging. + */ +export function toNullableBool(nullable: boolean | undefined, propertyName: string): undefined | RpcNullableBool { + if (typeof nullable === 'boolean') { + return { + value: nullable, + }; + } + + if (isDefined(nullable)) { + throw new AzFuncSystemError( + `A 'boolean' type was expected instead of a '${typeof nullable}' type. Cannot parse value of '${propertyName}'.` + ); + } + + return undefined; +} + +/** + * Converts number or string that parses to a number to an 'INullableDouble' to be sent through the RPC layer. + * Input that is not a valid number but is also not null or undefined logs a function app level warning. + * @param nullable Input to be converted to an INullableDouble if it is a valid number + * @param propertyName The name of the property that the caller will assign the output to. Used for debugging. + */ +export function toNullableDouble( + nullable: number | string | undefined, + propertyName: string +): undefined | RpcNullableDouble { + if (typeof nullable === 'number') { + return { + value: nullable, + }; + } else if (typeof nullable === 'string') { + if (!isNaN(Number(nullable))) { + const parsedNumber = parseFloat(nullable); + return { + value: parsedNumber, + }; + } + } + + if (isDefined(nullable)) { + throw new AzFuncSystemError( + `A 'number' type was expected instead of a '${typeof nullable}' type. 
Cannot parse value of '${propertyName}'.` + ); + } + + return undefined; +} + +/** + * Converts string input to an 'INullableString' to be sent through the RPC layer. + * Input that is not a string but is also not null or undefined logs a function app level warning. + * @param nullable Input to be converted to an INullableString if it is a valid string + * @param propertyName The name of the property that the caller will assign the output to. Used for debugging. + */ +export function toRpcString(nullable: string | undefined, propertyName: string): string { + if (typeof nullable === 'string') { + return nullable; + } + + if (isDefined(nullable)) { + throw new AzFuncSystemError( + `A 'string' type was expected instead of a '${typeof nullable}' type. Cannot parse value of '${propertyName}'.` + ); + } + + return ''; +} + +/** + * Converts string input to an 'INullableString' to be sent through the RPC layer. + * Input that is not a string but is also not null or undefined logs a function app level warning. + * @param nullable Input to be converted to an INullableString if it is a valid string + * @param propertyName The name of the property that the caller will assign the output to. Used for debugging. + */ +export function toNullableString(nullable: string | undefined, propertyName: string): undefined | RpcNullableString { + if (typeof nullable === 'string') { + return { + value: nullable, + }; + } + + if (isDefined(nullable)) { + throw new AzFuncSystemError( + `A 'string' type was expected instead of a '${typeof nullable}' type. Cannot parse value of '${propertyName}'.` + ); + } + + return undefined; +} + +/** + * Converts Date or number input to an 'INullableTimestamp' to be sent through the RPC layer. + * Input that is not a Date or number but is also not null or undefined logs a function app level warning. 
+ * @param nullable Input to be converted to an INullableTimestamp if it is valid input + * @param propertyName The name of the property that the caller will assign the output to. Used for debugging. + */ +export function toNullableTimestamp( + dateTime: Date | number | undefined, + propertyName: string +): RpcNullableTimestamp | undefined { + if (isDefined(dateTime)) { + try { + const timeInMilliseconds = typeof dateTime === 'number' ? dateTime : dateTime.getTime(); + + if (timeInMilliseconds && timeInMilliseconds >= 0) { + return { + value: { + seconds: Math.round(timeInMilliseconds / 1000), + }, + }; + } + } catch { + throw new AzFuncSystemError( + `A 'number' or 'Date' input was expected instead of a '${typeof dateTime}'. Cannot parse value of '${propertyName}'.` + ); + } + } + return undefined; +} diff --git a/node_modules/@azure/functions/src/converters/toRpcTypedData.ts b/node_modules/@azure/functions/src/converters/toRpcTypedData.ts new file mode 100644 index 00000000..bd4f83fc --- /dev/null +++ b/node_modules/@azure/functions/src/converters/toRpcTypedData.ts @@ -0,0 +1,28 @@ +// Copyright (c) .NET Foundation. All rights reserved. +// Licensed under the MIT License. 
+ +import { RpcTypedData } from '@azure/functions-core'; + +export function toRpcTypedData(data: unknown): RpcTypedData | null | undefined { + if (data === null || data === undefined) { + return data; + } else if (typeof data === 'string') { + return { string: data }; + } else if (Buffer.isBuffer(data)) { + return { bytes: data }; + } else if (ArrayBuffer.isView(data)) { + const bytes = new Uint8Array(data.buffer, data.byteOffset, data.byteLength); + return { bytes: bytes }; + } else if (data instanceof ArrayBuffer) { + const bytes = new Uint8Array(data); + return { bytes: bytes }; + } else if (typeof data === 'number') { + if (Number.isInteger(data)) { + return { int: data }; + } else { + return { double: data }; + } + } else { + return { json: JSON.stringify(data) }; + } +} diff --git a/node_modules/@azure/functions/src/errors.ts b/node_modules/@azure/functions/src/errors.ts new file mode 100644 index 00000000..7f8192a4 --- /dev/null +++ b/node_modules/@azure/functions/src/errors.ts @@ -0,0 +1,69 @@ +// Copyright (c) .NET Foundation. All rights reserved. +// Licensed under the MIT License. 
+ +export interface AzFuncError { + /** + * System errors can be tracked in our telemetry + * User errors cannot be tracked in our telemetry because they could have user information (users can still track it themselves in their app insights resource) + */ + isAzureFunctionsSystemError: boolean; +} + +export interface ValidatedError extends Error, Partial { + /** + * Use `trySetErrorMessage` to set the error message + */ + readonly message: string; +} + +export class AzFuncSystemError extends Error { + isAzureFunctionsSystemError = true; +} + +export class AzFuncTypeError extends TypeError implements AzFuncError { + isAzureFunctionsSystemError = true; +} + +export class AzFuncRangeError extends RangeError implements AzFuncError { + isAzureFunctionsSystemError = true; +} + +export class ReadOnlyError extends AzFuncTypeError { + constructor(propertyName: string) { + super(`Cannot assign to read only property '${propertyName}'`); + } +} + +export function ensureErrorType(err: unknown): ValidatedError { + if (err instanceof Error) { + return err; + } else { + let message: string; + if (err === undefined || err === null) { + message = 'Unknown error'; + } else if (typeof err === 'string') { + message = err; + } else if (typeof err === 'object') { + message = JSON.stringify(err); + } else { + message = String(err); + } + return new Error(message); + } +} + +export function trySetErrorMessage(err: Error, message: string): void { + try { + err.message = message; + } catch { + // If we can't set the message, we'll keep the error as is + } +} + +/** + * This is mostly for callbacks where `null` or `undefined` indicates there is no error + * By contrast, anything thrown/caught is assumed to be an error regardless of what it is + */ +export function isError(err: unknown): boolean { + return err !== null && err !== undefined; +} diff --git a/node_modules/@azure/functions/src/hooks/AppStartContext.ts b/node_modules/@azure/functions/src/hooks/AppStartContext.ts new file mode 
100644 index 00000000..dda2b77c --- /dev/null +++ b/node_modules/@azure/functions/src/hooks/AppStartContext.ts @@ -0,0 +1,7 @@ +// Copyright (c) .NET Foundation. All rights reserved. +// Licensed under the MIT License. + +import * as types from '@azure/functions'; +import { HookContext } from './HookContext'; + +export class AppStartContext extends HookContext implements types.AppStartContext {} diff --git a/node_modules/@azure/functions/src/hooks/AppTerminateContext.ts b/node_modules/@azure/functions/src/hooks/AppTerminateContext.ts new file mode 100644 index 00000000..84e3694f --- /dev/null +++ b/node_modules/@azure/functions/src/hooks/AppTerminateContext.ts @@ -0,0 +1,7 @@ +// Copyright (c) .NET Foundation. All rights reserved. +// Licensed under the MIT License. + +import * as types from '@azure/functions'; +import { HookContext } from './HookContext'; + +export class AppTerminateContext extends HookContext implements types.AppTerminateContext {} diff --git a/node_modules/@azure/functions/src/hooks/HookContext.ts b/node_modules/@azure/functions/src/hooks/HookContext.ts new file mode 100644 index 00000000..c2794cad --- /dev/null +++ b/node_modules/@azure/functions/src/hooks/HookContext.ts @@ -0,0 +1,23 @@ +// Copyright (c) .NET Foundation. All rights reserved. +// Licensed under the MIT License. + +import * as types from '@azure/functions'; +import { ReadOnlyError } from '../errors'; +import { nonNullProp } from '../utils/nonNull'; + +export class HookContext implements types.HookContext { + #init: types.HookContextInit; + + constructor(init?: types.HookContextInit) { + this.#init = init ?? 
{}; + this.#init.hookData ??= {}; + } + + get hookData(): Record { + return nonNullProp(this.#init, 'hookData'); + } + + set hookData(_value: unknown) { + throw new ReadOnlyError('hookData'); + } +} diff --git a/node_modules/@azure/functions/src/hooks/InvocationHookContext.ts b/node_modules/@azure/functions/src/hooks/InvocationHookContext.ts new file mode 100644 index 00000000..13454d1e --- /dev/null +++ b/node_modules/@azure/functions/src/hooks/InvocationHookContext.ts @@ -0,0 +1,35 @@ +// Copyright (c) .NET Foundation. All rights reserved. +// Licensed under the MIT License. + +import * as types from '@azure/functions'; +import { ReadOnlyError } from '../errors'; +import { InvocationContext } from '../InvocationContext'; +import { nonNullProp } from '../utils/nonNull'; +import { HookContext } from './HookContext'; + +export class InvocationHookContext extends HookContext implements types.InvocationHookContext { + #init: types.InvocationHookContextInit; + + constructor(init?: types.InvocationHookContextInit) { + super(init); + this.#init = init ?? {}; + this.#init.inputs ??= []; + this.#init.invocationContext ??= new InvocationContext(); + } + + get invocationContext(): types.InvocationContext { + return nonNullProp(this.#init, 'invocationContext'); + } + + set invocationContext(_value: types.InvocationContext) { + throw new ReadOnlyError('invocationContext'); + } + + get inputs(): unknown[] { + return nonNullProp(this.#init, 'inputs'); + } + + set inputs(value: unknown[]) { + this.#init.inputs = value; + } +} diff --git a/node_modules/@azure/functions/src/hooks/LogHookContext.ts b/node_modules/@azure/functions/src/hooks/LogHookContext.ts new file mode 100644 index 00000000..1f5e2ef5 --- /dev/null +++ b/node_modules/@azure/functions/src/hooks/LogHookContext.ts @@ -0,0 +1,51 @@ +// Copyright (c) .NET Foundation. All rights reserved. +// Licensed under the MIT License. 
+ +import * as types from '@azure/functions'; +import { ReadOnlyError } from '../errors'; +import { nonNullProp } from '../utils/nonNull'; +import { HookContext } from './HookContext'; + +export class LogHookContext extends HookContext implements types.LogHookContext { + #init: types.LogHookContextInit; + + constructor(init?: types.LogHookContextInit) { + super(init); + this.#init = init ?? {}; + this.#init.level ??= 'information'; + this.#init.message ??= 'unknown'; + this.#init.category ??= 'user'; + } + + get level(): types.LogLevel { + return nonNullProp(this.#init, 'level'); + } + + set level(value: types.LogLevel) { + this.#init.level = value; + } + + get message(): string { + return nonNullProp(this.#init, 'message'); + } + + set message(value: string) { + this.#init.message = value; + } + + get category(): types.LogCategory { + return nonNullProp(this.#init, 'category'); + } + + set category(_value: types.LogCategory) { + throw new ReadOnlyError('category'); + } + + get invocationContext(): types.InvocationContext | undefined { + return this.#init.invocationContext; + } + + set invocationContext(_value: types.InvocationContext | undefined) { + throw new ReadOnlyError('invocationContext'); + } +} diff --git a/node_modules/@azure/functions/src/hooks/PostInvocationContext.ts b/node_modules/@azure/functions/src/hooks/PostInvocationContext.ts new file mode 100644 index 00000000..889532fc --- /dev/null +++ b/node_modules/@azure/functions/src/hooks/PostInvocationContext.ts @@ -0,0 +1,30 @@ +// Copyright (c) .NET Foundation. All rights reserved. +// Licensed under the MIT License. + +import * as types from '@azure/functions'; +import { InvocationHookContext } from './InvocationHookContext'; + +export class PostInvocationContext extends InvocationHookContext implements types.PostInvocationContext { + #init: types.PostInvocationContextInit; + + constructor(init?: types.PostInvocationContextInit) { + super(init); + this.#init = init ?? 
{}; + } + + get result(): unknown { + return this.#init.result; + } + + set result(value: unknown) { + this.#init.result = value; + } + + get error(): unknown { + return this.#init.error; + } + + set error(value: unknown) { + this.#init.error = value; + } +} diff --git a/node_modules/@azure/functions/src/hooks/PreInvocationContext.ts b/node_modules/@azure/functions/src/hooks/PreInvocationContext.ts new file mode 100644 index 00000000..5a7f0be5 --- /dev/null +++ b/node_modules/@azure/functions/src/hooks/PreInvocationContext.ts @@ -0,0 +1,24 @@ +// Copyright (c) .NET Foundation. All rights reserved. +// Licensed under the MIT License. + +import * as types from '@azure/functions'; +import { nonNullProp } from '../utils/nonNull'; +import { InvocationHookContext } from './InvocationHookContext'; + +export class PreInvocationContext extends InvocationHookContext implements types.PreInvocationContext { + #init: types.PreInvocationContextInit; + + constructor(init?: types.PreInvocationContextInit) { + super(init); + this.#init = init ?? {}; + this.#init.functionCallback ??= () => {}; + } + + get functionHandler(): types.FunctionHandler { + return nonNullProp(this.#init, 'functionCallback'); + } + + set functionHandler(value: types.FunctionHandler) { + this.#init.functionCallback = value; + } +} diff --git a/node_modules/@azure/functions/src/hooks/registerHook.ts b/node_modules/@azure/functions/src/hooks/registerHook.ts new file mode 100644 index 00000000..104b3ae8 --- /dev/null +++ b/node_modules/@azure/functions/src/hooks/registerHook.ts @@ -0,0 +1,74 @@ +// Copyright (c) .NET Foundation. All rights reserved. +// Licensed under the MIT License. 
+ +import { + AppStartHandler, + AppTerminateHandler, + LogHookHandler, + PostInvocationHandler, + PreInvocationHandler, +} from '@azure/functions'; +import * as coreTypes from '@azure/functions-core'; +import { AzFuncSystemError, ensureErrorType } from '../errors'; +import { Disposable } from '../utils/Disposable'; +import { tryGetCoreApiLazy } from '../utils/tryGetCoreApiLazy'; +import { AppStartContext } from './AppStartContext'; +import { AppTerminateContext } from './AppTerminateContext'; +import { LogHookContext } from './LogHookContext'; +import { PostInvocationContext } from './PostInvocationContext'; +import { PreInvocationContext } from './PreInvocationContext'; + +function registerHook(hookName: string, callback: coreTypes.HookCallback): coreTypes.Disposable { + const coreApi = tryGetCoreApiLazy(); + if (!coreApi) { + console.warn( + `WARNING: Skipping call to register ${hookName} hook because the "@azure/functions" package is in test mode.` + ); + return new Disposable(() => { + console.warn( + `WARNING: Skipping call to dispose ${hookName} hook because the "@azure/functions" package is in test mode.` + ); + }); + } else { + return coreApi.registerHook(hookName, callback); + } +} + +export function appStart(handler: AppStartHandler): Disposable { + return registerHook('appStart', (coreContext) => { + return handler(new AppStartContext(coreContext)); + }); +} + +export function appTerminate(handler: AppTerminateHandler): Disposable { + return registerHook('appTerminate', (coreContext) => { + return handler(new AppTerminateContext(coreContext)); + }); +} + +export function preInvocation(handler: PreInvocationHandler): Disposable { + return registerHook('preInvocation', (coreContext) => { + return handler(new PreInvocationContext(coreContext)); + }); +} + +export function postInvocation(handler: PostInvocationHandler): Disposable { + return registerHook('postInvocation', (coreContext) => { + return handler(new PostInvocationContext(coreContext)); + }); +} 
+ +export function log(handler: LogHookHandler): Disposable { + try { + return registerHook('log', (coreContext) => { + return handler(new LogHookContext(coreContext)); + }); + } catch (err) { + const error = ensureErrorType(err); + if (error.name === 'RangeError' && error.isAzureFunctionsSystemError) { + throw new AzFuncSystemError(`Log hooks require Azure Functions Host v4.34 or higher.`); + } else { + throw err; + } + } +} diff --git a/node_modules/@azure/functions/src/http/HttpRequest.ts b/node_modules/@azure/functions/src/http/HttpRequest.ts new file mode 100644 index 00000000..a775a183 --- /dev/null +++ b/node_modules/@azure/functions/src/http/HttpRequest.ts @@ -0,0 +1,165 @@ +// Copyright (c) .NET Foundation. All rights reserved. +// Licensed under the MIT License. + +import * as types from '@azure/functions'; +import { HttpRequestParams, HttpRequestUser } from '@azure/functions'; +import { RpcHttpData, RpcTypedData } from '@azure/functions-core'; +import { Blob } from 'buffer'; +import { IncomingMessage } from 'http'; +import * as stream from 'stream'; +import { ReadableStream } from 'stream/web'; +import { FormData, Headers, HeadersInit, Request as uRequest } from 'undici'; +import { URLSearchParams } from 'url'; +import { fromNullableMapping } from '../converters/fromRpcNullable'; +import { fromRpcTypedData } from '../converters/fromRpcTypedData'; +import { AzFuncSystemError } from '../errors'; +import { isDefined, nonNullProp } from '../utils/nonNull'; +import { extractHttpUserFromHeaders } from './extractHttpUserFromHeaders'; + +interface InternalHttpRequestInit extends RpcHttpData { + undiciRequest?: uRequest; +} + +export class HttpRequest implements types.HttpRequest { + readonly query: URLSearchParams; + readonly params: HttpRequestParams; + + #cachedUser?: HttpRequestUser | null; + #uReq: uRequest; + #init: InternalHttpRequestInit; + + constructor(init: InternalHttpRequestInit) { + this.#init = init; + + let uReq = init.undiciRequest; + if (!uReq) 
{ + const url = nonNullProp(init, 'url'); + + let body: Buffer | string | undefined; + if (init.body?.bytes) { + body = Buffer.from(init.body?.bytes); + } else if (init.body?.string) { + body = init.body.string; + } + + uReq = new uRequest(url, { + body, + method: nonNullProp(init, 'method'), + headers: fromNullableMapping(init.nullableHeaders, init.headers), + }); + } + this.#uReq = uReq; + + if (init.nullableQuery || init.query) { + this.query = new URLSearchParams(fromNullableMapping(init.nullableQuery, init.query)); + } else { + this.query = new URL(this.#uReq.url).searchParams; + } + + this.params = fromNullableMapping(init.nullableParams, init.params); + } + + get url(): string { + return this.#uReq.url; + } + + get method(): string { + return this.#uReq.method; + } + + get headers(): Headers { + return this.#uReq.headers; + } + + get user(): HttpRequestUser | null { + if (this.#cachedUser === undefined) { + this.#cachedUser = extractHttpUserFromHeaders(this.headers); + } + + return this.#cachedUser; + } + + get body(): ReadableStream | null { + return this.#uReq.body; + } + + get bodyUsed(): boolean { + return this.#uReq.bodyUsed; + } + + async arrayBuffer(): Promise { + return this.#uReq.arrayBuffer(); + } + + async blob(): Promise { + return this.#uReq.blob(); + } + + async formData(): Promise { + return this.#uReq.formData(); + } + + async json(): Promise { + return this.#uReq.json(); + } + + async text(): Promise { + return this.#uReq.text(); + } + + clone(): HttpRequest { + const newInit = structuredClone(this.#init); + newInit.undiciRequest = this.#uReq.clone(); + return new HttpRequest(newInit); + } +} + +export function createStreamRequest( + proxyReq: IncomingMessage, + triggerMetadata: Record +): HttpRequest { + const hostHeaderName = 'x-forwarded-host'; + const protoHeaderName = 'x-forwarded-proto'; + const host = proxyReq.headers[hostHeaderName]; + const proto = proxyReq.headers[protoHeaderName]; + if (typeof host !== 'string' || typeof proto !== 
'string') { + throw new AzFuncSystemError(`Expected headers "${hostHeaderName}" and "${protoHeaderName}" to be set.`); + } + const url = `${proto}://${host}${nonNullProp(proxyReq, 'url')}`; + + let body: stream.Readable | undefined; + const lowerMethod = proxyReq.method?.toLowerCase(); + if (lowerMethod !== 'get' && lowerMethod !== 'head') { + body = proxyReq; + } + + // Get headers and params from trigger metadata + // See here for more info: https://github.com/Azure/azure-functions-host/issues/9840 + // NOTE: We ignore query info because it has this bug: https://github.com/Azure/azure-functions-nodejs-library/issues/168 + const { Query: rpcQueryIgnored, Headers: rpcHeaders, ...rpcParams } = triggerMetadata; + + let headers: HeadersInit | undefined; + const headersData = fromRpcTypedData(rpcHeaders); + if (typeof headersData === 'object' && isDefined(headersData)) { + headers = headersData; + } + + const uReq = new uRequest(url, { + body, + duplex: 'half', + method: nonNullProp(proxyReq, 'method'), + headers, + }); + + const params: Record = {}; + for (const [key, rpcValue] of Object.entries(rpcParams)) { + if (isDefined(rpcValue.string)) { + params[key] = rpcValue.string; + } + } + + return new HttpRequest({ + undiciRequest: uReq, + params, + }); +} diff --git a/node_modules/@azure/functions/src/http/HttpResponse.ts b/node_modules/@azure/functions/src/http/HttpResponse.ts new file mode 100644 index 00000000..6a2282de --- /dev/null +++ b/node_modules/@azure/functions/src/http/HttpResponse.ts @@ -0,0 +1,82 @@ +// Copyright (c) .NET Foundation. All rights reserved. +// Licensed under the MIT License. 
+ +import * as types from '@azure/functions'; +import { HttpResponseInit } from '@azure/functions'; +import { Blob } from 'buffer'; +import { ReadableStream } from 'stream/web'; +import { FormData, Headers, Response as uResponse, ResponseInit as uResponseInit } from 'undici'; +import { isDefined } from '../utils/nonNull'; + +interface InternalHttpResponseInit extends HttpResponseInit { + undiciResponse?: uResponse; +} + +export class HttpResponse implements types.HttpResponse { + readonly cookies: types.Cookie[]; + readonly enableContentNegotiation: boolean; + + #uRes: uResponse; + #init: InternalHttpResponseInit; + + constructor(init?: InternalHttpResponseInit) { + init ??= {}; + this.#init = init; + + if (init.undiciResponse) { + this.#uRes = init.undiciResponse; + } else { + const uResInit: uResponseInit = { status: init.status, headers: init.headers }; + if (isDefined(init.jsonBody)) { + this.#uRes = uResponse.json(init.jsonBody, uResInit); + } else { + this.#uRes = new uResponse(init.body, uResInit); + } + } + + this.cookies = init.cookies ?? 
[]; + this.enableContentNegotiation = !!init.enableContentNegotiation; + } + + get status(): number { + return this.#uRes.status; + } + + get headers(): Headers { + return this.#uRes.headers; + } + + get body(): ReadableStream | null { + return this.#uRes.body; + } + + get bodyUsed(): boolean { + return this.#uRes.bodyUsed; + } + + async arrayBuffer(): Promise { + return this.#uRes.arrayBuffer(); + } + + async blob(): Promise { + return this.#uRes.blob(); + } + + async formData(): Promise { + return this.#uRes.formData(); + } + + async json(): Promise { + return this.#uRes.json(); + } + + async text(): Promise { + return this.#uRes.text(); + } + + clone(): HttpResponse { + const newInit = structuredClone(this.#init); + newInit.undiciResponse = this.#uRes.clone(); + return new HttpResponse(newInit); + } +} diff --git a/node_modules/@azure/functions/src/http/extractHttpUserFromHeaders.ts b/node_modules/@azure/functions/src/http/extractHttpUserFromHeaders.ts new file mode 100644 index 00000000..a2b24a22 --- /dev/null +++ b/node_modules/@azure/functions/src/http/extractHttpUserFromHeaders.ts @@ -0,0 +1,38 @@ +// Copyright (c) .NET Foundation. All rights reserved. +// Licensed under the MIT License. + +import { HttpRequestUser } from '@azure/functions'; +import { Headers } from 'undici'; +import { nonNullValue } from '../utils/nonNull'; + +/* grandfathered in. 
Should fix when possible */ +/* eslint-disable @typescript-eslint/no-unsafe-assignment, @typescript-eslint/no-unsafe-argument, @typescript-eslint/no-unsafe-member-access */ + +export function extractHttpUserFromHeaders(headers: Headers): HttpRequestUser | null { + let user: HttpRequestUser | null = null; + + const clientPrincipal = headers.get('x-ms-client-principal'); + if (clientPrincipal) { + const claimsPrincipalData = JSON.parse(Buffer.from(clientPrincipal, 'base64').toString('utf-8')); + + if (claimsPrincipalData['identityProvider']) { + user = { + type: 'StaticWebApps', + id: claimsPrincipalData['userId'], + username: claimsPrincipalData['userDetails'], + identityProvider: claimsPrincipalData['identityProvider'], + claimsPrincipalData, + }; + } else { + user = { + type: 'AppService', + id: nonNullValue(headers.get('x-ms-client-principal-id'), 'user-id'), + username: nonNullValue(headers.get('x-ms-client-principal-name'), 'user-name'), + identityProvider: nonNullValue(headers.get('x-ms-client-principal-idp'), 'user-idp'), + claimsPrincipalData, + }; + } + } + + return user; +} diff --git a/node_modules/@azure/functions/src/http/httpProxy.ts b/node_modules/@azure/functions/src/http/httpProxy.ts new file mode 100644 index 00000000..b1c683b2 --- /dev/null +++ b/node_modules/@azure/functions/src/http/httpProxy.ts @@ -0,0 +1,173 @@ +// Copyright (c) .NET Foundation. All rights reserved. +// Licensed under the MIT License. 
+ +import { serialize as serializeCookie } from 'cookie'; +import { EventEmitter } from 'events'; +import * as http from 'http'; +import * as net from 'net'; +import { AzFuncSystemError, ensureErrorType } from '../errors'; +import { nonNullProp } from '../utils/nonNull'; +import { workerSystemLog } from '../utils/workerSystemLog'; +import { HttpResponse } from './HttpResponse'; + +const requests: Record = {}; +const responses: Record = {}; +const minPort = 55000; +const maxPort = 55025; + +const invocRequestEmitter = new EventEmitter(); + +export async function waitForProxyRequest(invocationId: string): Promise { + return new Promise((resolve, _reject) => { + const req = requests[invocationId]; + if (req) { + resolve(req); + delete requests[invocationId]; + } else { + invocRequestEmitter.once(invocationId, () => { + const req = requests[invocationId]; + if (req) { + resolve(req); + delete requests[invocationId]; + } + }); + } + }); +} + +const invocationIdHeader = 'x-ms-invocation-id'; +export async function sendProxyResponse(invocationId: string, userRes: HttpResponse): Promise { + const proxyRes = nonNullProp(responses, invocationId); + delete responses[invocationId]; + for (const [key, val] of userRes.headers.entries()) { + proxyRes.setHeader(key, val); + } + proxyRes.setHeader(invocationIdHeader, invocationId); + proxyRes.statusCode = userRes.status; + + if (userRes.cookies.length > 0) { + setCookies(userRes, proxyRes); + } + + if (userRes.body) { + for await (const chunk of userRes.body.values()) { + proxyRes.write(chunk); + } + } + proxyRes.end(); +} + +function setCookies(userRes: HttpResponse, proxyRes: http.ServerResponse): void { + const serializedCookies: string[] = userRes.cookies.map((c) => { + let sameSite: true | false | 'lax' | 'strict' | 'none' | undefined; + switch (c.sameSite) { + case 'Lax': + sameSite = 'lax'; + break; + case 'None': + sameSite = 'none'; + break; + case 'Strict': + sameSite = 'strict'; + break; + default: + sameSite = 
c.sameSite; + } + return serializeCookie(c.name, c.value, { + domain: c.domain, + path: c.path, + expires: typeof c.expires === 'number' ? new Date(c.expires) : c.expires, + secure: c.secure, + httpOnly: c.httpOnly, + sameSite: sameSite, + maxAge: c.maxAge, + }); + }); + proxyRes.setHeader('Set-Cookie', serializedCookies); +} + +export async function setupHttpProxy(): Promise { + return new Promise((resolve, reject) => { + const server = http.createServer(); + + server.on('request', (req, res) => { + const invocationId = req.headers[invocationIdHeader]; + if (typeof invocationId === 'string') { + requests[invocationId] = req; + responses[invocationId] = res; + invocRequestEmitter.emit(invocationId); + } else { + workerSystemLog('error', `Http proxy request missing header ${invocationIdHeader}`); + } + }); + + server.on('error', (err) => { + err = ensureErrorType(err); + workerSystemLog('error', `Http proxy error: ${err.stack || err.message}`); + }); + + server.listen(() => { + const address = server.address(); + // Valid address has been created + if (address !== null && typeof address === 'object') { + if (address.port === 0) { + // Auto-assigned port is 0, find and bind to an open port + workerSystemLog('debug', `Port 0 assigned. Finding open port.`); + findOpenPort((openPort: number) => { + // Close the server and re-listen on the found open port + server.close(); + server.listen(openPort, () => { + workerSystemLog('debug', `Server is now listening on found open port: ${openPort}`); + }); + resolve(`http://localhost:${openPort}/`); + }); + } else { + // Auto-assigned port is not 0 + workerSystemLog('debug', `Auto-assigned port is valid. 
Port: ${address.port}`); + resolve(`http://localhost:${address.port}/`); + } + } else { + reject(new AzFuncSystemError('Unexpected server address during http proxy setup')); + } + }); + + server.on('close', () => { + workerSystemLog('information', 'Http proxy closing'); + }); + }); +} + +// Function to find an open port starting from a specified port +function findOpenPort(callback: (port: number) => void): void { + const server = net.createServer(); + + function tryPort(port: number) { + if (port > maxPort) { + // If we've reached the maximum port, throw an error + throw new AzFuncSystemError( + `No available ports found between ${minPort} and ${maxPort}. To enable HTTP streaming, please open a port in this range.` + ); + } + + server.once('error', () => { + // If the port is unavailable, increment and try the next one + tryPort(port + 1); + }); + + // If the port is available, return it + server.once('listening', () => { + const address = server.address(); + if (address !== null && typeof address === 'object') { + port = address.port; + server.close(); + callback(port); + } + }); + + // Try binding to the given port + server.listen(port); + } + + // Start trying from the specified starting port + tryPort(minPort); +} diff --git a/node_modules/@azure/functions/src/index.ts b/node_modules/@azure/functions/src/index.ts new file mode 100644 index 00000000..0b2409f7 --- /dev/null +++ b/node_modules/@azure/functions/src/index.ts @@ -0,0 +1,28 @@ +// Copyright (c) .NET Foundation. All rights reserved. +// Licensed under the MIT License. 
+ +export * as app from './app'; +export { AppStartContext } from './hooks/AppStartContext'; +export { AppTerminateContext } from './hooks/AppTerminateContext'; +export { HookContext } from './hooks/HookContext'; +export { InvocationHookContext } from './hooks/InvocationHookContext'; +export { LogHookContext } from './hooks/LogHookContext'; +export { PostInvocationContext } from './hooks/PostInvocationContext'; +export { PreInvocationContext } from './hooks/PreInvocationContext'; +export { HttpRequest } from './http/HttpRequest'; +export { HttpResponse } from './http/HttpResponse'; +export * as input from './input'; +export { InvocationContext } from './InvocationContext'; +export * as output from './output'; +export * as trigger from './trigger'; +export { Disposable } from './utils/Disposable'; + +export enum SqlChangeOperation { + Insert = 0, + Update = 1, + Delete = 2, +} + +export enum MySqlChangeOperation { + Update = 0, +} diff --git a/node_modules/@azure/functions/src/input.ts b/node_modules/@azure/functions/src/input.ts new file mode 100644 index 00000000..3e238bec --- /dev/null +++ b/node_modules/@azure/functions/src/input.ts @@ -0,0 +1,79 @@ +// Copyright (c) .NET Foundation. All rights reserved. +// Licensed under the MIT License. 
+ +import { + CosmosDBInput, + CosmosDBInputOptions, + FunctionInput, + GenericInputOptions, + MySqlInput, + MySqlInputOptions, + SqlInput, + SqlInputOptions, + StorageBlobInput, + StorageBlobInputOptions, + TableInput, + TableInputOptions, + WebPubSubConnectionInput, + WebPubSubConnectionInputOptions, + WebPubSubContextInput, + WebPubSubContextInputOptions, +} from '@azure/functions'; +import { addBindingName } from './addBindingName'; + +export function storageBlob(options: StorageBlobInputOptions): StorageBlobInput { + return addInputBindingName({ + ...options, + type: 'blob', + }); +} + +export function table(options: TableInputOptions): TableInput { + return addInputBindingName({ + ...options, + type: 'table', + }); +} + +export function cosmosDB(options: CosmosDBInputOptions): CosmosDBInput { + return addInputBindingName({ + ...options, + type: 'cosmosDB', + }); +} + +export function sql(options: SqlInputOptions): SqlInput { + return addInputBindingName({ + ...options, + type: 'sql', + }); +} + +export function mySql(options: MySqlInputOptions): MySqlInput { + return addInputBindingName({ + ...options, + type: 'mysql', + }); +} + +export function webPubSubConnection(options: WebPubSubConnectionInputOptions): WebPubSubConnectionInput { + return addInputBindingName({ + ...options, + type: 'webPubSubConnection', + }); +} + +export function webPubSubContext(options: WebPubSubContextInputOptions): WebPubSubContextInput { + return addInputBindingName({ + ...options, + type: 'webPubSubContext', + }); +} + +export function generic(options: GenericInputOptions): FunctionInput { + return addInputBindingName(options); +} + +function addInputBindingName(binding: T): T & { name: string } { + return addBindingName(binding, 'Input'); +} diff --git a/node_modules/@azure/functions/src/output.ts b/node_modules/@azure/functions/src/output.ts new file mode 100644 index 00000000..f79fb5d1 --- /dev/null +++ b/node_modules/@azure/functions/src/output.ts @@ -0,0 +1,124 @@ +// 
Copyright (c) .NET Foundation. All rights reserved. +// Licensed under the MIT License. + +import { + CosmosDBOutput, + CosmosDBOutputOptions, + EventGridOutput, + EventGridOutputOptions, + EventHubOutput, + EventHubOutputOptions, + FunctionOutput, + GenericOutputOptions, + HttpOutput, + HttpOutputOptions, + MySqlOutput, + MySqlOutputOptions, + ServiceBusQueueOutput, + ServiceBusQueueOutputOptions, + ServiceBusTopicOutput, + ServiceBusTopicOutputOptions, + SqlOutput, + SqlOutputOptions, + StorageBlobOutput, + StorageBlobOutputOptions, + StorageQueueOutput, + StorageQueueOutputOptions, + TableOutput, + TableOutputOptions, + WebPubSubOutput, + WebPubSubOutputOptions, +} from '@azure/functions'; +import { addBindingName } from './addBindingName'; + +export function http(options: HttpOutputOptions): HttpOutput { + return addOutputBindingName({ + ...options, + type: 'http', + }); +} + +export function storageBlob(options: StorageBlobOutputOptions): StorageBlobOutput { + return addOutputBindingName({ + ...options, + type: 'blob', + }); +} + +export function table(options: TableOutputOptions): TableOutput { + return addOutputBindingName({ + ...options, + type: 'table', + }); +} + +export function storageQueue(options: StorageQueueOutputOptions): StorageQueueOutput { + return addOutputBindingName({ + ...options, + type: 'queue', + }); +} + +export function serviceBusQueue(options: ServiceBusQueueOutputOptions): ServiceBusQueueOutput { + return addOutputBindingName({ + ...options, + type: 'serviceBus', + }); +} + +export function serviceBusTopic(options: ServiceBusTopicOutputOptions): ServiceBusTopicOutput { + return addOutputBindingName({ + ...options, + type: 'serviceBus', + }); +} + +export function eventHub(options: EventHubOutputOptions): EventHubOutput { + return addOutputBindingName({ + ...options, + type: 'eventHub', + }); +} + +export function eventGrid(options: EventGridOutputOptions): EventGridOutput { + return addOutputBindingName({ + ...options, + type: 
'eventGrid', + }); +} + +export function cosmosDB(options: CosmosDBOutputOptions): CosmosDBOutput { + return addOutputBindingName({ + ...options, + type: 'cosmosDB', + }); +} + +export function sql(options: SqlOutputOptions): SqlOutput { + return addOutputBindingName({ + ...options, + type: 'sql', + }); +} + +export function mySql(options: MySqlOutputOptions): MySqlOutput { + return addOutputBindingName({ + ...options, + type: 'mysql', + }); +} + +export function webPubSub(options: WebPubSubOutputOptions): WebPubSubOutput { + return addOutputBindingName({ + ...options, + type: 'webPubSub', + }); +} + +export function generic(options: GenericOutputOptions): FunctionOutput { + return addOutputBindingName(options); +} + +function addOutputBindingName(binding: T): T & { name: string } { + return addBindingName(binding, 'Output'); +} diff --git a/node_modules/@azure/functions/src/setup.ts b/node_modules/@azure/functions/src/setup.ts new file mode 100644 index 00000000..d6b54c34 --- /dev/null +++ b/node_modules/@azure/functions/src/setup.ts @@ -0,0 +1,49 @@ +// Copyright (c) .NET Foundation. All rights reserved. +// Licensed under the MIT License. 
+ +import { SetupOptions } from '../types'; +import { AzFuncSystemError } from './errors'; +import { isDefined } from './utils/nonNull'; +import { tryGetCoreApiLazy } from './utils/tryGetCoreApiLazy'; +import { workerSystemLog } from './utils/workerSystemLog'; + +let setupLocked = false; +export function lockSetup(): void { + setupLocked = true; +} + +export let enableHttpStream = false; +export const capabilities: Record = {}; + +export function setup(opts: SetupOptions): void { + if (setupLocked) { + throw new AzFuncSystemError("Setup options can't be changed after app startup has finished."); + } + + if (opts.enableHttpStream) { + // NOTE: coreApi.log was coincidentally added the same time as http streaming, + // so we can use that to validate the host version instead of messing with semver parsing + const coreApi = tryGetCoreApiLazy(); + if (coreApi && !coreApi.log) { + throw new AzFuncSystemError(`HTTP streaming requires Azure Functions Host v4.28 or higher.`); + } + } + + if (isDefined(opts.enableHttpStream)) { + enableHttpStream = opts.enableHttpStream; + } + + if (opts.capabilities) { + for (let [key, val] of Object.entries(opts.capabilities)) { + if (isDefined(val)) { + val = String(val); + workerSystemLog('debug', `Capability ${key} set to ${val}.`); + capabilities[key] = val; + } + } + } + + if (enableHttpStream) { + workerSystemLog('debug', `HTTP streaming enabled.`); + } +} diff --git a/node_modules/@azure/functions/src/trigger.ts b/node_modules/@azure/functions/src/trigger.ts new file mode 100644 index 00000000..564781e1 --- /dev/null +++ b/node_modules/@azure/functions/src/trigger.ts @@ -0,0 +1,152 @@ +// Copyright (c) .NET Foundation. All rights reserved. +// Licensed under the MIT License. 
+ +import { + CosmosDBTrigger, + CosmosDBTriggerOptions, + EventGridTrigger, + EventGridTriggerOptions, + EventHubTrigger, + EventHubTriggerOptions, + FunctionTrigger, + GenericTriggerOptions, + HttpTrigger, + HttpTriggerOptions, + McpToolTrigger, + McpToolTriggerOptions, + MySqlTrigger, + MySqlTriggerOptions, + ServiceBusQueueTrigger, + ServiceBusQueueTriggerOptions, + ServiceBusTopicTrigger, + ServiceBusTopicTriggerOptions, + SqlTrigger, + SqlTriggerOptions, + StorageBlobTrigger, + StorageBlobTriggerOptions, + StorageQueueTrigger, + StorageQueueTriggerOptions, + TimerTrigger, + TimerTriggerOptions, + WarmupTrigger, + WarmupTriggerOptions, + WebPubSubTrigger, + WebPubSubTriggerOptions, +} from '@azure/functions'; +import { addBindingName } from './addBindingName'; +import { converToMcpToolTriggerOptionsToRpc } from './converters/toMcpToolTriggerOptionsToRpc'; + +export function http(options: HttpTriggerOptions): HttpTrigger { + return addTriggerBindingName({ + ...options, + authLevel: options.authLevel || 'anonymous', + methods: options.methods || ['GET', 'POST'], + type: 'httpTrigger', + }); +} + +export function timer(options: TimerTriggerOptions): TimerTrigger { + return addTriggerBindingName({ + ...options, + type: 'timerTrigger', + }); +} + +export function storageBlob(options: StorageBlobTriggerOptions): StorageBlobTrigger { + return addTriggerBindingName({ + ...options, + type: 'blobTrigger', + }); +} + +export function storageQueue(options: StorageQueueTriggerOptions): StorageQueueTrigger { + return addTriggerBindingName({ + ...options, + type: 'queueTrigger', + }); +} + +export function serviceBusQueue(options: ServiceBusQueueTriggerOptions): ServiceBusQueueTrigger { + return addTriggerBindingName({ + ...options, + type: 'serviceBusTrigger', + }); +} + +export function serviceBusTopic(options: ServiceBusTopicTriggerOptions): ServiceBusTopicTrigger { + return addTriggerBindingName({ + ...options, + type: 'serviceBusTrigger', + }); +} + +export function 
eventHub(options: EventHubTriggerOptions): EventHubTrigger { + return addTriggerBindingName({ + ...options, + type: 'eventHubTrigger', + }); +} + +export function eventGrid(options: EventGridTriggerOptions): EventGridTrigger { + return addTriggerBindingName({ + ...options, + type: 'eventGridTrigger', + }); +} + +export function cosmosDB(options: CosmosDBTriggerOptions): CosmosDBTrigger { + return addTriggerBindingName({ + ...options, + type: 'cosmosDBTrigger', + }); +} + +export function warmup(options: WarmupTriggerOptions): WarmupTrigger { + return addTriggerBindingName({ + ...options, + type: 'warmupTrigger', + }); +} + +export function sql(options: SqlTriggerOptions): SqlTrigger { + return addTriggerBindingName({ + ...options, + type: 'sqlTrigger', + }); +} + +export function mySql(options: MySqlTriggerOptions): MySqlTrigger { + return addTriggerBindingName({ + ...options, + type: 'mysqlTrigger', + }); +} + +export function webPubSub(options: WebPubSubTriggerOptions): WebPubSubTrigger { + return addTriggerBindingName({ + ...options, + type: 'webPubSubTrigger', + }); +} + +/** + * Creates an MCP Tool trigger configuration. + * This function is used to define an MCP Tool trigger for an Azure Function. + * + * @param options - The configuration options for the MCP Tool trigger, including tool-specific metadata. + * @returns An MCP Tool trigger object with the specified configuration. 
+ */ +export function mcpTool(options: McpToolTriggerOptions): McpToolTrigger { + return addTriggerBindingName({ + ...converToMcpToolTriggerOptionsToRpc(options), + type: 'mcpToolTrigger', + }); +} + +export function generic(options: GenericTriggerOptions): FunctionTrigger { + return addTriggerBindingName(options); +} + +function addTriggerBindingName(binding: T): T & { name: string } { + return addBindingName(binding, 'Trigger'); +} diff --git a/node_modules/@azure/functions/src/utils/Disposable.ts b/node_modules/@azure/functions/src/utils/Disposable.ts new file mode 100644 index 00000000..913a080b --- /dev/null +++ b/node_modules/@azure/functions/src/utils/Disposable.ts @@ -0,0 +1,35 @@ +// Copyright (c) .NET Foundation. All rights reserved. +// Licensed under the MIT License. + +/** + * Based off of VS Code + * https://github.com/microsoft/vscode/blob/7bed4ce3e9f5059b5fc638c348f064edabcce5d2/src/vs/workbench/api/common/extHostTypes.ts#L65 + */ +export class Disposable { + static from(...inDisposables: { dispose(): any }[]): Disposable { + let disposables: ReadonlyArray<{ dispose(): any }> | undefined = inDisposables; + return new Disposable(function () { + if (disposables) { + for (const disposable of disposables) { + if (disposable && typeof disposable.dispose === 'function') { + disposable.dispose(); + } + } + disposables = undefined; + } + }); + } + + #callOnDispose?: () => any; + + constructor(callOnDispose: () => any) { + this.#callOnDispose = callOnDispose; + } + + dispose(): any { + if (typeof this.#callOnDispose === 'function') { + this.#callOnDispose(); + this.#callOnDispose = undefined; + } + } +} diff --git a/node_modules/@azure/functions/src/utils/fallbackLogHandler.ts b/node_modules/@azure/functions/src/utils/fallbackLogHandler.ts new file mode 100644 index 00000000..1c2d9669 --- /dev/null +++ b/node_modules/@azure/functions/src/utils/fallbackLogHandler.ts @@ -0,0 +1,27 @@ +// Copyright (c) .NET Foundation. All rights reserved. 
+// Licensed under the MIT License. + +import * as types from '@azure/functions'; + +export function fallbackLogHandler(level: types.LogLevel, ...args: unknown[]): void { + switch (level) { + case 'trace': + console.trace(...args); + break; + case 'debug': + console.debug(...args); + break; + case 'information': + console.info(...args); + break; + case 'warning': + console.warn(...args); + break; + case 'critical': + case 'error': + console.error(...args); + break; + default: + console.log(...args); + } +} diff --git a/node_modules/@azure/functions/src/utils/getRandomHexString.ts b/node_modules/@azure/functions/src/utils/getRandomHexString.ts new file mode 100644 index 00000000..5f9476cf --- /dev/null +++ b/node_modules/@azure/functions/src/utils/getRandomHexString.ts @@ -0,0 +1,13 @@ +// Copyright (c) .NET Foundation. All rights reserved. +// Licensed under the MIT License. + +import * as crypto from 'crypto'; + +export function getRandomHexString(length = 10): string { + const buffer: Buffer = crypto.randomBytes(Math.ceil(length / 2)); + return buffer.toString('hex').slice(0, length); +} + +export function getStringHash(data: string, length = 10): string { + return crypto.createHash('sha256').update(data).digest('hex').slice(0, length); +} diff --git a/node_modules/@azure/functions/src/utils/isTrigger.ts b/node_modules/@azure/functions/src/utils/isTrigger.ts new file mode 100644 index 00000000..b58aceeb --- /dev/null +++ b/node_modules/@azure/functions/src/utils/isTrigger.ts @@ -0,0 +1,14 @@ +// Copyright (c) .NET Foundation. All rights reserved. +// Licensed under the MIT License. 
+ +export function isTrigger(typeName: string | undefined | null): boolean { + return !!typeName && /trigger$/i.test(typeName); +} + +export function isHttpTrigger(typeName: string | undefined | null): boolean { + return typeName?.toLowerCase() === 'httptrigger'; +} + +export function isTimerTrigger(typeName: string | undefined | null): boolean { + return typeName?.toLowerCase() === 'timertrigger'; +} diff --git a/node_modules/@azure/functions/src/utils/nonNull.ts b/node_modules/@azure/functions/src/utils/nonNull.ts new file mode 100644 index 00000000..336b317a --- /dev/null +++ b/node_modules/@azure/functions/src/utils/nonNull.ts @@ -0,0 +1,40 @@ +// Copyright (c) .NET Foundation. All rights reserved. +// Licensed under the MIT License. + +import { AzFuncSystemError } from '../errors'; + +/** + * Retrieves a property by name from an object and checks that it's not null and not undefined. It is strongly typed + * for the property and will give a compile error if the given name is not a property of the source. + */ +export function nonNullProp( + source: TSource, + name: TKey +): NonNullable { + const value: NonNullable = >source[name]; + return nonNullValue(value, name); +} + +/** + * Validates that a given value is not null and not undefined. + */ +export function nonNullValue(value: T | undefined | null, propertyNameOrMessage?: string): T { + if (value === null || value === undefined) { + throw new AzFuncSystemError( + 'Internal error: Expected value to be neither null nor undefined' + + (propertyNameOrMessage ? 
`: ${propertyNameOrMessage}` : '') + ); + } + + return value; +} + +export function copyPropIfDefined(source: TData, destination: TData, key: TKey): void { + if (source[key] !== null && source[key] !== undefined) { + destination[key] = source[key]; + } +} + +export function isDefined(data: T | undefined | null): data is T { + return data !== null && data !== undefined; +} diff --git a/node_modules/@azure/functions/src/utils/tryGetCoreApiLazy.ts b/node_modules/@azure/functions/src/utils/tryGetCoreApiLazy.ts new file mode 100644 index 00000000..b42c5238 --- /dev/null +++ b/node_modules/@azure/functions/src/utils/tryGetCoreApiLazy.ts @@ -0,0 +1,17 @@ +// Copyright (c) .NET Foundation. All rights reserved. +// Licensed under the MIT License. + +import * as coreTypes from '@azure/functions-core'; + +let coreApi: typeof coreTypes | undefined | null; +export function tryGetCoreApiLazy(): typeof coreTypes | null { + if (coreApi === undefined) { + try { + // eslint-disable-next-line @typescript-eslint/no-var-requires + coreApi = require('@azure/functions-core'); + } catch { + coreApi = null; + } + } + return coreApi; +} diff --git a/node_modules/@azure/functions/src/utils/util.ts b/node_modules/@azure/functions/src/utils/util.ts new file mode 100644 index 00000000..e870de86 --- /dev/null +++ b/node_modules/@azure/functions/src/utils/util.ts @@ -0,0 +1,6 @@ +// Copyright (c) .NET Foundation. All rights reserved. +// Licensed under the MIT License. + +export function isEnvironmentVariableSet(val: string | boolean | number | undefined | null): boolean { + return !/^(false|0)?$/i.test(val === undefined || val === null ? '' : String(val)); +} diff --git a/node_modules/@azure/functions/src/utils/workerSystemLog.ts b/node_modules/@azure/functions/src/utils/workerSystemLog.ts new file mode 100644 index 00000000..0011b12c --- /dev/null +++ b/node_modules/@azure/functions/src/utils/workerSystemLog.ts @@ -0,0 +1,17 @@ +// Copyright (c) .NET Foundation. All rights reserved. 
+// Licensed under the MIT License. + +import * as types from '@azure/functions'; +import { format } from 'util'; +import { fallbackLogHandler } from './fallbackLogHandler'; +import { tryGetCoreApiLazy } from './tryGetCoreApiLazy'; + +export function workerSystemLog(level: types.LogLevel, ...args: unknown[]): void { + const coreApi = tryGetCoreApiLazy(); + // NOTE: coreApi.log doesn't exist on older versions of the worker + if (coreApi && coreApi.log) { + coreApi.log(level, 'system', format(...args)); + } else { + fallbackLogHandler(level, ...args); + } +} diff --git a/node_modules/@azure/functions/types/InvocationContext.d.ts b/node_modules/@azure/functions/types/InvocationContext.d.ts new file mode 100644 index 00000000..8815bdf3 --- /dev/null +++ b/node_modules/@azure/functions/types/InvocationContext.d.ts @@ -0,0 +1,366 @@ +// Copyright (c) .NET Foundation. All rights reserved. +// Licensed under the MIT License. + +import { CosmosDBInput, CosmosDBOutput } from './cosmosDB'; +import { EventGridOutput, EventGridPartialEvent } from './eventGrid'; +import { EventHubOutput } from './eventHub'; +import { HttpOutput, HttpResponse } from './http'; +import { FunctionInput, FunctionOutput, FunctionTrigger, LogLevel } from './index'; +import { MySqlInput, MySqlOutput } from './mySql'; +import { ServiceBusQueueOutput, ServiceBusTopicOutput } from './serviceBus'; +import { SqlInput, SqlOutput } from './sql'; +import { StorageBlobInput, StorageBlobOutput, StorageQueueOutput } from './storage'; +import { TableInput, TableOutput } from './table'; +import { WebPubSubOutput } from './webpubsub'; + +/** + * Contains metadata and helper methods specific to this invocation + */ +export declare class InvocationContext { + /** + * For testing purposes only. 
This will always be constructed for you when run in the context of the Azure Functions runtime + */ + constructor(init?: InvocationContextInit); + + /** + * A unique guid specific to this invocation + */ + invocationId: string; + + /** + * The name of the function that is being invoked + */ + functionName: string; + + /** + * An object used to get secondary inputs + */ + extraInputs: InvocationContextExtraInputs; + + /** + * An object used to set secondary outputs + */ + extraOutputs: InvocationContextExtraOutputs; + + /** + * The recommended way to log data during invocation. + * Similar to Node.js's `console.log`, but has integration with Azure features like application insights + * Uses the 'information' log level + */ + log(...args: any[]): void; + + /** + * The recommended way to log trace data (level 0) during invocation. + * Similar to Node.js's `console.trace`, but has integration with Azure features like application insights + */ + trace(...args: any[]): void; + + /** + * The recommended way to log debug data (level 1) during invocation. + * Similar to Node.js's `console.debug`, but has integration with Azure features like application insights + */ + debug(...args: any[]): void; + + /** + * The recommended way to log information data (level 2) during invocation. + * Similar to Node.js's `console.info`, but has integration with Azure features like application insights + */ + info(...args: any[]): void; + + /** + * The recommended way to log warning data (level 3) during invocation. + * Similar to Node.js's `console.warn`, but has integration with Azure features like application insights + */ + warn(...args: any[]): void; + + /** + * The recommended way to log error data (level 4) during invocation. 
+ * Similar to Node.js's `console.error`, but has integration with Azure features like application insights + */ + error(...args: any[]): void; + + /** + * The retry context of the current function execution if the retry policy is defined + */ + retryContext?: RetryContext; + + /** + * TraceContext information to enable distributed tracing scenarios + */ + traceContext?: TraceContext; + + /** + * Metadata about the trigger or undefined if the metadata is already represented elsewhere + * For example, this will be undefined for http and timer triggers because you can find that information on the request & timer object instead + */ + triggerMetadata?: TriggerMetadata; + + /** + * The options used when registering the function + * NOTE: This value may differ slightly from the original because it has been validated and defaults may have been explicitly added + */ + options: EffectiveFunctionOptions; +} + +/** + * An object used to get secondary inputs + */ +export interface InvocationContextExtraInputs { + /** + * Get a secondary storage blob entry input for this invocation + * @input the configuration object for this storage blob input + */ + get(input: StorageBlobInput): unknown; + + /** + * Get a secondary table input for this invocation + * @input the configuration object for this table input + */ + get(input: TableInput): unknown; + + /** + * Get a secondary Cosmos DB documents input for this invocation + * @input the configuration object for this Cosmos DB input + */ + get(input: CosmosDBInput): unknown; + + /** + * Get a secondary SQL items input for this invocation + * @input the configuration object for this SQL input + */ + get(input: SqlInput): unknown; + + /** + * Get a secondary MySql items input for this invocation + * @input the configuration object for this MySql input + */ + get(input: MySqlInput): unknown; + + /** + * Get a secondary generic input for this invocation + * @inputOrName the configuration object or name for this input + */ + 
get(inputOrName: FunctionInput | string): unknown; + + /** + * Set a secondary generic input for this invocation + * @inputOrName the configuration object or name for this input + * @value the input value + */ + set(inputOrName: FunctionInput | string, value: unknown): void; +} + +/** + * An object used to set secondary outputs + */ +export interface InvocationContextExtraOutputs { + /** + * Set a secondary http response output for this invocation + * @output the configuration object for this http output + * @response the http response output value + */ + set(output: HttpOutput, response: HttpResponse): void; + + /** + * Set a secondary storage blob entry output for this invocation + * @output the configuration object for this storage blob output + * @blob the blob output value + */ + set(output: StorageBlobOutput, blob: unknown): void; + + /** + * Set a secondary table output for this invocation + * @output the configuration object for this table output + * @tableEntity the table output value + */ + set(output: TableOutput, tableEntity: unknown): void; + + /** + * Set a secondary storage queue entry output for this invocation + * @output the configuration object for this storage queue output + * @queueItem the queue entry output value + */ + set(output: StorageQueueOutput, queueItem: unknown): void; + + /** + * Set a secondary Cosmos DB documents output for this invocation + * @output the configuration object for this Cosmos DB output + * @documents the output document(s) value + */ + set(output: CosmosDBOutput, documents: unknown): void; + + /** + * Set a secondary SQL items output for this invocation + * @output the configuration object for this SQL output + * @documents the output item(s) value + */ + set(output: SqlOutput, items: unknown): void; + + /** + * Set a secondary Service Bus queue output for this invocation + * @output the configuration object for this Service Bus output + * @message the output message(s) value + */ + set(output: 
ServiceBusQueueOutput, messages: unknown): void; + + /** + * Set a secondary Service Bus topic output for this invocation + * @output the configuration object for this Service Bus output + * @message the output message(s) value + */ + set(output: ServiceBusTopicOutput, messages: unknown): void; + + /** + * Set a secondary Event Hub output for this invocation + * @output the configuration object for this EventHub output + * @message the output message(s) value + */ + set(output: EventHubOutput, messages: unknown): void; + + /** + * Set a secondary Event Grid output for this invocation + * @output the configuration object for this Event Grid output + * @message the output event(s) value + */ + set(output: EventGridOutput, events: EventGridPartialEvent | EventGridPartialEvent[]): void; + + /** + * Set a secondary MySql items output for this invocation + * @output the configuration object for this MySql output + * @documents the output item(s) value + */ + set(output: MySqlOutput, items: unknown): void; + + /** + * Set a secondary Web PubSub output for this invocation + * @output the configuration object for this Web PubSub output + * @message the output message(s) value + */ + set(output: WebPubSubOutput, messages: unknown): void; + + /** + * Set a secondary generic output for this invocation + * @outputOrName the configuration object or name for this output + * @value the output value + */ + set(outputOrName: FunctionOutput | string, value: unknown): void; + + /** + * Get a secondary generic output for this invocation + * @outputOrName the configuration object or name for this output + */ + get(outputOrName: FunctionOutput | string): unknown; +} + +/** + * Metadata related to the input that triggered your function + */ +export type TriggerMetadata = Record; + +export interface RetryContext { + /** + * Current retry count of the function executions. 
+ */ + retryCount: number; + + /** + * Max retry count is the maximum number of times an execution is retried before eventual failure. A value of -1 means to retry indefinitely. + */ + maxRetryCount: number; + + /** + * Exception that caused the retry + */ + exception?: Exception; +} + +export interface Exception { + source?: string; + + stackTrace?: string; + + message?: string; +} + +/** + * TraceContext information to enable distributed tracing scenarios + */ +export interface TraceContext { + /** + * Describes the position of the incoming request in its trace graph in a portable, fixed-length format + */ + traceParent?: string | undefined; + + /** + * Extends traceparent with vendor-specific data + */ + traceState?: string | undefined; + + /** + * Holds additional properties being sent as part of request telemetry + */ + attributes?: Record; +} + +/** + * The options used when registering the function, as passed to a specific invocation + * NOTE: This value may differ slightly from the original because it has been validated and defaults may have been explicitly added + */ +export interface EffectiveFunctionOptions { + /** + * Configuration for the primary input to the function, aka the reason it will be triggered + * This is the only input that is passed as an argument to the function handler during invocation + */ + trigger: FunctionTrigger; + + /** + * Configuration for the optional primary output of the function + * This is the main output that you should set as the return value of the function handler during invocation + */ + return?: FunctionOutput; + + /** + * Configuration for an optional set of secondary inputs + * During invocation, get these values with `context.extraInputs.get()` + */ + extraInputs: FunctionInput[]; + + /** + * Configuration for an optional set of secondary outputs + * During invocation, set these values with `context.extraOutputs.set()` + */ + extraOutputs: FunctionOutput[]; +} + +/** + * For testing purposes only. 
This will always be constructed for you when run in the context of the Azure Functions runtime + */ +export interface InvocationContextInit { + /** + * Defaults to 'unknown' if not specified + */ + invocationId?: string; + + /** + * Defaults to 'unknown' if not specified + */ + functionName?: string; + + /** + * Defaults to Node.js console if not specified + */ + logHandler?: LogHandler; + + traceContext?: TraceContext; + + retryContext?: RetryContext; + + triggerMetadata?: TriggerMetadata; + + /** + * Defaults to a trigger with 'unknown' type and name if not specified + */ + options?: Partial; +} + +export type LogHandler = (level: LogLevel, ...args: unknown[]) => void; diff --git a/node_modules/@azure/functions/types/app.d.ts b/node_modules/@azure/functions/types/app.d.ts new file mode 100644 index 00000000..db3e059d --- /dev/null +++ b/node_modules/@azure/functions/types/app.d.ts @@ -0,0 +1,202 @@ +// Copyright (c) .NET Foundation. All rights reserved. +// Licensed under the MIT License. + +import { CosmosDBFunctionOptions } from './cosmosDB'; +import { EventGridFunctionOptions } from './eventGrid'; +import { EventHubFunctionOptions } from './eventHub'; +import { GenericFunctionOptions } from './generic'; +import { HttpFunctionOptions, HttpHandler, HttpMethodFunctionOptions } from './http'; +import { McpToolFunctionOptions } from './mcpTool'; +import { MySqlFunctionOptions } from './mySql'; +import { ServiceBusQueueFunctionOptions, ServiceBusTopicFunctionOptions } from './serviceBus'; +import { SetupOptions } from './setup'; +import { SqlFunctionOptions } from './sql'; +import { StorageBlobFunctionOptions, StorageQueueFunctionOptions } from './storage'; +import { TimerFunctionOptions } from './timer'; +import { WarmupFunctionOptions } from './warmup'; +import { WebPubSubFunctionOptions } from './webpubsub'; + +/** + * Optional method to configure the behavior of your app. + * This can only be done during app startup, before invocations occur. 
+ * If called multiple times, options will be merged with the previous options specified. + */ +export declare function setup(options: SetupOptions): void; + +/** + * Registers an http function in your app that will be triggered by making a request to the function url + * @param name The name of the function. This will be the route unless a route is explicitly configured in the `HttpTriggerOptions` + * @param options Configuration options describing the inputs, outputs, and handler for this function + */ +export function http(name: string, options: HttpFunctionOptions): void; + +/** + * Registers an http function in your app that will be triggered by making a 'GET' request to the function url + * @param name The name of the function. This will be the route unless a route is explicitly configured in the `HttpTriggerOptions` + * @param handler The handler for this function + */ +export function get(name: string, handler: HttpHandler): void; + +/** + * Registers an http function in your app that will be triggered by making a 'GET' request to the function url + * @param name The name of the function. This will be the route unless a route is explicitly configured in the `HttpTriggerOptions` + * @param options Configuration options describing the inputs, outputs, and handler for this function + */ +export function get(name: string, options: HttpMethodFunctionOptions): void; + +/** + * Registers an http function in your app that will be triggered by making a 'PUT' request to the function url + * @param name The name of the function. This will be the route unless a route is explicitly configured in the `HttpTriggerOptions` + * @param handler The handler for this function + */ +export function put(name: string, handler: HttpHandler): void; + +/** + * Registers an http function in your app that will be triggered by making a 'PUT' request to the function url + * @param name The name of the function. 
This will be the route unless a route is explicitly configured in the `HttpTriggerOptions` + * @param options Configuration options describing the inputs, outputs, and handler for this function + */ +export function put(name: string, options: HttpMethodFunctionOptions): void; + +/** + * Registers an http function in your app that will be triggered by making a 'POST' request to the function url + * @param name The name of the function. This will be the route unless a route is explicitly configured in the `HttpTriggerOptions` + * @param handler The handler for this function + */ +export function post(name: string, handler: HttpHandler): void; + +/** + * Registers an http function in your app that will be triggered by making a 'POST' request to the function url + * @param name The name of the function. This will be the route unless a route is explicitly configured in the `HttpTriggerOptions` + * @param options Configuration options describing the inputs, outputs, and handler for this function + */ +export function post(name: string, options: HttpMethodFunctionOptions): void; + +/** + * Registers an http function in your app that will be triggered by making a 'PATCH' request to the function url + * @param name The name of the function. This will be the route unless a route is explicitly configured in the `HttpTriggerOptions` + * @param handler The handler for this function + */ +export function patch(name: string, handler: HttpHandler): void; + +/** + * Registers an http function in your app that will be triggered by making a 'PATCH' request to the function url + * @param name The name of the function. 
This will be the route unless a route is explicitly configured in the `HttpTriggerOptions` + * @param options Configuration options describing the inputs, outputs, and handler for this function + */ +export function patch(name: string, options: HttpMethodFunctionOptions): void; + +/** + * Registers an http function in your app that will be triggered by making a 'DELETE' request to the function url + * @param name The name of the function. This will be the route unless a route is explicitly configured in the `HttpTriggerOptions` + * @param handler The handler for this function + */ +export function deleteRequest(name: string, handler: HttpHandler): void; + +/** + * Registers an http function in your app that will be triggered by making a 'DELETE' request to the function url + * @param name The name of the function. This will be the route unless a route is explicitly configured in the `HttpTriggerOptions` + * @param options Configuration options describing the inputs, outputs, and handler for this function + */ +export function deleteRequest(name: string, options: HttpMethodFunctionOptions): void; + +/** + * Registers a timer function in your app that will be triggered on a schedule + * @param name The name of the function. The name must be unique within your app and will mostly be used for your own tracking purposes + * @param options Configuration options describing the inputs, outputs, and handler for this function + */ +export function timer(name: string, options: TimerFunctionOptions): void; + +/** + * Registers a function in your app that will be triggered whenever an item is added to a storage blob path + * @param name The name of the function. 
The name must be unique within your app and will mostly be used for your own tracking purposes + * @param options Configuration options describing the inputs, outputs, and handler for this function + */ +export function storageBlob(name: string, options: StorageBlobFunctionOptions): void; + +/** + * Registers a function in your app that will be triggered whenever an item is added to a storage queue + * @param name The name of the function. The name must be unique within your app and will mostly be used for your own tracking purposes + * @param options Configuration options describing the inputs, outputs, and handler for this function + */ +export function storageQueue(name: string, options: StorageQueueFunctionOptions): void; + +/** + * Registers a function in your app that will be triggered whenever a message is added to a service bus queue + * @param name The name of the function. The name must be unique within your app and will mostly be used for your own tracking purposes + * @param options Configuration options describing the inputs, outputs, and handler for this function + */ +export function serviceBusQueue(name: string, options: ServiceBusQueueFunctionOptions): void; + +/** + * Registers a function in your app that will be triggered whenever a message is added to a service bus topic + * @param name The name of the function. The name must be unique within your app and will mostly be used for your own tracking purposes + * @param options Configuration options describing the inputs, outputs, and handler for this function + */ +export function serviceBusTopic(name: string, options: ServiceBusTopicFunctionOptions): void; + +/** + * Registers a function in your app that will be triggered whenever a message is added to an event hub + * @param name The name of the function. 
The name must be unique within your app and will mostly be used for your own tracking purposes + * @param options Configuration options describing the inputs, outputs, and handler for this function + */ +export function eventHub(name: string, options: EventHubFunctionOptions): void; + +/** + * Registers a function in your app that will be triggered whenever an event is sent by an event grid source + * @param name The name of the function. The name must be unique within your app and will mostly be used for your own tracking purposes + * @param options Configuration options describing the inputs, outputs, and handler for this function + */ +export function eventGrid(name: string, options: EventGridFunctionOptions): void; + +/** + * Registers a Cosmos DB function in your app that will be triggered whenever inserts and updates occur (not deletions) + * @param name The name of the function. The name must be unique within your app and will mostly be used for your own tracking purposes + * @param options Configuration options describing the inputs, outputs, and handler for this function + */ +export function cosmosDB(name: string, options: CosmosDBFunctionOptions): void; + +/** + * Registers a function in your app that will be triggered when an instance is added to scale a running function app. + * The warmup trigger is only called during scale-out operations, not during restarts or other non-scale startups. + * Make sure your logic can load all required dependencies without relying on the warmup trigger. + * Lazy loading is a good pattern to achieve this goal. + * The warmup trigger isn't available to apps running on the Consumption plan. + * For more information, please see the [Azure Functions warmup trigger documentation](https://learn.microsoft.com/azure/azure-functions/functions-bindings-warmup?tabs=isolated-process&pivots=programming-language-javascript). + * @param name The name of the function. 
The name must be unique within your app and will mostly be used for your own tracking purposes + * @param options Configuration options describing the inputs, outputs, and handler for this function + */ +export function warmup(name: string, options: WarmupFunctionOptions): void; + +/** + * Registers a SQL function in your app that will be triggered when a row is created, updated, or deleted + * @param name The name of the function. The name must be unique within your app and will mostly be used for your own tracking purposes + * @param options Configuration options describing the inputs, outputs, and handler for this function + */ +export function sql(name: string, options: SqlFunctionOptions): void; + +/** + * Registers a MySql function in your app that will be triggered when a row is created or updated + * @param name The name of the function. The name must be unique within your app and will mostly be used for your own tracking purposes + * @param options Configuration options describing the inputs, outputs, and handler for this function + */ +export function mySql(name: string, options: MySqlFunctionOptions): void; + +/** + * Registers a generic function in your app that will be triggered based on the type specified in `options.trigger.type` + * Use this method if your desired trigger type does not already have its own method + * @param name The name of the function. The name must be unique within your app and will mostly be used for your own tracking purposes + * @param options Configuration options describing the inputs, outputs, and handler for this function + */ +export function generic(name: string, options: GenericFunctionOptions): void; + +/** + * Registers a WebPubSub function in your app that will be triggered by WebPubSub events + * @param name The name of the function. 
The name must be unique within your app and will mostly be used for your own tracking purposes + * @param options Configuration options describing the inputs, outputs, and handler for this function + */ +export function webPubSub(name: string, options: WebPubSubFunctionOptions): void; + +export function mcpTool(name: string, options: McpToolFunctionOptions): void; + +export * as hook from './hooks/registerHook'; diff --git a/node_modules/@azure/functions/types/cosmosDB.d.ts b/node_modules/@azure/functions/types/cosmosDB.d.ts new file mode 100644 index 00000000..8aecad9c --- /dev/null +++ b/node_modules/@azure/functions/types/cosmosDB.d.ts @@ -0,0 +1,36 @@ +// Copyright (c) .NET Foundation. All rights reserved. +// Licensed under the MIT License. + +import { + CosmosDBv3FunctionOptions, + CosmosDBv3Handler, + CosmosDBv3Input, + CosmosDBv3InputOptions, + CosmosDBv3Output, + CosmosDBv3OutputOptions, + CosmosDBv3Trigger, + CosmosDBv3TriggerOptions, +} from './cosmosDB.v3'; +import { + CosmosDBv4FunctionOptions, + CosmosDBv4Handler, + CosmosDBv4Input, + CosmosDBv4InputOptions, + CosmosDBv4Output, + CosmosDBv4OutputOptions, + CosmosDBv4Trigger, + CosmosDBv4TriggerOptions, +} from './cosmosDB.v4'; + +export type CosmosDBHandler = CosmosDBv3Handler | CosmosDBv4Handler; + +export type CosmosDBFunctionOptions = CosmosDBv3FunctionOptions | CosmosDBv4FunctionOptions; + +export type CosmosDBInputOptions = CosmosDBv3InputOptions | CosmosDBv4InputOptions; +export type CosmosDBInput = CosmosDBv3Input | CosmosDBv4Input; + +export type CosmosDBTriggerOptions = CosmosDBv3TriggerOptions | CosmosDBv4TriggerOptions; +export type CosmosDBTrigger = CosmosDBv3Trigger | CosmosDBv4Trigger; + +export type CosmosDBOutputOptions = CosmosDBv3OutputOptions | CosmosDBv4OutputOptions; +export type CosmosDBOutput = CosmosDBv3Output | CosmosDBv4Output; diff --git a/node_modules/@azure/functions/types/cosmosDB.v3.d.ts b/node_modules/@azure/functions/types/cosmosDB.v3.d.ts new file mode 100644 index 
00000000..4fec1f16 --- /dev/null +++ b/node_modules/@azure/functions/types/cosmosDB.v3.d.ts @@ -0,0 +1,216 @@ +// Copyright (c) .NET Foundation. All rights reserved. +// Licensed under the MIT License. + +import { FunctionInput, FunctionOptions, FunctionOutput, FunctionResult, FunctionTrigger, RetryOptions } from './index'; +import { InvocationContext } from './InvocationContext'; + +export type CosmosDBv3Handler = (documents: unknown[], context: InvocationContext) => FunctionResult; + +export interface CosmosDBv3FunctionOptions extends CosmosDBv3TriggerOptions, Partial { + handler: CosmosDBv3Handler; + + trigger?: CosmosDBv3Trigger; + + /** + * An optional retry policy to rerun a failed execution until either successful completion occurs or the maximum number of retries is reached. + * Learn more [here](https://learn.microsoft.com/azure/azure-functions/functions-bindings-error-pages) + */ + retry?: RetryOptions; +} + +export interface CosmosDBv3InputOptions { + /** + * An app setting (or environment variable) with the Cosmos DB connection string + */ + connectionStringSetting: string; + + /** + * The name of the Azure Cosmos DB database with the collection being monitored + */ + databaseName: string; + + /** + * The name of the collection being monitored + */ + collectionName: string; + + /** + * Specifies the partition key value for the lookup. May include binding parameters. It is required for lookups in partitioned collections + */ + partitionKey?: string; + + /** + * The ID of the document to retrieve. This property supports [binding expressions](https://docs.microsoft.com/azure/azure-functions/functions-bindings-expressions-patterns). + * Don't set both the id and sqlQuery properties. If you don't set either one, the entire collection is retrieved. + */ + id?: string; + + /** + * An Azure Cosmos DB SQL query used for retrieving multiple documents. 
The property supports runtime bindings, as in this example: + * `SELECT * FROM c where c.departmentId = {departmentId}` + * Don't set both the id and sqlQuery properties. If you don't set either one, the entire collection is retrieved. + */ + sqlQuery?: string; + + /** + * Defines preferred locations (regions) for geo-replicated database accounts in the Azure Cosmos DB service. + * Values should be comma-separated. For example, East US,South Central US,North Europe + */ + preferredLocations?: string; +} +export type CosmosDBv3Input = FunctionInput & CosmosDBv3InputOptions; + +export interface CosmosDBv3TriggerOptions { + /** + * An app setting (or environment variable) with the Cosmos DB connection string + */ + connectionStringSetting: string; + + /** + * The name of the Azure Cosmos DB database with the collection being monitored + */ + databaseName: string; + + /** + * The name of the collection being monitored + */ + collectionName: string; + + /** + * The name of an app setting that contains the connection string to the service which holds the lease collection. + * If not set it will connect to the service defined by `connectionStringSetting` + */ + leaseConnectionStringSetting?: string; + + /** + * The name of the database that holds the collection to store leases. If not set, it will use the value of `databaseName` + */ + leaseDatabaseName?: string; + + /** + * The name of the collection to store leases. If not set, it will use "leases" + */ + leaseCollectionName?: string; + + /** + * Checks for existence and automatically creates the leases collection. Default is `false` + */ + createLeaseCollectionIfNotExists?: boolean; + + /** + * When `createLeaseCollectionIfNotExists` is set to `true`, defines the amount of Request Units to assign to the created lease collection + */ + leaseCollectionThroughput?: number; + + /** + * When set, the value is added as a prefix to the leases created in the Lease collection for this function. 
+ * Using a prefix allows two separate Azure Functions to share the same Lease collection by using different prefixes. + */ + leaseCollectionPrefix?: string; + + /** + * The time (in milliseconds) for the delay between polling a partition for new changes on the feed, after all current changes are drained. + * Default is 5,000 milliseconds, or 5 seconds. + */ + feedPollDelay?: number; + + /** + * When set, it defines, in milliseconds, the interval to kick off a task to compute if partitions are distributed evenly among known host instances. + * Default is 13000 (13 seconds). + */ + leaseAcquireInterval?: number; + + /** + * When set, it defines, in milliseconds, the interval for which the lease is taken on a lease representing a partition. + * If the lease is not renewed within this interval, it will cause it to expire and ownership of the partition will move to another instance. + * Default is 60000 (60 seconds). + */ + leaseExpirationInterval?: number; + + /** + * When set, it defines, in milliseconds, the renew interval for all leases for partitions currently held by an instance. + * Default is 17000 (17 seconds). + */ + leaseRenewInterval?: number; + + /** + * When set, it defines, in milliseconds, the interval between lease checkpoints. Default is always after each Function call. + */ + checkpointInterval?: number; + + /** + * Customizes the amount of documents between lease checkpoints. Default is after every function call. + */ + checkpointDocumentCount?: number; + + /** + * When set, this property sets the maximum number of items received per Function call. + * If operations in the monitored container are performed through stored procedures, transaction scope is preserved when reading items from the change feed. + * As a result, the number of items received could be higher than the specified value so that the items changed by the same transaction are returned as part of one atomic batch. 
+ */ + maxItemsPerInvocation?: number; + + /** + * This option tells the Trigger to read changes from the beginning of the container's change history instead of starting at the current time. + * Reading from the beginning only works the first time the trigger starts, as in subsequent runs, the checkpoints are already stored. + * Setting this option to true when there are leases already created has no effect. + */ + startFromBeginning?: boolean; + + /** + * Defines preferred locations (regions) for geo-replicated database accounts in the Azure Cosmos DB service. + * Values should be comma-separated. For example, East US,South Central US,North Europe + */ + preferredLocations?: string; + + /** + * Enables multi-region accounts for writing to the leases collection. + */ + useMultipleWriteLocations?: boolean; +} +export type CosmosDBv3Trigger = FunctionTrigger & CosmosDBv3TriggerOptions; + +export interface CosmosDBv3OutputOptions { + /** + * An app setting (or environment variable) with the Cosmos DB connection string + */ + connectionStringSetting: string; + + /** + * The name of the Azure Cosmos DB database with the collection being monitored + */ + databaseName: string; + + /** + * The name of the collection being monitored + */ + collectionName: string; + + /** + * A boolean value to indicate whether the collection is created when it doesn't exist. + * The default is false because new collections are created with reserved throughput, which has cost implications. For more information, see the [pricing page](https://azure.microsoft.com/pricing/details/cosmos-db/). + */ + createIfNotExists?: boolean; + + /** + * When `createIfNotExists` is true, it defines the partition key path for the created collection. May include binding parameters. 
+ */ + partitionKey?: string; + + /** + * When createIfNotExists is true, it defines the [throughput](https://docs.microsoft.com/azure/cosmos-db/set-throughput) of the created collection + */ + collectionThroughput?: number; + + /** + * Defines preferred locations (regions) for geo-replicated database accounts in the Azure Cosmos DB service. + * Values should be comma-separated. For example, East US,South Central US,North Europe + */ + preferredLocations?: string; + + /** + * When set to true along with preferredLocations, supports multi-region writes in the Azure Cosmos DB service. + */ + useMultipleWriteLocations?: boolean; +} +export type CosmosDBv3Output = FunctionOutput & CosmosDBv3OutputOptions; diff --git a/node_modules/@azure/functions/types/cosmosDB.v4.d.ts b/node_modules/@azure/functions/types/cosmosDB.v4.d.ts new file mode 100644 index 00000000..f62162c5 --- /dev/null +++ b/node_modules/@azure/functions/types/cosmosDB.v4.d.ts @@ -0,0 +1,203 @@ +// Copyright (c) .NET Foundation. All rights reserved. +// Licensed under the MIT License. + +import { FunctionInput, FunctionOptions, FunctionOutput, FunctionResult, FunctionTrigger, RetryOptions } from './index'; +import { InvocationContext } from './InvocationContext'; + +export type CosmosDBv4Handler = (documents: unknown[], context: InvocationContext) => FunctionResult; + +export interface CosmosDBv4FunctionOptions extends CosmosDBv4TriggerOptions, Partial { + handler: CosmosDBv4Handler; + + trigger?: CosmosDBv4Trigger; + + /** + * An optional retry policy to rerun a failed execution until either successful completion occurs or the maximum number of retries is reached. 
+ * Learn more [here](https://learn.microsoft.com/azure/azure-functions/functions-bindings-error-pages) + */ + retry?: RetryOptions; +} + +export interface CosmosDBv4InputOptions { + /** + * An app setting (or environment variable) with the Cosmos DB connection string + */ + connection: string; + + /** + * The name of the Azure Cosmos DB database with the container being monitored + */ + databaseName: string; + + /** + * The name of the container being monitored + */ + containerName: string; + + /** + * Specifies the partition key value for the lookup. May include binding parameters. It is required for lookups in partitioned containers + */ + partitionKey?: string; + + /** + * The ID of the document to retrieve. This property supports [binding expressions](https://docs.microsoft.com/azure/azure-functions/functions-bindings-expressions-patterns). + * Don't set both the id and sqlQuery properties. If you don't set either one, the entire container is retrieved. + */ + id?: string; + + /** + * An Azure Cosmos DB SQL query used for retrieving multiple documents. The property supports runtime bindings, as in this example: + * `SELECT * FROM c where c.departmentId = {departmentId}` + * Don't set both the id and sqlQuery properties. If you don't set either one, the entire container is retrieved. + */ + sqlQuery?: string; + + /** + * Defines preferred locations (regions) for geo-replicated database accounts in the Azure Cosmos DB service. + * Values should be comma-separated. 
For example, East US,South Central US,North Europe + */ + preferredLocations?: string; +} +export type CosmosDBv4Input = FunctionInput & CosmosDBv4InputOptions; + +export interface CosmosDBv4TriggerOptions { + /** + * An app setting (or environment variable) with the Cosmos DB connection string + */ + connection: string; + + /** + * The name of the Azure Cosmos DB database with the container being monitored + */ + databaseName: string; + + /** + * The name of the container being monitored + */ + containerName: string; + + /** + * The name of an app setting that contains the connection string to the service which holds the lease container. + * If not set it will connect to the service defined by `connection` + */ + leaseConnection?: string; + + /** + * The name of the database that holds the container to store leases. If not set, it will use the value of `databaseName` + */ + leaseDatabaseName?: string; + + /** + * The name of the container to store leases. If not set, it will use "leases" + */ + leaseContainerName?: string; + + /** + * Checks for existence and automatically creates the leases container. Default is `false` + */ + createLeaseContainerIfNotExists?: boolean; + + /** + * When `createLeaseContainerIfNotExists` is set to `true`, defines the amount of Request Units to assign to the created lease container + */ + leasesContainerThroughput?: number; + + /** + * When set, the value is added as a prefix to the leases created in the Lease container for this function. + * Using a prefix allows two separate Azure Functions to share the same Lease container by using different prefixes. + */ + leaseContainerPrefix?: string; + + /** + * The time (in milliseconds) for the delay between polling a partition for new changes on the feed, after all current changes are drained. + * Default is 5,000 milliseconds, or 5 seconds. 
+ */ + feedPollDelay?: number; + + /** + * When set, it defines, in milliseconds, the interval to kick off a task to compute if partitions are distributed evenly among known host instances. + * Default is 13000 (13 seconds). + */ + leaseAcquireInterval?: number; + + /** + * When set, it defines, in milliseconds, the interval for which the lease is taken on a lease representing a partition. + * If the lease is not renewed within this interval, it will cause it to expire and ownership of the partition will move to another instance. + * Default is 60000 (60 seconds). + */ + leaseExpirationInterval?: number; + + /** + * When set, it defines, in milliseconds, the renew interval for all leases for partitions currently held by an instance. + * Default is 17000 (17 seconds). + */ + leaseRenewInterval?: number; + + /** + * When set, this property sets the maximum number of items received per Function call. + * If operations in the monitored container are performed through stored procedures, transaction scope is preserved when reading items from the change feed. + * As a result, the number of items received could be higher than the specified value so that the items changed by the same transaction are returned as part of one atomic batch. + */ + maxItemsPerInvocation?: number; + + /** + * This option tells the Trigger to read changes from the beginning of the container's change history instead of starting at the current time. + * Reading from the beginning only works the first time the trigger starts, as in subsequent runs, the checkpoints are already stored. + * Setting this option to true when there are leases already created has no effect. + */ + startFromBeginning?: boolean; + + /** + * Gets or sets the date and time from which to initialize the change feed read operation. + * The recommended format is ISO 8601 with the UTC designator, such as 2021-02-16T14:19:29Z. + * This is only used to set the initial trigger state. 
After the trigger has a lease state, changing this value has no effect. + */ + startFromTime?: string; + + /** + * Defines preferred locations (regions) for geo-replicated database accounts in the Azure Cosmos DB service. + * Values should be comma-separated. For example, East US,South Central US,North Europe + */ + preferredLocations?: string; +} +export type CosmosDBv4Trigger = FunctionTrigger & CosmosDBv4TriggerOptions; + +export interface CosmosDBv4OutputOptions { + /** + * An app setting (or environment variable) with the Cosmos DB connection string + */ + connection: string; + + /** + * The name of the Azure Cosmos DB database with the collection being monitored + */ + databaseName: string; + + /** + * The name of the collection being monitored + */ + containerName: string; + + /** + * A boolean value to indicate whether the collection is created when it doesn't exist. + * The default is false because new collections are created with reserved throughput, which has cost implications. For more information, see the [pricing page](https://azure.microsoft.com/pricing/details/cosmos-db/). + */ + createIfNotExists?: boolean; + + /** + * When `createIfNotExists` is true, it defines the partition key path for the created collection. May include binding parameters. + */ + partitionKey?: string; + + /** + * When createIfNotExists is true, it defines the [throughput](https://docs.microsoft.com/azure/cosmos-db/set-throughput) of the created collection + */ + containerThroughput?: number; + + /** + * Defines preferred locations (regions) for geo-replicated database accounts in the Azure Cosmos DB service. + * Values should be comma-separated. 
For example, East US,South Central US,North Europe + */ + preferredLocations?: string; +} +export type CosmosDBv4Output = FunctionOutput & CosmosDBv4OutputOptions; diff --git a/node_modules/@azure/functions/types/eventGrid.d.ts b/node_modules/@azure/functions/types/eventGrid.d.ts new file mode 100644 index 00000000..039bc62e --- /dev/null +++ b/node_modules/@azure/functions/types/eventGrid.d.ts @@ -0,0 +1,109 @@ +// Copyright (c) .NET Foundation. All rights reserved. +// Licensed under the MIT License. + +import { FunctionOptions, FunctionOutput, FunctionResult, FunctionTrigger } from './index'; +import { InvocationContext } from './InvocationContext'; + +export type EventGridHandler = (event: EventGridEvent, context: InvocationContext) => FunctionResult; + +export interface EventGridFunctionOptions extends EventGridTriggerOptions, Partial { + handler: EventGridHandler; + + trigger?: EventGridTrigger; +} + +/** + * At this point in time there are no event grid trigger-specific options + */ +export interface EventGridTriggerOptions {} +export type EventGridTrigger = FunctionTrigger & EventGridTriggerOptions; + +export interface EventGridOutputKeyOptions { + /** + * An app setting (or environment variable) that contains the URI for the custom topic + */ + topicEndpointUri: string; + + /** + * An app setting (or environment variable) that contains an access key for the custom topic + */ + topicKeySetting: string; +} +export interface EventGridOutputConnectionOptions { + /** + * The value of the common prefix for the app setting that contains the `topicEndpointUri`. + * When setting the `connection` property, the `topicEndpointUri` and `topicKeySetting` properties should NOT be set. 
+ */ + connection: string; +} +export type EventGridOutputOptions = EventGridOutputKeyOptions | EventGridOutputConnectionOptions; +export type EventGridOutput = FunctionOutput & EventGridOutputOptions; + +/** + * [Link to docs and examples](https://docs.microsoft.com/azure/event-grid/event-schema) + * This "partial" interface is meant to be used when creating an event yourself and allows some properties to be left out + */ +export interface EventGridPartialEvent { + /** + * Full resource path to the event source. This field isn't writeable. Event Grid provides this value + * If included, must match the Event Grid topic Azure Resource Manager ID exactly. If not included, Event Grid will stamp onto the event. + */ + topic?: string; + + /** + * Publisher-defined path to the event subject + */ + subject: string; + + /** + * One of the registered event types for this event source + */ + eventType: string; + + /** + * The time the event is generated based on the provider's UTC time + */ + eventTime: string; + + /** + * Unique identifier for the event + */ + id: string; + + /** + * Event data specific to the resource provider + */ + data?: Record; + + /** + * The schema version of the data object. The publisher defines the schema version. + * If not included, will be stamped with an empty value + */ + dataVersion?: string; + + /** + * The schema version of the event metadata. Event Grid defines the schema of the top-level properties. Event Grid provides this value. + * If included, must match the Event Grid Schema `metadataVersion` exactly (currently, only 1). If not included, Event Grid will stamp onto the event. + */ + metadataVersion?: string; +} + +/** + * [Link to docs and examples](https://docs.microsoft.com/azure/event-grid/event-schema) + */ +export interface EventGridEvent extends EventGridPartialEvent { + /** + * Full resource path to the event source. This field isn't writeable. 
Event Grid provides this value + */ + topic: string; + + /** + * The schema version of the data object. The publisher defines the schema version. + */ + dataVersion: string; + + /** + * The schema version of the event metadata. Event Grid defines the schema of the top-level properties. Event Grid provides this value. + */ + metadataVersion: string; +} diff --git a/node_modules/@azure/functions/types/eventHub.d.ts b/node_modules/@azure/functions/types/eventHub.d.ts new file mode 100644 index 00000000..2e1ddcb7 --- /dev/null +++ b/node_modules/@azure/functions/types/eventHub.d.ts @@ -0,0 +1,55 @@ +// Copyright (c) .NET Foundation. All rights reserved. +// Licensed under the MIT License. + +import { FunctionOptions, FunctionOutput, FunctionResult, FunctionTrigger, RetryOptions } from './index'; +import { InvocationContext } from './InvocationContext'; + +export type EventHubHandler = (messages: unknown, context: InvocationContext) => FunctionResult; + +export interface EventHubFunctionOptions extends EventHubTriggerOptions, Partial { + handler: EventHubHandler; + + trigger?: EventHubTrigger; + + /** + * An optional retry policy to rerun a failed execution until either successful completion occurs or the maximum number of retries is reached. + * Learn more [here](https://learn.microsoft.com/azure/azure-functions/functions-bindings-error-pages) + */ + retry?: RetryOptions; +} + +export interface EventHubTriggerOptions { + /** + * An app setting (or environment variable) with the event hub connection string + */ + connection: string; + + /** + * The name of the event hub. When the event hub name is also present in the connection string, that value overrides this property at runtime. + */ + eventHubName: string; + + /** + * Set to `many` in order to enable batching. If omitted or set to `one`, a single message is passed to the function. 
+ */ + cardinality?: 'many' | 'one'; + + /** + * An optional property that sets the [consumer group](https://docs.microsoft.com/azure/event-hubs/event-hubs-features#event-consumers) used to subscribe to events in the hub. If omitted, the `$Default` consumer group is used. + */ + consumerGroup?: string; +} +export type EventHubTrigger = FunctionTrigger & EventHubTriggerOptions; + +export interface EventHubOutputOptions { + /** + * An app setting (or environment variable) with the event hub connection string + */ + connection: string; + + /** + * The name of the event hub. When the event hub name is also present in the connection string, that value overrides this property at runtime. + */ + eventHubName: string; +} +export type EventHubOutput = FunctionOutput & EventHubOutputOptions; diff --git a/node_modules/@azure/functions/types/generic.d.ts b/node_modules/@azure/functions/types/generic.d.ts new file mode 100644 index 00000000..faa1b420 --- /dev/null +++ b/node_modules/@azure/functions/types/generic.d.ts @@ -0,0 +1,24 @@ +// Copyright (c) .NET Foundation. All rights reserved. +// Licensed under the MIT License. + +import { FunctionOptions, RetryOptions } from './index'; + +export interface GenericFunctionOptions extends FunctionOptions { + /** + * An optional retry policy to rerun a failed execution until either successful completion occurs or the maximum number of retries is reached. 
+ * Learn more [here](https://learn.microsoft.com/azure/azure-functions/functions-bindings-error-pages) + */ + retry?: RetryOptions; +} + +export interface GenericTriggerOptions extends Record { + type: string; +} + +export interface GenericInputOptions extends Record { + type: string; +} + +export interface GenericOutputOptions extends Record { + type: string; +} diff --git a/node_modules/@azure/functions/types/hooks/HookContext.d.ts b/node_modules/@azure/functions/types/hooks/HookContext.d.ts new file mode 100644 index 00000000..9ef8e664 --- /dev/null +++ b/node_modules/@azure/functions/types/hooks/HookContext.d.ts @@ -0,0 +1,27 @@ +// Copyright (c) .NET Foundation. All rights reserved. +// Licensed under the MIT License. + +/** + * Base class for all hook context objects + */ +export declare class HookContext { + /** + * For testing purposes only. This will always be constructed for you when run in the context of the Azure Functions runtime + */ + constructor(init?: HookContextInit); + + /** + * The recommended place to store and share data between hooks in the same scope (app-level vs invocation-level). + * You should use a unique property name so that it doesn't conflict with other hooks' data. + * This object is readonly. You may modify it, but attempting to overwrite it will throw an error + */ + readonly hookData: Record; +} + +/** + * Base interface for objects passed to HookContext constructors. + * For testing purposes only. + */ +export interface HookContextInit { + hookData?: Record; +} diff --git a/node_modules/@azure/functions/types/hooks/appHooks.d.ts b/node_modules/@azure/functions/types/hooks/appHooks.d.ts new file mode 100644 index 00000000..a4e51857 --- /dev/null +++ b/node_modules/@azure/functions/types/hooks/appHooks.d.ts @@ -0,0 +1,46 @@ +// Copyright (c) .NET Foundation. All rights reserved. +// Licensed under the MIT License. 
+ +import { HookContext, HookContextInit } from './HookContext'; + +/** + * Handler for app start hooks + */ +export type AppStartHandler = (context: AppStartContext) => void | Promise; + +/** + * Context on a function app during app startup. + */ +export declare class AppStartContext extends HookContext { + /** + * For testing purposes only. This will always be constructed for you when run in the context of the Azure Functions runtime + */ + constructor(init?: AppStartContextInit); +} + +/** + * Handler for app terminate hooks + */ +export type AppTerminateHandler = (context: AppTerminateContext) => void | Promise; + +/** + * Context on a function app during app termination. + */ +export declare class AppTerminateContext extends HookContext { + /** + * For testing purposes only. This will always be constructed for you when run in the context of the Azure Functions runtime + */ + constructor(init?: AppTerminateContextInit); +} + +/** + * Object passed to AppStartContext constructors. + * For testing purposes only + */ +export interface AppStartContextInit extends HookContextInit {} + +/** + * Object passed to AppTerminateContext constructors. + * For testing purposes only + */ +export interface AppTerminateContextInit extends HookContextInit {} diff --git a/node_modules/@azure/functions/types/hooks/invocationHooks.d.ts b/node_modules/@azure/functions/types/hooks/invocationHooks.d.ts new file mode 100644 index 00000000..03aafd3d --- /dev/null +++ b/node_modules/@azure/functions/types/hooks/invocationHooks.d.ts @@ -0,0 +1,106 @@ +// Copyright (c) .NET Foundation. All rights reserved. +// Licensed under the MIT License. + +import { FunctionHandler } from '../index'; +import { InvocationContext } from '../InvocationContext'; +import { HookContext, HookContextInit } from './HookContext'; + +/** + * Handler for pre-invocation hooks. 
+ */ +export type PreInvocationHandler = (context: PreInvocationContext) => void | Promise; + +/** + * Context on a function before it executes. + */ +export declare class PreInvocationContext extends InvocationHookContext { + /** + * For testing purposes only. This will always be constructed for you when run in the context of the Azure Functions runtime + */ + constructor(init?: PreInvocationContextInit); + + /** + * The arguments passed to this specific invocation. + * Changes to this array _will_ affect the inputs passed to your function + */ + inputs: unknown[]; + + /** + * The function handler for this specific invocation. Changes to this value _will_ affect the function itself + */ + functionHandler: FunctionHandler; +} + +/** + * Handler for post-invocation hooks + */ +export type PostInvocationHandler = (context: PostInvocationContext) => void | Promise; + +/** + * Context on a function after it executes. + */ +export declare class PostInvocationContext extends InvocationHookContext { + /** + * For testing purposes only. This will always be constructed for you when run in the context of the Azure Functions runtime + */ + constructor(init?: PostInvocationContextInit); + + /** + * The arguments passed to this specific invocation. + */ + inputs: unknown[]; + + /** + * The result of the function. Changes to this value _will_ affect the overall result of the function + */ + result: unknown; + + /** + * The error thrown by the function, or null/undefined if there is no error. Changes to this value _will_ affect the overall result of the function + */ + error: unknown; +} + +/** + * Base class for all invocation hook context objects + */ +export declare class InvocationHookContext extends HookContext { + /** + * For testing purposes only. This will always be constructed for you when run in the context of the Azure Functions runtime + */ + constructor(init?: InvocationHookContextInit); + + /** + * The context object passed to the function. 
+ * This object is readonly. You may modify it, but attempting to overwrite it will throw an error + */ + readonly invocationContext: InvocationContext; +} + +/** + * Object passed to InvocationHookContext constructors. + * For testing purposes only + */ +export interface InvocationHookContextInit extends HookContextInit { + inputs?: unknown[]; + + invocationContext?: InvocationContext; +} + +/** + * Object passed to PreInvocationContext constructors. + * For testing purposes only + */ +export interface PreInvocationContextInit extends InvocationHookContextInit { + functionCallback?: FunctionHandler; +} + +/** + * Object passed to PostInvocationContext constructors. + * For testing purposes only + */ +export interface PostInvocationContextInit extends InvocationHookContextInit { + result?: unknown; + + error?: unknown; +} diff --git a/node_modules/@azure/functions/types/hooks/logHooks.d.ts b/node_modules/@azure/functions/types/hooks/logHooks.d.ts new file mode 100644 index 00000000..a223c598 --- /dev/null +++ b/node_modules/@azure/functions/types/hooks/logHooks.d.ts @@ -0,0 +1,58 @@ +// Copyright (c) .NET Foundation. All rights reserved. +// Licensed under the MIT License. + +import { LogLevel } from '../index'; +import { InvocationContext } from '../InvocationContext'; +import { HookContext, HookContextInit } from './HookContext'; + +/** + * Handler for log hooks. + */ +export type LogHookHandler = (context: LogHookContext) => void; + +/** + * Context on a log + */ +export declare class LogHookContext extends HookContext { + /** + * For testing purposes only. This will always be constructed for you when run in the context of the Azure Functions runtime + */ + constructor(init?: LogHookContextInit); + + /** + * If the log occurs during a function execution, the context object passed to the function handler. + * Otherwise, undefined. 
+ */ + readonly invocationContext: InvocationContext | undefined; + + /** + * 'system' if the log is generated by Azure Functions, 'user' if the log is generated by your own app. + */ + readonly category: LogCategory; + + /** + * Changes to this value _will_ affect the resulting log, but only for user-generated logs. + */ + level: LogLevel; + + /** + * Changes to this value _will_ affect the resulting log, but only for user-generated logs. + */ + message: string; +} + +/** + * Object passed to LogHookContext constructors. + * For testing purposes only + */ +export interface LogHookContextInit extends HookContextInit { + invocationContext?: InvocationContext; + + level?: LogLevel; + + category?: LogCategory; + + message?: string; +} + +export type LogCategory = 'user' | 'system' | 'customMetric'; diff --git a/node_modules/@azure/functions/types/hooks/registerHook.d.ts b/node_modules/@azure/functions/types/hooks/registerHook.d.ts new file mode 100644 index 00000000..c736687b --- /dev/null +++ b/node_modules/@azure/functions/types/hooks/registerHook.d.ts @@ -0,0 +1,50 @@ +// Copyright (c) .NET Foundation. All rights reserved. +// Licensed under the MIT License. + +import { Disposable } from '../index'; +import { AppStartHandler, AppTerminateHandler } from './appHooks'; +import { PostInvocationHandler, PreInvocationHandler } from './invocationHooks'; +import { LogHookHandler } from './logHooks'; + +/** + * Register a hook to be run at the start of your application + * + * @param handler the handler for the hook + * @returns a `Disposable` object that can be used to unregister the hook + */ +export function appStart(handler: AppStartHandler): Disposable; + +/** + * Register a hook to be run during graceful shutdown of your application. + * This hook will not be executed if your application is terminated forcefully. + * Hooks have a limited time to execute during the termination grace period. 
+ * + * @param handler the handler for the hook + * @returns a `Disposable` object that can be used to unregister the hook + */ +export function appTerminate(handler: AppTerminateHandler): Disposable; + +/** + * Register a hook to be run before a function is invoked. + * + * @param handler the handler for the hook + * @returns a `Disposable` object that can be used to unregister the hook + */ +export function preInvocation(handler: PreInvocationHandler): Disposable; + +/** + * Register a hook to be run after a function is invoked. + * + * @param handler the handler for the hook + * @returns a `Disposable` object that can be used to unregister the hook + */ +export function postInvocation(handler: PostInvocationHandler): Disposable; + +/** + * PREVIEW: Register a hook to be run for each log. + * This functionality requires Azure Functions Host v4.34+. + * + * @param handler the handler for the hook + * @returns a `Disposable` object that can be used to unregister the hook + */ +export function log(handler: LogHookHandler): Disposable; diff --git a/node_modules/@azure/functions/types/http.d.ts b/node_modules/@azure/functions/types/http.d.ts new file mode 100644 index 00000000..e918ef1f --- /dev/null +++ b/node_modules/@azure/functions/types/http.d.ts @@ -0,0 +1,386 @@ +// Copyright (c) .NET Foundation. All rights reserved. +// Licensed under the MIT License. 
+ +import { Blob } from 'buffer'; +import { ReadableStream } from 'stream/web'; +import { BodyInit, FormData, Headers, HeadersInit } from 'undici'; +import { URLSearchParams } from 'url'; +import { FunctionOptions, FunctionOutput, FunctionResult, FunctionTrigger } from './index'; +import { InvocationContext } from './InvocationContext'; + +export type HttpHandler = ( + request: HttpRequest, + context: InvocationContext +) => FunctionResult; + +export interface HttpFunctionOptions extends HttpTriggerOptions, Partial { + handler: HttpHandler; + + trigger?: HttpTrigger; + + /** + * Configuration for the optional primary output of the function. If not set, this will default to a standard http response output + * This is the main output that you should set as the return value of the function handler during invocation + */ + return?: FunctionOutput; +} + +export type HttpMethodFunctionOptions = Omit; + +export interface HttpTriggerOptions { + /** + * The function HTTP authorization level + * Defaults to 'anonymous' if not specified + */ + authLevel?: 'anonymous' | 'function' | 'admin'; + + /** + * An array of the http methods for this http input + * Defaults to ["get", "post"] if not specified + */ + methods?: HttpMethod[]; + + /** + * The route for this http input. If not specified, the function name will be used + */ + route?: string; +} + +export interface HttpTrigger extends FunctionTrigger { + /** + * The function HTTP authorization level. + */ + authLevel: 'anonymous' | 'function' | 'admin'; + + /** + * An array of the http methods for this http input + */ + methods: HttpMethod[]; + + /** + * The route for this http input. If not specified, the function name will be used + */ + route?: string; +} + +/** + * At this point in time there are no http output specific options + */ +export interface HttpOutputOptions {} + +export type HttpOutput = FunctionOutput & HttpOutputOptions; + +/** + * HTTP request object. Provided to your function when using HTTP Bindings. 
+ */ +export declare class HttpRequest { + /** + * For testing purposes only. This will always be constructed for you when run in the context of the Azure Functions runtime + */ + constructor(httpRequestInit: HttpRequestInit); + + /** + * HTTP request method used to invoke this function. + */ + readonly method: string; + + /** + * Request URL. + */ + readonly url: string; + + /** + * HTTP request headers. + */ + readonly headers: Headers; + + /** + * Query string parameter keys and values from the URL. + */ + readonly query: URLSearchParams; + + /** + * Route parameter keys and values. + */ + readonly params: HttpRequestParams; + + /** + * Object representing logged-in user, either through + * AppService/Functions authentication, or SWA Authentication + * null when no such user is logged in. + */ + readonly user: HttpRequestUser | null; + + /** + * Returns the body as a ReadableStream + */ + readonly body: ReadableStream | null; + + /** + * Returns whether the body has been read from + */ + readonly bodyUsed: boolean; + + /** + * Returns a promise fulfilled with the body as an ArrayBuffer + */ + readonly arrayBuffer: () => Promise; + + /** + * Returns a promise fulfilled with the body as a Blob + */ + readonly blob: () => Promise; + + /** + * Returns a promise fulfilled with the body as FormData + */ + readonly formData: () => Promise; + + /** + * Returns a promise fulfilled with the body parsed as JSON + */ + readonly json: () => Promise; + + /** + * Returns a promise fulfilled with the body as a string + */ + readonly text: () => Promise; + + /** + * Creates a copy of the request object, with special handling of the body. + * [Learn more here](https://developer.mozilla.org/docs/Web/API/Request/clone) + */ + readonly clone: () => HttpRequest; +} + +/** + * Route parameter keys and values. 
+ */ +export type HttpRequestParams = Record; + +/** + * Object representing logged-in user, either through + * AppService/Functions authentication, or SWA Authentication + */ +export interface HttpRequestUser { + /** + * Type of authentication, either AppService or StaticWebApps + */ + type: HttpRequestUserType; + + /** + * unique user GUID + */ + id: string; + + /** + * unique username + */ + username: string; + + /** + * provider of authentication service + */ + identityProvider: string; + + /** + * Extra authentication information, dependent on auth type + * and auth provider + */ + claimsPrincipalData: Record; +} + +/** + * Possible values for an HTTP request method. + */ +export type HttpMethod = 'GET' | 'POST' | 'DELETE' | 'HEAD' | 'PATCH' | 'PUT' | 'OPTIONS' | 'TRACE' | 'CONNECT'; + +/** + * Possible values for an HTTP Request user type + */ +export type HttpRequestUserType = 'AppService' | 'StaticWebApps'; + +export interface HttpResponseInit { + /** + * HTTP response body + */ + body?: BodyInit; + + /** + * A JSON-serializable HTTP Response body. + * If set, the `HttpResponseInit.body` property will be ignored in favor of this property + */ + jsonBody?: any; + + /** + * HTTP response status code + * @default 200 + */ + status?: number; + + /** + * HTTP response headers + */ + headers?: HeadersInit; + + /** + * HTTP response cookies + */ + cookies?: Cookie[]; + + /** + * Enable content negotiation of response body if true + * If false, treat response body as raw + * @default false + */ + enableContentNegotiation?: boolean; +} + +/** + * HTTP response class + */ +export declare class HttpResponse { + constructor(responseInit?: HttpResponseInit); + + /** + * HTTP response status code + * @default 200 + */ + readonly status: number; + + /** + * HTTP response headers. 
+ */ + readonly headers: Headers; + + /** + * HTTP response cookies + */ + readonly cookies: Cookie[]; + + /** + * Enable content negotiation of response body if true + * If false, treat response body as raw + * @default false + */ + readonly enableContentNegotiation: boolean; + + /** + * Returns the body as a ReadableStream + */ + readonly body: ReadableStream | null; + + /** + * Returns whether the body has been read from + */ + readonly bodyUsed: boolean; + + /** + * Returns a promise fulfilled with the body as an ArrayBuffer + */ + readonly arrayBuffer: () => Promise; + + /** + * Returns a promise fulfilled with the body as a Blob + */ + readonly blob: () => Promise; + + /** + * Returns a promise fulfilled with the body as FormData + */ + readonly formData: () => Promise; + + /** + * Returns a promise fulfilled with the body parsed as JSON + */ + readonly json: () => Promise; + + /** + * Returns a promise fulfilled with the body as a string + */ + readonly text: () => Promise; + + /** + * Creates a copy of the response object, with special handling of the body. + * [Learn more here](https://developer.mozilla.org/docs/Web/API/Response/clone) + */ + readonly clone: () => HttpResponse; +} + +/** + * Http response cookie object to "Set-Cookie" + */ +export interface Cookie { + name: string; + + value: string; + + /** + * Specifies allowed hosts to receive the cookie + */ + domain?: string; + + /** + * Specifies URL path that must exist in the requested URL + */ + path?: string; + + /** + * NOTE: It is generally recommended that you use maxAge over expires. + * Sets the cookie to expire at a specific date instead of when the client closes. + * This can be a Javascript Date or Unix time in milliseconds. 
+ */ + expires?: Date | number; + + /** + * Sets the cookie to only be sent with an encrypted request + */ + secure?: boolean; + + /** + * Sets the cookie to be inaccessible to JavaScript's Document.cookie API + */ + httpOnly?: boolean; + + /** + * Can restrict the cookie to not be sent with cross-site requests + */ + sameSite?: 'Strict' | 'Lax' | 'None' | undefined; + + /** + * Number of seconds until the cookie expires. A zero or negative number will expire the cookie immediately. + */ + maxAge?: number; +} + +/** + * For testing purposes only. This will always be constructed for you when run in the context of the Azure Functions runtime + */ +export interface HttpRequestInit { + method?: string; + + url?: string; + + body?: HttpRequestBodyInit; + + headers?: Record; + + query?: Record; + + params?: Record; +} + +/** + * For testing purposes only. This will always be constructed for you when run in the context of the Azure Functions runtime + */ +export interface HttpRequestBodyInit { + /** + * The body as a buffer. You only need to specify one of the `bytes` or `string` properties + */ + bytes?: Uint8Array; + + /** + * The body as a string. You only need to specify one of the `bytes` or `string` properties + */ + string?: string; +} diff --git a/node_modules/@azure/functions/types/index.d.ts b/node_modules/@azure/functions/types/index.d.ts new file mode 100644 index 00000000..314c4c16 --- /dev/null +++ b/node_modules/@azure/functions/types/index.d.ts @@ -0,0 +1,206 @@ +// Copyright (c) .NET Foundation. All rights reserved. +// Licensed under the MIT License. 
+ +import { InvocationContext } from './InvocationContext'; + +export * as app from './app'; +export * from './cosmosDB'; +export * from './cosmosDB.v3'; +export * from './cosmosDB.v4'; +export * from './eventGrid'; +export * from './eventHub'; +export * from './generic'; +export * from './hooks/appHooks'; +export * from './hooks/HookContext'; +export * from './hooks/invocationHooks'; +export * from './hooks/logHooks'; +export * from './http'; +export * as input from './input'; +export * from './InvocationContext'; +export * from './mcpTool'; +export * from './mySql'; +export * as output from './output'; +export * from './serviceBus'; +export * from './setup'; +export * from './sql'; +export * from './storage'; +export * from './table'; +export * from './timer'; +export * as trigger from './trigger'; +export * from './warmup'; +export * from './webpubsub'; + +/** + * Void if no `return` output is registered + * Otherwise, the registered `return` output + */ +export type FunctionResult = T | Promise; + +export type FunctionHandler = (triggerInput: any, context: InvocationContext) => FunctionResult; + +/** + * Configures the inputs, outputs, and handler for an Azure Function + */ +export interface FunctionOptions { + /** + * The code that will be executed when your function is triggered + */ + handler: FunctionHandler; + + /** + * Configuration for the primary input to the function, aka the reason it will be triggered + * This is the only input that is passed as an argument to the function handler during invocation + */ + trigger: FunctionTrigger; + + /** + * Configuration for the optional primary output of the function + * This is the main output that you should set as the return value of the function handler during invocation + */ + return?: FunctionOutput; + + /** + * Configuration for an optional set of secondary inputs + * During invocation, get these values with `context.extraInputs.get()` + */ + extraInputs?: FunctionInput[]; + + /** + * Configuration for an 
optional set of secondary outputs + * During invocation, set these values with `context.extraOutputs.set()` + */ + extraOutputs?: FunctionOutput[]; +} + +/** + * Full configuration for the primary input to a function + */ +export interface FunctionTrigger extends Record { + /** + * The type for this trigger ('httpTrigger', 'timerTrigger', etc.) + * If using the `trigger` namespace to create this object, the type will be set for you + */ + type: string; + + /** + * Must be unique within this function. + * If using the `trigger` namespace to create this object, the name will be auto-generated for you + */ + name: string; +} + +/** + * Full configuration for the secondary input to a function ("trigger" is the primary input) + * NOTE: Not all triggers can be used as secondary inputs + */ +export interface FunctionInput extends Record { + /** + * The type for this trigger ('blob', 'cosmosDB', etc.) + * If using the `input` namespace to create this object, the type will be set for you + */ + type: string; + + /** + * Must be unique within this function. + * If using the `input` namespace to create this object, the name will be auto-generated for you + */ + name: string; +} + +/** + * Full configuration for the output to a function + */ +export interface FunctionOutput extends Record { + /** + * The type for this output ('http', 'blob', 'queue', etc.) + * If using the `output` namespace to create this object, the type will be set for you + */ + type: string; + + /** + * Must be unique within this function. + * If using the `output` namespace to create this object, the name will be auto-generated for you + */ + name: string; +} + +export type RetryOptions = FixedDelayRetryOptions | ExponentialBackoffRetryOptions; + +export interface FixedDelayRetryOptions { + /** + * A specified amount of time is allowed to elapse between each retry. + */ + strategy: 'fixedDelay'; + + /** + * The maximum number of retries allowed per function execution. -1 means to retry indefinitely. 
+ */ + maxRetryCount: number; + + /** + * The delay that's used between retries. + * This can be a number in milliseconds or a Duration object + */ + delayInterval: Duration | number; +} + +export interface ExponentialBackoffRetryOptions { + /** + * The first retry waits for the minimum delay. On subsequent retries, time is added exponentially to + * the initial duration for each retry, until the maximum delay is reached. Exponential back-off adds + * some small randomization to delays to stagger retries in high-throughput scenarios. + */ + strategy: 'exponentialBackoff'; + + /** + * The maximum number of retries allowed per function execution. -1 means to retry indefinitely. + */ + maxRetryCount: number; + + /** + * The minimum retry delay. + * This can be a number in milliseconds, or a Duration object + */ + minimumInterval: Duration | number; + + /** + * The maximum retry delay. + * This can be a number in milliseconds, or a Duration object + */ + maximumInterval: Duration | number; +} + +export interface Duration { + hours?: number; + minutes?: number; + seconds?: number; + milliseconds?: number; +} + +/** + * Represents a type which can release resources, such as event listening or a timer. + */ +export declare class Disposable { + /** + * Combine many disposable-likes into one. You can use this method when having objects with a dispose function which aren't instances of `Disposable`. + * + * @param disposableLikes Objects that have at least a `dispose`-function member. Note that asynchronous dispose-functions aren't awaited. + * @return Returns a new disposable which, upon dispose, will dispose all provided disposables. + */ + static from(...disposableLikes: { dispose: () => any }[]): Disposable; + + /** + * Creates a new disposable that calls the provided function on dispose. + * *Note* that an asynchronous function is not awaited. + * + * @param callOnDispose Function that disposes something. 
+ */ + constructor(callOnDispose: () => any); + + /** + * Dispose this object. + */ + dispose(): any; +} + +export type LogLevel = 'trace' | 'debug' | 'information' | 'warning' | 'error' | 'critical' | 'none'; diff --git a/node_modules/@azure/functions/types/input.d.ts b/node_modules/@azure/functions/types/input.d.ts new file mode 100644 index 00000000..52d8c680 --- /dev/null +++ b/node_modules/@azure/functions/types/input.d.ts @@ -0,0 +1,57 @@ +// Copyright (c) .NET Foundation. All rights reserved. +// Licensed under the MIT License. + +import { CosmosDBInput, CosmosDBInputOptions } from './cosmosDB'; +import { GenericInputOptions } from './generic'; +import { FunctionInput } from './index'; +import { SqlInput, SqlInputOptions } from './sql'; +import { StorageBlobInput, StorageBlobInputOptions } from './storage'; +import { TableInput, TableInputOptions } from './table'; +import { MySqlInput, MySqlInputOptions } from './mySql'; +import { + WebPubSubConnectionInput, + WebPubSubConnectionInputOptions, + WebPubSubContextInput, + WebPubSubContextInputOptions, +} from './webpubsub'; + +/** + * [Link to docs and examples](https://docs.microsoft.com/azure/azure-functions/functions-bindings-storage-blob-input?pivots=programming-language-javascript) + */ +export function storageBlob(options: StorageBlobInputOptions): StorageBlobInput; + +/** + * [Link to docs and examples](https://docs.microsoft.com/azure/azure-functions/functions-bindings-storage-table-input?pivots=programming-language-javascript) + */ +export function table(options: TableInputOptions): TableInput; + +/** + * [Link to docs and examples](https://docs.microsoft.com/azure/azure-functions/functions-bindings-cosmosdb-v2-input?pivots=programming-language-javascript) + */ +export function cosmosDB(options: CosmosDBInputOptions): CosmosDBInput; + +/** + * [Link to docs and examples](https://docs.microsoft.com/azure/azure-functions/functions-bindings-azure-sql-input?pivots=programming-language-javascript) + */ 
+export function sql(options: SqlInputOptions): SqlInput; + +/** + * [Link to docs and examples](https://learn.microsoft.com/en-us/azure/azure-functions/functions-bindings-azure-mysql-input?pivots=programming-language-javascript) + */ +export function mySql(options: MySqlInputOptions): MySqlInput; + +/** + * [Link to docs and examples](https://docs.microsoft.com/azure/azure-functions/functions-bindings-web-pubsub-input?pivots=programming-language-javascript) + */ +export function webPubSubConnection(options: WebPubSubConnectionInputOptions): WebPubSubConnectionInput; + +/** + * [Link to docs and examples](https://docs.microsoft.com/azure/azure-functions/functions-bindings-web-pubsub-input?pivots=programming-language-javascript) + */ +export function webPubSubContext(options: WebPubSubContextInputOptions): WebPubSubContextInput; + +/** + * A generic option that can be used for any input type + * Use this method if your desired input type does not already have its own method + */ +export function generic(options: GenericInputOptions): FunctionInput; diff --git a/node_modules/@azure/functions/types/mcpTool.d.ts b/node_modules/@azure/functions/types/mcpTool.d.ts new file mode 100644 index 00000000..2a67bd24 --- /dev/null +++ b/node_modules/@azure/functions/types/mcpTool.d.ts @@ -0,0 +1,107 @@ +// Copyright (c) .NET Foundation. All rights reserved. +// Licensed under the MIT License. + +import { FunctionOptions, FunctionResult, FunctionTrigger } from './index'; +import { InvocationContext } from './InvocationContext'; + +/** + * A handler function for MCP Tool triggers. + * + * @param messages - The messages or data received by the trigger. + * @param context - The invocation context for the function. + * @returns A result that can be a promise or a synchronous value. + */ +export type McpToolTriggerHandler = (messages: unknown, context: InvocationContext) => FunctionResult; + +/** + * Configuration options for an MCP Tool function. 
+ * This includes trigger-specific options and general function options. + */ +export interface McpToolFunctionOptions extends McpToolTriggerOptions, Partial { + /** + * The handler function to execute when the trigger is invoked. + */ + handler: McpToolTriggerHandler; + + /** + * The trigger configuration for the MCP Tool. + */ + trigger?: McpToolTrigger; +} + +/** + * Configuration options for an MCP Tool trigger. + * These options define the behavior and metadata for the trigger. + */ +export interface McpToolTriggerOptions { + /** + * The name of the tool associated with the trigger. + * This is typically an app setting or environment variable. + */ + toolName: string; + + /** + * A description of the tool or trigger. + * This provides additional context about the trigger's purpose. + */ + description: string; + + /** + * Additional properties or metadata for the tool. + * This is a dictionary of key-value pairs that can be used to configure the trigger. + */ + toolProperties?: any | McpToolProperty[]; +} + +/** + * Configuration options for an MCP Tool trigger. + * These options define the behavior and metadata for the trigger. + */ +export interface McpToolTriggerOptionsToRpc { + /** + * The name of the tool associated with the trigger. + * This is typically an app setting or environment variable. + */ + toolName: string; + + /** + * A description of the tool or trigger. + * This provides additional context about the trigger's purpose. + */ + description: string; + + /** + * Additional properties or metadata for the tool. + * This is a dictionary of key-value pairs that can be used to configure the trigger. + */ + toolProperties?: string; +} + +/** + * Represents an MCP Tool trigger, combining base function trigger options + * with MCP Tool-specific trigger options. + */ +export type McpToolTrigger = FunctionTrigger & McpToolTriggerOptionsToRpc; + +export interface McpToolProperty { + /** + * The name of the property. 
+ */ + propertyName: string; + + /** + * The type of the property. + */ + propertyType: string; + + /** + * A description of the property. + * This provides additional context about the purpose or usage of the property. + */ + description: string; + + /** + * Indicates whether the property is required. + */ + required?: boolean; +} diff --git a/node_modules/@azure/functions/types/mySql.d.ts b/node_modules/@azure/functions/types/mySql.d.ts new file mode 100644 index 00000000..82eb69db --- /dev/null +++ b/node_modules/@azure/functions/types/mySql.d.ts @@ -0,0 +1,73 @@ +// Copyright (c) .NET Foundation. All rights reserved. +// Licensed under the MIT License. + +import { FunctionInput, FunctionOptions, FunctionOutput, FunctionResult, FunctionTrigger } from './index'; +import { InvocationContext } from './InvocationContext'; + +export type MySqlHandler = (changes: MySqlChange[], context: InvocationContext) => FunctionResult; + +export interface MySqlFunctionOptions extends MySqlTriggerOptions, Partial { + handler: MySqlHandler; + + trigger?: MySqlTrigger; +} + +export interface MySqlTriggerOptions { + /** + * The name of the table monitored by the trigger. + */ + tableName: string; + + /** + * An app setting (or environment variable) with the connection string for the database containing the table monitored for changes + */ + connectionStringSetting: string; +} +export type MySqlTrigger = FunctionTrigger & MySqlTriggerOptions; + +export interface MySqlChange { + Item: unknown; + Operation: MySqlChangeOperation; +} + +export enum MySqlChangeOperation { + Update = 0, +} + +export interface MySqlInputOptions { + /** + * The Transact-SQL query command or name of the stored procedure executed by the binding. 
+ */ + commandText: string; + + /** + * The command type value + */ + commandType: 'Text' | 'StoredProcedure'; + + /** + * An app setting (or environment variable) with the connection string for the database against which the query or stored procedure is being executed + */ + connectionStringSetting: string; + + /** + * Zero or more parameter values passed to the command during execution as a single string. + * Must follow the format @param1=param1,@param2=param2. + * Neither the parameter name nor the parameter value can contain a comma (,) or an equals sign (=). + */ + parameters?: string; +} +export type MySqlInput = FunctionInput & MySqlInputOptions; + +export interface MySqlOutputOptions { + /** + * The name of the table being written to by the binding. + */ + commandText: string; + + /** + * An app setting (or environment variable) with the connection string for the database to which data is being written + */ + connectionStringSetting: string; +} +export type MySqlOutput = FunctionOutput & MySqlOutputOptions; diff --git a/node_modules/@azure/functions/types/output.d.ts b/node_modules/@azure/functions/types/output.d.ts new file mode 100644 index 00000000..b9d9d83a --- /dev/null +++ b/node_modules/@azure/functions/types/output.d.ts @@ -0,0 +1,86 @@ +// Copyright (c) .NET Foundation. All rights reserved. +// Licensed under the MIT License. 
+ +import { CosmosDBOutput, CosmosDBOutputOptions } from './cosmosDB'; +import { EventGridOutput, EventGridOutputOptions } from './eventGrid'; +import { EventHubOutput, EventHubOutputOptions } from './eventHub'; +import { GenericOutputOptions } from './generic'; +import { HttpOutput, HttpOutputOptions } from './http'; +import { FunctionOutput } from './index'; +import { + ServiceBusQueueOutput, + ServiceBusQueueOutputOptions, + ServiceBusTopicOutput, + ServiceBusTopicOutputOptions, +} from './serviceBus'; +import { SqlOutput, SqlOutputOptions } from './sql'; +import { StorageBlobOutput, StorageBlobOutputOptions, StorageQueueOutput, StorageQueueOutputOptions } from './storage'; +import { TableOutput, TableOutputOptions } from './table'; +import { MySqlOutput, MySqlOutputOptions } from './mySql'; +import { WebPubSubOutput, WebPubSubOutputOptions } from './webpubsub'; + +/** + * [Link to docs and examples](https://docs.microsoft.com/azure/azure-functions/functions-bindings-http-webhook-output?&pivots=programming-language-javascript) + */ +export function http(options: HttpOutputOptions): HttpOutput; + +/** + * [Link to docs and examples](https://docs.microsoft.com/azure/azure-functions/functions-bindings-storage-blob-output?pivots=programming-language-javascript) + */ +export function storageBlob(options: StorageBlobOutputOptions): StorageBlobOutput; + +/** + * [Link to docs and examples](https://docs.microsoft.com/azure/azure-functions/functions-bindings-storage-table-output?pivots=programming-language-javascript) + */ +export function table(options: TableOutputOptions): TableOutput; + +/** + * [Link to docs and examples](https://docs.microsoft.com/azure/azure-functions/functions-bindings-storage-queue-output?pivots=programming-language-javascript) + */ +export function storageQueue(options: StorageQueueOutputOptions): StorageQueueOutput; + +/** + * [Link to docs and 
examples](https://docs.microsoft.com/azure/azure-functions/functions-bindings-service-bus-output?pivots=programming-language-javascript) + */ +export function serviceBusQueue(options: ServiceBusQueueOutputOptions): ServiceBusQueueOutput; + +/** + * [Link to docs and examples](https://docs.microsoft.com/azure/azure-functions/functions-bindings-service-bus-output?pivots=programming-language-javascript) + */ +export function serviceBusTopic(options: ServiceBusTopicOutputOptions): ServiceBusTopicOutput; + +/** + * [Link to docs and examples](https://docs.microsoft.com/azure/azure-functions/functions-bindings-event-hubs-output?pivots=programming-language-javascript) + */ +export function eventHub(options: EventHubOutputOptions): EventHubOutput; + +/** + * [Link to docs and examples](https://docs.microsoft.com/azure/azure-functions/functions-bindings-event-grid-output?pivots=programming-language-javascript) + */ +export function eventGrid(options: EventGridOutputOptions): EventGridOutput; + +/** + * [Link to docs and examples](https://docs.microsoft.com/azure/azure-functions/functions-bindings-cosmosdb-v2-output?pivots=programming-language-javascript) + */ +export function cosmosDB(options: CosmosDBOutputOptions): CosmosDBOutput; + +/** + * [Link to docs and examples](https://docs.microsoft.com/azure/azure-functions/functions-bindings-azure-sql-output?pivots=programming-language-javascript) + */ +export function sql(options: SqlOutputOptions): SqlOutput; + +/** + * [Link to docs and examples](https://learn.microsoft.com/en-us/azure/azure-functions/functions-bindings-azure-mysql-output?pivots=programming-language-javascript) + */ +export function mySql(options: MySqlOutputOptions): MySqlOutput; + +/** + * [Link to docs and examples](https://docs.microsoft.com/azure/azure-functions/functions-bindings-web-pubsub-output?pivots=programming-language-javascript) + */ +export function webPubSub(options: WebPubSubOutputOptions): WebPubSubOutput; + +/** + * A generic option that 
can be used for any output type + * Use this method if your desired output type does not already have its own method + */ +export function generic(options: GenericOutputOptions): FunctionOutput; diff --git a/node_modules/@azure/functions/types/serviceBus.d.ts b/node_modules/@azure/functions/types/serviceBus.d.ts new file mode 100644 index 00000000..0a45d7ab --- /dev/null +++ b/node_modules/@azure/functions/types/serviceBus.d.ts @@ -0,0 +1,98 @@ +// Copyright (c) .NET Foundation. All rights reserved. +// Licensed under the MIT License. + +import { FunctionOptions, FunctionOutput, FunctionResult, FunctionTrigger } from './index'; +import { InvocationContext } from './InvocationContext'; + +export type ServiceBusQueueHandler = (messages: unknown, context: InvocationContext) => FunctionResult; + +export interface ServiceBusQueueFunctionOptions extends ServiceBusQueueTriggerOptions, Partial { + handler: ServiceBusQueueHandler; + + trigger?: ServiceBusQueueTrigger; +} + +export interface ServiceBusQueueTriggerOptions { + /** + * An app setting (or environment variable) with the service bus connection string + */ + connection: string; + + /** + * The name of the queue to monitor + */ + queueName: string; + + /** + * `true` if connecting to a [session-aware](https://docs.microsoft.com/azure/service-bus-messaging/message-sessions) queue. Default is `false` + */ + isSessionsEnabled?: boolean; + + /** + * Set to `many` in order to enable batching. If omitted or set to `one`, a single message is passed to the function. 
+ */ + cardinality?: 'many' | 'one'; +} +export type ServiceBusQueueTrigger = FunctionTrigger & ServiceBusQueueTriggerOptions; + +export interface ServiceBusQueueOutputOptions { + /** + * An app setting (or environment variable) with the service bus connection string + */ + connection: string; + + /** + * The name of the queue to monitor + */ + queueName: string; +} +export type ServiceBusQueueOutput = FunctionOutput & ServiceBusQueueOutputOptions; + +export type ServiceBusTopicHandler = (message: unknown, context: InvocationContext) => FunctionResult; + +export interface ServiceBusTopicFunctionOptions extends ServiceBusTopicTriggerOptions, Partial { + handler: ServiceBusTopicHandler; + + trigger?: ServiceBusTopicTrigger; +} + +export interface ServiceBusTopicTriggerOptions { + /** + * An app setting (or environment variable) with the service bus connection string + */ + connection: string; + + /** + * The name of the topic to monitor + */ + topicName: string; + + /** + * The name of the subscription to monitor + */ + subscriptionName: string; + + /** + * `true` if connecting to a [session-aware](https://docs.microsoft.com/azure/service-bus-messaging/message-sessions) subscription. Default is `false` + */ + isSessionsEnabled?: boolean; + + /** + * Set to `many` in order to enable batching. If omitted or set to `one`, a single message is passed to the function. 
+ */ + cardinality?: 'many' | 'one'; +} +export type ServiceBusTopicTrigger = FunctionTrigger & ServiceBusTopicTriggerOptions; + +export interface ServiceBusTopicOutputOptions { + /** + * An app setting (or environment variable) with the service bus connection string + */ + connection: string; + + /** + * The name of the topic to monitor + */ + topicName: string; +} +export type ServiceBusTopicOutput = FunctionOutput & ServiceBusTopicOutputOptions; diff --git a/node_modules/@azure/functions/types/setup.d.ts b/node_modules/@azure/functions/types/setup.d.ts new file mode 100644 index 00000000..92af251d --- /dev/null +++ b/node_modules/@azure/functions/types/setup.d.ts @@ -0,0 +1,16 @@ +// Copyright (c) .NET Foundation. All rights reserved. +// Licensed under the MIT License. + +export interface SetupOptions { + /** + * Stream http requests and responses instead of loading entire body in memory. + * [Learn more here](https://aka.ms/AzFuncNodeHttpStreams) + */ + enableHttpStream?: boolean; + + /** + * Dictionary of Node.js worker capabilities. + * This will be merged with existing capabilities specified by the Node.js worker and library. + */ + capabilities?: Record; +} diff --git a/node_modules/@azure/functions/types/sql.d.ts b/node_modules/@azure/functions/types/sql.d.ts new file mode 100644 index 00000000..5614d022 --- /dev/null +++ b/node_modules/@azure/functions/types/sql.d.ts @@ -0,0 +1,75 @@ +// Copyright (c) .NET Foundation. All rights reserved. +// Licensed under the MIT License. 
+ +import { FunctionInput, FunctionOptions, FunctionOutput, FunctionResult, FunctionTrigger } from './index'; +import { InvocationContext } from './InvocationContext'; + +export type SqlHandler = (changes: SqlChange[], context: InvocationContext) => FunctionResult; + +export interface SqlFunctionOptions extends SqlTriggerOptions, Partial { + handler: SqlHandler; + + trigger?: SqlTrigger; +} + +export interface SqlTriggerOptions { + /** + * The name of the table monitored by the trigger. + */ + tableName: string; + + /** + * An app setting (or environment variable) with the connection string for the database containing the table monitored for changes + */ + connectionStringSetting: string; +} +export type SqlTrigger = FunctionTrigger & SqlTriggerOptions; + +export interface SqlChange { + Item: unknown; + Operation: SqlChangeOperation; +} + +export enum SqlChangeOperation { + Insert = 0, + Update = 1, + Delete = 2, +} + +export interface SqlInputOptions { + /** + * The Transact-SQL query command or name of the stored procedure executed by the binding. + */ + commandText: string; + + /** + * The command type value + */ + commandType: 'Text' | 'StoredProcedure'; + + /** + * An app setting (or environment variable) with the connection string for the database against which the query or stored procedure is being executed + */ + connectionStringSetting: string; + + /** + * Zero or more parameter values passed to the command during execution as a single string. + * Must follow the format @param1=param1,@param2=param2. + * Neither the parameter name nor the parameter value can contain a comma (,) or an equals sign (=). + */ + parameters?: string; +} +export type SqlInput = FunctionInput & SqlInputOptions; + +export interface SqlOutputOptions { + /** + * The name of the table being written to by the binding. 
+ */ + commandText: string; + + /** + * An app setting (or environment variable) with the connection string for the database to which data is being written + */ + connectionStringSetting: string; +} +export type SqlOutput = FunctionOutput & SqlOutputOptions; diff --git a/node_modules/@azure/functions/types/storage.d.ts b/node_modules/@azure/functions/types/storage.d.ts new file mode 100644 index 00000000..2ed33ba9 --- /dev/null +++ b/node_modules/@azure/functions/types/storage.d.ts @@ -0,0 +1,66 @@ +// Copyright (c) .NET Foundation. All rights reserved. +// Licensed under the MIT License. + +import { FunctionInput, FunctionOptions, FunctionOutput, FunctionResult, FunctionTrigger } from './index'; +import { InvocationContext } from './InvocationContext'; + +export type StorageBlobHandler = (blob: unknown, context: InvocationContext) => FunctionResult; +export type StorageQueueHandler = (queueEntry: unknown, context: InvocationContext) => FunctionResult; + +export interface StorageBlobFunctionOptions extends StorageBlobTriggerOptions, Partial { + handler: StorageBlobHandler; + + trigger?: StorageBlobTrigger; +} + +export interface StorageQueueFunctionOptions extends StorageQueueTriggerOptions, Partial { + handler: StorageQueueHandler; + + trigger?: StorageQueueTrigger; +} + +export interface StorageBlobOptions { + /** + * The path to the blob container, for example "samples-workitems/{name}" + */ + path: string; + + /** + * An app setting (or environment variable) with the storage connection string to be used by this blob input or output + */ + connection: string; +} + +export interface StorageQueueOptions { + /** + * The queue name + */ + queueName: string; + + /** + * An app setting (or environment variable) with the storage connection string to be used by this queue input or output + */ + connection: string; +} + +export interface StorageBlobTriggerOptions extends StorageBlobOptions { + /** + * The source of the triggering event. 
+ * Use `EventGrid` for an Event Grid-based blob trigger, which provides much lower latency. + * The default is `LogsAndContainerScan`, which uses the standard polling mechanism to detect changes in the container. + */ + source?: 'EventGrid' | 'LogsAndContainerScan'; +} +export type StorageBlobTrigger = FunctionTrigger & StorageBlobTriggerOptions; + +export type StorageBlobInputOptions = StorageBlobOptions; +export type StorageBlobInput = FunctionInput & StorageBlobInputOptions; + +export type StorageBlobOutputOptions = StorageBlobOptions; +export type StorageBlobOutput = FunctionOutput & StorageBlobOutputOptions; + +export type StorageQueueTriggerOptions = StorageQueueOptions; +export type StorageQueueTrigger = FunctionTrigger & StorageQueueTriggerOptions; + +export type StorageQueueOutputOptions = StorageQueueOptions; +export type StorageQueueOutput = FunctionOutput & StorageQueueOutputOptions; diff --git a/node_modules/@azure/functions/types/table.d.ts b/node_modules/@azure/functions/types/table.d.ts new file mode 100644 index 00000000..8e0c83e5 --- /dev/null +++ b/node_modules/@azure/functions/types/table.d.ts @@ -0,0 +1,60 @@ +// Copyright (c) .NET Foundation. All rights reserved. +// Licensed under the MIT License. + +import { FunctionInput, FunctionOutput } from './index'; + +export interface TableOutputOptions { + /** + * The table name + */ + tableName: string; + + /** + * An app setting (or environment variable) with the storage connection string to be used by this table output + */ + connection: string; + + /** + * The partition key of the table entity to write. + */ + partitionKey?: string; + + /** + * The row key of the table entity to write. 
+ */ + rowKey?: string; +} +export type TableOutput = FunctionOutput & TableOutputOptions; + +export interface TableInputOptions { + /** + * The table name + */ + tableName: string; + + /** + * An app setting (or environment variable) with the storage connection string to be used by this table input + */ + connection: string; + + /** + * The partition key of the table entity to read. + */ + partitionKey?: string; + + /** + * The row key of the table entity to read. Can't be used with `take` or `filter`. + */ + rowKey?: string; + + /** + * The maximum number of entities to return. Can't be used with `rowKey` + */ + take?: number; + + /** + * An OData filter expression for the entities to return from the table. Can't be used with `rowKey`. + */ + filter?: string; +} +export type TableInput = FunctionInput & TableInputOptions; diff --git a/node_modules/@azure/functions/types/timer.d.ts b/node_modules/@azure/functions/types/timer.d.ts new file mode 100644 index 00000000..aa2d4c0c --- /dev/null +++ b/node_modules/@azure/functions/types/timer.d.ts @@ -0,0 +1,70 @@ +// Copyright (c) .NET Foundation. All rights reserved. +// Licensed under the MIT License. + +import { FunctionOptions, FunctionResult, FunctionTrigger, RetryOptions } from './index'; +import { InvocationContext } from './InvocationContext'; + +export type TimerHandler = (myTimer: Timer, context: InvocationContext) => FunctionResult; + +export interface TimerFunctionOptions extends TimerTriggerOptions, Partial { + handler: TimerHandler; + + trigger?: TimerTrigger; + + /** + * An optional retry policy to rerun a failed execution until either successful completion occurs or the maximum number of retries is reached. 
+ * Learn more [here](https://learn.microsoft.com/azure/azure-functions/functions-bindings-error-pages) + */ + retry?: RetryOptions; +} + +export interface TimerTriggerOptions { + /** + * A [cron expression](https://docs.microsoft.com/azure/azure-functions/functions-bindings-timer?pivots=programming-language-javascript#ncrontab-expressions) of the format '{second} {minute} {hour} {day} {month} {day of week}' to specify the schedule + */ + schedule: string; + + /** + * If `true`, the function is invoked when the runtime starts. + * For example, the runtime starts when the function app wakes up after going idle due to inactivity, when the function app restarts due to function changes, and when the function app scales out. + * _Use with caution_. runOnStartup should rarely if ever be set to `true`, especially in production. + */ + runOnStartup?: boolean; + + /** + * When true, schedule will be persisted to aid in maintaining the correct schedule even through restarts. Defaults to true for schedules with interval >= 1 minute + */ + useMonitor?: boolean; +} + +export type TimerTrigger = FunctionTrigger & TimerTriggerOptions; + +/** + * Timer schedule information. Provided to your function when using a timer binding. + */ +export interface Timer { + /** + * Whether this timer invocation is due to a missed schedule occurrence. + */ + isPastDue: boolean; + schedule: { + /** + * Whether intervals between invocations should account for DST. + */ + adjustForDST: boolean; + }; + scheduleStatus: { + /** + * The last recorded schedule occurrence. Date ISO string. + */ + last: string; + /** + * The expected next schedule occurrence. Date ISO string. + */ + next: string; + /** + * The last time this record was updated. This is used to re-calculate `next` with the current schedule after a host restart. Date ISO string. 
+ */ + lastUpdated: string; + }; +} diff --git a/node_modules/@azure/functions/types/trigger.d.ts b/node_modules/@azure/functions/types/trigger.d.ts new file mode 100644 index 00000000..e8105d29 --- /dev/null +++ b/node_modules/@azure/functions/types/trigger.d.ts @@ -0,0 +1,103 @@ +// Copyright (c) .NET Foundation. All rights reserved. +// Licensed under the MIT License. + +import { CosmosDBTrigger, CosmosDBTriggerOptions } from './cosmosDB'; +import { EventGridTrigger, EventGridTriggerOptions } from './eventGrid'; +import { EventHubTrigger, EventHubTriggerOptions } from './eventHub'; +import { GenericTriggerOptions } from './generic'; +import { HttpTrigger, HttpTriggerOptions } from './http'; +import { FunctionTrigger } from './index'; +import { McpToolFunctionOptions, McpToolTrigger } from './mcpTool'; +import { MySqlTrigger, MySqlTriggerOptions } from './mySql'; +import { + ServiceBusQueueTrigger, + ServiceBusQueueTriggerOptions, + ServiceBusTopicTrigger, + ServiceBusTopicTriggerOptions, +} from './serviceBus'; +import { SqlTrigger, SqlTriggerOptions } from './sql'; +import { + StorageBlobTrigger, + StorageBlobTriggerOptions, + StorageQueueTrigger, + StorageQueueTriggerOptions, +} from './storage'; +import { TimerTrigger, TimerTriggerOptions } from './timer'; +import { WarmupTrigger, WarmupTriggerOptions } from './warmup'; +import { WebPubSubTrigger, WebPubSubTriggerOptions } from './webpubsub'; + +/** + * [Link to docs and examples](https://docs.microsoft.com/azure/azure-functions/functions-bindings-http-webhook-trigger?&pivots=programming-language-javascript) + */ +export function http(options: HttpTriggerOptions): HttpTrigger; + +/** + * [Link to docs and examples](https://docs.microsoft.com/azure/azure-functions/functions-bindings-timer?pivots=programming-language-javascript) + */ +export function timer(options: TimerTriggerOptions): TimerTrigger; + +/** + * [Link to docs and 
examples](https://docs.microsoft.com/azure/azure-functions/functions-bindings-storage-blob-trigger?pivots=programming-language-javascript) + */ +export function storageBlob(options: StorageBlobTriggerOptions): StorageBlobTrigger; + +/** + * [Link to docs and examples](https://docs.microsoft.com/azure/azure-functions/functions-bindings-storage-queue-trigger?pivots=programming-language-javascript) + */ +export function storageQueue(options: StorageQueueTriggerOptions): StorageQueueTrigger; + +/** + * [Link to docs and examples](https://docs.microsoft.com/azure/azure-functions/functions-bindings-service-bus-trigger?pivots=programming-language-javascript) + */ +export function serviceBusQueue(options: ServiceBusQueueTriggerOptions): ServiceBusQueueTrigger; + +/** + * [Link to docs and examples](https://docs.microsoft.com/azure/azure-functions/functions-bindings-service-bus-trigger?pivots=programming-language-javascript) + */ +export function serviceBusTopic(options: ServiceBusTopicTriggerOptions): ServiceBusTopicTrigger; + +/** + * [Link to docs and examples](https://docs.microsoft.com/azure/azure-functions/functions-bindings-event-hubs-trigger?pivots=programming-language-javascript) + */ +export function eventHub(options: EventHubTriggerOptions): EventHubTrigger; + +/** + * [Link to docs and examples](https://docs.microsoft.com/azure/azure-functions/functions-bindings-event-grid-trigger?pivots=programming-language-javascript) + */ +export function eventGrid(options: EventGridTriggerOptions): EventGridTrigger; + +/** + * [Link to docs and examples](https://docs.microsoft.com/azure/azure-functions/functions-bindings-cosmosdb-v2-trigger?pivots=programming-language-javascript) + */ +export function cosmosDB(options: CosmosDBTriggerOptions): CosmosDBTrigger; + +/** + * [Link to docs and examples](https://learn.microsoft.com/azure/azure-functions/functions-bindings-warmup?tabs=isolated-process&pivots=programming-language-javascript) + */ +export function warmup(options: 
WarmupTriggerOptions): WarmupTrigger; + +/** + * [Link to docs and examples](https://docs.microsoft.com/azure/azure-functions/functions-bindings-azure-sql-trigger?pivots=programming-language-javascript) + */ +export function sql(options: SqlTriggerOptions): SqlTrigger; + +/** + * [Link to docs and examples](https://learn.microsoft.com/en-us/azure/azure-functions/functions-bindings-azure-mysql-trigger?pivots=programming-language-javascript) + */ +export function mySql(options: MySqlTriggerOptions): MySqlTrigger; + +/** + * [Link to docs and examples](https://docs.microsoft.com/azure/azure-functions/functions-bindings-web-pubsub-trigger?pivots=programming-language-javascript) + */ +export function webPubSub(options: WebPubSubTriggerOptions): WebPubSubTrigger; + +/** + * [Link to docs and examples](//TODO Add link to docs and examples) + */ +export function mcpTool(options: McpToolFunctionOptions): McpToolTrigger; + +/** + * A generic option that can be used for any trigger type + * Use this method if your desired trigger type does not already have its own method + */ +export function generic(options: GenericTriggerOptions): FunctionTrigger; diff --git a/node_modules/@azure/functions/types/warmup.d.ts b/node_modules/@azure/functions/types/warmup.d.ts new file mode 100644 index 00000000..10a3ca8b --- /dev/null +++ b/node_modules/@azure/functions/types/warmup.d.ts @@ -0,0 +1,17 @@ +// Copyright (c) .NET Foundation. All rights reserved. +// Licensed under the MIT License. 
+ +import { FunctionOptions, FunctionResult, FunctionTrigger } from './index'; +import { InvocationContext } from './InvocationContext'; + +export interface WarmupContext {} +export type WarmupHandler = (warmupContext: WarmupContext, context: InvocationContext) => FunctionResult; + +export interface WarmupFunctionOptions extends WarmupTriggerOptions, Partial { + handler: WarmupHandler; + + trigger?: WarmupTrigger; +} + +export interface WarmupTriggerOptions {} +export type WarmupTrigger = FunctionTrigger & WarmupTriggerOptions; diff --git a/node_modules/@azure/functions/types/webpubsub.d.ts b/node_modules/@azure/functions/types/webpubsub.d.ts new file mode 100644 index 00000000..e86ac46f --- /dev/null +++ b/node_modules/@azure/functions/types/webpubsub.d.ts @@ -0,0 +1,124 @@ +// Copyright (c) .NET Foundation. All rights reserved. +// Licensed under the MIT License. + +import { FunctionInput, FunctionOptions, FunctionOutput, FunctionResult, FunctionTrigger } from './index'; +import { InvocationContext } from './InvocationContext'; + +export type WebPubSubHandler = (message: unknown, context: InvocationContext) => FunctionResult; + +export interface WebPubSubFunctionOptions extends WebPubSubTriggerOptions, Partial { + handler: WebPubSubHandler; + + trigger?: WebPubSubTrigger; +} + +export interface WebPubSubTriggerOptions { + /** + * Required - The variable name used in function code for the parameter that receives the event data + */ + name: string; + + /** + * Required - The name of the hub to which the function is bound + */ + hub: string; + + /** + * Required - The type of event to which the function should respond + * Must be either 'user' or 'system' + */ + eventType: 'user' | 'system'; + + /** + * Required - The name of the event to which the function should respond + * For system event type: 'connect', 'connected', or 'disconnected' + * For user-defined subprotocols: 'message' + * For system supported subprotocol json.webpubsub.azure.v1: user-defined event 
name + */ + eventName: string; + + /** + * Optional - Specifies which client protocol can trigger the Web PubSub trigger functions + * Default is 'all' + */ + clientProtocols?: 'all' | 'webPubSub' | 'mqtt'; + + /** + * Optional - The name of an app setting or setting collection that specifies the upstream Azure Web PubSub service + * Used for signature validation + * Defaults to "WebPubSubConnectionString" if not specified + * Set to null to disable validation + */ + connection?: string | null; +} + +export type WebPubSubTrigger = FunctionTrigger & WebPubSubTriggerOptions; + +export interface WebPubSubConnectionInputOptions { + /** + * Required - Variable name used in function code for input connection binding object. + */ + name: string; + + /** + * Required - The name of the Web PubSub hub for the function to be triggered. + * Can be set in the attribute (higher priority) or in app settings as a global value. + */ + hub: string; + + /** + * Optional - The value of the user identifier claim to be set in the access key token. + */ + userId?: string; + + /** + * Optional - The client protocol type. + * Valid values are 'default' and 'mqtt'. + * For MQTT clients, you must set it to 'mqtt'. + * For other clients, you can omit the property or set it to 'default'. + */ + clientProtocol?: 'default' | 'mqtt'; + + /** + * Optional - The name of the app setting that contains the Web PubSub Service connection string. + * Defaults to "WebPubSubConnectionString". + */ + connection?: string; +} +export type WebPubSubConnectionInput = FunctionInput & WebPubSubConnectionInputOptions; + +export interface WebPubSubContextInputOptions { + /** + * Required - Variable name used in function code for input Web PubSub request. + */ + name: string; + + /** + * Optional - The name of an app settings or setting collection that specifies the upstream Azure Web PubSub service. + * The value is used for Abuse Protection and Signature validation. 
+ * The value is auto resolved with "WebPubSubConnectionString" by default. + * Null means the validation isn't needed and always succeeds. + */ + connection?: string; +} +export type WebPubSubContextInput = FunctionInput & WebPubSubContextInputOptions; + +export interface WebPubSubOutputOptions { + /** + * Required - Variable name used in function code for output binding object. + */ + name: string; + + /** + * Required - The name of the hub to which the function is bound. + * Can be set in the attribute (higher priority) or in app settings as a global value. + */ + hub: string; + + /** + * Optional - The name of the app setting that contains the Web PubSub Service connection string. + * Defaults to "WebPubSubConnectionString". + */ + connection?: string; +} +export type WebPubSubOutput = FunctionOutput & WebPubSubOutputOptions; diff --git a/node_modules/@fastify/busboy/LICENSE b/node_modules/@fastify/busboy/LICENSE new file mode 100644 index 00000000..290762e9 --- /dev/null +++ b/node_modules/@fastify/busboy/LICENSE @@ -0,0 +1,19 @@ +Copyright Brian White. All rights reserved. + +Permission is hereby granted, free of charge, to any person obtaining a copy +of this software and associated documentation files (the "Software"), to +deal in the Software without restriction, including without limitation the +rights to use, copy, modify, merge, publish, distribute, sublicense, and/or +sell copies of the Software, and to permit persons to whom the Software is +furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in +all copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. 
IN NO EVENT SHALL THE +AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING +FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS +IN THE SOFTWARE. \ No newline at end of file diff --git a/node_modules/@fastify/busboy/README.md b/node_modules/@fastify/busboy/README.md new file mode 100644 index 00000000..ece3cc8a --- /dev/null +++ b/node_modules/@fastify/busboy/README.md @@ -0,0 +1,271 @@ +# busboy + +
+ +[![Build Status](https://github.com/fastify/busboy/actions/workflows/ci.yml/badge.svg)](https://github.com/fastify/busboy/actions) +[![Coverage Status](https://coveralls.io/repos/fastify/busboy/badge.svg?branch=master)](https://coveralls.io/r/fastify/busboy?branch=master) +[![js-standard-style](https://img.shields.io/badge/code%20style-standard-brightgreen.svg?style=flat)](https://standardjs.com/) +[![Security Responsible Disclosure](https://img.shields.io/badge/Security-Responsible%20Disclosure-yellow.svg)](https://github.com/fastify/.github/blob/main/SECURITY.md) + +
+ +
+ +[![NPM version](https://img.shields.io/npm/v/@fastify/busboy.svg?style=flat)](https://www.npmjs.com/package/@fastify/busboy) +[![NPM downloads](https://img.shields.io/npm/dm/@fastify/busboy.svg?style=flat)](https://www.npmjs.com/package/@fastify/busboy) + +
+ +Description +=========== + +A Node.js module for parsing incoming HTML form data. + +This is an officially supported fork by [fastify](https://github.com/fastify/) organization of the amazing library [originally created](https://github.com/mscdex/busboy) by Brian White, +aimed at addressing long-standing issues with it. + +Benchmark (Mean time for 500 Kb payload, 2000 cycles, 1000 cycle warmup): + +| Library | Version | Mean time in nanoseconds (less is better) | +|-----------------------|---------|-------------------------------------------| +| busboy | 0.3.1 | `340114` | +| @fastify/busboy | 1.0.0 | `270984` | + +[Changelog](https://github.com/fastify/busboy/blob/master/CHANGELOG.md) since busboy 0.31. + +Requirements +============ + +* [Node.js](http://nodejs.org/) 10+ + + +Install +======= + + npm i @fastify/busboy + + +Examples +======== + +* Parsing (multipart) with default options: + +```javascript +const http = require('node:http'); +const { inspect } = require('node:util'); +const Busboy = require('busboy'); + +http.createServer((req, res) => { + if (req.method === 'POST') { + const busboy = new Busboy({ headers: req.headers }); + busboy.on('file', (fieldname, file, filename, encoding, mimetype) => { + console.log(`File [${fieldname}]: filename: ${filename}, encoding: ${encoding}, mimetype: ${mimetype}`); + file.on('data', data => { + console.log(`File [${fieldname}] got ${data.length} bytes`); + }); + file.on('end', () => { + console.log(`File [${fieldname}] Finished`); + }); + }); + busboy.on('field', (fieldname, val, fieldnameTruncated, valTruncated, encoding, mimetype) => { + console.log(`Field [${fieldname}]: value: ${inspect(val)}`); + }); + busboy.on('finish', () => { + console.log('Done parsing form!'); + res.writeHead(303, { Connection: 'close', Location: '/' }); + res.end(); + }); + req.pipe(busboy); + } else if (req.method === 'GET') { + res.writeHead(200, { Connection: 'close' }); + res.end(`
+
+
+ +
+ `); + } +}).listen(8000, () => { + console.log('Listening for requests'); +}); + +// Example output, using http://nodejs.org/images/ryan-speaker.jpg as the file: +// +// Listening for requests +// File [filefield]: filename: ryan-speaker.jpg, encoding: binary +// File [filefield] got 11971 bytes +// Field [textfield]: value: 'testing! :-)' +// File [filefield] Finished +// Done parsing form! +``` + +* Save all incoming files to disk: + +```javascript +const http = require('node:http'); +const path = require('node:path'); +const os = require('node:os'); +const fs = require('node:fs'); + +const Busboy = require('busboy'); + +http.createServer(function(req, res) { + if (req.method === 'POST') { + const busboy = new Busboy({ headers: req.headers }); + busboy.on('file', function(fieldname, file, filename, encoding, mimetype) { + var saveTo = path.join(os.tmpdir(), path.basename(fieldname)); + file.pipe(fs.createWriteStream(saveTo)); + }); + busboy.on('finish', function() { + res.writeHead(200, { 'Connection': 'close' }); + res.end("That's all folks!"); + }); + return req.pipe(busboy); + } + res.writeHead(404); + res.end(); +}).listen(8000, function() { + console.log('Listening for requests'); +}); +``` + +* Parsing (urlencoded) with default options: + +```javascript +const http = require('node:http'); +const { inspect } = require('node:util'); + +const Busboy = require('busboy'); + +http.createServer(function(req, res) { + if (req.method === 'POST') { + const busboy = new Busboy({ headers: req.headers }); + busboy.on('file', function(fieldname, file, filename, encoding, mimetype) { + console.log('File [' + fieldname + ']: filename: ' + filename); + file.on('data', function(data) { + console.log('File [' + fieldname + '] got ' + data.length + ' bytes'); + }); + file.on('end', function() { + console.log('File [' + fieldname + '] Finished'); + }); + }); + busboy.on('field', function(fieldname, val, fieldnameTruncated, valTruncated) { + console.log('Field [' + fieldname + 
']: value: ' + inspect(val)); + }); + busboy.on('finish', function() { + console.log('Done parsing form!'); + res.writeHead(303, { Connection: 'close', Location: '/' }); + res.end(); + }); + req.pipe(busboy); + } else if (req.method === 'GET') { + res.writeHead(200, { Connection: 'close' }); + res.end('\ +
\ +
\ +
\ + Node.js rules!
\ + \ +
\ + '); + } +}).listen(8000, function() { + console.log('Listening for requests'); +}); + +// Example output: +// +// Listening for requests +// Field [textfield]: value: 'testing! :-)' +// Field [selectfield]: value: '9001' +// Field [checkfield]: value: 'on' +// Done parsing form! +``` + + +API +=== + +_Busboy_ is a _Writable_ stream + +Busboy (special) events +----------------------- + +* **file**(< _string_ >fieldname, < _ReadableStream_ >stream, < _string_ >filename, < _string_ >transferEncoding, < _string_ >mimeType) - Emitted for each new file form field found. `transferEncoding` contains the 'Content-Transfer-Encoding' value for the file stream. `mimeType` contains the 'Content-Type' value for the file stream. + * Note: if you listen for this event, you should always handle the `stream` no matter if you care about the file contents or not (e.g. you can simply just do `stream.resume();` if you want to discard the contents), otherwise the 'finish' event will never fire on the Busboy instance. However, if you don't care about **any** incoming files, you can simply not listen for the 'file' event at all and any/all files will be automatically and safely discarded (these discarded files do still count towards `files` and `parts` limits). + * If a configured file size limit was reached, `stream` will both have a boolean property `truncated` (best checked at the end of the stream) and emit a 'limit' event to notify you when this happens. + * The property `bytesRead` informs about the number of bytes that have been read so far. + +* **field**(< _string_ >fieldname, < _string_ >value, < _boolean_ >fieldnameTruncated, < _boolean_ >valueTruncated, < _string_ >transferEncoding, < _string_ >mimeType) - Emitted for each new non-file field found. + +* **partsLimit**() - Emitted when specified `parts` limit has been reached. No more 'file' or 'field' events will be emitted. + +* **filesLimit**() - Emitted when specified `files` limit has been reached. 
No more 'file' events will be emitted. + +* **fieldsLimit**() - Emitted when specified `fields` limit has been reached. No more 'field' events will be emitted. + + +Busboy methods +-------------- + +* **(constructor)**(< _object_ >config) - Creates and returns a new Busboy instance. + + * The constructor takes the following valid `config` settings: + + * **headers** - _object_ - These are the HTTP headers of the incoming request, which are used by individual parsers. + + * **autoDestroy** - _boolean_ - Whether this stream should automatically call .destroy() on itself after ending. (Default: false). + + * **highWaterMark** - _integer_ - highWaterMark to use for this Busboy instance (Default: WritableStream default). + + * **fileHwm** - _integer_ - highWaterMark to use for file streams (Default: ReadableStream default). + + * **defCharset** - _string_ - Default character set to use when one isn't defined (Default: 'utf8'). + + * **preservePath** - _boolean_ - If paths in the multipart 'filename' field shall be preserved. (Default: false). + + * **isPartAFile** - __function__ - Use this function to override the default file detection functionality. It has following parameters: + + * fieldName - __string__ The name of the field. + + * contentType - __string__ The content-type of the part, e.g. `text/plain`, `image/jpeg`, `application/octet-stream` + + * fileName - __string__ The name of a file supplied by the part. + + (Default: `(fieldName, contentType, fileName) => (contentType === 'application/octet-stream' || fileName !== undefined)`) + + * **limits** - _object_ - Various limits on incoming data. Valid properties are: + + * **fieldNameSize** - _integer_ - Max field name size (in bytes) (Default: 100 bytes). + + * **fieldSize** - _integer_ - Max field value size (in bytes) (Default: 1 MiB, which is 1024 x 1024 bytes). + + * **fields** - _integer_ - Max number of non-file fields (Default: Infinity). 
+ + * **fileSize** - _integer_ - For multipart forms, the max file size (in bytes) (Default: Infinity). + + * **files** - _integer_ - For multipart forms, the max number of file fields (Default: Infinity). + + * **parts** - _integer_ - For multipart forms, the max number of parts (fields + files) (Default: Infinity). + + * **headerPairs** - _integer_ - For multipart forms, the max number of header key=>value pairs to parse **Default:** 2000 + + * **headerSize** - _integer_ - For multipart forms, the max size of a multipart header **Default:** 81920. + + * The constructor can throw errors: + + * **Busboy expected an options-Object.** - Busboy expected an Object as first parameters. + + * **Busboy expected an options-Object with headers-attribute.** - The first parameter is lacking of a headers-attribute. + + * **Limit $limit is not a valid number** - Busboy expected the desired limit to be of type number. Busboy throws this Error to prevent a potential security issue by falling silently back to the Busboy-defaults. Potential source for this Error can be the direct use of environment variables without transforming them to the type number. + + * **Unsupported Content-Type.** - The `Content-Type` isn't one Busboy can parse. + + * **Missing Content-Type-header.** - The provided headers don't include `Content-Type` at all. diff --git a/node_modules/@fastify/busboy/deps/dicer/LICENSE b/node_modules/@fastify/busboy/deps/dicer/LICENSE new file mode 100644 index 00000000..290762e9 --- /dev/null +++ b/node_modules/@fastify/busboy/deps/dicer/LICENSE @@ -0,0 +1,19 @@ +Copyright Brian White. All rights reserved. 
+ +Permission is hereby granted, free of charge, to any person obtaining a copy +of this software and associated documentation files (the "Software"), to +deal in the Software without restriction, including without limitation the +rights to use, copy, modify, merge, publish, distribute, sublicense, and/or +sell copies of the Software, and to permit persons to whom the Software is +furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in +all copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING +FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS +IN THE SOFTWARE. \ No newline at end of file diff --git a/node_modules/@fastify/busboy/deps/streamsearch/sbmh.js b/node_modules/@fastify/busboy/deps/streamsearch/sbmh.js new file mode 100644 index 00000000..b90c0e86 --- /dev/null +++ b/node_modules/@fastify/busboy/deps/streamsearch/sbmh.js @@ -0,0 +1,228 @@ +'use strict' + +/** + * Copyright Brian White. All rights reserved. 
+ * + * @see https://github.com/mscdex/streamsearch + * + * Permission is hereby granted, free of charge, to any person obtaining a copy + * of this software and associated documentation files (the "Software"), to + * deal in the Software without restriction, including without limitation the + * rights to use, copy, modify, merge, publish, distribute, sublicense, and/or + * sell copies of the Software, and to permit persons to whom the Software is + * furnished to do so, subject to the following conditions: + * + * The above copyright notice and this permission notice shall be included in + * all copies or substantial portions of the Software. + * + * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR + * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, + * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE + * AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER + * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING + * FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS + * IN THE SOFTWARE. + * + * Based heavily on the Streaming Boyer-Moore-Horspool C++ implementation + * by Hongli Lai at: https://github.com/FooBarWidget/boyer-moore-horspool + */ +const EventEmitter = require('node:events').EventEmitter +const inherits = require('node:util').inherits + +function SBMH (needle) { + if (typeof needle === 'string') { + needle = Buffer.from(needle) + } + + if (!Buffer.isBuffer(needle)) { + throw new TypeError('The needle has to be a String or a Buffer.') + } + + const needleLength = needle.length + + if (needleLength === 0) { + throw new Error('The needle cannot be an empty String/Buffer.') + } + + if (needleLength > 256) { + throw new Error('The needle cannot have a length bigger than 256.') + } + + this.maxMatches = Infinity + this.matches = 0 + + this._occ = new Array(256) + .fill(needleLength) // Initialize occurrence table. 
+ this._lookbehind_size = 0 + this._needle = needle + this._bufpos = 0 + + this._lookbehind = Buffer.alloc(needleLength) + + // Populate occurrence table with analysis of the needle, + // ignoring last letter. + for (var i = 0; i < needleLength - 1; ++i) { // eslint-disable-line no-var + this._occ[needle[i]] = needleLength - 1 - i + } +} +inherits(SBMH, EventEmitter) + +SBMH.prototype.reset = function () { + this._lookbehind_size = 0 + this.matches = 0 + this._bufpos = 0 +} + +SBMH.prototype.push = function (chunk, pos) { + if (!Buffer.isBuffer(chunk)) { + chunk = Buffer.from(chunk, 'binary') + } + const chlen = chunk.length + this._bufpos = pos || 0 + let r + while (r !== chlen && this.matches < this.maxMatches) { r = this._sbmh_feed(chunk) } + return r +} + +SBMH.prototype._sbmh_feed = function (data) { + const len = data.length + const needle = this._needle + const needleLength = needle.length + const lastNeedleChar = needle[needleLength - 1] + + // Positive: points to a position in `data` + // pos == 3 points to data[3] + // Negative: points to a position in the lookbehind buffer + // pos == -2 points to lookbehind[lookbehind_size - 2] + let pos = -this._lookbehind_size + let ch + + if (pos < 0) { + // Lookbehind buffer is not empty. Perform Boyer-Moore-Horspool + // search with character lookup code that considers both the + // lookbehind buffer and the current round's haystack data. + // + // Loop until + // there is a match. + // or until + // we've moved past the position that requires the + // lookbehind buffer. In this case we switch to the + // optimized loop. + // or until + // the character to look at lies outside the haystack. 
+ while (pos < 0 && pos <= len - needleLength) { + ch = this._sbmh_lookup_char(data, pos + needleLength - 1) + + if ( + ch === lastNeedleChar && + this._sbmh_memcmp(data, pos, needleLength - 1) + ) { + this._lookbehind_size = 0 + ++this.matches + this.emit('info', true) + + return (this._bufpos = pos + needleLength) + } + pos += this._occ[ch] + } + + // No match. + + if (pos < 0) { + // There's too few data for Boyer-Moore-Horspool to run, + // so let's use a different algorithm to skip as much as + // we can. + // Forward pos until + // the trailing part of lookbehind + data + // looks like the beginning of the needle + // or until + // pos == 0 + while (pos < 0 && !this._sbmh_memcmp(data, pos, len - pos)) { ++pos } + } + + if (pos >= 0) { + // Discard lookbehind buffer. + this.emit('info', false, this._lookbehind, 0, this._lookbehind_size) + this._lookbehind_size = 0 + } else { + // Cut off part of the lookbehind buffer that has + // been processed and append the entire haystack + // into it. + const bytesToCutOff = this._lookbehind_size + pos + if (bytesToCutOff > 0) { + // The cut off data is guaranteed not to contain the needle. + this.emit('info', false, this._lookbehind, 0, bytesToCutOff) + } + + this._lookbehind.copy(this._lookbehind, 0, bytesToCutOff, + this._lookbehind_size - bytesToCutOff) + this._lookbehind_size -= bytesToCutOff + + data.copy(this._lookbehind, this._lookbehind_size) + this._lookbehind_size += len + + this._bufpos = len + return len + } + } + + pos += (pos >= 0) * this._bufpos + + // Lookbehind buffer is now empty. We only need to check if the + // needle is in the haystack. + if (data.indexOf(needle, pos) !== -1) { + pos = data.indexOf(needle, pos) + ++this.matches + if (pos > 0) { this.emit('info', true, data, this._bufpos, pos) } else { this.emit('info', true) } + + return (this._bufpos = pos + needleLength) + } else { + pos = len - needleLength + } + + // There was no match. 
If there's trailing haystack data that we cannot + // match yet using the Boyer-Moore-Horspool algorithm (because the trailing + // data is less than the needle size) then match using a modified + // algorithm that starts matching from the beginning instead of the end. + // Whatever trailing data is left after running this algorithm is added to + // the lookbehind buffer. + while ( + pos < len && + ( + data[pos] !== needle[0] || + ( + (Buffer.compare( + data.subarray(pos, pos + len - pos), + needle.subarray(0, len - pos) + ) !== 0) + ) + ) + ) { + ++pos + } + if (pos < len) { + data.copy(this._lookbehind, 0, pos, pos + (len - pos)) + this._lookbehind_size = len - pos + } + + // Everything until pos is guaranteed not to contain needle data. + if (pos > 0) { this.emit('info', false, data, this._bufpos, pos < len ? pos : len) } + + this._bufpos = len + return len +} + +SBMH.prototype._sbmh_lookup_char = function (data, pos) { + return (pos < 0) + ? this._lookbehind[this._lookbehind_size + pos] + : data[pos] +} + +SBMH.prototype._sbmh_memcmp = function (data, pos, len) { + for (var i = 0; i < len; ++i) { // eslint-disable-line no-var + if (this._sbmh_lookup_char(data, pos + i) !== this._needle[i]) { return false } + } + return true +} + +module.exports = SBMH diff --git a/node_modules/@fastify/busboy/package.json b/node_modules/@fastify/busboy/package.json new file mode 100644 index 00000000..83693acb --- /dev/null +++ b/node_modules/@fastify/busboy/package.json @@ -0,0 +1,86 @@ +{ + "name": "@fastify/busboy", + "version": "2.1.1", + "private": false, + "author": "Brian White ", + "contributors": [ + { + "name": "Igor Savin", + "email": "kibertoad@gmail.com", + "url": "https://github.com/kibertoad" + }, + { + "name": "Aras Abbasi", + "email": "aras.abbasi@gmail.com", + "url": "https://github.com/uzlopak" + } + ], + "description": "A streaming parser for HTML form data for node.js", + "main": "lib/main", + "type": "commonjs", + "types": 
"lib/main.d.ts", + "scripts": { + "bench:busboy": "cd benchmarks && npm install && npm run benchmark-fastify", + "bench:dicer": "node bench/dicer/dicer-bench-multipart-parser.js", + "coveralls": "nyc report --reporter=lcov", + "lint": "npm run lint:standard", + "lint:everything": "npm run lint && npm run test:types", + "lint:fix": "standard --fix", + "lint:standard": "standard --verbose | snazzy", + "test:mocha": "tap", + "test:types": "tsd", + "test:coverage": "nyc npm run test", + "test": "npm run test:mocha" + }, + "engines": { + "node": ">=14" + }, + "devDependencies": { + "@types/node": "^20.1.0", + "busboy": "^1.0.0", + "photofinish": "^1.8.0", + "snazzy": "^9.0.0", + "standard": "^17.0.0", + "tap": "^16.3.8", + "tinybench": "^2.5.1", + "tsd": "^0.30.0", + "typescript": "^5.0.2" + }, + "keywords": [ + "uploads", + "forms", + "multipart", + "form-data" + ], + "license": "MIT", + "repository": { + "type": "git", + "url": "git+https://github.com/fastify/busboy.git" + }, + "tsd": { + "directory": "test/types", + "compilerOptions": { + "esModuleInterop": false, + "module": "commonjs", + "target": "ES2017" + } + }, + "standard": { + "globals": [ + "describe", + "it" + ], + "ignore": [ + "bench" + ] + }, + "files": [ + "README.md", + "LICENSE", + "lib/*", + "deps/encoding/*", + "deps/dicer/lib", + "deps/streamsearch/", + "deps/dicer/LICENSE" + ] +} diff --git a/node_modules/cookie/LICENSE b/node_modules/cookie/LICENSE new file mode 100644 index 00000000..058b6b4e --- /dev/null +++ b/node_modules/cookie/LICENSE @@ -0,0 +1,24 @@ +(The MIT License) + +Copyright (c) 2012-2014 Roman Shtylman +Copyright (c) 2015 Douglas Christopher Wilson + +Permission is hereby granted, free of charge, to any person obtaining +a copy of this software and associated documentation files (the +'Software'), to deal in the Software without restriction, including +without limitation the rights to use, copy, modify, merge, publish, +distribute, sublicense, and/or sell copies of the Software, 
and to +permit persons to whom the Software is furnished to do so, subject to +the following conditions: + +The above copyright notice and this permission notice shall be +included in all copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED 'AS IS', WITHOUT WARRANTY OF ANY KIND, +EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF +MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. +IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY +CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, +TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE +SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. + diff --git a/node_modules/cookie/README.md b/node_modules/cookie/README.md new file mode 100644 index 00000000..71fdac11 --- /dev/null +++ b/node_modules/cookie/README.md @@ -0,0 +1,317 @@ +# cookie + +[![NPM Version][npm-version-image]][npm-url] +[![NPM Downloads][npm-downloads-image]][npm-url] +[![Node.js Version][node-image]][node-url] +[![Build Status][ci-image]][ci-url] +[![Coverage Status][coveralls-image]][coveralls-url] + +Basic HTTP cookie parser and serializer for HTTP servers. + +## Installation + +This is a [Node.js](https://nodejs.org/en/) module available through the +[npm registry](https://www.npmjs.com/). Installation is done using the +[`npm install` command](https://docs.npmjs.com/getting-started/installing-npm-packages-locally): + +```sh +$ npm install cookie +``` + +## API + +```js +var cookie = require('cookie'); +``` + +### cookie.parse(str, options) + +Parse an HTTP `Cookie` header string and returning an object of all cookie name-value pairs. +The `str` argument is the string representing a `Cookie` header value and `options` is an +optional object containing additional parsing options. 
+ +```js +var cookies = cookie.parse('foo=bar; equation=E%3Dmc%5E2'); +// { foo: 'bar', equation: 'E=mc^2' } +``` + +#### Options + +`cookie.parse` accepts these properties in the options object. + +##### decode + +Specifies a function that will be used to decode a cookie's value. Since the value of a cookie +has a limited character set (and must be a simple string), this function can be used to decode +a previously-encoded cookie value into a JavaScript string or other object. + +The default function is the global `decodeURIComponent`, which will decode any URL-encoded +sequences into their byte representations. + +**note** if an error is thrown from this function, the original, non-decoded cookie value will +be returned as the cookie's value. + +### cookie.serialize(name, value, options) + +Serialize a cookie name-value pair into a `Set-Cookie` header string. The `name` argument is the +name for the cookie, the `value` argument is the value to set the cookie to, and the `options` +argument is an optional object containing additional serialization options. + +```js +var setCookie = cookie.serialize('foo', 'bar'); +// foo=bar +``` + +#### Options + +`cookie.serialize` accepts these properties in the options object. + +##### domain + +Specifies the value for the [`Domain` `Set-Cookie` attribute][rfc-6265-5.2.3]. By default, no +domain is set, and most clients will consider the cookie to apply to only the current domain. + +##### encode + +Specifies a function that will be used to encode a cookie's value. Since value of a cookie +has a limited character set (and must be a simple string), this function can be used to encode +a value into a string suited for a cookie's value. + +The default function is the global `encodeURIComponent`, which will encode a JavaScript string +into UTF-8 byte sequences and then URL-encode any that fall outside of the cookie range. 
+ +##### expires + +Specifies the `Date` object to be the value for the [`Expires` `Set-Cookie` attribute][rfc-6265-5.2.1]. +By default, no expiration is set, and most clients will consider this a "non-persistent cookie" and +will delete it on a condition like exiting a web browser application. + +**note** the [cookie storage model specification][rfc-6265-5.3] states that if both `expires` and +`maxAge` are set, then `maxAge` takes precedence, but it is possible not all clients by obey this, +so if both are set, they should point to the same date and time. + +##### httpOnly + +Specifies the `boolean` value for the [`HttpOnly` `Set-Cookie` attribute][rfc-6265-5.2.6]. When truthy, +the `HttpOnly` attribute is set, otherwise it is not. By default, the `HttpOnly` attribute is not set. + +**note** be careful when setting this to `true`, as compliant clients will not allow client-side +JavaScript to see the cookie in `document.cookie`. + +##### maxAge + +Specifies the `number` (in seconds) to be the value for the [`Max-Age` `Set-Cookie` attribute][rfc-6265-5.2.2]. +The given number will be converted to an integer by rounding down. By default, no maximum age is set. + +**note** the [cookie storage model specification][rfc-6265-5.3] states that if both `expires` and +`maxAge` are set, then `maxAge` takes precedence, but it is possible not all clients by obey this, +so if both are set, they should point to the same date and time. + +##### partitioned + +Specifies the `boolean` value for the [`Partitioned` `Set-Cookie`](rfc-cutler-httpbis-partitioned-cookies) +attribute. When truthy, the `Partitioned` attribute is set, otherwise it is not. By default, the +`Partitioned` attribute is not set. + +**note** This is an attribute that has not yet been fully standardized, and may change in the future. +This also means many clients may ignore this attribute until they understand it. + +More information about can be found in [the proposal](https://github.com/privacycg/CHIPS). 
+ +##### path + +Specifies the value for the [`Path` `Set-Cookie` attribute][rfc-6265-5.2.4]. By default, the path +is considered the ["default path"][rfc-6265-5.1.4]. + +##### priority + +Specifies the `string` to be the value for the [`Priority` `Set-Cookie` attribute][rfc-west-cookie-priority-00-4.1]. + + - `'low'` will set the `Priority` attribute to `Low`. + - `'medium'` will set the `Priority` attribute to `Medium`, the default priority when not set. + - `'high'` will set the `Priority` attribute to `High`. + +More information about the different priority levels can be found in +[the specification][rfc-west-cookie-priority-00-4.1]. + +**note** This is an attribute that has not yet been fully standardized, and may change in the future. +This also means many clients may ignore this attribute until they understand it. + +##### sameSite + +Specifies the `boolean` or `string` to be the value for the [`SameSite` `Set-Cookie` attribute][rfc-6265bis-09-5.4.7]. + + - `true` will set the `SameSite` attribute to `Strict` for strict same site enforcement. + - `false` will not set the `SameSite` attribute. + - `'lax'` will set the `SameSite` attribute to `Lax` for lax same site enforcement. + - `'none'` will set the `SameSite` attribute to `None` for an explicit cross-site cookie. + - `'strict'` will set the `SameSite` attribute to `Strict` for strict same site enforcement. + +More information about the different enforcement levels can be found in +[the specification][rfc-6265bis-09-5.4.7]. + +**note** This is an attribute that has not yet been fully standardized, and may change in the future. +This also means many clients may ignore this attribute until they understand it. + +##### secure + +Specifies the `boolean` value for the [`Secure` `Set-Cookie` attribute][rfc-6265-5.2.5]. When truthy, +the `Secure` attribute is set, otherwise it is not. By default, the `Secure` attribute is not set. 
+ +**note** be careful when setting this to `true`, as compliant clients will not send the cookie back to +the server in the future if the browser does not have an HTTPS connection. + +## Example + +The following example uses this module in conjunction with the Node.js core HTTP server +to prompt a user for their name and display it back on future visits. + +```js +var cookie = require('cookie'); +var escapeHtml = require('escape-html'); +var http = require('http'); +var url = require('url'); + +function onRequest(req, res) { + // Parse the query string + var query = url.parse(req.url, true, true).query; + + if (query && query.name) { + // Set a new cookie with the name + res.setHeader('Set-Cookie', cookie.serialize('name', String(query.name), { + httpOnly: true, + maxAge: 60 * 60 * 24 * 7 // 1 week + })); + + // Redirect back after setting cookie + res.statusCode = 302; + res.setHeader('Location', req.headers.referer || '/'); + res.end(); + return; + } + + // Parse the cookies on the request + var cookies = cookie.parse(req.headers.cookie || ''); + + // Get the visitor name set in the cookie + var name = cookies.name; + + res.setHeader('Content-Type', 'text/html; charset=UTF-8'); + + if (name) { + res.write('

Welcome back, ' + escapeHtml(name) + '!

'); + } else { + res.write('

Hello, new visitor!

'); + } + + res.write('
'); + res.write(' '); + res.end('
'); +} + +http.createServer(onRequest).listen(3000); +``` + +## Testing + +```sh +$ npm test +``` + +## Benchmark + +``` +$ npm run bench + +> cookie@0.5.0 bench +> node benchmark/index.js + + node@18.18.2 + acorn@8.10.0 + ada@2.6.0 + ares@1.19.1 + brotli@1.0.9 + cldr@43.1 + icu@73.2 + llhttp@6.0.11 + modules@108 + napi@9 + nghttp2@1.57.0 + nghttp3@0.7.0 + ngtcp2@0.8.1 + openssl@3.0.10+quic + simdutf@3.2.14 + tz@2023c + undici@5.26.3 + unicode@15.0 + uv@1.44.2 + uvwasi@0.0.18 + v8@10.2.154.26-node.26 + zlib@1.2.13.1-motley + +> node benchmark/parse-top.js + + cookie.parse - top sites + + 14 tests completed. + + parse accounts.google.com x 2,588,913 ops/sec ±0.74% (186 runs sampled) + parse apple.com x 2,370,002 ops/sec ±0.69% (186 runs sampled) + parse cloudflare.com x 2,213,102 ops/sec ±0.88% (188 runs sampled) + parse docs.google.com x 2,194,157 ops/sec ±1.03% (184 runs sampled) + parse drive.google.com x 2,265,084 ops/sec ±0.79% (187 runs sampled) + parse en.wikipedia.org x 457,099 ops/sec ±0.81% (186 runs sampled) + parse linkedin.com x 504,407 ops/sec ±0.89% (186 runs sampled) + parse maps.google.com x 1,230,959 ops/sec ±0.98% (186 runs sampled) + parse microsoft.com x 926,294 ops/sec ±0.88% (184 runs sampled) + parse play.google.com x 2,311,338 ops/sec ±0.83% (185 runs sampled) + parse support.google.com x 1,508,850 ops/sec ±0.86% (186 runs sampled) + parse www.google.com x 1,022,582 ops/sec ±1.32% (182 runs sampled) + parse youtu.be x 332,136 ops/sec ±1.02% (185 runs sampled) + parse youtube.com x 323,833 ops/sec ±0.77% (183 runs sampled) + +> node benchmark/parse.js + + cookie.parse - generic + + 6 tests completed. 
+ + simple x 3,214,032 ops/sec ±1.61% (183 runs sampled) + decode x 587,237 ops/sec ±1.16% (187 runs sampled) + unquote x 2,954,618 ops/sec ±1.35% (183 runs sampled) + duplicates x 857,008 ops/sec ±0.89% (187 runs sampled) + 10 cookies x 292,133 ops/sec ±0.89% (187 runs sampled) + 100 cookies x 22,610 ops/sec ±0.68% (187 runs sampled) +``` + +## References + +- [RFC 6265: HTTP State Management Mechanism][rfc-6265] +- [Same-site Cookies][rfc-6265bis-09-5.4.7] + +[rfc-cutler-httpbis-partitioned-cookies]: https://tools.ietf.org/html/draft-cutler-httpbis-partitioned-cookies/ +[rfc-west-cookie-priority-00-4.1]: https://tools.ietf.org/html/draft-west-cookie-priority-00#section-4.1 +[rfc-6265bis-09-5.4.7]: https://tools.ietf.org/html/draft-ietf-httpbis-rfc6265bis-09#section-5.4.7 +[rfc-6265]: https://tools.ietf.org/html/rfc6265 +[rfc-6265-5.1.4]: https://tools.ietf.org/html/rfc6265#section-5.1.4 +[rfc-6265-5.2.1]: https://tools.ietf.org/html/rfc6265#section-5.2.1 +[rfc-6265-5.2.2]: https://tools.ietf.org/html/rfc6265#section-5.2.2 +[rfc-6265-5.2.3]: https://tools.ietf.org/html/rfc6265#section-5.2.3 +[rfc-6265-5.2.4]: https://tools.ietf.org/html/rfc6265#section-5.2.4 +[rfc-6265-5.2.5]: https://tools.ietf.org/html/rfc6265#section-5.2.5 +[rfc-6265-5.2.6]: https://tools.ietf.org/html/rfc6265#section-5.2.6 +[rfc-6265-5.3]: https://tools.ietf.org/html/rfc6265#section-5.3 + +## License + +[MIT](LICENSE) + +[ci-image]: https://badgen.net/github/checks/jshttp/cookie/master?label=ci +[ci-url]: https://github.com/jshttp/cookie/actions/workflows/ci.yml +[coveralls-image]: https://badgen.net/coveralls/c/github/jshttp/cookie/master +[coveralls-url]: https://coveralls.io/r/jshttp/cookie?branch=master +[node-image]: https://badgen.net/npm/node/cookie +[node-url]: https://nodejs.org/en/download +[npm-downloads-image]: https://badgen.net/npm/dm/cookie +[npm-url]: https://npmjs.org/package/cookie +[npm-version-image]: https://badgen.net/npm/v/cookie diff --git 
a/node_modules/cookie/SECURITY.md b/node_modules/cookie/SECURITY.md new file mode 100644 index 00000000..fd4a6c53 --- /dev/null +++ b/node_modules/cookie/SECURITY.md @@ -0,0 +1,25 @@ +# Security Policies and Procedures + +## Reporting a Bug + +The `cookie` team and community take all security bugs seriously. Thank +you for improving the security of the project. We appreciate your efforts and +responsible disclosure and will make every effort to acknowledge your +contributions. + +Report security bugs by emailing the current owner(s) of `cookie`. This +information can be found in the npm registry using the command +`npm owner ls cookie`. +If unsure or unable to get the information from the above, open an issue +in the [project issue tracker](https://github.com/jshttp/cookie/issues) +asking for the current contact information. + +To ensure the timely response to your report, please ensure that the entirety +of the report is contained within the email body and not solely behind a web +link or an attachment. + +At least one owner will acknowledge your email within 48 hours, and will send a +more detailed response within 48 hours indicating the next steps in handling +your report. After the initial reply to your report, the owners will +endeavor to keep you informed of the progress towards a fix and full +announcement, and may ask for additional information or guidance. diff --git a/node_modules/cookie/index.js b/node_modules/cookie/index.js new file mode 100644 index 00000000..acd5acd6 --- /dev/null +++ b/node_modules/cookie/index.js @@ -0,0 +1,335 @@ +/*! + * cookie + * Copyright(c) 2012-2014 Roman Shtylman + * Copyright(c) 2015 Douglas Christopher Wilson + * MIT Licensed + */ + +'use strict'; + +/** + * Module exports. + * @public + */ + +exports.parse = parse; +exports.serialize = serialize; + +/** + * Module variables. 
+ * @private + */ + +var __toString = Object.prototype.toString +var __hasOwnProperty = Object.prototype.hasOwnProperty + +/** + * RegExp to match cookie-name in RFC 6265 sec 4.1.1 + * This refers out to the obsoleted definition of token in RFC 2616 sec 2.2 + * which has been replaced by the token definition in RFC 7230 appendix B. + * + * cookie-name = token + * token = 1*tchar + * tchar = "!" / "#" / "$" / "%" / "&" / "'" / + * "*" / "+" / "-" / "." / "^" / "_" / + * "`" / "|" / "~" / DIGIT / ALPHA + */ + +var cookieNameRegExp = /^[!#$%&'*+\-.^_`|~0-9A-Za-z]+$/; + +/** + * RegExp to match cookie-value in RFC 6265 sec 4.1.1 + * + * cookie-value = *cookie-octet / ( DQUOTE *cookie-octet DQUOTE ) + * cookie-octet = %x21 / %x23-2B / %x2D-3A / %x3C-5B / %x5D-7E + * ; US-ASCII characters excluding CTLs, + * ; whitespace DQUOTE, comma, semicolon, + * ; and backslash + */ + +var cookieValueRegExp = /^("?)[\u0021\u0023-\u002B\u002D-\u003A\u003C-\u005B\u005D-\u007E]*\1$/; + +/** + * RegExp to match domain-value in RFC 6265 sec 4.1.1 + * + * domain-value = + * ; defined in [RFC1034], Section 3.5, as + * ; enhanced by [RFC1123], Section 2.1 + * =