diff --git a/.dockerignore b/.dockerignore index 1aea1b7..e928c26 100644 --- a/.dockerignore +++ b/.dockerignore @@ -51,7 +51,6 @@ Thumbs.db # Project specific tests/ -docs/ *.egg-info/ .pytest_cache/ .coverage diff --git a/.github/workflows/docker-push.yml b/.github/workflows/docker-push.yml index 2ccf7b5..bf7489a 100644 --- a/.github/workflows/docker-push.yml +++ b/.github/workflows/docker-push.yml @@ -35,4 +35,4 @@ jobs: context: . platforms: linux/amd64,linux/arm64 push: true - tags: ghcr.io/agile-learning-institute/stage0_mongodb_api:latest + tags: ghcr.io/agile-learning-institute/mongodb_configurator_api:latest diff --git a/CONTRIBUTING.md b/CONTRIBUTING.md deleted file mode 100644 index a04f5c4..0000000 --- a/CONTRIBUTING.md +++ /dev/null @@ -1,199 +0,0 @@ -# Contributing to stage0_mongodb_api - -This document provides information for developers contributing to the stage0_mongodb_api project. - -## Development Standards - -This project follows the [Stage0 development standards](https://github.com/agile-learning-institute/stage0/blob/main/developer_edition/docs/contributing.md) and implements [API standards](https://github.com/agile-learning-institute/stage0/blob/main/developer_edition/docs/api-standards.md) for consistency across the platform. The service is designed with [configurability](https://github.com/agile-learning-institute/stage0/blob/main/developer_edition/docs/service-configurability.md) and [observability](https://github.com/agile-learning-institute/stage0/blob/main/developer_edition/docs/service-observability.md) in mind. - -## Separation of Concerns - -### Application Entry Point - -- **server.py** - Main Flask application entry point that initializes the API server, registers routes, and handles startup/shutdown logic - -### API Layer - -- **Routes** - Flask Blueprint modules that handle HTTP requests and responses - - **collection_routes.py** - Collection management endpoints (`/api/collections`) - - **render_routes.py** - Schema rendering endpoints (`/api/render`) - - **config_routes.py** - Configuration endpoints (`/api/config`) from stage0_py_utils - -- **Services** - Business logic layer that coordinates operations between routes and managers - - **collection_service.py** - Collection processing and management operations - - **render_service.py** - Schema rendering operations - -### Core Managers - -- **ConfigManager** - Loads collection configurations and orchestrates version processing workflows -- **VersionManager** - Tracks collection versions in MongoDB and provides version comparison -- **SchemaManager** - Handles schema loading, validation, rendering, and application to MongoDB -- **IndexManager** - Manages MongoDB index creation and deletion -- **MigrationManager** - Executes data migration pipelines using MongoDB aggregations - -### Supporting Components - -- **VersionNumber** - Parses and compares version strings (major.minor.patch.enumerator) -- **SchemaRenderer** - Renders schemas in JSON and BSON formats -- **SchemaValidator** - Validates schema definitions and configurations -- **SchemaTypes** - Type definitions and enums for schema operations - -## Development Setup - -### Prerequisites - -- Stage0 [Developers Edition](https://github.com/agile-learning-institute/stage0/blob/main/developer_edition/README.md) - -### Quick Start - -```bash -# Clone the repository -git clone git@github.com:agile-learning-institute/stage0_mongodb_api.git -cd stage0_mongodb_api -pipenv run service -# Open http://localhost:8082/ -``` - -### 
Developer Commands
-
-```bash
-# Configure environment for testing
-export MONGO_DB_NAME=test_database
-export MONGO_CONNECTION_STRING=mongodb://localhost:27017/?replicaSet=rs0
-
-# Select a test_case for the server
-export INPUT_FOLDER=./tests/test_cases/small_sample
-export INPUT_FOLDER=./tests/test_cases/large_sample
-
-# Set Debug Mode if needed
-export LOGGING_LEVEL=DEBUG
-
-# Install dependencies
-pipenv install --dev
-
-# Run Unit Tests and generate coverage report
-pipenv run test
-
-# Run a backing mongo database
-pipenv run database
-
-## All run-locally commands assume the database is running
-# Start server locally
-pipenv run local
-
-# Start locally with debugging
-pipenv run debug
-
-# Run locally in Batch mode (process and shut down)
-pipenv run batch
-
-# Build container after code changes
-pipenv run build
-
-# Start Containerized Stack (Database, API, and SPA)
-pipenv run service
-
-# Stop the testing containers
-pipenv run down
-
-#####################
-# Black Box Testing #
-
-# MongoDB Utilities
-pipenv run db-drop-silent   # Drop the testing database
-pipenv run db-compare       # Compare the database to a known set of data
-pipenv run db-harvest       # Update the set of known data from the database
-
-# Run StepCI black box testing
-pipenv run stepci-observability
-pipenv run stepci-small
-pipenv run stepci-large
-
-# Combine DB actions with Batch testing
-pipenv run db-drop-silent
-pipenv run db-compare       # Should fail
-pipenv run batch
-pipenv run db-compare       # Should pass
-
-# Combine DB actions, containerized runtime, and StepCI testing
-pipenv run service
-pipenv run db-compare       # Should fail
-pipenv run stepci-large
-pipenv run db-compare       # Should pass
-
-# Use the SPA to find errors and test configuration
-pipenv run service          # if it's not already running
-pipenv run db-compare       # Should fail
-# visit http://localhost:8082 and "process all"
-pipenv run db-compare       # Should pass
-
-```
-
-## Testing
-
-### Test Structure
-
-The `tests/` directory contains Python unit tests, StepCI black box tests, and test data.
-
-```
-tests/
-├── test_server.py          # Server.py unit tests
-├── managers/               # Manager class unit tests
-├── routes/                 # Route class unit tests
-├── services/               # Service layer unit tests
-├── test_cases/             # Test data
-│   ├── small_sample/       # Simple test configuration
-│   ├── large_sample/       # Complex test configuration
-│   ├── empty_input/        # Load Error testing
-│   ├── .../                # Additional test cases
-```
-
-### Test Cases
-
-The `tests/test_cases/` directory contains test scenarios:
-
-- **small_sample**: Minimal configuration with one collection for basic functionality testing
-- **large_sample**: Complex multi-collection setup with relationships and advanced features
-- **validation_errors**: Test cases for error handling and validation scenarios
-- **minimum_valid**: Empty configuration for edge case testing
-
-If you need a new set of test data to validate features you are adding, feel free to add a new test case folder. Take note of the existing unit tests that use the test data.
-
-### Load and Validation Errors
- Load and validation unit testing leverages test cases with known errors. Assertions verify that the expected errors were raised by checking the unique identifiers thrown in the code. If you introduce new tests, make sure you add new unique identifiers for them.
-
-### Rendering Tests
- Rendering tests for both the small_sample and large_sample test cases are validated against the expected output found in the `tests/test_cases/{case}/expected/json_schema` and `expected/bson_schema` folders.
If your new test case needs to include rendering tests, you can add the expected output there and extend the rendering unit tests.
-
-## CURL Examples
-
-```bash
-# Get Configuration
-curl -X GET http://localhost:8081/api/config
-
-# Health Check
-curl -X GET http://localhost:8081/health
-
-# List Collections
-curl -X GET http://localhost:8081/api/collections/
-
-# Get a Collection Config
-curl -X GET http://localhost:8081/api/collections/{collection_name}
-
-# Process All Collections
-curl -X POST http://localhost:8081/api/collections/
-
-# Process Specific Collection
-curl -X POST http://localhost:8081/api/collections/{collection_name}
-
-# Render BSON Schema
-curl -X GET http://localhost:8081/api/render/bson_schema/{version_name}
-
-# Render JSON Schema
-curl -X GET http://localhost:8081/api/render/json_schema/{version_name}
-
-# Render OpenAPI Specification
-curl -X GET http://localhost:8081/api/render/openapi/{version_name}
-
-```
-
diff --git a/Dockerfile b/Dockerfile
index d74f8cb..61a2a65 100644
--- a/Dockerfile
+++ b/Dockerfile
@@ -1,38 +1,45 @@
-# Stage 1: Build stage
-FROM python:3.12-slim as build
+# Use Python 3.12 slim image
+FROM python:3.12-slim
-# Set the working directory in the container
-WORKDIR /app
+# Set the working directory
+WORKDIR /opt/mongo_configurator
-# Copy the entire context to the container
-COPY . .
+# Install system dependencies
+RUN apt-get update && apt-get install -y --no-install-recommends \
+    gcc \
+    && rm -rf /var/lib/apt/lists/*
-# Get the current Git branch and build time
-RUN DATE=$(date +'%Y%m%d-%H%M%S') && \
-    echo "${DATE}" > /app/BUILT_AT
+# Copy dependency files first for better caching
+COPY Pipfile Pipfile.lock ./
-# Stage 2: Production stage
-FROM python:3.12-slim
+# Install pipenv
+RUN pip install pipenv
-# Set the working directory in the container
-WORKDIR /opt/stage0_mongodb_api
+# Install dependencies
+RUN pipenv install --deploy --system
-# Copy the entire source code and the BUILT_AT file from the build stage
-COPY --from=build /app/ /opt/stage0_mongodb_api/
+# Copy application code
+COPY . .
-# Install pipenv and dependencies
-COPY Pipfile Pipfile.lock /opt/stage0_mongodb_api/
-RUN pip install pipenv && pipenv install --deploy --system
+# Create build timestamp
+RUN echo $(date +'%Y%m%d-%H%M%S') > /opt/mongo_configurator/BUILT_AT
-# Install Gunicorn for running the Flask app in production
+# Install Gunicorn for production
 RUN pip install gunicorn
-# Expose the port the app will run on
+# Create non-root user for security
+RUN useradd --create-home --shell /bin/bash app && \
+    chown -R app:app /opt/mongo_configurator
+
+# Switch to non-root user
+USER app
+
+# Expose the port
 EXPOSE 8081
-# Set Environment Variables
-ENV PYTHONPATH=/opt/stage0_mongodb_api/stage0_mongodb_api
-ENV MONGODB_API_PORT=8081
+# Set environment variables
+ENV PYTHONPATH=/opt/mongo_configurator/configurator
+ENV API_PORT=8081
-# Command to run the application using Gunicorn
-CMD exec gunicorn --bind 0.0.0.0:${MONGODB_API_PORT} --timeout 120 --preload stage0_mongodb_api.server:app
+# Command to run the application
+CMD ["gunicorn", "--bind", "0.0.0.0:8081", "--timeout", "10", "--preload", "configurator.server:app"]
diff --git a/Pipfile b/Pipfile
index 828b611..3490951 100644
--- a/Pipfile
+++ b/Pipfile
@@ -4,33 +4,27 @@ verify_ssl = true
 name = "pypi"
 [scripts]
-local = "sh -c 'PYTHONPATH=$(pwd)/stage0_mongodb_api python3 -m server'"
-debug = "sh -c 'PYTHONPATH=$(pwd)/stage0_mongodb_api LOGGING_LEVEL=DEBUG python3 -m server'"
-batch = "sh -c 'PYTHONPATH=$(pwd)/stage0_mongodb_api AUTO_PROCESS=True EXIT_AFTER_PROCESSING=True LOAD_TEST_DATA=True python3 -m server'"
-test = "sh -c 'PYTHONPATH=$(pwd)/stage0_mongodb_api LOGGING_LEVEL=CRITICAL coverage run -m unittest discover -s ./tests -p \"test_*.py\" -v && echo \"\\n=== COVERAGE REPORT ===\" && coverage report && coverage html'"
-stepci-observability = "stepci run ./tests/stepci/observability.yaml"
-stepci-small = "stepci run ./tests/stepci/small_sample.yaml"
-stepci-large = "stepci run ./tests/stepci/large_sample.yaml"
-db-drop = "python tests/db_util.py drop"
-db-drop-silent = "python tests/db_util.py drop --passphrase DROP_DROWSSAP_YEK"
-db-compare = "python tests/db_util.py compare"
-db-harvest = "python tests/db_util.py harvest"
-build = "docker build --tag ghcr.io/agile-learning-institute/stage0_mongodb_api:latest ."
-service = "sh -c 'pipenv run down && docker compose --profile mongodb up --detach && echo `Visit http://localhost:8082/`'"
-database = "sh -c 'pipenv run down && docker compose --profile mongodb-only up --detach'"
-down = "docker compose down mongodb_spa mongodb_api mongodb"
+local = "sh -c 'PYTHONPATH=$(pwd)/configurator BUILT_AT=Local LOAD_TEST_DATA=True python3 -m server'"
+debug = "sh -c 'PYTHONPATH=$(pwd)/configurator BUILT_AT=Local LOAD_TEST_DATA=True LOGGING_LEVEL=DEBUG python3 -m server'"
+batch = "sh -c 'PYTHONPATH=$(pwd)/configurator AUTO_PROCESS=True EXIT_AFTER_PROCESSING=True LOAD_TEST_DATA=True python3 -m server'"
+test = "sh -c 'PYTHONPATH=$(pwd)/configurator LOGGING_LEVEL=CRITICAL coverage run -m unittest discover -s ./tests -p \"test_*.py\" -v && echo \"\\n=== COVERAGE REPORT ===\" && coverage report && coverage html'"
+stepci = "stepci run ./tests/stepci/workflow.yaml"
+container = "docker build --tag ghcr.io/agile-learning-institute/mongodb_configurator_api:latest ."
+database = "sh -c 'pipenv run down && docker compose --profile mongodb up --detach'" +api = "sh -c 'pipenv run down && docker compose --profile configurator-api up --detach'" +service = "sh -c 'pipenv run down && docker compose --profile configurator up --detach && echo `Visit http://localhost:8082/`'" +down = "docker compose down mongodb configurator_api configurator_spa" [packages] python-dotenv = "*" flask = "*" prometheus-flask-exporter = "*" pymongo = "*" -stage0-py-utils = "*" pyyaml = ">=6.0.1" -stage0-mongodb-api = {file = ".", editable = true} [dev-packages] coverage = "*" +pytest = "*" [requires] python_version = "3.12" diff --git a/Pipfile.lock b/Pipfile.lock index d2edf54..021db51 100644 --- a/Pipfile.lock +++ b/Pipfile.lock @@ -1,7 +1,7 @@ { "_meta": { "hash": { - "sha256": "b50156e27bd79a2bab374f7e6a05f2b3d94aac4cb91ba4f83a55b12ffcd8988e" + "sha256": "9610a5230ad7d6efdf4243d004719681658976074dff1c15fa1df7e4c7233dfa" }, "pipfile-spec": 6, "requires": { @@ -16,138 +16,6 @@ ] }, "default": { - "aiohappyeyeballs": { - "hashes": [ - "sha256:c3f9d0113123803ccadfdf3f0faa505bc78e6a72d1cc4806cbd719826e943558", - "sha256:f349ba8f4b75cb25c99c5c2d84e997e485204d2902a9597802b0371f09331fb8" - ], - "markers": "python_version >= '3.9'", - "version": "==2.6.1" - }, - "aiohttp": { - "hashes": [ - "sha256:0022de47ef63fd06b065d430ac79c6b0bd24cdae7feaf0e8c6bac23b805a23a8", - "sha256:003038e83f1a3ff97409999995ec02fe3008a1d675478949643281141f54751d", - "sha256:03d5eb3cfb4949ab4c74822fb3326cd9655c2b9fe22e4257e2100d44215b2e2b", - "sha256:04076d8c63471e51e3689c93940775dc3d12d855c0c80d18ac5a1c68f0904358", - "sha256:050bd277dfc3768b606fd4eae79dd58ceda67d8b0b3c565656a89ae34525d15e", - "sha256:0653d15587909a52e024a261943cf1c5bdc69acb71f411b0dd5966d065a51a47", - "sha256:06b07c418bde1c8e737d8fa67741072bd3f5b0fb66cf8c0655172188c17e5fa6", - "sha256:09c4767af0b0b98c724f5d47f2bf33395c8986995b0a9dab0575ca81a554a8c0", - "sha256:0aa580cf80558557285b49452151b9c69f2fa3ad94c5c9e76e684719a8791b73", - "sha256:0f7df1f620ec40f1a7fbcb99ea17d7326ea6996715e78f71a1c9a021e31b96b8", - "sha256:0fcda86f6cb318ba36ed8f1396a6a4a3fd8f856f84d426584392083d10da4de0", - "sha256:119c79922a7001ca6a9e253228eb39b793ea994fd2eccb79481c64b5f9d2a055", - "sha256:1561db63fa1b658cd94325d303933553ea7d89ae09ff21cc3bcd41b8521fbbb6", - "sha256:1582745eb63df267c92d8b61ca655a0ce62105ef62542c00a74590f306be8cb5", - "sha256:177f52420cde4ce0bb9425a375d95577fe082cb5721ecb61da3049b55189e4e6", - "sha256:1b6f46613031dbc92bdcaad9c4c22c7209236ec501f9c0c5f5f0b6a689bf50f3", - "sha256:1e8b27b2d414f7e3205aa23bb4a692e935ef877e3a71f40d1884f6e04fd7fa74", - "sha256:2332b4c361c05ecd381edb99e2a33733f3db906739a83a483974b3df70a51b40", - "sha256:2637a60910b58f50f22379b6797466c3aa6ae28a6ab6404e09175ce4955b4e6a", - "sha256:29c955989bf4c696d2ededc6b0ccb85a73623ae6e112439398935362bacfaaf6", - "sha256:29e08111ccf81b2734ae03f1ad1cb03b9615e7d8f616764f22f71209c094f122", - "sha256:3062d4ad53b36e17796dce1c0d6da0ad27a015c321e663657ba1cc7659cfc710", - "sha256:34ebf1aca12845066c963016655dac897651e1544f22a34c9b461ac3b4b1d3aa", - "sha256:36f6c973e003dc9b0bb4e8492a643641ea8ef0e97ff7aaa5c0f53d68839357b4", - "sha256:372feeace612ef8eb41f05ae014a92121a512bd5067db8f25101dd88a8db11da", - "sha256:3fea41a2c931fb582cb15dc86a3037329e7b941df52b487a9f8b5aa960153cbd", - "sha256:47e2da578528264a12e4e3dd8dd72a7289e5f812758fe086473fab037a10fcce", - "sha256:4cd71c9fb92aceb5a23c4c39d8ecc80389c178eba9feab77f19274843eb9412d", - "sha256:4dc507481266b410dede95dd9f26c8d6f5a14315372cc48a6e43eac652237d9b", - 
"sha256:4f95db8c8b219bcf294a53742c7bda49b80ceb9d577c8e7aa075612b7f39ffb7", - "sha256:5304d74867028cca8f64f1cc1215eb365388033c5a691ea7aa6b0dc47412f495", - "sha256:532542cb48691179455fab429cdb0d558b5e5290b033b87478f2aa6af5d20ace", - "sha256:5421af8f22a98f640261ee48aae3a37f0c41371e99412d55eaf2f8a46d5dad29", - "sha256:54fbbe6beafc2820de71ece2198458a711e224e116efefa01b7969f3e2b3ddae", - "sha256:55683615813ce3601640cfaa1041174dc956d28ba0511c8cbd75273eb0587014", - "sha256:5a178390ca90419bfd41419a809688c368e63c86bd725e1186dd97f6b89c2706", - "sha256:5a1ecce0ed281bec7da8550da052a6b89552db14d0a0a45554156f085a912f48", - "sha256:5d6c85ac7dd350f8da2520bac8205ce99df4435b399fa7f4dc4a70407073e390", - "sha256:60f2ce6b944e97649051d5f5cc0f439360690b73909230e107fd45a359d3e911", - "sha256:6383dd0ffa15515283c26cbf41ac8e6705aab54b4cbb77bdb8935a713a89bee9", - "sha256:64d1f24ee95a2d1e094a4cd7a9b7d34d08db1bbcb8aa9fb717046b0a884ac294", - "sha256:6548a411bc8219b45ba2577716493aa63b12803d1e5dc70508c539d0db8dbf5a", - "sha256:663d8ee3ffb3494502ebcccb49078faddbb84c1d870f9c1dd5a29e85d1f747ce", - "sha256:671f41e6146a749b6c81cb7fd07f5a8356d46febdaaaf07b0e774ff04830461e", - "sha256:69fc1909857401b67bf599c793f2183fbc4804717388b0b888f27f9929aa41f3", - "sha256:6a83797a0174e7995e5edce9dcecc517c642eb43bc3cba296d4512edf346eee2", - "sha256:6af355b483e3fe9d7336d84539fef460120c2f6e50e06c658fe2907c69262d6b", - "sha256:6cbfc73179bd67c229eb171e2e3745d2afd5c711ccd1e40a68b90427f282eab1", - "sha256:78f64e748e9e741d2eccff9597d09fb3cd962210e5b5716047cbb646dc8fe06f", - "sha256:7a0b9170d5d800126b5bc89d3053a2363406d6e327afb6afaeda2d19ee8bb103", - "sha256:7c229b1437aa2576b99384e4be668af1db84b31a45305d02f61f5497cfa6f60c", - "sha256:7d7e68787a2046b0e44ba5587aa723ce05d711e3a3665b6b7545328ac8e3c0dd", - "sha256:81b0fcbfe59a4ca41dc8f635c2a4a71e63f75168cc91026c61be665945739e2d", - "sha256:846104f45d18fb390efd9b422b27d8f3cf8853f1218c537f36e71a385758c896", - "sha256:8605e22d2a86b8e51ffb5253d9045ea73683d92d47c0b1438e11a359bdb94462", - "sha256:893a4639694c5b7edd4bdd8141be296042b6806e27cc1d794e585c43010cc294", - "sha256:8a94daa873465d518db073bd95d75f14302e0208a08e8c942b2f3f1c07288a75", - "sha256:921bc91e602d7506d37643e77819cb0b840d4ebb5f8d6408423af3d3bf79a7b7", - "sha256:9445c1842680efac0f81d272fd8db7163acfcc2b1436e3f420f4c9a9c5a50795", - "sha256:9a27da9c3b5ed9d04c36ad2df65b38a96a37e9cfba6f1381b842d05d98e6afe9", - "sha256:9b28ea2f708234f0a5c44eb6c7d9eb63a148ce3252ba0140d050b091b6e842d1", - "sha256:a0be857f0b35177ba09d7c472825d1b711d11c6d0e8a2052804e3b93166de1ad", - "sha256:a0c4725fae86555bbb1d4082129e21de7264f4ab14baf735278c974785cd2041", - "sha256:a5734d8469a5633a4e9ffdf9983ff7cdb512524645c7a3d4bc8a3de45b935ac3", - "sha256:a77b48997c66722c65e157c06c74332cdf9c7ad00494b85ec43f324e5c5a9b9a", - "sha256:a946d3702f7965d81f7af7ea8fb03bb33fe53d311df48a46eeca17e9e0beed2d", - "sha256:a95cf9f097498f35c88e3609f55bb47b28a5ef67f6888f4390b3d73e2bac6177", - "sha256:ac941a80aeea2aaae2875c9500861a3ba356f9ff17b9cb2dbfb5cbf91baaf5bf", - "sha256:ad7c8e5c25f2a26842a7c239de3f7b6bfb92304593ef997c04ac49fb703ff4d7", - "sha256:b103a7e414b57e6939cc4dece8e282cfb22043efd0c7298044f6594cf83ab347", - "sha256:b8ed8c38a1c584fe99a475a8f60eefc0b682ea413a84c6ce769bb19a7ff1c5ef", - "sha256:bb18f00396d22e2f10cd8825d671d9f9a3ba968d708a559c02a627536b36d91c", - "sha256:c332c6bb04650d59fb94ed96491f43812549a3ba6e7a16a218e612f99f04145e", - "sha256:d4a18e61f271127465bdb0e8ff36e8f02ac4a32a80d8927aa52371e93cd87938", - "sha256:d4f5becd2a5791829f79608c6f3dc745388162376f310eb9c142c985f9441cc1", - 
"sha256:d59227776ee2aa64226f7e086638baa645f4b044f2947dbf85c76ab11dcba073", - "sha256:d640191016763fab76072c87d8854a19e8e65d7a6fcfcbf017926bdbbb30a7e5", - "sha256:d6946bae55fd36cfb8e4092c921075cde029c71c7cb571d72f1079d1e4e013bc", - "sha256:d7eea18b52f23c050ae9db5d01f3d264ab08f09e7356d6f68e3f3ac2de9dfabb", - "sha256:e72d17fe0974ddeae8ed86db297e23dba39c7ac36d84acdbb53df2e18505a013", - "sha256:e986067357550d1aaa21cfe9897fa19e680110551518a5a7cf44e6c5638cb8b5", - "sha256:eabded0c2b2ef56243289112c48556c395d70150ce4220d9008e6b4b3dd15690", - "sha256:f0f8f6a85a0006ae2709aa4ce05749ba2cdcb4b43d6c21a16c8517c16593aabe", - "sha256:f3854fbde7a465318ad8d3fc5bef8f059e6d0a87e71a0d3360bb56c0bf87b18a", - "sha256:fcc30ad4fb5cb41a33953292d45f54ef4066746d625992aeac33b8c681173178", - "sha256:fef8d50dfa482925bb6b4c208b40d8e9fa54cecba923dc65b825a72eed9a5dbd" - ], - "markers": "python_version >= '3.9'", - "version": "==3.12.13" - }, - "aiosignal": { - "hashes": [ - "sha256:45cde58e409a301715980c2b01d0c28bdde3770d8290b5eb2173759d9acb31a5", - "sha256:a8c255c66fafb1e499c9351d0bf32ff2d8a0321595ebac3b93713656d2436f54" - ], - "markers": "python_version >= '3.9'", - "version": "==1.3.2" - }, - "annotated-types": { - "hashes": [ - "sha256:1f02e8b43a8fbbc3f3e0d4f0f4bfc8131bcb4eebe8849b8e5c773f3a1c582a53", - "sha256:aff07c09a53a08bc8cfccb9c85b05f1aa9a2a6f23728d790723543408344ce89" - ], - "markers": "python_version >= '3.8'", - "version": "==0.7.0" - }, - "anyio": { - "hashes": [ - "sha256:673c0c244e15788651a4ff38710fea9675823028a6f08a5eda409e0c9840a028", - "sha256:9f76d541cad6e36af7beb62e978876f3b41e3e04f2c1fbf0884604c0a9c4d93c" - ], - "markers": "python_version >= '3.9'", - "version": "==4.9.0" - }, - "attrs": { - "hashes": [ - "sha256:427318ce031701fea540783410126f03899a97ffc6f61596ad581ac2e40e3bc3", - "sha256:75d7cefc7fb576747b2c81b4442d4d4a1ce0900973527c011d1030fd3bf4af1b" - ], - "markers": "python_version >= '3.8'", - "version": "==25.3.0" - }, "blinker": { "hashes": [ "sha256:b4ce2265a7abece45e7cc896e98dbebe6cead56bcf805a3d23136d145f5445bf", @@ -156,14 +24,6 @@ "markers": "python_version >= '3.9'", "version": "==1.9.0" }, - "certifi": { - "hashes": [ - "sha256:2e0c7ce7cb5d8f8634ca55d2ba7e6ec2689a2fd6537d8dec1296a477a4910057", - "sha256:d747aa5a8b9bbbb1bb8c22bb13e22bd1f18e9796defa16bab421f7f7a317323b" - ], - "markers": "python_version >= '3.7'", - "version": "==2025.6.15" - }, "click": { "hashes": [ "sha256:27c491cc05d968d271d5a1db13e3b5a184636d9d930f148c50b038f0d0646202", @@ -172,21 +32,6 @@ "markers": "python_version >= '3.10'", "version": "==8.2.1" }, - "discord": { - "hashes": [ - "sha256:cc1ee2dbe6df218ca51519af355b97e87309f8230f58c7f34885feb8e8a76145", - "sha256:d7959418799dd3b1e896685812d880169c193468b061b3431fa2a4664febd3da" - ], - "version": "==2.3.2" - }, - "discord.py": { - "hashes": [ - "sha256:01cd362023bfea1a4a1d43f5280b5ef00cad2c7eba80098909f98bf28e578524", - "sha256:81f23a17c50509ffebe0668441cb80c139e74da5115305f70e27ce821361295a" - ], - "markers": "python_version >= '3.8'", - "version": "==2.5.2" - }, "dnspython": { "hashes": [ "sha256:b4c34b7d10b51bcc3a5071e7b8dee77939f1e878477eeecc965e9835f63c6c86", @@ -204,148 +49,6 @@ "markers": "python_version >= '3.9'", "version": "==3.1.1" }, - "frozenlist": { - "hashes": [ - "sha256:04fb24d104f425da3540ed83cbfc31388a586a7696142004c577fa61c6298c3f", - "sha256:05579bf020096fe05a764f1f84cd104a12f78eaab68842d036772dc6d4870b4b", - "sha256:0aa7e176ebe115379b5b1c95b4096fb1c17cce0847402e227e712c27bdb5a949", - 
"sha256:1073557c941395fdfcfac13eb2456cb8aad89f9de27bae29fabca8e563b12615", - "sha256:1137b78384eebaf70560a36b7b229f752fb64d463d38d1304939984d5cb887b6", - "sha256:15900082e886edb37480335d9d518cec978afc69ccbc30bd18610b7c1b22a718", - "sha256:15a7eaba63983d22c54d255b854e8108e7e5f3e89f647fc854bd77a237e767df", - "sha256:1a85e345b4c43db8b842cab1feb41be5cc0b10a1830e6295b69d7310f99becaf", - "sha256:1e63344c4e929b1a01e29bc184bbb5fd82954869033765bfe8d65d09e336a677", - "sha256:1eaa7e9c6d15df825bf255649e05bd8a74b04a4d2baa1ae46d9c2d00b2ca2cb5", - "sha256:1ed8d2fa095aae4bdc7fdd80351009a48d286635edffee66bf865e37a9125c50", - "sha256:1f5906d3359300b8a9bb194239491122e6cf1444c2efb88865426f170c262cdb", - "sha256:21884e23cffabb157a9dd7e353779077bf5b8f9a58e9b262c6caad2ef5f80a56", - "sha256:24c34bea555fe42d9f928ba0a740c553088500377448febecaa82cc3e88aa1fa", - "sha256:284d233a8953d7b24f9159b8a3496fc1ddc00f4db99c324bd5fb5f22d8698ea7", - "sha256:290a172aae5a4c278c6da8a96222e6337744cd9c77313efe33d5670b9f65fc43", - "sha256:2e310d81923c2437ea8670467121cc3e9b0f76d3043cc1d2331d56c7fb7a3a8f", - "sha256:2ea2a7369eb76de2217a842f22087913cdf75f63cf1307b9024ab82dfb525938", - "sha256:32dc2e08c67d86d0969714dd484fd60ff08ff81d1a1e40a77dd34a387e6ebc0c", - "sha256:34a69a85e34ff37791e94542065c8416c1afbf820b68f720452f636d5fb990cd", - "sha256:376b6222d114e97eeec13d46c486facd41d4f43bab626b7c3f6a8b4e81a5192c", - "sha256:3789ebc19cb811163e70fe2bd354cea097254ce6e707ae42e56f45e31e96cb8e", - "sha256:387cbfdcde2f2353f19c2f66bbb52406d06ed77519ac7ee21be0232147c2592d", - "sha256:3a14027124ddb70dfcee5148979998066897e79f89f64b13328595c4bdf77c81", - "sha256:3bf8010d71d4507775f658e9823210b7427be36625b387221642725b515dcf3e", - "sha256:3d688126c242a6fabbd92e02633414d40f50bb6002fa4cf995a1d18051525657", - "sha256:3dabd5a8f84573c8d10d8859a50ea2dec01eea372031929871368c09fa103478", - "sha256:3dbf9952c4bb0e90e98aec1bd992b3318685005702656bc6f67c1a32b76787f2", - "sha256:3fbba20e662b9c2130dc771e332a99eff5da078b2b2648153a40669a6d0e36ca", - "sha256:400ddd24ab4e55014bba442d917203c73b2846391dd42ca5e38ff52bb18c3c5e", - "sha256:41be2964bd4b15bf575e5daee5a5ce7ed3115320fb3c2b71fca05582ffa4dc9e", - "sha256:426c7bc70e07cfebc178bc4c2bf2d861d720c4fff172181eeb4a4c41d4ca2ad3", - "sha256:43a82fce6769c70f2f5a06248b614a7d268080a9d20f7457ef10ecee5af82b63", - "sha256:45a6f2fdbd10e074e8814eb98b05292f27bad7d1883afbe009d96abdcf3bc898", - "sha256:46d84d49e00c9429238a7ce02dc0be8f6d7cd0cd405abd1bebdc991bf27c15bd", - "sha256:488d0a7d6a0008ca0db273c542098a0fa9e7dfaa7e57f70acef43f32b3f69dca", - "sha256:4a646531fa8d82c87fe4bb2e596f23173caec9185bfbca5d583b4ccfb95183e2", - "sha256:4e7e9652b3d367c7bd449a727dc79d5043f48b88d0cbfd4f9f1060cf2b414104", - "sha256:52109052b9791a3e6b5d1b65f4b909703984b770694d3eb64fad124c835d7cba", - "sha256:563b72efe5da92e02eb68c59cb37205457c977aa7a449ed1b37e6939e5c47c6a", - "sha256:5fc4df05a6591c7768459caba1b342d9ec23fa16195e744939ba5914596ae3e1", - "sha256:61d1a5baeaac6c0798ff6edfaeaa00e0e412d49946c53fae8d4b8e8b3566c4ae", - "sha256:69cac419ac6a6baad202c85aaf467b65ac860ac2e7f2ac1686dc40dbb52f6577", - "sha256:6a5c505156368e4ea6b53b5ac23c92d7edc864537ff911d2fb24c140bb175e60", - "sha256:6aeac207a759d0dedd2e40745575ae32ab30926ff4fa49b1635def65806fddee", - "sha256:6eb93efb8101ef39d32d50bce242c84bcbddb4f7e9febfa7b524532a239b4464", - "sha256:716a9973a2cc963160394f701964fe25012600f3d311f60c790400b00e568b61", - "sha256:72c1b0fe8fe451b34f12dce46445ddf14bd2a5bcad7e324987194dc8e3a74c86", - "sha256:73bd45e1488c40b63fe5a7df892baf9e2a4d4bb6409a2b3b78ac1c6236178e01", - 
"sha256:74739ba8e4e38221d2c5c03d90a7e542cb8ad681915f4ca8f68d04f810ee0a87", - "sha256:765bb588c86e47d0b68f23c1bee323d4b703218037765dcf3f25c838c6fecceb", - "sha256:79b2ffbba483f4ed36a0f236ccb85fbb16e670c9238313709638167670ba235f", - "sha256:7d536ee086b23fecc36c2073c371572374ff50ef4db515e4e503925361c24f71", - "sha256:7edf5c043c062462f09b6820de9854bf28cc6cc5b6714b383149745e287181a8", - "sha256:82d664628865abeb32d90ae497fb93df398a69bb3434463d172b80fc25b0dd7d", - "sha256:836b42f472a0e006e02499cef9352ce8097f33df43baaba3e0a28a964c26c7d2", - "sha256:8bd7eb96a675f18aa5c553eb7ddc24a43c8c18f22e1f9925528128c052cdbe00", - "sha256:8fc5d5cda37f62b262405cf9652cf0856839c4be8ee41be0afe8858f17f4c94b", - "sha256:912a7e8375a1c9a68325a902f3953191b7b292aa3c3fb0d71a216221deca460b", - "sha256:9537c2777167488d539bc5de2ad262efc44388230e5118868e172dd4a552b146", - "sha256:960d67d0611f4c87da7e2ae2eacf7ea81a5be967861e0c63cf205215afbfac59", - "sha256:974c5336e61d6e7eb1ea5b929cb645e882aadab0095c5a6974a111e6479f8878", - "sha256:99886d98e1643269760e5fe0df31e5ae7050788dd288947f7f007209b8c33f08", - "sha256:9a19e85cc503d958abe5218953df722748d87172f71b73cf3c9257a91b999890", - "sha256:9a5af342e34f7e97caf8c995864c7a396418ae2859cc6fdf1b1073020d516a7e", - "sha256:9b35db7ce1cd71d36ba24f80f0c9e7cff73a28d7a74e91fe83e23d27c7828750", - "sha256:9ccec739a99e4ccf664ea0775149f2749b8a6418eb5b8384b4dc0a7d15d304cb", - "sha256:a0fd1bad056a3600047fb9462cff4c5322cebc59ebf5d0a3725e0ee78955001d", - "sha256:a26f205c9ca5829cbf82bb2a84b5c36f7184c4316617d7ef1b271a56720d6b30", - "sha256:a47f2abb4e29b3a8d0b530f7c3598badc6b134562b1a5caee867f7c62fee51e3", - "sha256:a6f86e4193bb0e235ef6ce3dde5cbabed887e0b11f516ce8a0f4d3b33078ec2d", - "sha256:aa51e147a66b2d74de1e6e2cf5921890de6b0f4820b257465101d7f37b49fb5a", - "sha256:aa57daa5917f1738064f302bf2626281a1cb01920c32f711fbc7bc36111058a8", - "sha256:ac64b6478722eeb7a3313d494f8342ef3478dff539d17002f849101b212ef97c", - "sha256:acd03d224b0175f5a850edc104ac19040d35419eddad04e7cf2d5986d98427f1", - "sha256:af369aa35ee34f132fcfad5be45fbfcde0e3a5f6a1ec0712857f286b7d20cca9", - "sha256:b0d5ce521d1dd7d620198829b87ea002956e4319002ef0bc8d3e6d045cb4646e", - "sha256:b3950f11058310008a87757f3eee16a8e1ca97979833239439586857bc25482e", - "sha256:b8c05e4c8e5f36e5e088caa1bf78a687528f83c043706640a92cb76cd6999384", - "sha256:bcacfad3185a623fa11ea0e0634aac7b691aa925d50a440f39b458e41c561d98", - "sha256:bd8c4e58ad14b4fa7802b8be49d47993182fdd4023393899632c88fd8cd994eb", - "sha256:bfe2b675cf0aaa6d61bf8fbffd3c274b3c9b7b1623beb3809df8a81399a4a9c4", - "sha256:c0303e597eb5a5321b4de9c68e9845ac8f290d2ab3f3e2c864437d3c5a30cd65", - "sha256:c193dda2b6d49f4c4398962810fa7d7c78f032bf45572b3e04dd5249dff27e08", - "sha256:c70db4a0ab5ab20878432c40563573229a7ed9241506181bba12f6b7d0dc41cb", - "sha256:cbb65198a9132ebc334f237d7b0df163e4de83fb4f2bdfe46c1e654bdb0c5d43", - "sha256:cc4df77d638aa2ed703b878dd093725b72a824c3c546c076e8fdf276f78ee84a", - "sha256:ce48b2fece5aeb45265bb7a58259f45027db0abff478e3077e12b05b17fb9da7", - "sha256:cea3dbd15aea1341ea2de490574a4a37ca080b2ae24e4b4f4b51b9057b4c3630", - "sha256:d1a81c85417b914139e3a9b995d4a1c84559afc839a93cf2cb7f15e6e5f6ed2d", - "sha256:d50ac7627b3a1bd2dcef6f9da89a772694ec04d9a61b66cf87f7d9446b4a0c31", - "sha256:dab46c723eeb2c255a64f9dc05b8dd601fde66d6b19cdb82b2e09cc6ff8d8b5d", - "sha256:dfcebf56f703cb2e346315431699f00db126d158455e513bd14089d992101e44", - "sha256:e22b9a99741294b2571667c07d9f8cceec07cb92aae5ccda39ea1b6052ed4319", - "sha256:e2cdfaaec6a2f9327bf43c933c0319a7c429058e8537c508964a133dffee412e", - 
"sha256:e4389e06714cfa9d47ab87f784a7c5be91d3934cd6e9a7b85beef808297cc025", - "sha256:e793a9f01b3e8b5c0bc646fb59140ce0efcc580d22a3468d70766091beb81b35", - "sha256:ee80eeda5e2a4e660651370ebffd1286542b67e268aa1ac8d6dbe973120ef7ee", - "sha256:f2038310bc582f3d6a09b3816ab01737d60bf7b1ec70f5356b09e84fb7408ab1", - "sha256:f22dac33bb3ee8fe3e013aa7b91dc12f60d61d05b7fe32191ffa84c3aafe77bd", - "sha256:f34560fb1b4c3e30ba35fa9a13894ba39e5acfc5f60f57d8accde65f46cc5e74", - "sha256:f3f4410a0a601d349dd406b5713fec59b4cee7e71678d5b17edda7f4655a940b", - "sha256:f89f65d85774f1797239693cef07ad4c97fdd0639544bad9ac4b869782eb1981", - "sha256:fe2365ae915a1fafd982c146754e1de6ab3478def8a59c86e1f7242d794f97d5" - ], - "markers": "python_version >= '3.9'", - "version": "==1.7.0" - }, - "h11": { - "hashes": [ - "sha256:4e35b956cf45792e4caa5885e69fba00bdbc6ffafbfa020300e549b208ee5ff1", - "sha256:63cf8bbe7522de3bf65932fda1d9c2772064ffb3dae62d55932da54b31cb6c86" - ], - "markers": "python_version >= '3.8'", - "version": "==0.16.0" - }, - "httpcore": { - "hashes": [ - "sha256:2d400746a40668fc9dec9810239072b40b4484b640a8c38fd654a024c7a1bf55", - "sha256:6e34463af53fd2ab5d807f399a9b45ea31c3dfa2276f15a2c3f00afff6e176e8" - ], - "markers": "python_version >= '3.8'", - "version": "==1.0.9" - }, - "httpx": { - "hashes": [ - "sha256:75e98c5f16b0f35b567856f597f06ff2270a374470a5c2392242528e3e3e42fc", - "sha256:d909fcccc110f8c7faf814ca82a9a4d816bc5a6dbfea25d6591d6985b8ba59ad" - ], - "markers": "python_version >= '3.8'", - "version": "==0.28.1" - }, - "idna": { - "hashes": [ - "sha256:12f65c9b470abda6dc35cf8e63cc574b1c52b11df2c86030af0ac09b01b13ea9", - "sha256:946d195a0d259cbba61165e88e65941f16e9b36ea6ddb97f00452bae8b1287d3" - ], - "markers": "python_version >= '3.6'", - "version": "==3.10" - }, "itsdangerous": { "hashes": [ "sha256:c6242fc49e35958c8b15141343aa660db5fc54d4f13a1db01a3f5891b98700ef", @@ -429,130 +132,6 @@ "markers": "python_version >= '3.9'", "version": "==3.0.2" }, - "multidict": { - "hashes": [ - "sha256:02fd8f32d403a6ff13864b0851f1f523d4c988051eea0471d4f1fd8010f11134", - "sha256:04cbcce84f63b9af41bad04a54d4cc4e60e90c35b9e6ccb130be2d75b71f8c17", - "sha256:056bebbeda16b2e38642d75e9e5310c484b7c24e3841dc0fb943206a72ec89d6", - "sha256:05db2f66c9addb10cfa226e1acb363450fab2ff8a6df73c622fefe2f5af6d4e7", - "sha256:0b9e59946b49dafaf990fd9c17ceafa62976e8471a14952163d10a7a630413a9", - "sha256:0db58da8eafb514db832a1b44f8fa7906fdd102f7d982025f816a93ba45e3dcb", - "sha256:0f1130b896ecb52d2a1e615260f3ea2af55fa7dc3d7c3003ba0c3121a759b18b", - "sha256:10bea2ee839a759ee368b5a6e47787f399b41e70cf0c20d90dfaf4158dfb4e55", - "sha256:12f4581d2930840295c461764b9a65732ec01250b46c6b2c510d7ee68872b140", - "sha256:1328201ee930f069961ae707d59c6627ac92e351ed5b92397cf534d1336ce557", - "sha256:135631cb6c58eac37d7ac0df380294fecdc026b28837fa07c02e459c7fb9c54e", - "sha256:14117a41c8fdb3ee19c743b1c027da0736fdb79584d61a766da53d399b71176c", - "sha256:15332783596f227db50fb261c2c251a58ac3873c457f3a550a95d5c0aa3c770d", - "sha256:159ca68bfd284a8860f8d8112cf0521113bffd9c17568579e4d13d1f1dc76b65", - "sha256:18f4eba0cbac3546b8ae31e0bbc55b02c801ae3cbaf80c247fcdd89b456ff58c", - "sha256:1bf99b4daf908c73856bd87ee0a2499c3c9a3d19bb04b9c6025e66af3fd07462", - "sha256:1c8082e5814b662de8589d6a06c17e77940d5539080cbab9fe6794b5241b76d9", - "sha256:208b9b9757060b9faa6f11ab4bc52846e4f3c2fb8b14d5680c8aac80af3dc751", - "sha256:20c5a0c3c13a15fd5ea86c42311859f970070e4e24de5a550e99d7c271d76318", - "sha256:2334cfb0fa9549d6ce2c21af2bfbcd3ac4ec3646b1b1581c88e3e2b1779ec92b", - 
"sha256:233ad16999afc2bbd3e534ad8dbe685ef8ee49a37dbc2cdc9514e57b6d589ced", - "sha256:274d416b0df887aef98f19f21578653982cfb8a05b4e187d4a17103322eeaf8f", - "sha256:295adc9c0551e5d5214b45cf29ca23dbc28c2d197a9c30d51aed9e037cb7c578", - "sha256:2e4cc8d848cd4fe1cdee28c13ea79ab0ed37fc2e89dd77bac86a2e7959a8c3bc", - "sha256:346055630a2df2115cd23ae271910b4cae40f4e336773550dca4889b12916e75", - "sha256:35712f1748d409e0707b165bf49f9f17f9e28ae85470c41615778f8d4f7d9609", - "sha256:3713303e4a6663c6d01d648a68f2848701001f3390a030edaaf3fc949c90bf7c", - "sha256:37b09ca60998e87734699e88c2363abfd457ed18cfbf88e4009a4e83788e63ed", - "sha256:3893a0d7d28a7fe6ca7a1f760593bc13038d1d35daf52199d431b61d2660602b", - "sha256:41bb9d1d4c303886e2d85bade86e59885112a7f4277af5ad47ab919a2251f306", - "sha256:42ca5aa9329a63be8dc49040f63817d1ac980e02eeddba763a9ae5b4027b9c9c", - "sha256:43571f785b86afd02b3855c5ac8e86ec921b760298d6f82ff2a61daf5a35330b", - "sha256:448e4a9afccbf297577f2eaa586f07067441e7b63c8362a3540ba5a38dc0f14a", - "sha256:4ef421045f13879e21c994b36e728d8e7d126c91a64b9185810ab51d474f27e7", - "sha256:500b84f51654fdc3944e936f2922114349bf8fdcac77c3092b03449f0e5bc2b3", - "sha256:531e331a2ee53543ab32b16334e2deb26f4e6b9b28e41f8e0c87e99a6c8e2d69", - "sha256:53becb01dd8ebd19d1724bebe369cfa87e4e7f29abbbe5c14c98ce4c383e16cd", - "sha256:540d3c06d48507357a7d57721e5094b4f7093399a0106c211f33540fdc374d55", - "sha256:555ff55a359302b79de97e0468e9ee80637b0de1fce77721639f7cd9440b3a10", - "sha256:5633a82fba8e841bc5c5c06b16e21529573cd654f67fd833650a215520a6210e", - "sha256:5bd8d6f793a787153956cd35e24f60485bf0651c238e207b9a54f7458b16d539", - "sha256:61af8a4b771f1d4d000b3168c12c3120ccf7284502a94aa58c68a81f5afac090", - "sha256:639ecc9fe7cd73f2495f62c213e964843826f44505a3e5d82805aa85cac6f89e", - "sha256:67c92ed673049dec52d7ed39f8cf9ebbadf5032c774058b4406d18c8f8fe7063", - "sha256:68e9e12ed00e2089725669bdc88602b0b6f8d23c0c95e52b95f0bc69f7fe9b55", - "sha256:6c1e61bb4f80895c081790b6b09fa49e13566df8fbff817da3f85b3a8192e36b", - "sha256:70b72e749a4f6e7ed8fb334fa8d8496384840319512746a5f42fa0aec79f4d61", - "sha256:70d974eaaa37211390cd02ef93b7e938de564bbffa866f0b08d07e5e65da783d", - "sha256:712b348f7f449948e0a6c4564a21c7db965af900973a67db432d724619b3c680", - "sha256:72d8815f2cd3cf3df0f83cac3f3ef801d908b2d90409ae28102e0553af85545a", - "sha256:7394888236621f61dcdd25189b2768ae5cc280f041029a5bcf1122ac63df79f9", - "sha256:73ab034fb8d58ff85c2bcbadc470efc3fafeea8affcf8722855fb94557f14cc5", - "sha256:766a4a5996f54361d8d5a9050140aa5362fe48ce51c755a50c0bc3706460c430", - "sha256:769841d70ca8bdd140a715746199fc6473414bd02efd678d75681d2d6a8986c5", - "sha256:775b464d31dac90f23192af9c291dc9f423101857e33e9ebf0020a10bfcf4144", - "sha256:798a9eb12dab0a6c2e29c1de6f3468af5cb2da6053a20dfa3344907eed0937cc", - "sha256:7af039820cfd00effec86bda5d8debef711a3e86a1d3772e85bea0f243a4bd65", - "sha256:7c6df517cf177da5d47ab15407143a89cd1a23f8b335f3a28d57e8b0a3dbb884", - "sha256:81ef2f64593aba09c5212a3d0f8c906a0d38d710a011f2f42759704d4557d3f2", - "sha256:877443eaaabcd0b74ff32ebeed6f6176c71850feb7d6a1d2db65945256ea535c", - "sha256:8db10f29c7541fc5da4defd8cd697e1ca429db743fa716325f236079b96f775a", - "sha256:8df25594989aebff8a130f7899fa03cbfcc5d2b5f4a461cf2518236fe6f15961", - "sha256:900eb9f9da25ada070f8ee4a23f884e0ee66fe4e1a38c3af644256a508ad81ca", - "sha256:934796c81ea996e61914ba58064920d6cad5d99140ac3167901eb932150e2e56", - "sha256:94c47ea3ade005b5976789baaed66d4de4480d0a0bf31cef6edaa41c1e7b56a6", - "sha256:9c19cea2a690f04247d43f366d03e4eb110a0dc4cd1bbeee4d445435428ed35b", - 
"sha256:9e236a7094b9c4c1b7585f6b9cca34b9d833cf079f7e4c49e6a4a6ec9bfdc68f", - "sha256:9e864486ef4ab07db5e9cb997bad2b681514158d6954dd1958dfb163b83d53e6", - "sha256:9ed948328aec2072bc00f05d961ceadfd3e9bfc2966c1319aeaf7b7c21219183", - "sha256:9f5b28c074c76afc3e4c610c488e3493976fe0e596dd3db6c8ddfbb0134dcac5", - "sha256:9f97e181f344a0ef3881b573d31de8542cc0dbc559ec68c8f8b5ce2c2e91646d", - "sha256:a2be5b7b35271f7fff1397204ba6708365e3d773579fe2a30625e16c4b4ce817", - "sha256:ab0a34a007704c625e25a9116c6770b4d3617a071c8a7c30cd338dfbadfe6485", - "sha256:acf6b97bd0884891af6a8b43d0f586ab2fcf8e717cbd47ab4bdddc09e20652d8", - "sha256:b1db4d2093d6b235de76932febf9d50766cf49a5692277b2c28a501c9637f616", - "sha256:b24576f208793ebae00280c59927c3b7c2a3b1655e443a25f753c4611bc1c373", - "sha256:b8fee016722550a2276ca2cb5bb624480e0ed2bd49125b2b73b7010b9090e888", - "sha256:b9cbc60010de3562545fa198bfc6d3825df430ea96d2cc509c39bd71e2e7d648", - "sha256:b9fe5a0e57c6dbd0e2ce81ca66272282c32cd11d31658ee9553849d91289e1c1", - "sha256:bb933c891cd4da6bdcc9733d048e994e22e1883287ff7540c2a0f3b117605092", - "sha256:bc7f6fbc61b1c16050a389c630da0b32fc6d4a3d191394ab78972bf5edc568c2", - "sha256:bd0578596e3a835ef451784053cfd327d607fc39ea1a14812139339a18a0dbc3", - "sha256:bf9bd1fd5eec01494e0f2e8e446a74a85d5e49afb63d75a9934e4a5423dba21d", - "sha256:c60b401f192e79caec61f166da9c924e9f8bc65548d4246842df91651e83d600", - "sha256:c8161b5a7778d3137ea2ee7ae8a08cce0010de3b00ac671c5ebddeaa17cefd22", - "sha256:cdf22e4db76d323bcdc733514bf732e9fb349707c98d341d40ebcc6e9318ef3d", - "sha256:ce8b7693da41a3c4fde5871c738a81490cea5496c671d74374c8ab889e1834fb", - "sha256:d04d01f0a913202205a598246cf77826fe3baa5a63e9f6ccf1ab0601cf56eca0", - "sha256:d25594d3b38a2e6cabfdcafef339f754ca6e81fbbdb6650ad773ea9775af35ab", - "sha256:d4e47d8faffaae822fb5cba20937c048d4f734f43572e7079298a6c39fb172cb", - "sha256:dbc7cf464cc6d67e83e136c9f55726da3a30176f020a36ead246eceed87f1cd8", - "sha256:dd7793bab517e706c9ed9d7310b06c8672fd0aeee5781bfad612f56b8e0f7d14", - "sha256:e098c17856a8c9ade81b4810888c5ad1914099657226283cab3062c0540b0643", - "sha256:e0cb0ab69915c55627c933f0b555a943d98ba71b4d1c57bc0d0a66e2567c7471", - "sha256:e252017a817fad7ce05cafbe5711ed40faeb580e63b16755a3a24e66fa1d87c0", - "sha256:e2db616467070d0533832d204c54eea6836a5e628f2cb1e6dfd8cd6ba7277cb7", - "sha256:e4e15d2138ee2694e038e33b7c3da70e6b0ad8868b9f8094a72e1414aeda9c1a", - "sha256:e5511cb35f5c50a2db21047c875eb42f308c5583edf96bd8ebf7d770a9d68f6d", - "sha256:e5e8523bb12d7623cd8300dbd91b9e439a46a028cd078ca695eb66ba31adee3c", - "sha256:e5f481cccb3c5c5e5de5d00b5141dc589c1047e60d07e85bbd7dea3d4580d63f", - "sha256:e924fb978615a5e33ff644cc42e6aa241effcf4f3322c09d4f8cebde95aff5f8", - "sha256:e93089c1570a4ad54c3714a12c2cef549dc9d58e97bcded193d928649cab78e9", - "sha256:e995a34c3d44ab511bfc11aa26869b9d66c2d8c799fa0e74b28a473a692532d6", - "sha256:ef43b5dd842382329e4797c46f10748d8c2b6e0614f46b4afe4aee9ac33159df", - "sha256:ef58340cc896219e4e653dade08fea5c55c6df41bcc68122e3be3e9d873d9a7b", - "sha256:f114d8478733ca7388e7c7e0ab34b72547476b97009d643644ac33d4d3fe1821", - "sha256:f3aa090106b1543f3f87b2041eef3c156c8da2aed90c63a2fbed62d875c49c37", - "sha256:f3fc723ab8a5c5ed6c50418e9bfcd8e6dceba6c271cee6728a10a4ed8561520c", - "sha256:f54cb79d26d0cd420637d184af38f0668558f3c4bbe22ab7ad830e67249f2e0b", - "sha256:fc9dc435ec8699e7b602b94fe0cd4703e69273a01cbc34409af29e7820f777f1" - ], - "markers": "python_version >= '3.9'", - "version": "==6.6.3" - }, - "ollama": { - "hashes": [ - 
"sha256:4c8839f35bc173c7057b1eb2cbe7f498c1a7e134eafc9192824c8aecb3617506", - "sha256:5a799e4dc4e7af638b11e3ae588ab17623ee019e496caaf4323efbaa8feeff93" - ], - "markers": "python_version >= '3.8'", - "version": "==0.5.1" - }, "prometheus-client": { "hashes": [ "sha256:190f1331e783cf21eb60bca559354e0a4d4378facecf78f5428c39b675d20d28", @@ -569,223 +148,6 @@ "index": "pypi", "version": "==0.23.2" }, - "propcache": { - "hashes": [ - "sha256:035e631be25d6975ed87ab23153db6a73426a48db688070d925aa27e996fe93c", - "sha256:03c89c1b14a5452cf15403e291c0ccd7751d5b9736ecb2c5bab977ad6c5bcd81", - "sha256:06766d8f34733416e2e34f46fea488ad5d60726bb9481d3cddf89a6fa2d9603f", - "sha256:0a2f2235ac46a7aa25bdeb03a9e7060f6ecbd213b1f9101c43b3090ffb971ef6", - "sha256:0b504d29f3c47cf6b9e936c1852246c83d450e8e063d50562115a6be6d3a2535", - "sha256:0b8d2f607bd8f80ddc04088bc2a037fdd17884a6fcadc47a96e334d72f3717be", - "sha256:0cc17efde71e12bbaad086d679ce575268d70bc123a5a71ea7ad76f70ba30bba", - "sha256:0d0fda578d1dc3f77b6b5a5dce3b9ad69a8250a891760a548df850a5e8da87f3", - "sha256:156c03d07dc1323d8dacaa221fbe028c5c70d16709cdd63502778e6c3ccca1b0", - "sha256:1f43837d4ca000243fd7fd6301947d7cb93360d03cd08369969450cc6b2ce3b4", - "sha256:20d7d62e4e7ef05f221e0db2856b979540686342e7dd9973b815599c7057e168", - "sha256:2183111651d710d3097338dd1893fcf09c9f54e27ff1a8795495a16a469cc90b", - "sha256:21d8759141a9e00a681d35a1f160892a36fb6caa715ba0b832f7747da48fb6ea", - "sha256:22d9962a358aedbb7a2e36187ff273adeaab9743373a272976d2e348d08c7770", - "sha256:261df2e9474a5949c46e962065d88eb9b96ce0f2bd30e9d3136bcde84befd8f2", - "sha256:261fa020c1c14deafd54c76b014956e2f86991af198c51139faf41c4d5e83892", - "sha256:28710b0d3975117239c76600ea351934ac7b5ff56e60953474342608dbbb6154", - "sha256:2a4092e8549031e82facf3decdbc0883755d5bbcc62d3aea9d9e185549936dcf", - "sha256:2ca6d378f09adb13837614ad2754fa8afaee330254f404299611bce41a8438cb", - "sha256:310d11aa44635298397db47a3ebce7db99a4cc4b9bbdfcf6c98a60c8d5261cf1", - "sha256:31248e44b81d59d6addbb182c4720f90b44e1efdc19f58112a3c3a1615fb47ef", - "sha256:34a624af06c048946709f4278b4176470073deda88d91342665d95f7c6270fbe", - "sha256:36c8d9b673ec57900c3554264e630d45980fd302458e4ac801802a7fd2ef7897", - "sha256:3def3da3ac3ce41562d85db655d18ebac740cb3fa4367f11a52b3da9d03a5cc3", - "sha256:404d70768080d3d3bdb41d0771037da19d8340d50b08e104ca0e7f9ce55fce70", - "sha256:4445542398bd0b5d32df908031cb1b30d43ac848e20470a878b770ec2dcc6330", - "sha256:46d7f8aa79c927e5f987ee3a80205c987717d3659f035c85cf0c3680526bdb44", - "sha256:4927842833830942a5d0a56e6f4839bc484785b8e1ce8d287359794818633ba0", - "sha256:4adfb44cb588001f68c5466579d3f1157ca07f7504fc91ec87862e2b8e556b88", - "sha256:4ba3fef1c30f306b1c274ce0b8baaa2c3cdd91f645c48f06394068f37d3837a1", - "sha256:4c1396592321ac83157ac03a2023aa6cc4a3cc3cfdecb71090054c09e5a7cce3", - "sha256:4c181cad81158d71c41a2bce88edce078458e2dd5ffee7eddd6b05da85079f43", - "sha256:54e02207c79968ebbdffc169591009f4474dde3b4679e16634d34c9363ff56b4", - "sha256:55780d5e9a2ddc59711d727226bb1ba83a22dd32f64ee15594b9392b1f544eb1", - "sha256:55ffda449a507e9fbd4aca1a7d9aa6753b07d6166140e5a18d2ac9bc49eac220", - "sha256:5745bc7acdafa978ca1642891b82c19238eadc78ba2aaa293c6863b304e552d7", - "sha256:59d61f6970ecbd8ff2e9360304d5c8876a6abd4530cb752c06586849ac8a9dc9", - "sha256:5f559e127134b07425134b4065be45b166183fdcb433cb6c24c8e4149056ad50", - "sha256:5f57aa0847730daceff0497f417c9de353c575d8da3579162cc74ac294c5369e", - "sha256:6107ddd08b02654a30fb8ad7a132021759d750a82578b94cd55ee2772b6ebea2", - 
"sha256:62180e0b8dbb6b004baec00a7983e4cc52f5ada9cd11f48c3528d8cfa7b96a66", - "sha256:62b4239611205294cc433845b914131b2a1f03500ff3c1ed093ed216b82621e1", - "sha256:64a67fb39229a8a8491dd42f864e5e263155e729c2e7ff723d6e25f596b1e8cb", - "sha256:6d8f3f0eebf73e3c0ff0e7853f68be638b4043c65a70517bb575eff54edd8dbe", - "sha256:7046e79b989d7fe457bb755844019e10f693752d169076138abf17f31380800c", - "sha256:70bd8b9cd6b519e12859c99f3fc9a93f375ebd22a50296c3a295028bea73b9e7", - "sha256:7435d766f978b4ede777002e6b3b6641dd229cd1da8d3d3106a45770365f9ad9", - "sha256:74413c0ba02ba86f55cf60d18daab219f7e531620c15f1e23d95563f505efe7e", - "sha256:76cace5d6b2a54e55b137669b30f31aa15977eeed390c7cbfb1dafa8dfe9a701", - "sha256:7a2368eed65fc69a7a7a40b27f22e85e7627b74216f0846b04ba5c116e191ec9", - "sha256:7a4f30862869fa2b68380d677cc1c5fcf1e0f2b9ea0cf665812895c75d0ca3b8", - "sha256:7ca3aee1aa955438c4dba34fc20a9f390e4c79967257d830f137bd5a8a32ed3b", - "sha256:7f08f1cc28bd2eade7a8a3d2954ccc673bb02062e3e7da09bc75d843386b342f", - "sha256:85871b050f174bc0bfb437efbdb68aaf860611953ed12418e4361bc9c392749e", - "sha256:8a08154613f2249519e549de2330cf8e2071c2887309a7b07fb56098f5170a02", - "sha256:8a544caaae1ac73f1fecfae70ded3e93728831affebd017d53449e3ac052ac1e", - "sha256:8cabf5b5902272565e78197edb682017d21cf3b550ba0460ee473753f28d23c1", - "sha256:8de106b6c84506b31c27168582cd3cb3000a6412c16df14a8628e5871ff83c10", - "sha256:92b69e12e34869a6970fd2f3da91669899994b47c98f5d430b781c26f1d9f387", - "sha256:9704bedf6e7cbe3c65eca4379a9b53ee6a83749f047808cbb5044d40d7d72198", - "sha256:98f1ec44fb675f5052cccc8e609c46ed23a35a1cfd18545ad4e29002d858a43f", - "sha256:9a3cf035bbaf035f109987d9d55dc90e4b0e36e04bbbb95af3055ef17194057b", - "sha256:9bec58347a5a6cebf239daba9bda37dffec5b8d2ce004d9fe4edef3d2815137e", - "sha256:9da1cf97b92b51253d5b68cf5a2b9e0dafca095e36b7f2da335e27dc6172a614", - "sha256:9ecb0aad4020e275652ba3975740f241bd12a61f1a784df044cf7477a02bc252", - "sha256:a2dc1f4a1df4fecf4e6f68013575ff4af84ef6f478fe5344317a65d38a8e6dc9", - "sha256:a7fad897f14d92086d6b03fdd2eb844777b0c4d7ec5e3bac0fbae2ab0602bbe5", - "sha256:ab50cef01b372763a13333b4e54021bdcb291fc9a8e2ccb9c2df98be51bcde6c", - "sha256:abb7fa19dbf88d3857363e0493b999b8011eea856b846305d8c0512dfdf8fbb1", - "sha256:acdf05d00696bc0447e278bb53cb04ca72354e562cf88ea6f9107df8e7fd9770", - "sha256:aff2e4e06435d61f11a428360a932138d0ec288b0a31dd9bd78d200bd4a2b339", - "sha256:b77ec3c257d7816d9f3700013639db7491a434644c906a2578a11daf13176251", - "sha256:bca54bd347a253af2cf4544bbec232ab982f4868de0dd684246b67a51bc6b1db", - "sha256:be29c4f4810c5789cf10ddf6af80b041c724e629fa51e308a7a0fb19ed1ef7bf", - "sha256:c0075bf773d66fa8c9d41f66cc132ecc75e5bb9dd7cce3cfd14adc5ca184cb95", - "sha256:c144ca294a204c470f18cf4c9d78887810d04a3e2fbb30eea903575a779159df", - "sha256:c5c2a784234c28854878d68978265617aa6dc0780e53d44b4d67f3651a17a9a2", - "sha256:ca592ed634a73ca002967458187109265e980422116c0a107cf93d81f95af945", - "sha256:cab90ac9d3f14b2d5050928483d3d3b8fb6b4018893fc75710e6aa361ecb2474", - "sha256:cc2782eb0f7a16462285b6f8394bbbd0e1ee5f928034e941ffc444012224171b", - "sha256:ce26862344bdf836650ed2487c3d724b00fbfec4233a1013f597b78c1cb73615", - "sha256:ce2ac2675a6aa41ddb2a0c9cbff53780a617ac3d43e620f8fd77ba1c84dcfc06", - "sha256:d1a342c834734edb4be5ecb1e9fb48cb64b1e2320fccbd8c54bf8da8f2a84c33", - "sha256:d4a996adb6904f85894570301939afeee65f072b4fd265ed7e569e8d9058e4ec", - "sha256:d81ac3ae39d38588ad0549e321e6f773a4e7cc68e7751524a22885d5bbadf886", - "sha256:db429c19a6c7e8a1c320e6a13c99799450f411b02251fb1b75e6217cf4a14fcb", - 
"sha256:df4a81b9b53449ebc90cc4deefb052c1dd934ba85012aa912c7ea7b7e38b60c1", - "sha256:e41671f1594fc4ab0a6dec1351864713cb3a279910ae8b58f884a88a0a632c05", - "sha256:e514326b79e51f0a177daab1052bc164d9d9e54133797a3a58d24c9c87a3fe6d", - "sha256:e53af8cb6a781b02d2ea079b5b853ba9430fcbe18a8e3ce647d5982a3ff69f39", - "sha256:ee6f22b6eaa39297c751d0e80c0d3a454f112f5c6481214fcf4c092074cecd67", - "sha256:eef914c014bf72d18efb55619447e0aecd5fb7c2e3fa7441e2e5d6099bddff7e", - "sha256:f066b437bb3fa39c58ff97ab2ca351db465157d68ed0440abecb21715eb24b28", - "sha256:f1304b085c83067914721e7e9d9917d41ad87696bf70f0bc7dee450e9c71ad0a", - "sha256:f86e5d7cd03afb3a1db8e9f9f6eff15794e79e791350ac48a8c924e6f439f394", - "sha256:fad3b2a085ec259ad2c2842666b2a0a49dea8463579c606426128925af1ed725", - "sha256:fb075ad271405dcad8e2a7ffc9a750a3bf70e533bd86e89f0603e607b93aa64c", - "sha256:fd3e6019dc1261cd0291ee8919dd91fbab7b169bb76aeef6c716833a3f65d206" - ], - "markers": "python_version >= '3.9'", - "version": "==0.3.2" - }, - "pydantic": { - "hashes": [ - "sha256:d989c3c6cb79469287b1569f7447a17848c998458d49ebe294e975b9baf0f0db", - "sha256:dde5df002701f6de26248661f6835bbe296a47bf73990135c7d07ce741b9623b" - ], - "markers": "python_version >= '3.9'", - "version": "==2.11.7" - }, - "pydantic-core": { - "hashes": [ - "sha256:0069c9acc3f3981b9ff4cdfaf088e98d83440a4c7ea1bc07460af3d4dc22e72d", - "sha256:031c57d67ca86902726e0fae2214ce6770bbe2f710dc33063187a68744a5ecac", - "sha256:0405262705a123b7ce9f0b92f123334d67b70fd1f20a9372b907ce1080c7ba02", - "sha256:04a1a413977ab517154eebb2d326da71638271477d6ad87a769102f7c2488c56", - "sha256:09fb9dd6571aacd023fe6aaca316bd01cf60ab27240d7eb39ebd66a3a15293b4", - "sha256:0a39979dcbb70998b0e505fb1556a1d550a0781463ce84ebf915ba293ccb7e22", - "sha256:0a9f2c9dd19656823cb8250b0724ee9c60a82f3cdf68a080979d13092a3b0fef", - "sha256:0e03262ab796d986f978f79c943fc5f620381be7287148b8010b4097f79a39ec", - "sha256:0e5b2671f05ba48b94cb90ce55d8bdcaaedb8ba00cc5359f6810fc918713983d", - "sha256:0e6116757f7959a712db11f3e9c0a99ade00a5bbedae83cb801985aa154f071b", - "sha256:0fb2d542b4d66f9470e8065c5469ec676978d625a8b7a363f07d9a501a9cb36a", - "sha256:1082dd3e2d7109ad8b7da48e1d4710c8d06c253cbc4a27c1cff4fbcaa97a9e3f", - "sha256:1a8695a8d00c73e50bff9dfda4d540b7dee29ff9b8053e38380426a85ef10052", - "sha256:1e063337ef9e9820c77acc768546325ebe04ee38b08703244c1309cccc4f1bab", - "sha256:1ea40a64d23faa25e62a70ad163571c0b342b8bf66d5fa612ac0dec4f069d916", - "sha256:2058a32994f1fde4ca0480ab9d1e75a0e8c87c22b53a3ae66554f9af78f2fe8c", - "sha256:235f45e5dbcccf6bd99f9f472858849f73d11120d76ea8707115415f8e5ebebf", - "sha256:2807668ba86cb38c6817ad9bc66215ab8584d1d304030ce4f0887336f28a5e27", - "sha256:2b0a451c263b01acebe51895bfb0e1cc842a5c666efe06cdf13846c7418caa9a", - "sha256:2b3d326aaef0c0399d9afffeb6367d5e26ddc24d351dbc9c636840ac355dc5d8", - "sha256:2bfb5112df54209d820d7bf9317c7a6c9025ea52e49f46b6a2060104bba37de7", - "sha256:2f82865531efd18d6e07a04a17331af02cb7a651583c418df8266f17a63c6612", - "sha256:329467cecfb529c925cf2bbd4d60d2c509bc2fb52a20c1045bf09bb70971a9c1", - "sha256:3a1c81334778f9e3af2f8aeb7a960736e5cab1dfebfb26aabca09afd2906c039", - "sha256:3abcd9392a36025e3bd55f9bd38d908bd17962cc49bc6da8e7e96285336e2bca", - "sha256:3c6db6e52c6d70aa0d00d45cdb9b40f0433b96380071ea80b09277dba021ddf7", - "sha256:3dc625f4aa79713512d1976fe9f0bc99f706a9dee21dfd1810b4bbbf228d0e8a", - "sha256:3eb3fe62804e8f859c49ed20a8451342de53ed764150cb14ca71357c765dc2a6", - "sha256:44857c3227d3fb5e753d5fe4a3420d6376fa594b07b621e220cd93703fe21782", - 
"sha256:4b25d91e288e2c4e0662b8038a28c6a07eaac3e196cfc4ff69de4ea3db992a1b", - "sha256:4c5b0a576fb381edd6d27f0a85915c6daf2f8138dc5c267a57c08a62900758c7", - "sha256:4e61206137cbc65e6d5256e1166f88331d3b6238e082d9f74613b9b765fb9025", - "sha256:52fb90784e0a242bb96ec53f42196a17278855b0f31ac7c3cc6f5c1ec4811849", - "sha256:53a57d2ed685940a504248187d5685e49eb5eef0f696853647bf37c418c538f7", - "sha256:572c7e6c8bb4774d2ac88929e3d1f12bc45714ae5ee6d9a788a9fb35e60bb04b", - "sha256:5c4aa4e82353f65e548c476b37e64189783aa5384903bfea4f41580f255fddfa", - "sha256:5c92edd15cd58b3c2d34873597a1e20f13094f59cf88068adb18947df5455b4e", - "sha256:5f483cfb75ff703095c59e365360cb73e00185e01aaea067cd19acffd2ab20ea", - "sha256:61c18fba8e5e9db3ab908620af374db0ac1baa69f0f32df4f61ae23f15e586ac", - "sha256:6368900c2d3ef09b69cb0b913f9f8263b03786e5b2a387706c5afb66800efd51", - "sha256:64632ff9d614e5eecfb495796ad51b0ed98c453e447a76bcbeeb69615079fc7e", - "sha256:65132b7b4a1c0beded5e057324b7e16e10910c106d43675d9bd87d4f38dde162", - "sha256:6b99022f1d19bc32a4c2a0d544fc9a76e3be90f0b3f4af413f87d38749300e65", - "sha256:6bdfe4b3789761f3bcb4b1ddf33355a71079858958e3a552f16d5af19768fef2", - "sha256:6fa6dfc3e4d1f734a34710f391ae822e0a8eb8559a85c6979e14e65ee6ba2954", - "sha256:73662edf539e72a9440129f231ed3757faab89630d291b784ca99237fb94db2b", - "sha256:73cf6373c21bc80b2e0dc88444f41ae60b2f070ed02095754eb5a01df12256de", - "sha256:7cb8bc3605c29176e1b105350d2e6474142d7c1bd1d9327c4a9bdb46bf827acc", - "sha256:7f92c15cd1e97d4b12acd1cc9004fa092578acfa57b67ad5e43a197175d01a64", - "sha256:82f68293f055f51b51ea42fafc74b6aad03e70e191799430b90c13d643059ebb", - "sha256:83aa99b1285bc8f038941ddf598501a86f1536789740991d7d8756e34f1e74d9", - "sha256:87acbfcf8e90ca885206e98359d7dca4bcbb35abdc0ff66672a293e1d7a19101", - "sha256:87b31b6846e361ef83fedb187bb5b4372d0da3f7e28d85415efa92d6125d6e6d", - "sha256:881b21b5549499972441da4758d662aeea93f1923f953e9cbaff14b8b9565aef", - "sha256:8d55ab81c57b8ff8548c3e4947f119551253f4e3787a7bbc0b6b3ca47498a9d3", - "sha256:8f57a69461af2a5fa6e6bbd7a5f60d3b7e6cebb687f55106933188e79ad155c1", - "sha256:95237e53bb015f67b63c91af7518a62a8660376a6a0db19b89acc77a4d6199f5", - "sha256:96081f1605125ba0855dfda83f6f3df5ec90c61195421ba72223de35ccfb2f88", - "sha256:970919794d126ba8645f3837ab6046fb4e72bbc057b3709144066204c19a455d", - "sha256:9cb1da0f5a471435a7bc7e439b8a728e8b61e59784b2af70d7c169f8dd8ae290", - "sha256:9fcd347d2cc5c23b06de6d3b7b8275be558a0c90549495c699e379a80bf8379e", - "sha256:9fdac5d6ffa1b5a83bca06ffe7583f5576555e6c8b3a91fbd25ea7780f825f7d", - "sha256:a11c8d26a50bfab49002947d3d237abe4d9e4b5bdc8846a63537b6488e197808", - "sha256:a144d4f717285c6d9234a66778059f33a89096dfb9b39117663fd8413d582dcc", - "sha256:a2b911a5b90e0374d03813674bf0a5fbbb7741570dcd4b4e85a2e48d17def29d", - "sha256:a7ec89dc587667f22b6a0b6579c249fca9026ce7c333fc142ba42411fa243cdc", - "sha256:aa9d91b338f2df0508606f7009fde642391425189bba6d8c653afd80fd6bb64e", - "sha256:b0379a2b24882fef529ec3b4987cb5d003b9cda32256024e6fe1586ac45fc640", - "sha256:bc7aee6f634a6f4a95676fcb5d6559a2c2a390330098dba5e5a5f28a2e4ada30", - "sha256:bdc25f3681f7b78572699569514036afe3c243bc3059d3942624e936ec93450e", - "sha256:c083a3bdd5a93dfe480f1125926afcdbf2917ae714bdb80b36d34318b2bec5d9", - "sha256:c20c462aa4434b33a2661701b861604913f912254e441ab8d78d30485736115a", - "sha256:c2fc0a768ef76c15ab9238afa6da7f69895bb5d1ee83aeea2e3509af4472d0b9", - "sha256:c52b02ad8b4e2cf14ca7b3d918f3eb0ee91e63b3167c32591e57c4317e134f8f", - "sha256:c54c939ee22dc8e2d545da79fc5381f1c020d6d3141d3bd747eab59164dc89fb", - 
"sha256:c8e7af2f4e0194c22b5b37205bfb293d166a7344a5b0d0eaccebc376546d77d5", - "sha256:cca3868ddfaccfbc4bfb1d608e2ccaaebe0ae628e1416aeb9c4d88c001bb45ab", - "sha256:d3f26877a748dc4251cfcfda9dfb5f13fcb034f5308388066bcfe9031b63ae7d", - "sha256:d53b22f2032c42eaaf025f7c40c2e3b94568ae077a606f006d206a463bc69572", - "sha256:d87c561733f66531dced0da6e864f44ebf89a8fba55f31407b00c2f7f9449593", - "sha256:d946c8bf0d5c24bf4fe333af284c59a19358aa3ec18cb3dc4370080da1e8ad29", - "sha256:dac89aea9af8cd672fa7b510e7b8c33b0bba9a43186680550ccf23020f32d535", - "sha256:db4b41f9bd95fbe5acd76d89920336ba96f03e149097365afe1cb092fceb89a1", - "sha256:dc46a01bf8d62f227d5ecee74178ffc448ff4e5197c756331f71efcc66dc980f", - "sha256:dd14041875d09cc0f9308e37a6f8b65f5585cf2598a53aa0123df8b129d481f8", - "sha256:de4b83bb311557e439b9e186f733f6c645b9417c84e2eb8203f3f820a4b988bf", - "sha256:e799c050df38a639db758c617ec771fd8fb7a5f8eaaa4b27b101f266b216a246", - "sha256:e80b087132752f6b3d714f041ccf74403799d3b23a72722ea2e6ba2e892555b9", - "sha256:eb8c529b2819c37140eb51b914153063d27ed88e3bdc31b71198a198e921e011", - "sha256:eb9b459ca4df0e5c87deb59d37377461a538852765293f9e6ee834f0435a93b9", - "sha256:efec8db3266b76ef9607c2c4c419bdb06bf335ae433b80816089ea7585816f6a", - "sha256:f481959862f57f29601ccced557cc2e817bce7533ab8e01a797a48b49c9692b3", - "sha256:f517ca031dfc037a9c07e748cefd8d96235088b83b4f4ba8939105d20fa1dcd6", - "sha256:f889f7a40498cc077332c7ab6b4608d296d852182211787d4f3ee377aaae66e8", - "sha256:f8de619080e944347f5f20de29a975c2d815d9ddd8be9b9b7268e2e3ef68605a", - "sha256:f941635f2a3d96b2973e867144fde513665c87f13fe0e193c158ac51bfaaa7b2", - "sha256:fa754d1850735a0b0e03bcffd9d4b4343eb417e47196e4485d9cca326073a42c", - "sha256:fa854f5cf7e33842a892e5c73f45327760bc7bc516339fda888c75ae60edaeb6", - "sha256:fe5b32187cbc0c862ee201ad66c30cf218e5ed468ec8dc1cf49dec66e160cc4d" - ], - "markers": "python_version >= '3.9'", - "version": "==2.33.2" - }, "pymongo": { "hashes": [ "sha256:01065eb1838e3621a30045ab14d1a60ee62e01f65b7cf154e69c5c722ef14d2f", @@ -919,236 +281,130 @@ "markers": "python_version >= '3.8'", "version": "==6.0.2" }, - "sniffio": { + "werkzeug": { "hashes": [ - "sha256:2f6da418d1f1e0fddd844478f41680e794e6051915791a034ff65e5f100525a2", - "sha256:f4324edc670a0f49750a81b895f35c3adb843cca46f0530f79fc1babb23789dc" + "sha256:54b78bf3716d19a65be4fceccc0d1d7b89e608834989dfae50ea87564639213e", + "sha256:60723ce945c19328679790e3282cc758aa4a6040e4bb330f53d30fa546d44746" ], - "markers": "python_version >= '3.7'", - "version": "==1.3.1" - }, - "stage0-mongodb-api": { - "editable": true, - "file": "." 
- }, - "stage0-py-utils": { + "markers": "python_version >= '3.9'", + "version": "==3.1.3" + } + }, + "develop": { + "coverage": { "hashes": [ - "sha256:23fb80f78dc3e009c8ac22c207f19be06ca3d37f97f536e851bdb84a7edc8a87", - "sha256:fa34fb9e777914c11d77cd1df462cbc0e7365e02faca9bd201995e28158f8651" + "sha256:0a07757de9feb1dfafd16ab651e0f628fd7ce551604d1bf23e47e1ddca93f08a", + "sha256:0a17eaf46f56ae0f870f14a3cbc2e4632fe3771eab7f687eda1ee59b73d09fe4", + "sha256:0b4a4cb73b9f2b891c1788711408ef9707666501ba23684387277ededab1097c", + "sha256:0c0378ba787681ab1897f7c89b415bd56b0b2d9a47e5a3d8dc0ea55aac118d6c", + "sha256:115db3d1f4d3f35f5bb021e270edd85011934ff97c8797216b62f461dd69374b", + "sha256:123d589f32c11d9be7fe2e66d823a236fe759b0096f5db3fb1b75b2fa414a4fa", + "sha256:14fa8d3da147f5fdf9d298cacc18791818f3f1a9f542c8958b80c228320e90c6", + "sha256:19e7be4cfec248df38ce40968c95d3952fbffd57b400d4b9bb580f28179556d2", + "sha256:1df6b76e737c6a92210eebcb2390af59a141f9e9430210595251fbaf02d46926", + "sha256:1e2f097eae0e5991e7623958a24ced3282676c93c013dde41399ff63e230fcf2", + "sha256:256ea87cb2a1ed992bcdfc349d8042dcea1b80436f4ddf6e246d6bee4b5d73b6", + "sha256:28dc1f67e83a14e7079b6cea4d314bc8b24d1aed42d3582ff89c0295f09b181e", + "sha256:2c8937fa16c8c9fbbd9f118588756e7bcdc7e16a470766a9aef912dd3f117dbd", + "sha256:2d0d4f6ecdf37fcc19c88fec3e2277d5dee740fb51ffdd69b9579b8c31e4232e", + "sha256:2f3da12e0ccbcb348969221d29441ac714bbddc4d74e13923d3d5a7a0bebef7a", + "sha256:31991156251ec202c798501e0a42bbdf2169dcb0f137b1f5c0f4267f3fc68ef9", + "sha256:326802760da234baf9f2f85a39e4a4b5861b94f6c8d95251f699e4f73b1835dc", + "sha256:333b2e0ca576a7dbd66e85ab402e35c03b0b22f525eed82681c4b866e2e2653a", + "sha256:42da2280c4d30c57a9b578bafd1d4494fa6c056d4c419d9689e66d775539be74", + "sha256:48f82f889c80af8b2a7bb6e158d95a3fbec6a3453a1004d04e4f3b5945a02694", + "sha256:49b752a2858b10580969ec6af6f090a9a440a64a301ac1528d7ca5f7ed497f4d", + "sha256:4b1c2d8363247b46bd51f393f86c94096e64a1cf6906803fa8d5a9d03784bdbf", + "sha256:4e01d138540ef34fcf35c1aa24d06c3de2a4cffa349e29a10056544f35cca15f", + "sha256:4e2c058aef613e79df00e86b6d42a641c877211384ce5bd07585ed7ba71ab31b", + "sha256:549cab4892fc82004f9739963163fd3aac7a7b0df430669b75b86d293d2df2a7", + "sha256:55a28954545f9d2f96870b40f6c3386a59ba8ed50caf2d949676dac3ecab99f5", + "sha256:619317bb86de4193debc712b9e59d5cffd91dc1d178627ab2a77b9870deb2868", + "sha256:6406cff19880aaaadc932152242523e892faff224da29e241ce2fca329866584", + "sha256:66283a192a14a3854b2e7f3418d7db05cdf411012ab7ff5db98ff3b181e1f912", + "sha256:669135a9d25df55d1ed56a11bf555f37c922cf08d80799d4f65d77d7d6123fcf", + "sha256:71ae8b53855644a0b1579d4041304ddc9995c7b21c8a1f16753c4d8903b4dfed", + "sha256:82c3939264a76d44fde7f213924021ed31f55ef28111a19649fec90c0f109e6d", + "sha256:82d76ad87c932935417a19b10cfe7abb15fd3f923cfe47dbdaa74ef4e503752d", + "sha256:88d7598b8ee130f32f8a43198ee02edd16d7f77692fa056cb779616bbea1b355", + "sha256:8a1166db2fb62473285bcb092f586e081e92656c7dfa8e9f62b4d39d7e6b5050", + "sha256:9303aed20872d7a3c9cb39c5d2b9bdbe44e3a9a1aecb52920f7e7495410dfab8", + "sha256:985abe7f242e0d7bba228ab01070fde1d6c8fa12f142e43debe9ed1dde686038", + "sha256:997024fa51e3290264ffd7492ec97d0690293ccd2b45a6cd7d82d945a4a80c8b", + "sha256:9ce85551f9a1119f02adc46d3014b5ee3f765deac166acf20dbb851ceb79b6f3", + "sha256:9d3a700304d01a627df9db4322dc082a0ce1e8fc74ac238e2af39ced4c083193", + "sha256:9dfb070f830739ee49d7c83e4941cc767e503e4394fdecb3b54bfdac1d7662c0", + "sha256:a535c0c7364acd55229749c2b3e5eebf141865de3a8f697076a3291985f02d30", + 
"sha256:a7a56a2964a9687b6aba5b5ced6971af308ef6f79a91043c05dd4ee3ebc3e9ba", + "sha256:ae5d563e970dbe04382f736ec214ef48103d1b875967c89d83c6e3f21706d5b3", + "sha256:ae9eb07f1cfacd9cfe8eaee6f4ff4b8a289a668c39c165cd0c8548484920ffc0", + "sha256:bc18ea9e417a04d1920a9a76fe9ebd2f43ca505b81994598482f938d5c315f46", + "sha256:bcd5ebe66c7a97273d5d2ddd4ad0ed2e706b39630ed4b53e713d360626c3dbb3", + "sha256:bdd612e59baed2a93c8843c9a7cb902260f181370f1d772f4842987535071d14", + "sha256:bf7d773da6af9e10dbddacbf4e5cab13d06d0ed93561d44dae0188a42c65be7e", + "sha256:c10c882b114faf82dbd33e876d0cbd5e1d1ebc0d2a74ceef642c6152f3f4d547", + "sha256:c2667a2b913e307f06aa4e5677f01a9746cd08e4b35e14ebcde6420a9ebb4c62", + "sha256:c33624f50cf8de418ab2b4d6ca9eda96dc45b2c4231336bac91454520e8d1fac", + "sha256:c48c2375287108c887ee87d13b4070a381c6537d30e8487b24ec721bf2a781cb", + "sha256:cdef6504637731a63c133bb2e6f0f0214e2748495ec15fe42d1e219d1b133f0b", + "sha256:d0d67963f9cbfc7c7f96d4ac74ed60ecbebd2ea6eeb51887af0f8dce205e545f", + "sha256:dd7a57b33b5cf27acb491e890720af45db05589a80c1ffc798462a765be6d4d7", + "sha256:ddc39510ac922a5c4c27849b739f875d3e1d9e590d1e7b64c98dadf037a16cce", + "sha256:de3c0378bdf7066c3988d66cd5232d161e933b87103b014ab1b0b4676098fa45", + "sha256:df0f9ef28e0f20c767ccdccfc5ae5f83a6f4a2fbdfbcbcc8487a8a78771168c8", + "sha256:e425cd5b00f6fc0ed7cdbd766c70be8baab4b7839e4d4fe5fac48581dd968ea4", + "sha256:f22627c1fe2745ee98d3ab87679ca73a97e75ca75eb5faee48660d060875465f", + "sha256:f44ae036b63c8ea432f610534a2668b0c3aee810e7037ab9d8ff6883de480f5b", + "sha256:f5fd54310b92741ebe00d9c0d1d7b2b27463952c022da6d47c175d246a98d1bd", + "sha256:f65bb452e579d5540c8b37ec105dd54d8b9307b07bcaa186818c104ffda22441", + "sha256:f8f6389ac977c5fb322e0e38885fbbf901743f79d47f50db706e7644dcdcb6e1", + "sha256:fae939811e14e53ed8a9818dad51d434a41ee09df9305663735f2e2d2d7d959b", + "sha256:ff0d9eae8cdfcd58fe7893b88993723583a6ce4dfbfd9f29e001922544f95615" ], "index": "pypi", - "markers": "python_version >= '3.8'", - "version": "==0.2.15" + "markers": "python_version >= '3.9'", + "version": "==7.9.2" }, - "typing-extensions": { + "iniconfig": { "hashes": [ - "sha256:8676b788e32f02ab42d9e7c61324048ae4c6d844a399eebace3d4979d75ceef4", - "sha256:a1514509136dd0b477638fc68d6a91497af5076466ad0fa6c338e44e359944af" + "sha256:3abbd2e30b36733fee78f9c7f7308f2d0050e88f0087fd25c2645f63c773e1c7", + "sha256:9deba5723312380e77435581c6bf4935c94cbfab9b1ed33ef8d238ea168eb760" ], - "markers": "python_version >= '3.9'", - "version": "==4.14.0" + "markers": "python_version >= '3.8'", + "version": "==2.1.0" }, - "typing-inspection": { + "packaging": { "hashes": [ - "sha256:389055682238f53b04f7badcb49b989835495a96700ced5dab2d8feae4b26f51", - "sha256:6ae134cc0203c33377d43188d4064e9b357dba58cff3185f22924610e70a9d28" + "sha256:29572ef2b1f17581046b3a2227d5c611fb25ec70ca1ba8554b24b0e69331a484", + "sha256:d443872c98d677bf60f6a1f2f8c1cb748e8fe762d2bf9d3148b5599295b0fc4f" ], - "markers": "python_version >= '3.9'", - "version": "==0.4.1" + "markers": "python_version >= '3.8'", + "version": "==25.0" }, - "werkzeug": { + "pluggy": { "hashes": [ - "sha256:54b78bf3716d19a65be4fceccc0d1d7b89e608834989dfae50ea87564639213e", - "sha256:60723ce945c19328679790e3282cc758aa4a6040e4bb330f53d30fa546d44746" + "sha256:7dcc130b76258d33b90f61b658791dede3486c3e6bfb003ee5c9bfb396dd22f3", + "sha256:e920276dd6813095e9377c0bc5566d94c932c33b27a3e3945d8389c374dd4746" ], "markers": "python_version >= '3.9'", - "version": "==3.1.3" + "version": "==1.6.0" }, - "yarl": { + "pygments": { "hashes": [ - 
"sha256:03aa1e041727cb438ca762628109ef1333498b122e4c76dd858d186a37cec845", - "sha256:041eaa14f73ff5a8986b4388ac6bb43a77f2ea09bf1913df7a35d4646db69e53", - "sha256:0b5ff0fbb7c9f1b1b5ab53330acbfc5247893069e7716840c8e7d5bb7355038a", - "sha256:0c869f2651cc77465f6cd01d938d91a11d9ea5d798738c1dc077f3de0b5e5fed", - "sha256:1112ae8154186dfe2de4732197f59c05a83dc814849a5ced892b708033f40dc2", - "sha256:11a62c839c3a8eac2410e951301309426f368388ff2f33799052787035793b02", - "sha256:12e768f966538e81e6e7550f9086a6236b16e26cd964cf4df35349970f3551cf", - "sha256:14a85f3bd2d7bb255be7183e5d7d6e70add151a98edf56a770d6140f5d5f4010", - "sha256:14f326acd845c2b2e2eb38fb1346c94f7f3b01a4f5c788f8144f9b630bfff9a3", - "sha256:1c48912653e63aef91ff988c5432832692ac5a1d8f0fb8a33091520b5bbe19ef", - "sha256:1c92f4390e407513f619d49319023664643d3339bd5e5a56a3bebe01bc67ec04", - "sha256:1f8a891e4a22a89f5dde7862994485e19db246b70bb288d3ce73a34422e55b23", - "sha256:21242b4288a6d56f04ea193adde174b7e347ac46ce6bc84989ff7c1b1ecea84e", - "sha256:255b468adf57b4a7b65d8aad5b5138dce6a0752c139965711bdcb81bc370e1b6", - "sha256:26ef53a9e726e61e9cd1cda6b478f17e350fb5800b4bd1cd9fe81c4d91cfeb2e", - "sha256:2c26b0c49220d5799f7b22c6838409ee9bc58ee5c95361a4d7831f03cc225b5a", - "sha256:2c7b34d804b8cf9b214f05015c4fee2ebe7ed05cf581e7192c06555c71f4446a", - "sha256:2c89b5c792685dd9cd3fa9761c1b9f46fc240c2a3265483acc1565769996a3f8", - "sha256:30c41ad5d717b3961b2dd785593b67d386b73feca30522048d37298fee981805", - "sha256:33f29ecfe0330c570d997bcf1afd304377f2e48f61447f37e846a6058a4d33b2", - "sha256:344d1103e9c1523f32a5ed704d576172d2cabed3122ea90b1d4e11fe17c66458", - "sha256:377fae2fef158e8fd9d60b4c8751387b8d1fb121d3d0b8e9b0be07d1b41e83dc", - "sha256:3cf34efa60eb81dd2645a2e13e00bb98b76c35ab5061a3989c7a70f78c85006d", - "sha256:41493b9b7c312ac448b7f0a42a089dffe1d6e6e981a2d76205801a023ed26a2b", - "sha256:41ebd28167bc6af8abb97fec1a399f412eec5fd61a3ccbe2305a18b84fb4ca73", - "sha256:468f6e40285de5a5b3c44981ca3a319a4b208ccc07d526b20b12aeedcfa654b7", - "sha256:46b5e0ccf1943a9a6e766b2c2b8c732c55b34e28be57d8daa2b3c1d1d4009309", - "sha256:47ee6188fea634bdfaeb2cc420f5b3b17332e6225ce88149a17c413c77ff269e", - "sha256:48ea7d7f9be0487339828a4de0360d7ce0efc06524a48e1810f945c45b813698", - "sha256:495b4ef2fea40596bfc0affe3837411d6aa3371abcf31aac0ccc4bdd64d4ef5c", - "sha256:49bdd1b8e00ce57e68ba51916e4bb04461746e794e7c4d4bbc42ba2f18297691", - "sha256:4a979218c1fdb4246a05efc2cc23859d47c89af463a90b99b7c56094daf25a16", - "sha256:4c3ae28f3ae1563c50f3d37f064ddb1511ecc1d5584e88c6b7c63cf7702a6d5f", - "sha256:541d050a355bbbc27e55d906bc91cb6fe42f96c01413dd0f4ed5a5240513874f", - "sha256:564ab3d517e3d01c408c67f2e5247aad4019dcf1969982aba3974b4093279004", - "sha256:56dac5f452ed25eef0f6e3c6a066c6ab68971d96a9fb441791cad0efba6140d3", - "sha256:57edc88517d7fc62b174fcfb2e939fbc486a68315d648d7e74d07fac42cec240", - "sha256:59174e7332f5d153d8f7452a102b103e2e74035ad085f404df2e40e663a22b28", - "sha256:595c07bc79af2494365cc96ddeb772f76272364ef7c80fb892ef9d0649586513", - "sha256:597f40615b8d25812f14562699e287f0dcc035d25eb74da72cae043bb884d773", - "sha256:59febc3969b0781682b469d4aca1a5cab7505a4f7b85acf6db01fa500fa3f6ba", - "sha256:6032e6da6abd41e4acda34d75a816012717000fa6839f37124a47fcefc49bec4", - "sha256:62915e6688eb4d180d93840cda4110995ad50c459bf931b8b3775b37c264af1e", - "sha256:642980ef5e0fa1de5fa96d905c7e00cb2c47cb468bfcac5a18c58e27dbf8d8d1", - "sha256:66252d780b45189975abfed839616e8fd2dbacbdc262105ad7742c6ae58f3e31", - "sha256:67e708dfb8e78d8a19169818eeb5c7a80717562de9051bf2413aca8e3696bf16", - 
"sha256:680e19c7ce3710ac4cd964e90dad99bf9b5029372ba0c7cbfcd55e54d90ea819", - "sha256:69e9b141de5511021942a6866990aea6d111c9042235de90e08f94cf972ca03d", - "sha256:69ff8439d8ba832d6bed88af2c2b3445977eba9a4588b787b32945871c2444e3", - "sha256:6c4fbf6b02d70e512d7ade4b1f998f237137f1417ab07ec06358ea04f69134f8", - "sha256:6f3eff4cc3f03d650d8755c6eefc844edde99d641d0dcf4da3ab27141a5f8ddf", - "sha256:749d73611db8d26a6281086f859ea7ec08f9c4c56cec864e52028c8b328db723", - "sha256:76d12524d05841276b0e22573f28d5fbcb67589836772ae9244d90dd7d66aa13", - "sha256:793fd0580cb9664548c6b83c63b43c477212c0260891ddf86809e1c06c8b08f1", - "sha256:7a8900a42fcdaad568de58887c7b2f602962356908eedb7628eaf6021a6e435b", - "sha256:7bdd2f80f4a7df852ab9ab49484a4dee8030023aa536df41f2d922fd57bf023f", - "sha256:812303eb4aa98e302886ccda58d6b099e3576b1b9276161469c25803a8db277d", - "sha256:835ab2cfc74d5eb4a6a528c57f05688099da41cf4957cf08cad38647e4a83b30", - "sha256:83b8eb083fe4683c6115795d9fc1cfaf2cbbefb19b3a1cb68f6527460f483a77", - "sha256:8570d998db4ddbfb9a590b185a0a33dbf8aafb831d07a5257b4ec9948df9cb0a", - "sha256:8601bc010d1d7780592f3fc1bdc6c72e2b6466ea34569778422943e1a1f3c389", - "sha256:86971e2795584fe8c002356d3b97ef6c61862720eeff03db2a7c86b678d85b3e", - "sha256:88cab98aa4e13e1ade8c141daeedd300a4603b7132819c484841bb7af3edce9e", - "sha256:8e0fe9364ad0fddab2688ce72cb7a8e61ea42eff3c7caeeb83874a5d479c896c", - "sha256:8f64fbf81878ba914562c672024089e3401974a39767747691c65080a67b18c1", - "sha256:8f969afbb0a9b63c18d0feecf0db09d164b7a44a053e78a7d05f5df163e43833", - "sha256:909313577e9619dcff8c31a0ea2aa0a2a828341d92673015456b3ae492e7317b", - "sha256:90bbd29c4fe234233f7fa2b9b121fb63c321830e5d05b45153a2ca68f7d310ee", - "sha256:9427925776096e664c39e131447aa20ec738bdd77c049c48ea5200db2237e000", - "sha256:97c75596019baae7c71ccf1d8cc4738bc08134060d0adfcbe5642f778d1dca38", - "sha256:98c4a7d166635147924aa0bf9bfe8d8abad6fffa6102de9c99ea04a1376f91e8", - "sha256:a97d67108e79cfe22e2b430d80d7571ae57d19f17cda8bb967057ca8a7bf5bfd", - "sha256:aef6c4d69554d44b7f9d923245f8ad9a707d971e6209d51279196d8e8fe1ae16", - "sha256:b121ff6a7cbd4abc28985b6028235491941b9fe8fe226e6fdc539c977ea1739d", - "sha256:b29a2c385a5f5b9c7d9347e5812b6f7ab267193c62d282a540b4fc528c8a9d2a", - "sha256:b5f307337819cdfdbb40193cad84978a029f847b0a357fbe49f712063cfc4f06", - "sha256:b982fa7f74c80d5c0c7b5b38f908971e513380a10fecea528091405f519b9ebb", - "sha256:bad6d131fda8ef508b36be3ece16d0902e80b88ea7200f030a0f6c11d9e508d4", - "sha256:bdcc4cd244e58593a4379fe60fdee5ac0331f8eb70320a24d591a3be197b94a9", - "sha256:bea21cdae6c7eb02ba02a475f37463abfe0a01f5d7200121b03e605d6a0439f8", - "sha256:c03bfebc4ae8d862f853a9757199677ab74ec25424d0ebd68a0027e9c639a390", - "sha256:c5e9642f27036283550f5f57dc6156c51084b458570b9d0d96100c8bebb186a8", - "sha256:c7d7f497126d65e2cad8dc5f97d34c27b19199b6414a40cb36b52f41b79014be", - "sha256:c7ddf7a09f38667aea38801da8b8d6bfe81df767d9dfc8c88eb45827b195cd1c", - "sha256:d017a4997ee50c91fd5466cef416231bb82177b93b029906cefc542ce14c35ac", - "sha256:d0f6500f69e8402d513e5eedb77a4e1818691e8f45e6b687147963514d84b44b", - "sha256:d1a4fbb50e14396ba3d375f68bfe02215d8e7bc3ec49da8341fe3157f59d2ff5", - "sha256:d25ddcf954df1754ab0f86bb696af765c5bfaba39b74095f27eececa049ef9a4", - "sha256:d2b6fb3622b7e5bf7a6e5b679a69326b4279e805ed1699d749739a61d242449e", - "sha256:daadbdc1f2a9033a2399c42646fbd46da7992e868a5fe9513860122d7fe7a73f", - "sha256:dab096ce479d5894d62c26ff4f699ec9072269d514b4edd630a393223f45a0ee", - "sha256:daea0d313868da1cf2fac6b2d3a25c6e3a9e879483244be38c8e6a41f1d876a5", - 
"sha256:dd803820d44c8853a109a34e3660e5a61beae12970da479cf44aa2954019bf70", - "sha256:df018d92fe22aaebb679a7f89fe0c0f368ec497e3dda6cb81a567610f04501f1", - "sha256:df47c55f7d74127d1b11251fe6397d84afdde0d53b90bedb46a23c0e534f9d24", - "sha256:e3968ec7d92a0c0f9ac34d5ecfd03869ec0cab0697c91a45db3fbbd95fe1b653", - "sha256:e42ba79e2efb6845ebab49c7bf20306c4edf74a0b20fc6b2ccdd1a219d12fad3", - "sha256:eae7bfe2069f9c1c5b05fc7fe5d612e5bbc089a39309904ee8b829e322dcad00", - "sha256:f5a5928ff5eb13408c62a968ac90d43f8322fd56d87008b8f9dabf3c0f6ee983", - "sha256:f60233b98423aab21d249a30eb27c389c14929f47be8430efa7dbd91493a729d", - "sha256:f60e4ad5db23f0b96e49c018596707c3ae89f5d0bd97f0ad3684bcbad899f1e7", - "sha256:f6342d643bf9a1de97e512e45e4b9560a043347e779a173250824f8b254bd5ce", - "sha256:fe41919b9d899661c5c28a8b4b0acf704510b88f27f0934ac7a7bebdd8938d5e", - "sha256:ff70f32aa316393eaf8222d518ce9118148eddb8a53073c2403863b41033eed5" + "sha256:636cb2477cec7f8952536970bc533bc43743542f70392ae026374600add5b887", + "sha256:86540386c03d588bb81d44bc3928634ff26449851e99741617ecb9037ee5ec0b" ], - "markers": "python_version >= '3.9'", - "version": "==1.20.1" - } - }, - "develop": { - "coverage": { + "markers": "python_version >= '3.8'", + "version": "==2.19.2" + }, + "pytest": { "hashes": [ - "sha256:02532fd3290bb8fa6bec876520842428e2a6ed6c27014eca81b031c2d30e3f71", - "sha256:0a4be2a28656afe279b34d4f91c3e26eccf2f85500d4a4ff0b1f8b54bf807338", - "sha256:0b3496922cb5f4215bf5caaef4cf12364a26b0be82e9ed6d050f3352cf2d7ef0", - "sha256:0c804506d624e8a20fb3108764c52e0eef664e29d21692afa375e0dd98dc384f", - "sha256:0f16649a7330ec307942ed27d06ee7e7a38417144620bb3d6e9a18ded8a2d3e5", - "sha256:16aa0830d0c08a2c40c264cef801db8bc4fc0e1892782e45bcacbd5889270509", - "sha256:18a0912944d70aaf5f399e350445738a1a20b50fbea788f640751c2ed9208b6c", - "sha256:1c503289ffef1d5105d91bbb4d62cbe4b14bec4d13ca225f9c73cde9bb46207d", - "sha256:2241ad5dbf79ae1d9c08fe52b36d03ca122fb9ac6bca0f34439e99f8327ac89f", - "sha256:25308bd3d00d5eedd5ae7d4357161f4df743e3c0240fa773ee1b0f75e6c7c0f1", - "sha256:2a876e4c3e5a2a1715a6608906aa5a2e0475b9c0f68343c2ada98110512ab1d8", - "sha256:2d04b16a6062516df97969f1ae7efd0de9c31eb6ebdceaa0d213b21c0ca1a683", - "sha256:30f445f85c353090b83e552dcbbdad3ec84c7967e108c3ae54556ca69955563e", - "sha256:31324f18d5969feef7344a932c32428a2d1a3e50b15a6404e97cba1cc9b2c631", - "sha256:34ed2186fe52fcc24d4561041979a0dec69adae7bce2ae8d1c49eace13e55c43", - "sha256:37ab6be0859141b53aa89412a82454b482c81cf750de4f29223d52268a86de67", - "sha256:37ae0383f13cbdcf1e5e7014489b0d71cc0106458878ccde52e8a12ced4298ed", - "sha256:382e7ddd5289f140259b610e5f5c58f713d025cb2f66d0eb17e68d0a94278875", - "sha256:3bb5838701ca68b10ebc0937dbd0eb81974bac54447c55cd58dea5bca8451029", - "sha256:437c576979e4db840539674e68c84b3cda82bc824dd138d56bead1435f1cb5d7", - "sha256:49f1d0788ba5b7ba65933f3a18864117c6506619f5ca80326b478f72acf3f385", - "sha256:52e92b01041151bf607ee858e5a56c62d4b70f4dac85b8c8cb7fb8a351ab2c10", - "sha256:535fde4001b2783ac80865d90e7cc7798b6b126f4cd8a8c54acfe76804e54e58", - "sha256:56f5eb308b17bca3bbff810f55ee26d51926d9f89ba92707ee41d3c061257e55", - "sha256:5add197315a054e92cee1b5f686a2bcba60c4c3e66ee3de77ace6c867bdee7cb", - "sha256:5f646a99a8c2b3ff4c6a6e081f78fad0dde275cd59f8f49dc4eab2e394332e74", - "sha256:600a1d4106fe66f41e5d0136dfbc68fe7200a5cbe85610ddf094f8f22e1b0300", - "sha256:60c458224331ee3f1a5b472773e4a085cc27a86a0b48205409d364272d67140d", - "sha256:64bdd969456e2d02a8b08aa047a92d269c7ac1f47e0c977675d550c9a0863643", - 
"sha256:66b974b145aa189516b6bf2d8423e888b742517d37872f6ee4c5be0073bd9a3c", - "sha256:684e2110ed84fd1ca5f40e89aa44adf1729dc85444004111aa01866507adf363", - "sha256:68cd53aec6f45b8e4724c0950ce86eacb775c6be01ce6e3669fe4f3a21e768ed", - "sha256:69aa417a030bf11ec46149636314c24c8d60fadb12fc0ee8f10fda0d918c879d", - "sha256:6ad935f0016be24c0e97fc8c40c465f9c4b85cbbe6eac48934c0dc4d2568321e", - "sha256:6b55ad10a35a21b8015eabddc9ba31eb590f54adc9cd39bcf09ff5349fd52125", - "sha256:6cf43c78c4282708a28e466316935ec7489a9c487518a77fa68f716c67909cec", - "sha256:6f424507f57878e424d9a95dc4ead3fbdd72fd201e404e861e465f28ea469951", - "sha256:70760b4c5560be6ca70d11f8988ee6542b003f982b32f83d5ac0b72476607b70", - "sha256:73e9439310f65d55a5a1e0564b48e34f5369bee943d72c88378f2d576f5a5751", - "sha256:7931b9e249edefb07cd6ae10c702788546341d5fe44db5b6108a25da4dca513f", - "sha256:81f34346dd63010453922c8e628a52ea2d2ccd73cb2487f7700ac531b247c8a5", - "sha256:888f8eee13f2377ce86d44f338968eedec3291876b0b8a7289247ba52cb984cd", - "sha256:95335095b6c7b1cc14c3f3f17d5452ce677e8490d101698562b2ffcacc304c8d", - "sha256:9565c3ab1c93310569ec0d86b017f128f027cab0b622b7af288696d7ed43a16d", - "sha256:95c765060e65c692da2d2f51a9499c5e9f5cf5453aeaf1420e3fc847cc060582", - "sha256:9969ef1e69b8c8e1e70d591f91bbc37fc9a3621e447525d1602801a24ceda898", - "sha256:9ca8e220006966b4a7b68e8984a6aee645a0384b0769e829ba60281fe61ec4f7", - "sha256:a39d18b3f50cc121d0ce3838d32d58bd1d15dab89c910358ebefc3665712256c", - "sha256:a66e8f628b71f78c0e0342003d53b53101ba4e00ea8dabb799d9dba0abbbcebe", - "sha256:a8de12b4b87c20de895f10567639c0797b621b22897b0af3ce4b4e204a743626", - "sha256:af41da5dca398d3474129c58cb2b106a5d93bbb196be0d307ac82311ca234342", - "sha256:b30a25f814591a8c0c5372c11ac8967f669b97444c47fd794926e175c4047ece", - "sha256:ba383dc6afd5ec5b7a0d0c23d38895db0e15bcba7fb0fa8901f245267ac30d86", - "sha256:bb4fbcab8764dc072cb651a4bcda4d11fb5658a1d8d68842a862a6610bd8cfa3", - "sha256:be9e3f68ca9edb897c2184ad0eee815c635565dbe7a0e7e814dc1f7cbab92c0a", - "sha256:bfa447506c1a52271f1b0de3f42ea0fa14676052549095e378d5bff1c505ff7b", - "sha256:cc94d7c5e8423920787c33d811c0be67b7be83c705f001f7180c7b186dcf10ca", - "sha256:cea0a27a89e6432705fffc178064503508e3c0184b4f061700e771a09de58187", - "sha256:cf95981b126f23db63e9dbe4cf65bd71f9a6305696fa5e2262693bc4e2183f5b", - "sha256:d4fe2348cc6ec372e25adec0219ee2334a68d2f5222e0cba9c0d613394e12d86", - "sha256:db0f04118d1db74db6c9e1cb1898532c7dcc220f1d2718f058601f7c3f499514", - "sha256:dd24bd8d77c98557880def750782df77ab2b6885a18483dc8588792247174b32", - "sha256:e1b5191d1648acc439b24721caab2fd0c86679d8549ed2c84d5a7ec1bedcc244", - "sha256:e5532482344186c543c37bfad0ee6069e8ae4fc38d073b8bc836fc8f03c9e250", - "sha256:e980b53a959fa53b6f05343afbd1e6f44a23ed6c23c4b4c56c6662bbb40c82ce", - "sha256:ef64c27bc40189f36fcc50c3fb8f16ccda73b6a0b80d9bd6e6ce4cffcd810bbd", - "sha256:f05031cf21699785cd47cb7485f67df619e7bcdae38e0fde40d23d3d0210d3c3" + "sha256:539c70ba6fcead8e78eebbf1115e8b589e7565830d7d006a8723f19ac8a0afb7", + "sha256:7c67fd69174877359ed9371ec3af8a3d2b04741818c51e5e99cc1742251fa93c" ], "index": "pypi", "markers": "python_version >= '3.9'", - "version": "==7.9.1" + "version": "==8.4.1" } } } diff --git a/README.md b/README.md index 97a36eb..7bcdff6 100644 --- a/README.md +++ b/README.md @@ -1,24 +1,26 @@ -# stage0_mongodb_api +# MongoDB Configurator API -This project builds a Utility Container that implements index, schema, and migration management services for a MongoDB database. -Schemas are described using the [stage0 Simple Schema]() standard. 
+This project builds the [MongoDB Configurator](https://github.com/agile-learning-institute/mongodb_configurator) API.
 
-## Quick Start for Users
-
-- Read about [Simple Schema](https://github.com/agile-learning-institute/stage0/blob/main/SIMPLE_SCHEMA.md) to understand the Schema Language
-- Read the [Reference](./REFERENCE.md) that describes how the utility works.
-- Use the [Template](https://github.com/agile-learning-institute/stage0_template_mongodb_api) to create your system mongodb_api
-
-## Quick Start for Contributors
+## Quick Start
 
 ### Prerequisites
 
-- [Python](https://www.python.org/downloads/) 3.8 or later
+- [Python](https://www.python.org/downloads/) 3.12 or later
 - [Pipenv](https://pipenv.pypa.io/en/latest/installation.html)
-- [Stage0 Developers Edition](https://github.com/agile-learning-institute/stage0/tree/main/developer_edition)
+- [Docker Desktop](https://www.docker.com/products/docker-desktop/)
+- [MongoDB Compass](https://www.mongodb.com/products/compass) *optional*
 
-### Setup
+### Run the Service
+```bash
+# Clone the repository
+git clone git@github.com:agile-learning-institute/mongodb_configurator_api.git
+cd mongodb_configurator_api
+pipenv run service
+# Open http://localhost:8082/
+```
+### Developer Setup
 ```bash
 # Install dependencies
 pipenv install
@@ -26,14 +28,130 @@ pipenv install
 # Run tests to verify setup
 pipenv run test
 
-# Start development server
-pipenv run local
 ```
-## Documentation
+### Developer Commands
+
+```bash
+# Select a test_case for the server
+export INPUT_FOLDER=./tests/test_cases/small_sample
+export INPUT_FOLDER=./tests/test_cases/large_sample
+export INPUT_FOLDER=./tests/test_cases/playground
+
+# Set Debug Mode if needed
+export LOGGING_LEVEL=DEBUG
+
+# Install dependencies
+pipenv install --dev
-
-- **[Reference](./REFERENCE.md)** - Detailed technical reference for users
-- **[Contributing](./CONTRIBUTING.md)** - Development guidelines and architecture
-- **[API Standards](https://github.com/agile-learning-institute/stage0/blob/main/developer_edition/docs/api-standards.md)** - Stage0 API standards
-- **[Collection Config Schema](./docs/collection_config_schema.yaml)** - Configuration file specification
+# Run Unit Tests and generate coverage report
+pipenv run test
+
+#####################
+# Running the test server - uses the INPUT_FOLDER setting
+pipenv run database # Start the backing mongo database
+pipenv run local    # Start the server locally
+pipenv run debug    # Start locally with DEBUG logging
+pipenv run batch    # Run locally in Batch mode (process and exit)
+
+# Drop the Testing Database - Live - Real Drop Database!!!
+pipenv run drop
+
+#####################
+# Building and Testing the container (before a PR)
+pipenv run build    # Build the container
+pipenv run service  # Run the DB, API, and SPA containers
+# visit http://localhost:8082 and "process all"
+
+pipenv run down     # Stops all testing containers
+
+################################
+# Black Box Testing with StepCI
+# https://github.com/stepci/stepci/blob/main/README.md
+export INPUT_FOLDER=./tests/test_cases/stepci
+pipenv run stepci
+
+```
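Once the service is up, you can smoke-test the API from Python as well as with the curl examples further down. A minimal sketch, assuming the API listens on port 8081 (the port the curl examples use) and that `requests` is installed; it is not a project dependency:

```python
# smoke_test.py - quick sanity check against a locally running API.
# Assumes port 8081 and the `requests` package (neither is guaranteed here).
import requests

BASE = "http://localhost:8081"

# /api/health is the Prometheus metrics/health endpoint registered in server.py
health = requests.get(f"{BASE}/api/health")
print("health:", health.status_code)

# /api/config/ returns the effective configuration as JSON
config = requests.get(f"{BASE}/api/config/")
print("config:", config.json())
```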
+## Separation of Concerns
+The `/configurator` directory contains the source code.
+```
+configurator/
+├── routes/                       # Flask HTTP Handlers
+│   ├── config_routes.py          # API Config Routes
+│   ├── configuration_routes.py   # Configuration Routes
+│   ├── database_routes.py        # Database Routes
+│   ├── dictionary_routes.py      # Dictionary Routes
+│   ├── enumerator_routes.py      # Enumerator Routes
+│   ├── migration_routes.py       # Migration Routes
+│   ├── test_data_routes.py       # Test Data Routes
+│   ├── type_routes.py            # Type Routes
+├── services/                     # Processing, Rendering Models
+│   ├── configuration_services.py # Configuration Services
+│   ├── dictionary_services.py    # Dictionary Services
+│   ├── enumerator_service.py     # Enumerator Services
+│   ├── template_service.py       # Template Services
+│   ├── type_services.py          # Type Services
+├── utils/                        # Utilities
+│   ├── config.py                 # API Configuration
+│   ├── configurator_exception.py # Exception Classes
+│   ├── ejson_encoder.py          # Extended JSON Encoder
+│   ├── file_io.py                # File IO Wrappers
+│   ├── mongo_io.py               # MongoDB Wrappers
+│   ├── route_decorators.py       # Route Decorators
+│   ├── version_manager.py        # Version Manager
+│   ├── version_number.py         # Version Number utility
+├── server.py                     # Application Entrypoint
+```
+
+## Testing
+The `tests/` directory contains Python unit tests, StepCI black-box tests, and test data.
+```
+tests/
+├── test_server.py          # Server.py unit tests
+├── models/                 # Model class unit tests
+├── routes/                 # Route class unit tests
+├── services/               # Service layer unit tests
+├── utils/                  # Utility unit tests
+├── stepci/                 # API Black Box testing
+├── test_cases/             # Test data
+│   ├── small_sample/       # Simple test configuration
+│   ├── large_sample/       # Complex test configuration
+│   ├── stepci/             # For step ci testing
+│   ├── sample_template/    # Configuration for Template
+│   ├── playground/         # Served with Stack for interactive UI testing
+│   ├── .../                # Additional test cases
+```
+The unit tests `TestConfigurationIntegration` and `TestTypeRendering` are integration tests that use the `small_sample` and `large_sample` input folders in `test_cases`.
+
+## API Documentation
+
+The complete API documentation with interactive testing is available:
+- [API Server docs/index.html](http://localhost:8081/docs/index.html) if the API is running
+- Open [index.html](./docs/index.html) with a local static server (e.g. VS Code Live Server's "Go Live")
+
+The Swagger UI provides:
+- Interactive endpoint testing
+- Auto-generated curl commands for each endpoint
+- Request/response schemas
+- Parameter documentation
+
+### Quick API Examples
+
+```bash
+# Health check
+curl -X GET http://localhost:8081/api/health
+
+# Get current configuration
+curl -X GET http://localhost:8081/api/config/
+
+# List all configurations
+curl -X GET http://localhost:8081/api/configurations/
+
+# Process all configurations
+curl -X POST http://localhost:8081/api/configurations/
+
+# Lock all types
+curl -X PATCH http://localhost:8081/api/types/
+```
+---
diff --git a/REFERENCE.md b/REFERENCE.md
deleted file mode 100644
index 5c36946..0000000
--- a/REFERENCE.md
+++ /dev/null
@@ -1,138 +0,0 @@
-# stage0_mongodb_api Reference
-
-This document provides detailed technical reference information for using the stage0_mongodb_api service. This service implements the [stage0 simple schema](https://github.com/agile-learning-institute/stage0/blob/main/SIMPLE_SCHEMA.md) standard for mongodb configuration purposes.
-
-## INPUT_FOLDER Structure
-
-The `INPUT_FOLDER` contains the configuration files that define your MongoDB collections, schemas, and data structures. The folder structure follows this pattern:
-
-```
-INPUT_FOLDER/
-├── collections/          # Collection configuration files (.yaml)
-│   ├── user.yaml
-│   ├── organization.yaml
-│   └── media.yaml
-├── data/                 # Data files (.json)
-│   ├── enumerators.json  # Enumeration definitions
-│   └── user.1.0.0.1.json # Test Data
-└── dictionary/           # Simple Schema definitions
-    ├── types/            # Primitive type definitions
-    │   ├── word.yaml
-    │   ├── sentence.yaml
-    │   └── identity.yaml
-    ├── user.1.0.0.yaml   # Collection schema versions
-    ├── organization.1.0.0.yaml
-    └── media.1.0.0.yaml
-```
-
-### Collection Configuration Files
-
-Each `.yaml` file in the `collections/` directory defines a MongoDB collection with its versioning strategy. The configuration specifies:
-- Collection name and description
-- Version definitions with schema references
-- Index management (add/drop operations)
-- Data migration pipelines
-- Schema validation rules
-
-## Collection Configuration Schema
-
-The [collection configuration schema](./docs/collection_config_schema.yaml) defines the structure for collection configuration files. Key elements include:
-
-- **name**: Collection identifier
-- **versions**: Array of version configurations
-  - **version**: Version string (e.g., "1.0.0.1")
-  - **schema**: Reference to schema definition file
-  - **add_indexes**: Indexes to create
-  - **drop_indexes**: Indexes to remove
-  - **aggregations**: Data migration pipelines
-
-## Versioning Scheme
-
-The API uses a four-component versioning scheme: `major.minor.patch.enumerator`
-
-- **Major**: Breaking changes to schema structure
-- **Minor**: Backward-compatible feature additions
-- **Patch**: Bug fixes and minor improvements
-- **Enumerator**: Sequential number for multiple changes at same version
-
-Examples:
-- `1.0.0.1` - First version of a collection
-- `1.2.3.4` - Version 1.2.3 with version 4 enumerators
-- `2.0.0.4` - Major version upgrade, with v4 enumerators
-
-## Schema Processing
-
-The API processes collection configurations to manage MongoDB schemas, indexes, and data migrations. "Processing" a collection involves, first determining if the schema version in the collection configuration is greater than the current version, and if so then:
-- Removing any existing schema validation
-- Remove specified indexes (optional)
-- Run defined migrations (optional)
-- Add any new indexes (optional)
-- Apply new Schema Validation
-- Load Test Data (optional)
-
-## Enumerators Processing
-
-The API processes enumerators from the `data/enumerators.json` file during the "process all" operation. This processing:
-
-- Loads enumerator definitions from `INPUT_FOLDER/data/enumerators.json`
-- Validates the structure of each enumerator version
-- Up-serts enumerator documents into the database using the `ENUMERATORS_COLLECTION_NAME` collection
-- Uses the `version` field as the key for upsert operations
-
-### Enumerators File Format
-
-The `enumerators.json` file contains an array of enumerator version objects:
-
-```json
-[
-  {
-    "version": 0,
-    "name": "Enumerations",
-    "status": "Deprecated",
-    "enumerators": {}
-  },
-  {
-    "version": 1,
-    "name": "Enumerations",
-    "status": "Active",
-    "enumerators": {
-      "default_status": {
-        "active": "Not Deleted",
-        "archived": "Soft Delete Indicator"
-      },
-      "media_type": {
-        "movie": "A motion picture",
-        "tv_show": "A television series"
-      }
-    }
-  }
-]
-```
-
-### Enumerators Processing Workflow
-
-When processing all collections, the API:
-
-1. **Processes Enumerators First**: Loads and up-serts all enumerator versions from `enumerators.json`
-2. **Processes Collections**: Then processes all configured collections as usual
-3. **Reports Results**: Includes enumerators processing results in the overall operation results
-
-The enumerators processing is included in the "process all" operation results under the `"enumerators"` key, with the same operation result format as collection processing.
-
-## Configuration Reference
-
-The API is configured through environment variables.
-
-| Variable | Description | Default |
-|----------|-------------|---------|
-| `MONGODB_API_PORT` | API Port number | `8081` |
-| `MONGO_DB_NAME` | MongoDB database name | `stage0` |
-| `MONGO_CONNECTION_STRING` | MongoDB connection string | `mongodb://root:example@localhost:27017/?tls=false&directConnection=true` |
-| `VERSION_COLLECTION_NAME` | MongoDB Version Collection name | `CollectionVersions` |
-| `ENUMERATORS_COLLECTION_NAME` | MongoDB Enumerators Collection name | `Enumerators` |
-| `INPUT_FOLDER` | Directory containing configurations | `/input` |
-| `LOAD_TEST_DATA` | Load Test data during processing | `false` |
-| `AUTO_PROCESS` | Process configurations on startup | `false` |
-| `EXIT_AFTER_PROCESSING` | Exit after processing | `false` |
-| `LOGGING_LEVEL` | Logging level (DEBUG, INFO, WARNING, ERROR) | `INFO` |
-
diff --git a/stage0_mongodb_api/__init__.py b/configurator/__init__.py
similarity index 100%
rename from stage0_mongodb_api/__init__.py
rename to configurator/__init__.py
diff --git a/stage0_mongodb_api/routes/__init__.py b/configurator/routes/__init__.py
similarity index 100%
rename from stage0_mongodb_api/routes/__init__.py
rename to configurator/routes/__init__.py
diff --git a/configurator/routes/config_routes.py b/configurator/routes/config_routes.py
new file mode 100644
index 0000000..6fa3cf9
--- /dev/null
+++ b/configurator/routes/config_routes.py
@@ -0,0 +1,20 @@
+from flask import Blueprint, jsonify
+from configurator.utils.config import Config
+from configurator.utils.configurator_exception import ConfiguratorEvent, ConfiguratorException
+from configurator.utils.route_decorators import event_route
+import logging
+logger = logging.getLogger(__name__)
+
+# Define the Blueprint for config routes
+def create_config_routes():
+    config_routes = Blueprint('config_routes', __name__)
+    config = Config.get_instance()
+
+    # GET /api/config - Return the current configuration as JSON
+    @config_routes.route('/', methods=['GET'])
+    @event_route("CFG-00", "GET_CONFIG", "getting config")
+    def get_config():
+        return jsonify(config.to_dict())
+
+    logger.info("Config Flask Routes Registered")
+    return config_routes
\ No newline at end of file
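The `@event_route` decorator used here (and by every handler in the route modules below) lives in `configurator/utils/route_decorators.py`, which this diff does not include. A minimal sketch of the pattern the handlers appear to rely on, inferred from usage rather than taken from the actual module:

```python
# Hypothetical sketch of configurator/utils/route_decorators.py (not in this diff).
# Inferred behavior: run the handler, surface a ConfiguratorException's event as
# JSON, and wrap any unexpected error in a new failure event.
from functools import wraps
from flask import jsonify
from configurator.utils.configurator_exception import ConfiguratorEvent, ConfiguratorException

def event_route(event_id: str, event_type: str, action: str):
    def decorator(handler):
        @wraps(handler)
        def wrapper(*args, **kwargs):
            try:
                return handler(*args, **kwargs)
            except ConfiguratorException as e:
                # The exception carries a structured event; return it to the client
                return jsonify(e.event.to_dict()), 500
            except Exception as e:
                event = ConfiguratorEvent(event_id, event_type)
                event.record_failure(f"Unexpected error {action}", {"error": str(e)})
                return jsonify(event.to_dict()), 500
        return wrapper
    return decorator
```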
diff --git a/configurator/routes/configuration_routes.py b/configurator/routes/configuration_routes.py
new file mode 100644
index 0000000..caae079
--- /dev/null
+++ b/configurator/routes/configuration_routes.py
@@ -0,0 +1,91 @@
+from flask import Blueprint, request, jsonify
+from configurator.services.configuration_services import Configuration
+from configurator.services.template_service import TemplateService
+from configurator.utils.configurator_exception import ConfiguratorEvent, ConfiguratorException
+from configurator.utils.config import Config
+from configurator.utils.file_io import FileIO, File
+from configurator.utils.route_decorators import event_route
+import logging
+
+logger = logging.getLogger(__name__)
+
+def create_configuration_routes():
+    blueprint = Blueprint('configurations', __name__)
+    config = Config.get_instance()
+
+    # GET /api/configurations - Return the current configuration files
+    @blueprint.route('/', methods=['GET'])
+    @event_route("CFG-01", "GET_CONFIGURATIONS", "listing configurations")
+    def get_configurations():
+        files = FileIO.get_documents(config.CONFIGURATION_FOLDER)
+        return jsonify([file.to_dict() for file in files])
+
+    # POST /api/configurations - Process all configuration files
+    @blueprint.route('/', methods=['POST'])
+    @event_route("CFG-ROUTES-02", "PROCESS_CONFIGURATIONS", "processing configurations")
+    def process_configurations():
+        results = ConfiguratorEvent(event_id="CFG-ROUTES-02", event_type="PROCESS_CONFIGURATIONS")
+        files = FileIO.get_documents(config.CONFIGURATION_FOLDER)
+        for file in files:
+            configuration = Configuration(file.file_name)
+            results.append_events([configuration.process()])
+        results.record_success()
+        return jsonify(results.to_dict())
+
+    # PATCH /api/configurations - Lock all configuration files
+    @blueprint.route('/', methods=['PATCH'])
+    @event_route("CFG-ROUTES-03", "LOCK_ALL_CONFIGURATIONS", "locking all configurations")
+    def lock_all_configurations():
+        result = Configuration.lock_all()
+        return jsonify(result.to_dict())
+
+    @blueprint.route('/collection/<file_name>/', methods=['POST'])
+    @event_route("CFG-ROUTES-04", "CREATE_COLLECTION", "creating collection")
+    def create_collection(file_name):
+        template_service = TemplateService()
+        result = template_service.create_collection(file_name)
+        return jsonify(result)
+
+    @blueprint.route('/<file_name>/', methods=['GET'])
+    @event_route("CFG-ROUTES-05", "GET_CONFIGURATION", "getting configuration")
+    def get_configuration(file_name):
+        configuration = Configuration(file_name)
+        return jsonify(configuration.to_dict())
+
+    @blueprint.route('/<file_name>/', methods=['PUT'])
+    @event_route("CFG-ROUTES-06", "PUT_CONFIGURATION", "updating configuration")
+    def put_configuration(file_name):
+        configuration = Configuration(file_name, request.json)
+        configuration.save()
+        return jsonify(configuration.to_dict())
+
+    @blueprint.route('/<file_name>/', methods=['DELETE'])
+    @event_route("CFG-ROUTES-07", "DELETE_CONFIGURATION", "deleting configuration")
+    def delete_configuration(file_name):
+        configuration = Configuration(file_name)
+        event = configuration.delete()
+        return jsonify(event.to_dict())
+
+    @blueprint.route('/<file_name>/', methods=['POST'])
+    @event_route("CFG-ROUTES-09", "PROCESS_CONFIGURATION", "processing configuration")
+    def process_configuration(file_name):
+        configuration = Configuration(file_name)
+        result = configuration.process()
+        return jsonify(result.to_dict())
+
+    @blueprint.route('/json_schema/<file_name>/<version>/', methods=['GET'])
+    @event_route("CFG-ROUTES-10", "GET_JSON_SCHEMA", "getting JSON schema")
+    def get_json_schema(file_name, version):
+        configuration = Configuration(file_name)
+        schema = configuration.get_json_schema(version)
+        return jsonify(schema)
+
+    @blueprint.route('/bson_schema/<file_name>/<version>/', methods=['GET'])
+    @event_route("CFG-ROUTES-11", "GET_BSON_SCHEMA", "getting BSON schema")
+    def get_bson_schema(file_name, version):
+        configuration = Configuration(file_name)
+        schema = configuration.get_bson_schema_for_version(version)
+        return jsonify(schema)
+
+    logger.info("configuration Flask Routes Registered")
+    return blueprint
\ No newline at end of file
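`ConfiguratorEvent` and `ConfiguratorException` carry the operation results these routes return, but `configurator/utils/configurator_exception.py` is also outside this diff. A rough sketch consistent with the calls made above (`record_success`, `record_failure`, `append_events`, `to_dict`, `.status`); this is an inference, not the shipped code:

```python
# Hypothetical sketch of configurator/utils/configurator_exception.py (not shown in this diff).
class ConfiguratorEvent:
    def __init__(self, event_id: str, event_type: str, event_data: dict = None):
        self.event_id = event_id
        self.event_type = event_type
        self.data = event_data
        self.status = "PENDING"
        self.sub_events = []

    def append_events(self, events: list):
        self.sub_events.extend(events)

    def record_success(self):
        self.status = "SUCCESS"

    def record_failure(self, message: str, data: dict = None):
        self.status = "FAILURE"
        self.data = {"message": message, **(data or {})}

    def to_dict(self) -> dict:
        return {
            "id": self.event_id,
            "type": self.event_type,
            "status": self.status,
            "data": self.data,
            "sub_events": [e.to_dict() for e in self.sub_events],
        }

class ConfiguratorException(Exception):
    def __init__(self, message: str, event: ConfiguratorEvent):
        super().__init__(message)
        self.event = event  # routes surface event.to_dict() to the client

    def to_dict(self) -> dict:
        # server.py logs e.to_dict() on ConfiguratorException
        return self.event.to_dict()
```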
diff --git a/configurator/routes/database_routes.py b/configurator/routes/database_routes.py
new file mode 100644
index 0000000..2e88eab
--- /dev/null
+++ b/configurator/routes/database_routes.py
@@ -0,0 +1,24 @@
+from flask import Blueprint, jsonify
+from configurator.utils.config import Config
+from configurator.utils.configurator_exception import ConfiguratorEvent, ConfiguratorException
+from configurator.utils.mongo_io import MongoIO
+from configurator.utils.route_decorators import event_route
+import logging
+logger = logging.getLogger(__name__)
+
+# Define the Blueprint for database routes
+def create_database_routes():
+    database_routes = Blueprint('database_routes', __name__)
+    config = Config.get_instance()
+
+    # DELETE /api/database - Drop the Database
+    @database_routes.route('/', methods=['DELETE'])
+    @event_route("DB-01", "DROP_DATABASE", "dropping database")
+    def drop_database():
+        mongo_io = MongoIO(config.MONGO_CONNECTION_STRING, config.MONGO_DB_NAME)
+        events = mongo_io.drop_database()
+        mongo_io.disconnect()
+        return jsonify([event.to_dict() for event in events])
+
+    logger.info("database Flask Routes Registered")
+    return database_routes
\ No newline at end of file
diff --git a/configurator/routes/dictionary_routes.py b/configurator/routes/dictionary_routes.py
new file mode 100644
index 0000000..8d22d0b
--- /dev/null
+++ b/configurator/routes/dictionary_routes.py
@@ -0,0 +1,54 @@
+from flask import Blueprint, request, jsonify
+from configurator.utils.config import Config
+from configurator.utils.configurator_exception import ConfiguratorEvent, ConfiguratorException
+from configurator.utils.file_io import FileIO
+from configurator.services.dictionary_services import Dictionary
+from configurator.utils.route_decorators import event_route
+import logging
+logger = logging.getLogger(__name__)
+
+# Define the Blueprint for dictionary routes
+def create_dictionary_routes():
+    dictionary_routes = Blueprint('dictionary_routes', __name__)
+    config = Config.get_instance()
+
+    # GET /api/dictionaries - Return the current dictionary files
+    @dictionary_routes.route('/', methods=['GET'])
+    @event_route("DIC-01", "GET_DICTIONARIES", "listing dictionaries")
+    def get_dictionaries():
+        files = FileIO.get_documents(config.DICTIONARY_FOLDER)
+        return jsonify([file.to_dict() for file in files])
+
+    # PATCH /api/dictionaries/ - Lock All Dictionaries
+    @dictionary_routes.route('/', methods=['PATCH'])
+    @event_route("DIC-04", "LOCK_ALL_DICTIONARIES", "locking all dictionaries")
+    def lock_all_dictionaries():
+        result = Dictionary.lock_all()
+        return jsonify(result.to_dict())
+
+    # GET /api/dictionaries/<file_name>/ - Return a dictionary file
+    @dictionary_routes.route('/<file_name>/', methods=['GET'])
+    @event_route("DIC-02", "GET_DICTIONARY", "getting dictionary")
+    def get_dictionary(file_name):
+        dictionary = Dictionary(file_name)
+        return jsonify(dictionary.to_dict())
+
+    # PUT /api/dictionaries/<file_name>/ - Update a dictionary file
+    @dictionary_routes.route('/<file_name>/', methods=['PUT'])
+    @event_route("DIC-03", "PUT_DICTIONARY", "updating dictionary")
+    def update_dictionary(file_name):
+        dictionary = Dictionary(file_name, request.json)
+        saved_dictionary = dictionary.save()
+        return jsonify(saved_dictionary.to_dict())
+
+    # DELETE /api/dictionaries/<file_name>/ - Delete a dictionary file
+    @dictionary_routes.route('/<file_name>/', methods=['DELETE'])
+    @event_route("DIC-05", "DELETE_DICTIONARY", "deleting dictionary")
+    def delete_dictionary(file_name):
+        dictionary = Dictionary(file_name)
+        deleted = dictionary.delete()
+        return jsonify(deleted.to_dict())
+
+    logger.info("dictionary Flask Routes Registered")
+    return dictionary_routes
\ No newline at end of file
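The route modules in this diff delegate all persistence to `FileIO` and `File` from `configurator/utils/file_io.py`, which is not included. A sketch of the surface the routes appear to depend on, inferred from the call sites; note the routes are not fully consistent about it (the test-data routes rename the `name` key from `File.to_dict()` to `file_name`, and `delete_document` is treated as returning an event by the migration routes but as a `File` by the test-data routes):

```python
# Hypothetical sketch of configurator/utils/file_io.py (not part of this diff);
# names and signatures are inferred from how the new routes call it.
import os
from datetime import datetime, timezone

class File:
    def __init__(self, file_path: str):
        self.path = file_path
        self.file_name = os.path.basename(file_path)

    def to_dict(self) -> dict:
        stat = os.stat(self.path)
        return {
            "name": self.file_name,  # test_data_routes renames this key to "file_name"
            "size": stat.st_size,
            "updated": datetime.fromtimestamp(stat.st_mtime, tz=timezone.utc).isoformat(),
        }

class FileIO:
    @staticmethod
    def get_documents(folder: str) -> list:
        """List the File entries under INPUT_FOLDER/<folder>."""
        ...

    @staticmethod
    def get_document(folder: str, file_name: str):
        """Load and parse one YAML/JSON document."""
        ...

    @staticmethod
    def put_document(folder: str, file_name: str, content) -> "File":
        """Write the document and return its File entry."""
        ...

    @staticmethod
    def delete_document(folder: str, file_name: str):
        """Delete a document (return type varies between call sites)."""
        ...
```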
diff --git a/configurator/routes/enumerator_routes.py b/configurator/routes/enumerator_routes.py
new file mode 100644
index 0000000..2ed378e
--- /dev/null
+++ b/configurator/routes/enumerator_routes.py
@@ -0,0 +1,29 @@
+from flask import Blueprint, request, jsonify
+from configurator.utils.config import Config
+from configurator.utils.configurator_exception import ConfiguratorEvent, ConfiguratorException
+from configurator.services.enumerator_service import Enumerators
+from configurator.utils.route_decorators import event_route
+import logging
+logger = logging.getLogger(__name__)
+
+def create_enumerator_routes():
+    enumerator_routes = Blueprint('enumerator_routes', __name__)
+    config = Config.get_instance()
+
+    # GET /api/enumerators - Return the content of enumerators.json
+    @enumerator_routes.route('/', methods=['GET'])
+    @event_route("ENU-01", "GET_ENUMERATORS", "getting enumerators")
+    def get_enumerators():
+        enumerators = Enumerators(None)
+        return jsonify(enumerators.to_dict())
+
+    # PUT /api/enumerators - Overwrite enumerators.json
+    @enumerator_routes.route('/', methods=['PUT'])
+    @event_route("ENU-02", "PUT_ENUMERATORS", "saving enumerators")
+    def put_enumerators():
+        enumerators = Enumerators(data=request.get_json(force=True))
+        saved_enumerators = enumerators.save()
+        return jsonify(saved_enumerators.to_dict())
+
+    logger.info("Enumerator Flask Routes Registered")
+    return enumerator_routes
\ No newline at end of file
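The payload for the PUT endpoint follows the `enumerators.json` format documented in the REFERENCE.md removed above: an array of versioned enumerator objects keyed by `version`. A sketch of exercising it, assuming a locally running API and the `requests` package:

```python
# Sketch: overwrite enumerators.json via PUT /api/enumerators/.
# The payload shape comes from the enumerators.json example in the
# (removed) REFERENCE.md; host/port are assumptions about a local run.
import requests

payload = [
    {
        "version": 1,
        "name": "Enumerations",
        "status": "Active",
        "enumerators": {
            "default_status": {
                "active": "Not Deleted",
                "archived": "Soft Delete Indicator"
            }
        }
    }
]

resp = requests.put("http://localhost:8081/api/enumerators/", json=payload)
print(resp.json())  # the saved Enumerators rendered via to_dict()
```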
diff --git a/configurator/routes/migration_routes.py b/configurator/routes/migration_routes.py
new file mode 100644
index 0000000..c14b501
--- /dev/null
+++ b/configurator/routes/migration_routes.py
@@ -0,0 +1,61 @@
+from flask import Blueprint, request, jsonify
+from configurator.utils.config import Config
+from configurator.utils.configurator_exception import ConfiguratorEvent, ConfiguratorException
+from configurator.utils.file_io import FileIO
+from configurator.utils.route_decorators import event_route
+import logging
+import os
+logger = logging.getLogger(__name__)
+
+# Define the Blueprint for migration routes
+def create_migration_routes():
+    migration_routes = Blueprint('migration_routes', __name__)
+    config = Config.get_instance()
+
+    # GET /api/migrations/ - List all migration files
+    @migration_routes.route('/', methods=['GET'])
+    @event_route("MIG-01", "GET_MIGRATIONS", "listing migrations")
+    def get_migrations():
+        files = FileIO.get_documents(config.MIGRATIONS_FOLDER)
+        filenames = [file.file_name for file in files]
+        return jsonify(filenames)
+
+    # GET /api/migrations/<file_name>/ - Get a migration file
+    @migration_routes.route('/<file_name>/', methods=['GET'])
+    @event_route("MIG-02", "GET_MIGRATION", "getting migration")
+    def get_migration(file_name):
+        try:
+            content = FileIO.get_document(config.MIGRATIONS_FOLDER, file_name)
+            return jsonify(content)
+        except Exception as e:
+            raise ConfiguratorException(f"Migration file {file_name} not found", ConfiguratorEvent(event_id="MIG-02", event_type="MIGRATION_NOT_FOUND"))
+
+    # PUT /api/migrations/<file_name>/ - Update or create a migration file
+    @migration_routes.route('/<file_name>/', methods=['PUT'])
+    @event_route("MIG-08", "UPDATE_MIGRATION", "updating migration")
+    def put_migration(file_name):
+        content = request.get_json(force=True)
+        file = FileIO.put_document(config.MIGRATIONS_FOLDER, file_name, content)
+        return jsonify(file.to_dict())
+
+    # DELETE /api/migrations/<file_name>/ - Delete a migration file
+    @migration_routes.route('/<file_name>/', methods=['DELETE'])
+    @event_route("MIG-06", "DELETE_MIGRATION", "deleting migration")
+    def delete_migration(file_name):
+        event = ConfiguratorEvent(event_id="MIG-06", event_type="DELETE_MIGRATION")
+        try:
+            delete_event = FileIO.delete_document(config.MIGRATIONS_FOLDER, file_name)
+            if delete_event.status == "SUCCESS":
+                event.record_success()
+            else:
+                event.append_events([delete_event])
+                event.record_failure("error deleting migration")
+        except ConfiguratorException as e:
+            event.append_events([e.event])
+            event.record_failure("error deleting migration")
+        except Exception as e:
+            event.record_failure("unexpected error deleting migration", {"error": str(e)})
+        return jsonify(event.to_dict())
+
+    logger.info("Migration Flask Routes Registered")
+    return migration_routes
\ No newline at end of file
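Migration files are the aggregation-pipeline documents referenced by collection configurations (the `aggregations` key in the removed REFERENCE.md). The exact on-disk format is not shown in this diff; a sketch of creating one through the API, assuming a migration is stored as a JSON array of aggregation stages and that the file name is hypothetical:

```python
# Sketch: store a migration as a JSON array of MongoDB aggregation stages.
# The array-of-stages shape and the file name are assumptions; only the
# endpoint itself comes from the diff.
import requests

pipeline = [
    {"$addFields": {"full_name": {"$concat": ["$first_name", " ", "$last_name"]}}},
    {"$unset": ["first_name", "last_name"]},
    {"$out": "user"}  # write the migrated documents back to the collection
]

resp = requests.put(
    "http://localhost:8081/api/migrations/add_full_name.json/",
    json=pipeline,
)
print(resp.json())  # File metadata from FileIO.put_document
```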
diff --git a/configurator/routes/test_data_routes.py b/configurator/routes/test_data_routes.py
new file mode 100644
index 0000000..00dee7a
--- /dev/null
+++ b/configurator/routes/test_data_routes.py
@@ -0,0 +1,84 @@
+from flask import Blueprint, request, jsonify, abort, Response
+from configurator.utils.config import Config
+from configurator.utils.configurator_exception import ConfiguratorEvent, ConfiguratorException
+from configurator.utils.file_io import FileIO
+from configurator.utils.route_decorators import event_route
+import json
+import logging
+import os
+logger = logging.getLogger(__name__)
+
+class TestDataJSONEncoder(json.JSONEncoder):
+    """Custom JSON encoder that preserves MongoDB extended JSON format for test data"""
+    def default(self, obj):
+        # For test data, we want to preserve the original format
+        # This encoder will be used specifically for test data responses
+        return super().default(obj)
+
+    def encode(self, obj):
+        # Override encode to preserve MongoDB extended JSON format
+        if isinstance(obj, dict):
+            # Recursively process dictionaries to preserve $oid and $date
+            return super().encode(obj)
+        return super().encode(obj)
+
+# Define the Blueprint for test_data routes
+def create_test_data_routes():
+    test_data_routes = Blueprint('test_data_routes', __name__)
+    config = Config.get_instance()
+
+    # GET /api/test_data - Return the current test_data files (only .json)
+    @test_data_routes.route('/', methods=['GET'])
+    @event_route("TST-01", "GET_TEST_DATA_FILES", "getting test data files")
+    def get_data_files():
+        files = FileIO.get_documents(config.TEST_DATA_FOLDER)
+        # Only include .json files, renaming the 'name' key to 'file_name'
+        return jsonify([
+            {**{('file_name' if k == 'name' else k): v for k, v in file.to_dict().items()}}
+            for file in files if file.file_name.endswith('.json')
+        ])
+
+    # GET /api/test_data/<file_name>/ - Return a test_data file (only .json)
+    @test_data_routes.route('/<file_name>/', methods=['GET'])
+    @event_route("TST-02", "GET_TEST_DATA", "getting test data")
+    def get_test_data(file_name):
+        if not file_name.endswith('.json'):
+            abort(404)
+
+        # Read the raw JSON file content to preserve MongoDB extended JSON format
+        config = Config.get_instance()
+        folder = os.path.join(config.INPUT_FOLDER, config.TEST_DATA_FOLDER)
+        file_path = os.path.join(folder, file_name)
+
+        try:
+            with open(file_path, 'r', encoding='utf-8') as f:
+                raw_content = f.read()
+            return Response(raw_content, mimetype='application/json')
+        except Exception as e:
+            event = ConfiguratorEvent("TST-02", "GET_TEST_DATA")
+            event.record_failure(f"Failed to read test data file {file_name}: {str(e)}")
+            raise ConfiguratorException(f"Failed to read test data file {file_name}: {str(e)}", event)
+
+    # PUT /api/test_data/<file_name>/ - Update a test_data file (only .json)
+    @test_data_routes.route('/<file_name>/', methods=['PUT'])
+    @event_route("TST-03", "PUT_TEST_DATA", "updating test data")
+    def update_test_data(file_name):
+        if not file_name.endswith('.json'):
+            abort(400, description='Test data files must be .json')
+        file = FileIO.put_document(config.TEST_DATA_FOLDER, file_name, request.json)
+        d = file.to_dict()
+        d['file_name'] = d.pop('name', file_name)
+        return jsonify(d)
+
+    # DELETE /api/test_data/<file_name>/ - Delete a test_data file (only .json)
+    @test_data_routes.route('/<file_name>/', methods=['DELETE'])
+    @event_route("TST-04", "DELETE_TEST_DATA", "deleting test data")
+    def delete_test_data(file_name):
+        if not file_name.endswith('.json'):
+            abort(404)
+        file = FileIO.delete_document(config.TEST_DATA_FOLDER, file_name)
+        d = file.to_dict()
+        d['file_name'] = d.pop('name', file_name)
+        return jsonify(d)
+
+    logger.info("test_data Flask Routes Registered")
+    return test_data_routes
\ No newline at end of file
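The test-data endpoints deliberately return MongoDB extended JSON (`$oid`, `$date`) exactly as stored on disk. On the consuming side, pymongo's `bson.json_util` (pymongo is already pinned in the Pipfile.lock above) can parse that format into native BSON types; a small sketch with an illustrative document:

```python
# Sketch: parse MongoDB extended JSON, as served by GET /api/test_data/<file>/,
# into native BSON types using pymongo's bson.json_util.
from bson import json_util

raw = '[{"_id": {"$oid": "65f1c0de2f8fb64d2e5b4b21"}, "created": {"$date": "2024-01-01T00:00:00Z"}}]'
docs = json_util.loads(raw)

print(type(docs[0]["_id"]))      # <class 'bson.objectid.ObjectId'>
print(type(docs[0]["created"]))  # <class 'datetime.datetime'>
```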
===============") + sys.exit(0) + +# Register the signal handler +signal.signal(signal.SIGTERM, handle_exit) +signal.signal(signal.SIGINT, handle_exit) + +# Initialize Flask App +from flask import Flask +from configurator.utils.ejson_encoder import MongoJSONEncoder +project_root = os.path.dirname(os.path.dirname(os.path.abspath(__file__))) +docs_path = os.path.join(project_root, 'docs') +app = Flask(__name__, static_folder=docs_path, static_url_path='/docs') +# app.json = MongoJSONEncoder(app) # Commented out to test if this causes MongoDB object conversion + +# Auto-processing logic - runs when module is imported (including by Gunicorn) +if config.AUTO_PROCESS: + try: + logger.info(f"============= Auto Processing is Starting ===============") + event = ConfiguratorEvent(event_id="AUTO-00", event_type="PROCESS") + files = FileIO.get_documents(config.CONFIGURATION_FOLDER) + for file in files: + logger.info(f"Processing Configuration: {file.file_name}") + configuration = Configuration(file.file_name) + event.append_events([configuration.process()]) + logger.info(f"Processing Output: {app.json.dumps(event.to_dict())}") + logger.info(f"============= Auto Processing is Completed ===============") + except ConfiguratorException as e: + logger.error(f"Configurator error processing all configurations: {app.json.dumps(e.to_dict())}") + sys.exit(1) + except Exception as e: + logger.error(f"Unexpected error processing all configurations: {str(e)}") + sys.exit(1) + +if config.EXIT_AFTER_PROCESSING: + logger.info(f"============= Exiting After Processing ===============") + sys.exit(0) + +# Apply Prometheus monitoring middleware +from prometheus_flask_exporter import PrometheusMetrics +metrics = PrometheusMetrics(app, path='/api/health') +metrics.info('app_info', 'Application info', version=config.BUILT_AT) + +# Register flask routes +from configurator.routes.config_routes import create_config_routes +from configurator.routes.configuration_routes import create_configuration_routes +from configurator.routes.dictionary_routes import create_dictionary_routes +from configurator.routes.type_routes import create_type_routes +from configurator.routes.test_data_routes import create_test_data_routes +from configurator.routes.database_routes import create_database_routes +from configurator.routes.enumerator_routes import create_enumerator_routes +from configurator.routes.migration_routes import create_migration_routes + +app.register_blueprint(create_config_routes(), url_prefix='/api/config') +app.register_blueprint(create_configuration_routes(), url_prefix='/api/configurations') +app.register_blueprint(create_dictionary_routes(), url_prefix='/api/dictionaries') +app.register_blueprint(create_type_routes(), url_prefix='/api/types') +app.register_blueprint(create_test_data_routes(), url_prefix='/api/test_data') +app.register_blueprint(create_database_routes(), url_prefix='/api/database') +app.register_blueprint(create_enumerator_routes(), url_prefix='/api/enumerators') +app.register_blueprint(create_migration_routes(), url_prefix='/api/migrations') + +logger.info(f"============= Routes Registered ===============") + +# Start the server (only when run directly, not when imported by Gunicorn) +if __name__ == "__main__": + logger.info(f"============= Starting Server ===============") + logger.info(f"Starting Flask server on port {config.API_PORT}...") + app.run(host="0.0.0.0", port=config.API_PORT) + \ No newline at end of file diff --git a/stage0_mongodb_api/services/__init__.py b/configurator/services/__init__.py 
diff --git a/stage0_mongodb_api/services/__init__.py b/configurator/services/__init__.py
similarity index 100%
rename from stage0_mongodb_api/services/__init__.py
rename to configurator/services/__init__.py
diff --git a/configurator/services/configuration_services.py b/configurator/services/configuration_services.py
new file mode 100644
index 0000000..ab33f5a
--- /dev/null
+++ b/configurator/services/configuration_services.py
@@ -0,0 +1,407 @@
+from configurator.services.dictionary_services import Dictionary
+from configurator.utils.config import Config
+from configurator.utils.configurator_exception import ConfiguratorEvent, ConfiguratorException
+from configurator.services.enumerator_service import Enumerators
+from configurator.utils.file_io import FileIO, File
+from configurator.utils.mongo_io import MongoIO
+from configurator.utils.version_manager import VersionManager
+from configurator.utils.version_number import VersionNumber
+import os
+
+class Configuration:
+    def __init__(self, file_name: str, document: dict = None):
+        self.config = Config.get_instance()
+        self.file_name = file_name
+        if not document:
+            document = FileIO.get_document(self.config.CONFIGURATION_FOLDER, file_name)
+
+        self.title = document.get("title", "")
+        self.description = document.get("description", "")
+        self.versions = [Version(file_name.replace('.yaml', ''), v, self.config) for v in document.get("versions", [])]
+        self._locked = document.get("_locked", False)
+
+    def to_dict(self):
+        return {
+            "file_name": self.file_name,
+            "title": self.title,
+            "description": self.description,
+            "versions": [v.to_dict() for v in self.versions],
+            "_locked": bool(self._locked),
+        }
+
+    def save(self):
+        """Save the configuration and return the Configuration object."""
+        try:
+            # Save the cleaned content
+            FileIO.put_document(self.config.CONFIGURATION_FOLDER, self.file_name, self.to_dict())
+            return self
+        except Exception as e:
+            event = ConfiguratorEvent("CFG-ROUTES-06", "PUT_CONFIGURATION")
+            event.record_failure(f"Failed to save configuration {self.file_name}: {str(e)}")
+            raise ConfiguratorException(f"Failed to save configuration {self.file_name}: {str(e)}", event)
+
+    @staticmethod
+    def lock_all():
+        """Lock all configuration files."""
+        config = Config.get_instance()
+        files = FileIO.get_documents(config.CONFIGURATION_FOLDER)
+        event = ConfiguratorEvent("CFG-07", "LOCK_ALL_CONFIGURATIONS")
+
+        for file in files:
+            try:
+                sub_event = ConfiguratorEvent(f"CFG-{file.file_name}", "LOCK_CONFIGURATION")
+                configuration = Configuration(file.file_name)
+                # Set and persist the lock flag
+                configuration._locked = True
+                configuration.save()
+                sub_event.record_success()
+                event.append_events([sub_event])
+            except ConfiguratorException as ce:
+                event.append_events([ce.event])
+                event.record_failure(f"ConfiguratorException locking configuration {file.file_name}")
+                raise ConfiguratorException(f"ConfiguratorException locking configuration {file.file_name}", event)
+            except Exception as e:
+                sub_event = ConfiguratorEvent(f"CFG-{file.file_name}", "LOCK_CONFIGURATION")
+                sub_event.record_failure(f"Failed to lock configuration {file.file_name}: {str(e)}")
+                event.append_events([sub_event])
+                event.record_failure(f"Unexpected error locking configuration {file.file_name}")
+                raise ConfiguratorException(f"Unexpected error locking configuration {file.file_name}", event)
+
+        event.record_success()
+        return event
+
ConfiguratorEvent(event_id="CFG-ROUTES-07", event_type="DELETE_CONFIGURATION") + try: + delete_event = FileIO.delete_document(self.config.CONFIGURATION_FOLDER, self.file_name) + if delete_event.status == "SUCCESS": + event.record_success() + else: + event.append_events([delete_event]) + event.record_failure("error deleting configuration") + except ConfiguratorException as e: + event.append_events([e.event]) + event.record_failure("error deleting configuration") + except Exception as e: + event.record_failure("unexpected error deleting configuration", {"error": str(e)}) + return event + + def lock_unlock(self): + try: + # Toggle the locked state and persist it + self._locked = not self._locked + # Save the lock state directly without the lock check + FileIO.put_document(self.config.CONFIGURATION_FOLDER, self.file_name, self.to_dict()) + # Create a File object with the current lock state + file_path = os.path.join(self.config.INPUT_FOLDER, self.config.CONFIGURATION_FOLDER, self.file_name) + file = File(file_path) + return file + except ConfiguratorException as e: + raise + except Exception as e: + event = ConfiguratorEvent(event_id="CFG-ROUTES-08", event_type="LOCK_UNLOCK_CONFIGURATION") + event.record_failure("unexpected error locking/unlocking configuration", {"error": str(e)}) + raise ConfiguratorException("Unexpected error locking/unlocking configuration", event) + + def process(self) -> ConfiguratorEvent: + config = Config.get_instance() + event = ConfiguratorEvent(event_id="CFG-00", event_type="PROCESS") + mongo_io = MongoIO(self.config.MONGO_CONNECTION_STRING, self.config.MONGO_DB_NAME) + try: + # Add configuration context to main event + event.data = { + "configuration_file": self.file_name, + "configuration_name": self.file_name.replace('.yaml', ''), + "configuration_title": self.title, + "version_count": len(self.versions) + } + + for version in self.versions: + current_version = VersionManager.get_current_version(mongo_io, self.file_name.replace('.yaml', '')) + if version.collection_version <= current_version: + sub_event = ConfiguratorEvent( + event_id="PRO-00", + event_type="SKIP_VERSION", + event_data={ + "configuration_file": self.file_name, + "version": version.to_dict(), + "current_version": current_version.get_version_str(), + "skip_reason": "version_already_processed" + }, + ) + sub_event.record_success() + event.append_events([sub_event]) + continue + event.append_events([version.process(mongo_io)]) + + # Load enumerators into database + sub_event = ConfiguratorEvent(event_id="PRO-08", event_type="LOAD_ENUMERATORS") + try: + enumerators = Enumerators(None) + sub_event.data = enumerators.to_dict() + for enum_doc in enumerators.dict: + # Upsert based on enums version number + mongo_io.upsert(config.ENUMERATORS_COLLECTION_NAME, + {"version": enum_doc["version"]}, + enum_doc + ) + sub_event.record_success() + except Exception as e: + sub_event.record_failure({"error": str(e)}) + event.append_events([sub_event]) + + event.record_success() + mongo_io.disconnect() + return event + except ConfiguratorException as e: + event.append_events([e.event]) + event.record_failure("error processing configuration") + mongo_io.disconnect() + return event + except Exception as e: + event.record_failure("unexpected error processing configuration", {"error": str(e)}) + mongo_io.disconnect() + return event + + def get_json_schema(self, version: str) -> dict: + version_obj = next((v for v in self.versions if v.version_str == version), None) + if version_obj is None: + data = {"message": f"Version 
{version} not found"} + event = ConfiguratorEvent(event_id="CFG-01", event_type="RENDER", event_data=data) + raise ConfiguratorException("Version not found", event, data) + # Get the correct enumerations version for this configuration version + enumerations = Enumerators(None).version(version_obj.collection_version.get_enumerator_version()) + return version_obj.get_json_schema(enumerations) + + def get_bson_schema_for_version(self, version: str): + version_obj = next((v for v in self.versions if v.version_str == version), None) + if version_obj is None: + data = {"message": f"Version {version} not found"} + event = ConfiguratorEvent(event_id="CFG-02", event_type="RENDER", event_data=data) + raise ConfiguratorException("Version not found", event, data) + # Get the correct enumerations version for this configuration version + enumerations = Enumerators(None).version(version_obj.collection_version.get_enumerator_version()) + return version_obj.get_bson_schema(enumerations) + +class Version: + def __init__(self, collection_name: str, version: dict, config): + self.config = config + self.collection_name = collection_name + # Always construct VersionNumber with 4-part version string + self.collection_version = VersionNumber(f"{collection_name}.{version['version']}") + self.version_str = self.collection_version.get_version_str() + self.drop_indexes = version.get("drop_indexes", []) + self.add_indexes = version.get("add_indexes", []) + self.migrations = version.get("migrations", []) + self.test_data = version.get("test_data", None) + + def to_dict(self): + return { + "version": self.collection_version.get_version_str(), + "drop_indexes": self.drop_indexes, + "add_indexes": self.add_indexes, + "migrations": self.migrations, + "test_data": self.test_data, + } + + def get_json_schema(self, enumerations) -> dict: + """Get JSON schema for this version with provided enumerations.""" + # Load dictionary data first + dictionary_filename: str = self.collection_version.get_schema_filename() + dictionary_data: dict = FileIO.get_document(self.config.DICTIONARY_FOLDER, dictionary_filename) + dictionary: Dictionary = Dictionary(dictionary_filename, dictionary_data) + return dictionary.get_json_schema(enumerations) + + def get_bson_schema(self, enumerations) -> dict: + """Get BSON schema for this version with provided enumerations.""" + # Load dictionary data first + dictionary_filename: str = self.collection_version.get_schema_filename() + dictionary_data: dict = FileIO.get_document(self.config.DICTIONARY_FOLDER, dictionary_filename) + dictionary: Dictionary = Dictionary(dictionary_filename, dictionary_data) + return dictionary.get_bson_schema(enumerations) + + def process(self, mongo_io: MongoIO) -> ConfiguratorEvent: + """Process this version with proper event nesting.""" + event = ConfiguratorEvent(event_id=f"{self.collection_name}.{self.version_str}", event_type="PROCESS") + + # Add version context to main event + event.data = { + "collection_name": self.collection_name, + "version": self.version_str, + "drop_indexes_count": len(self.drop_indexes), + "add_indexes_count": len(self.add_indexes), + "migrations_count": len(self.migrations), + "has_test_data": self.test_data is not None, + "test_data_file": self.test_data + } + + try: + # Remove schema validation + sub_event = ConfiguratorEvent(event_id="PRO-01", event_type="REMOVE_SCHEMA_VALIDATION") + sub_event.data = { + "collection_name": self.collection_name, + "version": self.version_str + } + 
sub_event.append_events(mongo_io.remove_schema_validation(self.collection_name)) + sub_event.record_success() + event.append_events([sub_event]) + + # Remove indexes + sub_event = ConfiguratorEvent(event_id="PRO-02", event_type="REMOVE_INDEXES") + if self.drop_indexes: + sub_event.data = { + "collection_name": self.collection_name, + "version": self.version_str, + "indexes_to_drop": self.drop_indexes, + "index_count": len(self.drop_indexes) + } + for index in self.drop_indexes: + sub_event.append_events(mongo_io.remove_index(self.collection_name, index)) + # Check if any child events failed + if any(child.status == "FAILURE" for child in sub_event.sub_events): + sub_event.record_failure("One or more index removal operations failed") + else: + sub_event.record_success() + else: + sub_event.data = { + "collection_name": self.collection_name, + "version": self.version_str, + "message": "No indexes to drop" + } + sub_event.record_success() + event.append_events([sub_event]) + + # Execute migrations + sub_event = ConfiguratorEvent(event_id="PRO-03", event_type="EXECUTE_MIGRATIONS") + if self.migrations: + sub_event.data = { + "collection_name": self.collection_name, + "version": self.version_str, + "migration_files": self.migrations, + "migration_count": len(self.migrations) + } + for filename in self.migrations: + migration_file = os.path.join(self.config.INPUT_FOLDER, self.config.MIGRATIONS_FOLDER, filename) + sub_event.append_events(mongo_io.execute_migration_from_file(self.collection_name, migration_file)) + # Check if any child events failed + if any(child.status == "FAILURE" for child in sub_event.sub_events): + sub_event.record_failure("One or more migration operations failed") + else: + sub_event.record_success() + else: + sub_event.data = { + "collection_name": self.collection_name, + "version": self.version_str, + "message": "No migrations to execute" + } + sub_event.record_success() + event.append_events([sub_event]) + + # Add indexes + sub_event = ConfiguratorEvent(event_id="PRO-04", event_type="ADD_INDEXES") + if self.add_indexes: + sub_event.data = { + "collection_name": self.collection_name, + "version": self.version_str, + "indexes_to_add": self.add_indexes, + "index_count": len(self.add_indexes) + } + for index in self.add_indexes: + sub_event.append_events(mongo_io.add_index(self.collection_name, index)) + # Check if any child events failed + if any(child.status == "FAILURE" for child in sub_event.sub_events): + sub_event.record_failure("One or more index creation operations failed") + else: + sub_event.record_success() + else: + sub_event.data = { + "collection_name": self.collection_name, + "version": self.version_str, + "message": "No indexes to add" + } + sub_event.record_success() + event.append_events([sub_event]) + + # Apply schema validation + sub_event = ConfiguratorEvent(event_id="PRO-05", event_type="APPLY_SCHEMA_VALIDATION") + try: + # Get the correct enumerations version for this version + enumerations = Enumerators(None).version(self.collection_version.get_enumerator_version()) + # Render the BSON schema for this version + bson_schema: dict = self.get_bson_schema(enumerations) + + # Add schema context to event + sub_event.data = {"collection_name": self.collection_name, "version": self.collection_version.get_version_str()} + sub_event.append_events(mongo_io.apply_schema_validation(self.collection_name, bson_schema)) + sub_event.record_success() + except ConfiguratorException as e: + # Properly nest the exception event + sub_event.append_events([e.event]) + 
sub_event.record_failure("error rendering schema") + event.append_events([sub_event]) + event.record_failure("error processing version") + return event + except Exception as e: + # Handle unexpected exceptions + sub_event.record_failure("unexpected error rendering schema", {"error": str(e)}) + event.append_events([sub_event]) + event.record_failure("error processing version") + return event + event.append_events([sub_event]) + + # Load test data + sub_event = ConfiguratorEvent(event_id="PRO-06", event_type="LOAD_TEST_DATA") + if self.test_data: + test_data_path = os.path.join(self.config.INPUT_FOLDER, self.config.TEST_DATA_FOLDER, self.test_data) + sub_event.data = { + "collection_name": self.collection_name, + "version": self.version_str, + "test_data_file": self.test_data, + "test_data_path": test_data_path + } + sub_event.append_events(mongo_io.load_json_data(self.collection_name, test_data_path)) + # Check if any child events failed + if any(child.status == "FAILURE" for child in sub_event.sub_events): + sub_event.record_failure("Test data loading operation failed") + else: + sub_event.record_success() + else: + sub_event.data = { + "collection_name": self.collection_name, + "version": self.version_str, + "message": "No test data to load" + } + sub_event.record_success() + event.append_events([sub_event]) + + # Update version + sub_event = ConfiguratorEvent(event_id="PRO-07", event_type="UPDATE_VERSION") + try: + mongo_io.upsert( + self.config.VERSION_COLLECTION_NAME, + {"collection_name": self.collection_name}, + {"collection_name": self.collection_name, "current_version": self.collection_version.version} + ) + sub_event.data = { + "collection_name": self.collection_name, + "new_version": self.collection_version.get_version_str(), + "version_number": self.collection_version.version + } + sub_event.record_success() + except Exception as e: + sub_event.record_failure({"error": str(e)}) + event.append_events([sub_event]) + + event.record_success() + return event + + except ConfiguratorException as e: + # This should not happen since we handle ConfiguratorException above + event.append_events([e.event]) + event.record_failure("error processing version") + return event + except Exception as e: + event.record_failure("unexpected error processing version", {"error": str(e)}) + return event \ No newline at end of file diff --git a/configurator/services/dictionary_services.py b/configurator/services/dictionary_services.py new file mode 100644 index 0000000..881c6c9 --- /dev/null +++ b/configurator/services/dictionary_services.py @@ -0,0 +1,329 @@ +from configurator.services.type_services import Type +from configurator.utils.configurator_exception import ConfiguratorEvent, ConfiguratorException +from configurator.services.enumerator_service import Enumerators +from configurator.utils.file_io import FileIO, File +from configurator.utils.config import Config +import os + + +class Dictionary: + def __init__(self, file_name: str = "", document: dict = {}): + self.config = Config.get_instance() + self.file_name = file_name + self._locked = False + self.property = None + + if document: + self._locked = document.get("_locked", False) + self.property = Property("root", document) + else: + document_data = FileIO.get_document(self.config.DICTIONARY_FOLDER, file_name) + self._locked = document_data.get("_locked", False) + self.property = Property("root", document_data) + + def to_dict(self): + result = self.property.to_dict() + result["file_name"] = self.file_name + result["_locked"] = self._locked + 
return result + + def save(self): + """Save the dictionary and return the Dictionary object.""" + try: + # Save the cleaned content + FileIO.put_document(self.config.DICTIONARY_FOLDER, self.file_name, self.to_dict()) + return self + except Exception as e: + event = ConfiguratorEvent("DIC-03", "PUT_DICTIONARY") + event.record_failure(f"Failed to save dictionary {self.file_name}: {str(e)}") + raise ConfiguratorException(f"Failed to save dictionary {self.file_name}: {str(e)}", event) + + def get_json_schema(self, enumerations, ref_stack: list = None): + if ref_stack is None: + ref_stack = [] + return self.property.get_json_schema(enumerations, ref_stack) + + def get_bson_schema(self, enumerations, ref_stack: list = None): + if ref_stack is None: + ref_stack = [] + return self.property.get_bson_schema(enumerations, ref_stack) + + @staticmethod + def lock_all(): + """Lock all dictionary files.""" + config = Config.get_instance() + files = FileIO.get_documents(config.DICTIONARY_FOLDER) + event = ConfiguratorEvent("DIC-05", "LOCK_ALL_DICTIONARIES") + + for file in files: + try: + dictionary = Dictionary(file.file_name) + dictionary._locked = True + dictionary.save() + sub_event = ConfiguratorEvent(f"DIC-{file.file_name}", "LOCK_DICTIONARY") + sub_event.record_success() + event.append_events([sub_event]) + except ConfiguratorException as ce: + event.append_events([ce.event]) + event.record_failure(f"ConfiguratorException locking dictionary {file.file_name}") + raise ConfiguratorException(f"ConfiguratorException locking dictionary {file.file_name}", event) + except Exception as e: + sub_event = ConfiguratorEvent(f"DIC-{file.file_name}", "LOCK_DICTIONARY") + sub_event.record_failure(f"Failed to lock dictionary {file.file_name}: {str(e)}") + event.append_events([sub_event]) + event.record_failure(f"Unexpected error locking dictionary {file.file_name}") + raise ConfiguratorException(f"Unexpected error locking dictionary {file.file_name}", event) + + event.record_success() + return event + + def delete(self): + if self._locked: + event = ConfiguratorEvent(event_id="DIC-05", event_type="DELETE_DICTIONARY", event_data={"error": "Dictionary is locked"}) + raise ConfiguratorException("Cannot delete locked dictionary", event) + event = ConfiguratorEvent(event_id="DIC-05", event_type="DELETE_DICTIONARY") + try: + delete_event = FileIO.delete_document(self.config.DICTIONARY_FOLDER, self.file_name) + if delete_event.status == "SUCCESS": + event.record_success() + else: + event.append_events([delete_event]) + event.record_failure("error deleting dictionary") + except ConfiguratorException as e: + event.append_events([e.event]) + event.record_failure("error deleting dictionary") + except Exception as e: + event.record_failure("unexpected error deleting dictionary", {"error": str(e)}) + return event + + +class Property: + def __init__(self, name: str, property: dict): + self.config = Config.get_instance() + self.name = name + self.ref = property.get("ref", None) + self.description = property.get("description", "Missing Required Description") + self.type = property.get("type", "void") + self.required = property.get("required", False) + self.enums = property.get("enums", None) + self.additional_properties = property.get("additionalProperties", False) + self.properties = {} + self.items = None + self.one_of = None + + properties_data = property.get("properties", {}) + + # Initialize properties if this is an object type + if self.type == "object": + for prop_name, prop_data in properties_data.items(): + self.properties[prop_name] = Property(prop_name, prop_data) +
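+        # Note: object properties are parsed recursively, so a dictionary
+        # document becomes a tree of Property nodes rooted at the "root"
+        # property created in Dictionary.__init__.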
+ # Initialize one_of if present + one_of_data = property.get("one_of", None) + if one_of_data: + self.one_of = OneOf(one_of_data) + + # Initialize items if this is an array type + if self.type == "array": + items_data = property.get("items", {}) + if items_data: + self.items = Property("items", items_data) + + if self.type == "enum" or self.type == "enum_array": + self.enums = property.get("enums", None) + + def to_dict(self): + if self.ref: + return {"ref": self.ref} + result = {} + result["description"] = self.description + result["type"] = self.type + result["required"] = self.required + + if self.type == "object": + result["properties"] = {} + for prop_name, prop in self.properties.items(): + result["properties"][prop_name] = prop.to_dict() + result["additionalProperties"] = self.additional_properties + + # Add one_of if present + if self.one_of: + result["one_of"] = self.one_of.to_dict() + + elif self.type == "array": + result["items"] = self.items.to_dict() + + elif self.type in ["enum", "enum_array"]: + result["enums"] = self.enums + + return result + + def get_json_schema(self, enumerations, ref_stack: list = None): + if ref_stack is None: + ref_stack = [] + + if self.ref: + return self._handle_ref_schema(enumerations, ref_stack, "json") + + if self.type == "object": + schema = {} + schema["description"] = self.description + schema["type"] = "object" + schema["properties"] = {} + for prop_name, prop in self.properties.items(): + schema["properties"][prop_name] = prop.get_json_schema(enumerations, ref_stack) + required_props = self._get_required() + if required_props: + schema["required"] = required_props + schema["additionalProperties"] = self.additional_properties + + # Handle one_of structure + if self.one_of: + schema["oneOf"] = self.one_of.get_json_schema(enumerations, ref_stack) + + return schema + + elif self.type == "array": + schema = {} + schema["description"] = self.description + schema["type"] = "array" + if self.items: + schema["items"] = self.items.get_json_schema(enumerations, ref_stack) + return schema + + elif self.type == "enum": + schema = {} + schema["description"] = self.description + schema["type"] = "string" + if self.enums: + schema["enum"] = enumerations.get_enum_values(self.enums) + return schema + + elif self.type == "enum_array": + schema = {} + schema["description"] = self.description + schema["type"] = "array" + if self.enums: + schema["items"] = {"type": "string", "enum": enumerations.get_enum_values(self.enums)} + return schema + + elif self.type: + # Reference a custom type + custom_type = Type(f"{self.type}.yaml") + custom_schema = custom_type.get_json_schema() + custom_schema["description"] = self.description + return custom_schema + else: + raise ConfiguratorException(f"Invalid dictionary property type: {self.type}", + ConfiguratorEvent(event_id="DIC-99", event_type="INVALID_PROPERTY_TYPE")) + + def get_bson_schema(self, enumerations, ref_stack: list = None): + if ref_stack is None: + ref_stack = [] + + if self.ref: + return self._handle_ref_schema(enumerations, ref_stack, "bson") + + if self.type == "object": + schema = {} + schema["bsonType"] = "object" + schema["properties"] = {} + for prop_name, prop in self.properties.items(): + schema["properties"][prop_name] = prop.get_bson_schema(enumerations, ref_stack) + required_props = self._get_required() + if required_props: + schema["required"] = required_props + schema["additionalProperties"] = self.additional_properties + + # Handle one_of structure + if self.one_of: + schema["oneOf"] = 
self.one_of.get_bson_schema(enumerations, ref_stack) + + return schema + + elif self.type == "array": + schema = {} + schema["bsonType"] = "array" + if self.items: + schema["items"] = self.items.get_bson_schema(enumerations, ref_stack) + return schema + + elif self.type == "enum": + schema = {} + schema["bsonType"] = "string" + if self.enums: + schema["enum"] = enumerations.get_enum_values(self.enums) + return schema + + elif self.type == "enum_array": + schema = {} + schema["bsonType"] = "array" + if self.enums: + schema["items"] = {"bsonType": "string", "enum": enumerations.get_enum_values(self.enums)} + return schema + + elif self.type: + # Reference a custom type + custom_type = Type(f"{self.type}.yaml") + custom_schema = custom_type.get_bson_schema() + return custom_schema + else: + raise ConfiguratorException(f"Invalid dictionary property type: {self.type}", + ConfiguratorEvent(event_id="DIC-99", event_type="INVALID_PROPERTY_TYPE")) + + def _handle_ref_schema(self, enumerations, ref_stack: list, schema_type: str): + """Handle reference schema processing with circular reference and depth checking.""" + # Check for circular reference + if self.ref in ref_stack: + ref_chain = " -> ".join(ref_stack + [self.ref]) + event = ConfiguratorEvent( + event_id="DIC-07", + event_type="CIRCULAR_REFERENCE", + event_data={"ref_chain": ref_chain, "ref_stack": ref_stack} + ) + raise ConfiguratorException(f"Circular reference detected: {ref_chain}", event) + + # Check stack depth limit + if len(ref_stack) >= self.config.RENDER_STACK_MAX_DEPTH: + event = ConfiguratorEvent( + event_id="DIC-08", + event_type="STACK_DEPTH_EXCEEDED", + event_data={"max_depth": self.config.RENDER_STACK_MAX_DEPTH, "current_depth": len(ref_stack)} + ) + raise ConfiguratorException(f"Reference stack depth exceeded maximum of {self.config.RENDER_STACK_MAX_DEPTH}", event) + + # Add current ref to stack and process + ref_stack.append(self.ref) + try: + dictionary = Dictionary(self.ref) + if schema_type == "json": + return dictionary.get_json_schema(enumerations, ref_stack) + else: + return dictionary.get_bson_schema(enumerations, ref_stack) + finally: + ref_stack.pop() + + def _get_required(self): + required = [] + for prop_name, prop in self.properties.items(): + if prop.required: + required.append(prop_name) + return required + + +class OneOf: + def __init__(self, one_of_data: dict): + self.schemas = {} + for schema_name, schema_data in one_of_data.get("schemas", {}).items(): + self.schemas[schema_name] = Property(schema_name, schema_data) + + def to_dict(self): + return { + "schemas": {name: schema.to_dict() for name, schema in self.schemas.items()} + } + + def get_json_schema(self, enumerations, ref_stack: list = None): + return [schema.get_json_schema(enumerations, ref_stack) for schema in self.schemas.values()] + + def get_bson_schema(self, enumerations, ref_stack: list = None): + return [schema.get_bson_schema(enumerations, ref_stack) for schema in self.schemas.values()] \ No newline at end of file diff --git a/configurator/services/enumerator_service.py b/configurator/services/enumerator_service.py new file mode 100644 index 0000000..b564c43 --- /dev/null +++ b/configurator/services/enumerator_service.py @@ -0,0 +1,93 @@ +from configurator.utils.configurator_exception import ConfiguratorEvent, ConfiguratorException +from configurator.utils.file_io import FileIO, File +from configurator.utils.config import Config +import os + +class Enumerators: + """ A list of versioned Enumerations""" + def __init__(self,
data: dict): + self.config = Config.get_instance() + + if data is None: + loaded_data = FileIO.get_document(self.config.TEST_DATA_FOLDER, "enumerators.json") + # Handle both list and dict formats + if isinstance(loaded_data, dict): + self.dict = loaded_data.get("enumerators", []) + else: + self.dict = loaded_data + else: + # Handle both list and dict formats + if isinstance(data, dict): + self.dict = data.get("enumerators", []) + else: + self.dict = data + + self.versions = [] + for enumerators in self.dict: + self.versions.append(Enumerations(enumerators)) + + def version(self, version_number: int): + """Get a specific version of enumerations""" + for version in self.versions: + if version.version == version_number: + return version + event = ConfiguratorEvent("ENU-03", "GET_VERSION", {"version": version_number}) + raise ConfiguratorException(f"Version {version_number} not found", event) + + def save(self): + """Save the enumerators and return self""" + try: + # Save the cleaned content + FileIO.put_document(self.config.TEST_DATA_FOLDER, "enumerators.json", self.to_dict()) + return self + except Exception as e: + event = ConfiguratorEvent("ENU-02", "PUT_ENUMERATORS") + event.record_failure(f"Failed to save enumerators: {str(e)}") + raise ConfiguratorException(f"Failed to save enumerators: {str(e)}", event) + + def to_dict(self): + """Return the enumerators data""" + return self.dict + + +class Enumerations: + """ A versioned collection of enumerations""" + def __init__(self, data: dict): + self.config = Config.get_instance() + self._locked = False # Default to unlocked + + if data is None: + event = ConfiguratorEvent("ENU-01", "INIT_ENUMERATIONS", {"error": "Enumerations data cannot be None"}) + raise ConfiguratorException("Enumerations data cannot be None", event) + if not isinstance(data, dict): + event = ConfiguratorEvent("ENU-01", "INIT_ENUMERATIONS", {"error": "Enumerations data must be a dictionary"}) + raise ConfiguratorException("Enumerations data must be a dictionary", event) + + self.name = data.get("name", "Enumerations") + self.status = data.get("status", "Active") + self.version = data.get("version", 0) + self.enumerators = data.get("enumerators", {}) + # Extract _locked from document if present + self._locked = data.get("_locked", False) + + def get_enum_values(self, enum_name: str): + """Get the values for a specific enum""" + if self.enumerators is None: + event = ConfiguratorEvent("ENU-01", "GET_ENUM_VALUES", {"error": "Enumerators is None"}) + raise ConfiguratorException("Enumerators is None", event) + if enum_name not in self.enumerators: + event = ConfiguratorEvent("ENU-01", "GET_ENUM_VALUES", {"error": f"Enum '{enum_name}' not found"}) + raise ConfiguratorException(f"Enum '{enum_name}' not found", event) + # Return the keys (values) as a list, not the full object + return list(self.enumerators[enum_name].keys()) + + def to_dict(self): + """Return the enumerations data""" + return { + "name": self.name, + "status": self.status, + "version": self.version, + "enumerators": self.enumerators, + "_locked": self._locked # Always include _locked + } + + \ No newline at end of file diff --git a/configurator/services/template_service.py b/configurator/services/template_service.py new file mode 100644 index 0000000..4049826 --- /dev/null +++ b/configurator/services/template_service.py @@ -0,0 +1,111 @@ +""" +Template Service for processing configuration and dictionary templates.
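+
+A minimal usage sketch (the collection name is hypothetical; the file names
+follow the naming used by the code below):
+
+    service = TemplateService()
+    result = service.create_collection("media")
+    # result == {"collection_name": "media",
+    #            "configuration_file": "media.yaml",
+    #            "dictionary_file": "media.0.0.1.yaml"}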
+""" +from configurator.utils.config import Config +from configurator.utils.file_io import FileIO +from configurator.utils.configurator_exception import ConfiguratorEvent, ConfiguratorException +import yaml + + +class TemplateService: + """Service for processing templates and creating new collections.""" + + def __init__(self): + self.config = Config.get_instance() + + def create_collection(self, collection_name: str) -> dict: + """ + Create a new collection with configuration and dictionary files. + + Args: + collection_name: Name of the collection to create + + Returns: + dict: Information about created files + """ + # Validate collection name + self._validate_collection_name(collection_name) + + # Check if files already exist + self._check_existing_files(collection_name) + + # Process and save configuration template + config_content = self.process_configuration_template(collection_name) + config_filename = f"{collection_name}.yaml" + FileIO.put_document(self.config.CONFIGURATION_FOLDER, config_filename, config_content) + + # Process and save dictionary template + dict_content = self.process_dictionary_template(collection_name) + dict_filename = f"{collection_name}.0.0.1.yaml" + FileIO.put_document(self.config.DICTIONARY_FOLDER, dict_filename, dict_content) + + return { + "collection_name": collection_name, + "configuration_file": config_filename, + "dictionary_file": dict_filename + } + + def process_configuration_template(self, collection_name: str) -> dict: + """Process the configuration template with the collection name.""" + template_content = self._load_template("configuration.yaml") + processed_content = self._replace_placeholders(template_content, collection_name) + return yaml.safe_load(processed_content) + + def process_dictionary_template(self, collection_name: str) -> dict: + """Process the dictionary template with the collection name.""" + template_content = self._load_template("dictionary.yaml") + processed_content = self._replace_placeholders(template_content, collection_name) + return yaml.safe_load(processed_content) + + def _load_template(self, template_name: str) -> str: + """Load a template file from the template folder.""" + try: + return FileIO.get_document(self.config.TEMPLATE_FOLDER, template_name) + except Exception as e: + event = ConfiguratorEvent("TPL-01", "TEMPLATE_NOT_FOUND", {"template": template_name}) + raise ConfiguratorException(f"Template {template_name} not found", event) + + def _replace_placeholders(self, content: str, collection_name: str) -> str: + """Replace placeholders in template content.""" + return content.replace("{{collection_name}}", collection_name) + + def _validate_collection_name(self, collection_name: str): + """Validate the collection name format.""" + if not collection_name or not collection_name.strip(): + event = ConfiguratorEvent("TPL-02", "INVALID_COLLECTION_NAME", {"name": collection_name}) + raise ConfiguratorException("Collection name cannot be empty", event) + + # Check for valid characters (alphanumeric, underscore, hyphen) + import re + if not re.match(r'^[a-zA-Z0-9_-]+$', collection_name): + event = ConfiguratorEvent("TPL-02", "INVALID_COLLECTION_NAME", {"name": collection_name}) + raise ConfiguratorException("Collection name can only contain letters, numbers, underscores, and hyphens", event) + + def _check_existing_files(self, collection_name: str): + """Check if configuration or dictionary files already exist.""" + config_filename = f"{collection_name}.yaml" + dict_filename = f"{collection_name}.0.0.1.yaml" + + try: + 
FileIO.get_document(self.config.CONFIGURATION_FOLDER, config_filename) + event = ConfiguratorEvent("TPL-03", "CONFIGURATION_EXISTS", {"file": config_filename}) + raise ConfiguratorException(f"Configuration file {config_filename} already exists", event) + except ConfiguratorException as e: + # FIL-04 is FileIO's file-not-found event, which is what we want here + if e.event.id == "FIL-04": + pass # File doesn't exist, which is what we want + else: + # Re-raise other ConfiguratorExceptions + raise + + try: + FileIO.get_document(self.config.DICTIONARY_FOLDER, dict_filename) + event = ConfiguratorEvent("TPL-03", "DICTIONARY_EXISTS", {"file": dict_filename}) + raise ConfiguratorException(f"Dictionary file {dict_filename} already exists", event) + except ConfiguratorException as e: + # FIL-04 is FileIO's file-not-found event, which is what we want here + if e.event.id == "FIL-04": + pass # File doesn't exist, which is what we want + else: + # Re-raise other ConfiguratorExceptions + raise \ No newline at end of file diff --git a/configurator/services/type_services.py b/configurator/services/type_services.py new file mode 100644 index 0000000..cfc2e35 --- /dev/null +++ b/configurator/services/type_services.py @@ -0,0 +1,315 @@ +""" +Type Definitions +=============== + +A valid Type in this system must be one of the following three forms: + +1. Universal Primitive: + - Has only a top-level 'schema' property (e.g., {'schema': {'type': 'string', 'format': 'email'}}) + - Must NOT have 'json_type' or 'bson_type' at any level. + +2. Typed Primitive: + - Has top-level 'json_type' and/or 'bson_type' properties (e.g., {'json_type': {...}, 'bson_type': {...}}) + - Must NOT have a 'schema' property. + +3. Complex Type (object or array): + - Has 'type': 'object' or 'type': 'array' at the top level. + - For 'object', must have a 'properties' dict; for 'array', must have an 'items' property. + - May have additional fields like 'description', 'required', 'additionalProperties', etc. + +Any other combination (e.g., 'schema' containing 'json_type'/'bson_type', or both 'schema' and 'json_type'/'bson_type' at the top level) is invalid and will raise an error.
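+
+Illustrative sketches of each form as they might appear in a type YAML file
+(the field names, formats, and the referenced 'sentence' type are hypothetical):
+
+1. Universal Primitive:
+    schema:
+      type: string
+      format: email
+
+2. Typed Primitive:
+    json_type:
+      type: string
+      pattern: "^[0-9a-fA-F]{24}$"
+    bson_type:
+      bsonType: objectId
+
+3. Complex Type:
+    type: object
+    properties:
+      name:
+        description: A short name
+        required: true
+        type: sentence
+    additionalProperties: false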
+""" +from configurator.utils.file_io import FileIO, File +from configurator.utils.config import Config +from configurator.utils.configurator_exception import ConfiguratorEvent, ConfiguratorException +import os +import yaml +from collections import OrderedDict + + +class Type: + def __init__(self, file_name: str, document: dict = {}): + self.config = Config.get_instance() + self.file_name = file_name + self.type_property = {} + self._locked = False # Default to unlocked + + if document: + self.property = TypeProperty(file_name.replace('.yaml', ''), document) + # Extract _locked from document if present + self._locked = document.get("_locked", False) + else: + document_data = FileIO.get_document(self.config.TYPE_FOLDER, file_name) + self.property = TypeProperty(file_name.replace('.yaml', ''), document_data) + # Extract _locked from loaded document if present + self._locked = document_data.get("_locked", False) + + + def save(self): + """Save the type and return the Type object.""" + try: + FileIO.put_document(self.config.TYPE_FOLDER, self.file_name, self.to_dict()) + return self + except Exception as e: + event = ConfiguratorEvent("TYP-03", "PUT_TYPE") + event.record_failure(f"Failed to save type {self.file_name}: {str(e)}") + raise ConfiguratorException(f"Failed to save type {self.file_name}: {str(e)}", event) + + @staticmethod + def lock_all(): + """Lock all type files.""" + config = Config.get_instance() + files = FileIO.get_documents(config.TYPE_FOLDER) + event = ConfiguratorEvent("TYP-06", "LOCK_ALL_TYPES") + + for file in files: + try: + type_obj = Type(file.file_name) + sub_event = ConfiguratorEvent(f"TYP-{file.file_name}", "LOCK_TYPE") + sub_event.record_success() + event.append_events([sub_event]) + except ConfiguratorException as ce: + event.append_events([ce.event]) + event.record_failure(f"ConfiguratorException locking type {file.file_name}") + raise ConfiguratorException(f"ConfiguratorException locking type {file.file_name}", event) + except Exception as e: + sub_event = ConfiguratorEvent(f"TYP-{file.file_name}", "LOCK_TYPE") + sub_event.record_failure(f"Failed to lock type {file.file_name}: {str(e)}") + event.append_events([sub_event]) + event.record_failure(f"Unexpected error locking type {file.file_name}") + raise ConfiguratorException(f"Unexpected error locking type {file.file_name}", event) + + event.record_success() + return event + + def get_json_schema(self, type_stack: list = None): + if type_stack is None: + type_stack = [] + return self.property.get_json_schema(type_stack) + + def get_bson_schema(self, type_stack: list = None): + if type_stack is None: + type_stack = [] + return self.property.get_bson_schema(type_stack) + + def to_dict(self): + return { + "file_name": self.file_name, + "_locked": self._locked, # Always include _locked + **self.property.to_dict() + } + + def delete(self): + if self._locked: + event = ConfiguratorEvent(event_id="TYP-05", event_type="DELETE_TYPE", event_data={"error": "Type is locked"}) + raise ConfiguratorException("Cannot delete locked type", event) + event = ConfiguratorEvent(event_id="TYP-05", event_type="DELETE_TYPE") + try: + delete_event = FileIO.delete_document(self.config.TYPE_FOLDER, self.file_name) + if delete_event.status == "SUCCESS": + event.record_success() + else: + event.append_events([delete_event]) + event.record_failure("error deleting type") + except ConfiguratorException as e: + event.append_events([e.event]) + event.record_failure("error deleting type") + except Exception as e: + event.record_failure("unexpected 
error deleting type", {"error": str(e)}) + return event + + + +class TypeProperty: + def __init__(self, name: str, property: dict): + self.config = Config.get_instance() + self.name = name + self.description = property.get("description", "Missing Required Description") + self.type = property.get("type", "void") + self.required = property.get("required", False) + self.schema = property.get("schema", None) + self.json_type = property.get("json_type", None) + self.bson_type = property.get("bson_type", None) + self.additional_properties = property.get("additionalProperties", False) + self.is_primitive = False + self.is_universal = False + + if self.schema is not None: + self.is_primitive = True + self.is_universal = True + return + + if self.json_type is not None or self.bson_type is not None: + self.is_primitive = True + self.is_universal = False + return + + if self.type == "array": + self.items = TypeProperty("items", property.get("items", {})) + return + + if self.type == "object": + self.properties = {} + for name, prop in property.get("properties", {}).items(): + self.properties[name] = TypeProperty(name, prop) + self.additional_properties = property.get("additionalProperties", False) + return + + def to_dict(self): + if self.is_universal: + return { + "description": self.description, + "required": self.required, + "schema": self.schema, + } + + elif self.is_primitive: + return { + "description": self.description, + "required": self.required, + "json_type": self.json_type or {}, + "bson_type": self.bson_type or {}, + } + + elif self.type == "array": + return { + "description": self.description, + "required": self.required, + "type": self.type, + "items": self.items.to_dict(), + } + + elif self.type == "object": + return { + "description": self.description, + "required": self.required, + "type": self.type, + "properties": {name: property.to_dict() for name, property in self.properties.items()}, + "additionalProperties": self.additional_properties + } + + else: # custom type + return { + "description": self.description, + "required": self.required, + "type": self.type + } + + def get_json_schema(self, type_stack: list = None): + if type_stack is None: + type_stack = [] + if self.is_universal: + return { + "description": self.description, + **self.schema + } + if self.is_primitive: + return { + "description": self.description, + **self.json_type + } + if self.type == "array": + return { + "description": self.description, + "type": "array", + "items": self.items.get_json_schema(type_stack) + } + if self.type == "object": + properties = {} + required_properties = [] + + for name, property in self.properties.items(): + properties[name] = property.get_json_schema(type_stack) + if property.required: + required_properties.append(name) + + result = { + "description": self.description, + "type": "object", + "properties": properties, + "additionalProperties": self.additional_properties + } + + if required_properties: + result["required"] = required_properties + + return result + if self.type: + return self._handle_type_reference(type_stack, "json") + + raise ConfiguratorException(f"Type {self.type} is not a valid type", ConfiguratorEvent(event_id="TYP-99", event_type="INVALID_TYPE")) + + def get_bson_schema(self, type_stack: list = None): + if type_stack is None: + type_stack = [] + if self.is_universal: + schema = self.schema.copy() + schema["bsonType"] = schema["type"] + del schema["type"] + return { + **schema + } + if self.is_primitive: + return { + **self.bson_type + } + if self.type == 
"array": + return { + "bsonType": "array", + "items": self.items.get_bson_schema(type_stack) + } + if self.type == "object": + properties = {} + required_properties = [] + + for name, property in self.properties.items(): + properties[name] = property.get_bson_schema(type_stack) + if property.required: + required_properties.append(name) + + result = { + "bsonType": "object", + "properties": properties, + "additionalProperties": self.additional_properties + } + + if required_properties: + result["required"] = required_properties + + return result + if self.type: + return self._handle_type_reference(type_stack, "bson") + + raise ConfiguratorException(f"Type {self.type} is not a valid type", ConfiguratorEvent(event_id="TYP-99", event_type="INVALID_TYPE")) + + def _handle_type_reference(self, type_stack: list, schema_type: str): + """Handle type reference processing with circular reference and depth checking.""" + type_name = f"{self.type}.yaml" + # Check for circular reference + if type_name in type_stack: + type_chain = " -> ".join(type_stack + [type_name]) + event = ConfiguratorEvent( + event_id="TYP-07", + event_type="CIRCULAR_TYPE_REFERENCE", + event_data={"type_chain": type_chain, "type_stack": type_stack} + ) + raise ConfiguratorException(f"Circular type reference detected: {type_chain}", event) + # Check stack depth limit + if len(type_stack) >= self.config.RENDER_STACK_MAX_DEPTH: + event = ConfiguratorEvent( + event_id="TYP-08", + event_type="TYPE_STACK_DEPTH_EXCEEDED", + event_data={"max_depth": self.config.RENDER_STACK_MAX_DEPTH, "current_depth": len(type_stack)} + ) + raise ConfiguratorException(f"Type stack depth exceeded maximum of {self.config.RENDER_STACK_MAX_DEPTH}", event) + # Add current type to stack and process + type_stack.append(type_name) + try: + custom_type = Type(type_name) + if schema_type == "json": + custom_schema = custom_type.property.get_json_schema(type_stack) + else: + custom_schema = custom_type.property.get_bson_schema(type_stack) + return custom_schema + finally: + type_stack.pop() \ No newline at end of file diff --git a/tests/managers/__init__.py b/configurator/utils/__init__.py similarity index 100% rename from tests/managers/__init__.py rename to configurator/utils/__init__.py diff --git a/configurator/utils/config.py b/configurator/utils/config.py new file mode 100644 index 0000000..cfae25a --- /dev/null +++ b/configurator/utils/config.py @@ -0,0 +1,173 @@ +import os +import json +from pathlib import Path + +import logging +logger = logging.getLogger(__name__) + +class Config: + _instance = None # Singleton instance + + def __init__(self): + if Config._instance is not None: + raise Exception("This class is a singleton!") + else: + Config._instance = self + self.config_items = [] + + # Set INPUT_FOLDER from environment or default FIRST + self.INPUT_FOLDER = os.getenv("INPUT_FOLDER", "/input") + + # Declare instance variables to support IDE code assist + self.BUILT_AT = '' + self.LOGGING_LEVEL = '' + self.MONGO_DB_NAME = '' + self.MONGO_CONNECTION_STRING = '' + self.ENUMERATORS_COLLECTION_NAME = '' + self.VERSION_COLLECTION_NAME = '' + self.TYPE_FOLDER = '' + self.DICTIONARY_FOLDER = '' + self.CONFIGURATION_FOLDER = '' + self.TEST_DATA_FOLDER = '' + self.TEMPLATE_FOLDER = '' + self.MIGRATIONS_FOLDER = '' + self.API_CONFIG_FOLDER = '' + self.API_PORT = 0 + self.SPA_PORT = 0 + self.AUTO_PROCESS = False + self.EXIT_AFTER_PROCESSING = False + self.LOAD_TEST_DATA = False + self.ENABLE_DROP_DATABASE = False + self.RENDER_STACK_MAX_DEPTH = 0 + + # Default 
Values grouped by value type + self.config_strings = { + "BUILT_AT": "DEFAULT! Set in code", + "INPUT_FOLDER": "/input", + "LOGGING_LEVEL": "INFO", + "MONGO_DB_NAME": "configurator", + "VERSION_COLLECTION_NAME": "CollectionVersions", + "ENUMERATORS_COLLECTION_NAME": "DatabaseEnumerators", + "TYPE_FOLDER": "types", + "DICTIONARY_FOLDER": "dictionaries", + "CONFIGURATION_FOLDER": "configurations", + "TEST_DATA_FOLDER": "test_data", + "TEMPLATE_FOLDER": "templates", + "MIGRATIONS_FOLDER": "migrations", + "API_CONFIG_FOLDER": "api_config" + } + self.config_ints = { + "API_PORT": "8081", + "SPA_PORT": "8082", + "RENDER_STACK_MAX_DEPTH": "100", + } + self.config_booleans = { + "AUTO_PROCESS": "false", + "EXIT_AFTER_PROCESSING": "false", + "LOAD_TEST_DATA": "false", + "ENABLE_DROP_DATABASE": "false", + } + self.config_string_secrets = { + "MONGO_CONNECTION_STRING": "mongodb://mongodb:27017/" + } + + # Initialize configuration + self.initialize() + self.configure_logging() + + def initialize(self): + """Initialize configuration values.""" + self.config_items = [] + + # Add INPUT_FOLDER to config_items since it's already set + self.config_items.append({ + "name": "INPUT_FOLDER", + "value": self.INPUT_FOLDER, + "from": "default" if not os.getenv("INPUT_FOLDER") else "environment" + }) + + # Initialize Config Strings (except INPUT_FOLDER) + for key, default in self.config_strings.items(): + if key == "INPUT_FOLDER": + continue # Already set + value = self._get_config_value(key, default, False) + setattr(self, key, value) + + # Initialize Config Integers + for key, default in self.config_ints.items(): + value = int(self._get_config_value(key, default, False)) + setattr(self, key, value) + + # Initialize Config Booleans + for key, default in self.config_booleans.items(): + value = (self._get_config_value(key, default, False)).lower() == "true" + setattr(self, key, value) + + # Initialize String Secrets + for key, default in self.config_string_secrets.items(): + value = self._get_config_value(key, default, True) + setattr(self, key, value) + + def configure_logging(self): + # Reset logging handlers + for handler in logging.root.handlers[:]: + logging.root.removeHandler(handler) + + # Configure logger + logging.basicConfig( + level=self.LOGGING_LEVEL, + format="%(asctime)s - %(levelname)s - %(name)s - %(message)s", + datefmt="%Y-%m-%d %H:%M:%S" + ) + + # Suppress noisy http logging + logging.getLogger("httpcore").setLevel(logging.WARNING) + logging.getLogger("httpx").setLevel(logging.WARNING) + + # Log configuration + logger.info(f"Configuration Initialized: {self.config_items}") + + def _get_config_value(self, name, default_value, is_secret): + """Retrieve a configuration value, first from a file, then environment variable, then default.""" + value = default_value + from_source = "default" + + # Check for config file first - try api_config folder first, then root + api_config_path = Path(self.INPUT_FOLDER, "api_config", name) + root_path = Path(self.INPUT_FOLDER, name) + + if api_config_path.exists(): + value = api_config_path.read_text().strip() + from_source = "file" + elif root_path.exists(): + value = root_path.read_text().strip() + from_source = "file" + # If no file, check for environment variable + elif os.getenv(name): + value = os.getenv(name) + from_source = "environment" + + # Record the source of the config value + self.config_items.append({ + "name": name, + "value": "secret" if is_secret else value, + "from": from_source + }) + return value + + # Serializer + def to_dict(self): + 
"""Convert the Config object to a dictionary with the required fields.""" + return { + "config_items": self.config_items, + } + + # Singleton Getter + @staticmethod + def get_instance(): + """Get the singleton instance of the Config class.""" + if Config._instance is None: + Config() + + return Config._instance + \ No newline at end of file diff --git a/configurator/utils/configurator_exception.py b/configurator/utils/configurator_exception.py new file mode 100644 index 0000000..e09b4b5 --- /dev/null +++ b/configurator/utils/configurator_exception.py @@ -0,0 +1,54 @@ +import datetime + +class ConfiguratorEvent: + def __init__(self, event_id: str, event_type: str, event_data: dict = None): + self.id = event_id + self.type = event_type + self.data = event_data + self.starts = datetime.datetime.now() + self.ends = None + self.status = "PENDING" + self.sub_events = [] + + def append_events(self, events: list): + self.sub_events.extend(events) + + def record_success(self): + self.status = "SUCCESS" + self.ends = datetime.datetime.now() + + def record_failure(self, message: str, event_data: object = None): + if event_data is None: + self.data = {"error": message} + elif isinstance(event_data, dict): + self.data = {"error": message, **event_data} + else: + self.data = {"error": message, "details": str(event_data)} + self.status = "FAILURE" + self.ends = datetime.datetime.now() + + def to_dict(self): + return { + "id": self.id, + "type": self.type, + "data": self.data, + "starts": self.starts, + "ends": self.ends, + "status": self.status, + "sub_events": [event.to_dict() for event in self.sub_events] + } + + +class ConfiguratorException(Exception): + def __init__(self, message: str, event: ConfiguratorEvent): + self.message = message + self.event = event + + def __str__(self): + return self.message + + def to_dict(self): + return { + "message": self.message, + "event": self.event.to_dict() if self.event else None + } \ No newline at end of file diff --git a/configurator/utils/ejson_encoder.py b/configurator/utils/ejson_encoder.py new file mode 100644 index 0000000..90157a4 --- /dev/null +++ b/configurator/utils/ejson_encoder.py @@ -0,0 +1,11 @@ +from flask.json.provider import DefaultJSONProvider as FlaskJSONProvider +import datetime +from bson.objectid import ObjectId + +class MongoJSONEncoder(FlaskJSONProvider): + def default(self, obj): + if isinstance(obj, (ObjectId, datetime.datetime, datetime.date)): + return str(obj) + elif hasattr(obj, 'isoformat'): # Handle any object with isoformat method + return str(obj) + return super().default(obj) \ No newline at end of file diff --git a/configurator/utils/file_io.py b/configurator/utils/file_io.py new file mode 100644 index 0000000..ce21b8a --- /dev/null +++ b/configurator/utils/file_io.py @@ -0,0 +1,149 @@ +import json +import os +from datetime import datetime +from pathlib import Path + +import yaml +from bson import json_util + +from configurator.utils.config import Config +from configurator.utils.configurator_exception import ConfiguratorEvent, ConfiguratorException + + +class File: + """Class representing a file with its properties.""" + + def __init__(self, file_path: str): + """Initialize a File instance with file properties.""" + self.file_name = os.path.basename(file_path) + self.created_at = None + self.updated_at = None + self.size = 0 + + # Get file properties if file exists + try: + stat = os.stat(file_path) + self.size = stat.st_size + self.created_at = datetime.fromtimestamp(stat.st_ctime).isoformat() + self.updated_at = 
datetime.fromtimestamp(stat.st_mtime).isoformat() + except Exception as e: + event = ConfiguratorEvent(event_id="FIL-01", event_type="GET_FILE_PROPERTIES", event_data={"error": str(e)}) + raise ConfiguratorException(f"Failed to get file properties for {file_path}", event) + + def to_dict(self): + """Convert file properties to dictionary matching OpenAPI schema (flat).""" + return { + "file_name": self.file_name, + "created_at": self.created_at, + "updated_at": self.updated_at, + "size": self.size + } + + +class FileIO: + """Class for file I/O operations.""" + + @staticmethod + def get_documents(folder_name: str) -> list[File]: + """Get all files from a folder.""" + config = Config.get_instance() + folder = os.path.join(config.INPUT_FOLDER, folder_name) + files = [] + + try: + if not os.path.exists(folder): + event = ConfiguratorEvent(event_id="FIL-02", event_type="GET_DOCUMENTS", event_data={"error": "Folder not found"}) + raise ConfiguratorException(f"Folder not found: {folder}", event) + + for file_name in os.listdir(folder): + file_path = os.path.join(folder, file_name) + if os.path.isfile(file_path): + files.append(File(file_path)) + return files + except ConfiguratorException as e: + raise e + except Exception as e: + event = ConfiguratorEvent(event_id="FIL-03", event_type="GET_DOCUMENTS", event_data={"error": str(e)}) + raise ConfiguratorException(f"Failed to get documents from {folder}", event) + + @staticmethod + def get_document(folder_name: str, file_name: str) -> dict: + """Read document content from a file.""" + config = Config.get_instance() + folder = os.path.join(config.INPUT_FOLDER, folder_name) + file_path = os.path.join(folder, file_name) + + # Check if file exists + if not os.path.exists(file_path): + abs_path = os.path.abspath(file_path) + cwd = os.getcwd() + event = ConfiguratorEvent(event_id="FIL-04", event_type="FILE_NOT_FOUND", + event_data={"file_path": file_path, "abs_path": abs_path, "cwd": cwd}) + raise ConfiguratorException(f"File not found: {file_path} (abs: {abs_path}, cwd: {cwd})", event) + + # Get extension from file path + extension = os.path.splitext(file_path)[1].lower() + + # Only allow .yaml and .json + if extension not in [".yaml", ".json"]: + event = ConfiguratorEvent(event_id="FIL-05", event_type="UNSUPPORTED_FILE_TYPE", + event_data={"file_name": file_name, "extension": extension}) + raise ConfiguratorException(f"Unsupported file type: {extension}", event) + + try: + with open(file_path, 'r', encoding='utf-8') as f: + if extension == ".yaml": + return yaml.safe_load(f) + elif extension == ".json": + return json_util.loads(f.read()) + except Exception as e: + event = ConfiguratorEvent(event_id="FIL-06", event_type="GET_DOCUMENT", event_data={"error": str(e)}) + raise ConfiguratorException(f"Failed to get document from {file_path}", event) + + @staticmethod + def put_document(folder_name: str, file_name: str, document: dict) -> File: + """Write document content to a file.""" + config = Config.get_instance() + folder = os.path.join(config.INPUT_FOLDER, folder_name) + file_path = os.path.join(folder, file_name) + + # Get extension from file path + extension = os.path.splitext(file_path)[1].lower() + + # Only allow .yaml and .json + if extension not in [".yaml", ".json"]: + event = ConfiguratorEvent(event_id="FIL-07", event_type="UNSUPPORTED_FILE_TYPE", event_data={"file_name": file_name}) + raise ConfiguratorException(f"Unsupported file type: {extension}", event) + + try: + with open(file_path, 'w', encoding='utf-8') as f: + if extension == ".yaml": + yaml.dump(document, f,
default_flow_style=False, allow_unicode=True, sort_keys=False) + elif extension == ".json": + f.write(json_util.dumps(document, indent=2)) + + return File(file_path) + except Exception as e: + event = ConfiguratorEvent(event_id="FIL-08", event_type="PUT_DOCUMENT", event_data={"error": str(e)}) + raise ConfiguratorException(f"Failed to put document to {file_path}", event) + + @staticmethod + def delete_document(folder_name: str, file_name: str) -> ConfiguratorEvent: + """Delete a file.""" + config = Config.get_instance() + folder = os.path.join(config.INPUT_FOLDER, folder_name) + file_path = os.path.join(folder, file_name) + event = ConfiguratorEvent(event_id="FIL-09", event_type="DELETE_DOCUMENT") + + try: + if not os.path.exists(file_path): + event.record_failure({"error": "File not found", "file_path": file_path}) + return event + + os.remove(file_path) + event.record_success() + return event + except Exception as e: + event.record_failure({"error": str(e), "file_path": file_path}) + return event + \ No newline at end of file diff --git a/configurator/utils/mongo_io.py b/configurator/utils/mongo_io.py new file mode 100644 index 0000000..fa8214d --- /dev/null +++ b/configurator/utils/mongo_io.py @@ -0,0 +1,434 @@ +import json +from bson import ObjectId +from pymongo import MongoClient, ASCENDING, DESCENDING +from pymongo.operations import IndexModel +from bson import json_util +from configurator.utils.config import Config +from configurator.utils.configurator_exception import ConfiguratorEvent, ConfiguratorException + +import logging +import os +from pymongo.errors import BulkWriteError + +logger = logging.getLogger(__name__) + +class MongoIO: + """Simplified MongoDB I/O class for configuration services.""" + + def __init__(self, connection_string, database_name): + """Initialize MongoDB connection. 
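+
+        Uses a short serverSelectionTimeoutMS so an unreachable server fails
+        fast, and issues an admin ping so connection errors surface here
+        rather than on first use of the otherwise lazy client.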
+ + Args: + connection_string: MongoDB connection string + database_name: Name of the database to use + + Raises: + ConfiguratorException: If connection fails + """ + try: + self.client = MongoClient( + connection_string, + serverSelectionTimeoutMS=2000, + socketTimeoutMS=5000 + ) + self.client.admin.command('ping') # Force connection + self.db = self.client.get_database(database_name) + logger.info(f"Connected to MongoDB: {database_name}") + except Exception as e: + event = ConfiguratorEvent(event_id="MON-01", event_type="CONNECTION", event_data={"error": str(e)}) + raise ConfiguratorException("Failed to connect to MongoDB", event) + + def disconnect(self): + """Disconnect from MongoDB.""" + try: + if self.client: + self.client.close() + self.client = None + logger.info("Disconnected from MongoDB") + except Exception as e: + # Log the error but don't raise it - disconnect should be safe to call + logger.warning(f"Error during disconnect: {e}") + # Clear the client reference even if close failed + self.client = None + + def get_collection(self, collection_name): + """Get a collection, creating it if it doesn't exist.""" + try: + if collection_name not in self.db.list_collection_names(): + self.db.create_collection(collection_name) + logger.info(f"Created collection: {collection_name}") + + return self.db.get_collection(collection_name) + except Exception as e: + event = ConfiguratorEvent(event_id="MON-03", event_type="COLLECTION", event_data={"error": str(e), "collection": collection_name}) + raise ConfiguratorException(f"Failed to get/create collection {collection_name}", event) + + def get_documents(self, collection_name, match=None, project=None, sort_by=None): + """Retrieve documents from a collection. + + Args: + collection_name (str): Name of the collection to query. + match (dict, optional): MongoDB match filter. Defaults to {}. + project (dict, optional): Fields to include or exclude. Defaults to None. + sort_by (list of tuple, optional): Sorting criteria. Defaults to None. + + Returns: + list: List of documents matching the query. + """ + match = match or {} + project = project or None + sort_by = sort_by or None + + try: + collection = self.get_collection(collection_name) + cursor = collection.find(match, project) + if sort_by: + cursor = cursor.sort(sort_by) + + documents = list(cursor) + return documents + except Exception as e: + event = ConfiguratorEvent(event_id="MON-04", event_type="GET_DOCUMENTS", event_data={"error": str(e), "collection": collection_name}) + raise ConfiguratorException(f"Failed to get documents from {collection_name}", event) + + def upsert(self, collection_name, match, data): + """Upsert a document - create if not exists, update if exists. + + Args: + collection_name (str): Name of the collection + match (dict): Match criteria to find existing document + data (dict): Data to insert or update + + Returns: + dict: The upserted document + """ + try: + collection = self.get_collection(collection_name) + result = collection.find_one_and_update( + match, + {"$set": data}, + upsert=True, + return_document=True + ) + return result + except Exception as e: + event = ConfiguratorEvent(event_id="MON-05", event_type="UPSERT", event_data={"error": str(e), "collection": collection_name}) + raise ConfiguratorException(f"Failed to upsert document in {collection_name}", event) + + def remove_schema_validation(self, collection_name): + """Remove schema validation from a collection. 
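A short sketch of the connection lifecycle and the two primitives just defined (get_documents, upsert), assuming a mongod reachable at localhost:27017; the collection name is illustrative:

```python
# Sketch only: MongoIO pings on construction and raises ConfiguratorException
# (with a MON-01 event) if the server is unreachable.
from configurator.utils.mongo_io import MongoIO

mongo = MongoIO("mongodb://localhost:27017/", "test_database")

# upsert() creates-or-updates; get_collection() auto-creates on first touch.
mongo.upsert("CollectionVersions",
             match={"collection_name": "sample"},
             data={"collection_name": "sample", "current_version": "sample.1.0.0.1"})

docs = mongo.get_documents("CollectionVersions",
                           match={"collection_name": "sample"},
                           sort_by=[("collection_name", 1)])
print(docs)

mongo.disconnect()  # logs a warning instead of raising if close() fails
```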
+ + Args: + collection_name (str): Name of the collection + + Returns: + list[ConfiguratorEvent]: List containing event with operation result + """ + event = ConfiguratorEvent(event_id="MON-06", event_type="REMOVE_SCHEMA") + + try: + self.get_collection(collection_name) + + command = { + "collMod": collection_name, + "validator": {} + } + + result = self.db.command(command) + logger.info(f"Schema validation cleared successfully: {collection_name}") + event.data = { + "collection": collection_name, + "operation": "schema_validation_removed" + } + event.record_success() + return [event] + except Exception as e: + event.record_failure({"error": str(e), "collection": collection_name}) + return [event] + + def remove_index(self, collection_name, index_name): + """Drop an index from a collection. + + Args: + collection_name (str): Name of the collection + index_name (str): Name of the index to drop + + Returns: + list[ConfiguratorEvent]: List containing event with operation result + """ + event = ConfiguratorEvent(event_id="MON-07", event_type="REMOVE_INDEX") + + try: + collection = self.get_collection(collection_name) + collection.drop_index(index_name) + logger.info(f"Dropped index {index_name} from collection: {collection_name}") + event.data = { + "collection": collection_name, + "index_name": index_name, + "operation": "dropped" + } + event.record_success() + return [event] + except Exception as e: + event.record_failure({"error": str(e), "collection": collection_name, "index": index_name}) + return [event] + + def execute_migration(self, collection_name, pipeline): + """Execute a MongoDB aggregation pipeline (migration). + + Args: + collection_name (str): Name of the collection + pipeline (list): List of pipeline stages to execute + + Returns: + list[ConfiguratorEvent]: List containing event with operation result + """ + event = ConfiguratorEvent(event_id="MON-08", event_type="EXECUTE_MIGRATION", event_data={"collection": collection_name}) + + try: + collection = self.get_collection(collection_name) + result = list(collection.aggregate(pipeline)) + logger.info(f"Executed migration on collection: {collection_name}") + event.record_success() + return [event] + except Exception as e: + event.record_failure({"error": str(e), "collection": collection_name}) + return [event] + + def load_migration_pipeline(self, migration_file): + """Load a migration pipeline from a JSON file using bson.json_util.loads(). 
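The comment in load_migration_pipeline below credits json_util with preserving $-prefixed operator keys, but plain json.loads keeps those too; the concrete win is that json_util also decodes MongoDB Extended JSON literals into real BSON/Python values. A small demonstration, independent of the service code:

```python
# Sketch only: json.loads vs bson.json_util.loads on a pipeline stage that
# embeds an Extended JSON $date literal.
import json
from bson import json_util

raw = '[{"$addFields": {"migrated_at": {"$date": "2024-01-01T00:00:00Z"}}}]'

plain = json.loads(raw)       # {"$date": ...} stays a nested dict
typed = json_util.loads(raw)  # becomes a datetime.datetime

print(type(plain[0]["$addFields"]["migrated_at"]))  # <class 'dict'>
print(type(typed[0]["$addFields"]["migrated_at"]))  # <class 'datetime.datetime'>
```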
+ + Args: + migration_file (str): Path to the migration JSON file + + Returns: + tuple: (pipeline, events) where pipeline is the list of stages and events is a list of ConfiguratorEvent + + Raises: + ConfiguratorException: If file cannot be loaded or parsed + """ + event = ConfiguratorEvent(event_id="MON-13", event_type="LOAD_MIGRATION") + event.data = { + "file": migration_file, + "file_name": os.path.basename(migration_file) + } + + try: + with open(migration_file, 'r') as file: + # Use bson.json_util.loads to preserve $ prefixes in MongoDB operators + pipeline = json_util.loads(file.read()) + + if not isinstance(pipeline, list): + raise ValueError("Migration file must contain a list of pipeline stages") + + logger.info(f"Loaded migration pipeline from: {migration_file}") + event.data.update({ + "pipeline_stages": len(pipeline), + "pipeline_operations": [list(stage.keys())[0] for stage in pipeline if stage] + }) + event.record_success() + return pipeline, [event] + except Exception as e: + event.record_failure({"error": str(e), "file": migration_file}) + raise ConfiguratorException(f"Failed to load migration pipeline from {migration_file}", event) + + def execute_migration_from_file(self, collection_name, migration_file): + """Execute a migration from a JSON file. + + Args: + collection_name (str): Name of the collection + migration_file (str): Path to the migration JSON file + + Returns: + list[ConfiguratorEvent]: List containing event with operation result + """ + event = ConfiguratorEvent(event_id="MON-14", event_type="EXECUTE_MIGRATION_FILE") + event.data = { + "collection": collection_name, + "migration_file": os.path.basename(migration_file), + "migration_path": migration_file + } + + try: + # Load the migration pipeline (this can create MON-13 events) + pipeline, load_events = self.load_migration_pipeline(migration_file) + + # Execute the migration (this creates MON-08 events) + execution_events = self.execute_migration(collection_name, pipeline) + + # Add detailed migration information to event data + event.data.update({ + "pipeline_stages": len(pipeline), + "pipeline_summary": [ + { + "stage": i + 1, + "operation": list(stage.keys())[0] if stage else "unknown", + "details": stage + } + for i, stage in enumerate(pipeline) + ], + "pipeline_operations": [list(stage.keys())[0] for stage in pipeline if stage] + }) + + # Nest the load and execution events as sub-events + event.append_events(load_events + execution_events) + + # Check if any child events failed + if any(child.status == "FAILURE" for child in event.sub_events): + event.record_failure("One or more migration operations failed") + else: + event.record_success() + + return [event] + except ConfiguratorException as e: + # Nest the exception event + event.append_events([e.event]) + event.record_failure("Migration file processing failed") + return [event] + except Exception as e: + event.record_failure({"error": str(e), "collection": collection_name, "file": migration_file}) + return [event] + + def add_index(self, collection_name, index_spec): + """Create an index on a collection. 
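add_index expects an index_spec dict carrying a "name" and a "key", and forwards only those two fields to pymongo's IndexModel. A hypothetical spec, with mongo standing in for a connected MongoIO instance:

```python
# Sketch only: the index_spec shape consumed by add_index().
from pymongo import ASCENDING, DESCENDING

index_spec = {
    "name": "email_by_created_idx",                          # Mongo index name
    "key": [("email", ASCENDING), ("created", DESCENDING)],  # compound key
}
# events = mongo.add_index("users", index_spec)  # -> [ConfiguratorEvent]
# print(events[0].to_dict())
```

As written, options such as unique or sparse would be ignored; supporting them would mean passing additional spec fields through to IndexModel.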
+ + Args: + collection_name (str): Name of the collection + index_spec (dict): Index specification with 'name' and 'key' fields + + Returns: + list[ConfiguratorEvent]: List containing event with operation result + """ + event = ConfiguratorEvent(event_id="MON-09", event_type="ADD_INDEX") + + try: + collection = self.get_collection(collection_name) + index_model = IndexModel(index_spec["key"], name=index_spec["name"]) + collection.create_indexes([index_model]) + logger.info(f"Created index {index_spec['name']} on collection: {collection_name}") + event.data = { + "collection": collection_name, + "index_name": index_spec["name"], + "index_keys": index_spec["key"], + "operation": "created" + } + event.record_success() + return [event] + except Exception as e: + event.record_failure({"error": str(e), "collection": collection_name, "index": index_spec}) + return [event] + + def apply_schema_validation(self, collection_name, schema_dict): + """Apply schema validation to a collection. + + Args: + collection_name (str): Name of the collection + schema_dict (dict): BSON schema dictionary to apply + + Returns: + list[ConfiguratorEvent]: List containing event with operation result + """ + event = ConfiguratorEvent(event_id="MON-10", event_type="APPLY_SCHEMA") + + try: + # Apply schema validation to MongoDB collection + collection = self.get_collection(collection_name) + command = { + "collMod": collection_name, + "validator": {"$jsonSchema": schema_dict}, + "validationLevel": "moderate", + "validationAction": "error" + } + + result = self.db.command(command) + logger.info(f"Schema validation applied to collection: {collection_name}") + event.data = schema_dict + event.record_success() + return [event] + + except Exception as e: + event.record_failure({"error": str(e), "collection": collection_name}) + return [event] + + def load_json_data(self, collection_name, data_file): + """Load test data from a file into a collection. + + Args: + collection_name (str): Name of the collection + data_file (str): Path to the JSON data file + + Returns: + list[ConfiguratorEvent]: List containing event with operation result + """ + event = ConfiguratorEvent(event_id="MON-11", event_type="LOAD_DATA") + + try: + collection = self.get_collection(collection_name) + with open(data_file, 'r') as file: + # Use bson.json_util.loads to handle Extended JSON ($oid, $date, etc.) 
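load_json_data reads an entire file with json_util.loads and hands the result to insert_many, so a test-data file must be a single JSON array; Extended JSON is allowed for values. A hypothetical file body:

```python
# Sketch only: the file shape load_json_data() expects.
from bson import json_util

sample_file = """[
  {"_id": {"$oid": "65a1b2c3d4e5f6a7b8c9d0e1"},
   "email": "ada@example.com",
   "created": {"$date": "2024-01-01T00:00:00Z"}},
  {"email": "grace@example.com"}
]"""

data = json_util.loads(sample_file)
print(len(data), type(data[0]["_id"]))  # 2 <class 'bson.objectid.ObjectId'>
```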
data = json_util.loads(file.read()) + + logger.info(f"Loading {len(data)} documents from {data_file} into collection: {collection_name}") + result = collection.insert_many(data) + + event.data = { + "collection": collection_name, + "data_file": os.path.basename(data_file), + "documents_loaded": len(data), + "insert_many_result": { + "inserted_ids": [str(oid) for oid in result.inserted_ids], + "acknowledged": result.acknowledged + } + } + event.record_success() + return [event] + except BulkWriteError as e: + # Extract rich error information from bulk write result + event.record_failure("Bulk write operation failed", e.details) + return [event] + except Exception as e: + event.record_failure("Bulk write operation failed unexpectedly", {"error": str(e), "collection": collection_name, "data_file": data_file}) + return [event] + + def drop_database(self) -> list[ConfiguratorEvent]: + """Drop the database.""" + event = ConfiguratorEvent(event_id="MON-12", event_type="DROP_DATABASE") + config = Config.get_instance() + if not config.ENABLE_DROP_DATABASE: + event.record_failure({"error": "Drop database feature is not enabled"}) + return [event] + if config.BUILT_AT != "Local": + event.record_failure({"error": "Drop database not allowed on Non-Local Build"}) + return [event] + + # Check if any collections have more than 100 documents + try: + collections_with_many_docs = [] + for collection_name in self.db.list_collection_names(): + doc_count = self.db.get_collection(collection_name).count_documents({}) + if doc_count > 100: + collections_with_many_docs.append({ + "collection": collection_name, + "document_count": doc_count + }) + + if collections_with_many_docs: + event.data = {"collections": collections_with_many_docs} + event.record_failure("Drop database Safety Limit Exceeded - Collections with >100 documents found") + return [event] + + except Exception as e: + event.data = {"error": str(e)} + event.record_failure("Check collection counts raised an exception") + return [event] + + try: + self.client.drop_database(self.db.name) + event.record_success() + logger.info(f"Dropped database: {self.db.name}") + return [event] + except Exception as e: + event.data = {"error": str(e)} + event.record_failure(f"Failed to drop database {self.db.name}") + return [event] \ No newline at end of file diff --git a/configurator/utils/route_decorators.py b/configurator/utils/route_decorators.py new file mode 100644 index 0000000..bfcb0c7 --- /dev/null +++ b/configurator/utils/route_decorators.py @@ -0,0 +1,30 @@ +from functools import wraps +from flask import jsonify +from configurator.utils.configurator_exception import ConfiguratorException, ConfiguratorEvent +import logging + +logger = logging.getLogger(__name__) + +def event_route(event_id: str, event_type: str, operation_name: str): + """Decorator that only handles exceptions, routes handle their own serialization.""" + def decorator(f): + @wraps(f) + def wrapper(*args, **kwargs): + try: + result = f(*args, **kwargs) + # Routes are responsible for their own serialization + return result + except ConfiguratorException as e: + logger.error(f"Configurator error in {operation_name}: {str(e)}") + event = ConfiguratorEvent(event_id=event_id, event_type=event_type) + if hasattr(e, 'event') and e.event: + event.append_events([e.event]) + event.record_failure(f"Configurator error in {operation_name}") + return jsonify(event.to_dict()), 500 + except Exception as e: + logger.error(f"Unexpected error in {operation_name}: {str(e)}") + event = 
ConfiguratorEvent(event_id=event_id, event_type=event_type) + event.record_failure(f"Unexpected error in {operation_name}", {"details": str(e)}) + return jsonify(event.to_dict()), 500 + return wrapper + return decorator \ No newline at end of file diff --git a/configurator/utils/version_manager.py b/configurator/utils/version_manager.py new file mode 100644 index 0000000..7558e37 --- /dev/null +++ b/configurator/utils/version_manager.py @@ -0,0 +1,48 @@ +import re +import logging +from typing import Optional, Dict, List + +from configurator.utils.config import Config +from configurator.utils.configurator_exception import ConfiguratorEvent, ConfiguratorException +from configurator.utils.mongo_io import MongoIO +from configurator.utils.version_number import VersionNumber + +logger = logging.getLogger(__name__) + +class VersionManager: + """Static class for managing collection version tracking in MongoDB. + + This class focuses on: + 1. Reading current versions from the database + 2. Updating version records + 3. Version comparison and validation + """ + + @staticmethod + def get_current_version(mongo_io: MongoIO, collection_name: str) -> VersionNumber: + """Get the current version of a collection.""" + config = Config.get_instance() + version_docs = mongo_io.get_documents( + config.VERSION_COLLECTION_NAME, + match={"collection_name": collection_name} + ) + if not version_docs: + return VersionNumber(f"{collection_name}.0.0.0.0") + if len(version_docs) > 1: + event = ConfiguratorEvent(event_id="VER-01", event_type="GET_CURRENT_VERSION", event_data=version_docs) + raise ConfiguratorException(f"Multiple versions found for collection: {collection_name}", event) + current_version = version_docs[0].get('current_version') + return VersionNumber(current_version) + + @staticmethod + def update_version(mongo_io: MongoIO, collection_name: str, version: str) -> VersionNumber: + """Update the version of a collection.""" + config = Config.get_instance() + # version is now always expected to include the collection name + VersionNumber(version) # validates the format, raising on malformed strings + mongo_io.upsert( + config.VERSION_COLLECTION_NAME, + match={"collection_name": collection_name}, + data={"collection_name": collection_name, "current_version": version} + ) + return VersionManager.get_current_version(mongo_io, collection_name) diff --git a/configurator/utils/version_number.py b/configurator/utils/version_number.py new file mode 100644 index 0000000..80b4214 --- /dev/null +++ b/configurator/utils/version_number.py @@ -0,0 +1,80 @@ +import re +from typing import List, Optional +from configurator.utils.configurator_exception import ConfiguratorException, ConfiguratorEvent + +class VersionNumber: + """Class for handling version numbers.""" + + def __init__(self, version: str): + """Initialize a VersionNumber instance. + + Args: + version: Version string collection.major.minor.patch[.enumerator] + If enumerator is omitted, defaults to 0. 
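Based on the docstring above and the implementation that follows, a quick sketch of how VersionNumber strings parse and compare; the collection names are illustrative:

```python
# Sketch only: parsing and comparing collection-prefixed version strings.
from configurator.utils.version_number import VersionNumber

v = VersionNumber("users.1.2.3")      # enumerator omitted -> defaults to 0
print(v.get_version_str())            # "1.2.3.0"
print(v.get_schema_filename())        # "users.1.2.3.yaml" (enumerator dropped)
print(v.get_enumerator_version())     # 0

# Comparisons skip parts[0], so the collection name never participates.
assert VersionNumber("users.1.2.3.1") > VersionNumber("users.1.2.3")
assert VersionNumber("users.1.2.3.0") == VersionNumber("orders.1.2.3.0")
```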
+ + """ + self.version = version + parts = version.split('.') + if len(parts) < 4 or len(parts) > 5: + event = ConfiguratorEvent(event_id="VER-01-INIT", event_type="VERSION_NUMBER_VALIDATION") + raise ConfiguratorException(f"Invalid version format {version}", event) + + # Initialize parts list + self.parts = [None] * 5 + self.parts[0] = parts[0] # Collection name + + # Validate that parts 1-3 are digits + for i, part in enumerate(parts[1:4], 1): + if not part.isdigit(): + event = ConfiguratorEvent(event_id=f"VER-01-NON_DIGIT-{i}", event_type="VERSION_NUMBER_VALIDATION") + raise ConfiguratorException(f"Invalid version format {version}", event) + self.parts[i] = int(part) + + # Handle enumerator (part 4) - default to 0 if not provided + if len(parts) == 5: + if not parts[4].isdigit(): + event = ConfiguratorEvent(event_id="VER-01-NON_DIGIT-ENUM", event_type="VERSION_NUMBER_VALIDATION") + raise ConfiguratorException(f"Invalid version format {version}", event) + self.parts[4] = int(parts[4]) + else: + self.parts[4] = 0 # Default enumerator to 0 + + def get_schema_filename(self) -> str: + """Get the schema file name - without enumerator version.""" + return '.'.join(str(part) for part in self.parts[0:4]) + ".yaml" + + def get_enumerator_version(self) -> int: + """Get the enumerator version.""" + return self.parts[4] + + def get_version_str(self) -> str: + """Get the four part version without collection name.""" + return '.'.join(str(part) for part in self.parts[1:5]) + + def __lt__(self, other: 'VersionNumber') -> bool: + if not isinstance(other, VersionNumber): + other = VersionNumber(other) + return self.parts[1:] < other.parts[1:] + + def __gt__(self, other: 'VersionNumber') -> bool: + if not isinstance(other, VersionNumber): + other = VersionNumber(other) + return self.parts[1:] > other.parts[1:] + + def __eq__(self, other: 'VersionNumber') -> bool: + if not isinstance(other, VersionNumber): + other = VersionNumber(other) + return self.parts[1:] == other.parts[1:] + + def __le__(self, other: 'VersionNumber') -> bool: + if not isinstance(other, VersionNumber): + other = VersionNumber(other) + return self.parts[1:] <= other.parts[1:] + + def __ge__(self, other: 'VersionNumber') -> bool: + if not isinstance(other, VersionNumber): + other = VersionNumber(other) + return self.parts[1:] >= other.parts[1:] + + def __str__(self) -> str: + return self.get_schema_filename() \ No newline at end of file diff --git a/docker-compose.yaml b/docker-compose.yaml index 9892abd..a70e128 100644 --- a/docker-compose.yaml +++ b/docker-compose.yaml @@ -11,50 +11,51 @@ services: extra_hosts: - "mongodb:127.0.0.1" healthcheck: - test: echo "try { rs.status() } catch (err) { rs.initiate({_id:'rs0',members:[{_id:0,host:'mongodb:27017'}]}) }" | mongosh --port 27017 --quiet + test: echo "db.runCommand('ping')" | mongosh --port 27017 --quiet interval: 5s timeout: 30s start_period: 0s - retries: 30 - command: ["--replSet", "rs0", "--bind_ip_all", "--port", "27017"] + retries: 3 + command: ["--bind_ip_all", "--port", "27017"] profiles: - - mongodb-only - - mongodb-api - mongodb + - configurator + - configurator-api ################################## # MongoDB configuration Service ################################## - mongodb_api: - image: ghcr.io/agile-learning-institute/stage0_mongodb_api:latest + configurator_api: + image: ghcr.io/agile-learning-institute/mongodb_configurator_api:latest restart: no ports: - 8081:8081 environment: - MONGO_CONNECTION_STRING: mongodb://mongodb:27017/?replicaSet=rs0 + 
MONGO_CONNECTION_STRING: mongodb://mongodb:27017/ AUTO_PROCESS: False LOAD_TEST_DATA: True MONGO_DB_NAME: test_database - MONGODB_API_PORT: 8081 + API_PORT: 8081 + SPA_PORT: 8082 volumes: - - ${INPUT_FOLDER:-./tests/test_cases/large_sample}:/input + - ${INPUT_FOLDER:-./tests/test_cases/playground}:/input depends_on: mongodb: condition: service_healthy profiles: - - mongodb-api - - mongodb + - configurator-api + - configurator - mongodb_spa: - image: ghcr.io/agile-learning-institute/stage0_mongodb_spa:latest + configurator_spa: + image: ghcr.io/agile-learning-institute/mongodb_configurator_spa:latest restart: no environment: - MONGODB_API_HOST: mongodb_api - MONGODB_API_PORT: 8081 - MONGODB_SPA_PORT: 8082 + API_HOST: configurator_api + API_PORT: 8081 + SPA_PORT: 8082 ports: - 8082:8082 depends_on: - - mongodb_api + - configurator_api profiles: - - mongodb + - configurator diff --git a/docs/openapi.yaml b/docs/openapi.yaml index 099b74b..2a05225 100644 --- a/docs/openapi.yaml +++ b/docs/openapi.yaml @@ -1,35 +1,57 @@ openapi: 3.0.3 info: - title: MongoDB Schema Management API - description: API for managing MongoDB collections, indexes, and migrations + title: MongoDB Configurator API + description: | + API for managing MongoDB collections, indexes, and migrations + + **API Explorer**: [Interactive Documentation](/docs/) version: 1.0.0 contact: email: devs@agile-learning.institute license: name: Apache 2.0 url: http://www.apache.org/licenses/LICENSE-2.0.html + +servers: + - url: http://localhost:8081 + description: Local development server paths: - /api/collections/: + /docs/index.html: + get: + summary: (this) API Explorer + description: Interactive API documentation + tags: + - Documentation + responses: + '200': + description: API Explorer HTML page + content: + text/html: + schema: + type: string + /api/configurations/: get: - summary: List all Configured Collections - operationId: list_collections + summary: List all Collection Configurations + tags: + - Collection Configurations + operationId: list_configurations responses: '200': - description: List of collections + description: List of collection configuration files content: application/json: schema: - $ref: '#/components/schemas/collections' + $ref: '#/components/schemas/files' '500': description: Processing error content: application/json: schema: - type: array - items: - type: object + $ref: '#/components/schemas/events' post: summary: Process all Configured Collections + tags: + - Collection Configurations operationId: process_collections responses: '200': @@ -37,21 +59,64 @@ paths: content: application/json: schema: - $ref: '#/components/schemas/processing_results' + $ref: '#/components/schemas/events' + '500': + description: Processing error occurred + content: + application/json: + schema: + $ref: '#/components/schemas/events' + patch: + summary: Lock all Collection Configurations + operationId: lock_all_configurations + tags: + - Collection Configurations + responses: + '200': + description: All configurations locked successfully + content: + application/json: + schema: + type: object + properties: + total_files: + type: integer + description: Total number of files processed + locked_files: + type: integer + description: Number of files successfully locked + failed_files: + type: integer + description: Number of files that failed to lock + results: + type: array + items: + type: object + properties: + name: + type: string + description: Name of the file + locked: + type: boolean + description: Whether 
the file was successfully locked + error: + type: string + description: Error message if locking failed '500': description: Processing error content: application/json: schema: - type: array - items: - type: object - /api/collections/{name}/: + $ref: '#/components/schemas/events' + /api/configurations/{file_name}/: get: summary: Get a collection configuration - operationId: get_collection + operationId: get_configuration + tags: + - Collection Configurations parameters: - - name: name + - name: file_name in: path required: true schema: @@ -62,24 +127,42 @@ type: string responses: '200': description: Collection configuration content: application/json: schema: - $ref: '#/components/schemas/collection' - '404': - description: Collection not found + $ref: '#/components/schemas/configuration' + '500': + description: Processing error content: application/json: schema: - type: string + $ref: '#/components/schemas/events' + put: + summary: Save a collection configuration + operationId: save_configuration + tags: + - Collection Configurations + parameters: + - name: file_name + in: path + required: true + schema: + type: string + responses: + '200': + description: Collection configuration file + content: + application/json: + schema: + $ref: '#/components/schemas/file' '500': description: Processing error content: application/json: schema: - type: array - items: - type: object + $ref: '#/components/schemas/events' post: summary: Process a collection configuration operationId: process_collection + tags: + - Collection Configurations parameters: - - name: name + - name: file_name in: path required: true @@ -92,34 +175,90 @@ schema: type: string responses: '200': description: Processing results content: application/json: schema: - $ref: '#/components/schemas/processing_results' - '404': - description: Collection not found + $ref: '#/components/schemas/events' + '500': + description: Processing error content: application/json: schema: - type: string + $ref: '#/components/schemas/events' + delete: + summary: Delete a collection configuration + operationId: delete_configuration + tags: + - Collection Configurations + parameters: + - name: file_name + in: path + required: true + schema: + type: string + responses: + '200': + description: Collection configuration deleted + content: + application/json: + schema: + $ref: '#/components/schemas/events' '500': description: Processing error content: application/json: schema: - type: array - items: - type: object + $ref: '#/components/schemas/events' + + + /api/configurations/collection/{name}: + post: + summary: Create a new collection + description: | + Creates a new collection by generating configuration and dictionary files from templates found in the /templates directory. + This endpoint creates both a configuration file and a dictionary file for the specified collection name. 
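A hypothetical call against this create-collection endpoint, assuming the local development server declared in the servers: block (http://localhost:8081):

```python
# Sketch only: create a configuration/dictionary pair from templates.
import requests

resp = requests.post("http://localhost:8081/api/configurations/collection/my_collection")
resp.raise_for_status()  # a 500 carries a ConfiguratorEvent payload instead
print(resp.json())       # the generated configuration document
```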
+ tags: + - Collection Configurations + parameters: + - name: name + in: path + required: true + description: The name of the collection to create + schema: + type: string + pattern: '^[a-zA-Z][a-zA-Z0-9_]*$' + example: "my_collection" + responses: + '200': + description: Collection created successfully + content: + application/json: + schema: + $ref: '#/components/schemas/configuration' + '500': + description: Internal server error + content: + application/json: + schema: + $ref: '#/components/schemas/events' - /api/render/json_schema/{schema_name}/: + /api/configurations/json_schema/{file_name}/{version}/: get: - summary: Get a Json Schema rendered for a schema - description: Json Schema for a specific schema version (e.g., "collection.1.0.0.1") + summary: Get a Json Schema + description: Json Schema for a specific schema version (e.g., "1.0.0.1") operationId: render_json_schema + tags: + - Collection Configurations parameters: - - name: schema_name + - name: file_name + in: path + required: true + schema: + description: Configuration file name + type: string + - name: version in: path required: true schema: + description: Version string (e.g., "1.0.0.1") type: string - description: Complete schema name including version (e.g., "collection.1.0.0.1") responses: '200': description: Schema @@ -127,33 +266,33 @@ content: text/json: schema: type: object - '404': - description: Schema not found - content: - application/json: - schema: - type: string '500': description: Processing error content: application/json: schema: - type: array - items: - type: object + $ref: '#/components/schemas/events' - /api/render/bson_schema/{schema_name}/: + /api/configurations/bson_schema/{file_name}/{version}/: get: - summary: Get a Bson Schema rendered for a schema - description: Bson Schema for a specific schema version (e.g., "collection.1.0.0.1") + summary: Get a Bson Schema + description: Bson Schema for a specific schema version (e.g., "1.0.0.1") operationId: render_bson_schema + tags: + - Collection Configurations parameters: - - name: schema_name + - name: file_name + in: path + required: true + schema: + description: Configuration file name + type: string + - name: version + in: path + required: true + schema: + description: Version string (e.g., "1.0.0.1") + type: string - description: Complete schema name including version (e.g., "collection.1.0.0.1") responses: '200': description: Schema @@ -161,60 +300,584 @@ content: text/json: schema: type: object - '404': - description: Schema not found + '500': + description: Processing error content: application/json: schema: - type: string + $ref: '#/components/schemas/events' + + /api/dictionaries/: + get: + summary: List all Dictionaries + operationId: list_dictionaries + tags: + - Data Dictionaries + responses: + '200': + description: List of dictionary configuration files + content: + application/json: + schema: + $ref: '#/components/schemas/files' + '500': + description: Processing error + content: + application/json: + schema: + $ref: '#/components/schemas/events' + patch: + summary: Lock all Dictionaries + operationId: lock_all_dictionaries + tags: + - Data Dictionaries + responses: + '200': + description: All dictionaries locked successfully + content: + application/json: + schema: + type: object + properties: + total_files: + type: integer + description: Total number of files processed + locked_files: + type: integer + description: Number of files successfully locked + failed_files: + type: integer + description: Number of files that 
failed to lock + results: + type: array + items: + type: object + properties: + name: + type: string + description: Name of the file + locked: + type: boolean + description: Whether the file was successfully locked + error: + type: string + description: Error message if locking failed + '500': + description: Processing error + content: + application/json: + schema: + $ref: '#/components/schemas/events' + /api/dictionaries/{file_name}/: + get: + summary: Get a dictionary + operationId: get_dictionary + tags: + - Data Dictionaries + parameters: + - name: file_name + in: path + required: true + schema: + type: string + responses: + '200': + description: Dictionary + content: + application/json: + schema: + $ref: '#/components/schemas/dictionary' + '500': + description: Processing error + content: + application/json: + schema: + $ref: '#/components/schemas/events' + put: + summary: Save a dictionary + operationId: save_dictionary + tags: + - Data Dictionaries + parameters: + - name: file_name + in: path + required: true + schema: + type: string + responses: + '200': + description: Dictionary file + content: + application/json: + schema: + $ref: '#/components/schemas/file' + '500': + description: Processing error + content: + application/json: + schema: + $ref: '#/components/schemas/events' + delete: + summary: Delete a dictionary + operationId: delete_dictionary + tags: + - Data Dictionaries + parameters: + - name: file_name + in: path + required: true + schema: + type: string + responses: + '200': + description: Dictionary deleted + content: + application/json: + schema: + $ref: '#/components/schemas/events' + '500': + description: Processing error + content: + application/json: + schema: + $ref: '#/components/schemas/events' + + + /api/types/: + get: + summary: List all Types + operationId: list_types + tags: + - Data Types + responses: + '200': + description: List of type files + content: + application/json: + schema: + $ref: '#/components/schemas/files' + '500': + description: Processing error + content: + application/json: + schema: + $ref: '#/components/schemas/events' + patch: + summary: Lock all Types + operationId: lock_all_types + tags: + - Data Types + responses: + '200': + description: All types locked successfully + content: + application/json: + schema: + type: object + properties: + total_files: + type: integer + description: Total number of files processed + locked_files: + type: integer + description: Number of files successfully locked + failed_files: + type: integer + description: Number of files that failed to lock + results: + type: array + items: + type: object + properties: + name: + type: string + description: Name of the file + locked: + type: boolean + description: Whether the file was successfully locked + error: + type: string + description: Error message if locking failed + '500': + description: Processing error + content: + application/json: + schema: + $ref: '#/components/schemas/events' + /api/types/{file_name}/: + get: + summary: Get a type + operationId: get_type + tags: + - Data Types + parameters: + - name: file_name + in: path + required: true + schema: + type: string + responses: + '200': + description: Type + content: + application/json: + schema: + $ref: '#/components/schemas/type' + '500': + description: Processing error + content: + application/json: + schema: + $ref: '#/components/schemas/events' + put: + summary: Save a type + operationId: save_type + tags: + - Data Types + parameters: + - name: file_name + in: path + required: true + schema: + type: string + 
responses: + '200': + description: Type file + content: + application/json: + schema: + $ref: '#/components/schemas/file' + '500': + description: Processing error + content: + application/json: + schema: + $ref: '#/components/schemas/events' + delete: + summary: Delete a type + operationId: delete_type + tags: + - Data Types + parameters: + - name: file_name + in: path + required: true + schema: + type: string + responses: + '200': + description: Type deleted + content: + application/json: + schema: + $ref: '#/components/schemas/events' + '500': + description: Processing error + content: + application/json: + schema: + $ref: '#/components/schemas/events' + + + /api/test_data/: + get: + summary: List all Test Data Files + operationId: list_data_files + tags: + - Test Data + responses: + '200': + description: List of test data files + content: + application/json: + schema: + $ref: '#/components/schemas/files' '500': description: Processing error + content: + application/json: + schema: + $ref: '#/components/schemas/events' + /api/test_data/{file_name}/: + get: + summary: Get a test data file + description: | + Returns the contents of a test data file as a JSON array. + MongoDB Extended JSON (e.g., `$oid`, `$date`) is supported for values. + operationId: get_data_file + tags: + - Test Data + parameters: + - name: file_name + in: path + required: true + schema: + type: string + responses: + '200': + description: Test Data File (always a JSON array) + content: + application/json: + schema: + type: array + items: {} + '500': + description: Processing error + content: + application/json: + schema: + $ref: '#/components/schemas/event' + put: + summary: Save a test data file + description: | + Saves the contents of a test data file as a JSON array. + MongoDB Extended JSON (e.g., `$oid`, `$date`) is supported for values. + operationId: save_data_file + tags: + - Test Data + parameters: + - name: file_name + in: path + required: true + schema: + type: string + requestBody: + required: true + content: + application/json: + schema: + type: array + items: {} + responses: + '200': + description: Test Data File saved + content: + application/json: + schema: + $ref: '#/components/schemas/file' + '500': + description: Processing error + content: + application/json: + schema: + $ref: '#/components/schemas/event' + delete: + summary: Delete a test data file + operationId: delete_data_file + tags: + - Test Data + parameters: + - name: file_name + in: path + required: true + schema: + type: string + responses: + '200': + description: Test Data File deleted + content: + application/json: + schema: + $ref: '#/components/schemas/events' + '500': + description: Processing error + content: + application/json: + schema: + $ref: '#/components/schemas/events' + + + /api/migrations/: + get: + summary: List all Migration Files + operationId: list_migrations + tags: + - Migrations + responses: + '200': + description: List of migration file names content: application/json: schema: type: array items: - type: object + type: string + '500': + description: Processing error + content: + application/json: + schema: + $ref: '#/components/schemas/events' - /api/render/openapi/{schema_name}/: + /api/migrations/{file_name}/: get: - summary: Get a OpenAPI rendered for a schema - description: OpenAPI Specification for a specific schema version (e.g., "collection.1.0.0.1") - operationId: render_openapi + summary: Get a migration file + description: | + Returns the contents of a migration file as JSON. 
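A hypothetical round trip through the migration-file endpoints described here, reusing the first_last_to_full.json example name from the configuration schema; the pipeline stages are illustrative:

```python
# Sketch only: save then fetch a migration file (a JSON array of stages).
import requests

BASE = "http://localhost:8081"  # assumed local dev server
pipeline = [
    {"$addFields": {"full_name": {"$concat": ["$first_name", " ", "$last_name"]}}},
    {"$unset": ["first_name", "last_name"]},
    {"$out": "users"},
]

requests.put(f"{BASE}/api/migrations/first_last_to_full.json", json=pipeline).raise_for_status()
print(requests.get(f"{BASE}/api/migrations/first_last_to_full.json").json())
```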
+ Migration files contain MongoDB aggregation pipelines for data migration. + operationId: get_migration + tags: + - Migrations parameters: - - name: schema_name + - name: file_name in: path required: true schema: type: string - description: Complete schema name including version (e.g., "collection.1.0.0.1") responses: '200': - description: OpenAPI Specification + description: Migration file content content: - text/yaml: + application/json: + schema: + type: array + items: {} + '500': + description: Processing error + content: + application/json: + schema: + $ref: '#/components/schemas/event' + put: + summary: Save a migration file + description: | + Saves the contents of a migration file as JSON. + Migration files contain MongoDB aggregation pipelines for data migration. + operationId: save_migration + tags: + - Migrations + parameters: + - name: file_name + in: path + required: true + schema: + type: string + requestBody: + required: true + content: + application/json: + schema: + type: array + items: {} + responses: + '200': + description: Migration file saved + content: + application/json: + schema: + $ref: '#/components/schemas/file' + '500': + description: Processing error + content: + application/json: + schema: + $ref: '#/components/schemas/event' + delete: + summary: Delete a migration file + operationId: delete_migration + tags: + - Migrations + parameters: + - name: file_name + in: path + required: true + schema: + type: string + responses: + '200': + description: Migration file deleted + content: + application/json: + schema: + $ref: '#/components/schemas/events' + '500': + description: Processing error + content: + application/json: + schema: + $ref: '#/components/schemas/events' + + /api/enumerators/: + get: + summary: Get all Enumerators + operationId: get_enumerators + tags: + - Enumerator Values + responses: + '200': + description: Enumerators data + content: + application/json: schema: type: object - '404': - description: Schema not found + properties: + enumerators: + type: array + items: + $ref: '#/components/schemas/enumerations' + '500': + description: Processing error content: application/json: schema: - type: string + $ref: '#/components/schemas/events' + put: + summary: Save Enumerators + operationId: save_enumerators + tags: + - Enumerator Values + requestBody: + required: true + content: + application/json: + schema: + type: array + responses: + '200': + description: Enumerators file + content: + application/json: + schema: + $ref: '#/components/schemas/file' '500': description: Processing error + content: + application/json: + schema: + $ref: '#/components/schemas/events' + + + /api/database/: + delete: + summary: Drop the Database + description: LIVE endpoint that DROPS THE DATABASE!!! 
There are three safety checks, + BUILT_AT must be Local, ENABLE_DROP_DATABASE must be True, and the database must have no collections with more than 100 documents + operationId: drop_database + tags: + - Database + responses: + '200': + description: Database dropped content: application/json: schema: type: array items: - type: object + $ref: '#/components/schemas/event' + '500': + description: Processing error + content: + application/json: + schema: + $ref: '#/components/schemas/events' /api/health: get: summary: Health check endpoint description: Returns Prometheus-formatted metrics for monitoring operationId: health_check + tags: + - Observability responses: '200': description: Prometheus metrics @@ -228,6 +891,8 @@ get: summary: Get current configuration operationId: get_config + tags: + - Observability responses: '200': description: Current configuration @@ -237,21 +902,51 @@ $ref: '#/components/schemas/config' '500': description: Processing error + components: schemas: - collections: + files: type: array items: - type: object - properties: - collection_name: - type: string - version: - type: string - targeted_version: - type: string - description: The target version (last version) for this collection - collection: + $ref: '#/components/schemas/file' + file: + type: object + properties: + file_name: + type: string + created_at: + type: string + format: date-time + updated_at: + type: string + format: date-time + size: + type: number + description: Size in bytes + enumerators: + type: array + items: + $ref: '#/components/schemas/enumerations' + enumerations: + type: object + properties: + name: + type: string + description: Name of the enumeration + status: + type: string + description: Status of the enumeration (e.g., "Active", "Deprecated") + version: + type: integer + description: Version number of the enumeration + enumerators: + type: object + description: Dictionary of enumerator names to their value mappings + additionalProperties: + type: object + additionalProperties: + type: string + configuration: description: SchemaManager Collection Configuration type: object required: @@ -261,6 +956,9 @@ name: description: The name of the MongoDB collection type: string + description: + description: A description of the MongoDB collection + type: string versions: description: List of version configurations in chronological order type: array @@ -269,85 +967,176 @@ description: Configuration for a specific version properties: version: - description: Version string in format major.minor.patch.schema + description: Version string in format major.minor.patch.enumerator_version type: string - test_data: - description: Name of the test data file for this version - type: string - add_indexes: - description: List of indexes to add in this version. - type: array - items: - description: Index items are defined at https://www.mongodb.com/docs/manual/indexes/ drop_indexes: description: List of index names to drop in this version type: array items: type: string - aggregations: - description: List of aggregation pipelines to run for this version. + migrations: + description: List of migration/aggregation pipelines to run to migrate data from previous version to this version. 
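Pulling the configuration schema together, a hypothetical collection configuration that exercises the fields described here (all names and versions are illustrative), parsed with the same loader the service uses:

```python
# Sketch only: a collection configuration matching the schema above.
import yaml

config_yaml = """
name: users
description: User accounts
versions:
  - version: 1.0.0.1
    add_indexes:
      - name: email_idx
        key: [{email: 1}]
    test_data: users.1.0.0.1.json
  - version: 1.0.1.1
    drop_indexes: [email_idx]
    migrations: [first_last_to_full.json]
"""
config = yaml.safe_load(config_yaml)
print(config["name"], [v["version"] for v in config["versions"]])
```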
type: array + items: + description: A migration file name + type: string + example: first_last_to_full.json + add_indexes: + description: List of indexes to add in this version. + type: array + items: + description: Index items are defined at https://www.mongodb.com/docs/manual/indexes/ + test_data: + description: Name of the test data file for this version + type: string + example: sample.1.0.0.1.json + dictionary: + description: A Simple Schema Dictionary + type: object + properties: + 'ref': + description: Reference to another dictionary + type: string + name: + description: The name of the property + type: string + description: + description: A description of the property + type: string + type: + description: The type of the property + type: string + enum: + - object + - array + - enum + - enum_array + - TYPE required: - - operation - - status + description: Whether the property is required + type: boolean + default: false properties: - operation: - description: Type of operation performed - type: string - enum: - - evaluate_version - - remove_schema - - drop_index - - run_migration - - create_index - - apply_schema - - load_test_data - - update_version - - collection_processing - - version_processing - - overall_status - status: - description: Operation status - type: string - enum: - - success - - error - - skipped - collection: - description: Collection name affected by the operation - type: string - message: - description: Human-readable status message - type: string - details_type: - description: Type of details for complex operations - type: string - enum: - - schema - - index - - migration - - test_data - - version - - error - - overall - details: - description: Operation-specific details (object or array) - oneOf: - - type: object - additionalProperties: true - - type: array - items: - type: object - additionalProperties: true - additionalProperties: true + description: Properties of an Object type + type: object + additionalProperties: + $ref: '#/components/schemas/dictionary' + additionalProperties: + description: Should the object type allow additional properties + type: boolean + items: + description: Items of an Array type + type: array + items: + $ref: '#/components/schemas/dictionary' + enum: + description: Enum name for enum or enum_array types + type: string + required: + - description + - name + - type + type: + description: A Type definition - must be one of three valid forms + type: object + properties: + description: + description: A description of the type + type: string + # Universal Primitive - schema only + schema: + description: Universal primitive schema (mutually exclusive with json_type/bson_type) + type: object + additionalProperties: true + # Typed Primitive - json_type/bson_type only + json_type: + description: JSON schema definition (mutually exclusive with schema) + type: object + additionalProperties: true + bson_type: + description: BSON schema definition (mutually exclusive with schema) + type: object + additionalProperties: true + # Complex Type - object or array + type: + description: Type of complex type (object or array) + type: string + enum: [object, array] + properties: + description: Properties for object types + type: object + additionalProperties: + $ref: '#/components/schemas/type' + 
items: + description: Items definition for array types + $ref: '#/components/schemas/type' + required: + description: Whether the type is required + type: boolean + default: false + additionalProperties: + description: Whether additional properties are allowed (for object types) + type: boolean + default: false + required: + - description + primitive_type: + description: A Simple Schema Primitive Type + type: object + properties: + title: + description: The name of the type + type: string + type: + description: A Universal Type + type: object + additionalProperties: true + json_type: + description: Type rendered as JSON Schema + type: object + additionalProperties: true + bson_type: + description: Type rendered as BSON Schema + type: object + additionalProperties: true + events: + description: Array of processing or validation events + type: array + items: + $ref: '#/components/schemas/event' + event: + type: object + required: [id, type, status] + properties: + id: + type: string + description: Unique identifier for the event + type: + type: string + description: Type of the event + status: + type: string + enum: [PENDING, SUCCESS, FAILURE] + description: Current status of the event + data: + description: Event data or error information (null until recorded) + type: object + additionalProperties: true + nullable: true + starts: + type: string + format: date-time + description: When the event started + ends: + type: string + format: date-time + description: When the event ended (null if still pending) + sub_events: + type: array + description: List of sub-events + items: + $ref: '#/components/schemas/event' config: type: object properties: @@ -369,18 +1158,3 @@ - default - file - environment - token: - type: object - properties: - user_id: - description: Logged in user_id value - type: string - from_ip: - description: User's current IP address - type: string - roles: - description: User's RBAC roles - type: array - items: - type: string - diff --git a/setup.py b/setup.py deleted file mode 100644 index a17a8fb..0000000 --- a/setup.py +++ /dev/null @@ -1,33 +0,0 @@ -from setuptools import setup, find_packages - -with open("README.md", "r", encoding="utf-8") as fh: - long_description = fh.read() - -setup( - name="stage0-mongodb-api", - version="1.0.0", - author="Agile Learning Institute", - author_email="info@agilelearninginstitute.org", - description="A MongoDB API for schema, version, and migration management", - long_description="Build a utility container for used by projects that use Mongo DB", - long_description_content_type="text/markdown", - url="https://github.com/agile-learning-institute/stage0_mongodb_api", - packages=find_packages(), - classifiers=[ - "Development Status :: 4 - Beta", - "Intended Audience :: Developers", - "License :: OSI Approved :: MIT License", - "Operating System :: OS Independent", - "Programming Language :: Python :: 3", - "Programming Language :: Python :: 3.12", - ], - python_requires=">=3.12", - install_requires=[ - "python-dotenv", - "flask", - "prometheus-flask-exporter", - "pymongo", - "stage0-py-utils", - "pyyaml>=6.0.1", - ], -) \ No newline at end of file diff --git a/stage0_mongodb_api/managers/__init__.py b/stage0_mongodb_api/managers/__init__.py deleted file mode 100644 index b628b20..0000000 --- a/stage0_mongodb_api/managers/__init__.py +++ /dev/null @@ -1 +0,0 @@ -"""MongoDB collection management utilities.""" \ No newline at end of file diff --git a/stage0_mongodb_api/managers/config_manager.py 
b/stage0_mongodb_api/managers/config_manager.py deleted file mode 100644 index 821d69c..0000000 --- a/stage0_mongodb_api/managers/config_manager.py +++ /dev/null @@ -1,570 +0,0 @@ -from typing import Dict, List, Optional -from stage0_py_utils import Config -import yaml -import os -import logging -from stage0_py_utils import MongoIO -from stage0_mongodb_api.managers.version_number import VersionNumber -from stage0_mongodb_api.managers.version_manager import VersionManager -from stage0_mongodb_api.managers.schema_manager import SchemaManager -from stage0_mongodb_api.managers.index_manager import IndexManager -from stage0_mongodb_api.managers.migration_manager import MigrationManager - -logger = logging.getLogger(__name__) - -class ConfigManager: - """Manages collection configurations and version processing. - - This class handles: - 1. Loading and validating collection configurations - 2. Resolving schema and enumerator versions - 3. Processing version updates for collections - 4. Coordinating schema, index, and migration operations - """ - - def __init__(self): - """Initialize the config manager.""" - self.config = Config.get_instance() - self.mongo_io = MongoIO.get_instance() - self.collection_configs: Dict[str, Dict] = {} - self.load_errors: List[Dict] = [] - self.version_manager = VersionManager() - self.index_manager = IndexManager() - self.migration_manager = MigrationManager() - self._load_collection_configs() - - # Create schema manager with our collection configs - self.schema_manager = SchemaManager(self.collection_configs) - - def _load_collection_configs(self) -> None: - """Load collection configurations from the input folder. - - Only performs basic file existence and YAML parsing checks. - All other validation is handled by validate_configs(). - """ - collections_folder = os.path.join(self.config.INPUT_FOLDER, "collections") - logger.info(f"Loading collections from {collections_folder}") - - if not os.path.exists(collections_folder): - self.load_errors.append({ - "error": "directory_not_found", - "error_id": "CFG-001", - "path": collections_folder, - "message": f"Collections directory not found: {collections_folder}" - }) - return - - # Load all YAML files from collections folder - for file in os.listdir(collections_folder): - if not file.endswith(".yaml"): - continue - - file_path = os.path.join(collections_folder, file) - try: - with open(file_path, "r") as f: - data = yaml.safe_load(f) - key = os.path.splitext(file)[0] - self.collection_configs[key] = data - except yaml.YAMLError as e: - self.load_errors.append({ - "error": "parse_error", - "error_id": "CFG-002", - "file": file, - "message": str(e) - }) - except Exception as e: - self.load_errors.append({ - "error": "load_error", - "error_id": "CFG-003", - "file": file, - "message": str(e) - }) - - logger.info(f"Loaded {len(self.collection_configs)} collection configurations") - - def validate_configs(self) -> List[Dict]: - """Validate all loaded collection configurations. 
- - Returns: - List of validation errors, empty if all valid - """ - errors = [] - - for filename, config in self.collection_configs.items(): - # Validate required fields - if not isinstance(config, dict): - errors.append({ - "error": "invalid_config_format", - "error_id": "CFG-101", - "file": filename, - "message": "Configuration must be a dictionary" - }) - continue - - if "name" not in config: - errors.append({ - "error": "missing_required_field", - "error_id": "CFG-201", - "file": filename, - "message": "Configuration must include 'name' field" - }) - continue - - if "versions" not in config: - errors.append({ - "error": "missing_required_field", - "error_id": "CFG-202", - "file": filename, - "message": "Configuration must include 'versions' field" - }) - continue - - # Validate versions - for version_config in config["versions"]: - if not isinstance(version_config, dict): - errors.append({ - "error": "invalid_version_format", - "error_id": "CFG-501", - "file": filename, - "message": "Version must be a dictionary" - }) - continue - - if "version" not in version_config: - errors.append({ - "error": "missing_version_number", - "error_id": "CFG-601", - "file": filename, - "message": "Version must include 'version' field" - }) - continue - - # Validate version format using VersionNumber - try: - VersionNumber(version_config["version"]) - except ValueError as e: - errors.append({ - "error": "invalid_version_format", - "error_id": "CFG-701", - "file": filename, - "message": f"Version {version_config['version']}: {str(e)}" - }) - continue - - # Add schema validation errors if schema manager is available - if hasattr(self, "schema_manager") and self.schema_manager: - schema_errors = self.schema_manager.validate_schema() - errors.extend(schema_errors) - - return errors - - def get_collection_config(self, collection_name: str) -> Optional[Dict]: - """Get a specific collection configuration. - - Args: - collection_name: Name of the collection to retrieve - - Returns: - Dict containing the collection configuration, or None if not found - """ - return self.collection_configs.get(collection_name) - - def process_all_collections(self) -> Dict[str, List[Dict]]: - """Process all collections that have pending versions. 
- - Returns: - Dict[str, List[Dict]]: Dictionary mapping collection names to their operation results - """ - results = {} - any_collection_failed = False - - # Process enumerators first - enumerators_result = self._process_enumerators() - results["enumerators"] = [enumerators_result] - - # Check if enumerators processing failed - if enumerators_result.get("status") == "error": - any_collection_failed = True - - # Process all collections - for collection_name in self.collection_configs.keys(): - try: - results[collection_name] = self.process_collection_versions(collection_name) - - # Check if this collection had any errors - if any(isinstance(op, dict) and op.get("status") == "error" for op in results[collection_name]): - any_collection_failed = True - - except Exception as e: - logger.error(f"Error processing collection {collection_name}: {str(e)}") - results[collection_name] = [{ - "operation": "collection_processing", - "collection": collection_name, - "message": f"Error processing collection: {str(e)}", - "details_type": "error", - "details": { - "error": str(e) - }, - "status": "error" - }] - any_collection_failed = True - - # Add final overall status operation - overall_status = "error" if any_collection_failed else "success" - overall_message = "Some collections failed to process" if any_collection_failed else "All collections processed successfully" - - # Add the overall status to each collection's results - for collection_name in results.keys(): - # Check if this collection had any errors (excluding overall_status operations) - collection_has_errors = any( - isinstance(op, dict) and op.get("status") == "error" - and op.get("operation") != "overall_status" - for op in results[collection_name] - ) - - # Determine this collection's status - collection_status = "error" if collection_has_errors else "success" - collection_message = "Collection processing failed" if collection_has_errors else "Collection processed successfully" - - results[collection_name].append({ - "operation": "overall_status", - "message": collection_message, - "details_type": "overall", - "details": { - "collections_processed": len(self.collection_configs), - "collections_failed": sum(1 for result in results.values() - if any(isinstance(op, dict) and op.get("status") == "error" - and op.get("operation") != "overall_status" - for op in result)) - }, - "status": collection_status - }) - - return results - - def process_collection_versions(self, collection_name: str) -> List[Dict]: - """Process all pending versions for a collection. - - This method coordinates the processing workflow by: - 1. Getting current version from database - 2. Identifying pending versions from config - 3. Processing each version in sequence - 4. 
Updating version records - - Args: - collection_name: Name of the collection to process - - Returns: - List[Dict]: List of operation results - - Raises: - ValueError: If collection_name is empty or not found in configs - """ - if not collection_name: - raise ValueError("Collection name cannot be empty") - - if collection_name not in self.collection_configs: - raise ValueError(f"Collection '{collection_name}' not found in configurations") - - collection_config = self.collection_configs[collection_name] - versions = collection_config.get("versions", []) - operations = [] - - try: - # Process each version in sequence - for version_config in versions: - current_version = VersionManager.get_current_version(collection_name) - version_number = VersionNumber(version_config.get("version")) - operations.append({ - "operation": "evaluate_version", - "collection": collection_name, - "message": f"Evaluating version {version_number}", - "details_type": "version", - "details": { - "version": str(version_number), - "current_version": current_version - }, - "status": "success" - }) - - # Only process versions greater than current version - if version_number > current_version: - logger.info(f"Processing version {str(version_number)} for {collection_name}") - version_operations = self._process_version(collection_name, version_config) - operations.extend(version_operations) - - # Check if any operation in this version failed - if any(isinstance(op, dict) and op.get("status") == "error" for op in version_operations): - logger.error(f"Version {version_number} processing failed for {collection_name}, stopping version processing") - break - - current_version = VersionNumber(self.version_manager.get_current_version(collection_name)) - else: - logger.info(f"Skipping version {str(version_number)} for {collection_name} - already processed") - operations.append({ - "operation": "evaluate_version", - "collection": collection_name, - "message": f"Skipping version {version_number} - already processed", - "details_type": "version", - "details": { - "version": str(version_number), - "current_version": current_version, - "skipped": True - }, - "status": "skipped" - }) - - except Exception as e: - logger.error(f"Error during version processing for {collection_name}: {str(e)}") - operations.append({ - "operation": "version_processing", - "collection": collection_name, - "message": f"Error during version processing: {str(e)}", - "details_type": "error", - "details": { - "error": str(e), - "version": "unknown" - }, - "status": "error" - }) - - return operations - - def _process_version(self, collection_name: str, version_config: Dict) -> List[Dict]: - """Process a single version configuration for a collection. 
- - Args: - collection_name: Name of the collection - version_config: Version configuration to process - - Returns: - List[Dict]: List of operation results, including any errors that occurred - """ - operations = [] - - try: - # Required: Remove existing schema validation - operations.append({ - "operation": "remove_schema", - "collection": collection_name, - "message": f"Removing schema validation for {collection_name}", - "status": "success" - }) - remove_result = self.schema_manager.remove_schema(collection_name) - operations.append(remove_result) - self._assert_no_errors(operations) - - # Optional: Process drop_indexes if present - if "drop_indexes" in version_config: - for index in version_config["drop_indexes"]: - operations.append({ - "operation": "drop_index", - "collection": collection_name, - "message": f"Dropping index {index} for {collection_name}", - "status": "success" - }) - drop_result = self.index_manager.drop_index(collection_name, index) - operations.append(drop_result) - self._assert_no_errors(operations) - - # Optional: Process aggregations if present - if "aggregations" in version_config: - for migration in version_config["aggregations"]: - pipeline_name = migration.get("name", "unnamed_pipeline") - operations.append({ - "operation": "run_migration", - "collection": collection_name, - "message": f"Running Aggregation Pipeline '{pipeline_name}' for {collection_name}", - "status": "success" - }) - migration_result = self.migration_manager.run_migration(collection_name, migration) - operations.append(migration_result) - self._assert_no_errors(operations) - - # Optional: Process add_indexes if present - if "add_indexes" in version_config: - operations.append({ - "operation": "create_index", - "collection": collection_name, - "message": f"Creating indexes for {collection_name}", - "status": "success" - }) - create_result = self.index_manager.create_index(collection_name, version_config["add_indexes"]) - operations.append(create_result) - self._assert_no_errors(operations) - - # Required: Apply schema validation - operations.append({ - "operation": "apply_schema", - "collection": collection_name, - "message": f"Applying schema for {collection_name}", - "status": "success" - }) - apply_result = self.schema_manager.apply_schema(f"{collection_name}.{version_config.get("version")}") - operations.append(apply_result) - self._assert_no_errors(operations) - - # Optional: Load test data if enabled and present - if "test_data" in version_config and self.config.LOAD_TEST_DATA: - operations.append({ - "operation": "load_test_data", - "collection": collection_name, - "message": f"Loading test data for {collection_name} - {version_config['test_data']}", - "status": "success" - }) - test_data_result = self._load_test_data(collection_name, version_config["test_data"]) - operations.append(test_data_result) - self._assert_no_errors(operations) - - # Update version if version string is present - operations.append({ - "operation": "update_version", - "collection": collection_name, - "message": f"Updating version for {collection_name}", - "status": "success" - }) - version_result = self.version_manager.update_version(collection_name, version_config["version"]) - operations.append(version_result) - self._assert_no_errors(operations) - - except Exception as e: - logger.error(f"Error processing version for {collection_name}: {str(e)}") - operations.append({ - "operation": "version_processing", - "collection": collection_name, - "message": f"Error processing version: {str(e)}", - "details_type": 
"error", - "details": { - "error": str(e) - }, - "status": "error" - }) - - return operations - - def _load_test_data(self, collection_name: str, test_data_file: str) -> Dict: - """Load test data for a collection. - - Args: - collection_name: Name of the collection - test_data_file: Name of the test data file - - Returns: - Dict containing operation result in consistent format - """ - from stage0_py_utils.mongo_utils.mongo_io import TestDataLoadError - try: - data_file = os.path.join(self.config.INPUT_FOLDER, "data", test_data_file) - results = self.mongo_io.load_test_data(collection_name, data_file) - - return { - "operation": "load_test_data", - "collection": collection_name, - "message": f"Test data loaded successfully from {test_data_file}", - "details_type": "test_data", - "details": { - "test_data_file": str(data_file), - "results": results, - "documents_loaded": results.get("documents_loaded", 0), - "inserted_ids": results.get("inserted_ids", []), - "acknowledged": results.get("acknowledged", False) - }, - "status": "success" - } - - except TestDataLoadError as e: - return { - "operation": "load_test_data", - "collection": collection_name, - "message": str(e), - "details_type": "error", - "details": { - "error": str(e), - "test_data_file": str(data_file), - "details": e.details - }, - "status": "error" - } - except Exception as e: - error_message = str(e) - logger.error(f"Failed to load test data for {collection_name}: {error_message}") - return { - "operation": "load_test_data", - "collection": collection_name, - "message": error_message, - "details_type": "error", - "details": { - "error": error_message, - "test_data_file": str(data_file) - }, - "status": "error" - } - - def _process_enumerators(self) -> Dict: - """Process enumerators from the enumerators.json file. - - Returns: - Dict containing operation result in consistent format - """ - try: - # Use the already-loaded enumerators from schema_manager - enumerators = self.schema_manager.enumerators - - # Process each enumerator version - processed_count = 0 - - for document in enumerators: - version = document.get("version") - - # Upsert the document using version as the key - result = self.mongo_io.upsert_document( - self.config.ENUMERATORS_COLLECTION_NAME, - {"version": version}, - document - ) - - # upsert_document returns the document itself, so if we get a result, it succeeded - if result and isinstance(result, dict): - processed_count += 1 - else: - raise Exception(f"Failed to upsert version {version}") - - return { - "operation": "process_enumerators", - "collection": self.config.ENUMERATORS_COLLECTION_NAME, - "message": f"Successfully processed {processed_count} enumerator versions", - "details_type": "success", - "details": { - "processed_count": processed_count, - "total_count": len(enumerators) - }, - "status": "success" - } - - except Exception as e: - return { - "operation": "process_enumerators", - "collection": self.config.ENUMERATORS_COLLECTION_NAME, - "message": f"Error processing enumerators: {str(e)}", - "details_type": "error", - "details": { - "error": str(e), - "error_type": type(e).__name__ - }, - "status": "error" - } - - def _assert_no_errors(self, operations: List[Dict]) -> None: - """Check the last operation for errors and raise an exception if found. 
- - Args: - operations: List of operations to check - - Raises: - Exception: If the last operation has status "error" - """ - if operations and isinstance(operations[-1], dict) and operations[-1].get("status") == "error": - error_op = operations[-1] - raise Exception(f"Operation failed: {error_op.get('operation', 'unknown')} - {error_op.get('error', 'Unknown error')}") - - \ No newline at end of file diff --git a/stage0_mongodb_api/managers/index_manager.py b/stage0_mongodb_api/managers/index_manager.py deleted file mode 100644 index a2c2620..0000000 --- a/stage0_mongodb_api/managers/index_manager.py +++ /dev/null @@ -1,104 +0,0 @@ -from typing import Dict, List -from stage0_py_utils import MongoIO - -class IndexManager: - """Manages MongoDB indexes for collections.""" - - @staticmethod - def create_index(collection_name: str, index_configs: list) -> Dict: - """Create one or more indexes based on configuration. - - Args: - collection_name: Name of the collection - index_configs: List of index configuration dictionaries. Each dict must contain 'name' and 'key' fields. - - Returns: - Dict containing operation result in consistent format - - Raises: - ValueError: If any index_config is missing required fields - """ - for idx in index_configs: - if "name" not in idx: - raise ValueError("Index configuration must include 'name' field") - if "key" not in idx: - raise ValueError("Index configuration must include 'key' field") - - mongo = MongoIO.get_instance() - mongo.create_index(collection_name, index_configs) - - return { - "operation": "create_index", - "collection": collection_name, - "message": f"Created {len(index_configs)} index(es) for {collection_name}", - "details_type": "index", - "details": { - "indexes": [idx["name"] for idx in index_configs], - "index_configs": index_configs - }, - "status": "success" - } - - @staticmethod - def drop_index(collection_name: str, index_name: str) -> Dict: - """Drop an index by name. - - Args: - collection_name: Name of the collection - index_name: Name of the index to drop - - Returns: - Dict containing operation result in consistent format - """ - mongo = MongoIO.get_instance() - try: - mongo.drop_index(collection_name, index_name) - except Exception as e: - return { - "operation": "drop_index", - "collection": collection_name, - "message": str(e), - "details_type": "error", - "details": { - "error": str(e), - "index": index_name - }, - "status": "error" - } - - return { - "operation": "drop_index", - "collection": collection_name, - "message": f"Dropped index '{index_name}' from {collection_name}", - "details_type": "index", - "details": { - "index": index_name - }, - "status": "success" - } - - @staticmethod - def list_indexes(collection_name: str) -> Dict: - """List all indexes for a collection. 
- - Args: - collection_name: Name of the collection - - Returns: - Dict containing operation result: - { - "status": "success", - "operation": "list_indexes", - "collection": str, - "indexes": List[Dict] # List of index configurations - } - """ - mongo = MongoIO.get_instance() - indexes = mongo.get_indexes(collection_name=collection_name) - - return { - "operation": "list_indexes", - "collection": collection_name, - "indexes": indexes, - "status": "success" - } \ No newline at end of file diff --git a/stage0_mongodb_api/managers/migration_manager.py b/stage0_mongodb_api/managers/migration_manager.py deleted file mode 100644 index b9e186e..0000000 --- a/stage0_mongodb_api/managers/migration_manager.py +++ /dev/null @@ -1,66 +0,0 @@ -from typing import Dict, List -from stage0_py_utils import MongoIO - -class MigrationManager: - """Manages data migrations for collections using MongoDB aggregation pipelines.""" - - @staticmethod - def run_migration(collection_name: str, migration: Dict) -> Dict: - """Run a single migration pipeline on a collection. - - Args: - collection_name: Name of the collection - migration: Migration configuration containing: - - name: str (optional) - Name of the pipeline for logging - - pipeline: List[Dict] - MongoDB aggregation pipeline stages - See MongoDB's [Aggregation Pipeline](https://www.mongodb.com/docs/manual/core/aggregation-pipeline/) - for details on supported stages. - - Returns: - Dict containing operation result in consistent format - - Raises: - ValueError: If migration is invalid or pipeline is empty - """ - if not migration or "pipeline" not in migration: - raise ValueError("Migration must contain a 'pipeline' field") - - pipeline_name = migration.get("name", "unnamed_pipeline") - pipeline_stages = migration["pipeline"] - - if not pipeline_stages: - raise ValueError(f"Pipeline '{pipeline_name}' cannot be empty") - - mongo = MongoIO.get_instance() - - try: - # Execute the entire pipeline at once - mongo.execute_pipeline(collection_name, pipeline_stages) - return { - "operation": "migration", - "collection": collection_name, - "message": f"Migration pipeline '{pipeline_name}' completed successfully", - "details_type": "migration", - "details": { - "pipeline": { - "name": pipeline_name, - "stages": len(pipeline_stages) - } - }, - "status": "success" - } - except Exception as e: - return { - "operation": "migration", - "collection": collection_name, - "message": str(e), - "details_type": "error", - "details": { - "error": str(e), - "pipeline": { - "name": pipeline_name, - "stages": len(pipeline_stages) - } - }, - "status": "error" - } \ No newline at end of file diff --git a/stage0_mongodb_api/managers/schema_manager.py b/stage0_mongodb_api/managers/schema_manager.py deleted file mode 100644 index d4cbb02..0000000 --- a/stage0_mongodb_api/managers/schema_manager.py +++ /dev/null @@ -1,468 +0,0 @@ -from typing import Dict, List, Set, Optional, Any -import os -import re -import yaml -import json -import logging -from stage0_py_utils import Config, MongoIO -from stage0_mongodb_api.managers.schema_renderer import SchemaRenderer -from stage0_mongodb_api.managers.schema_validator import SchemaValidator -from stage0_mongodb_api.managers.schema_types import SchemaContext, SchemaFormat -from stage0_mongodb_api.managers.version_number import VersionNumber - -logger = logging.getLogger(__name__) - -class SchemaError(Exception): - """Base exception for schema-related errors.""" - pass - -class SchemaManager: - """Manager class for handling schema operations.""" - - 
def __init__(self, collection_configs: Optional[Dict[str, Dict]] = None): - """Initialize the schema manager. - - Args: - collection_configs: Optional collection configurations. If not provided, - will be loaded from the input folder. - """ - self.config = Config.get_instance() - self.mongo = MongoIO.get_instance() - self.collection_configs = collection_configs or {} - self.types: Dict = {} - self.enumerators: List[Dict] = [] - self.dictionaries: Dict = {} - - # Load all schema definitions - self.load_errors: List[Dict] = [] - self.load_errors.extend(self._load_types()) - self.load_errors.extend(self._load_enumerators()) - self.load_errors.extend(self._load_dictionaries()) - - # If collection_configs wasn't provided, load them - if not self.collection_configs: - self._load_collection_configs() - - # Resolve $ref values in dictionaries (after all dictionaries are loaded) - ref_errors = self._resolve_refs() - self.load_errors.extend(ref_errors) - - def _load_types(self) -> List[Dict]: - """Load type definitions. - - Returns: - List of load errors - """ - errors = [] - types_dir = os.path.join(self.config.INPUT_FOLDER, "types") - if not os.path.exists(types_dir): - errors.append({ - "error": "directory_not_found", - "error_id": "SCH-001", - "path": types_dir, - "message": "Types directory not found" - }) - return errors - - try: - for filename in os.listdir(types_dir): - if filename.endswith(".yaml"): - file_path = os.path.join(types_dir, filename) - try: - with open(file_path, "r") as f: - type_def = yaml.safe_load(f) - self.types[filename[:-5]] = type_def - except yaml.YAMLError: - errors.append({ - "error": "parse_error", - "error_id": "SCH-002", - "file": filename, - "message": "Failed to parse type definition" - }) - except Exception as e: - errors.append({ - "error": "load_error", - "error_id": "SCH-003", - "file": filename, - "message": str(e) - }) - except Exception as e: - errors.append({ - "error": "load_error", - "error_id": "SCH-003", - "path": types_dir, - "message": str(e) - }) - return errors - - def _load_enumerators(self) -> List[Dict]: - """Load all enumerator definitions from the enumerators.json file. - - Returns: - List of load errors - """ - errors = [] - enumerator_file = os.path.join(self.config.INPUT_FOLDER, "data", "enumerators.json") - - try: - with open(enumerator_file, 'r') as f: - enumerators = json.load(f) - self.enumerators = enumerators - - except FileNotFoundError: - errors.append({ - 'error_id': 'SCH-004', - 'message': f'Enumerator file not found: {enumerator_file}' - }) - except json.JSONDecodeError as e: - errors.append({ - 'error_id': 'SCH-007', - 'message': f'Failed to parse enumerator file {enumerator_file}: {str(e)}' - }) - return errors - - def _load_dictionaries(self) -> List[Dict]: - """Load dictionary definitions. 
- - Returns: - List of load errors - """ - errors = [] - dictionaries_dir = os.path.join(self.config.INPUT_FOLDER, "dictionary") - if not os.path.exists(dictionaries_dir): - errors.append({ - "error": "directory_not_found", - "error_id": "SCH-009", - "path": dictionaries_dir, - "message": "Dictionaries directory not found" - }) - return errors - - try: - for filename in os.listdir(dictionaries_dir): - if filename.endswith((".yaml", ".yml")): - file_path = os.path.join(dictionaries_dir, filename) - try: - with open(file_path, "r") as f: - dict_def = yaml.safe_load(f) - self.dictionaries[os.path.splitext(filename)[0]] = dict_def - except yaml.YAMLError: - errors.append({ - "error": "parse_error", - "error_id": "SCH-011", - "file": filename, - "message": "Failed to parse dictionary definition" - }) - except Exception as e: - errors.append({ - "error": "load_error", - "error_id": "SCH-012", - "file": filename, - "message": str(e) - }) - except Exception as e: - errors.append({ - "error": "load_error", - "error_id": "SCH-012", - "path": dictionaries_dir, - "message": str(e) - }) - return errors - - def _resolve_refs(self) -> List[Dict]: - """Resolve all $ref values in loaded dictionaries. - - This method recursively traverses all dictionary definitions and replaces - $ref objects with the actual referenced dictionary content. - - Returns: - List of errors encountered during resolution - """ - ref_errors = [] - - # Create a temporary copy of dictionaries for resolution - resolved = {} - - for dict_name, dict_def in self.dictionaries.items(): - resolved_def, errors = self._resolve_refs_in_object(dict_def, dict_name, set()) - resolved[dict_name] = resolved_def - ref_errors.extend(errors) - - self.dictionaries = resolved - - return ref_errors - - def _resolve_refs_in_object(self, obj: Any, dict_name: str, visited: Set[str]) -> tuple[Any, List[Dict]]: - """Recursively resolve $ref values in an object. 
- - Args: - obj: The object to resolve $ref values in - dict_name: The name of the dictionary being resolved - visited: Set of already visited paths (for cycle detection) - - Returns: - Tuple of (resolved_object, list_of_errors) - """ - errors = [] - if isinstance(obj, dict): - # Check if this is a $ref object - if "$ref" in obj: - ref_name = obj["$ref"] - if ref_name in visited: - errors.append({ - "error": "circular_reference", - "error_id": "SCH-013", - "dict_name": dict_name, - "ref_name": ref_name, - "message": f"Circular reference detected: {ref_name}" - }) - return obj, errors - elif ref_name not in self.dictionaries: - errors.append({ - "error": "ref_not_found", - "error_id": "SCH-014", - "dict_name": dict_name, - "ref_name": ref_name, - "message": f"Referenced dictionary not found: {ref_name}" - }) - return obj, errors - else: - # Resolve the reference - replace the entire object with the referenced content - visited.add(ref_name) - resolved, ref_errors = self._resolve_refs_in_object(self.dictionaries[ref_name], dict_name, visited) - visited.remove(ref_name) - errors.extend(ref_errors) - return resolved, errors - - # Otherwise, recursively resolve all values in the dictionary - resolved = {} - for key, value in obj.items(): - resolved_value, value_errors = self._resolve_refs_in_object(value, dict_name, visited) - resolved[key] = resolved_value - errors.extend(value_errors) - return resolved, errors - - elif isinstance(obj, list): - # Recursively resolve all items in the list - resolved_items = [] - for item in obj: - resolved_item, item_errors = self._resolve_refs_in_object(item, dict_name, visited) - resolved_items.append(resolved_item) - errors.extend(item_errors) - return resolved_items, errors - else: - # Primitive value, return as-is - return obj, errors - - def _load_collection_configs(self) -> None: - """Load collection configurations from the input folder. - - This method is only called if collection_configs is not provided in the constructor. - """ - collections_folder = os.path.join(self.config.INPUT_FOLDER, "collections") - logger.info(f"Loading collections from {collections_folder}") - - if not os.path.exists(collections_folder): - self.load_errors.append({ - "error": "directory_not_found", - "error_id": "CFG-001", - "path": collections_folder, - "message": "Collections directory not found" - }) - return - - # Load all YAML files from collections folder - for file in os.listdir(collections_folder): - if not file.endswith(".yaml"): - continue - - file_path = os.path.join(collections_folder, file) - try: - with open(file_path, "r") as f: - data = yaml.safe_load(f) - key = os.path.splitext(file)[0] - self.collection_configs[key] = data - except yaml.YAMLError as e: - self.load_errors.append({ - "error": "parse_error", - "error_id": "CFG-002", - "file": file, - "message": str(e) - }) - except Exception as e: - self.load_errors.append({ - "error": "load_error", - "error_id": "CFG-003", - "file": file, - "message": str(e) - }) - - logger.info(f"Loaded {len(self.collection_configs)} collection configurations") - - def validate_schema(self) -> List[Dict]: - """Validate all loaded schema definitions. 
- - Returns: - List of validation errors - """ - errors = [] - - # Create validation context - context: SchemaContext = { - "types": self.types, - "enumerators": self.enumerators, - "dictionaries": self.dictionaries, - "collection_configs": self.collection_configs, - "schema_name": None, - "format": None - } - - errors.extend(SchemaValidator.validate_schema(context)) - return errors - - def render_one(self, schema_name: str, format: SchemaFormat) -> Dict: - """Render a single schema version. - - Args: - schema_name: Name in the form collection.1.2.3.4 - format: Target schema format - - Returns: - Dict containing the rendered schema - """ - # Create schema context - context: SchemaContext = { - "types": self.types, - "dictionaries": self.dictionaries, - "enumerators": self.enumerators, - "collection_configs": self.collection_configs - } - - return SchemaRenderer.render_schema(schema_name, format, context) - - def render_all(self) -> Dict: - """Render all schemas in both BSON and JSON formats. - - Returns: - Dict containing rendered schemas for all collections and versions - """ - rendered = {} - - for collection_name, collection_config in self.collection_configs.items(): - for version_config in collection_config["versions"]: - # Get version string and ensure it has collection name - version_name = f"{collection_name}.{version_config["version"]}" - rendered[version_name] = {} - - # Render in both formats - for format in [SchemaFormat.BSON, SchemaFormat.JSON]: - rendered[version_name][format.value] = self.render_one(version_name, format) - - return rendered - - def apply_schema(self, version_name: str) -> Dict: - """Apply a schema to a collection. - - Args: - version_name: Name of the collection version (e.g. user.1.0.0.1) - - Returns: - Dict containing operation result in consistent format - """ - # Extract collection name using VersionNumber class (version_name is already validated) - try: - version = VersionNumber(version_name) - collection_name = version.collection_name - except ValueError as e: - # Handle case where VersionNumber fails (shouldn't happen if properly validated) - return { - "operation": "apply_schema", - "collection": version_name, # Use version_name as fallback - "message": f"Invalid version format: {str(e)}", - "details_type": "error", - "details": { - "error": str(e) - }, - "status": "error" - } - - try: - # Render and apply schema - bson_schema = self.render_one(version_name, SchemaFormat.BSON) - self.mongo.apply_schema(collection_name, bson_schema) - except ValueError as e: - return { - "operation": "apply_schema", - "collection": collection_name, - "message": f"Invalid version format: {str(e)}", - "details_type": "error", - "details": { - "error": str(e) - }, - "status": "error" - } - except Exception as e: - return { - "operation": "apply_schema", - "collection": collection_name, - "message": str(e), - "details_type": "error", - "details": { - "error": str(e) - }, - "status": "error" - } - - return { - "operation": "apply_schema", - "collection": collection_name, - "message": f"Schema applied successfully for {version_name}", - "details_type": "schema", - "details": { - "schema": bson_schema, - "version": version_name.split(".")[-1] if "." in version_name else "" - }, - "status": "success" - } - - def remove_schema(self, collection_name: str) -> Dict: - """Remove schema validation from a collection. - - Args: - collection_name: Name of the collection (e.g. 
user) - - Returns: - Dict containing operation result in consistent format - """ - try: - # Remove schema validation - self.mongo.remove_schema(collection_name) - except ValueError as e: - return { - "operation": "remove_schema", - "collection": collection_name, - "message": f"Invalid version format: {str(e)}", - "details_type": "error", - "details": { - "error": str(e) - }, - "status": "error" - } - except Exception as e: - return { - "operation": "remove_schema", - "collection": collection_name, - "message": str(e), - "details_type": "error", - "details": { - "error": str(e) - }, - "status": "error" - } - - return { - "operation": "remove_schema", - "collection": collection_name, - "message": f"Schema validation removed from {collection_name}", - "status": "success" - } \ No newline at end of file diff --git a/stage0_mongodb_api/managers/schema_renderer.py b/stage0_mongodb_api/managers/schema_renderer.py deleted file mode 100644 index 434e79a..0000000 --- a/stage0_mongodb_api/managers/schema_renderer.py +++ /dev/null @@ -1,154 +0,0 @@ -from typing import Dict, List, Optional, Any, TypedDict -from stage0_mongodb_api.managers.schema_types import SchemaType, SchemaFormat, SchemaContext -from stage0_mongodb_api.managers.version_number import VersionNumber -import logging -logger = logging.getLogger(__name__) - -class SchemaRenderer: - """Utility class for rendering schemas in different formats.""" - - @staticmethod - def render_schema(version_name: str, format: SchemaFormat, context: SchemaContext) -> Dict: - """Render a schema in the specified format.""" - version = VersionNumber(version_name) - schema = context["dictionaries"][version.get_schema_version()] - return SchemaRenderer._render(schema, format, version.get_enumerator_version(), context) - - @staticmethod - def _render(schema: Dict, format: SchemaFormat, enumerator_version: int, context: SchemaContext) -> Dict: - """ Recursively render a schema definition.""" - # Handle primitive types - if "schema" in schema or "json_type" in schema: - return SchemaRenderer._render_primitive(schema, format) - - # Handle complex types - logger.debug(f"Rendering schema: {schema}") - type_name = schema["type"] - if type_name == SchemaType.OBJECT.value: - return SchemaRenderer._render_object(schema, format, enumerator_version, context) - if type_name == SchemaType.ARRAY.value: - return SchemaRenderer._render_array(schema, format, enumerator_version, context) - if type_name in [SchemaType.ENUM.value, SchemaType.ENUM_ARRAY.value]: - return SchemaRenderer._render_enum(schema, format, enumerator_version, context) - - # Handle custom types - if type_name in context["types"]: - return SchemaRenderer._render_custom_type(schema, format, enumerator_version, context) - - raise ValueError(f"Unknown schema type: {type_name}") - - @staticmethod - def _render_primitive(schema: Dict, format: SchemaFormat) -> Dict: - """Render a primitive type definition.""" - - # Schema property - convert type to bsonType for BSON - if "schema" in schema: - rendered = schema["schema"].copy() - if format == SchemaFormat.BSON and "type" in rendered: - rendered["bsonType"] = rendered["type"] - del rendered["type"] - return rendered - - # or Use format-specific schema as-is - if format == SchemaFormat.JSON and "json_type" in schema: - return schema["json_type"].copy() - - if format == SchemaFormat.BSON and "bson_type" in schema: - return schema["bson_type"].copy() - - @staticmethod - def _render_object(schema: Dict, format: SchemaFormat, enumerator_version: int, context: SchemaContext) 
-> Dict: - """Render an object type definition.""" - type_prop = "bsonType" if format == SchemaFormat.BSON else "type" - rendered = { - "description": schema["description"], - type_prop: "object", - "additionalProperties": schema.get("additionalProperties", False), - "properties": {} - } - - # Add optional title if it exists - if "title" in schema: - rendered["title"] = schema["title"] - - required = [] - - # Render properties - for prop_name, prop_def in schema["properties"].items(): - rendered["properties"][prop_name] = SchemaRenderer._render( - prop_def, format, enumerator_version, context - ) - # If property is required, add to required list - if prop_def.get("required", False): - required.append(prop_name) - - # Render one_of if present - if "one_of" in schema: - one_of_props, one_of_required = SchemaRenderer._render_one_of( - schema["one_of"], format, enumerator_version, context - ) - rendered["properties"].update(one_of_props) - required.extend(one_of_required) - - # Add required properties if any - if required: - rendered["required"] = required - - return rendered - - @staticmethod - def _render_one_of(one_of_def: Dict, format: SchemaFormat, enumerator_version: int, context: SchemaContext) -> tuple[Dict, List[str]]: - """Render a one_of type definition. - - Returns: - Tuple of (properties dictionary, list of required property names) - """ - properties = {} - - # Add schema properties - for schema_name, schema_def in one_of_def["schemas"].items(): - properties[schema_name] = SchemaRenderer._render( - schema_def, format, enumerator_version, context - ) - - return properties, [] - - @staticmethod - def _render_array(schema: Dict, format: SchemaFormat, enumerator_version: int, context: SchemaContext) -> Dict: - """Render an array type definition.""" - type_prop = "bsonType" if format == SchemaFormat.BSON else "type" - return { - "description": schema["description"], - type_prop: "array", - "items": SchemaRenderer._render(schema["items"], format, enumerator_version, context) - } - - @staticmethod - def _render_enum(schema: Dict, format: SchemaFormat, enumerator_version: int, context: SchemaContext) -> Dict: - """Render an enum type definition.""" - type_prop = "bsonType" if format == SchemaFormat.BSON else "type" - enumerators = context["enumerators"][enumerator_version]["enumerators"][schema["enums"]] - rendered = {"description": schema["description"]} - - if schema["type"] == SchemaType.ENUM.value: - rendered[type_prop] = "string" - rendered["enum"] = list(enumerators.keys()) - return rendered - - if schema["type"] == SchemaType.ENUM_ARRAY.value: - rendered[type_prop] = "array" - rendered["items"] = { - type_prop: "string", - "enum": list(enumerators.keys()) - } - return rendered - - raise ValueError(f"Unknown schema type: {schema['type']}") - - @staticmethod - def _render_custom_type(schema: Dict, format: SchemaFormat, enumerator_version: int, context: SchemaContext) -> Dict: - """Render a custom type definition.""" - rendered = SchemaRenderer._render(context["types"][schema["type"]], format, enumerator_version, context) - rendered["description"] = schema["description"] - - return rendered \ No newline at end of file diff --git a/stage0_mongodb_api/managers/schema_types.py b/stage0_mongodb_api/managers/schema_types.py deleted file mode 100644 index b9d91a3..0000000 --- a/stage0_mongodb_api/managers/schema_types.py +++ /dev/null @@ -1,57 +0,0 @@ -from enum import Enum -from typing import Dict, Optional, TypedDict, List - -class SchemaType(str, Enum): - """Schema type definitions.""" - 
OBJECT = "object" - ARRAY = "array" - ENUM = "enum" - ENUM_ARRAY = "enum_array" - -class SchemaFormat(str, Enum): - """Schema format definitions.""" - BSON = "bson" - JSON = "json" - -class SchemaContext(TypedDict): - """Context for schema operations (validation and rendering).""" - types: Dict # Type definitions - dictionaries: Dict # Dictionary definitions - enumerators: List[Dict] # Enumerator definitions - collection_configs: Dict # Collection configurations - schema_name: Optional[str] # Full schema name with version (e.g. "user.1.0.0.1") - format: Optional[SchemaFormat] # Target format (BSON or JSON) - -class PrimitiveType(TypedDict): - """Type definition for primitive types (string, number, boolean, etc). - - These types map directly to JSON/BSON types and don't need complex validation. - """ - description: str - schema: Dict # Rules for both JSON and BSON when the only difference is type vs. bsonType - json_type: Dict # JSON Schema specific validation rules - bson_type: Dict # BSON Schema specific validation rules - -class Schema(TypedDict): - """Type definition for schema and schema properties. - - A schema can be either a root schema or a property definition. - Root schemas require title, description, and type. - Property definitions require description and type. - """ - title: Optional[str] # Best Practices for root schemas - description: str - type: str - required: Optional[List[str]] # Default is False - properties: Optional[Dict[str, 'Schema']] # Required for object type only - additionalProperties: Optional[bool] # For object type, default is False - items: Optional['Schema'] # Required for array type only - enums: Optional[str] # Required for enum and enum_array types only - one_of: Optional[Dict] # Optional for object type, defines polymorphic schemas - -class ValidationContext(TypedDict): - """Context for schema validation.""" - types: Dict - enumerators: List[Dict] - dictionaries: Dict - collection_configs: List[Dict] \ No newline at end of file diff --git a/stage0_mongodb_api/managers/schema_validator.py b/stage0_mongodb_api/managers/schema_validator.py deleted file mode 100644 index adf7867..0000000 --- a/stage0_mongodb_api/managers/schema_validator.py +++ /dev/null @@ -1,477 +0,0 @@ -from typing import Dict, List, Optional, Set -import re -import json -from stage0_py_utils import Config -from stage0_mongodb_api.managers.schema_types import SchemaType, Schema, ValidationContext -from stage0_mongodb_api.managers.version_number import VersionNumber -import logging -logger = logging.getLogger(__name__) - -class SchemaError(Exception): - """Base exception for schema-related errors.""" - pass - -class SchemaValidationError(SchemaError): - """Exception raised when schema validation fails.""" - def __init__(self, errors: List[Dict]): - self.errors = errors - super().__init__("\n".join(str(error) for error in errors)) - -class SchemaValidator: - """Static utility class for validating schemas.""" - - SCHEMA_NAME_PATTERN = r'^[a-zA-Z][a-zA-Z0-9_]*$' - VALID_TYPES = {t.value for t in SchemaType} - - @staticmethod - def validate_schema(context: ValidationContext) -> List[Dict]: - """Validate schema definitions against collection configurations. 
- - Args: - context: Validation context containing all necessary data - - Returns: - List of validation errors - """ - errors = [] - errors.extend(SchemaValidator._validate_enumerators(context["enumerators"])) - errors.extend(SchemaValidator._validate_types(context["types"], context)) - - for collection_name, collection in context["collection_configs"].items(): - # Must have a version list - if "versions" not in collection or not isinstance(collection["versions"], list): - errors.append({ - "error": "invalid_versions", - "error_id": "VLD-001", - "message": "versions must be a list" - }) - continue - - # Validate each version of the collection - for version_config in collection["versions"]: - if not isinstance(version_config, dict): - errors.append({ - "error": "invalid_version_config", - "error_id": "VLD-002", - "version_config": version_config, - "message": "Version config must be a dictionary" - }) - continue - - if "version" not in version_config or not isinstance(version_config["version"], str): - errors.append({ - "error": "missing_required_field", - "error_id": "VLD-003", - "field": "version", - "message": "Version config must have a version number" - }) - continue - - try: - # Create version number instance and ensure it has the collection name - version = VersionNumber(version_config["version"]) - if not version.collection_name: - version_str = f"{collection_name}.{version.version}" - version = VersionNumber(version_str) - except ValueError as e: - errors.append({ - "error": "invalid_version_format", - "error_id": "VLD-005", - "version": version_config["version"], - "message": str(e) - }) - continue - - # Use get_schema_version() which now handles collection names - schema_name = version.get_schema_version() - enumerator_version = version.get_enumerator_version() - - # Validate schema exists - if schema_name not in context["dictionaries"]: - errors.append({ - "error": "schema_not_found", - "error_id": "VLD-004", - "schema_name": schema_name, - "message": f"Schema not found for collection {collection_name} version {version} in dictionaries" - }) - continue - - # Validate schema structure and references - schema = context["dictionaries"][schema_name] - errors.extend(SchemaValidator._validate_complex_type(schema_name, schema, context, enumerator_version)) - - return errors - - @staticmethod - def _validate_enumerators(enumerators: List[Dict]) -> List[Dict]: - """Validate enumerator definitions against the schema. - - Validates: - 1. Each version has required fields (version, enumerators) - 2. Version number is valid - 3. Enumerator values are properly structured - 4. 
Each enumerator value has a string description - - Args: - enumerators: List of enumerator version definitions - - Returns: - List of validation errors - """ - errors = [] - - # Validate each enumerator version - for version in enumerators: - # Validate version field - if "version" not in version: - errors.append({ - "error": "missing_required_field", - "error_id": "VLD-101", - "field": "version", - "message": "Enumerator version must have a version number" - }) - elif type(version["version"]) != int: - errors.append({ - "error": "invalid_version_format", - "error_id": "VLD-102", - "version": version["version"], - "message": "Version must be an integer" - }) - - # Validate enumerators field - if "enumerators" not in version: - errors.append({ - "error": "missing_required_field", - "error_id": "VLD-103", - "field": "enumerators", - "message": "Enumerator version must have an enumerators definition" - }) - elif not isinstance(version["enumerators"], dict): - errors.append({ - "error": "invalid_enumerators_format", - "error_id": "VLD-104", - "enumerator": version.get("name", "unknown"), - "message": "Enumerators must be a dictionary of values to descriptions" - }) - else: - # Validate each enumerator values - for name, enumerations in version["enumerators"].items(): - if not isinstance(name, str): - errors.append({ - "error": "invalid_enumerator_name", - "error_id": "VLD-105", - "value": str(name), - "message": "Enumerator name must be a string" - }) - if not isinstance(enumerations, dict): - errors.append({ - "error": "invalid_enumerations", - "error_id": "VLD-106", - "description": enumerations, - "message": "Enumerators bust be an object" - }) - else: - for value, description in enumerations.items(): - if not isinstance(value, str): - errors.append({ - "error": "invalid_enumerator_value", - "error_id": "VLD-107", - "value": str(value), - "message": "Enumerator value must be a string" - }) - if not isinstance(description, str): - errors.append({ - "error": "invalid_enumerator_description", - "error_id": "VLD-108", - "description": str(description), - "message": "Enumerator description must be a string" - }) - - return errors - - @staticmethod - def _validate_types(types: Dict, context: ValidationContext) -> List[Dict]: - """Validate custom type definitions. 
- - Args: - types: Dictionary of custom type definitions - context: Validation context containing all necessary data - - Returns: - List of validation errors - """ - errors = [] - validated = set() # Track which types we've validated - - # Validate each type - for type_name, type_def in types.items(): - if type_name in validated: - continue - if any(key in type_def for key in {"schema", "json_type", "bson_type"}): - errors.extend(SchemaValidator._validate_primitive_type(type_name, type_def)) - else: - errors.extend(SchemaValidator._validate_complex_type(type_name, type_def, context)) - validated.add(type_name) - - return errors - - @staticmethod - def _validate_primitive_type(type_name: str, type_def: Dict) -> List[Dict]: - """Validate a primitive custom type definition.""" - type_errors = [] - - # Validate schema or json_type/bson_type - has_schema = "schema" in type_def - has_json_type = "json_type" in type_def - has_bson_type = "bson_type" in type_def - - # must have either schema or both json_type and bson_type - if not (has_schema or (has_json_type and has_bson_type)): - type_errors.append({ - "error": "invalid_primitive_type", - "error_id": "VLD-201", - "type": type_name, - "message": "Primitive type must have either schema or both json_type and bson_type" - }) - - if has_schema and not isinstance(type_def["schema"], dict): - type_errors.append({ - "error": "invalid_primitive_type", - "error_id": "VLD-202", - "type": type_name, - "message": "Primitive type `schema` must be a valid object" - }) - - if has_json_type and not isinstance(type_def["json_type"], dict): - type_errors.append({ - "error": "invalid_primitive_type", - "error_id": "VLD-203", - "type": type_name, - "message": "Primitive type `json_type` must be a valid object" - }) - - if has_bson_type and not isinstance(type_def["bson_type"], dict): - type_errors.append({ - "error": "invalid_primitive_type", - "error_id": "VLD-204", - "type": type_name, - "message": "Primitive type `bson_type` must be a valid object" - }) - - logging.info(f"Validated primitive type: {type_name}") - return type_errors - - @staticmethod - def _validate_complex_type(prop_name: str, prop_def: Dict, context: ValidationContext, enumerator_version: Optional[int] = 0, visited: Optional[Set[str]] = None) -> List[Dict]: - """Validate a complex type definition. 
- - Args: - prop_name: Name of the property being validated - prop_def: Property definition to validate - context: Validation context containing all necessary data - enumerator_version: Current enumerator version for enum validation - visited: Set of already visited paths (for cycle detection) - - Returns: - List of validation errors - """ - if visited is None: - visited = set() - - # Check for circular references - if prop_name in visited: - return [{ - "error": "circular_reference", - "error_id": "VLD-020", - "type": prop_name, - "message": f"Circular reference detected in type: {prop_name}" - }] - - visited.add(prop_name) - try: - return SchemaValidator._validate_complex_type_properties(prop_name, prop_def, context, enumerator_version, visited) - finally: - visited.remove(prop_name) - logging.info(f"Validated complex type: {prop_name}") - - - @staticmethod - def _validate_complex_type_properties(prop_name: str, prop_def: Dict, context: ValidationContext, enumerator_version: Optional[int], visited: Set[str]) -> List[Dict]: - """Internal validation logic for complex types.""" - type_errors = [] - - # Validate basic structure - if not isinstance(prop_def, dict): - type_errors.extend([{ - "error": "invalid_type", - "error_id": "VLD-301", - "type": prop_name, - "message": "Type must be a valid object" - }]) - return type_errors - - # Validate required fields - type_errors.extend(SchemaValidator._validate_required_fields(prop_name, prop_def)) - if type_errors: - return type_errors - - if prop_def["type"] not in SchemaValidator.VALID_TYPES: - return SchemaValidator._validate_custom_type(prop_name, prop_def["type"], context) - - # Validate type-specific properties - if prop_def["type"] == SchemaType.OBJECT.value: - type_errors.extend(SchemaValidator._validate_object_type(prop_name, prop_def, context, enumerator_version, visited)) - elif prop_def["type"] == SchemaType.ARRAY.value: - type_errors.extend(SchemaValidator._validate_array_type(prop_name, prop_def, context, enumerator_version, visited)) - elif prop_def["type"] in [SchemaType.ENUM.value, SchemaType.ENUM_ARRAY.value]: - type_errors.extend(SchemaValidator._validate_enum_type(prop_name, prop_def, context, enumerator_version)) - - return type_errors - - @staticmethod - def _validate_required_fields(prop_name: str, prop_def: Dict) -> List[Dict]: - """Validate required fields in a type definition.""" - errors = [] - for field in ["type", "description"]: - if field not in prop_def: - errors.append({ - "error": "missing_required_field", - "error_id": "VLD-401", - "type": prop_name, - "field": field, - "message": f"Missing required field: {field}" - }) - return errors - - @staticmethod - def _validate_custom_type(prop_name: str, type_name: str, context: ValidationContext) -> List[Dict]: - """Validate a custom type reference.""" - if type_name not in context["types"]: - return [{ - "error": "invalid_type", - "error_id": "VLD-601", - "type": prop_name, - "value": type_name, - "message": f"Unknown type: {type_name}" - }] - return [] - - @staticmethod - def _validate_object_type(prop_name: str, prop_def: Dict, context: ValidationContext, enumerator_version: Optional[int], visited: Set[str]) -> List[Dict]: - """Validate an object type definition.""" - errors = [] - - if "properties" not in prop_def: - errors.append({ - "error": "missing_required_field", - "error_id": "VLD-701", - "type": prop_name, - "field": "properties", - "message": f"Missing required field: properties" - }) - return errors - - # Validate properties if present - if "properties" in 
prop_def: - for nested_name, nested_def in prop_def["properties"].items(): - errors.extend(SchemaValidator._validate_complex_type( - f"{prop_name}.{nested_name}", - nested_def, - context, - enumerator_version, - visited - )) - - # Validate one_of if present - if "one_of" in prop_def: - errors.extend(SchemaValidator._validate_one_of_type(prop_name, prop_def["one_of"], context, enumerator_version, visited)) - - return errors - - @staticmethod - def _validate_array_type(prop_name: str, prop_def: Dict, context: ValidationContext, enumerator_version: Optional[int], visited: Set[str]) -> List[Dict]: - """Validate an array type definition.""" - if "items" not in prop_def: - return [{ - "error": "invalid_array_items", - "error_id": "VLD-801", - "type": prop_name, - "message": f"Array type {prop_name} must have items definition" - }] - - return SchemaValidator._validate_complex_type( - f"{prop_name}.items", - prop_def["items"], - context, - enumerator_version, - visited - ) - - @staticmethod - def _validate_enum_type(prop_name: str, prop_def: Dict, context: ValidationContext, enumerator_version: Optional[int] = 0) -> List[Dict]: - """Validate an enum type definition.""" - if "enums" not in prop_def: - return [{ - "error": "invalid_enum_reference", - "error_id": "VLD-901", - "type": prop_name, - "message": f"Enum type {prop_name} must have valid enums definition" - }] - - if prop_def["enums"] not in context["enumerators"][enumerator_version]["enumerators"]: - return [{ - "error": "invalid_enum_reference", - "error_id": "VLD-902", - "type": prop_name, - "enum": prop_def["enums"], - "message": f"Enum type {prop_def['enums']} not found in version {enumerator_version}" - }] - - return [] - - @staticmethod - def _validate_one_of_type(prop_name: str, one_of_def: Dict, context: ValidationContext, enumerator_version: Optional[int], visited: Set[str]) -> List[Dict]: - """Validate a one_of type definition.""" - logging.info(f"Validating one_of type: {prop_name}") - errors = [] - - if not isinstance(one_of_def, dict): - errors.append({ - "error": "invalid_one_of_format", - "error_id": "VLD-1001", - "type": prop_name, - "message": f"OneOf definition must be a valid object" - }) - return errors - - if "type_property" not in one_of_def: - errors.append({ - "error": "invalid_one_of_type_property", - "error_id": "VLD-1002", - "type": prop_name, - "message": f"OneOf definition must have a type_property" - }) - return errors - - if "schemas" not in one_of_def: - errors.append({ - "error": "invalid_one_of_schemas", - "error_id": "VLD-1003", - "type": prop_name, - "message": f"OneOf definition must have schemas" - }) - return errors - - # Validate each schema in the one_of definition - for schema_name, schema_def in one_of_def["schemas"].items(): - # Validate as a complex type (all $ref objects will have been resolved during loading) - errors.extend(SchemaValidator._validate_complex_type( - f"{prop_name}.{schema_name}", - schema_def, - context, - enumerator_version, - visited - )) - - return errors - \ No newline at end of file diff --git a/stage0_mongodb_api/managers/version_manager.py b/stage0_mongodb_api/managers/version_manager.py deleted file mode 100644 index b5d27ad..0000000 --- a/stage0_mongodb_api/managers/version_manager.py +++ /dev/null @@ -1,126 +0,0 @@ -import re -import logging -from typing import Optional, Dict, List -from stage0_py_utils import MongoIO, Config -from stage0_mongodb_api.managers.version_number import VersionNumber - -logger = logging.getLogger(__name__) - -class VersionManager: - 
"""Static class for managing collection version tracking in MongoDB. - - This class focuses on: - 1. Reading current versions from the database - 2. Updating version records - 3. Version comparison and validation - """ - - @staticmethod - def get_current_version(collection_name: str) -> str: - """Get the current version of a collection. - - Args: - collection_name: Name of the collection to get version for - - Returns: - str: Version string in format major.minor.patch.schema or collection.major.minor.patch.schema - - Raises: - ValueError: If collection_name is empty or invalid - RuntimeError: If multiple versions exist for collection - """ - if not collection_name: - raise ValueError("Collection name cannot be empty") - - mongo = MongoIO.get_instance() - config = Config.get_instance() - - version_docs = mongo.get_documents( - config.VERSION_COLLECTION_NAME, - match={"collection_name": collection_name} - ) - - if not version_docs: - return f"{collection_name}.0.0.0.0" - - if len(version_docs) > 1: - raise RuntimeError(f"Multiple versions found for collection: {collection_name}") - - current_version = version_docs[0].get("current_version") - if not current_version: - raise RuntimeError(f"Invalid version document for collection: {collection_name}") - - # Ensure version includes collection name - version = VersionNumber(current_version) - if not version.collection_name: - return f"{collection_name}.{current_version}" - return current_version - - @staticmethod - def update_version(collection_name: str, version: str) -> Dict: - """Update the version of a collection. - - Args: - collection_name: Name of the collection - version: Version string in format major.minor.patch.schema or collection.major.minor.patch.schema - - Returns: - Dict containing operation result in consistent format - - Raises: - ValueError: If version format is invalid or collection_name is empty - """ - if not collection_name: - raise ValueError("Collection name cannot be empty") - - # Validate version by attempting to create a VersionNumber instance - version_obj = VersionNumber(version) - - # Ensure version includes collection name - if not version_obj.collection_name: - version = f"{collection_name}.{version}" - - mongo = MongoIO.get_instance() - config = Config.get_instance() - - # Upsert version document - version_doc = mongo.upsert_document( - config.VERSION_COLLECTION_NAME, - match={"collection_name": collection_name}, - data={"collection_name": collection_name, "current_version": version} - ) - - if not version_doc: - raise RuntimeError(f"Failed to update version for collection: {collection_name}") - - return { - "operation": "version_update", - "collection": collection_name, - "message": f"Version updated to {version} for {collection_name}", - "details_type": "version", - "details": { - "version": version - }, - "status": "success" - } - - @staticmethod - def get_pending_versions(collection_name: str, available_versions: List[str]) -> List[str]: - """Get list of versions that need to be processed for a collection. 
- - Args: - collection_name: Name of the collection - available_versions: List of available version strings to check - - Returns: - List[str]: List of version strings that are newer than current version - """ - current_version = VersionNumber(VersionManager.get_current_version(collection_name)) - pending_versions = [] - - for version_str in available_versions: - version_number = VersionNumber(version_str) - if version_number > current_version: - pending_versions.append(version_str) - - return sorted(pending_versions, key=lambda v: VersionNumber(v)) diff --git a/stage0_mongodb_api/managers/version_number.py b/stage0_mongodb_api/managers/version_number.py deleted file mode 100644 index 8489893..0000000 --- a/stage0_mongodb_api/managers/version_number.py +++ /dev/null @@ -1,111 +0,0 @@ -import re -from typing import List, Optional - -class VersionNumber: - """Class for handling four-part version numbers. - - Version format: major.minor.patch.enumerator - Example: 1.0.0.1 - Also supports collection.major.minor.patch.enumerator - Example: user.1.0.0.1 - """ - - MAX_VERSION = 999 - VERSION_PATTERN = r'^\d+\.\d+\.\d+\.\d+$' - - def __init__(self, version: str): - """Initialize a VersionNumber instance. - - Args: - version: Version string in format major.minor.patch.enumerator or collection.major.minor.patch.enumerator - - Raises: - ValueError: If version format is invalid or numbers exceed MAX_VERSION - """ - if not version: - raise ValueError("Version string cannot be empty") - - # Check for invalid dot patterns first - if version.startswith('.') or version.endswith('.') or '..' in version: - raise ValueError(f"Invalid version format: {version} - cannot have leading/trailing dots or consecutive dots") - - # Split into parts and check if first part is a collection name - parts = version.split('.') - if len(parts) == 5: # collection.major.minor.patch.enumerator - self.collection_name = parts[0] - version_str = '.'.join(parts[1:]) - else: - self.collection_name = None - version_str = version - - if not re.match(self.VERSION_PATTERN, version_str): - raise ValueError(f"Invalid version format: {version_str}") - - self.version = version_str - self.parts = [int(part) for part in version_str.split('.')] - - # Validate that all version parts are within acceptable range. - if any(part > self.MAX_VERSION for part in self.parts): - raise ValueError(f"Version components must not exceed {self.MAX_VERSION}") - - def get_schema_version(self) -> str: - """Get the three-part schema version, including collection name if present.""" - schema_version = '.'.join(str(p) for p in self.parts[:3]) - if self.collection_name: - return f"{self.collection_name}.{schema_version}" - return schema_version - - def get_enumerator_version(self) -> int: - """Get the enumerator version.""" - return self.parts[3] - - def get_full_version(self) -> str: - """Get the full version string including collection name if present.""" - if self.collection_name: - return f"{self.collection_name}.{self.version}" - return self.version - - def __lt__(self, other: 'VersionNumber') -> bool: - """Compare if this version is less than another version. - - Comparison is based on version numbers only, ignoring collection names. - """ - if not isinstance(other, VersionNumber): - other = VersionNumber(other) - return self.parts < other.parts - - def __gt__(self, other: 'VersionNumber') -> bool: - """Compare if this version is greater than another version. - - Comparison is based on version numbers only, ignoring collection names. 
- """ - if not isinstance(other, VersionNumber): - other = VersionNumber(other) - return self.parts > other.parts - - def __eq__(self, other: 'VersionNumber') -> bool: - """Compare if this version equals another version. - - Comparison is based on version numbers only, ignoring collection names. - """ - if not isinstance(other, VersionNumber): - other = VersionNumber(other) - return self.parts == other.parts - - def __le__(self, other: 'VersionNumber') -> bool: - """Compare if this version is less than or equal to another version. - - Comparison is based on version numbers only, ignoring collection names. - """ - return self < other or self == other - - def __ge__(self, other: 'VersionNumber') -> bool: - """Compare if this version is greater than or equal to another version. - - Comparison is based on version numbers only, ignoring collection names. - """ - return self > other or self == other - - def __str__(self) -> str: - """Return the version string.""" - return self.get_full_version() \ No newline at end of file diff --git a/stage0_mongodb_api/routes/collection_routes.py b/stage0_mongodb_api/routes/collection_routes.py deleted file mode 100644 index 79e427e..0000000 --- a/stage0_mongodb_api/routes/collection_routes.py +++ /dev/null @@ -1,102 +0,0 @@ -from flask import Blueprint, jsonify, request -from stage0_mongodb_api.services.collection_service import CollectionService, CollectionNotFoundError, CollectionProcessingError -from stage0_py_utils import create_flask_breadcrumb, create_flask_token -import logging - -logger = logging.getLogger(__name__) - -def create_collection_routes(): - blueprint = Blueprint('collections', __name__) - - @blueprint.route('/', methods=['GET']) - def list_collections(): - """List all configured collections""" - token = create_flask_token() - breadcrumb = create_flask_breadcrumb(token) - - try: - collections = CollectionService.list_collections(token) - logger.info(f"{breadcrumb} Successfully listed collections") - return jsonify(collections) - except CollectionProcessingError as e: - logger.error(f"{breadcrumb} Error listing collections: {str(e)}") - return jsonify(e.errors), 500 - except Exception as e: - logger.error(f"{breadcrumb} Unexpected error listing collections: {str(e)}") - return jsonify([{ - "error": "Failed to list collections", - "error_id": "API-001", - "message": str(e) - }]), 500 - - @blueprint.route('/', methods=['POST']) - def process_collections(): - """Process all configured collections""" - token = create_flask_token() - breadcrumb = create_flask_breadcrumb(token) - - try: - result = CollectionService.process_collections(token) - logger.info(f"{breadcrumb} Successfully processed collections") - return jsonify(result) - except CollectionProcessingError as e: - logger.error(f"{breadcrumb} Error processing collections: {str(e)}") - return jsonify(e.errors), 500 - except Exception as e: - logger.error(f"{breadcrumb} Unexpected error processing collections: {str(e)}") - return jsonify([{ - "error": "Failed to process collections", - "error_id": "API-002", - "message": str(e) - }]), 500 - - @blueprint.route('//', methods=['GET']) - def get_collection(collection_name): - """Get a specific collection configuration""" - token = create_flask_token() - breadcrumb = create_flask_breadcrumb(token) - - try: - result = CollectionService.get_collection(collection_name, token) - logger.info(f"{breadcrumb} Successfully retrieved collection: {collection_name}") - return jsonify(result) - except CollectionNotFoundError: - logger.warning(f"{breadcrumb} 
diff --git a/stage0_mongodb_api/routes/collection_routes.py b/stage0_mongodb_api/routes/collection_routes.py deleted file mode 100644 index 79e427e..0000000 --- a/stage0_mongodb_api/routes/collection_routes.py +++ /dev/null @@ -1,102 +0,0 @@ -from flask import Blueprint, jsonify, request -from stage0_mongodb_api.services.collection_service import CollectionService, CollectionNotFoundError, CollectionProcessingError -from stage0_py_utils import create_flask_breadcrumb, create_flask_token -import logging - -logger = logging.getLogger(__name__) - -def create_collection_routes(): - blueprint = Blueprint('collections', __name__) - - @blueprint.route('/', methods=['GET']) - def list_collections(): - """List all configured collections""" - token = create_flask_token() - breadcrumb = create_flask_breadcrumb(token) - - try: - collections = CollectionService.list_collections(token) - logger.info(f"{breadcrumb} Successfully listed collections") - return jsonify(collections) - except CollectionProcessingError as e: - logger.error(f"{breadcrumb} Error listing collections: {str(e)}") - return jsonify(e.errors), 500 - except Exception as e: - logger.error(f"{breadcrumb} Unexpected error listing collections: {str(e)}") - return jsonify([{ - "error": "Failed to list collections", - "error_id": "API-001", - "message": str(e) - }]), 500 - - @blueprint.route('/', methods=['POST']) - def process_collections(): - """Process all configured collections""" - token = create_flask_token() - breadcrumb = create_flask_breadcrumb(token) - - try: - result = CollectionService.process_collections(token) - logger.info(f"{breadcrumb} Successfully processed collections") - return jsonify(result) - except CollectionProcessingError as e: - logger.error(f"{breadcrumb} Error processing collections: {str(e)}") - return jsonify(e.errors), 500 - except Exception as e: - logger.error(f"{breadcrumb} Unexpected error processing collections: {str(e)}") - return jsonify([{ - "error": "Failed to process collections", - "error_id": "API-002", - "message": str(e) - }]), 500 - - @blueprint.route('/<collection_name>/', methods=['GET']) - def get_collection(collection_name): - """Get a specific collection configuration""" - token = create_flask_token() - breadcrumb = create_flask_breadcrumb(token) - - try: - result = CollectionService.get_collection(collection_name, token) - logger.info(f"{breadcrumb} Successfully retrieved collection: {collection_name}") - return jsonify(result) - except CollectionNotFoundError: - logger.warning(f"{breadcrumb} Collection not found: {collection_name}") - return "Collection not found", 404 - except CollectionProcessingError as e: - logger.error(f"{breadcrumb} Error getting collection {collection_name}: {str(e)}") - return jsonify(e.errors), 500 - except Exception as e: - logger.error(f"{breadcrumb} Unexpected error getting collection {collection_name}: {str(e)}") - return jsonify([{ - "error": "Failed to get collection", - "error_id": "API-003", - "message": str(e) - }]), 500 - - @blueprint.route('/<collection_name>/', methods=['POST']) - def process_collection(collection_name): - """Process a specific collection""" - token = create_flask_token() - breadcrumb = create_flask_breadcrumb(token) - - try: - result = CollectionService.process_collection(collection_name, token) - logger.info(f"{breadcrumb} Successfully processed collection: {collection_name}") - return jsonify(result) - except CollectionNotFoundError: - logger.warning(f"{breadcrumb} Collection not found: {collection_name}") - return "Collection not found", 404 - except CollectionProcessingError as e: - logger.error(f"{breadcrumb} Error processing collection {collection_name}: {str(e)}") - return jsonify(e.errors), 500 - except Exception as e: - logger.error(f"{breadcrumb} Unexpected error processing collection {collection_name}: {str(e)}") - return jsonify([{ - "error": "Failed to process collection", - "error_id": "API-004", - "message": str(e) - }]), 500 - - logger.info("Collection Flask Routes Registered") - return blueprint \ No newline at end of file
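Note the two per-collection rules above originally read `'//'`, which Flask cannot bind to the `collection_name` argument; the restored `'/<collection_name>/'` matches the handler signatures. A hypothetical smoke test (not present in the repo) shows how the deleted blueprint wires up; it assumes a reachable MongoDB and whatever request headers stage0_py_utils' create_flask_token expects.

```python
# Hypothetical smoke test for the deleted collection routes (not repo code).
from flask import Flask
from stage0_mongodb_api.routes.collection_routes import create_collection_routes

app = Flask(__name__)
app.url_map.strict_slashes = False  # mirrors server.py below
app.register_blueprint(create_collection_routes(), url_prefix='/api/collections')

with app.test_client() as client:
    # [{collection_name, version, targeted_version}, ...] on success
    listing = client.get('/api/collections/')
    # 404 "Collection not found" for unknown names; failures return a JSON
    # list of {"error", "error_id", "message"} dicts with HTTP 500.
    single = client.get('/api/collections/search/')
```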
diff --git a/stage0_mongodb_api/routes/render_routes.py b/stage0_mongodb_api/routes/render_routes.py deleted file mode 100644 index 020fa33..0000000 --- a/stage0_mongodb_api/routes/render_routes.py +++ /dev/null @@ -1,86 +0,0 @@ -from flask import Blueprint, jsonify, request -import yaml -from stage0_mongodb_api.managers.config_manager import ConfigManager -from stage0_mongodb_api.services.render_service import RenderService, RenderNotFoundError, RenderProcessingError -from stage0_py_utils import create_flask_breadcrumb, create_flask_token -import logging - -logger = logging.getLogger(__name__) - -def create_render_routes(): - blueprint = Blueprint('renders', __name__) - - @blueprint.route('json_schema/<schema_name>/', methods=['GET']) - def render_json_schema(schema_name): - """Render Json Schema for a schema""" - token = create_flask_token() - breadcrumb = create_flask_breadcrumb(token) - - try: - schema = RenderService.render_json_schema(schema_name, token) - logger.info(f"{breadcrumb} Successfully rendered JSON schema for: {schema_name}") - return jsonify(schema) - except RenderNotFoundError: - logger.warning(f"{breadcrumb} Schema not found for JSON schema rendering: {schema_name}") - return "Schema not found", 404 - except RenderProcessingError as e: - logger.error(f"{breadcrumb} Error rendering JSON schema for {schema_name}: {str(e)}") - return jsonify(e.errors), 500 - except Exception as e: - logger.error(f"{breadcrumb} Unexpected error rendering JSON schema for {schema_name}: {str(e)}") - return jsonify([{ - "error": "Failed to render JSON schema", - "error_id": "API-005", - "message": str(e) - }]), 500 - - @blueprint.route('bson_schema/<schema_name>/', methods=['GET']) - def render_bson_schema(schema_name): - """Render Bson Schema for a schema""" - token = create_flask_token() - breadcrumb = create_flask_breadcrumb(token) - - try: - schema = RenderService.render_bson_schema(schema_name, token) - logger.info(f"{breadcrumb} Successfully rendered BSON schema for: {schema_name}") - return jsonify(schema) - except RenderNotFoundError: - logger.warning(f"{breadcrumb} Schema not found for BSON schema rendering: {schema_name}") - return "Schema not found", 404 - except RenderProcessingError as e: - logger.error(f"{breadcrumb} Error rendering BSON schema for {schema_name}: {str(e)}") - return jsonify(e.errors), 500 - except Exception as e: - logger.error(f"{breadcrumb} Unexpected error rendering BSON schema for {schema_name}: {str(e)}") - return jsonify([{ - "error": "Failed to render BSON schema", - "error_id": "API-006", - "message": str(e) - }]), 500 - - @blueprint.route('openapi/<schema_name>/', methods=['GET']) - def render_openapi(schema_name): - """Render OpenAPI for a schema""" - token = create_flask_token() - breadcrumb = create_flask_breadcrumb(token) - - try: - openapi = RenderService.render_openapi(schema_name, token) - logger.info(f"{breadcrumb} Successfully rendered OpenAPI for: {schema_name}") - return yaml.dump(openapi) - except RenderNotFoundError: - logger.warning(f"{breadcrumb} Schema not found for OpenAPI rendering: {schema_name}") - return "Schema not found", 404 - except RenderProcessingError as e: - logger.error(f"{breadcrumb} Error rendering OpenAPI for {schema_name}: {str(e)}") - return jsonify(e.errors), 500 - except Exception as e: - logger.error(f"{breadcrumb} Unexpected error rendering OpenAPI for {schema_name}: {str(e)}") - return jsonify([{ - "error": "Failed to render OpenAPI", - "error_id": "API-007", - "message": str(e) - }]), 500 - - logger.info("Render Flask Routes Registered") - return blueprint \ No newline at end of file
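As with the collection routes, the three rules above had lost their `<schema_name>` placeholders (`'json_schema//'` and so on) and have been restored to match the handler arguments. The rules carry no leading slash, which works because Flask joins them onto the blueprint's url_prefix at registration. The sketch below is hypothetical and environment-dependent, not repo code.

```python
# Hypothetical exercise of the deleted render routes (illustrative only).
from flask import Flask
from stage0_mongodb_api.routes.render_routes import create_render_routes

app = Flask(__name__)
app.register_blueprint(create_render_routes(), url_prefix='/api/render')

with app.test_client() as client:
    # Schema names carry the full version, e.g. "search.1.0.0.1".
    json_schema = client.get('/api/render/json_schema/search.1.0.0.1/')
    bson_schema = client.get('/api/render/bson_schema/search.1.0.0.1/')
    # The openapi route returns YAML text (yaml.dump of the service's dict).
    yaml_text = client.get('/api/render/openapi/search.1.0.0.1/').get_data(as_text=True)
```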
{validate_errors}") - sys.exit(1) - - # Process all collections - processing_output = config_manager.process_all_collections() - logger.info(f"Processing Output: {app.json.dumps(processing_output)}") - logger.info(f"============= Auto Processing is Completed ===============") - -if config.EXIT_AFTER_PROCESSING: - logger.info(f"============= Exiting After Processing ===============") - sys.exit(0) - -# Apply Prometheus monitoring middleware -metrics = PrometheusMetrics(app, path='/api/health') -metrics.info('app_info', 'Application info', version=config.BUILT_AT) - -# Register flask routes -from stage0_py_utils import create_config_routes -from stage0_mongodb_api.routes.collection_routes import create_collection_routes -from stage0_mongodb_api.routes.render_routes import create_render_routes - -app.register_blueprint(create_config_routes(), url_prefix='/api/config') -app.register_blueprint(create_collection_routes(), url_prefix='/api/collections') -app.register_blueprint(create_render_routes(), url_prefix='/api/render') -logger.info(f"============= Routes Registered ===============") -# Start the server (only when run directly, not when imported by Gunicorn) -if __name__ == "__main__": - logger.info(f"============= Starting Server ===============") - logger.info(f"Starting Flask server on port {config.MONGODB_API_PORT}...") - app.run(host="0.0.0.0", port=config.MONGODB_API_PORT) - \ No newline at end of file diff --git a/stage0_mongodb_api/services/collection_service.py b/stage0_mongodb_api/services/collection_service.py deleted file mode 100644 index 4f6172e..0000000 --- a/stage0_mongodb_api/services/collection_service.py +++ /dev/null @@ -1,208 +0,0 @@ -from typing import Dict, List, Optional -import logging -from stage0_py_utils import Config -from stage0_mongodb_api.managers.config_manager import ConfigManager -from stage0_mongodb_api.managers.version_manager import VersionManager - -logger = logging.getLogger(__name__) - -class CollectionNotFoundError(Exception): - """Raised when a collection is not found.""" - def __init__(self, collection_name: str, errors: List[Dict] = None): - self.collection_name = collection_name - self.errors = errors or [] - super().__init__(f"Collection '{collection_name}' not found") - -class CollectionProcessingError(Exception): - """Raised when collection processing fails.""" - def __init__(self, collection_name: str, errors: List[Dict]): - self.collection_name = collection_name - self.errors = errors - super().__init__(f"Failed to process collection '{collection_name}'") - -class CollectionService: - """ - Utility class that backs the API collection routes. - """ - - @staticmethod - def list_collections(token: Dict = None) -> List[Dict]: - """List all configured collections. 
- - Args: - token: Authentication token for RBAC enforcement - - Returns: - List of dictionaries with collection_name, version, and targeted_version - - Raises: - CollectionProcessingError: If there are load or validation errors - """ - config_manager = ConfigManager() - - # Check for load errors - if config_manager.load_errors: - raise CollectionProcessingError("collections", config_manager.load_errors) - - # Check for validation errors - validation_errors = config_manager.validate_configs() - if validation_errors: - raise CollectionProcessingError("collections", validation_errors) - - # Create a list of collection objects matching the OpenAPI schema - collections = [] - for collection_name, collection in config_manager.collection_configs.items(): - # Get current version - current_version = VersionManager.get_current_version(collection_name) - - # Get targeted version (last version in the versions array) - versions = collection.get("versions", []) - targeted_version = f"{collection_name}.{versions[-1]['version']}" if versions else None - - collections.append({ - "collection_name": collection_name, - "version": current_version, - "targeted_version": targeted_version - }) - return collections - - @staticmethod - def get_collection(collection_name: str, token: Dict = None) -> Dict: - """Get a collection configuration. - - Args: - collection_name: Name of the collection to get - token: Authentication token for RBAC enforcement - - Returns: - Dict containing collection configuration - - Raises: - CollectionNotFoundError: If collection is not found - CollectionProcessingError: If there are load or validation errors - """ - config_manager = ConfigManager() - - # Check for load errors - if config_manager.load_errors: - raise CollectionProcessingError(collection_name, config_manager.load_errors) - - # Check for validation errors - validation_errors = config_manager.validate_configs() - if validation_errors: - raise CollectionProcessingError(collection_name, validation_errors) - - collection = config_manager.get_collection_config(collection_name) - if not collection: - raise CollectionNotFoundError(collection_name) - - return collection - - @staticmethod - def process_collections(token: Dict = None) -> List[Dict]: - """Process all configured collections. 
- - Args: - token: Authentication token for RBAC enforcement - - Returns: - List of processing results for each collection and enumerators - - Raises: - CollectionProcessingError: If there are load or validation errors - """ - config_manager = ConfigManager() - - # Check for load errors - if config_manager.load_errors: - raise CollectionProcessingError("collections", config_manager.load_errors) - - # Check for validation errors - validation_errors = config_manager.validate_configs() - if validation_errors: - raise CollectionProcessingError("collections", validation_errors) - - # Use process_all_collections to include enumerators processing - all_results = config_manager.process_all_collections() - - # Convert the dictionary format to list format for API consistency - results = [] - for collection_name, operations in all_results.items(): - # Check if any operations have an error status - has_errors = any( - isinstance(op, dict) and op.get("status") == "error" - for op in operations - ) - - results.append({ - "collection": collection_name, - "operations": operations, - "status": "error" if has_errors else "success" - }) - - return results - - @staticmethod - def process_collection(collection_name: str, token: Dict = None) -> Dict: - """Process a collection configuration. - - Args: - collection_name: Name of the collection to process - token: Authentication token for RBAC enforcement - - Returns: - Dict containing processing results: - { - "status": "success", - "collection": str, - "operations": List[Dict] # List of operation results - } - - Raises: - CollectionNotFoundError: If collection is not found - CollectionProcessingError: If there are load, validation, or processing errors - """ - config_manager = ConfigManager() - - # Check for load errors - if config_manager.load_errors: - raise CollectionProcessingError(collection_name, config_manager.load_errors) - - # Check for validation errors - validation_errors = config_manager.validate_configs() - if validation_errors: - raise CollectionProcessingError(collection_name, validation_errors) - - # Process collection versions through config manager - try: - operations = config_manager.process_collection_versions(collection_name) - except ValueError as e: - # ConfigManager.process_collection_versions raises ValueError for not found collections - if "not found in configurations" in str(e): - raise CollectionNotFoundError(collection_name) - else: - logger.error(f"Error processing collection {collection_name}: {str(e)}") - raise CollectionProcessingError(collection_name, [{ - "error": "processing_error", - "error_id": "API-005", - "message": str(e) - }]) - except Exception as e: - logger.error(f"Error processing collection {collection_name}: {str(e)}") - raise CollectionProcessingError(collection_name, [{ - "error": "processing_error", - "error_id": "API-005", - "message": str(e) - }]) - - # Check if any operations have an error status - has_errors = any( - isinstance(op, dict) and op.get("status") == "error" - for op in operations - ) - - return { - "collection": collection_name, - "operations": operations, - "status": "error" if has_errors else "success" - } diff --git a/stage0_mongodb_api/services/render_service.py b/stage0_mongodb_api/services/render_service.py deleted file mode 100644 index 2891660..0000000 --- a/stage0_mongodb_api/services/render_service.py +++ /dev/null @@ -1,117 +0,0 @@ -from typing import Dict, List, Optional -from stage0_mongodb_api.managers.schema_manager import SchemaManager, SchemaFormat -from 
stage0_mongodb_api.managers.version_number import VersionNumber -from stage0_mongodb_api.managers.config_manager import ConfigManager -import logging - -logger = logging.getLogger(__name__) - -class RenderNotFoundError(Exception): - """Raised when a schema is not found for rendering.""" - def __init__(self, schema_name: str, errors: List[Dict] = None): - self.schema_name = schema_name - self.errors = errors or [] - super().__init__(f"Schema '{schema_name}' not found for rendering") - -class RenderProcessingError(Exception): - """Raised when schema rendering fails.""" - def __init__(self, schema_name: str, errors: List[Dict]): - self.schema_name = schema_name - self.errors = errors - super().__init__(f"Failed to render schema '{schema_name}'") - -class RenderService: - """Service for rendering schemas in different formats.""" - - @staticmethod - def render_json_schema(schema_name: str, token: Dict = None) -> Dict: - """Render a JSON schema for a schema name - - Args: - schema_name: Complete schema name including version (e.g., "collection.1.0.0.1") - token: Authentication token for RBAC enforcement - - Returns: - Dict containing the rendered JSON schema - - Raises: - RenderNotFoundError: If schema is not found - RenderProcessingError: If there are load, validation, or rendering errors - """ - config_manager = ConfigManager() - schema_manager = SchemaManager(config_manager.collection_configs) - - # Check for load errors - if config_manager.load_errors: - raise RenderProcessingError(schema_name, config_manager.load_errors) - - # Check for validation errors - validation_errors = config_manager.validate_configs() - if validation_errors: - raise RenderProcessingError(schema_name, validation_errors) - - try: - return schema_manager.render_one(schema_name, SchemaFormat.JSON) - except Exception as e: - logger.error(f"Error rendering JSON schema for {schema_name}: {str(e)}") - raise RenderProcessingError(schema_name, [{ - "error": "rendering_error", - "error_id": "RND-002", - "message": str(e) - }]) - - @staticmethod - def render_bson_schema(schema_name: str, token: Dict = None) -> Dict: - """Render a BSON schema for a schema name - - Args: - schema_name: Complete schema name including version (e.g., "collection.1.0.0.1") - token: Authentication token for RBAC enforcement - - Returns: - Dict containing the rendered BSON schema - - Raises: - RenderNotFoundError: If schema is not found - RenderProcessingError: If there are load, validation, or rendering errors - """ - config_manager = ConfigManager() - schema_manager = SchemaManager(config_manager.collection_configs) - - # Check for load errors - if config_manager.load_errors: - raise RenderProcessingError(schema_name, config_manager.load_errors) - - # Check for validation errors - validation_errors = config_manager.validate_configs() - if validation_errors: - raise RenderProcessingError(schema_name, validation_errors) - - try: - return schema_manager.render_one(schema_name, SchemaFormat.BSON) - except Exception as e: - logger.error(f"Error rendering BSON schema for {schema_name}: {str(e)}") - raise RenderProcessingError(schema_name, [{ - "error": "rendering_error", - "error_id": "RND-003", - "message": str(e) - }]) - - @staticmethod - def render_openapi(schema_name: str, token: Dict = None) -> Dict: - """Render an OpenAPI specification for a schema name - - Args: - schema_name: Complete schema name including version (e.g., "collection.1.0.0.1") - token: Authentication token for RBAC enforcement - - Returns: - Dict containing a message that OpenAPI 
rendering is to be implemented - - Note: - This will be implemented using Jinja templates and JSON schema rendering - """ - return { - "message": "OpenAPI rendering not yet implemented" - } -
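Finally, a sketch of the service-layer contract being removed. This is illustrative only, not repo code; it assumes collection configurations that load and validate cleanly, and the schema name is hypothetical.

```python
# Illustrative sketch of the deleted RenderService contract (assumptions noted).
from stage0_mongodb_api.services.render_service import (
    RenderService, RenderProcessingError
)

try:
    schema = RenderService.render_json_schema("search.1.0.0.1")
except RenderProcessingError as e:
    # e.errors is the same list of structured error dicts the routes return.
    print(e.errors)

# OpenAPI rendering is intentionally a stub at this point:
assert RenderService.render_openapi("search.1.0.0.1") == {
    "message": "OpenAPI rendering not yet implemented"
}
```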
"drama", "sci_fi", "documentary"]}}}, "required": ["director"]}, {"additionalProperties": false, "bsonType": "object", "properties": {"episode": {"bsonType": "int", "minimum": 1}, "network": {"bsonType": "string", "pattern": "^\\S{1,40}$"}, "season": {"bsonType": "int", "minimum": 1}}, "required": ["season", "episode"]}, {"additionalProperties": false, "bsonType": "object", "properties": {"narrator": {"bsonType": "string", "pattern": "^\\S{1,40}$"}, "subject": {"bsonType": "string", "pattern": "^[^\\t\\n\\r]{0,255}$"}}, "required": ["subject"]}, {"additionalProperties": false, "bsonType": "object", "properties": {"awards": {"bsonType": "array", "items": {"bsonType": "string", "pattern": "^\\S{1,40}$"}}, "festival": {"bsonType": "string", "pattern": "^\\S{1,40}$"}}}], "properties": {}}, "duration": {"bsonType": "int", "minimum": 1}, "format": {"bsonType": "string", "enum": ["dvd", "bluray", "digital", "streaming"]}, "quality": {"bsonType": "string", "enum": ["sd", "hd", "uhd"]}}}, "status": {"bsonType": "string", "enum": ["draft", "published", "archived"]}, "tags": {"bsonType": "array", "items": {"bsonType": "string", "enum": ["action", "comedy", "drama", "sci_fi", "documentary"]}}, "title": {"bsonType": "string", "pattern": "^[^\\t\\n\\r]{0,255}$"}, "type": {"bsonType": "string", "enum": ["movie", "tv_show", "documentary", "short"]}}, "required": ["_id", "title", "status", "last_saved"]}, {"additionalProperties": false, "bsonType": "object", "properties": {"_id": {"bsonType": "objectId"}, "home_address": {"additionalProperties": false, "bsonType": "object", "properties": {"city": {"bsonType": "string", "pattern": "^\\S{1,40}$"}, "postal_code": {"bsonType": "string", "pattern": "^\\S{1,40}$"}, "state": {"bsonType": "string", "pattern": "^[A-Z]{2}$"}, "street": {"bsonType": "string", "pattern": "^[^\\t\\n\\r]{0,255}$"}}, "required": ["street", "postal_code"]}, "last_saved": {"additionalProperties": false, "bsonType": "object", "properties": {"at_time": {"bsonType": "date"}, "by_user": {"bsonType": "string", "pattern": "^\\S{1,40}$"}, "correlation_id": {"bsonType": "string", "pattern": "^\\S{1,40}$"}, "from_ip": {"bsonType": "string", "pattern": "^((25[0-5]|2[0-4][0-9]|[01]?[0-9][0-9]?)\\.){3}(25[0-5]|2[0-4][0-9]|[01]?[0-9][0-9]?)$"}}, "required": ["from_ip", "by_user", "at_time", "correlation_id"]}, "name": {"bsonType": "string", "pattern": "^[^\\t\\n\\r]{0,255}$"}, "status": {"bsonType": "string", "enum": ["active", "archived"]}, "users": {"bsonType": "array", "items": {"bsonType": "objectId"}}, "website": {"bsonType": "string", "pattern": "^https?://[^\\s]+$"}}, "required": ["_id", "name", "status", "last_saved"]}, {"additionalProperties": false, "bsonType": "object", "properties": {"_id": {"bsonType": "objectId"}, "first_name": {"bsonType": "string", "pattern": "^\\S{1,40}$"}, "last_name": {"bsonType": "string", "pattern": "^\\S{1,40}$"}, "last_saved": {"additionalProperties": false, "bsonType": "object", "properties": {"at_time": {"bsonType": "date"}, "by_user": {"bsonType": "string", "pattern": "^\\S{1,40}$"}, "correlation_id": {"bsonType": "string", "pattern": "^\\S{1,40}$"}, "from_ip": {"bsonType": "string", "pattern": "^((25[0-5]|2[0-4][0-9]|[01]?[0-9][0-9]?)\\.){3}(25[0-5]|2[0-4][0-9]|[01]?[0-9][0-9]?)$"}}, "required": ["from_ip", "by_user", "at_time", "correlation_id"]}, "status": {"bsonType": "string", "enum": ["active", "archived"]}, "user_name": {"bsonType": "string", "pattern": "^\\S{1,40}$"}}, "required": ["_id", "user_name", "status", "last_saved"]}], "properties": 
{"collection_id": {"bsonType": "objectId"}, "collection_name": {"bsonType": "string", "pattern": "^\\S{1,40}$"}}, "required": ["collection_name", "collection_id"]}, "ends": "2025-07-09 23:17:08.019321", "id": "MON-10", "starts": "2025-07-09 23:17:08.015355", "status": "SUCCESS", "sub_events": [], "type": "APPLY_SCHEMA"}], "type": "APPLY_SCHEMA_VALIDATION"}, {"data": {"collection_name": "search", "message": "No test data to load", "version": "1.0.0.1"}, "ends": "2025-07-09 23:17:08.019331", "id": "PRO-06", "starts": "2025-07-09 23:17:08.019330", "status": "SUCCESS", "sub_events": [], "type": "LOAD_TEST_DATA"}, {"data": {"collection_name": "search", "new_version": "1.0.0.1", "version_number": "search.1.0.0.1"}, "ends": "2025-07-09 23:17:08.021959", "id": "PRO-07", "starts": "2025-07-09 23:17:08.019332", "status": "SUCCESS", "sub_events": [], "type": "UPDATE_VERSION"}], "type": "PROCESS"}, {"data": {"add_indexes_count": 0, "collection_name": "search", "drop_indexes_count": 0, "has_test_data": false, "migrations_count": 0, "test_data_file": null, "version": "1.0.0.2"}, "ends": "2025-07-09 23:17:08.046620", "id": "search.1.0.0.2", "starts": "2025-07-09 23:17:08.022733", "status": "SUCCESS", "sub_events": [{"data": {"collection_name": "search", "version": "1.0.0.2"}, "ends": "2025-07-09 23:17:08.025121", "id": "PRO-01", "starts": "2025-07-09 23:17:08.022734", "status": "SUCCESS", "sub_events": [{"data": {"collection": "search", "operation": "schema_validation_removed"}, "ends": "2025-07-09 23:17:08.025119", "id": "MON-06", "starts": "2025-07-09 23:17:08.022736", "status": "SUCCESS", "sub_events": [], "type": "REMOVE_SCHEMA"}], "type": "REMOVE_SCHEMA_VALIDATION"}, {"data": {"collection_name": "search", "message": "No indexes to drop", "version": "1.0.0.2"}, "ends": "2025-07-09 23:17:08.025125", "id": "PRO-02", "starts": "2025-07-09 23:17:08.025123", "status": "SUCCESS", "sub_events": [], "type": "REMOVE_INDEXES"}, {"data": {"collection_name": "search", "message": "No migrations to execute", "version": "1.0.0.2"}, "ends": "2025-07-09 23:17:08.025126", "id": "PRO-03", "starts": "2025-07-09 23:17:08.025125", "status": "SUCCESS", "sub_events": [], "type": "EXECUTE_MIGRATIONS"}, {"data": {"collection_name": "search", "message": "No indexes to add", "version": "1.0.0.2"}, "ends": "2025-07-09 23:17:08.025127", "id": "PRO-04", "starts": "2025-07-09 23:17:08.025127", "status": "SUCCESS", "sub_events": [], "type": "ADD_INDEXES"}, {"data": {"collection_name": "search", "version": "1.0.0.2"}, "ends": "2025-07-09 23:17:08.045244", "id": "PRO-05", "starts": "2025-07-09 23:17:08.025128", "status": "SUCCESS", "sub_events": [{"data": {"additionalProperties": false, "bsonType": "object", "oneOf": [{"additionalProperties": false, "bsonType": "object", "properties": {"_id": {"bsonType": "objectId"}, "last_saved": {"additionalProperties": false, "bsonType": "object", "properties": {"at_time": {"bsonType": "date"}, "by_user": {"bsonType": "string", "pattern": "^\\S{1,40}$"}, "correlation_id": {"bsonType": "string", "pattern": "^\\S{1,40}$"}, "from_ip": {"bsonType": "string", "pattern": "^((25[0-5]|2[0-4][0-9]|[01]?[0-9][0-9]?)\\.){3}(25[0-5]|2[0-4][0-9]|[01]?[0-9][0-9]?)$"}}, "required": ["from_ip", "by_user", "at_time", "correlation_id"]}, "metadata": {"additionalProperties": false, "bsonType": "object", "properties": {"content_data": {"additionalProperties": false, "bsonType": "object", "oneOf": [{"additionalProperties": false, "bsonType": "object", "properties": {"cast": {"bsonType": "array", "items": {"bsonType": 
"string", "pattern": "^\\S{1,40}$"}}, "director": {"bsonType": "string", "pattern": "^\\S{1,40}$"}, "genre": {"bsonType": "array", "items": {"bsonType": "string", "enum": ["action", "comedy", "drama", "sci_fi", "documentary"]}}}, "required": ["director"]}, {"additionalProperties": false, "bsonType": "object", "properties": {"episode": {"bsonType": "int", "minimum": 1}, "network": {"bsonType": "string", "pattern": "^\\S{1,40}$"}, "season": {"bsonType": "int", "minimum": 1}}, "required": ["season", "episode"]}, {"additionalProperties": false, "bsonType": "object", "properties": {"narrator": {"bsonType": "string", "pattern": "^\\S{1,40}$"}, "subject": {"bsonType": "string", "pattern": "^[^\\t\\n\\r]{0,255}$"}}, "required": ["subject"]}, {"additionalProperties": false, "bsonType": "object", "properties": {"awards": {"bsonType": "array", "items": {"bsonType": "string", "pattern": "^\\S{1,40}$"}}, "festival": {"bsonType": "string", "pattern": "^\\S{1,40}$"}}}], "properties": {}}, "duration": {"bsonType": "int", "minimum": 1}, "format": {"bsonType": "string", "enum": ["dvd", "bluray", "digital", "streaming"]}, "quality": {"bsonType": "string", "enum": ["sd", "hd", "uhd"]}}}, "status": {"bsonType": "string", "enum": ["draft", "published", "archived"]}, "tags": {"bsonType": "array", "items": {"bsonType": "string", "enum": ["action", "comedy", "drama", "sci_fi", "documentary"]}}, "title": {"bsonType": "string", "pattern": "^[^\\t\\n\\r]{0,255}$"}, "type": {"bsonType": "string", "enum": ["movie", "tv_show", "documentary", "short"]}}, "required": ["_id", "title", "status", "last_saved"]}, {"additionalProperties": false, "bsonType": "object", "properties": {"_id": {"bsonType": "objectId"}, "home_address": {"additionalProperties": false, "bsonType": "object", "properties": {"city": {"bsonType": "string", "pattern": "^\\S{1,40}$"}, "postal_code": {"bsonType": "string", "pattern": "^\\S{1,40}$"}, "state": {"bsonType": "string", "pattern": "^[A-Z]{2}$"}, "street": {"bsonType": "string", "pattern": "^[^\\t\\n\\r]{0,255}$"}}, "required": ["street", "postal_code"]}, "last_saved": {"additionalProperties": false, "bsonType": "object", "properties": {"at_time": {"bsonType": "date"}, "by_user": {"bsonType": "string", "pattern": "^\\S{1,40}$"}, "correlation_id": {"bsonType": "string", "pattern": "^\\S{1,40}$"}, "from_ip": {"bsonType": "string", "pattern": "^((25[0-5]|2[0-4][0-9]|[01]?[0-9][0-9]?)\\.){3}(25[0-5]|2[0-4][0-9]|[01]?[0-9][0-9]?)$"}}, "required": ["from_ip", "by_user", "at_time", "correlation_id"]}, "name": {"bsonType": "string", "pattern": "^[^\\t\\n\\r]{0,255}$"}, "status": {"bsonType": "string", "enum": ["draft", "active", "archived"]}, "users": {"bsonType": "array", "items": {"bsonType": "objectId"}}, "website": {"bsonType": "string", "pattern": "^https?://[^\\s]+$"}}, "required": ["_id", "name", "status", "last_saved"]}, {"additionalProperties": false, "bsonType": "object", "properties": {"_id": {"bsonType": "objectId"}, "first_name": {"bsonType": "string", "pattern": "^\\S{1,40}$"}, "last_name": {"bsonType": "string", "pattern": "^\\S{1,40}$"}, "last_saved": {"additionalProperties": false, "bsonType": "object", "properties": {"at_time": {"bsonType": "date"}, "by_user": {"bsonType": "string", "pattern": "^\\S{1,40}$"}, "correlation_id": {"bsonType": "string", "pattern": "^\\S{1,40}$"}, "from_ip": {"bsonType": "string", "pattern": "^((25[0-5]|2[0-4][0-9]|[01]?[0-9][0-9]?)\\.){3}(25[0-5]|2[0-4][0-9]|[01]?[0-9][0-9]?)$"}}, "required": ["from_ip", "by_user", "at_time", "correlation_id"]}, "status": 
{"bsonType": "string", "enum": ["draft", "active", "archived"]}, "user_name": {"bsonType": "string", "pattern": "^\\S{1,40}$"}}, "required": ["_id", "user_name", "status", "last_saved"]}], "properties": {"collection_id": {"bsonType": "objectId"}, "collection_name": {"bsonType": "string", "pattern": "^\\S{1,40}$"}}, "required": ["collection_name", "collection_id"]}, "ends": "2025-07-09 23:17:08.045241", "id": "MON-10", "starts": "2025-07-09 23:17:08.039832", "status": "SUCCESS", "sub_events": [], "type": "APPLY_SCHEMA"}], "type": "APPLY_SCHEMA_VALIDATION"}, {"data": {"collection_name": "search", "message": "No test data to load", "version": "1.0.0.2"}, "ends": "2025-07-09 23:17:08.045248", "id": "PRO-06", "starts": "2025-07-09 23:17:08.045246", "status": "SUCCESS", "sub_events": [], "type": "LOAD_TEST_DATA"}, {"data": {"collection_name": "search", "new_version": "1.0.0.2", "version_number": "search.1.0.0.2"}, "ends": "2025-07-09 23:17:08.046619", "id": "PRO-07", "starts": "2025-07-09 23:17:08.045249", "status": "SUCCESS", "sub_events": [], "type": "UPDATE_VERSION"}], "type": "PROCESS"}, {"data": {"add_indexes_count": 0, "collection_name": "search", "drop_indexes_count": 0, "has_test_data": false, "migrations_count": 0, "test_data_file": null, "version": "1.0.1.3"}, "ends": "2025-07-09 23:17:08.070671", "id": "search.1.0.1.3", "starts": "2025-07-09 23:17:08.047268", "status": "SUCCESS", "sub_events": [{"data": {"collection_name": "search", "version": "1.0.1.3"}, "ends": "2025-07-09 23:17:08.048605", "id": "PRO-01", "starts": "2025-07-09 23:17:08.047269", "status": "SUCCESS", "sub_events": [{"data": {"collection": "search", "operation": "schema_validation_removed"}, "ends": "2025-07-09 23:17:08.048603", "id": "MON-06", "starts": "2025-07-09 23:17:08.047270", "status": "SUCCESS", "sub_events": [], "type": "REMOVE_SCHEMA"}], "type": "REMOVE_SCHEMA_VALIDATION"}, {"data": {"collection_name": "search", "message": "No indexes to drop", "version": "1.0.1.3"}, "ends": "2025-07-09 23:17:08.048607", "id": "PRO-02", "starts": "2025-07-09 23:17:08.048606", "status": "SUCCESS", "sub_events": [], "type": "REMOVE_INDEXES"}, {"data": {"collection_name": "search", "message": "No migrations to execute", "version": "1.0.1.3"}, "ends": "2025-07-09 23:17:08.048608", "id": "PRO-03", "starts": "2025-07-09 23:17:08.048607", "status": "SUCCESS", "sub_events": [], "type": "EXECUTE_MIGRATIONS"}, {"data": {"collection_name": "search", "message": "No indexes to add", "version": "1.0.1.3"}, "ends": "2025-07-09 23:17:08.048609", "id": "PRO-04", "starts": "2025-07-09 23:17:08.048608", "status": "SUCCESS", "sub_events": [], "type": "ADD_INDEXES"}, {"data": {"collection_name": "search", "version": "1.0.1.3"}, "ends": "2025-07-09 23:17:08.069392", "id": "PRO-05", "starts": "2025-07-09 23:17:08.048609", "status": "SUCCESS", "sub_events": [{"data": {"additionalProperties": false, "bsonType": "object", "oneOf": [{"additionalProperties": false, "bsonType": "object", "properties": {"_id": {"bsonType": "objectId"}, "last_saved": {"additionalProperties": false, "bsonType": "object", "properties": {"at_time": {"bsonType": "date"}, "by_user": {"bsonType": "string", "pattern": "^\\S{1,40}$"}, "correlation_id": {"bsonType": "string", "pattern": "^\\S{1,40}$"}, "from_ip": {"bsonType": "string", "pattern": "^((25[0-5]|2[0-4][0-9]|[01]?[0-9][0-9]?)\\.){3}(25[0-5]|2[0-4][0-9]|[01]?[0-9][0-9]?)$"}}, "required": ["from_ip", "by_user", "at_time", "correlation_id"]}, "metadata": {"additionalProperties": false, "bsonType": "object", "properties": 
{"content_data": {"additionalProperties": false, "bsonType": "object", "oneOf": [{"additionalProperties": false, "bsonType": "object", "properties": {"cast": {"bsonType": "array", "items": {"bsonType": "string", "pattern": "^\\S{1,40}$"}}, "director": {"bsonType": "string", "pattern": "^\\S{1,40}$"}, "genre": {"bsonType": "array", "items": {"bsonType": "string", "enum": ["action", "comedy", "drama", "sci_fi", "documentary"]}}}, "required": ["director"]}, {"additionalProperties": false, "bsonType": "object", "properties": {"episode": {"bsonType": "int", "minimum": 1}, "network": {"bsonType": "string", "pattern": "^\\S{1,40}$"}, "season": {"bsonType": "int", "minimum": 1}}, "required": ["season", "episode"]}, {"additionalProperties": false, "bsonType": "object", "properties": {"narrator": {"bsonType": "string", "pattern": "^\\S{1,40}$"}, "subject": {"bsonType": "string", "pattern": "^[^\\t\\n\\r]{0,255}$"}}, "required": ["subject"]}, {"additionalProperties": false, "bsonType": "object", "properties": {"awards": {"bsonType": "array", "items": {"bsonType": "string", "pattern": "^\\S{1,40}$"}}, "festival": {"bsonType": "string", "pattern": "^\\S{1,40}$"}}}], "properties": {}}, "duration": {"bsonType": "int", "minimum": 1}, "format": {"bsonType": "string", "enum": ["dvd", "bluray", "digital", "streaming"]}, "quality": {"bsonType": "string", "enum": ["sd", "hd", "uhd"]}}}, "status": {"bsonType": "string", "enum": ["draft", "published", "archived"]}, "tags": {"bsonType": "array", "items": {"bsonType": "string", "enum": ["action", "comedy", "drama", "sci_fi", "documentary"]}}, "title": {"bsonType": "string", "pattern": "^[^\\t\\n\\r]{0,255}$"}, "type": {"bsonType": "string", "enum": ["movie", "tv_show", "documentary", "short"]}}, "required": ["_id", "title", "status", "last_saved"]}, {"additionalProperties": false, "bsonType": "object", "properties": {"_id": {"bsonType": "objectId"}, "home_address": {"additionalProperties": false, "bsonType": "object", "properties": {"city": {"bsonType": "string", "pattern": "^\\S{1,40}$"}, "postal_code": {"bsonType": "string", "pattern": "^\\S{1,40}$"}, "state": {"bsonType": "string", "pattern": "^[A-Z]{2}$"}, "street": {"bsonType": "string", "pattern": "^[^\\t\\n\\r]{0,255}$"}}, "required": ["street", "postal_code"]}, "last_saved": {"additionalProperties": false, "bsonType": "object", "properties": {"at_time": {"bsonType": "date"}, "by_user": {"bsonType": "string", "pattern": "^\\S{1,40}$"}, "correlation_id": {"bsonType": "string", "pattern": "^\\S{1,40}$"}, "from_ip": {"bsonType": "string", "pattern": "^((25[0-5]|2[0-4][0-9]|[01]?[0-9][0-9]?)\\.){3}(25[0-5]|2[0-4][0-9]|[01]?[0-9][0-9]?)$"}}, "required": ["from_ip", "by_user", "at_time", "correlation_id"]}, "name": {"bsonType": "string", "pattern": "^[^\\t\\n\\r]{0,255}$"}, "status": {"bsonType": "string", "enum": ["draft", "active", "archived"]}, "users": {"bsonType": "array", "items": {"bsonType": "objectId"}}, "website": {"bsonType": "string", "pattern": "^https?://[^\\s]+$"}}, "required": ["_id", "name", "status", "last_saved"]}, {"additionalProperties": false, "bsonType": "object", "properties": {"_id": {"bsonType": "objectId"}, "categories": {"bsonType": "array", "items": {"additionalProperties": false, "bsonType": "object", "properties": {"category": {"bsonType": "string", "enum": ["work", "personal", "project", "reference"]}, "name": {"bsonType": "string", "pattern": "^\\S{1,40}$"}, "tags": {"bsonType": "array", "items": {"bsonType": "string", "enum": ["urgent", "important", "normal", "low", "completed", 
"in_progress", "blocked", "review"]}}}, "required": ["name", "category", "tags"]}}, "email": {"bsonType": "string", "pattern": "^[^\\s@]+@[^\\s@]+\\.[^\\s@]+$"}, "full_name": {"bsonType": "string", "pattern": "^[^\\t\\n\\r]{0,255}$"}, "last_saved": {"additionalProperties": false, "bsonType": "object", "properties": {"at_time": {"bsonType": "date"}, "by_user": {"bsonType": "string", "pattern": "^\\S{1,40}$"}, "correlation_id": {"bsonType": "string", "pattern": "^\\S{1,40}$"}, "from_ip": {"bsonType": "string", "pattern": "^((25[0-5]|2[0-4][0-9]|[01]?[0-9][0-9]?)\\.){3}(25[0-5]|2[0-4][0-9]|[01]?[0-9][0-9]?)$"}}, "required": ["from_ip", "by_user", "at_time", "correlation_id"]}, "phone": {"bsonType": "string", "pattern": "^\\+1[2-9][0-9]{9}$"}, "preferences": {"additionalProperties": false, "bsonType": "object", "properties": {"content_tags": {"bsonType": "array", "items": {"bsonType": "string", "enum": ["technology", "business", "entertainment", "education", "news"]}}, "delivery_channels": {"bsonType": "array", "items": {"bsonType": "string", "enum": ["email", "sms", "push", "in_app"]}}, "notification_types": {"bsonType": "array", "items": {"bsonType": "string", "enum": ["system", "user", "content", "reminder"]}}, "priority_levels": {"bsonType": "array", "items": {"bsonType": "string", "enum": ["critical", "high", "medium", "low"]}}}, "required": ["notification_types", "delivery_channels"]}, "status": {"bsonType": "string", "enum": ["draft", "active", "archived"]}, "user_name": {"bsonType": "string", "pattern": "^\\S{1,40}$"}}, "required": ["_id", "user_name", "status", "last_saved"]}], "properties": {"collection_id": {"bsonType": "objectId"}, "collection_name": {"bsonType": "string", "pattern": "^\\S{1,40}$"}}, "required": ["collection_name", "collection_id"]}, "ends": "2025-07-09 23:17:08.069390", "id": "MON-10", "starts": "2025-07-09 23:17:08.063630", "status": "SUCCESS", "sub_events": [], "type": "APPLY_SCHEMA"}], "type": "APPLY_SCHEMA_VALIDATION"}, {"data": {"collection_name": "search", "message": "No test data to load", "version": "1.0.1.3"}, "ends": "2025-07-09 23:17:08.069394", "id": "PRO-06", "starts": "2025-07-09 23:17:08.069393", "status": "SUCCESS", "sub_events": [], "type": "LOAD_TEST_DATA"}, {"data": {"collection_name": "search", "new_version": "1.0.1.3", "version_number": "search.1.0.1.3"}, "ends": "2025-07-09 23:17:08.070670", "id": "PRO-07", "starts": "2025-07-09 23:17:08.069395", "status": "SUCCESS", "sub_events": [], "type": "UPDATE_VERSION"}], "type": "PROCESS"}, {"data": [{"enumerators": {}, "name": "Enumerations", "status": "Deprecated", "version": 0}, {"enumerators": {"category_type": {"personal": "Personal items", "project": "Project specific items", "reference": "Reference materials", "work": "Work related items"}, "default_status": {"active": "Not Deleted", "archived": "Soft Delete Indicator"}, "delivery_channel": {"email": "Email delivery", "in_app": "In-app notification", "push": "Push notification", "sms": "SMS delivery"}, "media_format": {"bluray": "Blu-ray format", "digital": "Digital format", "dvd": "DVD format", "streaming": "Streaming format"}, "media_quality": {"hd": "High definition", "sd": "Standard definition", "uhd": "Ultra high definition"}, "media_status": {"archived": "No longer available", "draft": "Not yet published", "published": "Available to users"}, "media_tags": {"action": "Action genre", "comedy": "Comedy genre", "documentary": "Documentary genre", "drama": "Drama genre", "sci_fi": "Science fiction genre"}, "media_type": {"documentary": "A 
non-fiction film", "movie": "A motion picture", "short": "A short film", "tv_show": "A television series"}, "notification_action": {"created": "Document created", "deleted": "Document deleted", "published": "Document published", "updated": "Document updated"}, "notification_tags": {"important": "Important notification", "low": "Low priority notification", "normal": "Normal notification", "urgent": "Urgent notification"}, "notification_type": {"content": "Content notification", "reminder": "Reminder notification", "system": "System notification", "user": "User notification"}, "priority_level": {"critical": "Critical priority", "high": "High priority", "low": "Low priority", "medium": "Medium priority"}}, "name": "Enumerations", "status": "Active", "version": 1}, {"enumerators": {"default_status": {"active": "Not deleted", "archived": "Soft delete indicator", "draft": "Not finalized"}, "media_format": {"bluray": "Blu-ray format", "digital": "Digital format", "dvd": "DVD format", "streaming": "Streaming format"}, "media_quality": {"hd": "High definition", "sd": "Standard definition", "uhd": "Ultra high definition"}, "media_status": {"archived": "No longer available", "draft": "Not yet published", "published": "Available to users"}, "media_tags": {"action": "Action genre", "comedy": "Comedy genre", "documentary": "Documentary genre", "drama": "Drama genre", "sci_fi": "Science fiction genre"}, "media_type": {"documentary": "A non-fiction film", "movie": "A motion picture", "short": "A short film", "tv_show": "A television series"}}, "name": "Enumerations", "status": "Active", "version": 2}, {"enumerators": {"category_tags": {"blocked": "Cannot proceed", "completed": "Task is done", "important": "High priority", "in_progress": "Currently being worked on", "low": "Low priority", "normal": "Standard priority", "review": "Needs review", "urgent": "Requires immediate attention"}, "category_type": {"personal": "Personal items", "project": "Project specific items", "reference": "Reference materials", "work": "Work related items"}, "content_tags": {"business": "Business related content", "education": "Educational content", "entertainment": "Entertainment content", "news": "News content", "technology": "Technology related content"}, "content_type": {"article": "Written content", "podcast": "Audio content", "video": "Video content"}, "default_status": {"active": "Not deleted", "archived": "Soft delete indicator", "draft": "Not finalized"}, "delivery_channel": {"email": "Email delivery", "in_app": "In-app notification", "push": "Push notification", "sms": "SMS delivery"}, "media_format": {"bluray": "Blu-ray format", "digital": "Digital format", "dvd": "DVD format", "streaming": "Streaming format"}, "media_quality": {"hd": "High definition", "sd": "Standard definition", "uhd": "Ultra high definition"}, "media_status": {"archived": "No longer available", "draft": "Not yet published", "published": "Available to users"}, "media_tags": {"action": "Action genre", "comedy": "Comedy genre", "documentary": "Documentary genre", "drama": "Drama genre", "sci_fi": "Science fiction genre"}, "media_type": {"documentary": "A non-fiction film", "movie": "A motion picture", "short": "A short film", "tv_show": "A television series"}, "notification_action": {"created": "Document created", "deleted": "Document deleted", "published": "Document published", "updated": "Document updated"}, "notification_tags": {"important": "Important notification", "low": "Low priority notification", "normal": "Normal notification", "urgent": 
"Urgent notification"}, "notification_type": {"content": "Content notification", "reminder": "Reminder notification", "system": "System notification", "user": "User notification"}, "priority_level": {"critical": "Critical priority", "high": "High priority", "low": "Low priority", "medium": "Medium priority"}, "tags": {"admin": "An administrator", "super": "A super user", "user": "A User"}, "type": {"check": "Select multiple options", "radio": "Select one option", "text": "Enter a text string"}}, "name": "Enumerations", "status": "Active", "version": 3}], "ends": "2025-07-09 23:17:08.083042", "id": "PRO-08", "starts": "2025-07-09 23:17:08.070675", "status": "SUCCESS", "sub_events": [], "type": "LOAD_ENUMERATORS"}], "type": "PROCESS"}, {"data": {"configuration_file": "content.yaml", "configuration_name": "content", "configuration_title": "Content Collection", "version_count": 3}, "ends": "2025-07-09 23:17:08.171164", "id": "CFG-00", "starts": "2025-07-09 23:17:08.084481", "status": "SUCCESS", "sub_events": [{"data": {"add_indexes_count": 2, "collection_name": "content", "drop_indexes_count": 0, "has_test_data": true, "migrations_count": 0, "test_data_file": "content.1.0.0.1.json", "version": "1.0.0.1"}, "ends": "2025-07-09 23:17:08.120911", "id": "content.1.0.0.1", "starts": "2025-07-09 23:17:08.090432", "status": "SUCCESS", "sub_events": [{"data": {"collection_name": "content", "version": "1.0.0.1"}, "ends": "2025-07-09 23:17:08.097522", "id": "PRO-01", "starts": "2025-07-09 23:17:08.090434", "status": "SUCCESS", "sub_events": [{"data": {"collection": "content", "operation": "schema_validation_removed"}, "ends": "2025-07-09 23:17:08.097520", "id": "MON-06", "starts": "2025-07-09 23:17:08.090435", "status": "SUCCESS", "sub_events": [], "type": "REMOVE_SCHEMA"}], "type": "REMOVE_SCHEMA_VALIDATION"}, {"data": {"collection_name": "content", "message": "No indexes to drop", "version": "1.0.0.1"}, "ends": "2025-07-09 23:17:08.097524", "id": "PRO-02", "starts": "2025-07-09 23:17:08.097523", "status": "SUCCESS", "sub_events": [], "type": "REMOVE_INDEXES"}, {"data": {"collection_name": "content", "message": "No migrations to execute", "version": "1.0.0.1"}, "ends": "2025-07-09 23:17:08.097525", "id": "PRO-03", "starts": "2025-07-09 23:17:08.097525", "status": "SUCCESS", "sub_events": [], "type": "EXECUTE_MIGRATIONS"}, {"data": {"collection_name": "content", "index_count": 2, "indexes_to_add": [{"key": {"content_type": 1}, "name": "contentTypeIndex", "options": {"unique": false}}, {"key": {"status": 1}, "name": "statusIndex", "options": {"unique": false}}], "version": "1.0.0.1"}, "ends": "2025-07-09 23:17:08.103642", "id": "PRO-04", "starts": "2025-07-09 23:17:08.097526", "status": "SUCCESS", "sub_events": [{"data": {"collection": "content", "index_keys": {"content_type": 1}, "index_name": "contentTypeIndex", "operation": "created"}, "ends": "2025-07-09 23:17:08.100666", "id": "MON-09", "starts": "2025-07-09 23:17:08.097529", "status": "SUCCESS", "sub_events": [], "type": "ADD_INDEX"}, {"data": {"collection": "content", "index_keys": {"status": 1}, "index_name": "statusIndex", "operation": "created"}, "ends": "2025-07-09 23:17:08.103638", "id": "MON-09", "starts": "2025-07-09 23:17:08.100668", "status": "SUCCESS", "sub_events": [], "type": "ADD_INDEX"}], "type": "ADD_INDEXES"}, {"data": {"collection_name": "content", "version": "1.0.0.1"}, "ends": "2025-07-09 23:17:08.117849", "id": "PRO-05", "starts": "2025-07-09 23:17:08.103643", "status": "SUCCESS", "sub_events": [{"data": {"additionalProperties": 
false, "bsonType": "object", "properties": {"_id": {"bsonType": "objectId"}, "author_id": {"additionalProperties": false, "bsonType": "object", "properties": {"_id": {"bsonType": "objectId"}, "first_name": {"bsonType": "string", "pattern": "^\\S{1,40}$"}, "last_name": {"bsonType": "string", "pattern": "^\\S{1,40}$"}, "last_saved": {"additionalProperties": false, "bsonType": "object", "properties": {"at_time": {"bsonType": "date"}, "by_user": {"bsonType": "string", "pattern": "^\\S{1,40}$"}, "correlation_id": {"bsonType": "string", "pattern": "^\\S{1,40}$"}, "from_ip": {"bsonType": "string", "pattern": "^((25[0-5]|2[0-4][0-9]|[01]?[0-9][0-9]?)\\.){3}(25[0-5]|2[0-4][0-9]|[01]?[0-9][0-9]?)$"}}, "required": ["from_ip", "by_user", "at_time", "correlation_id"]}, "status": {"bsonType": "string", "enum": ["active", "archived"]}, "user_name": {"bsonType": "string", "pattern": "^\\S{1,40}$"}}, "required": ["_id", "user_name", "status", "last_saved"]}, "content_data": {"additionalProperties": false, "bsonType": "object", "properties": {"audio_url": {"bsonType": "string", "pattern": "^https?://[^\\s]+$"}, "body": {"bsonType": "string", "maxLength": 4096}, "duration": {"bsonType": "int", "minimum": 1}, "episode_number": {"bsonType": "int", "minimum": 1}, "quality": {"bsonType": "string", "pattern": "^\\S{1,40}$"}, "tags": {"bsonType": "array", "items": {"bsonType": "string", "pattern": "^\\S{1,40}$"}}, "transcript": {"bsonType": "string", "maxLength": 4096}, "url": {"bsonType": "string", "pattern": "^https?://[^\\s]+$"}, "word_count": {"bsonType": "int", "minimum": 1}}}, "content_type": {"bsonType": "string", "pattern": "^\\S{1,40}$"}, "last_saved": {"additionalProperties": false, "bsonType": "object", "properties": {"at_time": {"bsonType": "date"}, "by_user": {"bsonType": "string", "pattern": "^\\S{1,40}$"}, "correlation_id": {"bsonType": "string", "pattern": "^\\S{1,40}$"}, "from_ip": {"bsonType": "string", "pattern": "^((25[0-5]|2[0-4][0-9]|[01]?[0-9][0-9]?)\\.){3}(25[0-5]|2[0-4][0-9]|[01]?[0-9][0-9]?)$"}}, "required": ["from_ip", "by_user", "at_time", "correlation_id"]}, "metadata": {"additionalProperties": false, "bsonType": "object", "properties": {"categories": {"bsonType": "array", "items": {"additionalProperties": false, "bsonType": "object", "properties": {"name": {"bsonType": "string", "pattern": "^\\S{1,40}$"}, "tags": {"bsonType": "array", "items": {"bsonType": "string", "pattern": "^\\S{1,40}$"}}, "type": {"bsonType": "string", "pattern": "^\\S{1,40}$"}}, "required": ["name", "type"]}}, "created_at": {"bsonType": "date"}, "published_at": {"bsonType": "date"}, "updated_at": {"bsonType": "date"}}, "required": ["created_at"]}, "status": {"bsonType": "string", "enum": ["active", "archived"]}, "subtitle": {"bsonType": "string", "pattern": "^[^\\t\\n\\r]{0,255}$"}, "title": {"bsonType": "string", "pattern": "^\\S{1,40}$"}}, "required": ["_id", "title", "content_type", "status", "content_data", "last_saved"]}, "ends": "2025-07-09 23:17:08.117847", "id": "MON-10", "starts": "2025-07-09 23:17:08.114935", "status": "SUCCESS", "sub_events": [], "type": "APPLY_SCHEMA"}], "type": "APPLY_SCHEMA_VALIDATION"}, {"data": {"error": "Test data loading operation failed"}, "ends": "2025-07-09 23:17:08.119257", "id": "PRO-06", "starts": "2025-07-09 23:17:08.117850", "status": "FAILURE", "sub_events": [{"data": {"error": "Bulk write operation failed", "nInserted": 0, "nMatched": 0, "nModified": 0, "nRemoved": 0, "nUpserted": 0, "upserted": [], "writeConcernErrors": [], "writeErrors": [{"code": 121, "errInfo": 
{"details": {"operatorName": "$jsonSchema", "schemaRulesNotSatisfied": [{"operatorName": "properties", "propertiesNotSatisfied": [{"details": [{"consideredType": "string", "consideredValue": "content_001", "operatorName": "bsonType", "reason": "type did not match", "specifiedAs": {"bsonType": "objectId"}}], "propertyName": "_id"}, {"details": [{"consideredValue": "Introduction to MongoDB", "operatorName": "pattern", "reason": "regular expression did not match", "specifiedAs": {"pattern": "^\\S{1,40}$"}}], "propertyName": "title"}, {"details": [{"consideredValue": "published", "operatorName": "enum", "reason": "value was not found in enum", "specifiedAs": {"enum": ["active", "archived"]}}], "propertyName": "status"}, {"details": [{"consideredType": "string", "consideredValue": "A00000000000000000000001", "operatorName": "bsonType", "reason": "type did not match", "specifiedAs": {"bsonType": "object"}}], "propertyName": "author_id"}, {"details": [{"operatorName": "properties", "propertiesNotSatisfied": [{"details": [{"consideredType": "string", "consideredValue": "2024-01-15T10:00:00Z", "operatorName": "bsonType", "reason": "type did not match", "specifiedAs": {"bsonType": "date"}}], "propertyName": "created_at"}, {"details": [{"consideredType": "string", "consideredValue": "2024-01-20T14:30:00Z", "operatorName": "bsonType", "reason": "type did not match", "specifiedAs": {"bsonType": "date"}}], "propertyName": "updated_at"}, {"details": [{"consideredType": "string", "consideredValue": "2024-01-20T14:30:00Z", "operatorName": "bsonType", "reason": "type did not match", "specifiedAs": {"bsonType": "date"}}], "propertyName": "published_at"}, {"details": [{"details": [{"operatorName": "properties", "propertiesNotSatisfied": [{"details": [{"consideredValue": "Database Tutorials", "operatorName": "pattern", "reason": "regular expression did not match", "specifiedAs": {"pattern": "^\\S{1,40}$"}}], "propertyName": "name"}]}], "itemIndex": 0, "operatorName": "items", "reason": "At least one item did not match the sub-schema"}], "propertyName": "categories"}]}], "propertyName": "metadata"}, {"details": [{"operatorName": "properties", "propertiesNotSatisfied": [{"details": [{"consideredType": "string", "consideredValue": "2024-01-20T14:30:00Z", "operatorName": "bsonType", "reason": "type did not match", "specifiedAs": {"bsonType": "date"}}], "propertyName": "at_time"}]}], "propertyName": "last_saved"}]}]}, "failingDocumentId": "content_001"}, "errmsg": "Document failed validation", "index": 0, "op": {"_id": "content_001", "author_id": "A00000000000000000000001", "content_data": {"body": "# Introduction to MongoDB\n\nMongoDB is a popular NoSQL database...", "tags": ["technology", "education"], "word_count": 2500}, "content_type": "article", "last_saved": {"at_time": "2024-01-20T14:30:00Z", "by_user": "admin", "correlation_id": "corr_008", "from_ip": "192.168.1.107"}, "metadata": {"categories": [{"name": "Database Tutorials", "tags": ["urgent", "important"], "type": "education"}], "created_at": "2024-01-15T10:00:00Z", "published_at": "2024-01-20T14:30:00Z", "updated_at": "2024-01-20T14:30:00Z"}, "status": "published", "subtitle": "A comprehensive guide for beginners", "title": "Introduction to MongoDB"}}]}, "ends": "2025-07-09 23:17:08.119248", "id": "MON-11", "starts": "2025-07-09 23:17:08.117855", "status": "FAILURE", "sub_events": [], "type": "LOAD_DATA"}], "type": "LOAD_TEST_DATA"}, {"data": {"collection_name": "content", "new_version": "1.0.0.1", "version_number": "content.1.0.0.1"}, "ends": 
"2025-07-09 23:17:08.120910", "id": "PRO-07", "starts": "2025-07-09 23:17:08.119258", "status": "SUCCESS", "sub_events": [], "type": "UPDATE_VERSION"}], "type": "PROCESS"}, {"data": {"add_indexes_count": 1, "collection_name": "content", "drop_indexes_count": 0, "has_test_data": true, "migrations_count": 0, "test_data_file": "content.1.0.0.2.json", "version": "1.0.0.2"}, "ends": "2025-07-09 23:17:08.139632", "id": "content.1.0.0.2", "starts": "2025-07-09 23:17:08.121611", "status": "SUCCESS", "sub_events": [{"data": {"collection_name": "content", "version": "1.0.0.2"}, "ends": "2025-07-09 23:17:08.122801", "id": "PRO-01", "starts": "2025-07-09 23:17:08.121613", "status": "SUCCESS", "sub_events": [{"data": {"collection": "content", "operation": "schema_validation_removed"}, "ends": "2025-07-09 23:17:08.122800", "id": "MON-06", "starts": "2025-07-09 23:17:08.121614", "status": "SUCCESS", "sub_events": [], "type": "REMOVE_SCHEMA"}], "type": "REMOVE_SCHEMA_VALIDATION"}, {"data": {"collection_name": "content", "message": "No indexes to drop", "version": "1.0.0.2"}, "ends": "2025-07-09 23:17:08.122803", "id": "PRO-02", "starts": "2025-07-09 23:17:08.122802", "status": "SUCCESS", "sub_events": [], "type": "REMOVE_INDEXES"}, {"data": {"collection_name": "content", "message": "No migrations to execute", "version": "1.0.0.2"}, "ends": "2025-07-09 23:17:08.122804", "id": "PRO-03", "starts": "2025-07-09 23:17:08.122804", "status": "SUCCESS", "sub_events": [], "type": "EXECUTE_MIGRATIONS"}, {"data": {"collection_name": "content", "index_count": 1, "indexes_to_add": [{"key": {"author_id": 1}, "name": "authorIndex", "options": {"unique": false}}], "version": "1.0.0.2"}, "ends": "2025-07-09 23:17:08.125638", "id": "PRO-04", "starts": "2025-07-09 23:17:08.122805", "status": "SUCCESS", "sub_events": [{"data": {"collection": "content", "index_keys": {"author_id": 1}, "index_name": "authorIndex", "operation": "created"}, "ends": "2025-07-09 23:17:08.125635", "id": "MON-09", "starts": "2025-07-09 23:17:08.122807", "status": "SUCCESS", "sub_events": [], "type": "ADD_INDEX"}], "type": "ADD_INDEXES"}, {"data": {"collection_name": "content", "version": "1.0.0.2"}, "ends": "2025-07-09 23:17:08.137775", "id": "PRO-05", "starts": "2025-07-09 23:17:08.125639", "status": "SUCCESS", "sub_events": [{"data": {"additionalProperties": false, "bsonType": "object", "properties": {"_id": {"bsonType": "objectId"}, "author_id": {"additionalProperties": false, "bsonType": "object", "properties": {"_id": {"bsonType": "objectId"}, "first_name": {"bsonType": "string", "pattern": "^\\S{1,40}$"}, "last_name": {"bsonType": "string", "pattern": "^\\S{1,40}$"}, "last_saved": {"additionalProperties": false, "bsonType": "object", "properties": {"at_time": {"bsonType": "date"}, "by_user": {"bsonType": "string", "pattern": "^\\S{1,40}$"}, "correlation_id": {"bsonType": "string", "pattern": "^\\S{1,40}$"}, "from_ip": {"bsonType": "string", "pattern": "^((25[0-5]|2[0-4][0-9]|[01]?[0-9][0-9]?)\\.){3}(25[0-5]|2[0-4][0-9]|[01]?[0-9][0-9]?)$"}}, "required": ["from_ip", "by_user", "at_time", "correlation_id"]}, "status": {"bsonType": "string", "enum": ["draft", "active", "archived"]}, "user_name": {"bsonType": "string", "pattern": "^\\S{1,40}$"}}, "required": ["_id", "user_name", "status", "last_saved"]}, "content_data": {"additionalProperties": false, "bsonType": "object", "properties": {"audio_url": {"bsonType": "string", "pattern": "^https?://[^\\s]+$"}, "body": {"bsonType": "string", "maxLength": 4096}, "duration": {"bsonType": "int", "minimum": 
1}, "episode_number": {"bsonType": "int", "minimum": 1}, "quality": {"bsonType": "string", "pattern": "^\\S{1,40}$"}, "tags": {"bsonType": "array", "items": {"bsonType": "string", "pattern": "^\\S{1,40}$"}}, "transcript": {"bsonType": "string", "maxLength": 4096}, "url": {"bsonType": "string", "pattern": "^https?://[^\\s]+$"}, "word_count": {"bsonType": "int", "minimum": 1}}}, "content_type": {"bsonType": "string", "pattern": "^\\S{1,40}$"}, "last_saved": {"additionalProperties": false, "bsonType": "object", "properties": {"at_time": {"bsonType": "date"}, "by_user": {"bsonType": "string", "pattern": "^\\S{1,40}$"}, "correlation_id": {"bsonType": "string", "pattern": "^\\S{1,40}$"}, "from_ip": {"bsonType": "string", "pattern": "^((25[0-5]|2[0-4][0-9]|[01]?[0-9][0-9]?)\\.){3}(25[0-5]|2[0-4][0-9]|[01]?[0-9][0-9]?)$"}}, "required": ["from_ip", "by_user", "at_time", "correlation_id"]}, "metadata": {"additionalProperties": false, "bsonType": "object", "properties": {"categories": {"bsonType": "array", "items": {"additionalProperties": false, "bsonType": "object", "properties": {"name": {"bsonType": "string", "pattern": "^\\S{1,40}$"}, "tags": {"bsonType": "array", "items": {"bsonType": "string", "pattern": "^\\S{1,40}$"}}, "type": {"bsonType": "string", "pattern": "^\\S{1,40}$"}}, "required": ["name", "type"]}}, "created_at": {"bsonType": "date"}, "published_at": {"bsonType": "date"}, "updated_at": {"bsonType": "date"}}, "required": ["created_at"]}, "status": {"bsonType": "string", "enum": ["draft", "active", "archived"]}, "subtitle": {"bsonType": "string", "pattern": "^[^\\t\\n\\r]{0,255}$"}, "title": {"bsonType": "string", "pattern": "^\\S{1,40}$"}}, "required": ["_id", "title", "content_type", "status", "content_data", "last_saved"]}, "ends": "2025-07-09 23:17:08.137774", "id": "MON-10", "starts": "2025-07-09 23:17:08.135749", "status": "SUCCESS", "sub_events": [], "type": "APPLY_SCHEMA"}], "type": "APPLY_SCHEMA_VALIDATION"}, {"data": {"error": "Test data loading operation failed"}, "ends": "2025-07-09 23:17:08.138682", "id": "PRO-06", "starts": "2025-07-09 23:17:08.137776", "status": "FAILURE", "sub_events": [{"data": {"error": "Bulk write operation failed", "nInserted": 0, "nMatched": 0, "nModified": 0, "nRemoved": 0, "nUpserted": 0, "upserted": [], "writeConcernErrors": [], "writeErrors": [{"code": 121, "errInfo": {"details": {"operatorName": "$jsonSchema", "schemaRulesNotSatisfied": [{"operatorName": "properties", "propertiesNotSatisfied": [{"details": [{"consideredType": "string", "consideredValue": "content_004", "operatorName": "bsonType", "reason": "type did not match", "specifiedAs": {"bsonType": "objectId"}}], "propertyName": "_id"}, {"details": [{"consideredValue": "MongoDB Performance Optimization", "operatorName": "pattern", "reason": "regular expression did not match", "specifiedAs": {"pattern": "^\\S{1,40}$"}}], "propertyName": "title"}, {"details": [{"consideredValue": "published", "operatorName": "enum", "reason": "value was not found in enum", "specifiedAs": {"enum": ["draft", "active", "archived"]}}], "propertyName": "status"}, {"details": [{"consideredType": "string", "consideredValue": "A00000000000000000000001", "operatorName": "bsonType", "reason": "type did not match", "specifiedAs": {"bsonType": "object"}}], "propertyName": "author_id"}, {"details": [{"operatorName": "properties", "propertiesNotSatisfied": [{"details": [{"consideredType": "string", "consideredValue": "2024-01-25T11:00:00Z", "operatorName": "bsonType", "reason": "type did not match", "specifiedAs": 
{"bsonType": "date"}}], "propertyName": "created_at"}, {"details": [{"consideredType": "string", "consideredValue": "2024-01-26T15:20:00Z", "operatorName": "bsonType", "reason": "type did not match", "specifiedAs": {"bsonType": "date"}}], "propertyName": "updated_at"}, {"details": [{"consideredType": "string", "consideredValue": "2024-01-26T15:20:00Z", "operatorName": "bsonType", "reason": "type did not match", "specifiedAs": {"bsonType": "date"}}], "propertyName": "published_at"}, {"details": [{"details": [{"operatorName": "properties", "propertiesNotSatisfied": [{"details": [{"consideredValue": "Performance Guides", "operatorName": "pattern", "reason": "regular expression did not match", "specifiedAs": {"pattern": "^\\S{1,40}$"}}], "propertyName": "name"}]}], "itemIndex": 0, "operatorName": "items", "reason": "At least one item did not match the sub-schema"}], "propertyName": "categories"}]}], "propertyName": "metadata"}, {"details": [{"operatorName": "properties", "propertiesNotSatisfied": [{"details": [{"consideredType": "string", "consideredValue": "2024-01-26T15:20:00Z", "operatorName": "bsonType", "reason": "type did not match", "specifiedAs": {"bsonType": "date"}}], "propertyName": "at_time"}]}], "propertyName": "last_saved"}]}]}, "failingDocumentId": "content_004"}, "errmsg": "Document failed validation", "index": 0, "op": {"_id": "content_004", "author_id": "A00000000000000000000001", "content_data": {"body": "# MongoDB Performance Optimization\n\nPerformance optimization is crucial...", "tags": ["technology", "business"], "word_count": 3200}, "content_type": "article", "last_saved": {"at_time": "2024-01-26T15:20:00Z", "by_user": "admin", "correlation_id": "corr_011", "from_ip": "192.168.1.110"}, "metadata": {"categories": [{"name": "Performance Guides", "tags": ["important", "urgent"], "type": "education"}], "created_at": "2024-01-25T11:00:00Z", "published_at": "2024-01-26T15:20:00Z", "updated_at": "2024-01-26T15:20:00Z"}, "status": "published", "subtitle": "Techniques for improving database performance", "title": "MongoDB Performance Optimization"}}]}, "ends": "2025-07-09 23:17:08.138675", "id": "MON-11", "starts": "2025-07-09 23:17:08.137781", "status": "FAILURE", "sub_events": [], "type": "LOAD_DATA"}], "type": "LOAD_TEST_DATA"}, {"data": {"collection_name": "content", "new_version": "1.0.0.2", "version_number": "content.1.0.0.2"}, "ends": "2025-07-09 23:17:08.139631", "id": "PRO-07", "starts": "2025-07-09 23:17:08.138683", "status": "SUCCESS", "sub_events": [], "type": "UPDATE_VERSION"}], "type": "PROCESS"}, {"data": {"add_indexes_count": 0, "collection_name": "content", "drop_indexes_count": 0, "has_test_data": true, "migrations_count": 1, "test_data_file": "content.1.0.1.3.json", "version": "1.0.1.3"}, "ends": "2025-07-09 23:17:08.168734", "id": "content.1.0.1.3", "starts": "2025-07-09 23:17:08.140114", "status": "SUCCESS", "sub_events": [{"data": {"collection_name": "content", "version": "1.0.1.3"}, "ends": "2025-07-09 23:17:08.141687", "id": "PRO-01", "starts": "2025-07-09 23:17:08.140115", "status": "SUCCESS", "sub_events": [{"data": {"collection": "content", "operation": "schema_validation_removed"}, "ends": "2025-07-09 23:17:08.141686", "id": "MON-06", "starts": "2025-07-09 23:17:08.140116", "status": "SUCCESS", "sub_events": [], "type": "REMOVE_SCHEMA"}], "type": "REMOVE_SCHEMA_VALIDATION"}, {"data": {"collection_name": "content", "message": "No indexes to drop", "version": "1.0.1.3"}, "ends": "2025-07-09 23:17:08.141689", "id": "PRO-02", "starts": "2025-07-09 
23:17:08.141688", "status": "SUCCESS", "sub_events": [], "type": "REMOVE_INDEXES"}, {"data": {"collection_name": "content", "migration_count": 1, "migration_files": ["content_merge_content_fields.json"], "version": "1.0.1.3"}, "ends": "2025-07-09 23:17:08.153178", "id": "PRO-03", "starts": "2025-07-09 23:17:08.141689", "status": "SUCCESS", "sub_events": [{"data": {"collection": "content", "migration_file": "content_merge_content_fields.json", "migration_path": "./tests/test_cases/large_sample/migrations/content_merge_content_fields.json", "pipeline_operations": ["$addFields", "$unset", "$out"], "pipeline_stages": 3, "pipeline_summary": [{"details": {"$addFields": {"full_title": {"$concat": ["$title", " - ", "$subtitle"]}}}, "operation": "$addFields", "stage": 1}, {"details": {"$unset": ["subtitle"]}, "operation": "$unset", "stage": 2}, {"details": {"$out": "content"}, "operation": "$out", "stage": 3}]}, "ends": "2025-07-09 23:17:08.153177", "id": "MON-14", "starts": "2025-07-09 23:17:08.141694", "status": "SUCCESS", "sub_events": [{"data": {"file": "./tests/test_cases/large_sample/migrations/content_merge_content_fields.json", "file_name": "content_merge_content_fields.json", "pipeline_operations": ["$addFields", "$unset", "$out"], "pipeline_stages": 3}, "ends": "2025-07-09 23:17:08.141774", "id": "MON-13", "starts": "2025-07-09 23:17:08.141696", "status": "SUCCESS", "sub_events": [], "type": "LOAD_MIGRATION"}, {"data": {"collection": "content"}, "ends": "2025-07-09 23:17:08.153165", "id": "MON-08", "starts": "2025-07-09 23:17:08.141777", "status": "SUCCESS", "sub_events": [], "type": "EXECUTE_MIGRATION"}], "type": "EXECUTE_MIGRATION_FILE"}], "type": "EXECUTE_MIGRATIONS"}, {"data": {"collection_name": "content", "message": "No indexes to add", "version": "1.0.1.3"}, "ends": "2025-07-09 23:17:08.153181", "id": "PRO-04", "starts": "2025-07-09 23:17:08.153180", "status": "SUCCESS", "sub_events": [], "type": "ADD_INDEXES"}, {"data": {"collection_name": "content", "version": "1.0.1.3"}, "ends": "2025-07-09 23:17:08.166645", "id": "PRO-05", "starts": "2025-07-09 23:17:08.153181", "status": "SUCCESS", "sub_events": [{"data": {"additionalProperties": false, "bsonType": "object", "properties": {"_id": {"bsonType": "objectId"}, "author_id": {"additionalProperties": false, "bsonType": "object", "properties": {"_id": {"bsonType": "objectId"}, "first_name": {"bsonType": "string", "pattern": "^\\S{1,40}$"}, "last_name": {"bsonType": "string", "pattern": "^\\S{1,40}$"}, "last_saved": {"additionalProperties": false, "bsonType": "object", "properties": {"at_time": {"bsonType": "date"}, "by_user": {"bsonType": "string", "pattern": "^\\S{1,40}$"}, "correlation_id": {"bsonType": "string", "pattern": "^\\S{1,40}$"}, "from_ip": {"bsonType": "string", "pattern": "^((25[0-5]|2[0-4][0-9]|[01]?[0-9][0-9]?)\\.){3}(25[0-5]|2[0-4][0-9]|[01]?[0-9][0-9]?)$"}}, "required": ["from_ip", "by_user", "at_time", "correlation_id"]}, "status": {"bsonType": "string", "enum": ["draft", "active", "archived"]}, "user_name": {"bsonType": "string", "pattern": "^\\S{1,40}$"}}, "required": ["_id", "user_name", "status", "last_saved"]}, "content_data": {"additionalProperties": false, "bsonType": "object", "oneOf": [{"additionalProperties": false, "bsonType": "object", "properties": {"body": {"bsonType": "string", "maxLength": 4096}, "tags": {"bsonType": "array", "items": {"bsonType": "string", "enum": ["technology", "business", "entertainment", "education", "news"]}}, "word_count": {"bsonType": "int", "minimum": 1}}, "required": 
["body"]}, {"additionalProperties": false, "bsonType": "object", "properties": {"duration": {"bsonType": "int", "minimum": 1}, "quality": {"bsonType": "string", "enum": ["sd", "hd", "uhd"]}, "url": {"bsonType": "string", "pattern": "^https?://[^\\s]+$"}}, "required": ["url"]}, {"additionalProperties": false, "bsonType": "object", "properties": {"audio_url": {"bsonType": "string", "pattern": "^https?://[^\\s]+$"}, "episode_number": {"bsonType": "int", "minimum": 1}, "transcript": {"bsonType": "string", "maxLength": 4096}}, "required": ["audio_url"]}], "properties": {}}, "content_type": {"bsonType": "string", "enum": ["article", "video", "podcast"]}, "full_title": {"bsonType": "string", "pattern": "^[^\\t\\n\\r]{0,255}$"}, "last_saved": {"additionalProperties": false, "bsonType": "object", "properties": {"at_time": {"bsonType": "date"}, "by_user": {"bsonType": "string", "pattern": "^\\S{1,40}$"}, "correlation_id": {"bsonType": "string", "pattern": "^\\S{1,40}$"}, "from_ip": {"bsonType": "string", "pattern": "^((25[0-5]|2[0-4][0-9]|[01]?[0-9][0-9]?)\\.){3}(25[0-5]|2[0-4][0-9]|[01]?[0-9][0-9]?)$"}}, "required": ["from_ip", "by_user", "at_time", "correlation_id"]}, "metadata": {"additionalProperties": false, "bsonType": "object", "properties": {"categories": {"bsonType": "array", "items": {"additionalProperties": false, "bsonType": "object", "properties": {"name": {"bsonType": "string", "pattern": "^\\S{1,40}$"}, "tags": {"bsonType": "array", "items": {"bsonType": "string", "enum": ["urgent", "important", "normal", "low", "completed", "in_progress", "blocked", "review"]}}, "type": {"bsonType": "string", "enum": ["work", "personal", "project", "reference"]}}, "required": ["name", "type"]}}, "created_at": {"bsonType": "date"}, "published_at": {"bsonType": "date"}, "updated_at": {"bsonType": "date"}}, "required": ["created_at"]}, "status": {"bsonType": "string", "enum": ["draft", "active", "archived"]}, "subtitle": {"bsonType": "string", "pattern": "^[^\\t\\n\\r]{0,255}$"}, "title": {"bsonType": "string", "pattern": "^\\S{1,40}$"}}, "required": ["_id", "title", "content_type", "status", "content_data", "last_saved"]}, "ends": "2025-07-09 23:17:08.166643", "id": "MON-10", "starts": "2025-07-09 23:17:08.164224", "status": "SUCCESS", "sub_events": [], "type": "APPLY_SCHEMA"}], "type": "APPLY_SCHEMA_VALIDATION"}, {"data": {"error": "Test data loading operation failed"}, "ends": "2025-07-09 23:17:08.167723", "id": "PRO-06", "starts": "2025-07-09 23:17:08.166646", "status": "FAILURE", "sub_events": [{"data": {"error": "Bulk write operation failed", "nInserted": 0, "nMatched": 0, "nModified": 0, "nRemoved": 0, "nUpserted": 0, "upserted": [], "writeConcernErrors": [], "writeErrors": [{"code": 121, "errInfo": {"details": {"operatorName": "$jsonSchema", "schemaRulesNotSatisfied": [{"operatorName": "properties", "propertiesNotSatisfied": [{"details": [{"consideredType": "string", "consideredValue": "content_006", "operatorName": "bsonType", "reason": "type did not match", "specifiedAs": {"bsonType": "objectId"}}], "propertyName": "_id"}, {"details": [{"consideredValue": "MongoDB Aggregation Framework", "operatorName": "pattern", "reason": "regular expression did not match", "specifiedAs": {"pattern": "^\\S{1,40}$"}}], "propertyName": "title"}, {"details": [{"consideredValue": "published", "operatorName": "enum", "reason": "value was not found in enum", "specifiedAs": {"enum": ["draft", "active", "archived"]}}], "propertyName": "status"}, {"details": [{"consideredType": "string", "consideredValue": 
"A00000000000000000000001", "operatorName": "bsonType", "reason": "type did not match", "specifiedAs": {"bsonType": "object"}}], "propertyName": "author_id"}, {"details": [{"additionalProperties": ["body", "tags", "word_count"], "operatorName": "additionalProperties", "specifiedAs": {"additionalProperties": false}}], "propertyName": "content_data"}, {"details": [{"operatorName": "properties", "propertiesNotSatisfied": [{"details": [{"consideredType": "string", "consideredValue": "2024-02-01T09:00:00Z", "operatorName": "bsonType", "reason": "type did not match", "specifiedAs": {"bsonType": "date"}}], "propertyName": "created_at"}, {"details": [{"consideredType": "string", "consideredValue": "2024-02-02T14:30:00Z", "operatorName": "bsonType", "reason": "type did not match", "specifiedAs": {"bsonType": "date"}}], "propertyName": "updated_at"}, {"details": [{"consideredType": "string", "consideredValue": "2024-02-02T14:30:00Z", "operatorName": "bsonType", "reason": "type did not match", "specifiedAs": {"bsonType": "date"}}], "propertyName": "published_at"}, {"details": [{"details": [{"operatorName": "properties", "propertiesNotSatisfied": [{"details": [{"consideredValue": "Advanced Topics", "operatorName": "pattern", "reason": "regular expression did not match", "specifiedAs": {"pattern": "^\\S{1,40}$"}}], "propertyName": "name"}, {"details": [{"consideredValue": "education", "operatorName": "enum", "reason": "value was not found in enum", "specifiedAs": {"enum": ["work", "personal", "project", "reference"]}}], "propertyName": "type"}]}], "itemIndex": 0, "operatorName": "items", "reason": "At least one item did not match the sub-schema"}], "propertyName": "categories"}]}], "propertyName": "metadata"}, {"details": [{"operatorName": "properties", "propertiesNotSatisfied": [{"details": [{"consideredType": "string", "consideredValue": "2024-02-02T14:30:00Z", "operatorName": "bsonType", "reason": "type did not match", "specifiedAs": {"bsonType": "date"}}], "propertyName": "at_time"}]}], "propertyName": "last_saved"}]}]}, "failingDocumentId": "content_006"}, "errmsg": "Document failed validation", "index": 0, "op": {"_id": "content_006", "author_id": "A00000000000000000000001", "content_data": {"body": "# MongoDB Aggregation Framework\n\nThe aggregation framework provides powerful data processing capabilities...", "tags": ["technology", "education"], "word_count": 4100}, "content_type": "article", "full_title": "MongoDB Aggregation Framework - Advanced data processing techniques", "last_saved": {"at_time": "2024-02-02T14:30:00Z", "by_user": "admin", "correlation_id": "corr_013", "from_ip": "192.168.1.112"}, "metadata": {"categories": [{"name": "Advanced Topics", "tags": ["important", "normal"], "type": "education"}], "created_at": "2024-02-01T09:00:00Z", "published_at": "2024-02-02T14:30:00Z", "updated_at": "2024-02-02T14:30:00Z"}, "status": "published", "subtitle": "Advanced data processing techniques", "title": "MongoDB Aggregation Framework"}}]}, "ends": "2025-07-09 23:17:08.167715", "id": "MON-11", "starts": "2025-07-09 23:17:08.166648", "status": "FAILURE", "sub_events": [], "type": "LOAD_DATA"}], "type": "LOAD_TEST_DATA"}, {"data": {"collection_name": "content", "new_version": "1.0.1.3", "version_number": "content.1.0.1.3"}, "ends": "2025-07-09 23:17:08.168733", "id": "PRO-07", "starts": "2025-07-09 23:17:08.167725", "status": "SUCCESS", "sub_events": [], "type": "UPDATE_VERSION"}], "type": "PROCESS"}, {"data": [{"enumerators": {}, "name": "Enumerations", "status": "Deprecated", "version": 0}, 
{"enumerators": {"category_type": {"personal": "Personal items", "project": "Project specific items", "reference": "Reference materials", "work": "Work related items"}, "default_status": {"active": "Not Deleted", "archived": "Soft Delete Indicator"}, "delivery_channel": {"email": "Email delivery", "in_app": "In-app notification", "push": "Push notification", "sms": "SMS delivery"}, "media_format": {"bluray": "Blu-ray format", "digital": "Digital format", "dvd": "DVD format", "streaming": "Streaming format"}, "media_quality": {"hd": "High definition", "sd": "Standard definition", "uhd": "Ultra high definition"}, "media_status": {"archived": "No longer available", "draft": "Not yet published", "published": "Available to users"}, "media_tags": {"action": "Action genre", "comedy": "Comedy genre", "documentary": "Documentary genre", "drama": "Drama genre", "sci_fi": "Science fiction genre"}, "media_type": {"documentary": "A non-fiction film", "movie": "A motion picture", "short": "A short film", "tv_show": "A television series"}, "notification_action": {"created": "Document created", "deleted": "Document deleted", "published": "Document published", "updated": "Document updated"}, "notification_tags": {"important": "Important notification", "low": "Low priority notification", "normal": "Normal notification", "urgent": "Urgent notification"}, "notification_type": {"content": "Content notification", "reminder": "Reminder notification", "system": "System notification", "user": "User notification"}, "priority_level": {"critical": "Critical priority", "high": "High priority", "low": "Low priority", "medium": "Medium priority"}}, "name": "Enumerations", "status": "Active", "version": 1}, {"enumerators": {"default_status": {"active": "Not deleted", "archived": "Soft delete indicator", "draft": "Not finalized"}, "media_format": {"bluray": "Blu-ray format", "digital": "Digital format", "dvd": "DVD format", "streaming": "Streaming format"}, "media_quality": {"hd": "High definition", "sd": "Standard definition", "uhd": "Ultra high definition"}, "media_status": {"archived": "No longer available", "draft": "Not yet published", "published": "Available to users"}, "media_tags": {"action": "Action genre", "comedy": "Comedy genre", "documentary": "Documentary genre", "drama": "Drama genre", "sci_fi": "Science fiction genre"}, "media_type": {"documentary": "A non-fiction film", "movie": "A motion picture", "short": "A short film", "tv_show": "A television series"}}, "name": "Enumerations", "status": "Active", "version": 2}, {"enumerators": {"category_tags": {"blocked": "Cannot proceed", "completed": "Task is done", "important": "High priority", "in_progress": "Currently being worked on", "low": "Low priority", "normal": "Standard priority", "review": "Needs review", "urgent": "Requires immediate attention"}, "category_type": {"personal": "Personal items", "project": "Project specific items", "reference": "Reference materials", "work": "Work related items"}, "content_tags": {"business": "Business related content", "education": "Educational content", "entertainment": "Entertainment content", "news": "News content", "technology": "Technology related content"}, "content_type": {"article": "Written content", "podcast": "Audio content", "video": "Video content"}, "default_status": {"active": "Not deleted", "archived": "Soft delete indicator", "draft": "Not finalized"}, "delivery_channel": {"email": "Email delivery", "in_app": "In-app notification", "push": "Push notification", "sms": "SMS delivery"}, "media_format": 
{"bluray": "Blu-ray format", "digital": "Digital format", "dvd": "DVD format", "streaming": "Streaming format"}, "media_quality": {"hd": "High definition", "sd": "Standard definition", "uhd": "Ultra high definition"}, "media_status": {"archived": "No longer available", "draft": "Not yet published", "published": "Available to users"}, "media_tags": {"action": "Action genre", "comedy": "Comedy genre", "documentary": "Documentary genre", "drama": "Drama genre", "sci_fi": "Science fiction genre"}, "media_type": {"documentary": "A non-fiction film", "movie": "A motion picture", "short": "A short film", "tv_show": "A television series"}, "notification_action": {"created": "Document created", "deleted": "Document deleted", "published": "Document published", "updated": "Document updated"}, "notification_tags": {"important": "Important notification", "low": "Low priority notification", "normal": "Normal notification", "urgent": "Urgent notification"}, "notification_type": {"content": "Content notification", "reminder": "Reminder notification", "system": "System notification", "user": "User notification"}, "priority_level": {"critical": "Critical priority", "high": "High priority", "low": "Low priority", "medium": "Medium priority"}, "tags": {"admin": "An administrator", "super": "A super user", "user": "A User"}, "type": {"check": "Select multiple options", "radio": "Select one option", "text": "Enter a text string"}}, "name": "Enumerations", "status": "Active", "version": 3}], "ends": "2025-07-09 23:17:08.171163", "id": "PRO-08", "starts": "2025-07-09 23:17:08.168738", "status": "SUCCESS", "sub_events": [], "type": "LOAD_ENUMERATORS"}], "type": "PROCESS"}, {"data": {"configuration_file": "organization.yaml", "configuration_name": "organization", "configuration_title": "Organization Collection", "version_count": 1}, "ends": "2025-07-09 23:17:08.210617", "id": "CFG-00", "starts": "2025-07-09 23:17:08.172264", "status": "SUCCESS", "sub_events": [{"data": {"add_indexes_count": 3, "collection_name": "organization", "drop_indexes_count": 0, "has_test_data": true, "migrations_count": 0, "test_data_file": "organization.1.0.0.1.json", "version": "1.0.0.1"}, "ends": "2025-07-09 23:17:08.207861", "id": "organization.1.0.0.1", "starts": "2025-07-09 23:17:08.179552", "status": "SUCCESS", "sub_events": [{"data": {"collection_name": "organization", "version": "1.0.0.1"}, "ends": "2025-07-09 23:17:08.185619", "id": "PRO-01", "starts": "2025-07-09 23:17:08.179553", "status": "SUCCESS", "sub_events": [{"data": {"collection": "organization", "operation": "schema_validation_removed"}, "ends": "2025-07-09 23:17:08.185618", "id": "MON-06", "starts": "2025-07-09 23:17:08.179554", "status": "SUCCESS", "sub_events": [], "type": "REMOVE_SCHEMA"}], "type": "REMOVE_SCHEMA_VALIDATION"}, {"data": {"collection_name": "organization", "message": "No indexes to drop", "version": "1.0.0.1"}, "ends": "2025-07-09 23:17:08.185622", "id": "PRO-02", "starts": "2025-07-09 23:17:08.185621", "status": "SUCCESS", "sub_events": [], "type": "REMOVE_INDEXES"}, {"data": {"collection_name": "organization", "message": "No migrations to execute", "version": "1.0.0.1"}, "ends": "2025-07-09 23:17:08.185623", "id": "PRO-03", "starts": "2025-07-09 23:17:08.185622", "status": "SUCCESS", "sub_events": [], "type": "EXECUTE_MIGRATIONS"}, {"data": {"collection_name": "organization", "index_count": 3, "indexes_to_add": [{"key": {"name": 1}, "name": "name", "options": {"unique": true}}, {"key": {"status": 1}, "name": "status", "options": {"unique": false}}, 
{"key": {"last_saved.saved_at": 1}, "name": "last_saved", "options": {"unique": false}}], "version": "1.0.0.1"}, "ends": "2025-07-09 23:17:08.194626", "id": "PRO-04", "starts": "2025-07-09 23:17:08.185624", "status": "SUCCESS", "sub_events": [{"data": {"collection": "organization", "index_keys": {"name": 1}, "index_name": "name", "operation": "created"}, "ends": "2025-07-09 23:17:08.188615", "id": "MON-09", "starts": "2025-07-09 23:17:08.185625", "status": "SUCCESS", "sub_events": [], "type": "ADD_INDEX"}, {"data": {"collection": "organization", "index_keys": {"status": 1}, "index_name": "status", "operation": "created"}, "ends": "2025-07-09 23:17:08.191661", "id": "MON-09", "starts": "2025-07-09 23:17:08.188617", "status": "SUCCESS", "sub_events": [], "type": "ADD_INDEX"}, {"data": {"collection": "organization", "index_keys": {"last_saved.saved_at": 1}, "index_name": "last_saved", "operation": "created"}, "ends": "2025-07-09 23:17:08.194623", "id": "MON-09", "starts": "2025-07-09 23:17:08.191663", "status": "SUCCESS", "sub_events": [], "type": "ADD_INDEX"}], "type": "ADD_INDEXES"}, {"data": {"collection_name": "organization", "version": "1.0.0.1"}, "ends": "2025-07-09 23:17:08.201775", "id": "PRO-05", "starts": "2025-07-09 23:17:08.194627", "status": "SUCCESS", "sub_events": [{"data": {"additionalProperties": false, "bsonType": "object", "properties": {"_id": {"bsonType": "objectId"}, "home_address": {"additionalProperties": false, "bsonType": "object", "properties": {"city": {"bsonType": "string", "pattern": "^\\S{1,40}$"}, "postal_code": {"bsonType": "string", "pattern": "^\\S{1,40}$"}, "state": {"bsonType": "string", "pattern": "^[A-Z]{2}$"}, "street": {"bsonType": "string", "pattern": "^[^\\t\\n\\r]{0,255}$"}}, "required": ["street", "postal_code"]}, "last_saved": {"additionalProperties": false, "bsonType": "object", "properties": {"at_time": {"bsonType": "date"}, "by_user": {"bsonType": "string", "pattern": "^\\S{1,40}$"}, "correlation_id": {"bsonType": "string", "pattern": "^\\S{1,40}$"}, "from_ip": {"bsonType": "string", "pattern": "^((25[0-5]|2[0-4][0-9]|[01]?[0-9][0-9]?)\\.){3}(25[0-5]|2[0-4][0-9]|[01]?[0-9][0-9]?)$"}}, "required": ["from_ip", "by_user", "at_time", "correlation_id"]}, "name": {"bsonType": "string", "pattern": "^[^\\t\\n\\r]{0,255}$"}, "status": {"bsonType": "string", "enum": ["active", "archived"]}, "users": {"bsonType": "array", "items": {"bsonType": "objectId"}}, "website": {"bsonType": "string", "pattern": "^https?://[^\\s]+$"}}, "required": ["_id", "name", "status", "last_saved"]}, "ends": "2025-07-09 23:17:08.201773", "id": "MON-10", "starts": "2025-07-09 23:17:08.199424", "status": "SUCCESS", "sub_events": [], "type": "APPLY_SCHEMA"}], "type": "APPLY_SCHEMA_VALIDATION"}, {"data": {"collection_name": "organization", "test_data_file": "organization.1.0.0.1.json", "test_data_path": "./tests/test_cases/large_sample/test_data/organization.1.0.0.1.json", "version": "1.0.0.1"}, "ends": "2025-07-09 23:17:08.205804", "id": "PRO-06", "starts": "2025-07-09 23:17:08.201776", "status": "SUCCESS", "sub_events": [{"data": {"collection": "organization", "data_file": "organization.1.0.0.1.json", "documents_loaded": 2, "insert_many_result": {"acknowledged": true, "inserted_ids": ["b00000000000000000000001", "b00000000000000000000002"]}}, "ends": "2025-07-09 23:17:08.205799", "id": "MON-11", "starts": "2025-07-09 23:17:08.201779", "status": "SUCCESS", "sub_events": [], "type": "LOAD_DATA"}], "type": "LOAD_TEST_DATA"}, {"data": {"collection_name": "organization", 
"new_version": "1.0.0.1", "version_number": "organization.1.0.0.1"}, "ends": "2025-07-09 23:17:08.207860", "id": "PRO-07", "starts": "2025-07-09 23:17:08.205806", "status": "SUCCESS", "sub_events": [], "type": "UPDATE_VERSION"}], "type": "PROCESS"}, {"data": [{"enumerators": {}, "name": "Enumerations", "status": "Deprecated", "version": 0}, {"enumerators": {"category_type": {"personal": "Personal items", "project": "Project specific items", "reference": "Reference materials", "work": "Work related items"}, "default_status": {"active": "Not Deleted", "archived": "Soft Delete Indicator"}, "delivery_channel": {"email": "Email delivery", "in_app": "In-app notification", "push": "Push notification", "sms": "SMS delivery"}, "media_format": {"bluray": "Blu-ray format", "digital": "Digital format", "dvd": "DVD format", "streaming": "Streaming format"}, "media_quality": {"hd": "High definition", "sd": "Standard definition", "uhd": "Ultra high definition"}, "media_status": {"archived": "No longer available", "draft": "Not yet published", "published": "Available to users"}, "media_tags": {"action": "Action genre", "comedy": "Comedy genre", "documentary": "Documentary genre", "drama": "Drama genre", "sci_fi": "Science fiction genre"}, "media_type": {"documentary": "A non-fiction film", "movie": "A motion picture", "short": "A short film", "tv_show": "A television series"}, "notification_action": {"created": "Document created", "deleted": "Document deleted", "published": "Document published", "updated": "Document updated"}, "notification_tags": {"important": "Important notification", "low": "Low priority notification", "normal": "Normal notification", "urgent": "Urgent notification"}, "notification_type": {"content": "Content notification", "reminder": "Reminder notification", "system": "System notification", "user": "User notification"}, "priority_level": {"critical": "Critical priority", "high": "High priority", "low": "Low priority", "medium": "Medium priority"}}, "name": "Enumerations", "status": "Active", "version": 1}, {"enumerators": {"default_status": {"active": "Not deleted", "archived": "Soft delete indicator", "draft": "Not finalized"}, "media_format": {"bluray": "Blu-ray format", "digital": "Digital format", "dvd": "DVD format", "streaming": "Streaming format"}, "media_quality": {"hd": "High definition", "sd": "Standard definition", "uhd": "Ultra high definition"}, "media_status": {"archived": "No longer available", "draft": "Not yet published", "published": "Available to users"}, "media_tags": {"action": "Action genre", "comedy": "Comedy genre", "documentary": "Documentary genre", "drama": "Drama genre", "sci_fi": "Science fiction genre"}, "media_type": {"documentary": "A non-fiction film", "movie": "A motion picture", "short": "A short film", "tv_show": "A television series"}}, "name": "Enumerations", "status": "Active", "version": 2}, {"enumerators": {"category_tags": {"blocked": "Cannot proceed", "completed": "Task is done", "important": "High priority", "in_progress": "Currently being worked on", "low": "Low priority", "normal": "Standard priority", "review": "Needs review", "urgent": "Requires immediate attention"}, "category_type": {"personal": "Personal items", "project": "Project specific items", "reference": "Reference materials", "work": "Work related items"}, "content_tags": {"business": "Business related content", "education": "Educational content", "entertainment": "Entertainment content", "news": "News content", "technology": "Technology related content"}, "content_type": 
{"article": "Written content", "podcast": "Audio content", "video": "Video content"}, "default_status": {"active": "Not deleted", "archived": "Soft delete indicator", "draft": "Not finalized"}, "delivery_channel": {"email": "Email delivery", "in_app": "In-app notification", "push": "Push notification", "sms": "SMS delivery"}, "media_format": {"bluray": "Blu-ray format", "digital": "Digital format", "dvd": "DVD format", "streaming": "Streaming format"}, "media_quality": {"hd": "High definition", "sd": "Standard definition", "uhd": "Ultra high definition"}, "media_status": {"archived": "No longer available", "draft": "Not yet published", "published": "Available to users"}, "media_tags": {"action": "Action genre", "comedy": "Comedy genre", "documentary": "Documentary genre", "drama": "Drama genre", "sci_fi": "Science fiction genre"}, "media_type": {"documentary": "A non-fiction film", "movie": "A motion picture", "short": "A short film", "tv_show": "A television series"}, "notification_action": {"created": "Document created", "deleted": "Document deleted", "published": "Document published", "updated": "Document updated"}, "notification_tags": {"important": "Important notification", "low": "Low priority notification", "normal": "Normal notification", "urgent": "Urgent notification"}, "notification_type": {"content": "Content notification", "reminder": "Reminder notification", "system": "System notification", "user": "User notification"}, "priority_level": {"critical": "Critical priority", "high": "High priority", "low": "Low priority", "medium": "Medium priority"}, "tags": {"admin": "An administrator", "super": "A super user", "user": "A User"}, "type": {"check": "Select multiple options", "radio": "Select one option", "text": "Enter a text string"}}, "name": "Enumerations", "status": "Active", "version": 3}], "ends": "2025-07-09 23:17:08.210616", "id": "PRO-08", "starts": "2025-07-09 23:17:08.207864", "status": "SUCCESS", "sub_events": [], "type": "LOAD_ENUMERATORS"}], "type": "PROCESS"}, {"data": {"configuration_file": "notification.yaml", "configuration_name": "notification", "configuration_title": "Notification Collection", "version_count": 3}, "ends": "2025-07-09 23:17:08.287666", "id": "CFG-00", "starts": "2025-07-09 23:17:08.211996", "status": "SUCCESS", "sub_events": [{"data": {"add_indexes_count": 2, "collection_name": "notification", "drop_indexes_count": 0, "has_test_data": true, "migrations_count": 0, "test_data_file": "notification.1.0.0.1.json", "version": "1.0.0.1"}, "ends": "2025-07-09 23:17:08.241858", "id": "notification.1.0.0.1", "starts": "2025-07-09 23:17:08.217734", "status": "SUCCESS", "sub_events": [{"data": {"collection_name": "notification", "version": "1.0.0.1"}, "ends": "2025-07-09 23:17:08.223643", "id": "PRO-01", "starts": "2025-07-09 23:17:08.217736", "status": "SUCCESS", "sub_events": [{"data": {"collection": "notification", "operation": "schema_validation_removed"}, "ends": "2025-07-09 23:17:08.223641", "id": "MON-06", "starts": "2025-07-09 23:17:08.217737", "status": "SUCCESS", "sub_events": [], "type": "REMOVE_SCHEMA"}], "type": "REMOVE_SCHEMA_VALIDATION"}, {"data": {"collection_name": "notification", "message": "No indexes to drop", "version": "1.0.0.1"}, "ends": "2025-07-09 23:17:08.223646", "id": "PRO-02", "starts": "2025-07-09 23:17:08.223645", "status": "SUCCESS", "sub_events": [], "type": "REMOVE_INDEXES"}, {"data": {"collection_name": "notification", "message": "No migrations to execute", "version": "1.0.0.1"}, "ends": "2025-07-09 23:17:08.223648", 
"id": "PRO-03", "starts": "2025-07-09 23:17:08.223647", "status": "SUCCESS", "sub_events": [], "type": "EXECUTE_MIGRATIONS"}, {"data": {"collection_name": "notification", "index_count": 2, "indexes_to_add": [{"key": {"user_id": 1}, "name": "userIndex", "options": {"unique": false}}, {"key": {"status": 1}, "name": "statusIndex", "options": {"unique": false}}], "version": "1.0.0.1"}, "ends": "2025-07-09 23:17:08.229554", "id": "PRO-04", "starts": "2025-07-09 23:17:08.223648", "status": "SUCCESS", "sub_events": [{"data": {"collection": "notification", "index_keys": {"user_id": 1}, "index_name": "userIndex", "operation": "created"}, "ends": "2025-07-09 23:17:08.226734", "id": "MON-09", "starts": "2025-07-09 23:17:08.223650", "status": "SUCCESS", "sub_events": [], "type": "ADD_INDEX"}, {"data": {"collection": "notification", "index_keys": {"status": 1}, "index_name": "statusIndex", "operation": "created"}, "ends": "2025-07-09 23:17:08.229551", "id": "MON-09", "starts": "2025-07-09 23:17:08.226738", "status": "SUCCESS", "sub_events": [], "type": "ADD_INDEX"}], "type": "ADD_INDEXES"}, {"data": {"collection_name": "notification", "version": "1.0.0.1"}, "ends": "2025-07-09 23:17:08.238831", "id": "PRO-05", "starts": "2025-07-09 23:17:08.229555", "status": "SUCCESS", "sub_events": [{"data": {"additionalProperties": false, "bsonType": "object", "properties": {"_id": {"bsonType": "objectId"}, "last_saved": {"additionalProperties": false, "bsonType": "object", "properties": {"at_time": {"bsonType": "date"}, "by_user": {"bsonType": "string", "pattern": "^\\S{1,40}$"}, "correlation_id": {"bsonType": "string", "pattern": "^\\S{1,40}$"}, "from_ip": {"bsonType": "string", "pattern": "^((25[0-5]|2[0-4][0-9]|[01]?[0-9][0-9]?)\\.){3}(25[0-5]|2[0-4][0-9]|[01]?[0-9][0-9]?)$"}}, "required": ["from_ip", "by_user", "at_time", "correlation_id"]}, "message": {"bsonType": "string", "pattern": "^[^\\t\\n\\r]{0,255}$"}, "status": {"bsonType": "string", "enum": ["active", "archived"]}, "title": {"bsonType": "string", "pattern": "^\\S{1,40}$"}, "user_id": {"additionalProperties": false, "bsonType": "object", "properties": {"_id": {"bsonType": "objectId"}, "first_name": {"bsonType": "string", "pattern": "^\\S{1,40}$"}, "last_name": {"bsonType": "string", "pattern": "^\\S{1,40}$"}, "last_saved": {"additionalProperties": false, "bsonType": "object", "properties": {"at_time": {"bsonType": "date"}, "by_user": {"bsonType": "string", "pattern": "^\\S{1,40}$"}, "correlation_id": {"bsonType": "string", "pattern": "^\\S{1,40}$"}, "from_ip": {"bsonType": "string", "pattern": "^((25[0-5]|2[0-4][0-9]|[01]?[0-9][0-9]?)\\.){3}(25[0-5]|2[0-4][0-9]|[01]?[0-9][0-9]?)$"}}, "required": ["from_ip", "by_user", "at_time", "correlation_id"]}, "status": {"bsonType": "string", "enum": ["active", "archived"]}, "user_name": {"bsonType": "string", "pattern": "^\\S{1,40}$"}}, "required": ["_id", "user_name", "status", "last_saved"]}}, "required": ["_id", "user_id", "title", "message", "status", "last_saved"]}, "ends": "2025-07-09 23:17:08.238830", "id": "MON-10", "starts": "2025-07-09 23:17:08.236351", "status": "SUCCESS", "sub_events": [], "type": "APPLY_SCHEMA"}], "type": "APPLY_SCHEMA_VALIDATION"}, {"data": {"error": "Test data loading operation failed"}, "ends": "2025-07-09 23:17:08.240026", "id": "PRO-06", "starts": "2025-07-09 23:17:08.238832", "status": "FAILURE", "sub_events": [{"data": {"error": "Bulk write operation failed", "nInserted": 0, "nMatched": 0, "nModified": 0, "nRemoved": 0, "nUpserted": 0, "upserted": [], "writeConcernErrors": 
[], "writeErrors": [{"code": 121, "errInfo": {"details": {"operatorName": "$jsonSchema", "schemaRulesNotSatisfied": [{"operatorName": "properties", "propertiesNotSatisfied": [{"details": [{"consideredType": "string", "consideredValue": "notification_001", "operatorName": "bsonType", "reason": "type did not match", "specifiedAs": {"bsonType": "objectId"}}], "propertyName": "_id"}, {"details": [{"consideredType": "string", "consideredValue": "A00000000000000000000001", "operatorName": "bsonType", "reason": "type did not match", "specifiedAs": {"bsonType": "object"}}], "propertyName": "user_id"}, {"details": [{"consideredValue": "Welcome to the Platform", "operatorName": "pattern", "reason": "regular expression did not match", "specifiedAs": {"pattern": "^\\S{1,40}$"}}], "propertyName": "title"}, {"details": [{"operatorName": "properties", "propertiesNotSatisfied": [{"details": [{"consideredType": "string", "consideredValue": "2024-01-15T10:05:00Z", "operatorName": "bsonType", "reason": "type did not match", "specifiedAs": {"bsonType": "date"}}], "propertyName": "at_time"}]}], "propertyName": "last_saved"}]}]}, "failingDocumentId": "notification_001"}, "errmsg": "Document failed validation", "index": 0, "op": {"_id": "notification_001", "last_saved": {"at_time": "2024-01-15T10:05:00Z", "by_user": "system", "correlation_id": "corr_001", "from_ip": "192.168.1.100"}, "message": "Thank you for joining our platform!", "status": "active", "title": "Welcome to the Platform", "user_id": "A00000000000000000000001"}}]}, "ends": "2025-07-09 23:17:08.240020", "id": "MON-11", "starts": "2025-07-09 23:17:08.238836", "status": "FAILURE", "sub_events": [], "type": "LOAD_DATA"}], "type": "LOAD_TEST_DATA"}, {"data": {"collection_name": "notification", "new_version": "1.0.0.1", "version_number": "notification.1.0.0.1"}, "ends": "2025-07-09 23:17:08.241858", "id": "PRO-07", "starts": "2025-07-09 23:17:08.240028", "status": "SUCCESS", "sub_events": [], "type": "UPDATE_VERSION"}], "type": "PROCESS"}, {"data": {"add_indexes_count": 1, "collection_name": "notification", "drop_indexes_count": 0, "has_test_data": true, "migrations_count": 0, "test_data_file": "notification.1.0.0.2.json", "version": "1.0.0.2"}, "ends": "2025-07-09 23:17:08.257811", "id": "notification.1.0.0.2", "starts": "2025-07-09 23:17:08.242471", "status": "SUCCESS", "sub_events": [{"data": {"collection_name": "notification", "version": "1.0.0.2"}, "ends": "2025-07-09 23:17:08.243564", "id": "PRO-01", "starts": "2025-07-09 23:17:08.242472", "status": "SUCCESS", "sub_events": [{"data": {"collection": "notification", "operation": "schema_validation_removed"}, "ends": "2025-07-09 23:17:08.243563", "id": "MON-06", "starts": "2025-07-09 23:17:08.242473", "status": "SUCCESS", "sub_events": [], "type": "REMOVE_SCHEMA"}], "type": "REMOVE_SCHEMA_VALIDATION"}, {"data": {"collection_name": "notification", "message": "No indexes to drop", "version": "1.0.0.2"}, "ends": "2025-07-09 23:17:08.243566", "id": "PRO-02", "starts": "2025-07-09 23:17:08.243565", "status": "SUCCESS", "sub_events": [], "type": "REMOVE_INDEXES"}, {"data": {"collection_name": "notification", "message": "No migrations to execute", "version": "1.0.0.2"}, "ends": "2025-07-09 23:17:08.243567", "id": "PRO-03", "starts": "2025-07-09 23:17:08.243567", "status": "SUCCESS", "sub_events": [], "type": "EXECUTE_MIGRATIONS"}, {"data": {"collection_name": "notification", "index_count": 1, "indexes_to_add": [{"key": {"notification_type": 1}, "name": "typeIndex", "options": {"unique": false}}], "version": 
"1.0.0.2"}, "ends": "2025-07-09 23:17:08.246662", "id": "PRO-04", "starts": "2025-07-09 23:17:08.243568", "status": "SUCCESS", "sub_events": [{"data": {"collection": "notification", "index_keys": {"notification_type": 1}, "index_name": "typeIndex", "operation": "created"}, "ends": "2025-07-09 23:17:08.246659", "id": "MON-09", "starts": "2025-07-09 23:17:08.243570", "status": "SUCCESS", "sub_events": [], "type": "ADD_INDEX"}], "type": "ADD_INDEXES"}, {"data": {"collection_name": "notification", "version": "1.0.0.2"}, "ends": "2025-07-09 23:17:08.254625", "id": "PRO-05", "starts": "2025-07-09 23:17:08.246663", "status": "SUCCESS", "sub_events": [{"data": {"additionalProperties": false, "bsonType": "object", "properties": {"_id": {"bsonType": "objectId"}, "last_saved": {"additionalProperties": false, "bsonType": "object", "properties": {"at_time": {"bsonType": "date"}, "by_user": {"bsonType": "string", "pattern": "^\\S{1,40}$"}, "correlation_id": {"bsonType": "string", "pattern": "^\\S{1,40}$"}, "from_ip": {"bsonType": "string", "pattern": "^((25[0-5]|2[0-4][0-9]|[01]?[0-9][0-9]?)\\.){3}(25[0-5]|2[0-4][0-9]|[01]?[0-9][0-9]?)$"}}, "required": ["from_ip", "by_user", "at_time", "correlation_id"]}, "message": {"bsonType": "string", "pattern": "^[^\\t\\n\\r]{0,255}$"}, "status": {"bsonType": "string", "enum": ["draft", "active", "archived"]}, "title": {"bsonType": "string", "pattern": "^\\S{1,40}$"}, "user_id": {"additionalProperties": false, "bsonType": "object", "properties": {"_id": {"bsonType": "objectId"}, "first_name": {"bsonType": "string", "pattern": "^\\S{1,40}$"}, "last_name": {"bsonType": "string", "pattern": "^\\S{1,40}$"}, "last_saved": {"additionalProperties": false, "bsonType": "object", "properties": {"at_time": {"bsonType": "date"}, "by_user": {"bsonType": "string", "pattern": "^\\S{1,40}$"}, "correlation_id": {"bsonType": "string", "pattern": "^\\S{1,40}$"}, "from_ip": {"bsonType": "string", "pattern": "^((25[0-5]|2[0-4][0-9]|[01]?[0-9][0-9]?)\\.){3}(25[0-5]|2[0-4][0-9]|[01]?[0-9][0-9]?)$"}}, "required": ["from_ip", "by_user", "at_time", "correlation_id"]}, "status": {"bsonType": "string", "enum": ["draft", "active", "archived"]}, "user_name": {"bsonType": "string", "pattern": "^\\S{1,40}$"}}, "required": ["_id", "user_name", "status", "last_saved"]}}, "required": ["_id", "user_id", "title", "message", "status", "last_saved"]}, "ends": "2025-07-09 23:17:08.254623", "id": "MON-10", "starts": "2025-07-09 23:17:08.253093", "status": "SUCCESS", "sub_events": [], "type": "APPLY_SCHEMA"}], "type": "APPLY_SCHEMA_VALIDATION"}, {"data": {"error": "Test data loading operation failed"}, "ends": "2025-07-09 23:17:08.255600", "id": "PRO-06", "starts": "2025-07-09 23:17:08.254626", "status": "FAILURE", "sub_events": [{"data": {"error": "Bulk write operation failed", "nInserted": 0, "nMatched": 0, "nModified": 0, "nRemoved": 0, "nUpserted": 0, "upserted": [], "writeConcernErrors": [], "writeErrors": [{"code": 121, "errInfo": {"details": {"operatorName": "$jsonSchema", "schemaRulesNotSatisfied": [{"operatorName": "properties", "propertiesNotSatisfied": [{"details": [{"consideredType": "string", "consideredValue": "notification_004", "operatorName": "bsonType", "reason": "type did not match", "specifiedAs": {"bsonType": "objectId"}}], "propertyName": "_id"}, {"details": [{"consideredType": "string", "consideredValue": "A00000000000000000000001", "operatorName": "bsonType", "reason": "type did not match", "specifiedAs": {"bsonType": "object"}}], "propertyName": "user_id"}, {"details": 
[{"consideredValue": "Profile Update Required", "operatorName": "pattern", "reason": "regular expression did not match", "specifiedAs": {"pattern": "^\\S{1,40}$"}}], "propertyName": "title"}, {"details": [{"operatorName": "properties", "propertiesNotSatisfied": [{"details": [{"consideredType": "string", "consideredValue": "2024-01-25T11:05:00Z", "operatorName": "bsonType", "reason": "type did not match", "specifiedAs": {"bsonType": "date"}}], "propertyName": "at_time"}]}], "propertyName": "last_saved"}]}]}, "failingDocumentId": "notification_004"}, "errmsg": "Document failed validation", "index": 0, "op": {"_id": "notification_004", "last_saved": {"at_time": "2024-01-25T11:05:00Z", "by_user": "system", "correlation_id": "corr_004", "from_ip": "192.168.1.103"}, "message": "Please update your profile information to continue using the platform.", "status": "active", "title": "Profile Update Required", "user_id": "A00000000000000000000001"}}]}, "ends": "2025-07-09 23:17:08.255593", "id": "MON-11", "starts": "2025-07-09 23:17:08.254629", "status": "FAILURE", "sub_events": [], "type": "LOAD_DATA"}], "type": "LOAD_TEST_DATA"}, {"data": {"collection_name": "notification", "new_version": "1.0.0.2", "version_number": "notification.1.0.0.2"}, "ends": "2025-07-09 23:17:08.257811", "id": "PRO-07", "starts": "2025-07-09 23:17:08.255601", "status": "SUCCESS", "sub_events": [], "type": "UPDATE_VERSION"}], "type": "PROCESS"}, {"data": {"add_indexes_count": 0, "collection_name": "notification", "drop_indexes_count": 0, "has_test_data": true, "migrations_count": 1, "test_data_file": "notification.1.0.1.3.json", "version": "1.0.1.3"}, "ends": "2025-07-09 23:17:08.284946", "id": "notification.1.0.1.3", "starts": "2025-07-09 23:17:08.258394", "status": "SUCCESS", "sub_events": [{"data": {"collection_name": "notification", "version": "1.0.1.3"}, "ends": "2025-07-09 23:17:08.259818", "id": "PRO-01", "starts": "2025-07-09 23:17:08.258396", "status": "SUCCESS", "sub_events": [{"data": {"collection": "notification", "operation": "schema_validation_removed"}, "ends": "2025-07-09 23:17:08.259816", "id": "MON-06", "starts": "2025-07-09 23:17:08.258397", "status": "SUCCESS", "sub_events": [], "type": "REMOVE_SCHEMA"}], "type": "REMOVE_SCHEMA_VALIDATION"}, {"data": {"collection_name": "notification", "message": "No indexes to drop", "version": "1.0.1.3"}, "ends": "2025-07-09 23:17:08.259820", "id": "PRO-02", "starts": "2025-07-09 23:17:08.259819", "status": "SUCCESS", "sub_events": [], "type": "REMOVE_INDEXES"}, {"data": {"collection_name": "notification", "migration_count": 1, "migration_files": ["notification_merge_notification_data.json"], "version": "1.0.1.3"}, "ends": "2025-07-09 23:17:08.269820", "id": "PRO-03", "starts": "2025-07-09 23:17:08.259820", "status": "SUCCESS", "sub_events": [{"data": {"collection": "notification", "migration_file": "notification_merge_notification_data.json", "migration_path": "./tests/test_cases/large_sample/migrations/notification_merge_notification_data.json", "pipeline_operations": ["$addFields", "$unset", "$out"], "pipeline_stages": 3, "pipeline_summary": [{"details": {"$addFields": {"full_message": {"$concat": ["$title", ": ", "$message"]}}}, "operation": "$addFields", "stage": 1}, {"details": {"$unset": ["message"]}, "operation": "$unset", "stage": 2}, {"details": {"$out": "notification"}, "operation": "$out", "stage": 3}]}, "ends": "2025-07-09 23:17:08.269818", "id": "MON-14", "starts": "2025-07-09 23:17:08.259826", "status": "SUCCESS", "sub_events": [{"data": {"file": 
"./tests/test_cases/large_sample/migrations/notification_merge_notification_data.json", "file_name": "notification_merge_notification_data.json", "pipeline_operations": ["$addFields", "$unset", "$out"], "pipeline_stages": 3}, "ends": "2025-07-09 23:17:08.259908", "id": "MON-13", "starts": "2025-07-09 23:17:08.259828", "status": "SUCCESS", "sub_events": [], "type": "LOAD_MIGRATION"}, {"data": {"collection": "notification"}, "ends": "2025-07-09 23:17:08.269809", "id": "MON-08", "starts": "2025-07-09 23:17:08.259911", "status": "SUCCESS", "sub_events": [], "type": "EXECUTE_MIGRATION"}], "type": "EXECUTE_MIGRATION_FILE"}], "type": "EXECUTE_MIGRATIONS"}, {"data": {"collection_name": "notification", "message": "No indexes to add", "version": "1.0.1.3"}, "ends": "2025-07-09 23:17:08.269823", "id": "PRO-04", "starts": "2025-07-09 23:17:08.269822", "status": "SUCCESS", "sub_events": [], "type": "ADD_INDEXES"}, {"data": {"collection_name": "notification", "version": "1.0.1.3"}, "ends": "2025-07-09 23:17:08.281788", "id": "PRO-05", "starts": "2025-07-09 23:17:08.269823", "status": "SUCCESS", "sub_events": [{"data": {"additionalProperties": false, "bsonType": "object", "properties": {"_id": {"bsonType": "objectId"}, "categories": {"bsonType": "array", "items": {"bsonType": "string", "enum": ["work", "personal", "project", "reference"]}}, "channels": {"bsonType": "array", "items": {"bsonType": "string", "enum": ["email", "sms", "push", "in_app"]}}, "last_saved": {"additionalProperties": false, "bsonType": "object", "properties": {"at_time": {"bsonType": "date"}, "by_user": {"bsonType": "string", "pattern": "^\\S{1,40}$"}, "correlation_id": {"bsonType": "string", "pattern": "^\\S{1,40}$"}, "from_ip": {"bsonType": "string", "pattern": "^((25[0-5]|2[0-4][0-9]|[01]?[0-9][0-9]?)\\.){3}(25[0-5]|2[0-4][0-9]|[01]?[0-9][0-9]?)$"}}, "required": ["from_ip", "by_user", "at_time", "correlation_id"]}, "message": {"bsonType": "string", "pattern": "^[^\\t\\n\\r]{0,255}$"}, "metadata": {"additionalProperties": false, "bsonType": "object", "properties": {"created_at": {"bsonType": "date"}, "expires_at": {"bsonType": "date"}, "read_at": {"bsonType": "date"}, "sent_at": {"bsonType": "date"}, "source": {"additionalProperties": false, "bsonType": "object", "properties": {"action": {"bsonType": "string", "enum": ["created", "updated", "deleted", "published"]}, "collection": {"bsonType": "string", "pattern": "^\\S{1,40}$"}, "document_id": {"bsonType": "objectId"}}, "required": ["collection", "document_id"]}}, "required": ["created_at"]}, "notification_type": {"bsonType": "string", "enum": ["system", "user", "content", "reminder"]}, "priority": {"bsonType": "string", "enum": ["critical", "high", "medium", "low"]}, "status": {"bsonType": "string", "enum": ["draft", "active", "archived"]}, "tags": {"bsonType": "array", "items": {"bsonType": "string", "enum": ["urgent", "important", "normal", "low"]}}, "title": {"bsonType": "string", "pattern": "^\\S{1,40}$"}, "user_id": {"additionalProperties": false, "bsonType": "object", "properties": {"_id": {"bsonType": "objectId"}, "first_name": {"bsonType": "string", "pattern": "^\\S{1,40}$"}, "last_name": {"bsonType": "string", "pattern": "^\\S{1,40}$"}, "last_saved": {"additionalProperties": false, "bsonType": "object", "properties": {"at_time": {"bsonType": "date"}, "by_user": {"bsonType": "string", "pattern": "^\\S{1,40}$"}, "correlation_id": {"bsonType": "string", "pattern": "^\\S{1,40}$"}, "from_ip": {"bsonType": "string", "pattern": 
"^((25[0-5]|2[0-4][0-9]|[01]?[0-9][0-9]?)\\.){3}(25[0-5]|2[0-4][0-9]|[01]?[0-9][0-9]?)$"}}, "required": ["from_ip", "by_user", "at_time", "correlation_id"]}, "status": {"bsonType": "string", "enum": ["draft", "active", "archived"]}, "user_name": {"bsonType": "string", "pattern": "^\\S{1,40}$"}}, "required": ["_id", "user_name", "status", "last_saved"]}}, "required": ["_id", "user_id", "title", "message", "notification_type", "status", "priority", "channels", "last_saved"]}, "ends": "2025-07-09 23:17:08.281786", "id": "MON-10", "starts": "2025-07-09 23:17:08.279300", "status": "SUCCESS", "sub_events": [], "type": "APPLY_SCHEMA"}], "type": "APPLY_SCHEMA_VALIDATION"}, {"data": {"error": "Test data loading operation failed"}, "ends": "2025-07-09 23:17:08.282822", "id": "PRO-06", "starts": "2025-07-09 23:17:08.281789", "status": "FAILURE", "sub_events": [{"data": {"error": "Bulk write operation failed", "nInserted": 0, "nMatched": 0, "nModified": 0, "nRemoved": 0, "nUpserted": 0, "upserted": [], "writeConcernErrors": [], "writeErrors": [{"code": 121, "errInfo": {"details": {"operatorName": "$jsonSchema", "schemaRulesNotSatisfied": [{"operatorName": "properties", "propertiesNotSatisfied": [{"details": [{"consideredType": "string", "consideredValue": "notification_006", "operatorName": "bsonType", "reason": "type did not match", "specifiedAs": {"bsonType": "objectId"}}], "propertyName": "_id"}, {"details": [{"consideredType": "string", "consideredValue": "A00000000000000000000001", "operatorName": "bsonType", "reason": "type did not match", "specifiedAs": {"bsonType": "object"}}], "propertyName": "user_id"}, {"details": [{"consideredValue": "System Maintenance", "operatorName": "pattern", "reason": "regular expression did not match", "specifiedAs": {"pattern": "^\\S{1,40}$"}}], "propertyName": "title"}, {"details": [{"operatorName": "properties", "propertiesNotSatisfied": [{"details": [{"consideredType": "string", "consideredValue": "2024-02-01T08:00:00Z", "operatorName": "bsonType", "reason": "type did not match", "specifiedAs": {"bsonType": "date"}}], "propertyName": "created_at"}, {"details": [{"consideredType": "string", "consideredValue": "2024-02-01T08:05:00Z", "operatorName": "bsonType", "reason": "type did not match", "specifiedAs": {"bsonType": "date"}}], "propertyName": "sent_at"}, {"details": [{"consideredType": "string", "consideredValue": "2024-02-01T10:00:00Z", "operatorName": "bsonType", "reason": "type did not match", "specifiedAs": {"bsonType": "date"}}], "propertyName": "expires_at"}, {"details": [{"operatorName": "properties", "propertiesNotSatisfied": [{"details": [{"consideredType": "string", "consideredValue": "maintenance_001", "operatorName": "bsonType", "reason": "type did not match", "specifiedAs": {"bsonType": "objectId"}}], "propertyName": "document_id"}]}], "propertyName": "source"}]}], "propertyName": "metadata"}, {"details": [{"operatorName": "properties", "propertiesNotSatisfied": [{"details": [{"consideredType": "string", "consideredValue": "2024-02-01T08:05:00Z", "operatorName": "bsonType", "reason": "type did not match", "specifiedAs": {"bsonType": "date"}}], "propertyName": "at_time"}]}], "propertyName": "last_saved"}]}, {"additionalProperties": ["full_message"], "operatorName": "additionalProperties", "specifiedAs": {"additionalProperties": false}}]}, "failingDocumentId": "notification_006"}, "errmsg": "Document failed validation", "index": 0, "op": {"_id": "notification_006", "categories": ["work"], "channels": ["email", "sms", "push", "in_app"], 
"full_message": "System Maintenance: Scheduled maintenance will begin in 2 hours.", "last_saved": {"at_time": "2024-02-01T08:05:00Z", "by_user": "system", "correlation_id": "corr_006", "from_ip": "192.168.1.105"}, "message": "Scheduled maintenance will begin in 2 hours.", "metadata": {"created_at": "2024-02-01T08:00:00Z", "expires_at": "2024-02-01T10:00:00Z", "sent_at": "2024-02-01T08:05:00Z", "source": {"action": "created", "collection": "system", "document_id": "maintenance_001"}}, "notification_type": "system", "priority": "critical", "status": "active", "tags": ["urgent"], "title": "System Maintenance", "user_id": "A00000000000000000000001"}}]}, "ends": "2025-07-09 23:17:08.282815", "id": "MON-11", "starts": "2025-07-09 23:17:08.281793", "status": "FAILURE", "sub_events": [], "type": "LOAD_DATA"}], "type": "LOAD_TEST_DATA"}, {"data": {"collection_name": "notification", "new_version": "1.0.1.3", "version_number": "notification.1.0.1.3"}, "ends": "2025-07-09 23:17:08.284945", "id": "PRO-07", "starts": "2025-07-09 23:17:08.282823", "status": "SUCCESS", "sub_events": [], "type": "UPDATE_VERSION"}], "type": "PROCESS"}, {"data": [{"enumerators": {}, "name": "Enumerations", "status": "Deprecated", "version": 0}, {"enumerators": {"category_type": {"personal": "Personal items", "project": "Project specific items", "reference": "Reference materials", "work": "Work related items"}, "default_status": {"active": "Not Deleted", "archived": "Soft Delete Indicator"}, "delivery_channel": {"email": "Email delivery", "in_app": "In-app notification", "push": "Push notification", "sms": "SMS delivery"}, "media_format": {"bluray": "Blu-ray format", "digital": "Digital format", "dvd": "DVD format", "streaming": "Streaming format"}, "media_quality": {"hd": "High definition", "sd": "Standard definition", "uhd": "Ultra high definition"}, "media_status": {"archived": "No longer available", "draft": "Not yet published", "published": "Available to users"}, "media_tags": {"action": "Action genre", "comedy": "Comedy genre", "documentary": "Documentary genre", "drama": "Drama genre", "sci_fi": "Science fiction genre"}, "media_type": {"documentary": "A non-fiction film", "movie": "A motion picture", "short": "A short film", "tv_show": "A television series"}, "notification_action": {"created": "Document created", "deleted": "Document deleted", "published": "Document published", "updated": "Document updated"}, "notification_tags": {"important": "Important notification", "low": "Low priority notification", "normal": "Normal notification", "urgent": "Urgent notification"}, "notification_type": {"content": "Content notification", "reminder": "Reminder notification", "system": "System notification", "user": "User notification"}, "priority_level": {"critical": "Critical priority", "high": "High priority", "low": "Low priority", "medium": "Medium priority"}}, "name": "Enumerations", "status": "Active", "version": 1}, {"enumerators": {"default_status": {"active": "Not deleted", "archived": "Soft delete indicator", "draft": "Not finalized"}, "media_format": {"bluray": "Blu-ray format", "digital": "Digital format", "dvd": "DVD format", "streaming": "Streaming format"}, "media_quality": {"hd": "High definition", "sd": "Standard definition", "uhd": "Ultra high definition"}, "media_status": {"archived": "No longer available", "draft": "Not yet published", "published": "Available to users"}, "media_tags": {"action": "Action genre", "comedy": "Comedy genre", "documentary": "Documentary genre", "drama": "Drama genre", "sci_fi": 
"Science fiction genre"}, "media_type": {"documentary": "A non-fiction film", "movie": "A motion picture", "short": "A short film", "tv_show": "A television series"}}, "name": "Enumerations", "status": "Active", "version": 2}, {"enumerators": {"category_tags": {"blocked": "Cannot proceed", "completed": "Task is done", "important": "High priority", "in_progress": "Currently being worked on", "low": "Low priority", "normal": "Standard priority", "review": "Needs review", "urgent": "Requires immediate attention"}, "category_type": {"personal": "Personal items", "project": "Project specific items", "reference": "Reference materials", "work": "Work related items"}, "content_tags": {"business": "Business related content", "education": "Educational content", "entertainment": "Entertainment content", "news": "News content", "technology": "Technology related content"}, "content_type": {"article": "Written content", "podcast": "Audio content", "video": "Video content"}, "default_status": {"active": "Not deleted", "archived": "Soft delete indicator", "draft": "Not finalized"}, "delivery_channel": {"email": "Email delivery", "in_app": "In-app notification", "push": "Push notification", "sms": "SMS delivery"}, "media_format": {"bluray": "Blu-ray format", "digital": "Digital format", "dvd": "DVD format", "streaming": "Streaming format"}, "media_quality": {"hd": "High definition", "sd": "Standard definition", "uhd": "Ultra high definition"}, "media_status": {"archived": "No longer available", "draft": "Not yet published", "published": "Available to users"}, "media_tags": {"action": "Action genre", "comedy": "Comedy genre", "documentary": "Documentary genre", "drama": "Drama genre", "sci_fi": "Science fiction genre"}, "media_type": {"documentary": "A non-fiction film", "movie": "A motion picture", "short": "A short film", "tv_show": "A television series"}, "notification_action": {"created": "Document created", "deleted": "Document deleted", "published": "Document published", "updated": "Document updated"}, "notification_tags": {"important": "Important notification", "low": "Low priority notification", "normal": "Normal notification", "urgent": "Urgent notification"}, "notification_type": {"content": "Content notification", "reminder": "Reminder notification", "system": "System notification", "user": "User notification"}, "priority_level": {"critical": "Critical priority", "high": "High priority", "low": "Low priority", "medium": "Medium priority"}, "tags": {"admin": "An administrator", "super": "A super user", "user": "A User"}, "type": {"check": "Select multiple options", "radio": "Select one option", "text": "Enter a text string"}}, "name": "Enumerations", "status": "Active", "version": 3}], "ends": "2025-07-09 23:17:08.287665", "id": "PRO-08", "starts": "2025-07-09 23:17:08.284950", "status": "SUCCESS", "sub_events": [], "type": "LOAD_ENUMERATORS"}], "type": "PROCESS"}, {"data": {"configuration_file": "media.yaml", "configuration_name": "media", "configuration_title": "Media Collection", "version_count": 1}, "ends": "2025-07-09 23:17:08.333221", "id": "CFG-00", "starts": "2025-07-09 23:17:08.288853", "status": "SUCCESS", "sub_events": [{"data": {"add_indexes_count": 3, "collection_name": "media", "drop_indexes_count": 0, "has_test_data": true, "migrations_count": 0, "test_data_file": "media.1.0.0.1.json", "version": "1.0.0.1"}, "ends": "2025-07-09 23:17:08.330742", "id": "media.1.0.0.1", "starts": "2025-07-09 23:17:08.294631", "status": "SUCCESS", "sub_events": [{"data": {"collection_name": "media", 
"version": "1.0.0.1"}, "ends": "2025-07-09 23:17:08.303573", "id": "PRO-01", "starts": "2025-07-09 23:17:08.294633", "status": "SUCCESS", "sub_events": [{"data": {"collection": "media", "operation": "schema_validation_removed"}, "ends": "2025-07-09 23:17:08.303571", "id": "MON-06", "starts": "2025-07-09 23:17:08.294634", "status": "SUCCESS", "sub_events": [], "type": "REMOVE_SCHEMA"}], "type": "REMOVE_SCHEMA_VALIDATION"}, {"data": {"collection_name": "media", "message": "No indexes to drop", "version": "1.0.0.1"}, "ends": "2025-07-09 23:17:08.303577", "id": "PRO-02", "starts": "2025-07-09 23:17:08.303575", "status": "SUCCESS", "sub_events": [], "type": "REMOVE_INDEXES"}, {"data": {"collection_name": "media", "message": "No migrations to execute", "version": "1.0.0.1"}, "ends": "2025-07-09 23:17:08.303578", "id": "PRO-03", "starts": "2025-07-09 23:17:08.303577", "status": "SUCCESS", "sub_events": [], "type": "EXECUTE_MIGRATIONS"}, {"data": {"collection_name": "media", "index_count": 3, "indexes_to_add": [{"key": {"title": 1}, "name": "title", "options": {"unique": true}}, {"key": {"status": 1}, "name": "status", "options": {"unique": false}}, {"key": {"last_saved.saved_at": 1}, "name": "last_saved", "options": {"unique": false}}], "version": "1.0.0.1"}, "ends": "2025-07-09 23:17:08.315998", "id": "PRO-04", "starts": "2025-07-09 23:17:08.303579", "status": "SUCCESS", "sub_events": [{"data": {"collection": "media", "index_keys": {"title": 1}, "index_name": "title", "operation": "created"}, "ends": "2025-07-09 23:17:08.307978", "id": "MON-09", "starts": "2025-07-09 23:17:08.303581", "status": "SUCCESS", "sub_events": [], "type": "ADD_INDEX"}, {"data": {"collection": "media", "index_keys": {"status": 1}, "index_name": "status", "operation": "created"}, "ends": "2025-07-09 23:17:08.311846", "id": "MON-09", "starts": "2025-07-09 23:17:08.307982", "status": "SUCCESS", "sub_events": [], "type": "ADD_INDEX"}, {"data": {"collection": "media", "index_keys": {"last_saved.saved_at": 1}, "index_name": "last_saved", "operation": "created"}, "ends": "2025-07-09 23:17:08.315994", "id": "MON-09", "starts": "2025-07-09 23:17:08.311849", "status": "SUCCESS", "sub_events": [], "type": "ADD_INDEX"}], "type": "ADD_INDEXES"}, {"data": {"collection_name": "media", "version": "1.0.0.1"}, "ends": "2025-07-09 23:17:08.326148", "id": "PRO-05", "starts": "2025-07-09 23:17:08.316000", "status": "SUCCESS", "sub_events": [{"data": {"additionalProperties": false, "bsonType": "object", "properties": {"_id": {"bsonType": "objectId"}, "last_saved": {"additionalProperties": false, "bsonType": "object", "properties": {"at_time": {"bsonType": "date"}, "by_user": {"bsonType": "string", "pattern": "^\\S{1,40}$"}, "correlation_id": {"bsonType": "string", "pattern": "^\\S{1,40}$"}, "from_ip": {"bsonType": "string", "pattern": "^((25[0-5]|2[0-4][0-9]|[01]?[0-9][0-9]?)\\.){3}(25[0-5]|2[0-4][0-9]|[01]?[0-9][0-9]?)$"}}, "required": ["from_ip", "by_user", "at_time", "correlation_id"]}, "metadata": {"additionalProperties": false, "bsonType": "object", "properties": {"content_data": {"additionalProperties": false, "bsonType": "object", "oneOf": [{"additionalProperties": false, "bsonType": "object", "properties": {"cast": {"bsonType": "array", "items": {"bsonType": "string", "pattern": "^\\S{1,40}$"}}, "director": {"bsonType": "string", "pattern": "^\\S{1,40}$"}, "genre": {"bsonType": "array", "items": {"bsonType": "string", "enum": ["action", "comedy", "drama", "sci_fi", "documentary"]}}}, "required": ["director"]}, {"additionalProperties": 
false, "bsonType": "object", "properties": {"episode": {"bsonType": "int", "minimum": 1}, "network": {"bsonType": "string", "pattern": "^\\S{1,40}$"}, "season": {"bsonType": "int", "minimum": 1}}, "required": ["season", "episode"]}, {"additionalProperties": false, "bsonType": "object", "properties": {"narrator": {"bsonType": "string", "pattern": "^\\S{1,40}$"}, "subject": {"bsonType": "string", "pattern": "^[^\\t\\n\\r]{0,255}$"}}, "required": ["subject"]}, {"additionalProperties": false, "bsonType": "object", "properties": {"awards": {"bsonType": "array", "items": {"bsonType": "string", "pattern": "^\\S{1,40}$"}}, "festival": {"bsonType": "string", "pattern": "^\\S{1,40}$"}}}], "properties": {}}, "duration": {"bsonType": "int", "minimum": 1}, "format": {"bsonType": "string", "enum": ["dvd", "bluray", "digital", "streaming"]}, "quality": {"bsonType": "string", "enum": ["sd", "hd", "uhd"]}}}, "status": {"bsonType": "string", "enum": ["draft", "published", "archived"]}, "tags": {"bsonType": "array", "items": {"bsonType": "string", "enum": ["action", "comedy", "drama", "sci_fi", "documentary"]}}, "title": {"bsonType": "string", "pattern": "^[^\\t\\n\\r]{0,255}$"}, "type": {"bsonType": "string", "enum": ["movie", "tv_show", "documentary", "short"]}}, "required": ["_id", "title", "status", "last_saved"]}, "ends": "2025-07-09 23:17:08.326142", "id": "MON-10", "starts": "2025-07-09 23:17:08.323078", "status": "SUCCESS", "sub_events": [], "type": "APPLY_SCHEMA"}], "type": "APPLY_SCHEMA_VALIDATION"}, {"data": {"collection_name": "media", "test_data_file": "media.1.0.0.1.json", "test_data_path": "./tests/test_cases/large_sample/test_data/media.1.0.0.1.json", "version": "1.0.0.1"}, "ends": "2025-07-09 23:17:08.328786", "id": "PRO-06", "starts": "2025-07-09 23:17:08.326150", "status": "SUCCESS", "sub_events": [{"data": {"collection": "media", "data_file": "media.1.0.0.1.json", "documents_loaded": 2, "insert_many_result": {"acknowledged": true, "inserted_ids": ["c00000000000000000000001", "c00000000000000000000002"]}}, "ends": "2025-07-09 23:17:08.328779", "id": "MON-11", "starts": "2025-07-09 23:17:08.326155", "status": "SUCCESS", "sub_events": [], "type": "LOAD_DATA"}], "type": "LOAD_TEST_DATA"}, {"data": {"collection_name": "media", "new_version": "1.0.0.1", "version_number": "media.1.0.0.1"}, "ends": "2025-07-09 23:17:08.330741", "id": "PRO-07", "starts": "2025-07-09 23:17:08.328787", "status": "SUCCESS", "sub_events": [], "type": "UPDATE_VERSION"}], "type": "PROCESS"}, {"data": [{"enumerators": {}, "name": "Enumerations", "status": "Deprecated", "version": 0}, {"enumerators": {"category_type": {"personal": "Personal items", "project": "Project specific items", "reference": "Reference materials", "work": "Work related items"}, "default_status": {"active": "Not Deleted", "archived": "Soft Delete Indicator"}, "delivery_channel": {"email": "Email delivery", "in_app": "In-app notification", "push": "Push notification", "sms": "SMS delivery"}, "media_format": {"bluray": "Blu-ray format", "digital": "Digital format", "dvd": "DVD format", "streaming": "Streaming format"}, "media_quality": {"hd": "High definition", "sd": "Standard definition", "uhd": "Ultra high definition"}, "media_status": {"archived": "No longer available", "draft": "Not yet published", "published": "Available to users"}, "media_tags": {"action": "Action genre", "comedy": "Comedy genre", "documentary": "Documentary genre", "drama": "Drama genre", "sci_fi": "Science fiction genre"}, "media_type": {"documentary": "A non-fiction film", 
"movie": "A motion picture", "short": "A short film", "tv_show": "A television series"}, "notification_action": {"created": "Document created", "deleted": "Document deleted", "published": "Document published", "updated": "Document updated"}, "notification_tags": {"important": "Important notification", "low": "Low priority notification", "normal": "Normal notification", "urgent": "Urgent notification"}, "notification_type": {"content": "Content notification", "reminder": "Reminder notification", "system": "System notification", "user": "User notification"}, "priority_level": {"critical": "Critical priority", "high": "High priority", "low": "Low priority", "medium": "Medium priority"}}, "name": "Enumerations", "status": "Active", "version": 1}, {"enumerators": {"default_status": {"active": "Not deleted", "archived": "Soft delete indicator", "draft": "Not finalized"}, "media_format": {"bluray": "Blu-ray format", "digital": "Digital format", "dvd": "DVD format", "streaming": "Streaming format"}, "media_quality": {"hd": "High definition", "sd": "Standard definition", "uhd": "Ultra high definition"}, "media_status": {"archived": "No longer available", "draft": "Not yet published", "published": "Available to users"}, "media_tags": {"action": "Action genre", "comedy": "Comedy genre", "documentary": "Documentary genre", "drama": "Drama genre", "sci_fi": "Science fiction genre"}, "media_type": {"documentary": "A non-fiction film", "movie": "A motion picture", "short": "A short film", "tv_show": "A television series"}}, "name": "Enumerations", "status": "Active", "version": 2}, {"enumerators": {"category_tags": {"blocked": "Cannot proceed", "completed": "Task is done", "important": "High priority", "in_progress": "Currently being worked on", "low": "Low priority", "normal": "Standard priority", "review": "Needs review", "urgent": "Requires immediate attention"}, "category_type": {"personal": "Personal items", "project": "Project specific items", "reference": "Reference materials", "work": "Work related items"}, "content_tags": {"business": "Business related content", "education": "Educational content", "entertainment": "Entertainment content", "news": "News content", "technology": "Technology related content"}, "content_type": {"article": "Written content", "podcast": "Audio content", "video": "Video content"}, "default_status": {"active": "Not deleted", "archived": "Soft delete indicator", "draft": "Not finalized"}, "delivery_channel": {"email": "Email delivery", "in_app": "In-app notification", "push": "Push notification", "sms": "SMS delivery"}, "media_format": {"bluray": "Blu-ray format", "digital": "Digital format", "dvd": "DVD format", "streaming": "Streaming format"}, "media_quality": {"hd": "High definition", "sd": "Standard definition", "uhd": "Ultra high definition"}, "media_status": {"archived": "No longer available", "draft": "Not yet published", "published": "Available to users"}, "media_tags": {"action": "Action genre", "comedy": "Comedy genre", "documentary": "Documentary genre", "drama": "Drama genre", "sci_fi": "Science fiction genre"}, "media_type": {"documentary": "A non-fiction film", "movie": "A motion picture", "short": "A short film", "tv_show": "A television series"}, "notification_action": {"created": "Document created", "deleted": "Document deleted", "published": "Document published", "updated": "Document updated"}, "notification_tags": {"important": "Important notification", "low": "Low priority notification", "normal": "Normal notification", "urgent": "Urgent 
notification"}, "notification_type": {"content": "Content notification", "reminder": "Reminder notification", "system": "System notification", "user": "User notification"}, "priority_level": {"critical": "Critical priority", "high": "High priority", "low": "Low priority", "medium": "Medium priority"}, "tags": {"admin": "An administrator", "super": "A super user", "user": "A User"}, "type": {"check": "Select multiple options", "radio": "Select one option", "text": "Enter a text string"}}, "name": "Enumerations", "status": "Active", "version": 3}], "ends": "2025-07-09 23:17:08.333219", "id": "PRO-08", "starts": "2025-07-09 23:17:08.330746", "status": "SUCCESS", "sub_events": [], "type": "LOAD_ENUMERATORS"}], "type": "PROCESS"}, {"data": {"configuration_file": "user.yaml", "configuration_name": "user", "configuration_title": "User Collection", "version_count": 3}, "ends": "2025-07-09 23:17:08.400328", "id": "CFG-00", "starts": "2025-07-09 23:17:08.334669", "status": "SUCCESS", "sub_events": [{"data": {"add_indexes_count": 3, "collection_name": "user", "drop_indexes_count": 0, "has_test_data": true, "migrations_count": 0, "test_data_file": "user.1.0.0.1.json", "version": "1.0.0.1"}, "ends": "2025-07-09 23:17:08.364577", "id": "user.1.0.0.1", "starts": "2025-07-09 23:17:08.340539", "status": "SUCCESS", "sub_events": [{"data": {"collection_name": "user", "version": "1.0.0.1"}, "ends": "2025-07-09 23:17:08.346510", "id": "PRO-01", "starts": "2025-07-09 23:17:08.340540", "status": "SUCCESS", "sub_events": [{"data": {"collection": "user", "operation": "schema_validation_removed"}, "ends": "2025-07-09 23:17:08.346508", "id": "MON-06", "starts": "2025-07-09 23:17:08.340541", "status": "SUCCESS", "sub_events": [], "type": "REMOVE_SCHEMA"}], "type": "REMOVE_SCHEMA_VALIDATION"}, {"data": {"collection_name": "user", "message": "No indexes to drop", "version": "1.0.0.1"}, "ends": "2025-07-09 23:17:08.346513", "id": "PRO-02", "starts": "2025-07-09 23:17:08.346511", "status": "SUCCESS", "sub_events": [], "type": "REMOVE_INDEXES"}, {"data": {"collection_name": "user", "message": "No migrations to execute", "version": "1.0.0.1"}, "ends": "2025-07-09 23:17:08.346514", "id": "PRO-03", "starts": "2025-07-09 23:17:08.346513", "status": "SUCCESS", "sub_events": [], "type": "EXECUTE_MIGRATIONS"}, {"data": {"collection_name": "user", "index_count": 3, "indexes_to_add": [{"key": {"userName": 1}, "name": "nameIndex", "options": {"unique": true}}, {"key": {"status": 1}, "name": "statusIndex", "options": {"unique": false}}, {"key": {"last_saved.saved_at": 1}, "name": "savedIndex", "options": {"unique": false}}], "version": "1.0.0.1"}, "ends": "2025-07-09 23:17:08.356541", "id": "PRO-04", "starts": "2025-07-09 23:17:08.346515", "status": "SUCCESS", "sub_events": [{"data": {"collection": "user", "index_keys": {"userName": 1}, "index_name": "nameIndex", "operation": "created"}, "ends": "2025-07-09 23:17:08.350689", "id": "MON-09", "starts": "2025-07-09 23:17:08.346516", "status": "SUCCESS", "sub_events": [], "type": "ADD_INDEX"}, {"data": {"collection": "user", "index_keys": {"status": 1}, "index_name": "statusIndex", "operation": "created"}, "ends": "2025-07-09 23:17:08.353738", "id": "MON-09", "starts": "2025-07-09 23:17:08.350691", "status": "SUCCESS", "sub_events": [], "type": "ADD_INDEX"}, {"data": {"collection": "user", "index_keys": {"last_saved.saved_at": 1}, "index_name": "savedIndex", "operation": "created"}, "ends": "2025-07-09 23:17:08.356538", "id": "MON-09", "starts": "2025-07-09 23:17:08.353740", "status": 
"SUCCESS", "sub_events": [], "type": "ADD_INDEX"}], "type": "ADD_INDEXES"}, {"data": {"collection_name": "user", "version": "1.0.0.1"}, "ends": "2025-07-09 23:17:08.361701", "id": "PRO-05", "starts": "2025-07-09 23:17:08.356542", "status": "SUCCESS", "sub_events": [{"data": {"additionalProperties": false, "bsonType": "object", "properties": {"_id": {"bsonType": "objectId"}, "first_name": {"bsonType": "string", "pattern": "^\\S{1,40}$"}, "last_name": {"bsonType": "string", "pattern": "^\\S{1,40}$"}, "last_saved": {"additionalProperties": false, "bsonType": "object", "properties": {"at_time": {"bsonType": "date"}, "by_user": {"bsonType": "string", "pattern": "^\\S{1,40}$"}, "correlation_id": {"bsonType": "string", "pattern": "^\\S{1,40}$"}, "from_ip": {"bsonType": "string", "pattern": "^((25[0-5]|2[0-4][0-9]|[01]?[0-9][0-9]?)\\.){3}(25[0-5]|2[0-4][0-9]|[01]?[0-9][0-9]?)$"}}, "required": ["from_ip", "by_user", "at_time", "correlation_id"]}, "status": {"bsonType": "string", "enum": ["active", "archived"]}, "user_name": {"bsonType": "string", "pattern": "^\\S{1,40}$"}}, "required": ["_id", "user_name", "status", "last_saved"]}, "ends": "2025-07-09 23:17:08.361699", "id": "MON-10", "starts": "2025-07-09 23:17:08.359944", "status": "SUCCESS", "sub_events": [], "type": "APPLY_SCHEMA"}], "type": "APPLY_SCHEMA_VALIDATION"}, {"data": {"collection_name": "user", "test_data_file": "user.1.0.0.1.json", "test_data_path": "./tests/test_cases/large_sample/test_data/user.1.0.0.1.json", "version": "1.0.0.1"}, "ends": "2025-07-09 23:17:08.363051", "id": "PRO-06", "starts": "2025-07-09 23:17:08.361702", "status": "SUCCESS", "sub_events": [{"data": {"collection": "user", "data_file": "user.1.0.0.1.json", "documents_loaded": 6, "insert_many_result": {"acknowledged": true, "inserted_ids": ["a00000000000000000000001", "a00000000000000000000002", "a00000000000000000000003", "a00000000000000000000004", "a00000000000000000000005", "a00000000000000000000006"]}}, "ends": "2025-07-09 23:17:08.363045", "id": "MON-11", "starts": "2025-07-09 23:17:08.361705", "status": "SUCCESS", "sub_events": [], "type": "LOAD_DATA"}], "type": "LOAD_TEST_DATA"}, {"data": {"collection_name": "user", "new_version": "1.0.0.1", "version_number": "user.1.0.0.1"}, "ends": "2025-07-09 23:17:08.364576", "id": "PRO-07", "starts": "2025-07-09 23:17:08.363052", "status": "SUCCESS", "sub_events": [], "type": "UPDATE_VERSION"}], "type": "PROCESS"}, {"data": {"add_indexes_count": 0, "collection_name": "user", "drop_indexes_count": 1, "has_test_data": true, "migrations_count": 0, "test_data_file": "user.1.0.0.2.json", "version": "1.0.0.2"}, "ends": "2025-07-09 23:17:08.375437", "id": "user.1.0.0.2", "starts": "2025-07-09 23:17:08.365157", "status": "SUCCESS", "sub_events": [{"data": {"collection_name": "user", "version": "1.0.0.2"}, "ends": "2025-07-09 23:17:08.366989", "id": "PRO-01", "starts": "2025-07-09 23:17:08.365158", "status": "SUCCESS", "sub_events": [{"data": {"collection": "user", "operation": "schema_validation_removed"}, "ends": "2025-07-09 23:17:08.366988", "id": "MON-06", "starts": "2025-07-09 23:17:08.365160", "status": "SUCCESS", "sub_events": [], "type": "REMOVE_SCHEMA"}], "type": "REMOVE_SCHEMA_VALIDATION"}, {"data": {"collection_name": "user", "index_count": 1, "indexes_to_drop": ["statusIndex"], "version": "1.0.0.2"}, "ends": "2025-07-09 23:17:08.368655", "id": "PRO-02", "starts": "2025-07-09 23:17:08.366990", "status": "SUCCESS", "sub_events": [{"data": {"collection": "user", "index_name": "statusIndex", "operation": "dropped"}, 
"ends": "2025-07-09 23:17:08.368652", "id": "MON-07", "starts": "2025-07-09 23:17:08.366994", "status": "SUCCESS", "sub_events": [], "type": "REMOVE_INDEX"}], "type": "REMOVE_INDEXES"}, {"data": {"collection_name": "user", "message": "No migrations to execute", "version": "1.0.0.2"}, "ends": "2025-07-09 23:17:08.368657", "id": "PRO-03", "starts": "2025-07-09 23:17:08.368656", "status": "SUCCESS", "sub_events": [], "type": "EXECUTE_MIGRATIONS"}, {"data": {"collection_name": "user", "message": "No indexes to add", "version": "1.0.0.2"}, "ends": "2025-07-09 23:17:08.368658", "id": "PRO-04", "starts": "2025-07-09 23:17:08.368657", "status": "SUCCESS", "sub_events": [], "type": "ADD_INDEXES"}, {"data": {"collection_name": "user", "version": "1.0.0.2"}, "ends": "2025-07-09 23:17:08.373634", "id": "PRO-05", "starts": "2025-07-09 23:17:08.368658", "status": "SUCCESS", "sub_events": [{"data": {"additionalProperties": false, "bsonType": "object", "properties": {"_id": {"bsonType": "objectId"}, "first_name": {"bsonType": "string", "pattern": "^\\S{1,40}$"}, "last_name": {"bsonType": "string", "pattern": "^\\S{1,40}$"}, "last_saved": {"additionalProperties": false, "bsonType": "object", "properties": {"at_time": {"bsonType": "date"}, "by_user": {"bsonType": "string", "pattern": "^\\S{1,40}$"}, "correlation_id": {"bsonType": "string", "pattern": "^\\S{1,40}$"}, "from_ip": {"bsonType": "string", "pattern": "^((25[0-5]|2[0-4][0-9]|[01]?[0-9][0-9]?)\\.){3}(25[0-5]|2[0-4][0-9]|[01]?[0-9][0-9]?)$"}}, "required": ["from_ip", "by_user", "at_time", "correlation_id"]}, "status": {"bsonType": "string", "enum": ["draft", "active", "archived"]}, "user_name": {"bsonType": "string", "pattern": "^\\S{1,40}$"}}, "required": ["_id", "user_name", "status", "last_saved"]}, "ends": "2025-07-09 23:17:08.373632", "id": "MON-10", "starts": "2025-07-09 23:17:08.371996", "status": "SUCCESS", "sub_events": [], "type": "APPLY_SCHEMA"}], "type": "APPLY_SCHEMA_VALIDATION"}, {"data": {"collection_name": "user", "test_data_file": "user.1.0.0.2.json", "test_data_path": "./tests/test_cases/large_sample/test_data/user.1.0.0.2.json", "version": "1.0.0.2"}, "ends": "2025-07-09 23:17:08.374599", "id": "PRO-06", "starts": "2025-07-09 23:17:08.373635", "status": "SUCCESS", "sub_events": [{"data": {"collection": "user", "data_file": "user.1.0.0.2.json", "documents_loaded": 4, "insert_many_result": {"acknowledged": true, "inserted_ids": ["a00000000000000000000010", "a00000000000000000000011", "a00000000000000000000012", "a00000000000000000000013"]}}, "ends": "2025-07-09 23:17:08.374595", "id": "MON-11", "starts": "2025-07-09 23:17:08.373638", "status": "SUCCESS", "sub_events": [], "type": "LOAD_DATA"}], "type": "LOAD_TEST_DATA"}, {"data": {"collection_name": "user", "new_version": "1.0.0.2", "version_number": "user.1.0.0.2"}, "ends": "2025-07-09 23:17:08.375437", "id": "PRO-07", "starts": "2025-07-09 23:17:08.374600", "status": "SUCCESS", "sub_events": [], "type": "UPDATE_VERSION"}], "type": "PROCESS"}, {"data": {"add_indexes_count": 0, "collection_name": "user", "drop_indexes_count": 0, "has_test_data": true, "migrations_count": 1, "test_data_file": "user.1.0.1.3.json", "version": "1.0.1.3"}, "ends": "2025-07-09 23:17:08.397701", "id": "user.1.0.1.3", "starts": "2025-07-09 23:17:08.376060", "status": "SUCCESS", "sub_events": [{"data": {"collection_name": "user", "version": "1.0.1.3"}, "ends": "2025-07-09 23:17:08.377903", "id": "PRO-01", "starts": "2025-07-09 23:17:08.376061", "status": "SUCCESS", "sub_events": [{"data": {"collection": 
"user", "operation": "schema_validation_removed"}, "ends": "2025-07-09 23:17:08.377902", "id": "MON-06", "starts": "2025-07-09 23:17:08.376062", "status": "SUCCESS", "sub_events": [], "type": "REMOVE_SCHEMA"}], "type": "REMOVE_SCHEMA_VALIDATION"}, {"data": {"collection_name": "user", "message": "No indexes to drop", "version": "1.0.1.3"}, "ends": "2025-07-09 23:17:08.377905", "id": "PRO-02", "starts": "2025-07-09 23:17:08.377904", "status": "SUCCESS", "sub_events": [], "type": "REMOVE_INDEXES"}, {"data": {"collection_name": "user", "migration_count": 1, "migration_files": ["user_merge_name_fields.json"], "version": "1.0.1.3"}, "ends": "2025-07-09 23:17:08.385803", "id": "PRO-03", "starts": "2025-07-09 23:17:08.377906", "status": "SUCCESS", "sub_events": [{"data": {"collection": "user", "migration_file": "user_merge_name_fields.json", "migration_path": "./tests/test_cases/large_sample/migrations/user_merge_name_fields.json", "pipeline_operations": ["$addFields", "$unset", "$out"], "pipeline_stages": 3, "pipeline_summary": [{"details": {"$addFields": {"full_name": {"$concat": ["$first_name", " ", "$last_name"]}}}, "operation": "$addFields", "stage": 1}, {"details": {"$unset": ["first_name", "last_name"]}, "operation": "$unset", "stage": 2}, {"details": {"$out": "user"}, "operation": "$out", "stage": 3}]}, "ends": "2025-07-09 23:17:08.385802", "id": "MON-14", "starts": "2025-07-09 23:17:08.377909", "status": "SUCCESS", "sub_events": [{"data": {"file": "./tests/test_cases/large_sample/migrations/user_merge_name_fields.json", "file_name": "user_merge_name_fields.json", "pipeline_operations": ["$addFields", "$unset", "$out"], "pipeline_stages": 3}, "ends": "2025-07-09 23:17:08.377986", "id": "MON-13", "starts": "2025-07-09 23:17:08.377911", "status": "SUCCESS", "sub_events": [], "type": "LOAD_MIGRATION"}, {"data": {"collection": "user"}, "ends": "2025-07-09 23:17:08.385796", "id": "MON-08", "starts": "2025-07-09 23:17:08.377988", "status": "SUCCESS", "sub_events": [], "type": "EXECUTE_MIGRATION"}], "type": "EXECUTE_MIGRATION_FILE"}], "type": "EXECUTE_MIGRATIONS"}, {"data": {"collection_name": "user", "message": "No indexes to add", "version": "1.0.1.3"}, "ends": "2025-07-09 23:17:08.385805", "id": "PRO-04", "starts": "2025-07-09 23:17:08.385804", "status": "SUCCESS", "sub_events": [], "type": "ADD_INDEXES"}, {"data": {"collection_name": "user", "version": "1.0.1.3"}, "ends": "2025-07-09 23:17:08.393825", "id": "PRO-05", "starts": "2025-07-09 23:17:08.385806", "status": "SUCCESS", "sub_events": [{"data": {"additionalProperties": false, "bsonType": "object", "properties": {"_id": {"bsonType": "objectId"}, "categories": {"bsonType": "array", "items": {"additionalProperties": false, "bsonType": "object", "properties": {"category": {"bsonType": "string", "enum": ["work", "personal", "project", "reference"]}, "name": {"bsonType": "string", "pattern": "^\\S{1,40}$"}, "tags": {"bsonType": "array", "items": {"bsonType": "string", "enum": ["urgent", "important", "normal", "low", "completed", "in_progress", "blocked", "review"]}}}, "required": ["name", "category", "tags"]}}, "email": {"bsonType": "string", "pattern": "^[^\\s@]+@[^\\s@]+\\.[^\\s@]+$"}, "full_name": {"bsonType": "string", "pattern": "^[^\\t\\n\\r]{0,255}$"}, "last_saved": {"additionalProperties": false, "bsonType": "object", "properties": {"at_time": {"bsonType": "date"}, "by_user": {"bsonType": "string", "pattern": "^\\S{1,40}$"}, "correlation_id": {"bsonType": "string", "pattern": "^\\S{1,40}$"}, "from_ip": {"bsonType": "string", 
"pattern": "^((25[0-5]|2[0-4][0-9]|[01]?[0-9][0-9]?)\\.){3}(25[0-5]|2[0-4][0-9]|[01]?[0-9][0-9]?)$"}}, "required": ["from_ip", "by_user", "at_time", "correlation_id"]}, "phone": {"bsonType": "string", "pattern": "^\\+1[2-9][0-9]{9}$"}, "preferences": {"additionalProperties": false, "bsonType": "object", "properties": {"content_tags": {"bsonType": "array", "items": {"bsonType": "string", "enum": ["technology", "business", "entertainment", "education", "news"]}}, "delivery_channels": {"bsonType": "array", "items": {"bsonType": "string", "enum": ["email", "sms", "push", "in_app"]}}, "notification_types": {"bsonType": "array", "items": {"bsonType": "string", "enum": ["system", "user", "content", "reminder"]}}, "priority_levels": {"bsonType": "array", "items": {"bsonType": "string", "enum": ["critical", "high", "medium", "low"]}}}, "required": ["notification_types", "delivery_channels"]}, "status": {"bsonType": "string", "enum": ["draft", "active", "archived"]}, "user_name": {"bsonType": "string", "pattern": "^\\S{1,40}$"}}, "required": ["_id", "user_name", "status", "last_saved"]}, "ends": "2025-07-09 23:17:08.393823", "id": "MON-10", "starts": "2025-07-09 23:17:08.391216", "status": "SUCCESS", "sub_events": [], "type": "APPLY_SCHEMA"}], "type": "APPLY_SCHEMA_VALIDATION"}, {"data": {"collection_name": "user", "test_data_file": "user.1.0.1.3.json", "test_data_path": "./tests/test_cases/large_sample/test_data/user.1.0.1.3.json", "version": "1.0.1.3"}, "ends": "2025-07-09 23:17:08.395985", "id": "PRO-06", "starts": "2025-07-09 23:17:08.393826", "status": "SUCCESS", "sub_events": [{"data": {"collection": "user", "data_file": "user.1.0.1.3.json", "documents_loaded": 7, "insert_many_result": {"acknowledged": true, "inserted_ids": ["a00000000000000000000020", "a00000000000000000000021", "a00000000000000000000022", "a00000000000000000000023", "a00000000000000000000024", "a00000000000000000000025", "a00000000000000000000026"]}}, "ends": "2025-07-09 23:17:08.395978", "id": "MON-11", "starts": "2025-07-09 23:17:08.393829", "status": "SUCCESS", "sub_events": [], "type": "LOAD_DATA"}], "type": "LOAD_TEST_DATA"}, {"data": {"collection_name": "user", "new_version": "1.0.1.3", "version_number": "user.1.0.1.3"}, "ends": "2025-07-09 23:17:08.397700", "id": "PRO-07", "starts": "2025-07-09 23:17:08.395986", "status": "SUCCESS", "sub_events": [], "type": "UPDATE_VERSION"}], "type": "PROCESS"}, {"data": [{"enumerators": {}, "name": "Enumerations", "status": "Deprecated", "version": 0}, {"enumerators": {"category_type": {"personal": "Personal items", "project": "Project specific items", "reference": "Reference materials", "work": "Work related items"}, "default_status": {"active": "Not Deleted", "archived": "Soft Delete Indicator"}, "delivery_channel": {"email": "Email delivery", "in_app": "In-app notification", "push": "Push notification", "sms": "SMS delivery"}, "media_format": {"bluray": "Blu-ray format", "digital": "Digital format", "dvd": "DVD format", "streaming": "Streaming format"}, "media_quality": {"hd": "High definition", "sd": "Standard definition", "uhd": "Ultra high definition"}, "media_status": {"archived": "No longer available", "draft": "Not yet published", "published": "Available to users"}, "media_tags": {"action": "Action genre", "comedy": "Comedy genre", "documentary": "Documentary genre", "drama": "Drama genre", "sci_fi": "Science fiction genre"}, "media_type": {"documentary": "A non-fiction film", "movie": "A motion picture", "short": "A short film", "tv_show": "A television series"}, 
"notification_action": {"created": "Document created", "deleted": "Document deleted", "published": "Document published", "updated": "Document updated"}, "notification_tags": {"important": "Important notification", "low": "Low priority notification", "normal": "Normal notification", "urgent": "Urgent notification"}, "notification_type": {"content": "Content notification", "reminder": "Reminder notification", "system": "System notification", "user": "User notification"}, "priority_level": {"critical": "Critical priority", "high": "High priority", "low": "Low priority", "medium": "Medium priority"}}, "name": "Enumerations", "status": "Active", "version": 1}, {"enumerators": {"default_status": {"active": "Not deleted", "archived": "Soft delete indicator", "draft": "Not finalized"}, "media_format": {"bluray": "Blu-ray format", "digital": "Digital format", "dvd": "DVD format", "streaming": "Streaming format"}, "media_quality": {"hd": "High definition", "sd": "Standard definition", "uhd": "Ultra high definition"}, "media_status": {"archived": "No longer available", "draft": "Not yet published", "published": "Available to users"}, "media_tags": {"action": "Action genre", "comedy": "Comedy genre", "documentary": "Documentary genre", "drama": "Drama genre", "sci_fi": "Science fiction genre"}, "media_type": {"documentary": "A non-fiction film", "movie": "A motion picture", "short": "A short film", "tv_show": "A television series"}}, "name": "Enumerations", "status": "Active", "version": 2}, {"enumerators": {"category_tags": {"blocked": "Cannot proceed", "completed": "Task is done", "important": "High priority", "in_progress": "Currently being worked on", "low": "Low priority", "normal": "Standard priority", "review": "Needs review", "urgent": "Requires immediate attention"}, "category_type": {"personal": "Personal items", "project": "Project specific items", "reference": "Reference materials", "work": "Work related items"}, "content_tags": {"business": "Business related content", "education": "Educational content", "entertainment": "Entertainment content", "news": "News content", "technology": "Technology related content"}, "content_type": {"article": "Written content", "podcast": "Audio content", "video": "Video content"}, "default_status": {"active": "Not deleted", "archived": "Soft delete indicator", "draft": "Not finalized"}, "delivery_channel": {"email": "Email delivery", "in_app": "In-app notification", "push": "Push notification", "sms": "SMS delivery"}, "media_format": {"bluray": "Blu-ray format", "digital": "Digital format", "dvd": "DVD format", "streaming": "Streaming format"}, "media_quality": {"hd": "High definition", "sd": "Standard definition", "uhd": "Ultra high definition"}, "media_status": {"archived": "No longer available", "draft": "Not yet published", "published": "Available to users"}, "media_tags": {"action": "Action genre", "comedy": "Comedy genre", "documentary": "Documentary genre", "drama": "Drama genre", "sci_fi": "Science fiction genre"}, "media_type": {"documentary": "A non-fiction film", "movie": "A motion picture", "short": "A short film", "tv_show": "A television series"}, "notification_action": {"created": "Document created", "deleted": "Document deleted", "published": "Document published", "updated": "Document updated"}, "notification_tags": {"important": "Important notification", "low": "Low priority notification", "normal": "Normal notification", "urgent": "Urgent notification"}, "notification_type": {"content": "Content notification", "reminder": "Reminder 
notification", "system": "System notification", "user": "User notification"}, "priority_level": {"critical": "Critical priority", "high": "High priority", "low": "Low priority", "medium": "Medium priority"}, "tags": {"admin": "An administrator", "super": "A super user", "user": "A User"}, "type": {"check": "Select multiple options", "radio": "Select one option", "text": "Enter a text string"}}, "name": "Enumerations", "status": "Active", "version": 3}], "ends": "2025-07-09 23:17:08.400327", "id": "PRO-08", "starts": "2025-07-09 23:17:08.397706", "status": "SUCCESS", "sub_events": [], "type": "LOAD_ENUMERATORS"}], "type": "PROCESS"}], "type": "PROCESS"} diff --git a/tests/db_util.py b/tests/db_util.py deleted file mode 100644 index 3f65f00..0000000 --- a/tests/db_util.py +++ /dev/null @@ -1,408 +0,0 @@ -#!/usr/bin/env python3 -""" -Database utility for testing purposes. - -This module provides utilities for: -- Dropping the entire database (for testing cleanup) -- Comparing database contents with JSON files -- Harvesting database contents to JSON files - -WARNING: This module is for testing only and should never be deployed to production. -""" - -import json -import os -import sys -from datetime import datetime -from pathlib import Path -from typing import Dict, List, Optional, Any -from bson import ObjectId, json_util - -# Add the project root to the path so we can import stage0_py_utils -project_root = Path(__file__).parent.parent -sys.path.insert(0, str(project_root)) - -from stage0_py_utils import Config, MongoIO - - -class DatabaseUtil: - """Database utility class for testing operations.""" - - def __init__(self): - """Initialize the database utility.""" - self.config = Config.get_instance() - self.mongo = MongoIO.get_instance() - self.db_name = self.config.MONGO_DB_NAME - - def drop_database(self, passphrase: Optional[str] = None) -> Dict[str, Any]: - """Drop the entire database. - - WARNING: This will permanently delete all data in the database. - - Args: - passphrase: Optional passphrase to skip confirmation prompt - - Returns: - Dict containing operation result - """ - try: - # Check passphrase if provided - if passphrase: - expected_passphrase = "DROP_DROWSSAP_YEK" - if passphrase != expected_passphrase: - return { - "operation": "drop_database", - "database": self.db_name, - "error": "Invalid passphrase", - "timestamp": datetime.now().isoformat(), - "status": "error" - } - else: - # Require manual confirmation if no passphrase - print("⚠️ WARNING: This will permanently delete all data in the database!") - confirm = input("Type 'YES' to confirm: ") - if confirm != "YES": - return { - "operation": "drop_database", - "database": self.db_name, - "timestamp": datetime.now().isoformat(), - "status": "cancelled" - } - - # Get the database object - db = self.mongo.client[self.db_name] - - # Drop the database - self.mongo.client.drop_database(self.db_name) - - print(f"✅ Database '{self.db_name}' dropped successfully") - - return { - "operation": "drop_database", - "database": self.db_name, - "timestamp": datetime.now().isoformat(), - "status": "success" - } - - except Exception as e: - error_msg = f"Failed to drop database '{self.db_name}': {str(e)}" - print(f"❌ {error_msg}") - - return { - "operation": "drop_database", - "database": self.db_name, - "error": str(e), - "timestamp": datetime.now().isoformat(), - "status": "error" - } - - def compare_database_with_files(self) -> Dict[str, Any]: - """Compare database contents with JSON files. 
- - Returns: - Dict containing comparison results - """ - try: - base_path = os.path.join(self.config.INPUT_FOLDER, self.db_name) - base_path = Path(base_path) - if not base_path.exists(): - raise ValueError(f"Base path does not exist: {base_path}") - - results = { - "operation": "compare_database", - "database": self.db_name, - "comparisons": [], - "timestamp": datetime.now().isoformat(), - "status": "success" - } - - # Get all collections in the database - db = self.mongo.client[self.db_name] - collections = db.list_collection_names() - - # Find JSON files that match collection names - json_files = list(base_path.glob("*.json")) - - for json_file in json_files: - # Extract collection name from filename (e.g., "user.1.0.0.1.json" -> "user") - collection_name = json_file.stem.split('.')[0] - - if collection_name in collections: - comparison = self._compare_collection_with_file(collection_name, json_file) - results["comparisons"].append(comparison) - else: - results["comparisons"].append({ - "collection": collection_name, - "file": str(json_file), - "status": "collection_not_found", - "message": f"Collection '{collection_name}' not found in database" - }) - - # Check for collections without corresponding files - for collection in collections: - if not any(collection in str(f) for f in json_files): - results["comparisons"].append({ - "collection": collection, - "file": None, - "status": "file_not_found", - "message": f"No JSON file found for collection '{collection}'" - }) - - # Check if any comparisons failed and update overall status - failed_comparisons = [ - comp for comp in results["comparisons"] - if comp["status"] not in ["match", "success"] - ] - - if failed_comparisons: - results["status"] = "error" - results["error"] = f"{len(failed_comparisons)} comparison(s) failed" - results["failed_count"] = len(failed_comparisons) - - return results - - except Exception as e: - return { - "operation": "compare_database", - "database": self.db_name, - "error": str(e), - "timestamp": datetime.now().isoformat(), - "status": "error" - } - - def _compare_collection_with_file(self, collection_name: str, json_file: Path) -> Dict[str, Any]: - """Compare a single collection with its corresponding JSON file. 
- - Args: - collection_name: Name of the collection - json_file: Path to the JSON file - - Returns: - Dict containing comparison result - """ - try: - # Load expected data from JSON file - with open(json_file, 'r') as f: - expected_data = json.load(f) - - # Get actual data from database - collection = self.mongo.client[self.db_name][collection_name] - actual_data = list(collection.find({})) - - # Convert ObjectIds to strings for comparison - actual_data = json.loads(json_util.dumps(actual_data)) - - # Remove _id fields from both expected and actual data - expected_data_no_id = [] - for doc in expected_data: - doc_copy = {k: v for k, v in doc.items() if k != '_id'} - expected_data_no_id.append(doc_copy) - - actual_data_no_id = [] - for doc in actual_data: - doc_copy = {k: v for k, v in doc.items() if k != '_id'} - actual_data_no_id.append(doc_copy) - - # Compare document counts - expected_count = len(expected_data_no_id) - actual_count = len(actual_data_no_id) - - if expected_count != actual_count: - return { - "collection": collection_name, - "file": str(json_file), - "status": "count_mismatch", - "expected_count": expected_count, - "actual_count": actual_count, - "message": f"Document count mismatch: expected {expected_count}, got {actual_count}" - } - - # Compare documents (simplified - could be enhanced for deep comparison) - matches = 0 - mismatches = [] - - for i, (expected, actual) in enumerate(zip(expected_data_no_id, actual_data_no_id)): - if expected == actual: - matches += 1 - else: - mismatches.append({ - "index": i, - "expected": expected, - "actual": actual - }) - - if matches == expected_count: - return { - "collection": collection_name, - "file": str(json_file), - "status": "match", - "document_count": expected_count, - "message": f"All {expected_count} documents match" - } - else: - return { - "collection": collection_name, - "file": str(json_file), - "status": "content_mismatch", - "document_count": expected_count, - "matches": matches, - "mismatches": len(mismatches), - "mismatch_details": mismatches[:5], # Limit to first 5 mismatches - "message": f"{matches}/{expected_count} documents match" - } - - except Exception as e: - return { - "collection": collection_name, - "file": str(json_file), - "status": "error", - "error": str(e), - "message": f"Error comparing collection: {str(e)}" - } - - def harvest_database(self, output_path: Optional[str] = None) -> Dict[str, Any]: - """Harvest all database contents to JSON files. - - Args: - output_path: Directory to save harvested JSON files (defaults to input folder) - - Returns: - Dict containing harvest results - """ - try: - if output_path is None: - output_path = os.path.join(self.config.INPUT_FOLDER, self.db_name) - - output_path = Path(output_path) - - # Check if files already exist and prompt for replacement - existing_files = [] - for collection_name in self.mongo.client[self.db_name].list_collection_names(): - potential_file = output_path / f"{collection_name}.json" - if potential_file.exists(): - existing_files.append(str(potential_file)) - - if existing_files: - print(f"⚠️ Found existing files in {output_path}:") - for file in existing_files: - print(f" - {file}") - confirm = input("Replace existing files? 
(y/N): ") - if confirm.lower() not in ['y', 'yes']: - return { - "operation": "harvest_database", - "database": self.db_name, - "output_path": str(output_path), - "message": "Operation cancelled by user", - "timestamp": datetime.now().isoformat(), - "status": "cancelled" - } - - output_path.mkdir(parents=True, exist_ok=True) - - results = { - "operation": "harvest_database", - "database": self.db_name, - "output_path": str(output_path), - "collections": [], - "timestamp": datetime.now().isoformat(), - "status": "success" - } - - # Get all collections - db = self.mongo.client[self.db_name] - collections = db.list_collection_names() - - for collection_name in collections: - collection_result = self._harvest_collection(collection_name, output_path) - results["collections"].append(collection_result) - - print(f"✅ Database harvested to {output_path}") - return results - - except Exception as e: - return { - "operation": "harvest_database", - "database": self.db_name, - "error": str(e), - "timestamp": datetime.now().isoformat(), - "status": "error" - } - - def _harvest_collection(self, collection_name: str, output_path: Path) -> Dict[str, Any]: - """Harvest a single collection to a JSON file. - - Args: - collection_name: Name of the collection - output_path: Directory to save the file - - Returns: - Dict containing harvest result - """ - try: - # Get all documents from collection - collection = self.mongo.client[self.db_name][collection_name] - documents = list(collection.find({})) - - # Convert to JSON-serializable format - json_data = json.loads(json_util.dumps(documents, default=str)) - - # Save to file - output_file = output_path / f"{collection_name}.json" - with open(output_file, 'w') as f: - json.dump(json_data, f, indent=2, default=str) - - return { - "collection": collection_name, - "file": str(output_file), - "document_count": len(documents), - "status": "success" - } - - except Exception as e: - return { - "collection": collection_name, - "error": str(e), - "status": "error" - } - - -def main(): - """Main function for command-line usage.""" - import argparse - - parser = argparse.ArgumentParser(description="Database utility for testing") - parser.add_argument("command", choices=["drop", "compare", "harvest"], - help="Command to execute") - parser.add_argument("--output-path", - help="Output path for harvested data (defaults to input folder / database name)") - parser.add_argument("--passphrase", - help="Passphrase for silent database drop: DROP_DROWSSAP_YEK") - - args = parser.parse_args() - - util = DatabaseUtil() - - if args.command == "drop": - if args.passphrase: - result = util.drop_database(args.passphrase) - else: - print("⚠️ WARNING: This will permanently delete all data in the database!") - confirm = input("Type 'YES' to confirm: ") - if confirm == "YES": - result = util.drop_database() - else: - print("Operation cancelled") - return - - elif args.command == "compare": - result = util.compare_database_with_files() - - elif args.command == "harvest": - result = util.harvest_database(args.output_path) - - # Print results - print(json.dumps(result, indent=2, default=str)) - - -if __name__ == "__main__": - main() \ No newline at end of file diff --git a/tests/managers/test_config_manager.py b/tests/managers/test_config_manager.py deleted file mode 100644 index 348676e..0000000 --- a/tests/managers/test_config_manager.py +++ /dev/null @@ -1,312 +0,0 @@ -import unittest -import os -import shutil -import tempfile -from stage0_mongodb_api.managers.config_manager import ConfigManager 
-from stage0_py_utils import Config -from unittest.mock import patch, MagicMock - -class TestConfigManager(unittest.TestCase): - @classmethod - def setUpClass(cls): - cls.mongoio_patcher = patch('stage0_py_utils.mongo_utils.mongo_io.MongoIO.get_instance') - cls.mock_mongoio_get_instance = cls.mongoio_patcher.start() - cls.mock_mongoio_get_instance.return_value = MagicMock() - - @classmethod - def tearDownClass(cls): - cls.mongoio_patcher.stop() - - def setUp(self): - self.test_cases_dir = os.path.join(os.path.dirname(__file__), '..', 'test_cases') - self.config = Config.get_instance() - - def tearDown(self): - pass - - def test_load_minimum_valid(self): - """Test loading empty Collections directory structure.""" - test_case_dir = os.path.join(self.test_cases_dir, "minimum_valid") - self.config.INPUT_FOLDER = test_case_dir - manager = ConfigManager() - self.assertEqual(manager.load_errors, [], "Unexpected load errors in simple_valid test case") - self.assertEqual(len(manager.collection_configs), 0, f"Unexpected number of collection configs {len(manager.collection_configs)}") - - def test_load_small_sample(self): - """Test loading Small configuration.""" - test_case_dir = os.path.join(self.test_cases_dir, "small_sample") - self.config.INPUT_FOLDER = test_case_dir - manager = ConfigManager() - self.assertEqual(manager.load_errors, [], "Unexpected load errors in simple_valid test case") - self.assertEqual(len(manager.collection_configs), 1, f"Unexpected number of collection configs {len(manager.collection_configs)}") - self.assertIn("simple", manager.collection_configs) - - def test_load_large_sample(self): - """Test loading large config""" - test_case_dir = os.path.join(self.test_cases_dir, "large_sample") - self.config.INPUT_FOLDER = test_case_dir - manager = ConfigManager() - self.assertEqual(manager.load_errors, [], "Unexpected load errors in simple_valid test case") - self.assertEqual(len(manager.collection_configs), 4, f"Unexpected number of collection configs {len(manager.collection_configs)}") - self.assertIn("media", manager.collection_configs) - self.assertIn("organization", manager.collection_configs) - self.assertIn("search", manager.collection_configs) - self.assertIn("user", manager.collection_configs) - - def test_non_parsable(self): - """Test loading with non-parsable files""" - test_case_dir = os.path.join(self.test_cases_dir, "non_parsable") - self.config.INPUT_FOLDER = test_case_dir - manager = ConfigManager() - self.assertEqual(len(manager.load_errors), 1, f"Unexpected load errors {manager.load_errors}") - - def test_load_test_data_bulk_write_error(self): - """Test that _load_test_data properly handles bulk write errors.""" - from stage0_py_utils.mongo_utils.mongo_io import TestDataLoadError - mock_details = {'writeErrors': [{'index': 0, 'code': 121, 'errmsg': 'Document failed validation'}]} - self.mock_mongoio_get_instance.return_value.load_test_data.side_effect = TestDataLoadError( - "Schema validation failed during test data load", details=mock_details - ) - config_manager = ConfigManager() - collection_name = "test_collection" - test_data_file = "test.json" - result = config_manager._load_test_data(collection_name, test_data_file) - - # Test structure rather than specific values - self.assertEqual(result["status"], "error") - self.assertEqual(result["operation"], "load_test_data") - self.assertEqual(result["collection"], collection_name) - self.assertEqual(result["details_type"], "error") - self.assertIn("test.json", result["details"]["test_data_file"]) - 
self.assertIn("message", result) # Should have message field - self.assertIn("details", result["details"]) # Should have details field - - def test_load_test_data_generic_error(self): - """Test that _load_test_data properly handles generic errors.""" - self.mock_mongoio_get_instance.return_value.load_test_data.side_effect = Exception("File not found") - config_manager = ConfigManager() - collection_name = "test_collection" - test_data_file = "test.json" - result = config_manager._load_test_data(collection_name, test_data_file) - - # Test structure rather than specific values - self.assertEqual(result["status"], "error") - self.assertEqual(result["operation"], "load_test_data") - self.assertEqual(result["collection"], collection_name) - self.assertEqual(result["details_type"], "error") - self.assertIn("test.json", result["details"]["test_data_file"]) - self.assertIn("message", result) # Should have message field - - def test_load_test_data_success(self): - """Test that _load_test_data properly handles successful loads.""" - # Reset any previous side effects or return values - self.mock_mongoio_get_instance.return_value.load_test_data.reset_mock() - self.mock_mongoio_get_instance.return_value.load_test_data.side_effect = None - mock_results = { - "status": "success", - "operation": "load_test_data", - "collection": "test_collection", - "documents_loaded": 5, - "inserted_ids": ["id1", "id2", "id3", "id4", "id5"], - "acknowledged": True - } - self.mock_mongoio_get_instance.return_value.load_test_data.return_value = mock_results - config_manager = ConfigManager() - collection_name = "test_collection" - test_data_file = "test.json" - result = config_manager._load_test_data(collection_name, test_data_file) - - # Test structure rather than specific values - self.assertEqual(result["status"], "success") - self.assertEqual(result["operation"], "load_test_data") - self.assertEqual(result["collection"], collection_name) - self.assertEqual(result["details_type"], "test_data") - self.assertIn("test.json", result["details"]["test_data_file"]) - self.assertIn("results", result["details"]) # Should have results field - - def test_process_collection_versions_structure(self): - """Test that process_collection_versions returns the expected structure.""" - test_case_dir = os.path.join(self.test_cases_dir, "small_sample") - self.config.INPUT_FOLDER = test_case_dir - - # Mock VersionManager.get_current_version to return a version that will be processed - with patch('stage0_mongodb_api.managers.config_manager.VersionManager.get_current_version') as mock_get_version: - mock_get_version.return_value = "simple.0.0.0.0" - - # Mock all the manager operations to return success - with patch('stage0_mongodb_api.managers.config_manager.SchemaManager') as mock_schema_manager, \ - patch('stage0_mongodb_api.managers.config_manager.IndexManager') as mock_index_manager, \ - patch('stage0_mongodb_api.managers.config_manager.MigrationManager') as mock_migration_manager, \ - patch('stage0_mongodb_api.managers.config_manager.VersionManager') as mock_version_manager: - - # Set up mock return values - mock_schema_manager.return_value.remove_schema.return_value = { - "operation": "remove_schema", "collection": "simple", "status": "success" - } - mock_schema_manager.return_value.apply_schema.return_value = { - "operation": "apply_schema", "collection": "simple", "schema": {}, "status": "success" - } - mock_version_manager.return_value.update_version.return_value = { - "operation": "version_update", "collection": "simple", "version": 
"simple.1.0.0.1", "status": "success" - } - - config_manager = ConfigManager() - result = config_manager.process_collection_versions("simple") - - # Test that we get a list of operations - self.assertIsInstance(result, list) - self.assertGreater(len(result), 0) - - # Test that each operation has the expected structure - for operation in result: - self.assertIsInstance(operation, dict) - self.assertIn("operation", operation) - self.assertIn("status", operation) - self.assertIn("collection", operation) - # Status should be the last property - self.assertEqual(list(operation.keys())[-1], "status") - - def test_process_all_collections_structure(self): - """Test that process_all_collections returns the expected structure.""" - test_case_dir = os.path.join(self.test_cases_dir, "small_sample") - self.config.INPUT_FOLDER = test_case_dir - - # Mock VersionManager.get_current_version to return a version that will be processed - with patch('stage0_mongodb_api.managers.config_manager.VersionManager.get_current_version') as mock_get_version: - mock_get_version.return_value = "simple.0.0.0.0" - - # Mock all the manager operations to return success - with patch('stage0_mongodb_api.managers.config_manager.SchemaManager') as mock_schema_manager, \ - patch('stage0_mongodb_api.managers.config_manager.IndexManager') as mock_index_manager, \ - patch('stage0_mongodb_api.managers.config_manager.MigrationManager') as mock_migration_manager, \ - patch('stage0_mongodb_api.managers.config_manager.VersionManager') as mock_version_manager: - - # Set up mock return values - mock_schema_manager.return_value.remove_schema.return_value = { - "operation": "remove_schema", "collection": "simple", "status": "success" - } - mock_schema_manager.return_value.apply_schema.return_value = { - "operation": "apply_schema", "collection": "simple", "schema": {}, "status": "success" - } - mock_version_manager.return_value.update_version.return_value = { - "operation": "version_update", "collection": "simple", "version": "simple.1.0.0.1", "status": "success" - } - - config_manager = ConfigManager() - result = config_manager.process_all_collections() - - # Test that we get a dict mapping collection names to operation lists - self.assertIsInstance(result, dict) - self.assertIn("simple", result) - self.assertIsInstance(result["simple"], list) - - # Test that each collection has operations - for collection_name, operations in result.items(): - self.assertIsInstance(operations, list) - self.assertGreater(len(operations), 0) - - # Test that each operation has the expected structure - for operation in operations: - self.assertIsInstance(operation, dict) - self.assertIn("operation", operation) - self.assertIn("status", operation) - # Status should be the last property - self.assertEqual(list(operation.keys())[-1], "status") - - def test_process_enumerators_success(self): - """Test that _process_enumerators successfully processes enumerators.""" - test_case_dir = os.path.join(self.test_cases_dir, "small_sample") - self.config.INPUT_FOLDER = test_case_dir - self.config.ENUMERATORS_COLLECTION_NAME = "Enumerators" - - # Mock schema_manager to return test enumerators - mock_enumerators = [ - {"version": 0, "enumerators": {}}, - {"version": 1, "enumerators": {"default_status": ["active", "archived"]}} - ] - - # Reset and mock MongoIO upsert_document to return the input document - self.mock_mongoio_get_instance.return_value.upsert_document.reset_mock() - def upsert_side_effect(collection, filter, document): - return document - 
self.mock_mongoio_get_instance.return_value.upsert_document.side_effect = upsert_side_effect - - with patch('stage0_mongodb_api.managers.config_manager.SchemaManager') as mock_schema_manager_class: - # Create a mock schema manager instance - mock_schema_manager = MagicMock() - mock_schema_manager.enumerators = mock_enumerators - mock_schema_manager_class.return_value = mock_schema_manager - - config_manager = ConfigManager() - result = config_manager._process_enumerators() - - # Test that we get the expected success structure - self.assertEqual(result["operation"], "process_enumerators") - self.assertEqual(result["collection"], "Enumerators") - self.assertEqual(result["status"], "success") - self.assertEqual(result["details_type"], "success") - self.assertEqual(result["details"]["processed_count"], 2) - self.assertEqual(result["details"]["total_count"], 2) - - # Verify upsert_document was called for each enumerator - self.assertEqual( - self.mock_mongoio_get_instance.return_value.upsert_document.call_count, 2 - ) - - def test_process_all_collections_includes_enumerators(self): - """Test that process_all_collections includes enumerators processing.""" - test_case_dir = os.path.join(self.test_cases_dir, "small_sample") - self.config.INPUT_FOLDER = test_case_dir - self.config.ENUMERATORS_COLLECTION_NAME = "Enumerators" - - # Mock schema_manager to return test enumerators - mock_enumerators = [ - {"version": 0, "enumerators": {}}, - {"version": 1, "enumerators": {"default_status": ["active", "archived"]}} - ] - - # Reset and mock MongoIO upsert_document to return the input document - self.mock_mongoio_get_instance.return_value.upsert_document.reset_mock() - def upsert_side_effect(collection, filter, document): - return document - self.mock_mongoio_get_instance.return_value.upsert_document.side_effect = upsert_side_effect - - # Mock VersionManager.get_current_version to return a version that will be processed - with patch('stage0_mongodb_api.managers.config_manager.VersionManager.get_current_version') as mock_get_version: - mock_get_version.return_value = "simple.0.0.0.0" - - # Mock all the manager operations to return success - with patch('stage0_mongodb_api.managers.config_manager.SchemaManager') as mock_schema_manager, \ - patch('stage0_mongodb_api.managers.config_manager.IndexManager') as mock_index_manager, \ - patch('stage0_mongodb_api.managers.config_manager.MigrationManager') as mock_migration_manager, \ - patch('stage0_mongodb_api.managers.config_manager.VersionManager') as mock_version_manager: - - # Set up mock schema manager with enumerators - mock_schema_manager.return_value.enumerators = mock_enumerators - mock_schema_manager.return_value.remove_schema.return_value = { - "operation": "remove_schema", "collection": "simple", "status": "success" - } - mock_schema_manager.return_value.apply_schema.return_value = { - "operation": "apply_schema", "collection": "simple", "schema": {}, "status": "success" - } - mock_version_manager.return_value.update_version.return_value = { - "operation": "version_update", "collection": "simple", "version": "simple.1.0.0.1", "status": "success" - } - - config_manager = ConfigManager() - result = config_manager.process_all_collections() - - # Test that we get a dict with enumerators and collections - self.assertIsInstance(result, dict) - self.assertIn("enumerators", result) - self.assertIn("simple", result) - - # Test that enumerators processing is included - enumerators_result = result["enumerators"] - self.assertIsInstance(enumerators_result, list) - 
self.assertEqual(len(enumerators_result), 2) # enumerators result + overall_status - self.assertEqual(enumerators_result[0]["operation"], "process_enumerators") - self.assertEqual(enumerators_result[0]["status"], "success") - -if __name__ == '__main__': - unittest.main() \ No newline at end of file diff --git a/tests/managers/test_index_manager.py b/tests/managers/test_index_manager.py deleted file mode 100644 index 5ec43ab..0000000 --- a/tests/managers/test_index_manager.py +++ /dev/null @@ -1,100 +0,0 @@ -import unittest -from unittest.mock import patch, MagicMock -from stage0_mongodb_api.managers.index_manager import IndexManager - -class TestIndexManager(unittest.TestCase): - """Test cases for the IndexManager class.""" - - def setUp(self): - """Set up test fixtures.""" - self.collection_name = "test_collection" - self.index_name = "test_index" - self.index_config = { - "name": self.index_name, - "key": {"field": 1}, - "unique": True - } - - @patch('stage0_mongodb_api.managers.index_manager.MongoIO') - def test_create_index(self, mock_mongo): - """Test creating an index.""" - # Arrange - mock_mongo.get_instance.return_value = MagicMock() - - # Act - result = IndexManager.create_index(self.collection_name, [self.index_config]) - - # Assert - self.assertEqual(result["status"], "success") - self.assertEqual(result["operation"], "create_index") - self.assertEqual(result["collection"], self.collection_name) - self.assertEqual(result["details_type"], "index") - self.assertIn(self.index_name, result["details"]["indexes"]) - mock_mongo.get_instance.return_value.create_index.assert_called_once_with( - self.collection_name, [self.index_config] - ) - - def test_create_index_missing_name(self): - """Test creating an index without a name.""" - # Arrange - config_without_name = {"key": {"field": 1}} - - # Act & Assert - with self.assertRaises(ValueError) as context: - IndexManager.create_index(self.collection_name, [config_without_name]) - self.assertEqual(str(context.exception), "Index configuration must include 'name' field") - - def test_create_index_missing_key(self): - """Test creating an index without a key.""" - # Arrange - config_without_key = {"name": self.index_name} - - # Act & Assert - with self.assertRaises(ValueError) as context: - IndexManager.create_index(self.collection_name, [config_without_key]) - self.assertEqual(str(context.exception), "Index configuration must include 'key' field") - - @patch('stage0_mongodb_api.managers.index_manager.MongoIO') - def test_drop_index(self, mock_mongo): - """Test dropping an index.""" - # Arrange - mock_mongo.get_instance.return_value = MagicMock() - - # Act - result = IndexManager.drop_index(self.collection_name, self.index_name) - - # Assert - self.assertEqual(result["status"], "success") - self.assertEqual(result["operation"], "drop_index") - self.assertEqual(result["collection"], self.collection_name) - self.assertEqual(result["details_type"], "index") - self.assertEqual(result["details"]["index"], self.index_name) - mock_mongo.get_instance.return_value.drop_index.assert_called_once_with( - self.collection_name, self.index_name - ) - - @patch('stage0_mongodb_api.managers.index_manager.MongoIO') - def test_list_indexes(self, mock_mongo): - """Test listing indexes.""" - # Arrange - mock_indexes = [ - {"name": "index1", "key": {"field1": 1}}, - {"name": "index2", "key": {"field2": -1}} - ] - mock_mongo.get_instance.return_value = MagicMock() - mock_mongo.get_instance.return_value.get_indexes.return_value = mock_indexes - - # Act - result = 
IndexManager.list_indexes(self.collection_name) - - # Assert - self.assertEqual(result["status"], "success") - self.assertEqual(result["operation"], "list_indexes") - self.assertEqual(result["collection"], self.collection_name) - self.assertEqual(result["indexes"], mock_indexes) - mock_mongo.get_instance.return_value.get_indexes.assert_called_once_with( - collection_name=self.collection_name - ) - -if __name__ == '__main__': - unittest.main() diff --git a/tests/managers/test_migration_manager.py b/tests/managers/test_migration_manager.py deleted file mode 100644 index 91f2d88..0000000 --- a/tests/managers/test_migration_manager.py +++ /dev/null @@ -1,103 +0,0 @@ -import unittest -from unittest.mock import patch, MagicMock -from stage0_mongodb_api.managers.migration_manager import MigrationManager - -class TestMigrationManager(unittest.TestCase): - """Test cases for the MigrationManager class.""" - - def setUp(self): - self.collection_name = "test_collection" - - @patch('stage0_py_utils.MongoIO.get_instance') - def test_run_migration_single_pipeline(self, mock_get_instance): - """Test running a single migration pipeline.""" - mock_mongo = MagicMock() - mock_get_instance.return_value = mock_mongo - migration = { - "name": "test_pipeline", - "pipeline": [ - {"$addFields": {"test_field": "value"}}, - {"$out": "test_collection"} - ] - } - result = MigrationManager.run_migration(self.collection_name, migration) - self.assertEqual(result["status"], "success") - self.assertEqual(result["operation"], "migration") - self.assertEqual(result["collection"], self.collection_name) - self.assertEqual(result["details_type"], "migration") - self.assertEqual(result["details"]["pipeline"]["name"], "test_pipeline") - self.assertEqual(result["details"]["pipeline"]["stages"], 2) - self.assertEqual(mock_mongo.execute_pipeline.call_count, 1) - mock_mongo.execute_pipeline.assert_called_once_with(self.collection_name, migration["pipeline"]) - - def test_run_migration_empty_migration(self): - """Test that empty migration raises ValueError.""" - with self.assertRaises(ValueError) as context: - MigrationManager.run_migration(self.collection_name, {}) - self.assertIn("Migration must contain a 'pipeline' field", str(context.exception)) - - def test_run_migration_missing_pipeline(self): - """Test that migration without pipeline field raises ValueError.""" - migration = {"name": "test_pipeline"} - with self.assertRaises(ValueError) as context: - MigrationManager.run_migration(self.collection_name, migration) - self.assertIn("Migration must contain a 'pipeline' field", str(context.exception)) - - def test_run_migration_empty_pipeline(self): - """Test that empty pipeline raises ValueError.""" - migration = { - "name": "empty_pipeline", - "pipeline": [] - } - with self.assertRaises(ValueError) as context: - MigrationManager.run_migration(self.collection_name, migration) - self.assertIn("Pipeline 'empty_pipeline' cannot be empty", str(context.exception)) - - @patch('stage0_py_utils.MongoIO.get_instance') - def test_run_migration_unnamed_pipeline(self, mock_get_instance): - """Test running a migration pipeline without a name.""" - mock_mongo = MagicMock() - mock_get_instance.return_value = mock_mongo - migration = { - "pipeline": [ - {"$addFields": {"test_field": "value"}}, - {"$out": "test_collection"} - ] - } - result = MigrationManager.run_migration(self.collection_name, migration) - self.assertEqual(result["status"], "success") - self.assertEqual(result["operation"], "migration") - self.assertEqual(result["collection"], 
self.collection_name) - self.assertEqual(result["details_type"], "migration") - self.assertEqual(result["details"]["pipeline"]["name"], "unnamed_pipeline") - self.assertEqual(result["details"]["pipeline"]["stages"], 2) - self.assertEqual(mock_mongo.execute_pipeline.call_count, 1) - mock_mongo.execute_pipeline.assert_called_once_with(self.collection_name, migration["pipeline"]) - - @patch('stage0_py_utils.MongoIO.get_instance') - def test_run_migration_complex_pipeline(self, mock_get_instance): - """Test running a complex migration pipeline with multiple stages.""" - mock_mongo = MagicMock() - mock_get_instance.return_value = mock_mongo - migration = { - "name": "complex_pipeline", - "pipeline": [ - {"$match": {"status": "active"}}, - {"$set": {"status": "inactive"}}, - {"$set": {"updated_at": "$$NOW"}}, - {"$unset": ["old_field"]}, - {"$merge": {"into": "archive"}} - ] - } - result = MigrationManager.run_migration(self.collection_name, migration) - self.assertEqual(result["status"], "success") - self.assertEqual(result["operation"], "migration") - self.assertEqual(result["collection"], self.collection_name) - self.assertEqual(result["details_type"], "migration") - self.assertEqual(result["details"]["pipeline"]["name"], "complex_pipeline") - self.assertEqual(result["details"]["pipeline"]["stages"], 5) - self.assertEqual(mock_mongo.execute_pipeline.call_count, 1) - mock_mongo.execute_pipeline.assert_called_once_with(self.collection_name, migration["pipeline"]) - -if __name__ == '__main__': - unittest.main() \ No newline at end of file diff --git a/tests/managers/test_ref_load_errors.py b/tests/managers/test_ref_load_errors.py deleted file mode 100644 index aeb9dde..0000000 --- a/tests/managers/test_ref_load_errors.py +++ /dev/null @@ -1,54 +0,0 @@ -import unittest -import os -from unittest.mock import MagicMock, patch -from stage0_mongodb_api.managers.schema_manager import SchemaManager -from stage0_py_utils import Config - - -class TestRefLoadErrors(unittest.TestCase): - """Test cases for $ref load errors during schema loading.""" - - def setUp(self): - """Set up test environment.""" - self.config = Config.get_instance() - self.test_cases_dir = os.path.join(os.path.dirname(__file__), "..", "test_cases") - - @patch('stage0_py_utils.MongoIO.get_instance') - def test_ref_load_errors(self, mock_get_instance): - """Test that $ref load errors are properly caught and reported.""" - # Arrange - mock_get_instance.return_value = MagicMock() - self.config.INPUT_FOLDER = os.path.join(self.test_cases_dir, "ref_load_errors") - - # Act - schema_manager = SchemaManager() - - # Assert - # Check that we have load errors - self.assertGreater(len(schema_manager.load_errors), 0, - "Should have load errors for $ref issues") - - # Check for specific error types - error_codes = [error.get('error_id') for error in schema_manager.load_errors] - - # Should have SCH-013 (circular reference) and SCH-014 (missing reference) - self.assertIn('SCH-013', error_codes, - "Should have circular reference error (SCH-013)") - self.assertIn('SCH-014', error_codes, - "Should have missing reference error (SCH-014)") - - # Verify error details - circular_error = next((e for e in schema_manager.load_errors if e.get('error_id') == 'SCH-013'), None) - missing_error = next((e for e in schema_manager.load_errors if e.get('error_id') == 'SCH-014'), None) - - self.assertIsNotNone(circular_error, "Should have circular reference error") - self.assertEqual(circular_error['error'], 'circular_reference') - 
self.assertEqual(circular_error['ref_name'], 'circular_ref.1.0.0') - - self.assertIsNotNone(missing_error, "Should have missing reference error") - self.assertEqual(missing_error['error'], 'ref_not_found') - self.assertEqual(missing_error['ref_name'], 'does_not_exist.1.0.0') - - -if __name__ == '__main__': - unittest.main() \ No newline at end of file diff --git a/tests/managers/test_schema_loading.py b/tests/managers/test_schema_loading.py deleted file mode 100644 index d7d4377..0000000 --- a/tests/managers/test_schema_loading.py +++ /dev/null @@ -1,136 +0,0 @@ -import unittest -import os -from unittest.mock import MagicMock, patch -from stage0_mongodb_api.managers.schema_manager import SchemaManager -from stage0_py_utils import Config - -class TestSchemaLoading(unittest.TestCase): - """Test suite for schema loading functionality.""" - - def setUp(self): - """Set up test fixtures.""" - self.config = Config.get_instance() - self.test_cases_dir = os.path.join(os.path.dirname(__file__), "..", "test_cases") - - @patch('stage0_py_utils.MongoIO.get_instance') - def test_load_minimum_valid(self, mock_get_instance): - """Test loading minimum valid schema structure.""" - # Arrange - mock_get_instance.return_value = MagicMock() - self.config.INPUT_FOLDER = os.path.join(self.test_cases_dir, "minimum_valid") - - # Act - schema_manager = SchemaManager() - - # Assert - self.assertEqual(schema_manager.load_errors, []) - self.assertEqual(len(schema_manager.dictionaries), 0) - self.assertEqual(len(schema_manager.types), 0) - self.assertEqual(len(schema_manager.enumerators), 0) - - @patch('stage0_py_utils.MongoIO.get_instance') - def test_load_small_sample(self, mock_get_instance): - """Test loading small sample schema.""" - # Arrange - mock_get_instance.return_value = MagicMock() - self.config.INPUT_FOLDER = os.path.join(self.test_cases_dir, "small_sample") - - # Act - schema_manager = SchemaManager() - - # Assert - self.assertEqual(schema_manager.load_errors, []) - self.assertEqual(len(schema_manager.dictionaries), 1) - self.assertEqual(len(schema_manager.types), 2) - self.assertEqual(len(schema_manager.enumerators), 2) - - @patch('stage0_py_utils.MongoIO.get_instance') - def test_load_large_sample(self, mock_get_instance): - """Test loading large sample schema.""" - # Arrange - mock_get_instance.return_value = MagicMock() - self.config.INPUT_FOLDER = os.path.join(self.test_cases_dir, "large_sample") - - # Act - schema_manager = SchemaManager() - - # Assert - self.assertEqual(schema_manager.load_errors, []) - self.assertEqual(len(schema_manager.dictionaries), 6) - self.assertEqual(len(schema_manager.types), 11) - self.assertEqual(len(schema_manager.enumerators), 4) - - @patch('stage0_py_utils.MongoIO.get_instance') - def test_empty_input(self, mock_get_instance): - """Test loading with empty input.""" - # Arrange - mock_get_instance.return_value = MagicMock() - self.config.INPUT_FOLDER = os.path.join(self.test_cases_dir, "empty_input") - - # Act - schema_manager = SchemaManager() - - # Assert - expected_error_ids = {"CFG-001", "SCH-001", "SCH-004", "SCH-009"} - actual_error_ids = {error.get('error_id') for error in schema_manager.load_errors if 'error_id' in error} - missing_error_ids = expected_error_ids - actual_error_ids - extra_error_ids = actual_error_ids - expected_error_ids - self.assertEqual(missing_error_ids, set(), f"Missing error IDs: {missing_error_ids}") - self.assertEqual(extra_error_ids, set(), f"Extra error IDs: {extra_error_ids}") - - @patch('stage0_py_utils.MongoIO.get_instance') - def 
test_missing_sub_folders(self, mock_get_instance): - """Test loading with missing sub-folders.""" - # Arrange - mock_get_instance.return_value = MagicMock() - self.config.INPUT_FOLDER = os.path.join(self.test_cases_dir, "missing_folders") - - # Act - schema_manager = SchemaManager() - - # Assert - expected_error_ids = {"SCH-001", "SCH-004"} - actual_error_ids = {error.get('error_id') for error in schema_manager.load_errors if 'error_id' in error} - missing_error_ids = expected_error_ids - actual_error_ids - extra_error_ids = actual_error_ids - expected_error_ids - self.assertEqual(missing_error_ids, set(), f"Missing error IDs: {missing_error_ids}") - self.assertEqual(extra_error_ids, set(), f"Extra error IDs: {extra_error_ids}") - - @patch('stage0_py_utils.MongoIO.get_instance') - def test_load_errors(self, mock_get_instance): - """Test loading with unparsable input files.""" - # Arrange - mock_get_instance.return_value = MagicMock() - self.config.INPUT_FOLDER = os.path.join(self.test_cases_dir, "unparsable_files") - - # Act - schema_manager = SchemaManager() - - # Assert - expected_error_ids = {"CFG-002", "SCH-002", "SCH-007", "SCH-011"} - actual_error_ids = {error.get('error_id') for error in schema_manager.load_errors if 'error_id' in error} - missing_error_ids = expected_error_ids - actual_error_ids - extra_error_ids = actual_error_ids - expected_error_ids - self.assertEqual(missing_error_ids, set(), f"Missing error IDs: {missing_error_ids}") - self.assertEqual(extra_error_ids, set(), f"Extra error IDs: {extra_error_ids}") - - @patch('stage0_py_utils.MongoIO.get_instance') - def test_ref_resolution_errors(self, mock_get_instance): - """Test loading with $ref resolution errors.""" - # Arrange - mock_get_instance.return_value = MagicMock() - self.config.INPUT_FOLDER = os.path.join(self.test_cases_dir, "validation_errors") - - # Act - schema_manager = SchemaManager() - - # Assert - expected_error_ids = {"SCH-014"} # Only missing reference, no circular reference in this test case - actual_error_ids = {error.get('error_id') for error in schema_manager.load_errors if 'error_id' in error} - missing_error_ids = expected_error_ids - actual_error_ids - extra_error_ids = actual_error_ids - expected_error_ids - self.assertEqual(missing_error_ids, set(), f"Missing error IDs: {missing_error_ids}") - self.assertEqual(extra_error_ids, set(), f"Extra error IDs: {extra_error_ids}") - -if __name__ == '__main__': - unittest.main() \ No newline at end of file diff --git a/tests/managers/test_schema_operations.py b/tests/managers/test_schema_operations.py deleted file mode 100644 index bda8881..0000000 --- a/tests/managers/test_schema_operations.py +++ /dev/null @@ -1,108 +0,0 @@ -import unittest -from unittest.mock import patch, MagicMock -import os -from stage0_py_utils import Config -from stage0_mongodb_api.managers.schema_manager import SchemaManager - -class TestSchemaOperations(unittest.TestCase): - """Test suite for schema operations (MongoDB operations).""" - - def setUp(self): - """Set up test fixtures.""" - self.config = Config.get_instance() - self.test_cases_dir = os.path.join(os.path.dirname(__file__), "..", "test_cases") - self.config.INPUT_FOLDER = os.path.join(self.test_cases_dir, "small_sample") - - @patch('stage0_py_utils.MongoIO.get_instance') - def test_apply_schema_success(self, mock_get_instance): - """Test successful schema application.""" - # Arrange - mock_mongo = MagicMock() - mock_get_instance.return_value = mock_mongo - schema_manager = SchemaManager() - 
self.assertEqual(schema_manager.load_errors, []) - self.assertEqual(schema_manager.validate_schema(), []) - - # Act - result = schema_manager.apply_schema("simple.1.0.0.1") - - # Assert - self.assertEqual(result["status"], "success", f"Expected success, got {result}") - self.assertEqual(result["operation"], "apply_schema", f"Expected apply_schema, got {result}") - self.assertEqual(result["collection"], "simple", f"Expected simple, got {result}") - mock_mongo.apply_schema.assert_called_once() - - @patch('stage0_py_utils.MongoIO.get_instance') - def test_apply_schema_value_error(self, mock_get_instance): - """Test applying schema with an invalid version format.""" - # Arrange - mock_mongo = MagicMock() - mock_get_instance.return_value = mock_mongo - schema_manager = SchemaManager() - - # Act - result = schema_manager.apply_schema("invalid_format") - - # Assert - self.assertEqual(result["status"], "error") - self.assertEqual(result["operation"], "apply_schema") - self.assertEqual(result["collection"], "invalid_format") - self.assertIn("Invalid version format", result["message"]) - mock_mongo.apply_schema.assert_not_called() - - @patch('stage0_py_utils.MongoIO.get_instance') - def test_apply_schema_exception(self, mock_get_instance): - """Test applying schema exception handling.""" - # Arrange - mock_mongo = MagicMock() - mock_get_instance.return_value = mock_mongo - mock_mongo.apply_schema.side_effect = Exception("mock exception") - schema_manager = SchemaManager() - - # Act - result = schema_manager.apply_schema("simple.1.0.0.1") - - # Assert - self.assertEqual(result["status"], "error") - self.assertEqual(result["operation"], "apply_schema") - self.assertEqual(result["collection"], "simple") - self.assertIn("mock exception", result["message"]) - - @patch('stage0_py_utils.MongoIO.get_instance') - def test_remove_schema_success(self, mock_get_instance): - """Test successful schema removal.""" - # Arrange - mock_mongo = MagicMock() - mock_get_instance.return_value = mock_mongo - schema_manager = SchemaManager() - - # Act - result = schema_manager.remove_schema("simple") - - # Assert - self.assertEqual(result["status"], "success") - self.assertEqual(result["operation"], "remove_schema") - self.assertEqual(result["collection"], "simple") - mock_mongo.remove_schema.assert_called_once_with("simple") - - @patch('stage0_py_utils.MongoIO.get_instance') - def test_remove_schema_exception(self, mock_get_instance): - """Test schema removal exception handling.""" - # Arrange - mock_mongo = MagicMock() - mock_get_instance.return_value = mock_mongo - mock_mongo.remove_schema.side_effect = Exception("mock exception") - schema_manager = SchemaManager() - - # Act - result = schema_manager.remove_schema("simple.1.0.0.1") - - # Assert - self.assertEqual(result["status"], "error") - self.assertEqual(result["operation"], "remove_schema") - self.assertEqual(result["collection"], "simple.1.0.0.1") - self.assertIn("mock exception", result["message"]) - - -if __name__ == '__main__': - unittest.main() \ No newline at end of file diff --git a/tests/managers/test_schema_renders.py b/tests/managers/test_schema_renders.py deleted file mode 100644 index 2106758..0000000 --- a/tests/managers/test_schema_renders.py +++ /dev/null @@ -1,230 +0,0 @@ -import unittest -import os -import json -from unittest.mock import MagicMock, patch -import yaml -from stage0_mongodb_api.managers.schema_manager import SchemaManager -from stage0_mongodb_api.managers.schema_types import SchemaFormat -from stage0_py_utils import Config - -class 
TestSchemaRenders(unittest.TestCase): - """Test suite for schema rendering functionality.""" - - def setUp(self): - """Set up test fixtures.""" - self.config = Config.get_instance() - self.test_cases_dir = os.path.join(os.path.dirname(__file__), "..", "test_cases") - - @patch('stage0_py_utils.MongoIO.get_instance') - def test_render_simple(self, mock_get_instance): - """Test simple rendering.""" - # Arrange - mock_get_instance.return_value = MagicMock() - self.config.INPUT_FOLDER = os.path.join(self.test_cases_dir, "small_sample") - schema_manager = SchemaManager() - version_name = "simple.1.0.0.1" - - # Act - rendered_bson = schema_manager.render_one(version_name, SchemaFormat.BSON) - rendered_json = schema_manager.render_one(version_name, SchemaFormat.JSON) - - # Assert - expected_bson = self._load_bson(version_name) - expected_json = self._load_json(version_name) - - self.assertEqual(rendered_bson, expected_bson, f"BSON schema mismatch, rendered: {rendered_bson}") - self.assertEqual(rendered_json, expected_json, f"JSON schema mismatch, rendered: {rendered_json}") - - @patch('stage0_py_utils.MongoIO.get_instance') - def test_render_nested_refs(self, mock_get_instance): - """Test rendering of nested $refs.""" - # Arrange - mock_get_instance.return_value = MagicMock() - self.config.INPUT_FOLDER = os.path.join(self.test_cases_dir, "complex_refs") - schema_manager = SchemaManager() - version_name = "workshop.1.0.0.1" - - # Act - rendered_bson = schema_manager.render_one(version_name, SchemaFormat.BSON) - rendered_json = schema_manager.render_one(version_name, SchemaFormat.JSON) - - # Assert - expected_bson = self._load_bson(version_name) - expected_json = self._load_json(version_name) - self.assertEqual(rendered_bson, expected_bson, f"BSON schema mismatch, rendered: {rendered_bson}") - self.assertEqual(rendered_json, expected_json, f"JSON schema mismatch, rendered: {rendered_json}") - - @patch('stage0_py_utils.MongoIO.get_instance') - def test_render_organization(self, mock_get_instance): - """Test rendering with complex custom types.""" - # Arrange - mock_get_instance.return_value = MagicMock() - self.config.INPUT_FOLDER = os.path.join(self.test_cases_dir, "large_sample") - schema_manager = SchemaManager() - version_name = "organization.1.0.0.1" - - # Act - rendered_bson = schema_manager.render_one(version_name, SchemaFormat.BSON) - rendered_json = schema_manager.render_one(version_name, SchemaFormat.JSON) - - # Assert - expected_bson = self._load_bson(version_name) - expected_json = self._load_json(version_name) - - self.assertEqual(rendered_bson, expected_bson, f"BSON schema mismatch, rendered: {rendered_bson}") - self.assertEqual(rendered_json, expected_json, f"JSON schema mismatch, rendered: {rendered_json}") - - @patch('stage0_py_utils.MongoIO.get_instance') - def test_render_media(self, mock_get_instance): - """Test rendering with complex defined types.""" - # Arrange - mock_get_instance.return_value = MagicMock() - self.config.INPUT_FOLDER = os.path.join(self.test_cases_dir, "large_sample") - schema_manager = SchemaManager() - version_name = "media.1.0.0.1" - - # Act - rendered_bson = schema_manager.render_one(version_name, SchemaFormat.BSON) - rendered_json = schema_manager.render_one(version_name, SchemaFormat.JSON) - - # Assert - expected_bson = self._load_bson(version_name) - expected_json = self._load_json(version_name) - - self.assertEqual(rendered_bson, expected_bson, f"BSON schema mismatch, rendered: {rendered_bson}") - self.assertEqual(rendered_json, expected_json, f"JSON 
schema mismatch, rendered: {rendered_json}") - - @patch('stage0_py_utils.MongoIO.get_instance') - def test_render_user_1001(self, mock_get_instance): - """Test rendering a complex schema.""" - # Arrange - mock_get_instance.return_value = MagicMock() - self.config.INPUT_FOLDER = os.path.join(self.test_cases_dir, "large_sample") - schema_manager = SchemaManager() - version_name = "user.1.0.0.1" - - # Act - rendered_bson = schema_manager.render_one(version_name, SchemaFormat.BSON) - rendered_json = schema_manager.render_one(version_name, SchemaFormat.JSON) - - # Assert - expected_bson = self._load_bson(version_name) - expected_json = self._load_json(version_name) - - self.assertEqual(rendered_bson, expected_bson, f"BSON schema mismatch, rendered: {rendered_bson}") - self.assertEqual(rendered_json, expected_json, f"JSON schema mismatch, rendered: {rendered_json}") - - @patch('stage0_py_utils.MongoIO.get_instance') - def test_render_user_1002(self, mock_get_instance): - """Test rendering a complex schema.""" - # Arrange - mock_get_instance.return_value = MagicMock() - self.config.INPUT_FOLDER = os.path.join(self.test_cases_dir, "large_sample") - schema_manager = SchemaManager() - version_name = "user.1.0.0.2" - - # Act - rendered_bson = schema_manager.render_one(version_name, SchemaFormat.BSON) - rendered_json = schema_manager.render_one(version_name, SchemaFormat.JSON) - - # Assert - expected_bson = self._load_bson(version_name) - expected_json = self._load_json(version_name) - - self.assertEqual(rendered_bson, expected_bson, f"BSON schema mismatch, rendered: {rendered_bson}") - self.assertEqual(rendered_json, expected_json, f"JSON schema mismatch, rendered: {rendered_json}") - - @patch('stage0_py_utils.MongoIO.get_instance') - def test_render_user_1013(self, mock_get_instance): - """Test rendering a complex schema.""" - # Arrange - mock_get_instance.return_value = MagicMock() - self.config.INPUT_FOLDER = os.path.join(self.test_cases_dir, "large_sample") - schema_manager = SchemaManager() - version_name = "user.1.0.1.3" - - # Act - rendered_bson = schema_manager.render_one(version_name, SchemaFormat.BSON) - rendered_json = schema_manager.render_one(version_name, SchemaFormat.JSON) - - # Assert - expected_bson = self._load_bson(version_name) - expected_json = self._load_json(version_name) - - self.assertEqual(rendered_bson, expected_bson, f"BSON schema mismatch, rendered: {rendered_bson}") - self.assertEqual(rendered_json, expected_json, f"JSON schema mismatch, rendered: {rendered_json}") - - @patch('stage0_py_utils.MongoIO.get_instance') - def test_render_search_1001(self, mock_get_instance): - """Test rendering a complex schema.""" - # Arrange - mock_get_instance.return_value = MagicMock() - self.config.INPUT_FOLDER = os.path.join(self.test_cases_dir, "large_sample") - schema_manager = SchemaManager() - version_name = "search.1.0.0.1" - - # Act - rendered_bson = schema_manager.render_one(version_name, SchemaFormat.BSON) - rendered_json = schema_manager.render_one(version_name, SchemaFormat.JSON) - - # Assert - expected_bson = self._load_bson(version_name) - expected_json = self._load_json(version_name) - - self.assertEqual(rendered_bson, expected_bson, f"BSON schema mismatch, rendered: {rendered_bson}") - self.assertEqual(rendered_json, expected_json, f"JSON schema mismatch, rendered: {rendered_json}") - - @patch('stage0_py_utils.MongoIO.get_instance') - def test_render_search_1002(self, mock_get_instance): - """Test rendering a complex schema.""" - # Arrange - mock_get_instance.return_value 
= MagicMock() - self.config.INPUT_FOLDER = os.path.join(self.test_cases_dir, "large_sample") - schema_manager = SchemaManager() - version_name = "search.1.0.0.2" - - # Act - rendered_bson = schema_manager.render_one(version_name, SchemaFormat.BSON) - rendered_json = schema_manager.render_one(version_name, SchemaFormat.JSON) - - # Assert - expected_bson = self._load_bson(version_name) - expected_json = self._load_json(version_name) - - self.assertEqual(rendered_bson, expected_bson, f"BSON schema mismatch, rendered: {rendered_bson}") - self.assertEqual(rendered_json, expected_json, f"JSON schema mismatch, rendered: {rendered_json}") - - @patch('stage0_py_utils.MongoIO.get_instance') - def test_render_search_1013(self, mock_get_instance): - """Test rendering a complex schema.""" - # Arrange - mock_get_instance.return_value = MagicMock() - self.config.INPUT_FOLDER = os.path.join(self.test_cases_dir, "large_sample") - schema_manager = SchemaManager() - version_name = "search.1.0.1.3" - - # Act - rendered_bson = schema_manager.render_one(version_name, SchemaFormat.BSON) - rendered_json = schema_manager.render_one(version_name, SchemaFormat.JSON) - - # Assert - expected_bson = self._load_bson(version_name) - expected_json = self._load_json(version_name) - - self.assertEqual(rendered_bson, expected_bson, f"BSON schema mismatch, rendered: {rendered_bson}") - self.assertEqual(rendered_json, expected_json, f"JSON schema mismatch, rendered: {rendered_json}") - - def _load_bson(self, version_name: str) -> dict: - """Helper method to load bson schema JSON files.""" - file_path = os.path.join(self.config.INPUT_FOLDER, "expected", "bson_schema", f"{version_name}.json") - with open(file_path, 'r') as f: - return json.load(f) - - def _load_json(self, version_name: str) -> dict: - """Helper method to load JSON Schema yaml files.""" - file_path = os.path.join(self.config.INPUT_FOLDER, "expected", "json_schema", f"{version_name}.yaml") - with open(file_path, 'r') as f: - return yaml.safe_load(f) - -if __name__ == '__main__': - unittest.main() \ No newline at end of file diff --git a/tests/managers/test_schema_validation.py b/tests/managers/test_schema_validation.py deleted file mode 100644 index 1b81817..0000000 --- a/tests/managers/test_schema_validation.py +++ /dev/null @@ -1,136 +0,0 @@ -import unittest -import os -from unittest.mock import MagicMock, patch -from stage0_mongodb_api.managers.schema_manager import SchemaManager -from stage0_mongodb_api.managers.config_manager import ConfigManager -from stage0_py_utils import Config -from stage0_mongodb_api.managers.schema_validator import SchemaValidator, SchemaValidationError -from stage0_mongodb_api.managers.schema_types import SchemaType, ValidationContext - -class TestSchemaValidation(unittest.TestCase): - """Test suite for schema validation functionality.""" - - def setUp(self): - """Set up test fixtures.""" - self.config = Config.get_instance() - self.test_cases_dir = os.path.join(os.path.dirname(__file__), "..", "test_cases") - - @patch('stage0_py_utils.MongoIO.get_instance') - def test_validate_minimum_valid(self, mock_get_instance): - """Test validation of minimum valid schema.""" - # Arrange - mock_get_instance.return_value = MagicMock() - self.config.INPUT_FOLDER = os.path.join(self.test_cases_dir, "minimum_valid") - schema_manager = SchemaManager() - - # Act - errors = schema_manager.validate_schema() - - # Assert - self.assertEqual(schema_manager.load_errors, []) - self.assertEqual(errors, []) - - @patch('stage0_py_utils.MongoIO.get_instance') - 
def test_validate_small_sample(self, mock_get_instance): - """Test validation of small sample schema.""" - # Arrange - self.config.INPUT_FOLDER = os.path.join(self.test_cases_dir, "small_sample") - mock_get_instance.return_value = MagicMock() - schema_manager = SchemaManager() - - # Act - errors = schema_manager.validate_schema() - - # Assert - self.assertEqual(schema_manager.load_errors, []) - self.assertEqual(errors, []) - - @patch('stage0_py_utils.MongoIO.get_instance') - def test_validate_large_sample(self, mock_get_instance): - """Test validation of large sample schema.""" - # Arrange - self.config.INPUT_FOLDER = os.path.join(self.test_cases_dir, "large_sample") - mock_get_instance.return_value = MagicMock() - schema_manager = SchemaManager() - - # Act - errors = schema_manager.validate_schema() - - # Assert - self.assertEqual(schema_manager.load_errors, []) - self.assertEqual(errors, []) - - @patch('stage0_py_utils.MongoIO.get_instance') - def test_validate_complex_refs(self, mock_get_instance): - """Test validation of complex nested $refs.""" - # Arrange - self.config.INPUT_FOLDER = os.path.join(self.test_cases_dir, "complex_refs") - mock_get_instance.return_value = MagicMock() - schema_manager = SchemaManager() - - # Act - errors = schema_manager.validate_schema() - - # Assert - self.assertEqual(schema_manager.load_errors, []) - self.assertEqual(errors, []) - - @patch('stage0_py_utils.MongoIO.get_instance') - def test_validation_errors(self, mock_get_instance): - """Test validation with all validation errors.""" - # Arrange - self.config.INPUT_FOLDER = os.path.join(self.test_cases_dir, "validation_errors") - mock_get_instance.return_value = MagicMock() - schema_manager = SchemaManager() - config_manager = ConfigManager() - - # Act - schema_errors = schema_manager.validate_schema() - config_errors = config_manager.validate_configs() - - # Assert - Schema validation errors - expected_schema_error_ids = { - # Schema Validator validation errors - "VLD-001", "VLD-002", "VLD-003", "VLD-004", "VLD-005", # Schema validation errors - "VLD-101", "VLD-102", "VLD-103", "VLD-104", "VLD-106", # Enumerator validation errors - "VLD-108", # Enumerator description type error - "VLD-201", "VLD-202", "VLD-203", "VLD-204", # Primitive type validation errors - "VLD-301", # Complex type basic validation - "VLD-401", # Required fields validation - "VLD-601", # Custom type validation - "VLD-701", # Object type validation - "VLD-801", # Array type validation - "VLD-901", "VLD-902", # Enum type validation - "VLD-1001", "VLD-1002", "VLD-1003", # OneOf type validation - } - actual_schema_error_ids = {error.get('error_id') for error in schema_errors if 'error_id' in error} - missing_schema_error_ids = expected_schema_error_ids - actual_schema_error_ids - extra_schema_error_ids = actual_schema_error_ids - expected_schema_error_ids - self.assertEqual(missing_schema_error_ids, set()) - self.assertEqual(extra_schema_error_ids, set()) - - # Assert - Config validation errors - expected_config_error_ids = { - # Config Manager validation errors - "CFG-101", # Invalid config format - "CFG-201", "CFG-202", # Missing required fields - "CFG-501", # Invalid version format - "CFG-601", # Missing version number - "CFG-701", # Invalid version format - } - actual_config_error_ids = {error.get('error_id') for error in config_errors if 'error_id' in error} - missing_config_error_ids = expected_config_error_ids - actual_config_error_ids - extra_config_error_ids = actual_config_error_ids - expected_config_error_ids - 
self.assertEqual(missing_config_error_ids, set()) - - # Config validation now includes schema validation errors, so we expect both - # Check that config errors are present - self.assertTrue(expected_config_error_ids.issubset(actual_config_error_ids)) - - # Check that schema validation errors are also included - schema_error_ids_in_config = {error.get('error_id') for error in config_errors - if 'error_id' in error and error.get('error_id', '').startswith('VLD-')} - self.assertTrue(len(schema_error_ids_in_config) > 0, "Schema validation errors should be included in config validation") - -if __name__ == '__main__': - unittest.main() \ No newline at end of file diff --git a/tests/managers/test_version_manager.py b/tests/managers/test_version_manager.py deleted file mode 100644 index 8c5b872..0000000 --- a/tests/managers/test_version_manager.py +++ /dev/null @@ -1,177 +0,0 @@ -import unittest -from unittest.mock import patch, MagicMock -from stage0_mongodb_api.managers.version_manager import VersionManager -from stage0_py_utils import Config - -class TestVersionManager(unittest.TestCase): - """Test cases for VersionManager static methods.""" - - @patch('stage0_mongodb_api.managers.version_manager.MongoIO.get_instance') - def test_get_current_version_empty_collection_name(self, mock_mongo_instance): - """Test getting current version with empty collection name""" - with self.assertRaises(ValueError) as context: - VersionManager.get_current_version("") - self.assertEqual(str(context.exception), "Collection name cannot be empty") - - @patch('stage0_mongodb_api.managers.version_manager.MongoIO.get_instance') - def test_get_current_version_existing(self, mock_mongo_instance): - """Test getting current version when it exists""" - mock_mongo = MagicMock() - mock_mongo_instance.return_value = mock_mongo - - # Test with version without collection name - mock_mongo.get_documents.return_value = [{ - "collection_name": "test_collection", - "current_version": "1.2.3.4" - }] - - version = VersionManager.get_current_version("test_collection") - self.assertEqual(version, "test_collection.1.2.3.4") - - # Test with version that already includes collection name - mock_mongo.get_documents.return_value = [{ - "collection_name": "test_collection", - "current_version": "test_collection.1.2.3.4" - }] - - version = VersionManager.get_current_version("test_collection") - self.assertEqual(version, "test_collection.1.2.3.4") - - mock_mongo.get_documents.assert_called_with( - Config.get_instance().VERSION_COLLECTION_NAME, - match={"collection_name": "test_collection"} - ) - - @patch('stage0_mongodb_api.managers.version_manager.MongoIO.get_instance') - def test_get_current_version_no_version_exists(self, mock_mongo_instance): - """Test getting current version when no version exists""" - mock_mongo = MagicMock() - mock_mongo_instance.return_value = mock_mongo - - # Arrange - mock_mongo.get_documents.return_value = [] - - # Act - version = VersionManager.get_current_version("test_collection") - - # Assert - self.assertEqual(version, "test_collection.0.0.0.0") - mock_mongo.get_documents.assert_called_once_with( - Config.get_instance().VERSION_COLLECTION_NAME, - match={"collection_name": "test_collection"} - ) - - @patch('stage0_mongodb_api.managers.version_manager.MongoIO.get_instance') - def test_get_current_version_multiple_versions_exist(self, mock_mongo_instance): - """Test getting current version when multiple versions exist""" - mock_mongo = MagicMock() - mock_mongo_instance.return_value = mock_mongo - - # Arrange - 
mock_mongo.get_documents.return_value = [ - {"collection_name": "test_collection", "current_version": "1.2.3.4"}, - {"collection_name": "test_collection", "current_version": "1.2.3.5"} - ] - - # Act & Assert - with self.assertRaises(RuntimeError) as context: - VersionManager.get_current_version("test_collection") - self.assertEqual(str(context.exception), "Multiple versions found for collection: test_collection") - - @patch('stage0_mongodb_api.managers.version_manager.MongoIO.get_instance') - def test_get_current_version_invalid_document(self, mock_mongo_instance): - """Test getting current version when document is invalid""" - mock_mongo = MagicMock() - mock_mongo_instance.return_value = mock_mongo - - # Arrange - mock_mongo.get_documents.return_value = [{ - "collection_name": "test_collection" - # Missing current_version field - }] - - # Act & Assert - with self.assertRaises(RuntimeError) as context: - VersionManager.get_current_version("test_collection") - self.assertEqual(str(context.exception), "Invalid version document for collection: test_collection") - - @patch('stage0_mongodb_api.managers.version_manager.MongoIO.get_instance') - def test_update_version_empty_collection_name(self, mock_mongo_instance): - """Test updating version with empty collection name""" - with self.assertRaises(ValueError) as context: - VersionManager.update_version("", "1.2.3.4") - self.assertEqual(str(context.exception), "Collection name cannot be empty") - - @patch('stage0_mongodb_api.managers.version_manager.MongoIO.get_instance') - def test_update_version_valid(self, mock_mongo_instance): - """Test updating version with valid version string""" - mock_mongo = MagicMock() - mock_mongo_instance.return_value = mock_mongo - - # Test with version without collection name - mock_mongo.upsert_document.return_value = True - - result = VersionManager.update_version("test_collection", "1.2.3.4") - - self.assertEqual(result["status"], "success") - self.assertEqual(result["operation"], "version_update") - self.assertEqual(result["collection"], "test_collection") - self.assertEqual(result["details_type"], "version") - self.assertEqual(result["details"]["version"], "test_collection.1.2.3.4") - - # Test with version that already includes collection name - result = VersionManager.update_version("test_collection", "test_collection.1.2.3.4") - - self.assertEqual(result["status"], "success") - self.assertEqual(result["operation"], "version_update") - self.assertEqual(result["collection"], "test_collection") - self.assertEqual(result["details_type"], "version") - self.assertEqual(result["details"]["version"], "test_collection.1.2.3.4") - - mock_mongo.upsert_document.assert_called_with( - Config.get_instance().VERSION_COLLECTION_NAME, - match={"collection_name": "test_collection"}, - data={"collection_name": "test_collection", "current_version": "test_collection.1.2.3.4"} - ) - - @patch('stage0_mongodb_api.managers.version_manager.MongoIO.get_instance') - def test_update_version_invalid(self, mock_mongo_instance): - """Test updating version with invalid version string""" - # Arrange - invalid_versions = [ - "1.2.3.4.5.6", # Too many components - "a.b.c.d", # Non-numeric - "1.2.3.", # Trailing dot - "", # Empty string - "1.2.3.4.", # Trailing dot with schema - "1..2.3", # Double dot - ".1.2.3", # Leading dot - "1000.0.0.0", # Exceeds MAX_VERSION - "user.1.2.3", # Collection with too few components - "user.1.2.3.4.5", # Collection with too many components - "user..1.2.3.4", # Collection with double dot - "user.1.2.3.4.", # 
Collection with trailing dot - ".user.1.2.3.4", # Collection with leading dot - ] - - # Act & Assert - for invalid_version in invalid_versions: - with self.assertRaises(ValueError): - VersionManager.update_version("test_collection", invalid_version) - - @patch('stage0_mongodb_api.managers.version_manager.MongoIO.get_instance') - def test_update_version_failed_upsert(self, mock_mongo_instance): - """Test updating version when upsert fails""" - mock_mongo = MagicMock() - mock_mongo_instance.return_value = mock_mongo - - # Arrange - mock_mongo.upsert_document.return_value = None - - # Act & Assert - with self.assertRaises(RuntimeError) as context: - VersionManager.update_version("test_collection", "1.2.3.4") - self.assertEqual(str(context.exception), "Failed to update version for collection: test_collection") - -if __name__ == '__main__': - unittest.main() diff --git a/tests/managers/test_version_number.py b/tests/managers/test_version_number.py deleted file mode 100644 index e0e58dd..0000000 --- a/tests/managers/test_version_number.py +++ /dev/null @@ -1,123 +0,0 @@ -import unittest -from stage0_mongodb_api.managers.version_number import VersionNumber - -class TestVersionNumber(unittest.TestCase): - def test_valid_version(self): - """Test valid version string parsing and access.""" - # Test without collection name - version = VersionNumber("1.2.3.4") - self.assertEqual(version.parts, [1, 2, 3, 4]) - self.assertEqual(str(version), "1.2.3.4") - self.assertEqual(version.get_schema_version(), "1.2.3") - self.assertEqual(version.get_enumerator_version(), 4) - self.assertIsNone(version.collection_name) - - # Test with collection name - version = VersionNumber("user.1.2.3.4") - self.assertEqual(version.parts, [1, 2, 3, 4]) - self.assertEqual(str(version), "user.1.2.3.4") - self.assertEqual(version.get_schema_version(), "user.1.2.3") - self.assertEqual(version.get_enumerator_version(), 4) - self.assertEqual(version.collection_name, "user") - - def test_invalid_format(self): - """Test invalid version string formats.""" - invalid_versions = [ - "", # Empty string - "1.2.3", # Too few components - "1.2.3.4.5.6", # Too many components - "a.b.c.d", # Non-numeric - "1.2.3.4.", # Trailing dot - "1..2.3.4", # Double dot - ".1.2.3.4", # Leading dot - "user.1.2.3", # Collection with too few components - "user.1.2.3.4.5", # Collection with too many components - "user..1.2.3.4", # Collection with double dot - "user.1.2.3.4.", # Collection with trailing dot - ".user.1.2.3.4", # Collection with leading dot - ] - - for version in invalid_versions: - with self.assertRaises(ValueError, msg=f"Expected ValueError for invalid version: {version}"): - VersionNumber(version) - - def test_invalid_range(self): - """Test version numbers exceeding maximum allowed value.""" - invalid_versions = [ - "1000.0.0.0", # Exceeds MAX_VERSION - "0.1000.0.0", # Exceeds MAX_VERSION - "0.0.1000.0", # Exceeds MAX_VERSION - "0.0.0.1000", # Exceeds MAX_VERSION - "user.1000.0.0.0", # Collection name, exceeds MAX_VERSION - "user.0.1000.0.0", # Collection name, exceeds MAX_VERSION - "user.0.0.1000.0", # Collection name, exceeds MAX_VERSION - "user.0.0.0.1000", # Collection name, exceeds MAX_VERSION - ] - - for version in invalid_versions: - with self.assertRaises(ValueError): - VersionNumber(version) - - def test_version_comparison_naked(self): - """Test version comparison with naked version numbers (no collection name).""" - v1 = VersionNumber("1.2.3.4") - v2 = VersionNumber("1.2.3.5") - v3 = VersionNumber("1.2.3.4") - - # Test less than - self.assertTrue(v1 < v2) - 
self.assertFalse(v2 < v1) - - # Test equality - self.assertTrue(v1 == v3) - self.assertFalse(v1 == v2) - - # Test string comparison - self.assertTrue(v1 < "1.2.3.5") - self.assertTrue(v1 == "1.2.3.4") - - def test_version_comparison_full(self): - """Test version comparison with full version numbers (including collection name).""" - v1 = VersionNumber("user.1.2.3.4") - v2 = VersionNumber("user.1.2.3.5") - v3 = VersionNumber("user.1.2.3.4") - - # Test less than - self.assertTrue(v1 < v2) - self.assertFalse(v2 < v1) - - # Test equality - self.assertTrue(v1 == v3) - self.assertFalse(v1 == v2) - - # Test string comparison - self.assertTrue(v1 < "user.1.2.3.5") - self.assertTrue(v1 == "user.1.2.3.4") - - def test_version_comparison_mixed(self): - """Test version comparison between naked and full version numbers.""" - v1 = VersionNumber("1.2.3.4") - v2 = VersionNumber("user.1.2.3.4") - v3 = VersionNumber("other.1.2.3.4") - v4 = VersionNumber("1.2.3.5") - v5 = VersionNumber("user.1.2.3.5") - - # Test equality ignoring collection name - self.assertTrue(v1 == v2) - self.assertTrue(v1 == v3) - self.assertTrue(v2 == v3) - - # Test less than ignoring collection name - self.assertTrue(v1 < v4) - self.assertTrue(v1 < v5) - self.assertTrue(v2 < v4) - self.assertTrue(v2 < v5) - - # Test greater than ignoring collection name - self.assertTrue(v4 > v1) - self.assertTrue(v4 > v2) - self.assertTrue(v5 > v1) - self.assertTrue(v5 > v2) - -if __name__ == '__main__': - unittest.main() \ No newline at end of file diff --git a/tests/routes/test_collection_routes.py b/tests/routes/test_collection_routes.py deleted file mode 100644 index a03a1eb..0000000 --- a/tests/routes/test_collection_routes.py +++ /dev/null @@ -1,202 +0,0 @@ -import unittest -from unittest.mock import patch, MagicMock -from flask import Flask -from stage0_mongodb_api.routes.collection_routes import create_collection_routes -from stage0_mongodb_api.services.collection_service import CollectionNotFoundError, CollectionProcessingError - -class TestCollectionRoutes(unittest.TestCase): - def setUp(self): - """Set up the Flask test client and app context.""" - self.app = Flask(__name__) - self.app.register_blueprint(create_collection_routes(), url_prefix='/api/collections') - self.client = self.app.test_client() - - @patch('stage0_mongodb_api.routes.collection_routes.CollectionService') - def test_list_collections_success(self, mock_collection_service): - """Test listing all collections successfully""" - # Arrange - mock_collections = [ - {"collection_name": "users", "version": "1.0.0.1"}, - {"collection_name": "organizations", "version": "1.0.0.1"} - ] - mock_collection_service.list_collections.return_value = mock_collections - - # Act - response = self.client.get('/api/collections/') - - # Assert - self.assertEqual(response.status_code, 200) - self.assertEqual(response.json, mock_collections) - mock_collection_service.list_collections.assert_called_once() - - @patch('stage0_mongodb_api.routes.collection_routes.CollectionService') - def test_list_collections_processing_error(self, mock_collection_service): - """Test listing collections with processing error""" - # Arrange - errors = [{"error": "load_error", "error_id": "CFG-001", "message": "Failed to load configs"}] - mock_collection_service.list_collections.side_effect = CollectionProcessingError("collections", errors) - - # Act - response = self.client.get('/api/collections/') - - # Assert - self.assertEqual(response.status_code, 500) - self.assertEqual(response.json, errors) - - 
@patch('stage0_mongodb_api.routes.collection_routes.CollectionService') - def test_list_collections_unexpected_error(self, mock_collection_service): - """Test listing collections with unexpected error""" - # Arrange - mock_collection_service.list_collections.side_effect = Exception("Unexpected error") - - # Act - response = self.client.get('/api/collections/') - - # Assert - self.assertEqual(response.status_code, 500) - self.assertEqual(response.json[0]["error"], "Failed to list collections") - self.assertEqual(response.json[0]["error_id"], "API-001") - - @patch('stage0_mongodb_api.routes.collection_routes.CollectionService') - def test_process_collections_success(self, mock_collection_service): - """Test processing all collections successfully""" - # Arrange - mock_results = [ - {"status": "success", "collection": "users", "operations": []}, - {"status": "success", "collection": "organizations", "operations": []} - ] - mock_collection_service.process_collections.return_value = mock_results - - # Act - response = self.client.post('/api/collections/') - - # Assert - self.assertEqual(response.status_code, 200) - self.assertEqual(response.json, mock_results) - mock_collection_service.process_collections.assert_called_once() - - @patch('stage0_mongodb_api.routes.collection_routes.CollectionService') - def test_process_collections_processing_error(self, mock_collection_service): - """Test processing collections with processing error""" - # Arrange - errors = [{"error": "validation_error", "error_id": "CFG-101", "message": "Invalid config format"}] - mock_collection_service.process_collections.side_effect = CollectionProcessingError("collections", errors) - - # Act - response = self.client.post('/api/collections/') - - # Assert - self.assertEqual(response.status_code, 500) - self.assertEqual(response.json, errors) - - @patch('stage0_mongodb_api.routes.collection_routes.CollectionService') - def test_get_collection_success(self, mock_collection_service): - """Test getting a specific collection successfully""" - # Arrange - collection_name = "users" - mock_collection = {"name": "users", "versions": [{"version": "1.0.0.1"}]} - mock_collection_service.get_collection.return_value = mock_collection - - # Act - response = self.client.get(f'/api/collections/{collection_name}/') - - # Assert - self.assertEqual(response.status_code, 200) - self.assertEqual(response.json, mock_collection) - mock_collection_service.get_collection.assert_called_once_with(collection_name, mock_collection_service.get_collection.call_args[0][1]) - - @patch('stage0_mongodb_api.routes.collection_routes.CollectionService') - def test_get_collection_not_found(self, mock_collection_service): - """Test getting a collection that doesn't exist""" - # Arrange - collection_name = "nonexistent" - mock_collection_service.get_collection.side_effect = CollectionNotFoundError(collection_name) - - # Act - response = self.client.get(f'/api/collections/{collection_name}/') - - # Assert - self.assertEqual(response.status_code, 404) - self.assertEqual(response.data.decode(), "Collection not found") - - @patch('stage0_mongodb_api.routes.collection_routes.CollectionService') - def test_get_collection_processing_error(self, mock_collection_service): - """Test getting a collection with processing error""" - # Arrange - collection_name = "users" - errors = [{"error": "load_error", "error_id": "CFG-001", "message": "Failed to load configs"}] - mock_collection_service.get_collection.side_effect = CollectionProcessingError(collection_name, errors) - - # 
Act - response = self.client.get(f'/api/collections/{collection_name}/') - - # Assert - self.assertEqual(response.status_code, 500) - self.assertEqual(response.json, errors) - - @patch('stage0_mongodb_api.routes.collection_routes.CollectionService') - def test_process_specific_collection_success(self, mock_collection_service): - """Test processing a specific collection successfully""" - # Arrange - collection_name = "users" - mock_result = { - "status": "success", - "collection": collection_name, - "operations": [{"operation": "apply_schema", "status": "success"}] - } - mock_collection_service.process_collection.return_value = mock_result - - # Act - response = self.client.post(f'/api/collections/{collection_name}/') - - # Assert - self.assertEqual(response.status_code, 200) - self.assertEqual(response.json, mock_result) - mock_collection_service.process_collection.assert_called_once_with(collection_name, mock_collection_service.process_collection.call_args[0][1]) - - @patch('stage0_mongodb_api.routes.collection_routes.CollectionService') - def test_process_specific_collection_not_found(self, mock_collection_service): - """Test processing a collection that doesn't exist""" - # Arrange - collection_name = "nonexistent" - mock_collection_service.process_collection.side_effect = CollectionNotFoundError(collection_name) - - # Act - response = self.client.post(f'/api/collections/{collection_name}/') - - # Assert - self.assertEqual(response.status_code, 404) - self.assertEqual(response.data.decode(), "Collection not found") - - @patch('stage0_mongodb_api.routes.collection_routes.CollectionService') - def test_process_specific_collection_processing_error(self, mock_collection_service): - """Test processing a specific collection with processing error""" - # Arrange - collection_name = "users" - errors = [{"error": "processing_error", "error_id": "API-005", "message": "Failed to process collection"}] - mock_collection_service.process_collection.side_effect = CollectionProcessingError(collection_name, errors) - - # Act - response = self.client.post(f'/api/collections/{collection_name}/') - - # Assert - self.assertEqual(response.status_code, 500) - self.assertEqual(response.json, errors) - - @patch('stage0_mongodb_api.routes.collection_routes.CollectionService') - def test_process_specific_collection_unexpected_error(self, mock_collection_service): - """Test processing a specific collection with unexpected error""" - # Arrange - collection_name = "users" - mock_collection_service.process_collection.side_effect = Exception("Unexpected error") - - # Act - response = self.client.post(f'/api/collections/{collection_name}/') - - # Assert - self.assertEqual(response.status_code, 500) - self.assertEqual(response.json[0]["error"], "Failed to process collection") - self.assertEqual(response.json[0]["error_id"], "API-004") - -if __name__ == '__main__': - unittest.main() \ No newline at end of file diff --git a/tests/routes/test_config_routes.py b/tests/routes/test_config_routes.py new file mode 100644 index 0000000..b2ef44e --- /dev/null +++ b/tests/routes/test_config_routes.py @@ -0,0 +1,108 @@ +import unittest +from unittest.mock import patch, Mock +from flask import Flask +from configurator.routes.config_routes import create_config_routes +from configurator.utils.configurator_exception import ConfiguratorException, ConfiguratorEvent + + +class TestConfigRoutes(unittest.TestCase): + """Test cases for config routes.""" + + def setUp(self): + """Set up test fixtures.""" + self.app = Flask(__name__) + 
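# Mount the blueprint at the /api/config prefix that the route tests below assume
+        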
self.app.register_blueprint(create_config_routes(), url_prefix='/api/config')
+        self.client = self.app.test_client()
+
+    @patch('configurator.routes.config_routes.Config')
+    def test_get_config_success(self, mock_config_class):
+        """Test successful GET /api/config."""
+        # Arrange
+        mock_config = Mock()
+        mock_config.to_dict.return_value = {"config_items": [{"name": "TEST", "value": "test"}]}
+        mock_config_class.get_instance.return_value = mock_config
+
+        # Act
+        response = self.client.get('/api/config/')
+
+        # Assert
+        self.assertEqual(response.status_code, 200)
+        response_data = response.json
+        # For successful responses, expect data directly, not wrapped in event envelope
+        self.assertIn("config_items", response_data)
+        self.assertIsInstance(response_data["config_items"], list)
+
+    def test_get_config_post_method_not_allowed(self):
+        """Test that POST method is not allowed on /api/config."""
+        # Act
+        response = self.client.post('/api/config/')
+
+        # Assert
+        self.assertEqual(response.status_code, 405)
+
+    def test_get_config_put_method_not_allowed(self):
+        """Test that PUT method is not allowed on /api/config."""
+        # Act
+        response = self.client.put('/api/config/')
+
+        # Assert
+        self.assertEqual(response.status_code, 405)
+
+    def test_get_config_delete_method_not_allowed(self):
+        """Test that DELETE method is not allowed on /api/config."""
+        # Act
+        response = self.client.delete('/api/config/')
+
+        # Assert
+        self.assertEqual(response.status_code, 405)
+
+    def test_get_config_configurator_exception(self):
+        """Test GET /api/config when ConfiguratorException is raised."""
+        # Patch Config.get_instance().to_dict to raise ConfiguratorException
+        # (ConfiguratorException and ConfiguratorEvent are already imported at module scope)
+        import configurator.routes.config_routes as config_routes_mod
+        orig_get_instance = config_routes_mod.Config.get_instance
+        class DummyConfig:
+            def to_dict(self):
+                raise ConfiguratorException("Config error", ConfiguratorEvent("CFG-01", "CONFIG_ERROR"))
+        config_routes_mod.Config.get_instance = staticmethod(lambda: DummyConfig())
+        try:
+            app = Flask(__name__)
+            app.register_blueprint(config_routes_mod.create_config_routes(), url_prefix='/api/config')
+            client = app.test_client()
+            response = client.get('/api/config/')
+            self.assertEqual(response.status_code, 500)
+            self.assertIsInstance(response.json, dict)
+            self.assertIn("id", response.json)
+            self.assertIn("type", response.json)
+        finally:
+            config_routes_mod.Config.get_instance = orig_get_instance
+
+    @patch('configurator.routes.config_routes.Config')
+    def test_get_config_general_exception(self, mock_config_class):
+        """Test GET /api/config when a general Exception is raised."""
+        # Arrange
+        mock_config = Mock()
+        mock_config.to_dict.side_effect = Exception("Unexpected error")
+        mock_config_class.get_instance.return_value = mock_config
+
+        # Create a new app with the mocked Config
+        app = Flask(__name__)
+        app.register_blueprint(create_config_routes(), url_prefix='/api/config')
+        client = app.test_client()
+
+        # Act
+        response = client.get('/api/config/')
+
+        # Assert
+        self.assertEqual(response.status_code, 500)
+        response_data = response.json
+        self.assertIn("id", response_data)
+        self.assertIn("type", response_data)
+        self.assertIn("status", response_data)
+        self.assertIn("data", response_data)
+        self.assertEqual(response_data["status"], "FAILURE")
+
+
+if __name__ == '__main__':
+    unittest.main()
\ No newline at end of file
diff --git a/tests/routes/test_configuration_routes.py 
b/tests/routes/test_configuration_routes.py new file mode 100644 index 0000000..66d59ce --- /dev/null +++ b/tests/routes/test_configuration_routes.py @@ -0,0 +1,446 @@ +import unittest +from unittest.mock import patch, Mock +from flask import Flask +from configurator.routes.configuration_routes import create_configuration_routes +from configurator.utils.configurator_exception import ConfiguratorException, ConfiguratorEvent + + +class TestConfigurationRoutes(unittest.TestCase): + """Test cases for configuration routes.""" + + def setUp(self): + """Set up test fixtures.""" + self.app = Flask(__name__) + self.app.register_blueprint(create_configuration_routes(), url_prefix='/api/configurations') + self.client = self.app.test_client() + + @patch('configurator.routes.configuration_routes.FileIO') + def test_list_configurations_success(self, mock_file_io): + """Test successful GET /api/configurations/.""" + # Arrange + # Create mock File objects with file_name attribute + mock_file1 = Mock() + mock_file1.to_dict.return_value = {"file_name": "config1.yaml", "size": 100, "created_at": "2023-01-01T00:00:00", "updated_at": "2023-01-01T00:00:00"} + mock_file2 = Mock() + mock_file2.to_dict.return_value = {"file_name": "config2.yaml", "size": 200, "created_at": "2023-01-01T00:00:00", "updated_at": "2023-01-01T00:00:00"} + mock_files = [mock_file1, mock_file2] + mock_file_io.get_documents.return_value = mock_files + + # Act + response = self.client.get('/api/configurations/') + + # Assert + self.assertEqual(response.status_code, 200) + response_data = response.json + expected_data = [mock_file1.to_dict.return_value, mock_file2.to_dict.return_value] + self.assertEqual(response_data, expected_data) + + @patch('configurator.routes.configuration_routes.FileIO') + def test_list_configurations_general_exception(self, mock_file_io): + """Test GET /api/configurations/ when FileIO raises a general exception.""" + # Arrange + mock_file_io.get_documents.side_effect = Exception("Unexpected error") + + # Act + response = self.client.get('/api/configurations/') + + # Assert + self.assertEqual(response.status_code, 500) + response_data = response.json + self.assertIn("id", response_data) + self.assertIn("type", response_data) + self.assertIn("status", response_data) + self.assertIn("data", response_data) + self.assertEqual(response_data["status"], "FAILURE") + + @patch('configurator.routes.configuration_routes.FileIO') + @patch('configurator.routes.configuration_routes.Configuration') + def test_process_configurations_success(self, mock_configuration_class, mock_file_io): + """Test successful POST /api/configurations/.""" + # Arrange + # Create mock File objects with name attribute + mock_file1 = Mock() + mock_file1.name = "config1.yaml" + mock_file2 = Mock() + mock_file2.name = "config2.yaml" + mock_files = [mock_file1, mock_file2] + mock_file_io.get_documents.return_value = mock_files + + # Mock Configuration.process() to return ConfiguratorEvent objects + mock_config1 = Mock() + mock_event1 = ConfiguratorEvent("CFG-00", "PROCESS") + mock_event1.record_success() + mock_config1.process.return_value = mock_event1 + + mock_config2 = Mock() + mock_event2 = ConfiguratorEvent("CFG-00", "PROCESS") + mock_event2.record_success() + mock_config2.process.return_value = mock_event2 + + mock_configuration_class.side_effect = [mock_config1, mock_config2] + + # Act + response = self.client.post('/api/configurations/') + + # Assert + self.assertEqual(response.status_code, 200) + response_data = response.json + # For endpoints that 
return events, expect event envelope structure + self.assertIn("id", response_data) + self.assertIn("type", response_data) + self.assertIn("status", response_data) + self.assertEqual(response_data["status"], "SUCCESS") + self.assertEqual(response_data["type"], "PROCESS_CONFIGURATIONS") + self.assertIn("sub_events", response_data) + + @patch('configurator.routes.configuration_routes.FileIO') + def test_process_configurations_general_exception(self, mock_file_io): + """Test POST /api/configurations/ when FileIO raises a general exception.""" + # Arrange + mock_file_io.get_documents.side_effect = Exception("Unexpected error") + + # Act + response = self.client.post('/api/configurations/') + + # Assert + self.assertEqual(response.status_code, 500) + response_data = response.json + self.assertIn("id", response_data) + self.assertIn("type", response_data) + self.assertIn("status", response_data) + self.assertIn("data", response_data) + self.assertEqual(response_data["status"], "FAILURE") + + @patch('configurator.routes.configuration_routes.Configuration') + def test_get_configuration_success(self, mock_configuration_class): + """Test successful GET /api/configurations//.""" + # Arrange + mock_configuration = Mock() + mock_configuration.to_dict.return_value = {"name": "test_config", "version": "1.0.0"} + mock_configuration_class.return_value = mock_configuration + + # Act + response = self.client.get('/api/configurations/test_config/') + + # Assert + self.assertEqual(response.status_code, 200) + response_data = response.json + # For successful responses, expect data directly, not wrapped in event envelope + self.assertEqual(response_data, {"name": "test_config", "version": "1.0.0"}) + + @patch('configurator.routes.configuration_routes.Configuration') + def test_get_configuration_general_exception(self, mock_configuration_class): + """Test GET /api/configurations// when Configuration raises a general exception.""" + # Arrange + mock_configuration_class.side_effect = Exception("Unexpected error") + + # Act + response = self.client.get('/api/configurations/test_config/') + + # Assert + self.assertEqual(response.status_code, 500) + response_data = response.json + self.assertIn("id", response_data) + self.assertIn("type", response_data) + self.assertIn("status", response_data) + self.assertIn("data", response_data) + self.assertEqual(response_data["status"], "FAILURE") + + @patch('configurator.routes.configuration_routes.Configuration') + def test_put_configuration_success(self, mock_configuration_class): + """Test successful PUT /api/configurations//.""" + # Arrange + test_data = {"name": "test_config", "version": "1.0.0", "_locked": False} + mock_configuration = Mock() + mock_configuration.to_dict.return_value = {"name": "test_config", "version": "1.0.0", "_locked": False} + mock_configuration.save.return_value = mock_configuration + mock_configuration_class.return_value = mock_configuration + + # Act + response = self.client.put('/api/configurations/test_config/', json=test_data) + + # Assert + self.assertEqual(response.status_code, 200) + response_data = response.json + # For successful responses, expect configuration data directly + self.assertEqual(response_data, {"name": "test_config", "version": "1.0.0", "_locked": False}) + + @patch('configurator.routes.configuration_routes.Configuration') + def test_put_configuration_general_exception(self, mock_configuration_class): + """Test PUT /api/configurations// when Configuration raises a general exception.""" + # Arrange + 
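# Raising from the constructor simulates a failure before save() is ever reached
+        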
mock_configuration_class.side_effect = Exception("Unexpected error") + test_data = {"name": "test_config", "version": "1.0.0"} + + # Act + response = self.client.put('/api/configurations/test_config/', json=test_data) + + # Assert + self.assertEqual(response.status_code, 500) + response_data = response.json + self.assertIn("id", response_data) + self.assertIn("type", response_data) + self.assertIn("status", response_data) + self.assertIn("data", response_data) + self.assertEqual(response_data["status"], "FAILURE") + + @patch('configurator.routes.configuration_routes.Configuration') + def test_delete_configuration_success(self, mock_configuration_class): + """Test successful DELETE /api/configurations//.""" + # Arrange + mock_configuration = Mock() + mock_event = Mock() + mock_event.to_dict.return_value = { + "id": "CFG-ROUTES-07", + "type": "DELETE_CONFIGURATION", + "status": "SUCCESS", + "data": {}, + "sub_events": [] + } + mock_configuration.delete.return_value = mock_event + mock_configuration_class.return_value = mock_configuration + + # Act + response = self.client.delete('/api/configurations/test_config/') + + # Assert + self.assertEqual(response.status_code, 200) + response_data = response.json + self.assertIn("id", response_data) + self.assertIn("type", response_data) + self.assertIn("status", response_data) + self.assertIn("data", response_data) + self.assertEqual(response_data["status"], "SUCCESS") + + @patch('configurator.routes.configuration_routes.Configuration') + def test_delete_configuration_general_exception(self, mock_configuration_class): + """Test DELETE /api/configurations// when Configuration raises a general exception.""" + # Arrange + mock_configuration = Mock() + mock_configuration.delete.side_effect = Exception("Unexpected error") + mock_configuration_class.return_value = mock_configuration + + # Act + response = self.client.delete('/api/configurations/test_config/') + + # Assert + self.assertEqual(response.status_code, 500) + response_data = response.json + self.assertIn("id", response_data) + self.assertIn("type", response_data) + self.assertIn("status", response_data) + self.assertIn("data", response_data) + self.assertEqual(response_data["status"], "FAILURE") + + @patch('configurator.routes.configuration_routes.Configuration') + def test_lock_unlock_configuration_success(self, mock_configuration_class): + """Test successful PATCH /api/configurations// - removed as no longer supported.""" + # This test is no longer applicable as we removed lock/unlock functionality + pass + + @patch('configurator.routes.configuration_routes.Configuration') + def test_lock_unlock_configuration_general_exception(self, mock_configuration_class): + """Test PATCH /api/configurations// when Configuration raises a general exception - removed as no longer supported.""" + # This test is no longer applicable as we removed lock/unlock functionality + pass + + @patch('configurator.routes.configuration_routes.Configuration') + def test_process_configuration_success(self, mock_configuration_class): + """Test successful POST /api/configurations//.""" + # Arrange + mock_configuration = Mock() + mock_event = Mock() + mock_event.to_dict.return_value = {"result": "success"} + mock_configuration.process.return_value = mock_event + mock_configuration_class.return_value = mock_configuration + + # Act + response = self.client.post('/api/configurations/test_config/') + + # Assert + self.assertEqual(response.status_code, 200) + response_data = response.json + # For successful responses, expect data 
directly, not wrapped in event envelope + self.assertEqual(response_data, {"result": "success"}) + + @patch('configurator.routes.configuration_routes.Configuration') + def test_process_configuration_general_exception(self, mock_configuration_class): + """Test POST /api/configurations// when Configuration raises a general exception.""" + # Arrange + mock_configuration = Mock() + mock_configuration.process.side_effect = Exception("Unexpected error") + mock_configuration_class.return_value = mock_configuration + + # Act + response = self.client.post('/api/configurations/test_config/') + + # Assert + self.assertEqual(response.status_code, 500) + response_data = response.json + self.assertIn("id", response_data) + self.assertIn("type", response_data) + self.assertIn("status", response_data) + self.assertIn("data", response_data) + self.assertEqual(response_data["status"], "FAILURE") + + @patch('configurator.routes.configuration_routes.Configuration') + def test_get_json_schema_success(self, mock_configuration_class): + """Test successful GET /api/configurations/json_schema///.""" + # Arrange + mock_configuration = Mock() + mock_configuration.get_json_schema.return_value = {"type": "object"} + mock_configuration_class.return_value = mock_configuration + + # Act + response = self.client.get('/api/configurations/json_schema/test_config/1.0.0/') + + # Assert + self.assertEqual(response.status_code, 200) + response_data = response.json + # For successful responses, expect data directly, not wrapped in event envelope + self.assertEqual(response_data, {"type": "object"}) + + @patch('configurator.routes.configuration_routes.Configuration') + def test_get_json_schema_general_exception(self, mock_configuration_class): + """Test GET /api/configurations/json_schema/// when Configuration raises a general exception.""" + # Arrange + mock_configuration = Mock() + mock_configuration.get_json_schema.side_effect = Exception("Unexpected error") + mock_configuration_class.return_value = mock_configuration + + # Act + response = self.client.get('/api/configurations/json_schema/test_config/1.0.0/') + + # Assert + self.assertEqual(response.status_code, 500) + response_data = response.json + self.assertIn("id", response_data) + self.assertIn("type", response_data) + self.assertIn("status", response_data) + self.assertIn("data", response_data) + self.assertEqual(response_data["status"], "FAILURE") + + @patch('configurator.routes.configuration_routes.Configuration') + def test_get_bson_schema_success(self, mock_configuration_class): + """Test successful GET /api/configurations/bson_schema///.""" + # Arrange + mock_configuration = Mock() + mock_configuration.get_bson_schema_for_version.return_value = {"type": "object"} + mock_configuration_class.return_value = mock_configuration + + # Act + response = self.client.get('/api/configurations/bson_schema/test_config/1.0.0/') + + # Assert + self.assertEqual(response.status_code, 200) + response_data = response.json + # For successful responses, expect data directly, not wrapped in event envelope + self.assertEqual(response_data, {"type": "object"}) + + @patch('configurator.routes.configuration_routes.Configuration') + def test_get_bson_schema_general_exception(self, mock_configuration_class): + """Test GET /api/configurations/bson_schema/// when Configuration raises a general exception.""" + # Arrange + mock_configuration = Mock() + mock_configuration.get_bson_schema_for_version.side_effect = Exception("Unexpected error") + mock_configuration_class.return_value = 
mock_configuration + + # Act + response = self.client.get('/api/configurations/bson_schema/test_config/1.0.0/') + + # Assert + self.assertEqual(response.status_code, 500) + response_data = response.json + self.assertIn("id", response_data) + self.assertIn("type", response_data) + self.assertIn("status", response_data) + self.assertIn("data", response_data) + self.assertEqual(response_data["status"], "FAILURE") + + @patch('configurator.routes.configuration_routes.TemplateService') + def test_create_collection_success(self, mock_template_service_class): + """Test successful POST /api/configurations/collection/.""" + # Arrange + mock_template_service = Mock() + mock_template_service.create_collection.return_value = {"created": True} + mock_template_service_class.return_value = mock_template_service + + # Act + response = self.client.post('/api/configurations/collection/test_collection/') + + # Assert + self.assertEqual(response.status_code, 200) + response_data = response.json + # For successful responses, expect data directly, not wrapped in event envelope + self.assertEqual(response_data, {"created": True}) + + @patch('configurator.routes.configuration_routes.TemplateService') + def test_create_collection_configurator_exception(self, mock_template_service_class): + """Test POST /api/configurations/collection/ when TemplateService raises ConfiguratorException.""" + # Arrange + mock_template_service = Mock() + event = ConfiguratorEvent("TPL-01", "TEMPLATE_ERROR") + mock_template_service.create_collection.side_effect = ConfiguratorException("Template error", event) + mock_template_service_class.return_value = mock_template_service + + # Act + response = self.client.post('/api/configurations/collection/test_collection/') + + # Assert + self.assertEqual(response.status_code, 500) + response_data = response.json + self.assertIn("id", response_data) + self.assertIn("type", response_data) + self.assertIn("status", response_data) + self.assertIn("data", response_data) + self.assertEqual(response_data["status"], "FAILURE") + + @patch('configurator.routes.configuration_routes.TemplateService') + def test_create_collection_general_exception(self, mock_template_service_class): + """Test POST /api/configurations/collection/ when TemplateService raises a general exception.""" + # Arrange + mock_template_service = Mock() + mock_template_service.create_collection.side_effect = Exception("Unexpected error") + mock_template_service_class.return_value = mock_template_service + + # Act + response = self.client.post('/api/configurations/collection/test_collection/') + + # Assert + self.assertEqual(response.status_code, 500) + response_data = response.json + self.assertIn("id", response_data) + self.assertIn("type", response_data) + self.assertIn("status", response_data) + self.assertIn("data", response_data) + self.assertEqual(response_data["status"], "FAILURE") + + @patch('configurator.routes.configuration_routes.Configuration') + def test_lock_all_configurations(self, mock_configuration_class): + """Test locking all configurations.""" + # Arrange + mock_event = ConfiguratorEvent("CFG-ROUTES-03", "LOCK_ALL_CONFIGURATIONS") + mock_event.data = { + "total_files": 2, + "operation": "lock_all" + } + mock_event.record_success() + mock_configuration_class.lock_all.return_value = mock_event + + # Act + response = self.client.patch('/api/configurations/') + + # Assert + self.assertEqual(response.status_code, 200) + data = response.get_json() + self.assertIn('id', data) + self.assertIn('type', data) + 
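# Lock-all responses are event envelopes; per-file outcomes are expected under sub_events
+        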
self.assertIn('status', data) + self.assertIn('sub_events', data) + self.assertIn('data', data) + self.assertIn('total_files', data['data']) + self.assertIn('operation', data['data']) + + +if __name__ == '__main__': + unittest.main() \ No newline at end of file diff --git a/tests/routes/test_database_routes.py b/tests/routes/test_database_routes.py new file mode 100644 index 0000000..b6d0153 --- /dev/null +++ b/tests/routes/test_database_routes.py @@ -0,0 +1,152 @@ +import unittest +from unittest.mock import patch, Mock +from flask import Flask +from configurator.routes.database_routes import create_database_routes +from configurator.utils.configurator_exception import ConfiguratorException, ConfiguratorEvent + + +class TestDatabaseRoutes(unittest.TestCase): + """Test cases for database routes.""" + + def setUp(self): + """Set up test fixtures.""" + self.app = Flask(__name__) + self.app.register_blueprint(create_database_routes(), url_prefix='/api/database') + self.client = self.app.test_client() + + @patch('configurator.routes.database_routes.MongoIO') + def test_drop_database_success(self, mock_mongo_io_class): + """Test successful DELETE /api/database.""" + # Arrange + mock_mongo_io = Mock() + # Create a mock event that returns a proper to_dict() response + mock_event = Mock() + mock_event.to_dict.return_value = { + "id": "MON-12", + "type": "DROP_DATABASE", + "status": "SUCCESS", + "data": {"message": "Database Dropped"}, + "starts": "2024-01-01T00:00:00.000Z", + "ends": "2024-01-01T00:00:00.000Z", + "sub_events": [] + } + mock_mongo_io.drop_database.return_value = [mock_event] + mock_mongo_io_class.return_value = mock_mongo_io + + # Act + response = self.client.delete('/api/database/') + + # Assert + self.assertEqual(response.status_code, 200) + response_data = response.json + # Expect a list of events, not a simple message + self.assertIsInstance(response_data, list) + self.assertEqual(len(response_data), 1) + self.assertEqual(response_data[0]["id"], "MON-12") + self.assertEqual(response_data[0]["type"], "DROP_DATABASE") + self.assertEqual(response_data[0]["status"], "SUCCESS") + mock_mongo_io.drop_database.assert_called_once() + mock_mongo_io.disconnect.assert_called_once() + + @patch('configurator.routes.database_routes.MongoIO') + def test_drop_database_configurator_exception(self, mock_mongo_io_class): + """Test DELETE /api/database when MongoIO raises ConfiguratorException.""" + # Arrange + mock_mongo_io = Mock() + mock_event = ConfiguratorEvent("TEST-01", "TEST", {"error": "test"}) + mock_mongo_io.drop_database.side_effect = ConfiguratorException("Database error", mock_event) + mock_mongo_io_class.return_value = mock_mongo_io + + # Act + response = self.client.delete('/api/database/') + + # Assert + self.assertEqual(response.status_code, 500) + # The response should contain the to_dict() structure from the exception + self.assertIsInstance(response.json, dict) + self.assertIn("id", response.json) + self.assertIn("type", response.json) + + @patch('configurator.routes.database_routes.MongoIO') + def test_drop_database_safety_limit_exceeded(self, mock_mongo_io_class): + """Test DELETE /api/database when collections have more than 100 documents.""" + # Arrange + mock_mongo_io = Mock() + mock_event = ConfiguratorEvent( + "MON-14", + "DROP_DATABASE", + {"collections_exceeding_limit": [ + {"collection": "users", "document_count": 150}, + {"collection": "orders", "document_count": 200} + ]} + ) + mock_mongo_io.drop_database.side_effect = ConfiguratorException( + "Drop database Safety 
Limit Exceeded - Collections with >100 documents found", + mock_event + ) + mock_mongo_io_class.return_value = mock_mongo_io + + # Act + response = self.client.delete('/api/database/') + + # Assert + self.assertEqual(response.status_code, 500) + self.assertIsInstance(response.json, dict) + self.assertIn("id", response.json) + self.assertIn("type", response.json) + + @patch('configurator.routes.database_routes.MongoIO') + def test_drop_database_general_exception(self, mock_mongo_io_class): + """Test DELETE /api/database when MongoIO raises a general exception.""" + # Arrange + mock_mongo_io = Mock() + mock_mongo_io.drop_database.side_effect = Exception("Unexpected error") + mock_mongo_io_class.return_value = mock_mongo_io + + # Act + response = self.client.delete('/api/database/') + + # Assert + self.assertEqual(response.status_code, 500) + response_data = response.json + self.assertIn("id", response_data) + self.assertIn("type", response_data) + self.assertIn("status", response_data) + self.assertIn("data", response_data) + self.assertEqual(response_data["status"], "FAILURE") + + def test_drop_database_get_method_not_allowed(self): + """Test that GET method is not allowed on /api/database.""" + # Act + response = self.client.get('/api/database/') + + # Assert + self.assertEqual(response.status_code, 405) + + def test_drop_database_post_method_not_allowed(self): + """Test that POST method is not allowed on /api/database.""" + # Act + response = self.client.post('/api/database/') + + # Assert + self.assertEqual(response.status_code, 405) + + def test_drop_database_put_method_not_allowed(self): + """Test that PUT method is not allowed on /api/database.""" + # Act + response = self.client.put('/api/database/') + + # Assert + self.assertEqual(response.status_code, 405) + + def test_drop_database_patch_method_not_allowed(self): + """Test that PATCH method is not allowed on /api/database.""" + # Act + response = self.client.patch('/api/database/') + + # Assert + self.assertEqual(response.status_code, 405) + + +if __name__ == '__main__': + unittest.main() \ No newline at end of file diff --git a/tests/routes/test_dictionary_routes.py b/tests/routes/test_dictionary_routes.py new file mode 100644 index 0000000..713f1db --- /dev/null +++ b/tests/routes/test_dictionary_routes.py @@ -0,0 +1,196 @@ +import unittest +from unittest.mock import patch, Mock +from flask import Flask +from configurator.routes.dictionary_routes import create_dictionary_routes +from configurator.utils.configurator_exception import ConfiguratorException, ConfiguratorEvent + + +class TestDictionaryRoutes(unittest.TestCase): + """Test cases for dictionary routes.""" + + def setUp(self): + """Set up test fixtures.""" + self.app = Flask(__name__) + self.app.register_blueprint(create_dictionary_routes(), url_prefix='/api/dictionaries') + self.client = self.app.test_client() + + @patch('configurator.routes.dictionary_routes.FileIO') + def test_get_dictionaries_success(self, mock_file_io): + """Test successful GET /api/dictionaries.""" + # Arrange + # Create mock File objects with to_dict() method + mock_file1 = Mock() + mock_file1.to_dict.return_value = {"name": "dict1.yaml"} + mock_file2 = Mock() + mock_file2.to_dict.return_value = {"name": "dict2.yaml"} + mock_files = [mock_file1, mock_file2] + mock_file_io.get_documents.return_value = mock_files + + # Act + response = self.client.get('/api/dictionaries/') + + # Assert + self.assertEqual(response.status_code, 200) + response_data = response.json + # For successful responses, expect 
data directly, not wrapped in event envelope + self.assertEqual(response_data, [{"name": "dict1.yaml"}, {"name": "dict2.yaml"}]) + + @patch('configurator.routes.dictionary_routes.FileIO') + def test_get_dictionaries_general_exception(self, mock_file_io): + """Test GET /api/dictionaries when FileIO raises a general exception.""" + # Arrange + mock_file_io.get_documents.side_effect = Exception("Unexpected error") + + # Act + response = self.client.get('/api/dictionaries/') + + # Assert + self.assertEqual(response.status_code, 500) + response_data = response.json + self.assertIn("id", response_data) + self.assertIn("type", response_data) + self.assertIn("status", response_data) + self.assertIn("data", response_data) + self.assertEqual(response_data["status"], "FAILURE") + + @patch('configurator.routes.dictionary_routes.Dictionary') + def test_get_dictionary_success(self, mock_dictionary_class): + """Test successful GET /api/dictionaries/.""" + # Arrange + mock_dictionary = Mock() + mock_dictionary.to_dict.return_value = {"name": "test_dict", "version": "1.0.0"} + mock_dictionary_class.return_value = mock_dictionary + + # Act + response = self.client.get('/api/dictionaries/test_dict/') + + # Assert + self.assertEqual(response.status_code, 200) + response_data = response.json + self.assertEqual(response_data, {"name": "test_dict", "version": "1.0.0"}) + + @patch('configurator.routes.dictionary_routes.Dictionary') + def test_get_dictionary_general_exception(self, mock_dictionary_class): + """Test GET /api/dictionaries/ when Dictionary raises a general exception.""" + # Arrange + mock_dictionary_class.side_effect = Exception("Unexpected error") + + # Act + response = self.client.get('/api/dictionaries/test_dict/') + + # Assert + self.assertEqual(response.status_code, 500) + response_data = response.json + self.assertIn("id", response_data) + self.assertIn("type", response_data) + self.assertIn("status", response_data) + self.assertIn("data", response_data) + self.assertEqual(response_data["status"], "FAILURE") + + @patch('configurator.routes.dictionary_routes.Dictionary') + def test_update_dictionary_success(self, mock_dictionary_class): + """Test successful PUT /api/dictionaries/.""" + # Arrange + test_data = {"name": "test_dict", "version": "1.0.0"} + mock_dictionary = Mock() + mock_saved_file = Mock() + mock_saved_file.to_dict.return_value = {"name": "test_dict.yaml", "path": "/path/to/test_dict.yaml"} + mock_dictionary.save.return_value = mock_saved_file + mock_dictionary_class.return_value = mock_dictionary + + # Act + response = self.client.put('/api/dictionaries/test_dict/', json=test_data) + + # Assert + self.assertEqual(response.status_code, 200) + response_data = response.json + self.assertEqual(response_data, {"name": "test_dict.yaml", "path": "/path/to/test_dict.yaml"}) + + @patch('configurator.routes.dictionary_routes.Dictionary') + def test_update_dictionary_general_exception(self, mock_dictionary_class): + """Test PUT /api/dictionaries/ when Dictionary raises a general exception.""" + # Arrange + mock_dictionary_class.side_effect = Exception("Unexpected error") + test_data = {"name": "test_dict", "version": "1.0.0"} + + # Act + response = self.client.put('/api/dictionaries/test_dict/', json=test_data) + + # Assert + self.assertEqual(response.status_code, 500) + response_data = response.json + self.assertIn("id", response_data) + self.assertIn("type", response_data) + self.assertIn("status", response_data) + self.assertIn("data", response_data) + 
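# Even unexpected exceptions should surface as a structured FAILURE envelope
+        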
self.assertEqual(response_data["status"], "FAILURE") + + @patch('configurator.routes.dictionary_routes.Dictionary') + def test_delete_dictionary_success(self, mock_dictionary_class): + """Test successful DELETE /api/dictionaries/.""" + # Arrange + mock_dictionary = Mock() + mock_event = Mock() + mock_event.to_dict.return_value = {"deleted": True} + mock_dictionary.delete.return_value = mock_event + mock_dictionary_class.return_value = mock_dictionary + + # Act + response = self.client.delete('/api/dictionaries/test_dict/') + + # Assert + self.assertEqual(response.status_code, 200) + response_data = response.json + self.assertEqual(response_data, {"deleted": True}) + + @patch('configurator.routes.dictionary_routes.Dictionary') + def test_delete_dictionary_general_exception(self, mock_dictionary_class): + """Test DELETE /api/dictionaries/ when Dictionary raises a general exception.""" + # Arrange + mock_dictionary = Mock() + mock_dictionary.delete.side_effect = Exception("Unexpected error") + mock_dictionary_class.return_value = mock_dictionary + + # Act + response = self.client.delete('/api/dictionaries/test_dict/') + + # Assert + self.assertEqual(response.status_code, 500) + response_data = response.json + self.assertIn("id", response_data) + self.assertIn("type", response_data) + self.assertIn("status", response_data) + self.assertIn("data", response_data) + self.assertEqual(response_data["status"], "FAILURE") + + # Lock/unlock tests removed as functionality was removed + + @patch('configurator.routes.dictionary_routes.Dictionary') + def test_lock_all_dictionaries(self, mock_dictionary_class): + """Test locking all dictionaries.""" + # Arrange + mock_event = ConfiguratorEvent("DIC-04", "LOCK_ALL_DICTIONARIES") + mock_event.data = { + "total_files": 2, + "operation": "lock_all" + } + mock_event.record_success() + mock_dictionary_class.lock_all.return_value = mock_event + + # Act + response = self.client.patch('/api/dictionaries/') + + # Assert + self.assertEqual(response.status_code, 200) + data = response.get_json() + self.assertIn('id', data) + self.assertIn('type', data) + self.assertIn('status', data) + self.assertIn('sub_events', data) + self.assertIn('data', data) + self.assertIn('total_files', data['data']) + self.assertIn('operation', data['data']) + + +if __name__ == '__main__': + unittest.main() \ No newline at end of file diff --git a/tests/routes/test_enumerator_routes.py b/tests/routes/test_enumerator_routes.py new file mode 100644 index 0000000..bbde927 --- /dev/null +++ b/tests/routes/test_enumerator_routes.py @@ -0,0 +1,167 @@ +import unittest +from unittest.mock import patch, Mock +from flask import Flask +from configurator.routes.enumerator_routes import create_enumerator_routes +from configurator.utils.configurator_exception import ConfiguratorException, ConfiguratorEvent + + +class TestEnumeratorRoutes(unittest.TestCase): + """Test cases for enumerator routes.""" + + def setUp(self): + """Set up test fixtures.""" + self.app = Flask(__name__) + self.app.register_blueprint(create_enumerator_routes(), url_prefix='/api/enumerators') + self.client = self.app.test_client() + + def test_get_enumerators_success(self): + """Test successful GET /api/enumerators - Get Enumerators.""" + # Arrange + mock_enumerators = Mock() + mock_enumerators.to_dict.return_value = {"enumerators": []} + + with patch('configurator.routes.enumerator_routes.Enumerators') as mock_enumerators_class: + mock_enumerators_class.return_value = mock_enumerators + + # Act + response = 
self.client.get('/api/enumerators/') + + # Assert + self.assertEqual(response.status_code, 200) + response_data = response.json + self.assertEqual(response_data, {"enumerators": []}) + + @patch('configurator.routes.enumerator_routes.Enumerators') + def test_get_enumerators_not_found(self, mock_enumerators_class): + """Test GET /api/enumerators when file is not found.""" + # Arrange + event = ConfiguratorEvent("FIL-02", "FILE_NOT_FOUND", {"file_path": "/input/test_data/enumerators.json"}) + mock_enumerators_class.side_effect = ConfiguratorException("File not found", event) + + # Act + response = self.client.get('/api/enumerators/') + + # Assert + self.assertEqual(response.status_code, 500) + response_data = response.json + self.assertIn("id", response_data) + self.assertIn("type", response_data) + self.assertIn("status", response_data) + self.assertIn("data", response_data) + self.assertEqual(response_data["status"], "FAILURE") + + @patch('configurator.routes.enumerator_routes.Enumerators') + def test_get_enumerators_configurator_exception(self, mock_enumerators_class): + """Test GET /api/enumerators when Enumerators raises ConfiguratorException.""" + # Arrange + event = ConfiguratorEvent("TEST-01", "TEST", {"error": "test"}) + mock_enumerators_class.side_effect = ConfiguratorException("Other error", event) + + # Act + response = self.client.get('/api/enumerators/') + + # Assert + self.assertEqual(response.status_code, 500) + response_data = response.json + self.assertIn("id", response_data) + self.assertIn("type", response_data) + self.assertIn("status", response_data) + self.assertIn("data", response_data) + self.assertEqual(response_data["status"], "FAILURE") + + @patch('configurator.routes.enumerator_routes.Enumerators') + def test_get_enumerators_general_exception(self, mock_enumerators_class): + """Test GET /api/enumerators when Enumerators raises a general exception.""" + # Arrange + mock_enumerators_class.side_effect = Exception("Unexpected error") + + # Act + response = self.client.get('/api/enumerators/') + + # Assert + self.assertEqual(response.status_code, 500) + response_data = response.json + self.assertIn("id", response_data) + self.assertIn("type", response_data) + self.assertIn("status", response_data) + self.assertIn("data", response_data) + self.assertEqual(response_data["status"], "FAILURE") + + @patch('configurator.routes.enumerator_routes.Enumerators') + def test_put_enumerators_success(self, mock_enumerators_class): + """Test successful PUT /api/enumerators.""" + # Arrange + test_data = [{"name": "enum1", "status": "active", "version": 1, "enumerators": {}}] + mock_enumerators = Mock() + mock_saved_file = Mock() + mock_saved_file.to_dict.return_value = {"name": "enumerators.json", "path": "/path/to/enumerators.json"} + mock_enumerators.save.return_value = mock_saved_file + mock_enumerators_class.return_value = mock_enumerators + + # Act + response = self.client.put('/api/enumerators/', json=test_data) + + # Assert + self.assertEqual(response.status_code, 200) + response_data = response.json + self.assertEqual(response_data, {"name": "enumerators.json", "path": "/path/to/enumerators.json"}) + mock_enumerators_class.assert_called_once_with(data=test_data) + + @patch('configurator.routes.enumerator_routes.Enumerators') + def test_put_enumerators_configurator_exception(self, mock_enumerators_class): + """Test PUT /api/enumerators when Enumerators raises ConfiguratorException.""" + # Arrange + event = ConfiguratorEvent("TEST-01", "TEST", {"error": "test"}) + 
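# The route is expected to translate this ConfiguratorException into a 500 failure envelope
+        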
mock_enumerators_class.side_effect = ConfiguratorException("Save error", event) + test_data = [{"name": "enum1", "status": "active", "version": 1, "enumerators": {}}] + + # Act + response = self.client.put('/api/enumerators/', json=test_data) + + # Assert + self.assertEqual(response.status_code, 500) + response_data = response.json + self.assertIn("id", response_data) + self.assertIn("type", response_data) + self.assertIn("status", response_data) + self.assertIn("data", response_data) + self.assertEqual(response_data["status"], "FAILURE") + + @patch('configurator.routes.enumerator_routes.Enumerators') + def test_put_enumerators_general_exception(self, mock_enumerators_class): + """Test PUT /api/enumerators when Enumerators raises a general exception.""" + # Arrange + mock_enumerators_class.side_effect = Exception("Unexpected error") + test_data = [{"name": "enum1", "status": "active", "version": 1, "enumerators": {}}] + + # Act + response = self.client.put('/api/enumerators/', json=test_data) + + # Assert + self.assertEqual(response.status_code, 500) + response_data = response.json + self.assertIn("id", response_data) + self.assertIn("type", response_data) + self.assertIn("status", response_data) + self.assertIn("data", response_data) + self.assertEqual(response_data["status"], "FAILURE") + + def test_enumerators_delete_method_not_allowed(self): + """Test that DELETE method is not allowed on /api/enumerators.""" + # Act + response = self.client.delete('/api/enumerators/') + + # Assert + self.assertEqual(response.status_code, 405) + + def test_enumerators_with_filename_not_allowed(self): + """Test that enumerators with filename is not allowed.""" + # Act + response = self.client.get('/api/enumerators/test.json') + + # Assert + self.assertEqual(response.status_code, 404) + + +if __name__ == '__main__': + unittest.main() \ No newline at end of file diff --git a/tests/routes/test_migration_routes.py b/tests/routes/test_migration_routes.py new file mode 100644 index 0000000..38e4aca --- /dev/null +++ b/tests/routes/test_migration_routes.py @@ -0,0 +1,288 @@ +import os +import shutil +import tempfile +import json +import unittest +from flask import Flask +from configurator.server import app as real_app +from configurator.utils.config import Config +from unittest.mock import patch, Mock +from configurator.routes.migration_routes import create_migration_routes +from configurator.utils.configurator_exception import ConfiguratorException, ConfiguratorEvent + +class MigrationRoutesTestCase(unittest.TestCase): + def setUp(self): + # Create a temp directory for migrations + self.temp_dir = tempfile.mkdtemp() + self.migrations_dir = os.path.join(self.temp_dir, "migrations") + os.makedirs(self.migrations_dir, exist_ok=True) + # Patch config to use temp dir + self._original_input_folder = Config.get_instance().INPUT_FOLDER + Config.get_instance().INPUT_FOLDER = self.temp_dir + # Create some fake migration files + self.migration1 = os.path.join(self.migrations_dir, "mig1.json") + self.migration2 = os.path.join(self.migrations_dir, "mig2.json") + with open(self.migration1, "w") as f: + json.dump([{"$addFields": {"foo": "bar"}}], f) + with open(self.migration2, "w") as f: + json.dump([{"$unset": ["foo"]}], f) + # Use Flask test client + self.app = real_app.test_client() + + def tearDown(self): + Config.get_instance().INPUT_FOLDER = self._original_input_folder + shutil.rmtree(self.temp_dir) + + def test_list_migrations(self): + """Test GET /api/migrations/ endpoint.""" + resp = 
self.app.get("/api/migrations/") + self.assertEqual(resp.status_code, 200) + data = resp.get_json() + # For successful responses, expect data directly, not wrapped in event envelope + self.assertIn("mig1.json", data) + self.assertIn("mig2.json", data) + + def test_get_migration(self): + resp = self.app.get("/api/migrations/mig1.json/") + self.assertEqual(resp.status_code, 200) + data = resp.get_json() + # For successful responses, expect data directly, not wrapped in event envelope + self.assertIsInstance(data, list) + self.assertEqual(data, [{"$addFields": {"foo": "bar"}}]) + + def test_get_migration_not_found(self): + resp = self.app.get("/api/migrations/doesnotexist.json/") + self.assertEqual(resp.status_code, 500) + data = resp.get_json() + self.assertIn("id", data) + self.assertIn("type", data) + self.assertIn("status", data) + self.assertIn("data", data) + self.assertEqual(data["status"], "FAILURE") + + def test_put_migration(self): + """Test PUT /api/migrations//.""" + # Arrange + test_data = {"migration": "test data"} + + # Act + response = self.app.put('/api/migrations/mig1.json/', json=test_data) + + # Assert + self.assertEqual(response.status_code, 200) + data = response.get_json() + self.assertIn("file_name", data) + self.assertIn("created_at", data) + self.assertIn("updated_at", data) + self.assertIn("size", data) + + def test_delete_migration(self): + resp = self.app.delete("/api/migrations/mig1.json/") + self.assertEqual(resp.status_code, 200) + data = resp.get_json() + # For successful responses, expect ConfiguratorEvent with SUCCESS status + self.assertIn("id", data) + self.assertIn("type", data) + self.assertIn("status", data) + self.assertIn("data", data) + self.assertEqual(data["status"], "SUCCESS") + + def test_delete_migration_not_found(self): + resp = self.app.delete("/api/migrations/doesnotexist.json/") + self.assertEqual(resp.status_code, 200) # Now returns 200 with failure event + data = resp.get_json() + self.assertIn("id", data) + self.assertIn("type", data) + self.assertIn("status", data) + self.assertIn("data", data) + self.assertEqual(data["status"], "FAILURE") + +class TestMigrationRoutes(unittest.TestCase): + """Test cases for migration routes.""" + + def setUp(self): + """Set up test fixtures.""" + self.app = Flask(__name__) + self.app.register_blueprint(create_migration_routes(), url_prefix='/api/migrations') + self.client = self.app.test_client() + + @patch('configurator.routes.migration_routes.FileIO') + def test_list_migrations_success(self, mock_file_io): + """Test successful GET /api/migrations/.""" + # Arrange + mock_file1 = Mock() + mock_file1.file_name = "migration1.json" + mock_file2 = Mock() + mock_file2.file_name = "migration2.json" + mock_files = [mock_file1, mock_file2] + + with patch('configurator.routes.migration_routes.FileIO') as mock_file_io: + mock_file_io.get_documents.return_value = mock_files + + # Act + response = self.client.get('/api/migrations/') + + # Assert + self.assertEqual(response.status_code, 200) + response_data = response.json + self.assertEqual(response_data, ["migration1.json", "migration2.json"]) + + @patch('configurator.routes.migration_routes.FileIO') + def test_list_migrations_general_exception(self, mock_file_io): + """Test GET /api/migrations/ when FileIO raises a general exception.""" + # Arrange + mock_file_io.get_documents.side_effect = Exception("Unexpected error") + + # Act + response = self.client.get('/api/migrations/') + + # Assert + self.assertEqual(response.status_code, 500) + response_data = 
response.json + self.assertIn("id", response_data) + self.assertIn("type", response_data) + self.assertIn("status", response_data) + self.assertIn("data", response_data) + self.assertEqual(response_data["status"], "FAILURE") + + @patch('configurator.routes.migration_routes.FileIO') + def test_get_migration_success(self, mock_file_io): + """Test successful GET /api/migrations/.""" + # Arrange + mock_file_io.get_document.return_value = {"name": "test_migration", "operations": []} + + # Act + response = self.client.get('/api/migrations/test_migration.json/') + + # Assert + self.assertEqual(response.status_code, 200) + response_data = response.json + self.assertEqual(response_data, {"name": "test_migration", "operations": []}) + + @patch('configurator.routes.migration_routes.FileIO') + def test_get_migration_general_exception(self, mock_file_io): + """Test GET /api/migrations/ when FileIO raises a general exception.""" + # Arrange + mock_file_io.get_document.side_effect = Exception("Unexpected error") + + # Act + response = self.client.get('/api/migrations/test_migration.json/') + + # Assert + self.assertEqual(response.status_code, 500) + response_data = response.json + self.assertIn("id", response_data) + self.assertIn("type", response_data) + self.assertIn("status", response_data) + self.assertIn("data", response_data) + self.assertEqual(response_data["status"], "FAILURE") + + @patch('configurator.routes.migration_routes.FileIO') + def test_put_migration_success(self, mock_file_io): + """Test successful PUT /api/migrations/.""" + # Arrange + test_data = {"name": "test_migration", "operations": []} + mock_file = Mock() + mock_file.to_dict.return_value = {"file_name": "test_migration.json", "size": 100} + mock_file_io.put_document.return_value = mock_file + + # Act + response = self.client.put('/api/migrations/test_migration.json/', json=test_data) + + # Assert + self.assertEqual(response.status_code, 200) + response_data = response.json + self.assertIsInstance(response_data, dict) + self.assertIn("file_name", response_data) + + @patch('configurator.routes.migration_routes.FileIO') + def test_put_migration_general_exception(self, mock_file_io): + """Test PUT /api/migrations/ when FileIO raises a general exception.""" + # Arrange + mock_file_io.put_document.side_effect = Exception("Unexpected error") + test_data = {"name": "test_migration", "operations": []} + + # Act + response = self.client.put('/api/migrations/test_migration.json/', json=test_data) + + # Assert + self.assertEqual(response.status_code, 500) + response_data = response.json + self.assertIn("id", response_data) + self.assertIn("type", response_data) + self.assertIn("status", response_data) + self.assertIn("data", response_data) + self.assertEqual(response_data["status"], "FAILURE") + + @patch('configurator.routes.migration_routes.os.path.exists') + @patch('configurator.routes.migration_routes.FileIO') + def test_delete_migration_success(self, mock_file_io, mock_exists): + """Test successful DELETE /api/migrations/.""" + # Arrange + mock_exists.return_value = True + mock_event = Mock() + mock_event.status = "SUCCESS" + mock_file_io.delete_document.return_value = mock_event + + # Act + response = self.client.delete('/api/migrations/test_migration.json/') + + # Assert + self.assertEqual(response.status_code, 200) + response_data = response.json + self.assertIn("id", response_data) + self.assertIn("type", response_data) + self.assertIn("status", response_data) + self.assertIn("data", response_data) + 
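# A successful delete returns the FileIO event envelope rather than the removed document
+        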
self.assertEqual(response_data["status"], "SUCCESS") + + @patch('configurator.routes.migration_routes.FileIO') + def test_delete_migration_general_exception(self, mock_file_io): + """Test DELETE /api/migrations/ when FileIO raises a general exception.""" + # Arrange + mock_file_io.delete_document.side_effect = Exception("Unexpected error") + + # Act + response = self.client.delete('/api/migrations/test_migration.json/') + + # Assert + self.assertEqual(response.status_code, 200) # Now returns 200 with failure event + response_data = response.json + self.assertIn("id", response_data) + self.assertIn("type", response_data) + self.assertIn("status", response_data) + self.assertIn("data", response_data) + self.assertEqual(response_data["status"], "FAILURE") + + def test_migrations_post_method_not_allowed(self): + """Test that POST method is not allowed.""" + response = self.client.post('/api/migrations/') + self.assertEqual(response.status_code, 405) + + def test_migrations_patch_method_not_allowed(self): + """Test that PATCH method is not allowed for individual migrations.""" + response = self.client.patch('/api/migrations/test_migration.json/') + self.assertEqual(response.status_code, 405) + + def test_get_migrations_success(self): + """Test successful GET /api/migrations/.""" + # Arrange + mock_file1 = Mock() + mock_file1.file_name = "migration1.json" + mock_file2 = Mock() + mock_file2.file_name = "migration2.json" + mock_files = [mock_file1, mock_file2] + + with patch('configurator.routes.migration_routes.FileIO') as mock_file_io: + mock_file_io.get_documents.return_value = mock_files + + # Act + response = self.client.get('/api/migrations/') + + # Assert + self.assertEqual(response.status_code, 200) + response_data = response.json + self.assertEqual(response_data, ["migration1.json", "migration2.json"]) + +if __name__ == "__main__": + unittest.main() \ No newline at end of file diff --git a/tests/routes/test_render_routes.py b/tests/routes/test_render_routes.py deleted file mode 100644 index 9fcb725..0000000 --- a/tests/routes/test_render_routes.py +++ /dev/null @@ -1,204 +0,0 @@ -import unittest -from unittest.mock import patch, MagicMock -from flask import Flask -from stage0_mongodb_api.routes.render_routes import create_render_routes -from stage0_mongodb_api.services.render_service import RenderNotFoundError, RenderProcessingError - -class TestRenderRoutes(unittest.TestCase): - def setUp(self): - """Set up test fixtures before each test method.""" - self.app = Flask(__name__) - self.app.register_blueprint(create_render_routes(), url_prefix='/api/render') - self.client = self.app.test_client() - - @patch('stage0_mongodb_api.routes.render_routes.RenderService') - def test_render_json_schema(self, mock_render_service): - """Test rendering JSON schema for a schema.""" - # Arrange - schema_name = "test_collection.1.0.0.1" - mock_schema = {"test": "schema"} - mock_render_service.render_json_schema.return_value = mock_schema - - # Act - response = self.client.get(f'/api/render/json_schema/{schema_name}/') - - # Assert - self.assertEqual(response.status_code, 200) - self.assertEqual(response.json, mock_schema) - mock_render_service.render_json_schema.assert_called_once() - - @patch('stage0_mongodb_api.routes.render_routes.RenderService') - def test_render_bson_schema(self, mock_render_service): - """Test rendering BSON schema for a schema.""" - # Arrange - schema_name = "test_collection.1.0.0.1" - mock_schema = {"test": "schema"} - mock_render_service.render_bson_schema.return_value = 
mock_schema - - # Act - response = self.client.get(f'/api/render/bson_schema/{schema_name}/') - - # Assert - self.assertEqual(response.status_code, 200) - self.assertEqual(response.json, mock_schema) - mock_render_service.render_bson_schema.assert_called_once() - - @patch('stage0_mongodb_api.routes.render_routes.RenderService') - def test_render_openapi(self, mock_render_service): - """Test rendering OpenAPI specification for a schema.""" - # Arrange - schema_name = "test_collection.1.0.0.1" - expected_message = {"message": "OpenAPI rendering not yet implemented"} - mock_render_service.render_openapi.return_value = expected_message - - # Act - response = self.client.get(f'/api/render/openapi/{schema_name}/') - - # Assert - self.assertEqual(response.status_code, 200) - self.assertEqual(response.data.decode('utf-8'), 'message: OpenAPI rendering not yet implemented\n') - mock_render_service.render_openapi.assert_called_once() - - @patch('stage0_mongodb_api.routes.render_routes.RenderService') - def test_render_json_schema_not_found(self, mock_render_service): - """Test error handling when schema is not found for JSON schema.""" - # Arrange - schema_name = "nonexistent_collection.1.0.0.1" - mock_render_service.render_json_schema.side_effect = RenderNotFoundError(schema_name) - - # Act - response = self.client.get(f'/api/render/json_schema/{schema_name}/') - - # Assert - self.assertEqual(response.status_code, 404) - self.assertEqual(response.data.decode('utf-8'), 'Schema not found') - mock_render_service.render_json_schema.assert_called_once() - - @patch('stage0_mongodb_api.routes.render_routes.RenderService') - def test_render_bson_schema_not_found(self, mock_render_service): - """Test error handling when schema is not found for BSON schema.""" - # Arrange - schema_name = "nonexistent_collection.1.0.0.1" - mock_render_service.render_bson_schema.side_effect = RenderNotFoundError(schema_name) - - # Act - response = self.client.get(f'/api/render/bson_schema/{schema_name}/') - - # Assert - self.assertEqual(response.status_code, 404) - self.assertEqual(response.data.decode('utf-8'), 'Schema not found') - mock_render_service.render_bson_schema.assert_called_once() - - @patch('stage0_mongodb_api.routes.render_routes.RenderService') - def test_render_json_schema_processing_error(self, mock_render_service): - """Test error handling when JSON schema processing fails.""" - # Arrange - schema_name = "test_collection.1.0.0.1" - errors = [{"error": "processing_error", "message": "Test error"}] - mock_render_service.render_json_schema.side_effect = RenderProcessingError(schema_name, errors) - - # Act - response = self.client.get(f'/api/render/json_schema/{schema_name}/') - - # Assert - self.assertEqual(response.status_code, 500) - self.assertEqual(response.json, errors) - mock_render_service.render_json_schema.assert_called_once() - - @patch('stage0_mongodb_api.routes.render_routes.RenderService') - def test_render_bson_schema_processing_error(self, mock_render_service): - """Test error handling when BSON schema processing fails.""" - # Arrange - schema_name = "test_collection.1.0.0.1" - errors = [{"error": "processing_error", "message": "Test error"}] - mock_render_service.render_bson_schema.side_effect = RenderProcessingError(schema_name, errors) - - # Act - response = self.client.get(f'/api/render/bson_schema/{schema_name}/') - - # Assert - self.assertEqual(response.status_code, 500) - self.assertEqual(response.json, errors) - mock_render_service.render_bson_schema.assert_called_once() - - 
@patch('stage0_mongodb_api.routes.render_routes.RenderService') - def test_render_openapi_not_found(self, mock_render_service): - """Test error handling when schema is not found for OpenAPI.""" - # Arrange - schema_name = "nonexistent_collection.1.0.0.1" - mock_render_service.render_openapi.side_effect = RenderNotFoundError(schema_name) - - # Act - response = self.client.get(f'/api/render/openapi/{schema_name}/') - - # Assert - self.assertEqual(response.status_code, 404) - self.assertEqual(response.data.decode('utf-8'), 'Schema not found') - mock_render_service.render_openapi.assert_called_once() - - @patch('stage0_mongodb_api.routes.render_routes.RenderService') - def test_render_openapi_processing_error(self, mock_render_service): - """Test error handling when OpenAPI processing fails.""" - # Arrange - schema_name = "test_collection.1.0.0.1" - errors = [{"error": "processing_error", "message": "Test error"}] - mock_render_service.render_openapi.side_effect = RenderProcessingError(schema_name, errors) - - # Act - response = self.client.get(f'/api/render/openapi/{schema_name}/') - - # Assert - self.assertEqual(response.status_code, 500) - self.assertEqual(response.json, errors) - mock_render_service.render_openapi.assert_called_once() - - @patch('stage0_mongodb_api.routes.render_routes.RenderService') - def test_render_json_schema_unexpected_error(self, mock_render_service): - """Test error handling when unexpected error occurs during JSON schema rendering.""" - # Arrange - schema_name = "test_collection.1.0.0.1" - mock_render_service.render_json_schema.side_effect = Exception("Unexpected error") - - # Act - response = self.client.get(f'/api/render/json_schema/{schema_name}/') - - # Assert - self.assertEqual(response.status_code, 500) - self.assertEqual(response.json[0]["error"], "Failed to render JSON schema") - self.assertEqual(response.json[0]["error_id"], "API-005") - mock_render_service.render_json_schema.assert_called_once() - - @patch('stage0_mongodb_api.routes.render_routes.RenderService') - def test_render_bson_schema_unexpected_error(self, mock_render_service): - """Test error handling when unexpected error occurs during BSON schema rendering.""" - # Arrange - schema_name = "test_collection.1.0.0.1" - mock_render_service.render_bson_schema.side_effect = Exception("Unexpected error") - - # Act - response = self.client.get(f'/api/render/bson_schema/{schema_name}/') - - # Assert - self.assertEqual(response.status_code, 500) - self.assertEqual(response.json[0]["error"], "Failed to render BSON schema") - self.assertEqual(response.json[0]["error_id"], "API-006") - mock_render_service.render_bson_schema.assert_called_once() - - @patch('stage0_mongodb_api.routes.render_routes.RenderService') - def test_render_openapi_unexpected_error(self, mock_render_service): - """Test error handling when unexpected error occurs during OpenAPI rendering.""" - # Arrange - schema_name = "test_collection.1.0.0.1" - mock_render_service.render_openapi.side_effect = Exception("Unexpected error") - - # Act - response = self.client.get(f'/api/render/openapi/{schema_name}/') - - # Assert - self.assertEqual(response.status_code, 500) - self.assertEqual(response.json[0]["error"], "Failed to render OpenAPI") - self.assertEqual(response.json[0]["error_id"], "API-007") - mock_render_service.render_openapi.assert_called_once() - -if __name__ == '__main__': - unittest.main() \ No newline at end of file diff --git a/tests/routes/test_test_data_routes.py b/tests/routes/test_test_data_routes.py new file mode 100644 index 
0000000..5e85cee --- /dev/null +++ b/tests/routes/test_test_data_routes.py @@ -0,0 +1,176 @@ +import unittest +from unittest.mock import patch, Mock +from flask import Flask +from configurator.routes.test_data_routes import create_test_data_routes +from configurator.utils.configurator_exception import ConfiguratorException, ConfiguratorEvent + + +class TestTestDataRoutes(unittest.TestCase): + """Test cases for test data routes.""" + + def setUp(self): + """Set up test fixtures.""" + self.app = Flask(__name__) + self.app.register_blueprint(create_test_data_routes(), url_prefix='/api/test_data') + self.client = self.app.test_client() + + @patch('configurator.routes.test_data_routes.FileIO') + def test_get_data_files_success(self, mock_file_io): + """Test successful GET /api/test_data.""" + # Arrange + mock_file1 = Mock() + mock_file1.to_dict.return_value = {"file_name": "data1.json", "size": 100, "created_at": "2023-01-01T00:00:00", "updated_at": "2023-01-01T00:00:00"} + mock_file2 = Mock() + mock_file2.to_dict.return_value = {"file_name": "data2.json", "size": 200, "created_at": "2023-01-01T00:00:00", "updated_at": "2023-01-01T00:00:00"} + mock_files = [mock_file1, mock_file2] + mock_file_io.get_documents.return_value = mock_files + + # Act + response = self.client.get('/api/test_data/') + + # Assert + self.assertEqual(response.status_code, 200) + response_data = response.json + self.assertEqual(response_data, [{"file_name": "data1.json", "size": 100, "created_at": "2023-01-01T00:00:00", "updated_at": "2023-01-01T00:00:00"}, {"file_name": "data2.json", "size": 200, "created_at": "2023-01-01T00:00:00", "updated_at": "2023-01-01T00:00:00"}]) + + @patch('configurator.routes.test_data_routes.FileIO') + def test_get_data_files_general_exception(self, mock_file_io): + """Test GET /api/test_data when FileIO raises a general exception.""" + # Arrange + mock_file_io.get_documents.side_effect = Exception("Unexpected error") + + # Act + response = self.client.get('/api/test_data/') + + # Assert + self.assertEqual(response.status_code, 500) + response_data = response.json + self.assertIn("id", response_data) + self.assertIn("type", response_data) + self.assertIn("status", response_data) + self.assertIn("data", response_data) + self.assertEqual(response_data["status"], "FAILURE") + + @patch('configurator.routes.test_data_routes.FileIO') + def test_get_data_file_success(self, mock_file_io): + """Test successful GET /api/test_data/<file_name>/.""" + import json + from unittest.mock import mock_open + # Arrange + mock_file_io.get_document.return_value = {"data": "test content"} + mock_json = '{"data": "test content"}' + with patch('builtins.open', mock_open(read_data=mock_json)): + # Act + response = self.client.get('/api/test_data/test_file.json/') + # Assert + self.assertEqual(response.status_code, 200) + response_data = json.loads(response.data) + self.assertEqual(response_data, {"data": "test content"}) + + @patch('configurator.routes.test_data_routes.FileIO') + def test_get_data_file_general_exception(self, mock_file_io): + """Test GET /api/test_data/<file_name>/ when FileIO raises a general exception.""" + # Arrange + mock_file_io.get_document.side_effect = Exception("Unexpected error") + + # Act + response = self.client.get('/api/test_data/test_file.json/') + + # Assert + self.assertEqual(response.status_code, 500) + response_data = response.json + self.assertIn("id", response_data) + self.assertIn("type", response_data) + self.assertIn("status", response_data) + self.assertIn("data", response_data) + 
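# Error payloads share one envelope shape across these routes: id, type, status, and data (presumably ConfiguratorEvent.to_dict()). +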
self.assertEqual(response_data["status"], "FAILURE") + + @patch('configurator.routes.test_data_routes.FileIO') + def test_put_data_file_success(self, mock_file_io): + """Test successful PUT /api/test_data/.""" + # Arrange + test_data = {"data": "test content"} + mock_file = Mock() + mock_file.to_dict.return_value = {"name": "test_file.json", "size": 100, "created_at": "2023-01-01T00:00:00", "updated_at": "2023-01-01T00:00:00"} + mock_file_io.put_document.return_value = mock_file + + # Act + response = self.client.put('/api/test_data/test_file.json/', json=test_data) + + # Assert + self.assertEqual(response.status_code, 200) + response_data = response.json + self.assertEqual(response_data, {"file_name": "test_file.json", "size": 100, "created_at": "2023-01-01T00:00:00", "updated_at": "2023-01-01T00:00:00"}) + + @patch('configurator.routes.test_data_routes.FileIO') + def test_put_data_file_general_exception(self, mock_file_io): + """Test PUT /api/test_data/ when FileIO raises a general exception.""" + # Arrange + mock_file_io.put_document.side_effect = Exception("Unexpected error") + test_data = {"data": "test content"} + + # Act + response = self.client.put('/api/test_data/test_file.json/', json=test_data) + + # Assert + self.assertEqual(response.status_code, 500) + response_data = response.json + self.assertIn("id", response_data) + self.assertIn("type", response_data) + self.assertIn("status", response_data) + self.assertIn("data", response_data) + self.assertEqual(response_data["status"], "FAILURE") + + @patch('configurator.routes.test_data_routes.FileIO') + def test_delete_data_file_success(self, mock_file_io): + """Test successful DELETE /api/test_data/.""" + # Arrange + mock_event = Mock() + mock_event.to_dict.return_value = {"deleted": True, "file_name": "test_file.json"} + mock_file_io.delete_document.return_value = mock_event + + # Act + response = self.client.delete('/api/test_data/test_file.json/') + + # Assert + self.assertEqual(response.status_code, 200) + response_data = response.json + self.assertEqual(response_data, {"deleted": True, "file_name": "test_file.json"}) + + @patch('configurator.routes.test_data_routes.FileIO') + def test_delete_data_file_general_exception(self, mock_file_io): + """Test DELETE /api/test_data/ when FileIO raises a general exception.""" + # Arrange + mock_file_io.delete_document.side_effect = Exception("Unexpected error") + + # Act + response = self.client.delete('/api/test_data/test_file.json/') + + # Assert + self.assertEqual(response.status_code, 500) + response_data = response.json + self.assertIn("id", response_data) + self.assertIn("type", response_data) + self.assertIn("status", response_data) + self.assertIn("data", response_data) + self.assertEqual(response_data["status"], "FAILURE") + + def test_test_data_post_method_not_allowed(self): + """Test that POST method is not allowed on /api/test_data.""" + # Act + response = self.client.post('/api/test_data/') + + # Assert + self.assertEqual(response.status_code, 405) + + def test_test_data_patch_method_not_allowed(self): + """Test that PATCH method is not allowed on /api/test_data.""" + # Act + response = self.client.patch('/api/test_data/') + + # Assert + self.assertEqual(response.status_code, 405) + + +if __name__ == '__main__': + unittest.main() \ No newline at end of file diff --git a/tests/routes/test_type_routes.py b/tests/routes/test_type_routes.py new file mode 100644 index 0000000..369b2b4 --- /dev/null +++ b/tests/routes/test_type_routes.py @@ -0,0 +1,196 @@ +import unittest +from 
unittest.mock import patch, Mock +from flask import Flask +from configurator.routes.type_routes import create_type_routes +from configurator.utils.configurator_exception import ConfiguratorException, ConfiguratorEvent + + +class TestTypeRoutes(unittest.TestCase): + """Test cases for type routes.""" + + def setUp(self): + """Set up test fixtures.""" + self.app = Flask(__name__) + self.app.register_blueprint(create_type_routes(), url_prefix='/api/types') + self.client = self.app.test_client() + + @patch('configurator.routes.type_routes.FileIO') + def test_get_types_success(self, mock_file_io): + """Test successful GET /api/types.""" + # Arrange + # Create mock File objects with to_dict() method + mock_file1 = Mock() + mock_file1.to_dict.return_value = {"name": "type1.yaml"} + mock_file2 = Mock() + mock_file2.to_dict.return_value = {"name": "type2.yaml"} + mock_files = [mock_file1, mock_file2] + mock_file_io.get_documents.return_value = mock_files + + # Act + response = self.client.get('/api/types/') + + # Assert + self.assertEqual(response.status_code, 200) + response_data = response.json + # For successful responses, expect data directly, not wrapped in event envelope + self.assertEqual(response_data, [{"name": "type1.yaml"}, {"name": "type2.yaml"}]) + + @patch('configurator.routes.type_routes.FileIO') + def test_get_types_general_exception(self, mock_file_io): + """Test GET /api/types when FileIO raises a general exception.""" + # Arrange + mock_file_io.get_documents.side_effect = Exception("Unexpected error") + + # Act + response = self.client.get('/api/types/') + + # Assert + self.assertEqual(response.status_code, 500) + response_data = response.json + self.assertIn("id", response_data) + self.assertIn("type", response_data) + self.assertIn("status", response_data) + self.assertIn("data", response_data) + self.assertEqual(response_data["status"], "FAILURE") + + @patch('configurator.routes.type_routes.Type') + def test_get_type_success(self, mock_type_class): + """Test successful GET /api/types/<file_name>/.""" + # Arrange + mock_type = Mock() + mock_type.to_dict.return_value = {"name": "test_type", "_locked": False, "version": "1.0.0"} + mock_type_class.return_value = mock_type + + # Act + response = self.client.get('/api/types/test_type/') + + # Assert + self.assertEqual(response.status_code, 200) + response_data = response.json + self.assertEqual(response_data, {"name": "test_type", "_locked": False, "version": "1.0.0"}) + + @patch('configurator.routes.type_routes.Type') + def test_get_type_general_exception(self, mock_type_class): + """Test GET /api/types/<file_name>/ when Type raises a general exception.""" + # Arrange + mock_type_class.side_effect = Exception("Unexpected error") + + # Act + response = self.client.get('/api/types/test_type/') + + # Assert + self.assertEqual(response.status_code, 500) + response_data = response.json + self.assertIn("id", response_data) + self.assertIn("type", response_data) + self.assertIn("status", response_data) + self.assertIn("data", response_data) + self.assertEqual(response_data["status"], "FAILURE") + + @patch('configurator.routes.type_routes.Type') + def test_update_type_success(self, mock_type_class): + """Test successful PUT /api/types/<file_name>/.""" + # Arrange + test_data = {"name": "test_type", "version": "1.0.0"} + mock_type = Mock() + mock_saved_file = Mock() + mock_saved_file.to_dict.return_value = {"name": "test_type.yaml", "path": "/path/to/test_type.yaml"} + mock_type.save.return_value = mock_saved_file + mock_type_class.return_value = mock_type + + # Act + response 
= self.client.put('/api/types/test_type/', json=test_data) + + # Assert + self.assertEqual(response.status_code, 200) + response_data = response.json + self.assertEqual(response_data, {"name": "test_type.yaml", "path": "/path/to/test_type.yaml"}) + + @patch('configurator.routes.type_routes.Type') + def test_update_type_general_exception(self, mock_type_class): + """Test PUT /api/types/<file_name>/ when Type raises a general exception.""" + # Arrange + mock_type_class.side_effect = Exception("Unexpected error") + test_data = {"name": "test_type", "version": "1.0.0"} + + # Act + response = self.client.put('/api/types/test_type/', json=test_data) + + # Assert + self.assertEqual(response.status_code, 500) + response_data = response.json + self.assertIn("id", response_data) + self.assertIn("type", response_data) + self.assertIn("status", response_data) + self.assertIn("data", response_data) + self.assertEqual(response_data["status"], "FAILURE") + + @patch('configurator.routes.type_routes.Type') + def test_delete_type_success(self, mock_type_class): + """Test successful DELETE /api/types/<file_name>/.""" + # Arrange + mock_type = Mock() + mock_event = Mock() + mock_event.to_dict.return_value = {"deleted": True} + mock_type.delete.return_value = mock_event + mock_type_class.return_value = mock_type + + # Act + response = self.client.delete('/api/types/test_type/') + + # Assert + self.assertEqual(response.status_code, 200) + response_data = response.json + self.assertEqual(response_data, {"deleted": True}) + + @patch('configurator.routes.type_routes.Type') + def test_delete_type_general_exception(self, mock_type_class): + """Test DELETE /api/types/<file_name>/ when Type raises a general exception.""" + # Arrange + mock_type = Mock() + mock_type.delete.side_effect = Exception("Unexpected error") + mock_type_class.return_value = mock_type + + # Act + response = self.client.delete('/api/types/test_type/') + + # Assert + self.assertEqual(response.status_code, 500) + response_data = response.json + self.assertIn("id", response_data) + self.assertIn("type", response_data) + self.assertIn("status", response_data) + self.assertIn("data", response_data) + self.assertEqual(response_data["status"], "FAILURE") + + # Lock/unlock tests removed as functionality was removed + + @patch('configurator.routes.type_routes.Type') + def test_lock_all_types(self, mock_type_class): + """Test locking all types.""" + # Arrange + mock_event = ConfiguratorEvent("TYP-04", "LOCK_ALL_TYPES") + mock_event.data = { + "total_files": 2, + "operation": "lock_all" + } + mock_event.record_success() + mock_type_class.lock_all.return_value = mock_event + + # Act + response = self.client.patch('/api/types/') + + # Assert + self.assertEqual(response.status_code, 200) + data = response.get_json() + self.assertIn('id', data) + self.assertIn('type', data) + self.assertIn('status', data) + self.assertIn('sub_events', data) + self.assertIn('data', data) + self.assertIn('total_files', data['data']) + self.assertIn('operation', data['data']) + + +if __name__ == '__main__': + unittest.main() \ No newline at end of file diff --git a/tests/services/test_circular_references.py b/tests/services/test_circular_references.py new file mode 100644 index 0000000..cae4264 --- /dev/null +++ b/tests/services/test_circular_references.py @@ -0,0 +1,161 @@ +import unittest +from configurator.services.dictionary_services import Dictionary +from configurator.services.type_services import Type +from configurator.services.enumerator_service import Enumerators +from 
configurator.utils.configurator_exception import ConfiguratorException +import os +import yaml + + +def set_config_input_folder(folder): + os.environ['INPUT_FOLDER'] = folder + from configurator.utils.config import Config + Config._instance = None + return Config.get_instance() + +def clear_config(): + if 'INPUT_FOLDER' in os.environ: + del os.environ['INPUT_FOLDER'] + from configurator.utils.config import Config + Config._instance = None + + +class TestCircularReferences(unittest.TestCase): + """Test circular reference detection in Dictionary and Type services""" + + def setUp(self): + self.config = set_config_input_folder("./tests/test_cases/complex_refs") + self.enumerators = Enumerators(None) + + def tearDown(self): + clear_config() + + def test_dictionary_circular_reference_detection(self): + """Test that circular references in dictionaries are detected""" + # Create a simple circular reference test + circular_dict_data = { + "description": "Test dictionary with circular reference", + "type": "object", + "properties": { + "ref_prop": { + "description": "Property that creates circular reference", + "ref": "circular_test.1.0.0.yaml" + } + } + } + + # Save the test file + test_file = "circular_test.1.0.0.yaml" + with open(f"{self.config.INPUT_FOLDER}/dictionaries/{test_file}", 'w') as f: + yaml.dump(circular_dict_data, f) + + try: + # This should raise a circular reference exception + dictionary = Dictionary(test_file) + with self.assertRaises(ConfiguratorException) as context: + dictionary.get_json_schema(self.enumerators.version(0)) + + # Verify the error message and event + self.assertIn("Circular reference detected", str(context.exception)) + self.assertEqual(context.exception.event.id, "DIC-07") + self.assertEqual(context.exception.event.type, "CIRCULAR_REFERENCE") + + finally: + # Clean up test file + if os.path.exists(f"{self.config.INPUT_FOLDER}/dictionaries/{test_file}"): + os.remove(f"{self.config.INPUT_FOLDER}/dictionaries/{test_file}") + + def test_type_circular_reference_detection(self): + """Test that circular references in types are detected""" + # Create a simple circular reference test + circular_type_data = { + "description": "Test type with circular reference", + "type": "circular_type" + } + + # Save the test file + test_file = "circular_type.yaml" + with open(f"{self.config.INPUT_FOLDER}/types/{test_file}", 'w') as f: + yaml.dump(circular_type_data, f) + + try: + # This should raise a circular reference exception + type_obj = Type(test_file) + with self.assertRaises(ConfiguratorException) as context: + type_obj.get_json_schema() + + # Verify the error message and event + self.assertIn("Circular type reference detected", str(context.exception)) + self.assertEqual(context.exception.event.id, "TYP-07") + self.assertEqual(context.exception.event.type, "CIRCULAR_TYPE_REFERENCE") + + finally: + # Clean up test file + if os.path.exists(f"{self.config.INPUT_FOLDER}/types/{test_file}"): + os.remove(f"{self.config.INPUT_FOLDER}/types/{test_file}") + + def test_stack_depth_limit_detection(self): + """Test that stack depth limits are enforced""" + # Build a reference chain longer than the stack depth limit of 100 (the last file loops back to the first, but the depth limit trips before that cycle is reached) + deep_dict_data = { + "description": "Test dictionary with deep references", + "type": "object", + "properties": { + "deep_prop": { + "description": "Property that creates deep reference chain", + "ref": "deep_test_1.1.0.0.yaml" + } + } + } + + # Create a chain of references + for i in range(1, 102): # Exceed the default limit of 100 + ref_data = { + "description": f"Deep reference 
level {i}", + "type": "object", + "properties": { + "next_prop": { + "description": f"Next level property {i}", + "ref": f"deep_test_{i+1}.1.0.0.yaml" if i < 101 else "deep_test_1.1.0.0.yaml" + } + } + } + + test_file = f"deep_test_{i}.1.0.0.yaml" + with open(f"{self.config.INPUT_FOLDER}/dictionaries/{test_file}", 'w') as f: + yaml.dump(ref_data, f) + + try: + # This should raise a stack depth exceeded exception + dictionary = Dictionary("deep_test_1.1.0.0.yaml") + with self.assertRaises(ConfiguratorException) as context: + dictionary.get_json_schema(self.enumerators.version(0)) + + # Verify the error message and event + self.assertIn("Reference stack depth exceeded maximum", str(context.exception)) + self.assertEqual(context.exception.event.id, "DIC-08") + self.assertEqual(context.exception.event.type, "STACK_DEPTH_EXCEEDED") + + finally: + # Clean up test files + for i in range(1, 102): + test_file = f"deep_test_{i}.1.0.0.yaml" + if os.path.exists(f"{self.config.INPUT_FOLDER}/dictionaries/{test_file}"): + os.remove(f"{self.config.INPUT_FOLDER}/dictionaries/{test_file}") + + def test_valid_non_circular_references(self): + """Test that valid non-circular references still work""" + # Test with the existing complex_refs data + try: + dictionary = Dictionary("workshop.1.0.0.yaml") + schema = dictionary.get_json_schema(self.enumerators.version(0)) + self.assertIsInstance(schema, dict) + self.assertEqual(schema["type"], "object") + except ConfiguratorException as e: + # If there's a circular reference in the test data, that's fine + # We just want to make sure our detection works + pass + + +if __name__ == '__main__': + unittest.main() \ No newline at end of file diff --git a/tests/services/test_collection_services.py b/tests/services/test_collection_services.py deleted file mode 100644 index 5500bcb..0000000 --- a/tests/services/test_collection_services.py +++ /dev/null @@ -1,318 +0,0 @@ -import unittest -from unittest.mock import patch, MagicMock -import os -from stage0_mongodb_api.services.collection_service import CollectionService, CollectionNotFoundError, CollectionProcessingError -from stage0_py_utils import Config - -class TestCollectionServices(unittest.TestCase): - def setUp(self): - """Set up test fixtures before each test method.""" - # Set up test input folder - self.config = Config.get_instance() - self.test_cases_dir = os.path.join(os.path.dirname(__file__), "..", "test_cases") - - @patch('stage0_mongodb_api.services.collection_service.ConfigManager') - def test_list_collections_success(self, mock_config_manager): - """Test listing all collections successfully.""" - mock_config_manager.return_value.collection_configs = { - "simple": { - "name": "simple", - "versions": [ - {"version": "1.0.0.1"}, - {"version": "1.0.0.2"}, - {"version": "1.0.0.3"} - ] - } - } - mock_config_manager.return_value.load_errors = None - mock_config_manager.return_value.validate_configs.return_value = [] - with patch('stage0_mongodb_api.services.collection_service.VersionManager') as mock_version_manager: - mock_version_manager.get_current_version.return_value = "simple.1.0.0.1" - result = CollectionService.list_collections() - self.assertEqual(len(result), 1) - self.assertIsInstance(result, list) - self.assertEqual(result[0]["collection_name"], "simple") - self.assertEqual(result[0]["version"], "simple.1.0.0.1") - self.assertEqual(result[0]["targeted_version"], "simple.1.0.0.3") - - @patch('stage0_mongodb_api.services.collection_service.ConfigManager') - def test_list_collections_load_error(self, 
mock_config_manager): - """Test listing collections with load errors.""" - mock_config_manager.return_value.load_errors = [{"error": "load_error"}] - with self.assertRaises(CollectionProcessingError) as context: - CollectionService.list_collections() - self.assertEqual(context.exception.collection_name, "collections") - self.assertEqual(context.exception.errors, [{"error": "load_error"}]) - - @patch('stage0_mongodb_api.services.collection_service.ConfigManager') - def test_list_collections_validation_error(self, mock_config_manager): - """Test listing collections with validation errors.""" - mock_config_manager.return_value.load_errors = None - mock_config_manager.return_value.validate_configs.return_value = [{"error": "validation_error"}] - with self.assertRaises(CollectionProcessingError) as context: - CollectionService.list_collections() - self.assertEqual(context.exception.collection_name, "collections") - self.assertEqual(context.exception.errors, [{"error": "validation_error"}]) - - @patch('stage0_mongodb_api.services.collection_service.ConfigManager') - def test_get_collection_success(self, mock_config_manager): - """Test getting a collection successfully.""" - mock_config_manager.return_value.load_errors = None - mock_config_manager.return_value.validate_configs.return_value = [] - mock_config_manager.return_value.get_collection_config.return_value = {"name": "simple"} - result = CollectionService.get_collection("simple") - self.assertEqual(result, {"name": "simple"}) - - @patch('stage0_mongodb_api.services.collection_service.ConfigManager') - def test_get_collection_not_found(self, mock_config_manager): - """Test getting a collection that does not exist.""" - mock_config_manager.return_value.load_errors = None - mock_config_manager.return_value.validate_configs.return_value = [] - mock_config_manager.return_value.get_collection_config.return_value = None - with self.assertRaises(CollectionNotFoundError): - CollectionService.get_collection("nonexistent") - - @patch('stage0_mongodb_api.services.collection_service.ConfigManager') - def test_get_collection_load_error(self, mock_config_manager): - """Test getting a collection with load errors.""" - mock_config_manager.return_value.load_errors = [{"error": "load_error"}] - with self.assertRaises(CollectionProcessingError) as context: - CollectionService.get_collection("simple") - self.assertEqual(context.exception.collection_name, "simple") - self.assertEqual(context.exception.errors, [{"error": "load_error"}]) - - @patch('stage0_mongodb_api.services.collection_service.ConfigManager') - def test_get_collection_validation_error(self, mock_config_manager): - """Test getting a collection with validation errors.""" - mock_config_manager.return_value.load_errors = None - mock_config_manager.return_value.validate_configs.return_value = [{"error": "validation_error"}] - with self.assertRaises(CollectionProcessingError) as context: - CollectionService.get_collection("simple") - self.assertEqual(context.exception.collection_name, "simple") - self.assertEqual(context.exception.errors, [{"error": "validation_error"}]) - - @patch('stage0_mongodb_api.services.collection_service.ConfigManager') - def test_process_collections_success(self, mock_config_manager): - """Test processing all collections successfully.""" - # Mock process_all_collections to return both enumerators and collections - mock_config_manager.return_value.process_all_collections.return_value = { - "enumerators": [ - {"operation": "process_enumerators", "status": "success"}, - 
{"operation": "overall_status", "status": "success"} - ], - "user": [ - {"operation": "remove_schema", "status": "success", "collection": "user"}, - {"operation": "apply_schema", "status": "success", "collection": "user"}, - {"operation": "update_version", "status": "success", "collection": "user"}, - {"operation": "overall_status", "status": "success"} - ], - "organization": [ - {"operation": "remove_schema", "status": "success", "collection": "organization"}, - {"operation": "apply_schema", "status": "success", "collection": "organization"}, - {"operation": "update_version", "status": "success", "collection": "organization"}, - {"operation": "overall_status", "status": "success"} - ], - "media": [ - {"operation": "remove_schema", "status": "success", "collection": "media"}, - {"operation": "apply_schema", "status": "success", "collection": "media"}, - {"operation": "update_version", "status": "success", "collection": "media"}, - {"operation": "overall_status", "status": "success"} - ], - "search": [ - {"operation": "remove_schema", "status": "success", "collection": "search"}, - {"operation": "apply_schema", "status": "success", "collection": "search"}, - {"operation": "update_version", "status": "success", "collection": "search"}, - {"operation": "overall_status", "status": "success"} - ] - } - mock_config_manager.return_value.load_errors = None - mock_config_manager.return_value.validate_configs.return_value = [] - from stage0_mongodb_api.services.collection_service import CollectionService - result = CollectionService.process_collections() - # Should include all collections and enumerators - self.assertEqual(len(result), 5) - collections = {r["collection"] for r in result} - self.assertIn("enumerators", collections) - self.assertIn("user", collections) - self.assertIn("organization", collections) - self.assertIn("media", collections) - self.assertIn("search", collections) - - @patch('stage0_mongodb_api.services.collection_service.ConfigManager') - def test_process_collections_with_error(self, mock_config_manager): - """Test processing collections when an error occurs.""" - mock_config_manager.return_value.process_all_collections.return_value = { - "enumerators": [ - {"operation": "process_enumerators", "status": "success"}, - {"operation": "overall_status", "status": "success"} - ], - "simple": [ - {"operation": "remove_schema", "status": "error", "collection": "simple"}, - {"operation": "overall_status", "status": "error"} - ] - } - mock_config_manager.return_value.load_errors = None - mock_config_manager.return_value.validate_configs.return_value = [] - from stage0_mongodb_api.services.collection_service import CollectionService - result = CollectionService.process_collections() - # Should include enumerators and simple - self.assertEqual(len(result), 2) - collections = {r["collection"] for r in result} - self.assertIn("enumerators", collections) - self.assertIn("simple", collections) - # The simple collection should have error status - for r in result: - if r["collection"] == "simple": - self.assertEqual(r["status"], "error") - - @patch('stage0_mongodb_api.services.collection_service.ConfigManager') - def test_process_collections_skips_not_found(self, mock_config_manager): - """Test process_collections skips collections that raise CollectionNotFoundError.""" - # Simulate only enumerators in the result - mock_config_manager.return_value.process_all_collections.return_value = { - "enumerators": [ - {"operation": "process_enumerators", "status": "success"}, - {"operation": 
"overall_status", "status": "success"} - ] - } - mock_config_manager.return_value.load_errors = None - mock_config_manager.return_value.validate_configs.return_value = [] - from stage0_mongodb_api.services.collection_service import CollectionService - result = CollectionService.process_collections() - # Should only include enumerators - self.assertEqual(len(result), 1) - self.assertEqual(result[0]["collection"], "enumerators") - - @patch('stage0_mongodb_api.services.collection_service.ConfigManager') - def test_process_collection_success(self, mock_config_manager): - """Test processing a specific collection successfully.""" - mock_config_manager.return_value.load_errors = None - mock_config_manager.return_value.validate_configs.return_value = [] - # Use the new consistent format - mock_operations = [ - { - "operation": "evaluate_version", - "collection": "simple", - "message": "Evaluating version 1.0.0.1", - "status": "success" - } - ] - mock_config_manager.return_value.process_collection_versions.return_value = mock_operations - collection_name = "simple" - result = CollectionService.process_collection(collection_name) - # Test structure rather than specific values - self.assertIn("status", result) - self.assertIn("collection", result) - self.assertIn("operations", result) - self.assertEqual(result["status"], "success") - self.assertEqual(result["collection"], collection_name) - self.assertEqual(result["operations"], mock_operations) - - @patch('stage0_mongodb_api.services.collection_service.ConfigManager') - def test_process_collection_not_found(self, mock_config_manager): - """Test processing a non-existent collection.""" - mock_config_manager.return_value.load_errors = None - mock_config_manager.return_value.validate_configs.return_value = [] - mock_config_manager.return_value.process_collection_versions.side_effect = ValueError("Collection 'nonexistent' not found in configurations") - collection_name = "nonexistent" - with self.assertRaises(CollectionNotFoundError): - CollectionService.process_collection(collection_name) - mock_config_manager.return_value.process_collection_versions.assert_called_once_with(collection_name) - - @patch('stage0_mongodb_api.services.collection_service.ConfigManager') - def test_process_collection_processing_error(self, mock_config_manager): - """Test process_collection raises CollectionProcessingError for generic errors.""" - mock_config_manager.return_value.load_errors = None - mock_config_manager.return_value.validate_configs.return_value = [] - mock_config_manager.return_value.process_collection_versions.side_effect = Exception("Some error") - with self.assertRaises(CollectionProcessingError) as context: - CollectionService.process_collection("simple") - self.assertEqual(context.exception.collection_name, "simple") - self.assertEqual(context.exception.errors[0]["message"], "Some error") - - @patch('stage0_mongodb_api.services.collection_service.ConfigManager') - def test_process_collection_returns_error_status_when_operations_fail(self, mock_config_manager): - """Test that process_collection returns error status when operations contain errors.""" - # Arrange - mock_config_manager.return_value.load_errors = None - mock_config_manager.return_value.validate_configs.return_value = [] - - # Mock operations that include an error using the new format - mock_operations = [ - { - "operation": "remove_schema", - "collection": "test_collection", - "message": "Schema removed successfully", - "status": "success" - }, - { - "operation": "apply_schema", - 
"collection": "test_collection", - "message": "Schema validation failed", - "details_type": "error", - "details": {"error": "Schema validation failed"}, - "status": "error" - }, - { - "operation": "update_version", - "collection": "test_collection", - "message": "Version updated successfully", - "status": "success" - } - ] - mock_config_manager.return_value.process_collection_versions.return_value = mock_operations - - collection_name = "test_collection" - - # Act - result = CollectionService.process_collection(collection_name) - - # Assert - self.assertEqual(result["status"], "error") - self.assertEqual(result["collection"], collection_name) - self.assertEqual(result["operations"], mock_operations) - - @patch('stage0_mongodb_api.services.collection_service.ConfigManager') - def test_process_collection_returns_success_status_when_all_operations_succeed(self, mock_config_manager): - """Test that process_collection returns success status when all operations succeed.""" - # Arrange - mock_config_manager.return_value.load_errors = None - mock_config_manager.return_value.validate_configs.return_value = [] - - # Mock operations that all succeed using the new format - mock_operations = [ - { - "operation": "remove_schema", - "collection": "test_collection", - "message": "Schema removed successfully", - "status": "success" - }, - { - "operation": "apply_schema", - "collection": "test_collection", - "message": "Schema applied successfully", - "details_type": "schema", - "details": {"schema": {}, "version": "1.0.0.1"}, - "status": "success" - }, - { - "operation": "update_version", - "collection": "test_collection", - "message": "Version updated successfully", - "status": "success" - } - ] - mock_config_manager.return_value.process_collection_versions.return_value = mock_operations - - collection_name = "test_collection" - - # Act - result = CollectionService.process_collection(collection_name) - - # Assert - self.assertEqual(result["status"], "success") - self.assertEqual(result["collection"], collection_name) - self.assertEqual(result["operations"], mock_operations) - -if __name__ == '__main__': - unittest.main() diff --git a/tests/services/test_configuration_service_integration.py b/tests/services/test_configuration_service_integration.py new file mode 100644 index 0000000..1e03d8c --- /dev/null +++ b/tests/services/test_configuration_service_integration.py @@ -0,0 +1,304 @@ +import unittest +import os +import json +import tempfile +import shutil +from configurator.services.configuration_services import Configuration +from configurator.utils.mongo_io import MongoIO +from configurator.utils.config import Config +from configurator.utils.configurator_exception import ConfiguratorException +import logging +from unittest.mock import patch, Mock +from bson import json_util + +# Suppress logging during tests +logging.getLogger().setLevel(logging.CRITICAL) + + +def set_config_input_folder(folder): + """Set the input folder for configuration processing.""" + os.environ['INPUT_FOLDER'] = folder + from configurator.utils.config import Config + Config._instance = None + return Config.get_instance() + + +def clear_config(): + """Clear the configuration environment.""" + for key in ['INPUT_FOLDER']: + if key in os.environ: + del os.environ[key] + from configurator.utils.config import Config + Config._instance = None + + +class DatabaseHarvester: + """Utility class to harvest database state for comparison.""" + + def __init__(self, mongo_io: MongoIO, config: Config): + self.mongo_io = mongo_io + self.config = 
config + + def harvest_collection_versions(self): + """Harvest the CollectionVersions collection.""" + try: + versions = self.mongo_io.get_documents( + self.config.VERSION_COLLECTION_NAME, + sort_by=[("collection_name", 1)] + ) + # Serialize to Extended JSON format + return json.loads(json_util.dumps(versions)) + except Exception as e: + return [] + + def harvest_collection_data(self, collection_name): + """Harvest all documents from a collection.""" + try: + documents = self.mongo_io.get_documents( + collection_name, + sort_by=[("_id", 1)] + ) + # Serialize to Extended JSON format + return json.loads(json_util.dumps(documents)) + except Exception as e: + return [] + + def harvest_all_collections(self): + """Harvest all collections in the database.""" + result = {} + + # Harvest CollectionVersions + versions = self.harvest_collection_versions() + result[self.config.VERSION_COLLECTION_NAME] = versions + + # Get all collection names + collection_names = self.mongo_io.db.list_collection_names() + print(f"Harvester found collections: {collection_names}") + + # Harvest each collection (except CollectionVersions which we already did) + for collection_name in collection_names: + if collection_name != self.config.VERSION_COLLECTION_NAME: + documents = self.harvest_collection_data(collection_name) + print(f"Harvested {len(documents)} documents from {collection_name}") + result[collection_name] = documents + + return result + + def save_harvested_data(self, output_dir: str, harvested_data: dict): + """Save harvested data to JSON files for comparison.""" + os.makedirs(output_dir, exist_ok=True) + + for collection_name, documents in harvested_data.items(): + filename = f"{collection_name}.json" + filepath = os.path.join(output_dir, filename) + + with open(filepath, 'w') as f: + json.dump(documents, f, indent=2) + + +class TestConfigurationIntegration(unittest.TestCase): + """Test configuration processing integration against verified output files""" + + def setUp(self): + self.test_case = getattr(self, 'test_case', 'small_sample') + self.config = set_config_input_folder(f"./tests/test_cases/{self.test_case}") + + # Drop the database before starting + mongo_io = MongoIO(self.config.MONGO_CONNECTION_STRING, self.config.MONGO_DB_NAME) + mongo_io.drop_database() + mongo_io.disconnect() + + # Add a pause after dropping the database + import time + time.sleep(1) + + # Create temporary directory for test output + self.temp_dir = tempfile.mkdtemp(prefix="test_config_integration_") + + # Initialize MongoDB connection + self.mongo_io = MongoIO(self.config.MONGO_CONNECTION_STRING, self.config.MONGO_DB_NAME) + self.harvester = DatabaseHarvester(self.mongo_io, self.config) + + def tearDown(self): + """Clean up after tests.""" + # Note: Database is NOT dropped here to allow inspection of final state + # Database is only dropped in setUp for clean start + + # Clean up temporary directory + if os.path.exists(self.temp_dir): + shutil.rmtree(self.temp_dir) + + clear_config() + + def test_configuration_processing_integration(self): + """Test configuration processing integration against verified output.""" + # Step 1: Process all configurations + self._process_all_configurations() + + # Step 2: Harvest database state + harvested_data = self.harvester.harvest_all_collections() + + # Step 3: Compare against verified output + self._compare_against_verified_output(harvested_data) + + def _process_all_configurations(self): + """Process all configuration files in the test case.""" + config_dir = 
f"{self.config.INPUT_FOLDER}/configurations" + + if not os.path.exists(config_dir): + self.skipTest(f"No configurations directory found: {config_dir}") + + config_files = [f for f in os.listdir(config_dir) if f.endswith('.yaml')] + + if not config_files: + self.skipTest(f"No configuration files found in: {config_dir}") + + # For large_sample, process in dependency order to handle one_of schemas + if self.test_case == 'large_sample': + # Define dependency order: process dependencies first + dependency_order = [ + 'media.yaml', + 'organization.yaml', + 'user.yaml', # user must be processed before notification + 'notification.yaml', # notification depends on user + 'content.yaml', # content depends on user + 'search.yaml' # Process search last as it depends on others + ] + + # Filter to only include files that exist + ordered_files = [f for f in dependency_order if f in config_files] + + # Add any remaining files not in the dependency order + remaining_files = [f for f in config_files if f not in dependency_order] + ordered_files.extend(remaining_files) + + for filename in ordered_files: + self._process_configuration(filename) + else: + # For other test cases, process in alphabetical order + for filename in sorted(config_files): + self._process_configuration(filename) + + def _process_configuration(self, config_filename): + """Process a single configuration file.""" + try: + configuration = Configuration(config_filename) + event = configuration.process() + + # Debug: Print the result events + print(f"Processing {config_filename} - status: {event.status}") + for sub_event in event.sub_events: + print(f" Sub-event: {sub_event.type} - {sub_event.status}") + if hasattr(sub_event, 'data') and sub_event.data: + print(f" Data: {sub_event.data}") + + if event.status == "FAILURE": + self.fail(f"Configuration processing failed for {config_filename}: {event.data.get('error', 'Unknown error')}") + + except Exception as e: + self.fail(f"Exception processing configuration {config_filename}: {str(e)}") + + def _compare_against_verified_output(self, harvested_data): + """Compare harvested data against verified output files.""" + verified_dir = f"{self.config.INPUT_FOLDER}/verified_output/test_database" + + if not os.path.exists(verified_dir): + self.skipTest(f"No verified output directory found: {verified_dir}") + + # Compare each collection + for collection_name, documents in harvested_data.items(): + self._compare_collection(collection_name, documents, verified_dir) + + def _compare_collection(self, collection_name, actual_documents, verified_dir): + """Compare a collection's actual data against verified output.""" + verified_file = os.path.join(verified_dir, f"{collection_name}.json") + + if not os.path.exists(verified_file): + # If no verified file exists, save the actual data for review + self.harvester.save_harvested_data(self.temp_dir, {collection_name: actual_documents}) + self.fail(f"No verified output file found for {collection_name}. 
" + f"Actual data saved to {self.temp_dir}/{collection_name}.json") + + # Load verified data + with open(verified_file, 'r') as f: + expected_documents = json.load(f) + + # Debug: Print what we're comparing + print(f"Comparing {collection_name}:") + print(f" Expected count: {len(expected_documents)}") + print(f" Actual count: {len(actual_documents)}") + if actual_documents: + print(f" Actual documents: {actual_documents}") + + # Compare document counts + self.assertEqual( + len(actual_documents), + len(expected_documents), + f"Document count mismatch for {collection_name}: " + f"expected {len(expected_documents)}, got {len(actual_documents)}" + ) + + # Sort documents by collection_name for consistent comparison + actual_sorted = sorted(actual_documents, key=lambda x: x.get('collection_name', '')) + expected_sorted = sorted(expected_documents, key=lambda x: x.get('collection_name', '')) + + # Compare each document + for i, (actual_doc, expected_doc) in enumerate(zip(actual_sorted, expected_sorted)): + self._assert_document_equality( + actual_doc, + expected_doc, + f"{collection_name}[{i}]" + ) + + def _assert_document_equality(self, actual, expected, context): + """Assert document equality with detailed diff reporting.""" + # Remove _id fields for comparison since MongoDB generates new ObjectIds + actual_copy = actual.copy() + expected_copy = expected.copy() + + if '_id' in actual_copy: + del actual_copy['_id'] + if '_id' in expected_copy: + del expected_copy['_id'] + + if actual_copy != expected_copy: + diff = self._dict_diff(actual_copy, expected_copy) + self.fail(f"{context} mismatch:\n{diff}") + + def _dict_diff(self, dict1, dict2): + """Generate a detailed diff between two dictionaries.""" + def _diff_dict(d1, d2, path=""): + diff = [] + all_keys = set(d1.keys()) | set(d2.keys()) + + for key in sorted(all_keys): + current_path = f"{path}.{key}" if path else key + + if key not in d1: + diff.append(f"Missing in actual: {current_path}") + elif key not in d2: + diff.append(f"Extra in actual: {current_path} = {d1[key]}") + elif isinstance(d1[key], dict) and isinstance(d2[key], dict): + diff.extend(_diff_dict(d1[key], d2[key], current_path)) + elif d1[key] != d2[key]: + diff.append(f"Value mismatch at {current_path}:") + diff.append(f" Expected: {d2[key]}") + diff.append(f" Actual: {d1[key]}") + + return diff + + return "\n".join(_diff_dict(dict1, dict2)) + + +class TestSmallSampleConfigurationIntegration(TestConfigurationIntegration): + """Test configuration processing integration for small_sample test case.""" + test_case = 'small_sample' + + +class TestLargeSampleConfigurationIntegration(TestConfigurationIntegration): + """Test configuration processing integration for large_sample test case with advanced features.""" + test_case = 'large_sample' + + +if __name__ == '__main__': + unittest.main() \ No newline at end of file diff --git a/tests/services/test_configuration_service_operations.py b/tests/services/test_configuration_service_operations.py new file mode 100644 index 0000000..6e0b916 --- /dev/null +++ b/tests/services/test_configuration_service_operations.py @@ -0,0 +1,189 @@ +import unittest +from configurator.services.configuration_services import Configuration, Version +from configurator.utils.version_number import VersionNumber +import os +import yaml +import json +from configurator.utils.config import Config + + +def load_yaml(path): + with open(path, 'r') as f: + return yaml.safe_load(f) + +def set_config_input_folder(folder): + os.environ['INPUT_FOLDER'] = folder + 
Config._instance = None + return Config.get_instance() + +def clear_config(): + if 'INPUT_FOLDER' in os.environ: + del os.environ['INPUT_FOLDER'] + Config._instance = None + + +class TestConfigurationOperations(unittest.TestCase): + """Test configuration service operations""" + + def setUp(self): + import os + print(f"[DEBUG] INPUT_FOLDER={os.environ.get('INPUT_FOLDER')}") + print(f"[DEBUG] CWD={os.getcwd()}") + self.config = set_config_input_folder("./tests/test_cases/small_sample") + + def tearDown(self): + clear_config() + + def test_configuration_loading(self): + """Test loading configuration from YAML file""" + config = Configuration("sample.yaml") + + self.assertEqual(config.file_name, "sample.yaml") + self.assertEqual(len(config.versions), 1) + + # Test version details + version = config.versions[0] + self.assertEqual(version.collection_name, "sample") + self.assertEqual(version.version_str, "1.0.0.1") + self.assertEqual(version.test_data, "sample.1.0.0.1.json") + + def test_configuration_to_dict(self): + """Test configuration serialization to dictionary""" + config = Configuration("sample.yaml") + config_dict = config.to_dict() + + self.assertEqual(config_dict["file_name"], "sample.yaml") + self.assertEqual(len(config_dict["versions"]), 1) + + version_dict = config_dict["versions"][0] + self.assertEqual(version_dict["version"], "1.0.0.1") + self.assertEqual(version_dict["test_data"], "sample.1.0.0.1.json") + + def test_version_creation(self): + """Test Version object creation and properties""" + version_data = { + "version": "1.0.0.1", + "drop_indexes": ["old_index"], + "add_indexes": ["new_index"], + "migrations": ["migration1"], + "test_data": "test.json" + } + + version = Version("test_collection", version_data, Config.get_instance()) + + self.assertEqual(version.collection_name, "test_collection") + self.assertEqual(version.version_str, "1.0.0.1") + self.assertEqual(version.drop_indexes, ["old_index"]) + self.assertEqual(version.add_indexes, ["new_index"]) + self.assertEqual(version.migrations, ["migration1"]) + self.assertEqual(version.test_data, "test.json") + + + + def test_version_to_dict(self): + """Test Version serialization to dictionary""" + version_data = { + "version": "1.0.0.1", + "drop_indexes": ["index1"], + "add_indexes": ["index2"], + "migrations": ["migration1"], + "test_data": "test.json" + } + + version = Version("test_collection", version_data, Config.get_instance()) + version_dict = version.to_dict() + + self.assertEqual(version_dict["version"], "1.0.0.1") + self.assertEqual(version_dict["drop_indexes"], ["index1"]) + self.assertEqual(version_dict["add_indexes"], ["index2"]) + self.assertEqual(version_dict["migrations"], ["migration1"]) + self.assertEqual(version_dict["test_data"], "test.json") + + def test_version_number_parsing(self): + """Test that Version correctly parses version numbers""" + version_data = {"version": "1.0.0.1", "drop_indexes": [], "add_indexes": [], "migrations": [], "test_data": "test.json"} + version = Version("test_collection", version_data, Config.get_instance()) + + # Test that collection_version is a VersionNumber object + self.assertIsInstance(version.collection_version, VersionNumber) + self.assertEqual(str(version.collection_version), "test_collection.1.0.0.yaml") + self.assertEqual(version.collection_version.get_enumerator_version(), 1) + + def test_configuration_with_multiple_versions(self): + """Test configuration with multiple versions""" + config_data = { + "title": "Test Configuration", + "description": "Test 
configuration with multiple versions", + "versions": [ + { + "version": "1.0.0", + "test_data": "test.1.0.0.1.json" + }, + { + "version": "1.1.0", + "test_data": "test.1.1.0.1.json" + } + ] + } + + config = Configuration("test.yaml", config_data) + + self.assertEqual(config.file_name, "test.yaml") + self.assertEqual(config.title, "Test Configuration") + self.assertEqual(config.description, "Test configuration with multiple versions") + self.assertEqual(len(config.versions), 2) + + # Test first version - VersionNumber defaults enumerator to 0 + version1 = config.versions[0] + self.assertEqual(version1.collection_name, "test") + self.assertEqual(version1.version_str, "1.0.0.0") + self.assertEqual(version1.test_data, "test.1.0.0.1.json") + + # Test second version - VersionNumber defaults enumerator to 0 + version2 = config.versions[1] + self.assertEqual(version2.collection_name, "test") + self.assertEqual(version2.version_str, "1.1.0.0") + self.assertEqual(version2.test_data, "test.1.1.0.1.json") + + def test_get_json_schema_for_version(self): + """Test getting JSON schema for a specific version""" + config = Configuration("sample.yaml") + + # Test getting schema for existing version + schema = config.get_json_schema("1.0.0.1") + self.assertIsInstance(schema, dict) + self.assertIn("type", schema) + self.assertEqual(schema["type"], "object") + + def test_get_bson_schema_for_version(self): + """Test getting BSON schema for a specific version""" + config = Configuration("sample.yaml") + + # Test getting schema for existing version + schema = config.get_bson_schema_for_version("1.0.0.1") + self.assertIsInstance(schema, dict) + self.assertIn("bsonType", schema) + self.assertEqual(schema["bsonType"], "object") + + def test_get_schema_for_nonexistent_version(self): + """Test error handling when requesting schema for non-existent version""" + config = Configuration("sample.yaml") + + # Test getting schema for non-existent version + with self.assertRaises(Exception): + config.get_json_schema("2.0.0.0") + + with self.assertRaises(Exception): + config.get_bson_schema_for_version("2.0.0.0") + + def test_version_enumerator_access(self): + """Test accessing enumerator version from Version object""" + version_data = {"version": "1.0.0.1", "drop_indexes": [], "add_indexes": [], "migrations": [], "test_data": "test.json"} + version = Version("test_collection", version_data, Config.get_instance()) + + # Test that we can access the enumerator version + self.assertEqual(version.collection_version.get_enumerator_version(), 1) + + +if __name__ == '__main__': + unittest.main() \ No newline at end of file diff --git a/tests/services/test_configuration_service_renders.py b/tests/services/test_configuration_service_renders.py new file mode 100644 index 0000000..732ff25 --- /dev/null +++ b/tests/services/test_configuration_service_renders.py @@ -0,0 +1,137 @@ +import unittest +from configurator.services.configuration_services import Configuration +from configurator.services.enumerator_service import Enumerators +from configurator.utils.version_number import VersionNumber +import os +import yaml +import json + + +def load_yaml(path): + with open(path, 'r') as f: + return yaml.safe_load(f) + +def load_json(path): + with open(path, 'r') as f: + return json.load(f) + +def set_config_input_folder(folder): + os.environ['INPUT_FOLDER'] = folder + from configurator.utils.config import Config + Config._instance = None + return Config.get_instance() + +def clear_config(): + if 'INPUT_FOLDER' in os.environ: + del 
os.environ['INPUT_FOLDER'] + from configurator.utils.config import Config + Config._instance = None + + +class TestConfigurationRendering(unittest.TestCase): + """Test configuration rendering against verified output files""" + + def setUp(self): + self.config = set_config_input_folder("./tests/test_cases/small_sample") + self.enumerators_service = Enumerators(None) + + def tearDown(self): + clear_config() + + def test_all_verified_renders(self): + """Test all verified renders match actual renders""" + # Test JSON schema renders + json_dir = f"{self.config.INPUT_FOLDER}/verified_output/json_schema" + for file in os.listdir(json_dir): + if file.endswith('.yaml'): + self._test_json_render(file) + + # Test BSON schema renders + bson_dir = f"{self.config.INPUT_FOLDER}/verified_output/bson_schema" + for file in os.listdir(bson_dir): + if file.endswith('.json'): + self._test_bson_render(file) + + def _test_json_render(self, expected_file): + """Test JSON schema render for a configuration""" + # Parse version from expected file name + # Format: collection.major.minor.patch.enumerator.yaml + base_name = expected_file.replace('.yaml', '') + parts = base_name.split('.') + + if len(parts) != 5: + raise ValueError(f"Expected 5-part format (collection.major.minor.patch.enumerator), got: {expected_file}") + + # Format: collection.major.minor.patch.enumerator.yaml + collection_name = parts[0] + version_str = f"{parts[1]}.{parts[2]}.{parts[3]}.{parts[4]}" + + # Load configuration and render with specific version + configuration = Configuration("sample.yaml") + actual = configuration.get_json_schema(version_str) + + # Load expected + expected_path = f"{self.config.INPUT_FOLDER}/verified_output/json_schema/{expected_file}" + expected = load_yaml(expected_path) + + # Compare + self._assert_dict_equality(actual, expected, f"JSON schema for {collection_name} version {version_str}") + + def _test_bson_render(self, expected_file): + """Test BSON schema render for a configuration""" + # Parse version from expected file name + # Format: collection.major.minor.patch.enumerator.json + base_name = expected_file.replace('.json', '') + parts = base_name.split('.') + + if len(parts) != 5: + raise ValueError(f"Expected 5-part format (collection.major.minor.patch.enumerator), got: {expected_file}") + + # Format: collection.major.minor.patch.enumerator.json + collection_name = parts[0] + version_str = f"{parts[1]}.{parts[2]}.{parts[3]}.{parts[4]}" + + # Load configuration and render with specific version + configuration = Configuration("sample.yaml") + actual = configuration.get_bson_schema_for_version(version_str) + + # Load expected + expected_path = f"{self.config.INPUT_FOLDER}/verified_output/bson_schema/{expected_file}" + expected = load_json(expected_path) + + # Compare + self._assert_dict_equality(actual, expected, f"BSON schema for {collection_name} version {version_str}") + + def _assert_dict_equality(self, actual, expected, context): + """Assert dictionary equality with detailed diff reporting""" + if actual != expected: + diff = self._dict_diff(actual, expected) + self.fail(f"{context} mismatch:\n{diff}") + + def _dict_diff(self, dict1, dict2): + """Generate a detailed diff between two dictionaries""" + def _diff_dict(d1, d2, path=""): + diff = [] + all_keys = set(d1.keys()) | set(d2.keys()) + + for key in sorted(all_keys): + current_path = f"{path}.{key}" if path else key + + if key not in d1: + diff.append(f"Missing in actual: {current_path}") + elif key not in d2: + diff.append(f"Extra in actual: 
{current_path} = {d1[key]}") + elif isinstance(d1[key], dict) and isinstance(d2[key], dict): + diff.extend(_diff_dict(d1[key], d2[key], current_path)) + elif d1[key] != d2[key]: + diff.append(f"Value mismatch at {current_path}:") + diff.append(f" Expected: {d2[key]}") + diff.append(f" Actual: {d1[key]}") + + return diff + + return "\n".join(_diff_dict(dict1, dict2)) + + +if __name__ == '__main__': + unittest.main() \ No newline at end of file diff --git a/tests/services/test_dictionary_service_operations.py b/tests/services/test_dictionary_service_operations.py new file mode 100644 index 0000000..cd1a5ce --- /dev/null +++ b/tests/services/test_dictionary_service_operations.py @@ -0,0 +1,380 @@ +import unittest +from unittest.mock import patch, MagicMock +from configurator.services.dictionary_services import Dictionary, Property +import os +import yaml +import json +from configurator.services.enumerator_service import Enumerators + + +def load_yaml(path): + with open(path, 'r') as f: + return yaml.safe_load(f) + +def load_json(path): + with open(path, 'r') as f: + return json.load(f) + +def set_config_input_folder(folder): + os.environ['INPUT_FOLDER'] = folder + from configurator.utils.config import Config + Config._instance = None + return Config.get_instance() + +def clear_config(): + if 'INPUT_FOLDER' in os.environ: + del os.environ['INPUT_FOLDER'] + from configurator.utils.config import Config + Config._instance = None + + +class TestProperty(unittest.TestCase): + """Test cases for Property class - non-rendering operations""" + + def test_init_with_basic_property(self): + """Test Property initialization with basic property""" + property_data = { + "description": "Test description", + "type": "string", + "required": True + } + prop = Property("test_prop", property_data) + + self.assertEqual(prop.name, "test_prop") + self.assertEqual(prop.description, "Test description") + self.assertEqual(prop.type, "string") + self.assertTrue(prop.required) + self.assertFalse(prop.additional_properties) + self.assertIsNone(prop.ref) + self.assertIsNone(prop.enums) + + def test_init_with_ref(self): + """Test Property initialization with ref""" + property_data = { + "ref": "sample.1.0.0.yaml", + "description": "Reference to another dictionary" + } + prop = Property("test_ref", property_data) + + self.assertEqual(prop.ref, "sample.1.0.0.yaml") + self.assertEqual(prop.description, "Reference to another dictionary") + self.assertEqual(prop.type, "void") + self.assertFalse(prop.required) + + def test_init_with_enum(self): + """Test Property initialization with enum type""" + property_data = { + "description": "Status enum", + "type": "enum", + "enums": "default_status", + "required": True + } + prop = Property("test_enum", property_data) + + self.assertEqual(prop.type, "enum") + self.assertEqual(prop.enums, "default_status") + self.assertTrue(prop.required) + + def test_init_with_enum_array(self): + """Test Property initialization with enum_array type""" + property_data = { + "description": "Array of status enums", + "type": "enum_array", + "enums": "default_status" + } + prop = Property("test_enum_array", property_data) + + self.assertEqual(prop.type, "enum_array") + self.assertEqual(prop.enums, "default_status") + + def test_init_with_array_type(self): + """Test Property initialization with array type""" + property_data = { + "description": "Array of strings", + "type": "array", + "items": { + "description": "String item", + "type": "string" + } + } + prop = Property("test_array", property_data) + + 
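The array and object tests in this file all exercise the same recursive wrapping: an `array` property wraps its `items` definition in another property node, and an `object` property wraps each entry in `properties`. A minimal sketch of that behaviour, assuming the defaults asserted in `test_init_with_missing_values` below (`PropertySketch` is a hypothetical stand-in, not the shipped `Property` class):

```python
class PropertySketch:
    """Illustrative stand-in for configurator.services.dictionary_services.Property."""
    def __init__(self, name, data):
        self.name = name
        # Defaults mirror what test_init_with_missing_values asserts.
        self.description = data.get("description", "Missing Required Description")
        self.type = data.get("type", "void")
        self.required = data.get("required", False)
        # Arrays recursively wrap their item definition in another node.
        self.items = (
            PropertySketch(f"{name}.items", data["items"])
            if self.type == "array" and "items" in data else None
        )
        # Objects recursively wrap each named sub-property.
        self.properties = {
            key: PropertySketch(key, value)
            for key, value in data.get("properties", {}).items()
        } if self.type == "object" else {}

prop = PropertySketch("tags", {"type": "array", "items": {"type": "string"}})
assert prop.items.type == "string" and prop.items.required is False
```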
self.assertEqual(prop.type, "array") + self.assertIsInstance(prop.items, Property) + self.assertEqual(prop.items.description, "String item") + + def test_init_with_object_type(self): + """Test Property initialization with object type""" + property_data = { + "description": "Object with properties", + "type": "object", + "additionalProperties": True, + "properties": { + "name": { + "description": "Name property", + "type": "string" + }, + "age": { + "description": "Age property", + "type": "number" + } + } + } + prop = Property("test_object", property_data) + + self.assertEqual(prop.type, "object") + self.assertTrue(prop.additional_properties) + self.assertIn("name", prop.properties) + self.assertIn("age", prop.properties) + self.assertEqual(prop.properties["name"].description, "Name property") + + def test_init_with_missing_values(self): + """Test Property initialization with missing values""" + property_data = {} + prop = Property("test_prop", property_data) + + self.assertEqual(prop.description, "Missing Required Description") + self.assertIsNone(prop.ref) + self.assertEqual(prop.type, "void") + self.assertIsNone(prop.enums) + self.assertFalse(prop.required) + self.assertFalse(prop.additional_properties) + self.assertEqual(prop.properties, {}) + self.assertIsNone(prop.items) + + def test_to_dict_with_ref(self): + """Test to_dict method for ref property""" + property_data = { + "ref": "sample.1.0.0.yaml" + } + prop = Property("test_ref", property_data) + result = prop.to_dict() + + expected = {"ref": "sample.1.0.0.yaml"} + self.assertEqual(result, expected) + + def test_to_dict_basic(self): + """Test to_dict method for basic property""" + property_data = { + "description": "Test description", + "type": "string", + "required": True + } + prop = Property("test_prop", property_data) + result = prop.to_dict() + + expected = { + "description": "Test description", + "type": "string", + "required": True + } + self.assertEqual(result, expected) + + def test_to_dict_with_array(self): + """Test to_dict method for array property""" + property_data = { + "description": "Array of strings", + "type": "array", + "required": True, + "items": { + "description": "String item", + "type": "string" + } + } + prop = Property("test_array", property_data) + result = prop.to_dict() + + self.assertEqual(result["description"], "Array of strings") + self.assertEqual(result["type"], "array") + self.assertTrue(result["required"]) + self.assertIn("items", result) + + def test_to_dict_with_object(self): + """Test to_dict method for object property""" + property_data = { + "description": "Object with properties", + "type": "object", + "properties": { + "name": {"description": "Name property", "type": "string"} + }, + "additionalProperties": True + } + prop = Property("test_object", property_data) + result = prop.to_dict() + self.assertEqual(result["type"], "object") + self.assertIn("properties", result) + self.assertIn("name", result["properties"]) + self.assertEqual(result["properties"]["name"]["type"], "string") + self.assertIn("required", result) + self.assertFalse(result["required"]) # required should be present and False + + def test_to_dict_with_enum(self): + """Test to_dict method for enum property""" + property_data = { + "description": "Status enum", + "type": "enum", + "enums": "default_status", + "required": True + } + prop = Property("test_enum", property_data) + result = prop.to_dict() + + self.assertEqual(result["description"], "Status enum") + self.assertEqual(result["type"], "enum") + 
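For context on what an `enums` reference ultimately buys you: at render time the name is looked up in a versioned enumerations document and expanded into concrete values (the `Enumerations.get_enum_values` tests later in this diff show the lookup). A hedged sketch of that expansion with an assumed `default_status` mapping; the real logic lives in the configurator render code:

```python
# Assumed enumerations payload, shaped like the ones in the Enumerations tests.
enumerations = {"default_status": {"active": 1, "archived": 2}}

def json_schema_for_enum(prop_dict):
    # An enum property plausibly renders as a string restricted to the
    # enumerator names; this is an illustration, not the shipped renderer.
    values = sorted(enumerations[prop_dict["enums"]])
    return {"type": "string", "enum": values}

assert json_schema_for_enum({"type": "enum", "enums": "default_status"}) == {
    "type": "string",
    "enum": ["active", "archived"],
}
```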
self.assertEqual(result["enums"], "default_status") + self.assertTrue(result["required"]) + + def test_to_dict_with_enum_array(self): + """Test to_dict method for enum_array property""" + property_data = { + "description": "Array of status enums", + "type": "enum_array", + "enums": "default_status" + } + prop = Property("test_enum_array", property_data) + result = prop.to_dict() + + self.assertEqual(result["description"], "Array of status enums") + self.assertEqual(result["type"], "enum_array") + self.assertEqual(result["enums"], "default_status") + + def test_get_required(self): + """Test _get_required method""" + property_data = { + "description": "Object with required properties", + "type": "object", + "properties": { + "id": { + "description": "Required ID", + "type": "string", + "required": True + }, + "name": { + "description": "Optional name", + "type": "string", + "required": False + }, + "status": { + "description": "Required status", + "type": "enum", + "enums": "default_status", + "required": True + } + } + } + prop = Property("test_object", property_data) + required = prop._get_required() + + self.assertIn("id", required) + self.assertIn("status", required) + self.assertNotIn("name", required) + self.assertEqual(len(required), 2) + + +class TestDictionary(unittest.TestCase): + """Test cases for Dictionary class - non-rendering operations""" + + @patch('configurator.services.dictionary_services.FileIO') + def test_init_with_file_name(self, mock_file_io): + """Test Dictionary initialization with file name""" + mock_file_io.get_document.return_value = { + "description": "Test dictionary", + "version": "1.0.0", + "properties": { + "name": { + "description": "Name property", + "type": "string" + } + } + } + + dictionary = Dictionary("test.yaml") + + self.assertEqual(dictionary.file_name, "test.yaml") + self.assertIsInstance(dictionary.property, Property) + self.assertEqual(dictionary.property.description, "Test dictionary") + + def test_init_with_document(self): + """Test Dictionary initialization with document""" + doc = { + "description": "Test dictionary", + "version": "1.0.0", + "properties": { + "name": { + "description": "Name property", + "type": "string" + } + } + } + dictionary = Dictionary("test.yaml", doc) + self.assertEqual(dictionary.file_name, "test.yaml") + self.assertIsInstance(dictionary.property, Property) + self.assertEqual(dictionary.property.description, "Test dictionary") + + def test_to_dict(self): + """Test Dictionary to_dict method""" + dictionary = Dictionary("test.yaml", { + "description": "Test dictionary", + "version": "1.0.0", + "type": "object", + "properties": { + "name": { + "type": "string", + "description": "Name field" + } + } + }) + + result = dictionary.to_dict() + self.assertEqual(result["description"], "Test dictionary") + self.assertIn("properties", result) + self.assertEqual(result["file_name"], "test.yaml") + self.assertEqual(result["_locked"], False) + + +class TestDictionaryCanonical(unittest.TestCase): + """Test cases for Dictionary class using canonical test data""" + + def setUp(self): + self.config = set_config_input_folder("tests/test_cases/small_sample") + + def tearDown(self): + clear_config() + + def test_dictionary_object(self): + """Test Dictionary with object type from test data""" + doc = load_yaml("tests/test_cases/small_sample/dictionaries/sample.1.0.0.yaml") + + dictionary = Dictionary("sample.1.0.0.yaml", doc) + + self.assertEqual(dictionary.file_name, "sample.1.0.0.yaml") + 
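For readers without the fixture checked out: this canonical test loads `tests/test_cases/small_sample/dictionaries/sample.1.0.0.yaml` above. An illustrative stand-in for what such a document might look like, inferred from the assertions (the exact fixture contents are an assumption):

```python
import yaml

# Hypothetical reconstruction of the small_sample dictionary fixture.
sample_yaml = """
description: A simple collection for testing
type: object
properties:
  _id:
    description: The unique identifier
    type: identifier
    required: true
  name:
    description: The name
    type: word
    required: true
"""
doc = yaml.safe_load(sample_yaml)
assert doc["description"] == "A simple collection for testing"
assert doc["type"] == "object"
```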
self.assertEqual(dictionary.property.description, "A simple collection for testing") + self.assertEqual(dictionary.property.type, "object") + + # Test to_dict + result = dictionary.to_dict() + self.assertEqual(result["description"], "A simple collection for testing") + self.assertEqual(result["type"], "object") + self.assertIn("properties", result) + self.assertEqual(result["file_name"], "sample.1.0.0.yaml") + self.assertEqual(result["_locked"], False) + + def test_dictionary_without_properties(self): + """Test Dictionary without properties""" + doc = { + "description": "Test dictionary without properties", + "version": "1.0.0" + } + + dictionary = Dictionary("test.yaml", doc) + + self.assertEqual(dictionary.property.description, "Test dictionary without properties") + + # Test to_dict + result = dictionary.to_dict() + self.assertEqual(result["description"], "Test dictionary without properties") + self.assertEqual(result["file_name"], "test.yaml") + self.assertEqual(result["_locked"], False) + +if __name__ == '__main__': + unittest.main() \ No newline at end of file diff --git a/tests/services/test_dictionary_service_renders.py b/tests/services/test_dictionary_service_renders.py new file mode 100644 index 0000000..bb7ceff --- /dev/null +++ b/tests/services/test_dictionary_service_renders.py @@ -0,0 +1,158 @@ +import unittest +from configurator.services.dictionary_services import Dictionary +from configurator.services.enumerator_service import Enumerators +from configurator.utils.version_number import VersionNumber +import os +import yaml +import json + + +def load_yaml(path): + with open(path, 'r') as f: + return yaml.safe_load(f) + +def load_json(path): + with open(path, 'r') as f: + return json.load(f) + +def set_config_input_folder(folder): + os.environ['INPUT_FOLDER'] = folder + from configurator.utils.config import Config + Config._instance = None + return Config.get_instance() + +def clear_config(): + if 'INPUT_FOLDER' in os.environ: + del os.environ['INPUT_FOLDER'] + from configurator.utils.config import Config + Config._instance = None + + +class TestDictionaryRendering(unittest.TestCase): + """Test dictionary rendering against verified output files""" + + def setUp(self): + self.test_case = getattr(self, 'test_case', 'small_sample') + self.config = set_config_input_folder(f"./tests/test_cases/{self.test_case}") + self.enumerators_service = Enumerators(None) + + def tearDown(self): + clear_config() + + def test_all_verified_renders(self): + """Test all verified renders match actual renders""" + # Test JSON schema renders + json_dir = f"{self.config.INPUT_FOLDER}/verified_output/json_schema" + for file in os.listdir(json_dir): + if file.endswith('.yaml'): + self._test_json_render(file) + + # Test BSON schema renders + bson_dir = f"{self.config.INPUT_FOLDER}/verified_output/bson_schema" + for file in os.listdir(bson_dir): + if file.endswith('.json'): + self._test_bson_render(file) + + def _test_json_render(self, expected_file): + """Test JSON schema render for a dictionary""" + # Parse version from expected file name + # Format: collection.major.minor.patch.enumerator.yaml + base_name = expected_file.replace('.yaml', '') + parts = base_name.split('.') + + if len(parts) != 5: + raise ValueError(f"Expected 5-part format (collection.major.minor.patch.enumerator), got: {expected_file}") + + # Format: collection.major.minor.patch.enumerator.yaml + dictionary_name = f"{parts[0]}.{parts[1]}.{parts[2]}.{parts[3]}" + enumerator_version = int(parts[4]) + + # Load dictionary and render with 
specific enumerator version + dictionary_path = f"{self.config.INPUT_FOLDER}/dictionaries/{dictionary_name}.yaml" + dictionary_data = load_yaml(dictionary_path) + dictionary_instance = Dictionary(dictionary_name, dictionary_data) + + # Get the specific enumerator version + enumerators = self.enumerators_service.version(enumerator_version) + actual = dictionary_instance.get_json_schema(enumerators) + + # Load expected + expected_path = f"{self.config.INPUT_FOLDER}/verified_output/json_schema/{expected_file}" + expected = load_yaml(expected_path) + + # Compare + self._assert_dict_equality(actual, expected, f"JSON schema for {dictionary_name} with enumerator version {enumerator_version}") + + def _test_bson_render(self, expected_file): + """Test BSON schema render for a dictionary""" + # Parse version from expected file name + # Format: collection.major.minor.patch.enumerator.json + base_name = expected_file.replace('.json', '') + parts = base_name.split('.') + + if len(parts) != 5: + raise ValueError(f"Expected 5-part format (collection.major.minor.patch.enumerator), got: {expected_file}") + + # Format: collection.major.minor.patch.enumerator.json + dictionary_name = f"{parts[0]}.{parts[1]}.{parts[2]}.{parts[3]}" + enumerator_version = int(parts[4]) + + # Load dictionary and render with specific enumerator version + dictionary_path = f"{self.config.INPUT_FOLDER}/dictionaries/{dictionary_name}.yaml" + dictionary_data = load_yaml(dictionary_path) + dictionary_instance = Dictionary(dictionary_name, dictionary_data) + + # Get the specific enumerator version + enumerators = self.enumerators_service.version(enumerator_version) + actual = dictionary_instance.get_bson_schema(enumerators) + + # Load expected + expected_path = f"{self.config.INPUT_FOLDER}/verified_output/bson_schema/{expected_file}" + expected = load_json(expected_path) + + # Compare + self._assert_dict_equality(actual, expected, f"BSON schema for {dictionary_name} with enumerator version {enumerator_version}") + + def _assert_dict_equality(self, actual, expected, context): + """Assert dictionary equality with detailed diff reporting""" + if actual != expected: + diff = self._dict_diff(actual, expected) + self.fail(f"{context} mismatch:\n{diff}") + + def _dict_diff(self, dict1, dict2): + """Generate a detailed diff between two dictionaries""" + def _diff_dict(d1, d2, path=""): + diff = [] + all_keys = set(d1.keys()) | set(d2.keys()) + + for key in sorted(all_keys): + current_path = f"{path}.{key}" if path else key + + if key not in d1: + diff.append(f"Missing in actual: {current_path}") + elif key not in d2: + diff.append(f"Extra in actual: {current_path} = {d1[key]}") + elif isinstance(d1[key], dict) and isinstance(d2[key], dict): + diff.extend(_diff_dict(d1[key], d2[key], current_path)) + elif d1[key] != d2[key]: + diff.append(f"Value mismatch at {current_path}:") + diff.append(f" Expected: {d2[key]}") + diff.append(f" Actual: {d1[key]}") + + return diff + + return "\n".join(_diff_dict(dict1, dict2)) + + +class TestSmallSampleRendering(TestDictionaryRendering): + """Test dictionary rendering for small_sample test case""" + test_case = 'small_sample' + + +class TestLargeSampleRendering(TestDictionaryRendering): + """Test dictionary rendering for large_sample test case with advanced features""" + test_case = 'large_sample' + + +if __name__ == '__main__': + unittest.main() \ No newline at end of file diff --git a/tests/services/test_enumerator_service.py b/tests/services/test_enumerator_service.py new file mode 100644 index 
0000000..869cea4 --- /dev/null +++ b/tests/services/test_enumerator_service.py @@ -0,0 +1,220 @@ +import unittest +from unittest.mock import patch, MagicMock, Mock +from configurator.services.enumerator_service import Enumerators, Enumerations +from configurator.utils.configurator_exception import ConfiguratorException, ConfiguratorEvent + +class TestEnumerators(unittest.TestCase): + @patch('configurator.services.enumerator_service.FileIO.get_document') + def test_init_with_none_data_loads_file(self, mock_get_document): + """Test Enumerators initialization with None data loads from file.""" + mock_get_document.return_value = [{"version": 0, "enumerators": {}}] + enum = Enumerators(None) + self.assertEqual(enum.dict, [{"version": 0, "enumerators": {}}]) + mock_get_document.assert_called_once_with("test_data", "enumerators.json") + + def test_init_with_data(self): + """Test Enumerators initialization with provided data.""" + data = [{"version": 0, "enumerators": {}}] + enum = Enumerators(data) + self.assertEqual(enum.dict, data) + + @patch('configurator.services.enumerator_service.FileIO.put_document') + @patch('configurator.services.enumerator_service.FileIO.get_document') + def test_save_success_with_changes(self, mock_get_document, mock_put_document): + """Test successful save with file comparison showing changes.""" + # Arrange + original_data = [{"version": 0, "enumerators": {"old": {"value": 1}}}] + cleaned_data = [{"version": 0, "enumerators": {"new": {"value": 2}}}] + + # Mock get_document to return different original and saved content + mock_get_document.side_effect = [original_data, cleaned_data] + + # Mock put_document to return None (no longer returns File object) + mock_put_document.return_value = None + + enum = Enumerators(cleaned_data) + + # Act + result = enum.save() + + # Assert + self.assertEqual(result, enum) # save() now returns self + + # Verify FileIO calls + expected_data = cleaned_data + mock_put_document.assert_called_once_with("test_data", "enumerators.json", expected_data) + + @patch('configurator.services.enumerator_service.FileIO.put_document') + @patch('configurator.services.enumerator_service.FileIO.get_document') + def test_save_success_no_changes(self, mock_get_document, mock_put_document): + """Test successful save with no changes detected.""" + # Arrange + same_data = [{"version": 0, "enumerators": {"same": {"value": 1}}}] + + # Mock get_document to return same content for original and saved + mock_get_document.side_effect = [same_data, same_data] + + # Mock put_document to return None (no longer returns File object) + mock_put_document.return_value = None + + enum = Enumerators(same_data) + + # Act + result = enum.save() + + # Assert + self.assertEqual(result, enum) # save() now returns self + + # Verify FileIO calls + expected_data = same_data + mock_put_document.assert_called_once_with("test_data", "enumerators.json", expected_data) + + @patch('configurator.services.enumerator_service.FileIO.put_document') + @patch('configurator.services.enumerator_service.FileIO.get_document') + def test_save_configurator_exception(self, mock_get_document, mock_put_document): + """Test save when ConfiguratorException is raised during file operations.""" + # Arrange + data = [{"version": 0, "enumerators": {}}] + enum = Enumerators(data) + + # Mock put_document to raise ConfiguratorException + event = ConfiguratorEvent("TEST-01", "TEST_ERROR") + mock_put_document.side_effect = ConfiguratorException("Test error", event) + + # Act & Assert + with 
self.assertRaises(ConfiguratorException) as cm: + enum.save() + + self.assertIn("Failed to save enumerators", str(cm.exception)) + + @patch('configurator.services.enumerator_service.FileIO.put_document') + @patch('configurator.services.enumerator_service.FileIO.get_document') + def test_save_general_exception(self, mock_get_document, mock_put_document): + """Test save when general Exception is raised during file operations.""" + # Arrange + data = [{"version": 0, "enumerators": {}}] + enum = Enumerators(data) + + # Mock put_document to raise general Exception + mock_put_document.side_effect = Exception("Unexpected error") + + # Act & Assert + with self.assertRaises(ConfiguratorException) as cm: + enum.save() + + self.assertIn("Failed to save enumerators", str(cm.exception)) + + def test_version_returns_correct_version(self): + """Test that version method returns the correct version.""" + data = [ + {"version": 0, "enumerators": {"a": 1}}, + {"version": 1, "enumerators": {"b": 2}} + ] + enum = Enumerators(data) + # Test that version method returns the correct version + result = enum.version(1) + self.assertEqual(result.version, 1) + self.assertEqual(result.enumerators, {"b": 2}) + + def test_to_dict_returns_data(self): + """Test that to_dict method returns the enumerators data.""" + data = [{"version": 0, "enumerators": {"test": {"value": 1}}}] + enum = Enumerators(data) + result = enum.to_dict() + expected = data + self.assertEqual(result, expected) + +class TestEnumerations(unittest.TestCase): + def test_init_with_valid_data(self): + """Test Enumerations initialization with valid data.""" + data = { + "name": "test", + "status": "active", + "version": 1, + "enumerators": {"foo": {"bar": 1}}, + "_locked": True + } + enum = Enumerations(data) + self.assertEqual(enum.name, "test") + self.assertEqual(enum.status, "active") + self.assertEqual(enum.version, 1) + self.assertEqual(enum.enumerators, {"foo": {"bar": 1}}) + self.assertTrue(enum._locked) + + def test_init_without_locked_defaults_to_false(self): + """Test Enumerations initialization without _locked defaults to False.""" + data = { + "name": "test", + "status": "active", + "version": 1, + "enumerators": {"foo": {"bar": 1}} + } + enum = Enumerations(data) + self.assertFalse(enum._locked) + + def test_to_dict_includes_locked(self): + """Test that to_dict includes _locked property.""" + data = { + "name": "test", + "status": "active", + "version": 1, + "enumerators": {"foo": {"bar": 1}}, + "_locked": True + } + enum = Enumerations(data) + result = enum.to_dict() + self.assertEqual(result["_locked"], True) + self.assertEqual(result["name"], "test") + self.assertEqual(result["status"], "active") + self.assertEqual(result["version"], 1) + self.assertEqual(result["enumerators"], {"foo": {"bar": 1}}) + + def test_init_with_none_data_raises(self): + """Test Enumerations initialization with None data raises ConfiguratorException.""" + with self.assertRaises(ConfiguratorException): + Enumerations(None) + + def test_init_with_invalid_data_raises(self): + """Test Enumerations initialization with invalid data raises ConfiguratorException.""" + with self.assertRaises(ConfiguratorException): + Enumerations("invalid_data") + + def test_get_enum_values_success(self): + """Test successful retrieval of enum values.""" + data = { + "name": "test", + "status": "active", + "version": 1, + "enumerators": {"foo": {"bar": 1, "baz": 2}} + } + enum = Enumerations(data) + values = enum.get_enum_values("foo") + self.assertIn("bar", values) + self.assertIn("baz", 
values) + + def test_get_enum_values_invalid_name_raises(self): + """Test that get_enum_values raises ConfiguratorException for invalid enum name.""" + data = { + "name": "test", + "status": "active", + "version": 1, + "enumerators": {"foo": {"bar": 1}} + } + enum = Enumerations(data) + with self.assertRaises(ConfiguratorException): + enum.get_enum_values("not_a_key") + + def test_get_enum_values_with_none_enumerators_raises(self): + """Test that get_enum_values raises ConfiguratorException when enumerators is None.""" + data = { + "name": "test", + "status": "active", + "version": 1, + "enumerators": None + } + enum = Enumerations(data) + with self.assertRaises(ConfiguratorException): + enum.get_enum_values("foo") + +if __name__ == '__main__': + unittest.main() \ No newline at end of file diff --git a/tests/services/test_migration_events.py b/tests/services/test_migration_events.py new file mode 100644 index 0000000..a216d3d --- /dev/null +++ b/tests/services/test_migration_events.py @@ -0,0 +1,137 @@ +import unittest +from unittest.mock import Mock, patch, MagicMock +import os +import tempfile +import json + +from configurator.services.configuration_services import Configuration, Version +from configurator.utils.mongo_io import MongoIO +from configurator.utils.config import Config + + +class TestMigrationEvents(unittest.TestCase): + """Test migration event structure and nesting.""" + + def setUp(self): + """Set up test fixtures.""" + self.config = Config.get_instance() + self.config.INPUT_FOLDER = tempfile.mkdtemp() + + # Create a test migration file + self.migration_file = os.path.join(self.config.INPUT_FOLDER, "migrations", "test_migration.json") + os.makedirs(os.path.dirname(self.migration_file), exist_ok=True) + + # Create a simple migration pipeline + migration_pipeline = [ + {"$addFields": {"test_field": "test_value"}}, + {"$out": "test_collection"} + ] + + with open(self.migration_file, 'w') as f: + json.dump(migration_pipeline, f) + + def tearDown(self): + """Clean up test fixtures.""" + import shutil + shutil.rmtree(self.config.INPUT_FOLDER) + + @patch('configurator.utils.mongo_io.MongoClient') + @patch('configurator.services.configuration_services.Enumerators') + def test_migration_event_structure(self, mock_enumerators, mock_client): + """Test that migration events are properly nested.""" + # Use MagicMock for __getitem__ support + mock_client_instance = MagicMock() + mock_db = MagicMock() + mock_collection = MagicMock() + mock_client.return_value = mock_client_instance + mock_client_instance.__getitem__.return_value = mock_db + mock_db.__getitem__.return_value = mock_collection + mock_collection.aggregate.return_value = [] + + # Mock the admin command for ping + mock_admin = MagicMock() + mock_client_instance.admin = mock_admin + mock_admin.command.return_value = {"ok": 1} + + # Patch Enumerators.version to return a dummy enumerations object + mock_enumerators.return_value.version.return_value = {} + + # Create a configuration with migrations + config_data = { + "title": "Test Collection", + "description": "Test collection for migration events", + "name": "test_collection", + "versions": [ + { + "version": "1.0.0.1", + "migrations": ["test_migration.json"] + } + ] + } + + # Create configuration and version objects + config = Configuration("test.yaml", config_data) + version = config.versions[0] + + # Patch get_bson_schema to return a minimal valid schema + version.get_bson_schema = MagicMock(return_value={"type": "object", "properties": {}}) + + # Mock MongoIO + mongo_io = 
MongoIO("mongodb://localhost:27017", "test_db") + + # Process the version + event = version.process(mongo_io) + + # Verify the main event structure + self.assertEqual(event.id, "test.1.0.0.1") + self.assertEqual(event.type, "PROCESS") + self.assertEqual(event.status, "SUCCESS") + + # Find the EXECUTE_MIGRATIONS sub-event + migrations_event = None + for sub_event in event.sub_events: + if sub_event.type == "EXECUTE_MIGRATIONS": + migrations_event = sub_event + break + + self.assertIsNotNone(migrations_event, "EXECUTE_MIGRATIONS event should exist") + self.assertEqual(migrations_event.id, "PRO-03") + self.assertEqual(migrations_event.status, "SUCCESS") + + # Verify migration file event (MON-14) exists and is nested + migration_file_event = None + for sub_event in migrations_event.sub_events: + if sub_event.type == "EXECUTE_MIGRATION_FILE": + migration_file_event = sub_event + break + + self.assertIsNotNone(migration_file_event, "EXECUTE_MIGRATION_FILE event should exist") + self.assertEqual(migration_file_event.id, "MON-14") + self.assertEqual(migration_file_event.status, "SUCCESS") + + # Verify the migration file event has the correct data + self.assertIn("migration_file", migration_file_event.data) + self.assertEqual(migration_file_event.data["migration_file"], "test_migration.json") + self.assertIn("pipeline_stages", migration_file_event.data) + self.assertEqual(migration_file_event.data["pipeline_stages"], 2) + + # Verify that MON-13 (LOAD_MIGRATION) and MON-08 (EXECUTE_MIGRATION) events are nested + load_event = None + execute_event = None + + for sub_event in migration_file_event.sub_events: + if sub_event.type == "LOAD_MIGRATION": + load_event = sub_event + elif sub_event.type == "EXECUTE_MIGRATION": + execute_event = sub_event + + self.assertIsNotNone(load_event, "LOAD_MIGRATION event should be nested") + self.assertIsNotNone(execute_event, "EXECUTE_MIGRATION event should be nested") + self.assertEqual(load_event.id, "MON-13") + self.assertEqual(execute_event.id, "MON-08") + self.assertEqual(load_event.status, "SUCCESS") + self.assertEqual(execute_event.status, "SUCCESS") + + +if __name__ == '__main__': + unittest.main() \ No newline at end of file diff --git a/tests/services/test_oneof_operations.py b/tests/services/test_oneof_operations.py new file mode 100644 index 0000000..4f5ba62 --- /dev/null +++ b/tests/services/test_oneof_operations.py @@ -0,0 +1,17 @@ +import unittest +from configurator.services.dictionary_services import OneOf + +class TestOneOfMinimal(unittest.TestCase): + def test_basic_construction_and_to_dict(self): + data = {'schemas': {'foo': {'type': 'object', 'properties': {}}}} + one_of = OneOf(data) + result = one_of.to_dict() + + # Check that the structure is correct + self.assertIn('schemas', result) + self.assertIn('foo', result['schemas']) + self.assertEqual(result['schemas']['foo']['type'], 'object') + self.assertIn('properties', result['schemas']['foo']) + +if __name__ == '__main__': + unittest.main() \ No newline at end of file diff --git a/tests/services/test_render_service.py b/tests/services/test_render_service.py deleted file mode 100644 index c7ce1f5..0000000 --- a/tests/services/test_render_service.py +++ /dev/null @@ -1,173 +0,0 @@ -import unittest -from unittest.mock import patch, MagicMock -from stage0_mongodb_api.services.render_service import RenderService, RenderNotFoundError, RenderProcessingError - -class TestRenderService(unittest.TestCase): - """Test cases for RenderService static methods.""" - - 
@patch('stage0_mongodb_api.services.render_service.ConfigManager') - @patch('stage0_mongodb_api.services.render_service.SchemaManager') - def test_render_json_schema_success(self, mock_schema_manager_class, mock_config_manager_class): - """Test successful JSON schema rendering.""" - # Arrange - schema_name = "test_collection.1.0.0.1" - mock_schema = {"type": "object", "properties": {"test": {"type": "string"}}} - - mock_config_manager = MagicMock() - mock_config_manager.load_errors = [] - mock_config_manager.validate_configs.return_value = [] - mock_config_manager_class.return_value = mock_config_manager - - mock_schema_manager = MagicMock() - mock_schema_manager.render_one.return_value = mock_schema - mock_schema_manager_class.return_value = mock_schema_manager - - # Act - result = RenderService.render_json_schema(schema_name) - - # Assert - self.assertEqual(result, mock_schema) - # Check that render_one was called with the correct arguments - mock_schema_manager.render_one.assert_called_once() - call_args = mock_schema_manager.render_one.call_args - self.assertEqual(call_args[0][0], schema_name) - self.assertEqual(call_args[0][1].value, "json") - - @patch('stage0_mongodb_api.services.render_service.ConfigManager') - @patch('stage0_mongodb_api.services.render_service.SchemaManager') - def test_render_bson_schema_success(self, mock_schema_manager_class, mock_config_manager_class): - """Test successful BSON schema rendering.""" - # Arrange - schema_name = "test_collection.1.0.0.1" - mock_schema = {"bsonType": "object", "properties": {"test": {"bsonType": "string"}}} - - mock_config_manager = MagicMock() - mock_config_manager.load_errors = [] - mock_config_manager.validate_configs.return_value = [] - mock_config_manager_class.return_value = mock_config_manager - - mock_schema_manager = MagicMock() - mock_schema_manager.render_one.return_value = mock_schema - mock_schema_manager_class.return_value = mock_schema_manager - - # Act - result = RenderService.render_bson_schema(schema_name) - - # Assert - self.assertEqual(result, mock_schema) - # Check that render_one was called with the correct arguments - mock_schema_manager.render_one.assert_called_once() - call_args = mock_schema_manager.render_one.call_args - self.assertEqual(call_args[0][0], schema_name) - self.assertEqual(call_args[0][1].value, "bson") - - @patch('stage0_mongodb_api.services.render_service.ConfigManager') - def test_render_openapi_not_implemented(self, mock_config_manager_class): - """Test that OpenAPI rendering returns a not implemented message.""" - # Arrange - schema_name = "test_collection.1.0.0.1" - expected_message = {"message": "OpenAPI rendering not yet implemented"} - - # Act - result = RenderService.render_openapi(schema_name) - - # Assert - self.assertEqual(result, expected_message) - - @patch('stage0_mongodb_api.managers.schema_manager.MongoIO') - @patch('stage0_mongodb_api.services.render_service.ConfigManager') - def test_render_json_schema_load_errors(self, mock_config_manager_class, mock_mongoio_class): - # This ensures that any call to MongoIO.get_instance() returns a mock - mock_mongoio_class.get_instance.return_value = MagicMock() - - # Arrange - schema_name = "test_collection.1.0.0.1" - load_errors = [{"error": "load_error", "message": "Failed to load config"}] - - mock_config_manager = MagicMock() - mock_config_manager.load_errors = load_errors - mock_config_manager_class.return_value = mock_config_manager - - # Act & Assert - with self.assertRaises(RenderProcessingError) as context: - 
RenderService.render_json_schema(schema_name) - - self.assertEqual(context.exception.schema_name, schema_name) - self.assertEqual(context.exception.errors, load_errors) - - @patch('stage0_mongodb_api.managers.schema_manager.MongoIO') - @patch('stage0_mongodb_api.services.render_service.ConfigManager') - def test_render_bson_schema_validation_errors(self, mock_config_manager_class, mock_mongoio_class): - # This ensures that any call to MongoIO.get_instance() returns a mock - mock_mongoio_class.get_instance.return_value = MagicMock() - - # Arrange - schema_name = "test_collection.1.0.0.1" - validation_errors = [{"error": "validation_error", "message": "Invalid schema"}] - - mock_config_manager = MagicMock() - mock_config_manager.load_errors = [] - mock_config_manager.validate_configs.return_value = validation_errors - mock_config_manager_class.return_value = mock_config_manager - - # Act & Assert - with self.assertRaises(RenderProcessingError) as context: - RenderService.render_bson_schema(schema_name) - - self.assertEqual(context.exception.schema_name, schema_name) - self.assertEqual(context.exception.errors, validation_errors) - - @patch('stage0_mongodb_api.services.render_service.ConfigManager') - @patch('stage0_mongodb_api.services.render_service.SchemaManager') - def test_render_json_schema_rendering_error(self, mock_schema_manager_class, mock_config_manager_class): - """Test JSON schema rendering when schema manager raises an exception.""" - # Arrange - schema_name = "test_collection.1.0.0.1" - - mock_config_manager = MagicMock() - mock_config_manager.load_errors = [] - mock_config_manager.validate_configs.return_value = [] - mock_config_manager_class.return_value = mock_config_manager - - mock_schema_manager = MagicMock() - mock_schema_manager.render_one.side_effect = Exception("Schema rendering failed") - mock_schema_manager_class.return_value = mock_schema_manager - - # Act & Assert - with self.assertRaises(RenderProcessingError) as context: - RenderService.render_json_schema(schema_name) - - self.assertEqual(context.exception.schema_name, schema_name) - self.assertEqual(len(context.exception.errors), 1) - self.assertEqual(context.exception.errors[0]["error"], "rendering_error") - self.assertEqual(context.exception.errors[0]["error_id"], "RND-002") - self.assertEqual(context.exception.errors[0]["message"], "Schema rendering failed") - - @patch('stage0_mongodb_api.services.render_service.ConfigManager') - @patch('stage0_mongodb_api.services.render_service.SchemaManager') - def test_render_bson_schema_rendering_error(self, mock_schema_manager_class, mock_config_manager_class): - """Test BSON schema rendering when schema manager raises an exception.""" - # Arrange - schema_name = "test_collection.1.0.0.1" - - mock_config_manager = MagicMock() - mock_config_manager.load_errors = [] - mock_config_manager.validate_configs.return_value = [] - mock_config_manager_class.return_value = mock_config_manager - - mock_schema_manager = MagicMock() - mock_schema_manager.render_one.side_effect = Exception("Schema rendering failed") - mock_schema_manager_class.return_value = mock_schema_manager - - # Act & Assert - with self.assertRaises(RenderProcessingError) as context: - RenderService.render_bson_schema(schema_name) - - self.assertEqual(context.exception.schema_name, schema_name) - self.assertEqual(len(context.exception.errors), 1) - self.assertEqual(context.exception.errors[0]["error"], "rendering_error") - self.assertEqual(context.exception.errors[0]["error_id"], "RND-003") - 
self.assertEqual(context.exception.errors[0]["message"], "Schema rendering failed") - -if __name__ == '__main__': - unittest.main() \ No newline at end of file diff --git a/tests/services/test_template_service.py b/tests/services/test_template_service.py new file mode 100644 index 0000000..b433151 --- /dev/null +++ b/tests/services/test_template_service.py @@ -0,0 +1,223 @@ +import unittest +from unittest.mock import patch, MagicMock +from configurator.services.template_service import TemplateService +from configurator.utils.configurator_exception import ConfiguratorException, ConfiguratorEvent +import yaml +import os + + +def set_config_input_folder(folder): + os.environ['INPUT_FOLDER'] = folder + from configurator.utils.config import Config + Config._instance = None + return Config.get_instance() + + +def clear_config(): + if 'INPUT_FOLDER' in os.environ: + del os.environ['INPUT_FOLDER'] + from configurator.utils.config import Config + Config._instance = None + + +class TestTemplateService(unittest.TestCase): + """Test cases for TemplateService""" + + def setUp(self): + self.config = set_config_input_folder("tests/test_cases/small_sample") + self.template_service = TemplateService() + + def tearDown(self): + clear_config() + + def test_validate_collection_name_valid(self): + """Test validation of valid collection names""" + valid_names = ["test", "test123", "test_collection", "test-collection", "TestCollection"] + for name in valid_names: + # Should not raise exception + self.template_service._validate_collection_name(name) + + def test_validate_collection_name_invalid(self): + """Test validation of invalid collection names""" + invalid_names = ["", " ", "test@collection", "test.collection", "test collection"] + for name in invalid_names: + with self.assertRaises(ConfiguratorException): + self.template_service._validate_collection_name(name) + + def test_replace_placeholders(self): + """Test placeholder replacement in templates""" + content = "Hello {{collection_name}}, welcome to {{collection_name}} system" + result = self.template_service._replace_placeholders(content, "test") + expected = "Hello test, welcome to test system" + self.assertEqual(result, expected) + + @patch('configurator.services.template_service.FileIO.get_document') + def test_load_template_success(self, mock_get_document): + """Test successful template loading""" + mock_get_document.return_value = "template content" + result = self.template_service._load_template("test.yaml") + self.assertEqual(result, "template content") + mock_get_document.assert_called_once_with("templates", "test.yaml") + + @patch('configurator.services.template_service.FileIO.get_document') + def test_load_template_not_found(self, mock_get_document): + """Test template loading when file not found""" + mock_get_document.side_effect = Exception("File not found") + with self.assertRaises(ConfiguratorException) as cm: + self.template_service._load_template("missing.yaml") + self.assertEqual(cm.exception.event.id, "TPL-01") + self.assertEqual(cm.exception.event.type, "TEMPLATE_NOT_FOUND") + + def test_process_configuration_template(self): + """Test configuration template processing""" + with patch.object(self.template_service, '_load_template') as mock_load: + mock_load.return_value = """ +description: Collection for managing {{collection_name}} +name: {{collection_name}} +versions: + - version: "0.0.1" + test_data: {{collection_name}}.0.0.1.json +""" + result = self.template_service.process_configuration_template("test_collection") + + expected = { 
+ "description": "Collection for managing test_collection", + "name": "test_collection", + "versions": [ + { + "version": "0.0.1", + "test_data": "test_collection.0.0.1.json" + } + ] + } + self.assertEqual(result, expected) + + def test_process_dictionary_template(self): + """Test dictionary template processing""" + with patch.object(self.template_service, '_load_template') as mock_load: + mock_load.return_value = """ +description: A {{collection_name}} collection for testing the schema system +type: object +properties: + _id: + description: The unique identifier for a {{collection_name}} + type: identifier + required: true + name: + description: The name of the {{collection_name}} + type: word + required: true +""" + result = self.template_service.process_dictionary_template("test_collection") + + expected = { + "description": "A test_collection collection for testing the schema system", + "type": "object", + "properties": { + "_id": { + "description": "The unique identifier for a test_collection", + "type": "identifier", + "required": True + }, + "name": { + "description": "The name of the test_collection", + "type": "word", + "required": True + } + } + } + self.assertEqual(result, expected) + + @patch('configurator.services.template_service.FileIO.get_document') + @patch('configurator.services.template_service.FileIO.put_document') + def test_create_collection_success(self, mock_put_document, mock_get_document): + """Test successful collection creation""" + # Mock template loading + config_template = """ +description: Collection for managing {{collection_name}} +name: {{collection_name}} +versions: + - version: "0.0.1" + test_data: {{collection_name}}.0.0.1.json +""" + dict_template = """ +description: A {{collection_name}} collection for testing the schema system +type: object +properties: + _id: + description: The unique identifier for a {{collection_name}} + type: identifier + required: true +""" + + def mock_get_side_effect(folder, filename): + if filename == "configuration.yaml": + return config_template + elif filename == "dictionary.yaml": + return dict_template + elif filename in ["test_collection.yaml", "test_collection.0.0.1.yaml"]: + # Simulate file not found + event = ConfiguratorEvent("FIL-02", "FILE_NOT_FOUND", {"file_path": f"{folder}/{filename}"}) + raise ConfiguratorException(f"File not found: {folder}/{filename}", event) + else: + raise Exception("File not found") + + mock_get_document.side_effect = mock_get_side_effect + + result = self.template_service.create_collection("test_collection") + + expected = { + "collection_name": "test_collection", + "configuration_file": "test_collection.yaml", + "dictionary_file": "test_collection.0.0.1.yaml" + } + self.assertEqual(result, expected) + + # Verify files were saved + self.assertEqual(mock_put_document.call_count, 2) + + @patch('configurator.services.template_service.FileIO.get_document') + def test_create_collection_configuration_exists(self, mock_get_document): + """Test collection creation when configuration already exists""" + # Mock that configuration file exists + def mock_get_side_effect(folder, filename): + if filename == "test_collection.yaml": + return "existing content" # File exists + elif filename in ["configuration.yaml", "dictionary.yaml"]: + return "template content" # Templates exist + else: + # Simulate file not found + event = ConfiguratorEvent("FIL-02", "FILE_NOT_FOUND", {"file_path": f"{folder}/{filename}"}) + raise ConfiguratorException(f"File not found: {folder}/{filename}", event) + + 
mock_get_document.side_effect = mock_get_side_effect + + with self.assertRaises(ConfiguratorException) as cm: + self.template_service.create_collection("test_collection") + self.assertEqual(cm.exception.event.id, "TPL-03") + self.assertEqual(cm.exception.event.type, "CONFIGURATION_EXISTS") + + @patch('configurator.services.template_service.FileIO.get_document') + def test_create_collection_dictionary_exists(self, mock_get_document): + """Test collection creation when dictionary already exists""" + # Mock that dictionary file exists + def mock_get_side_effect(folder, filename): + if filename == "test_collection.0.0.1.yaml": + return "existing content" # File exists + elif filename in ["configuration.yaml", "dictionary.yaml"]: + return "template content" # Templates exist + else: + # Simulate file not found + event = ConfiguratorEvent("FIL-02", "FILE_NOT_FOUND", {"file_path": f"{folder}/{filename}"}) + raise ConfiguratorException(f"File not found: {folder}/{filename}", event) + + mock_get_document.side_effect = mock_get_side_effect + + with self.assertRaises(ConfiguratorException) as cm: + self.template_service.create_collection("test_collection") + self.assertEqual(cm.exception.event.id, "TPL-03") + self.assertEqual(cm.exception.event.type, "DICTIONARY_EXISTS") + + +if __name__ == '__main__': + unittest.main() \ No newline at end of file diff --git a/tests/services/test_type_service_operations.py b/tests/services/test_type_service_operations.py new file mode 100644 index 0000000..c45fe65 --- /dev/null +++ b/tests/services/test_type_service_operations.py @@ -0,0 +1,446 @@ +import unittest +from unittest.mock import patch, MagicMock +from configurator.services.type_services import Type, TypeProperty +import os +import yaml +import json + + +def load_yaml(path): + with open(path, 'r') as f: + return yaml.safe_load(f) + +def load_json(path): + with open(path, 'r') as f: + return json.load(f) + +def set_config_input_folder(folder): + os.environ['INPUT_FOLDER'] = folder + from configurator.utils.config import Config + Config._instance = None + return Config.get_instance() + +def clear_config(): + if 'INPUT_FOLDER' in os.environ: + del os.environ['INPUT_FOLDER'] + from configurator.utils.config import Config + Config._instance = None + + +class TestTypeProperty(unittest.TestCase): + """Test cases for TypeProperty class - non-rendering operations""" + + def test_init_with_basic_property(self): + """Test TypeProperty initialization with basic property""" + property_data = { + "description": "Test description", + "type": "string", + "required": True + } + type_prop = TypeProperty("test_prop", property_data) + + self.assertEqual(type_prop.name, "test_prop") + self.assertEqual(type_prop.description, "Test description") + self.assertEqual(type_prop.type, "string") + self.assertTrue(type_prop.required) + self.assertFalse(type_prop.additional_properties) + + def test_init_with_schema(self): + """Test TypeProperty initialization with universal primitive schema""" + property_data = { + "description": "Test description", + "schema": {"type": "string", "format": "email"} + } + type_prop = TypeProperty("test_prop", property_data) + self.assertEqual(type_prop.schema, property_data["schema"]) + self.assertIsNone(type_prop.json_type) + self.assertIsNone(type_prop.bson_type) + self.assertTrue(type_prop.is_primitive) + self.assertTrue(type_prop.is_universal) + result = type_prop.to_dict() + self.assertEqual(result["description"], "Test description") + self.assertEqual(result["schema"], {"type": "string", "format": 
"email"}) + self.assertNotIn("json_type", result) + self.assertNotIn("bson_type", result) + + def test_init_with_array_type(self): + """Test TypeProperty initialization with array type""" + property_data = { + "description": "Array of strings", + "type": "array", + "items": { + "description": "String item", + "type": "string" + } + } + type_prop = TypeProperty("test_array", property_data) + + self.assertEqual(type_prop.type, "array") + self.assertIsInstance(type_prop.items, TypeProperty) + self.assertEqual(type_prop.items.description, "String item") + + def test_init_with_object_type(self): + """Test TypeProperty initialization with object type""" + property_data = { + "description": "Object with properties", + "type": "object", + "additionalProperties": True, + "properties": { + "name": { + "description": "Name property", + "type": "string" + }, + "age": { + "description": "Age property", + "type": "number" + } + } + } + type_prop = TypeProperty("test_object", property_data) + + self.assertEqual(type_prop.type, "object") + self.assertTrue(type_prop.additional_properties) + self.assertIn("name", type_prop.properties) + self.assertIn("age", type_prop.properties) + self.assertEqual(type_prop.properties["name"].description, "Name property") + + def test_init_with_missing_values(self): + """Test TypeProperty initialization with missing values""" + property_data = {} + type_prop = TypeProperty("test_prop", property_data) + + self.assertEqual(type_prop.description, "Missing Required Description") + self.assertIsNone(type_prop.schema) + self.assertIsNone(type_prop.json_type) + self.assertIsNone(type_prop.bson_type) + self.assertEqual(type_prop.type, "void") + self.assertFalse(type_prop.required) + self.assertFalse(type_prop.additional_properties) + + def test_to_dict_basic(self): + """Test to_dict method for basic property""" + property_data = { + "description": "Test description", + "type": "string", + "required": True + } + type_prop = TypeProperty("test_prop", property_data) + result = type_prop.to_dict() + + expected = { + "description": "Test description", + "type": "string", + "required": True + } + self.assertEqual(result, expected) + + def test_to_dict_with_array(self): + """Test to_dict method for array property""" + property_data = { + "description": "Array of strings", + "type": "array", + "required": True, + "items": { + "description": "String item", + "type": "string" + } + } + type_prop = TypeProperty("test_array", property_data) + result = type_prop.to_dict() + + self.assertEqual(result["description"], "Array of strings") + self.assertEqual(result["type"], "array") + self.assertTrue(result["required"]) + self.assertIn("items", result) + + def test_to_dict_with_object(self): + """Test to_dict method for object property""" + property_data = { + "description": "Object with properties", + "type": "object", + "additionalProperties": True, + "required": False, + "properties": { + "name": { + "description": "Name property", + "type": "string" + } + } + } + type_prop = TypeProperty("test_object", property_data) + result = type_prop.to_dict() + + self.assertEqual(result["description"], "Object with properties") + self.assertEqual(result["type"], "object") + self.assertTrue(result["additionalProperties"]) + self.assertFalse(result["required"]) + self.assertIn("properties", result) + self.assertIn("name", result["properties"]) + + def test_to_dict_with_primitive_universal(self): + """Test to_dict method for primitive universal property""" + property_data = { + "description": "Test 
description", + "schema": {"type": "string", "format": "email"} + } + type_prop = TypeProperty("test_prop", property_data) + result = type_prop.to_dict() + + self.assertEqual(result["description"], "Test description") + self.assertEqual(result["schema"], {"type": "string", "format": "email"}) + self.assertIn("required", result) + self.assertFalse(result["required"]) + + +class TestType(unittest.TestCase): + """Test cases for Type class - non-rendering operations""" + + @patch('configurator.services.type_services.FileIO') + @patch('configurator.services.type_services.Config') + def test_init_with_file_name(self, mock_config_class, mock_file_io): + """Test Type initialization with file name""" + mock_config = MagicMock() + mock_config_class.get_instance.return_value = mock_config + mock_config.TYPE_FOLDER = "/test/types" + + mock_file_io.get_document.return_value = { + "description": "Test type description", + "type": "object", + "properties": { + "name": {"type": "string"} + } + } + + type_instance = Type("test_type.yaml") + + self.assertEqual(type_instance.file_name, "test_type.yaml") + self.assertEqual(type_instance.property.description, "Test type description") + mock_file_io.get_document.assert_called_once_with("/test/types", "test_type.yaml") + + @patch('configurator.services.type_services.Config') + def test_init_with_document(self, mock_config_class): + """Test Type initialization with document""" + mock_config = MagicMock() + mock_config_class.get_instance.return_value = mock_config + + document = { + "description": "Test type description", + "type": "object", + "properties": { + "name": {"type": "string"} + } + } + + type_instance = Type("test_type.yaml", document) + + self.assertEqual(type_instance.file_name, "test_type.yaml") + self.assertEqual(type_instance.property.description, "Test type description") + self.assertIn("name", type_instance.property.properties) + + @patch('configurator.services.type_services.Config') + def test_to_dict(self, mock_config_class): + """Test to_dict method""" + mock_config = MagicMock() + mock_config_class.get_instance.return_value = mock_config + + document = { + "description": "Test type description", + "type": "object", + "properties": { + "name": {"type": "string"} + } + } + + type_instance = Type("test_type", document) + result = type_instance.property.to_dict() + + self.assertEqual(result["description"], "Test type description") + self.assertEqual(result["type"], "object") + self.assertIn("properties", result) + self.assertIn("name", result["properties"]) + + +class TestTypePropertyCanonical(unittest.TestCase): + """Test canonical scenarios for TypeProperty - non-rendering operations""" + + def test_object_type(self): + """Test object type property initialization and to_dict""" + property_data = { + "description": "Object with properties", + "type": "object", + "additionalProperties": True, + "properties": { + "name": { + "description": "Name property", + "type": "string" + }, + "age": { + "description": "Age property", + "type": "number" + } + } + } + type_prop = TypeProperty("test_object", property_data) + + # Test initialization + self.assertEqual(type_prop.type, "object") + self.assertTrue(type_prop.additional_properties) + self.assertIn("name", type_prop.properties) + self.assertIn("age", type_prop.properties) + + # Test to_dict + result = type_prop.to_dict() + self.assertEqual(result["description"], "Object with properties") + self.assertEqual(result["type"], "object") + self.assertTrue(result["additionalProperties"]) + 
self.assertIn("properties", result) + + def test_array_type(self): + """Test array type property initialization and to_dict""" + property_data = { + "description": "Array of strings", + "type": "array", + "items": { + "description": "String item", + "type": "string" + } + } + type_prop = TypeProperty("test_array", property_data) + + # Test initialization + self.assertEqual(type_prop.type, "array") + self.assertIsInstance(type_prop.items, TypeProperty) + self.assertEqual(type_prop.items.description, "String item") + + # Test to_dict + result = type_prop.to_dict() + self.assertEqual(result["description"], "Array of strings") + self.assertEqual(result["type"], "array") + self.assertIn("items", result) + + def test_primitive_with_schema(self): + """Test primitive property with json_type/bson_type initialization and to_dict""" + property_data = { + "description": "Test description", + "json_type": {"type": "string"}, + "bson_type": {"bsonType": "string"} + } + type_prop = TypeProperty("test_prop", property_data) + self.assertIsNone(type_prop.schema) + self.assertEqual(type_prop.json_type, {"type": "string"}) + self.assertEqual(type_prop.bson_type, {"bsonType": "string"}) + self.assertTrue(type_prop.is_primitive) + self.assertFalse(type_prop.is_universal) + result = type_prop.to_dict() + self.assertEqual(result["description"], "Test description") + self.assertEqual(result["json_type"], {"type": "string"}) + self.assertEqual(result["bson_type"], {"bsonType": "string"}) + self.assertNotIn("schema", result) + + +class TestTypeCanonical(unittest.TestCase): + """Test canonical scenarios for Type - non-rendering operations""" + + def setUp(self): + self.config = set_config_input_folder("tests/test_cases/small_sample") + + def tearDown(self): + clear_config() + + def test_type_object(self): + """Test object type initialization and to_dict""" + type_data = { + "description": "Test object type description", + "type": "object", + "properties": { + "name": { + "description": "Name property", + "type": "string" + }, + "age": { + "description": "Age property", + "type": "number" + } + } + } + type_instance = Type("test_object.yaml", type_data) + + # Test initialization + self.assertEqual(type_instance.file_name, "test_object.yaml") + self.assertEqual(type_instance.property.description, "Test object type description") + self.assertEqual(type_instance.property.type, "object") + self.assertIn("name", type_instance.property.properties) + self.assertIn("age", type_instance.property.properties) + + # Test to_dict + result = type_instance.property.to_dict() + self.assertEqual(result["description"], "Test object type description") + self.assertEqual(result["type"], "object") + self.assertIn("properties", result) + self.assertIn("name", result["properties"]) + self.assertIn("age", result["properties"]) + + # Test full to_dict + full_result = type_instance.to_dict() + self.assertEqual(full_result["file_name"], "test_object.yaml") + self.assertEqual(full_result["_locked"], False) + self.assertIn("description", full_result) + self.assertIn("type", full_result) + + def test_type_array(self): + """Test array type initialization and to_dict""" + type_data = { + "description": "Test array type description", + "type": "array", + "items": { + "type": "string" + } + } + type_instance = Type("test_array.yaml", type_data) + + # Test initialization + self.assertEqual(type_instance.file_name, "test_array.yaml") + self.assertEqual(type_instance.property.description, "Test array type description") + 
self.assertEqual(type_instance.property.type, "array") + self.assertIsNotNone(type_instance.property.items) + + # Test to_dict + result = type_instance.property.to_dict() + self.assertEqual(result["description"], "Test array type description") + self.assertEqual(result["type"], "array") + self.assertIn("items", result) + + # Test full to_dict + full_result = type_instance.to_dict() + self.assertEqual(full_result["file_name"], "test_array.yaml") + self.assertEqual(full_result["_locked"], False) + + def test_type_primitive_schema(self): + """Test primitive type with json_type/bson_type initialization and to_dict""" + type_data = { + "description": "Test primitive type description", + "json_type": {"type": "string", "format": "email"}, + "bson_type": {"bsonType": "string"} + } + type_instance = Type("test_primitive.yaml", type_data) + + # Test initialization + self.assertEqual(type_instance.file_name, "test_primitive.yaml") + self.assertEqual(type_instance.property.description, "Test primitive type description") + self.assertTrue(type_instance.property.is_primitive) + self.assertFalse(type_instance.property.is_universal) + + # Test to_dict + result = type_instance.property.to_dict() + self.assertEqual(result["description"], "Test primitive type description") + self.assertIn("json_type", result) + self.assertIn("bson_type", result) + + # Test full to_dict + full_result = type_instance.to_dict() + self.assertEqual(full_result["file_name"], "test_primitive.yaml") + self.assertEqual(full_result["_locked"], False) + + +if __name__ == '__main__': + unittest.main() \ No newline at end of file diff --git a/tests/services/test_type_service_renders.py b/tests/services/test_type_service_renders.py new file mode 100644 index 0000000..25c913e --- /dev/null +++ b/tests/services/test_type_service_renders.py @@ -0,0 +1,165 @@ +import unittest +from configurator.services.type_services import Type +from configurator.utils.version_number import VersionNumber +import os +import yaml +import json + + +def load_yaml(path): + with open(path, 'r') as f: + return yaml.safe_load(f) + +def load_json(path): + with open(path, 'r') as f: + return json.load(f) + +def set_config_input_folder(folder): + os.environ['INPUT_FOLDER'] = folder + from configurator.utils.config import Config + Config._instance = None + return Config.get_instance() + +def clear_config(): + if 'INPUT_FOLDER' in os.environ: + del os.environ['INPUT_FOLDER'] + from configurator.utils.config import Config + Config._instance = None + + +class TestTypeRendering(unittest.TestCase): + """Test type rendering against verified output files""" + + def setUp(self): + self.config = set_config_input_folder("./tests/test_cases/type_unit_test") + + def tearDown(self): + clear_config() + + def test_all_type_renders(self): + """Test that each individual type render matches its verified output""" + # Test JSON schema type renders + json_dir = f"{self.config.INPUT_FOLDER}/verified_output/type_renders/json_schema" + for file in os.listdir(json_dir): + if file.endswith('.yaml'): + type_name = file.replace('.yaml', '') + self._test_type_json_render(type_name, file) + + # Test BSON schema type renders + bson_dir = f"{self.config.INPUT_FOLDER}/verified_output/type_renders/bson_schema" + for file in os.listdir(bson_dir): + if file.endswith('.json'): + type_name = file.replace('.json', '') + self._test_type_bson_render(type_name, file) + + def test_all_full_schemas(self): + """Test that each full schema render matches its verified output""" + # Test JSON schema full renders + json_dir = 
f"{self.config.INPUT_FOLDER}/verified_output/full_schemas/json_schema" + if os.path.exists(json_dir): + for file in os.listdir(json_dir): + if file.endswith('.yaml'): + type_name = file.replace('.yaml', '') + self._test_full_json_render(type_name, file) + + # Test BSON schema full renders + bson_dir = f"{self.config.INPUT_FOLDER}/verified_output/full_schemas/bson_schema" + if os.path.exists(bson_dir): + for file in os.listdir(bson_dir): + if file.endswith('.json'): + type_name = file.replace('.json', '') + self._test_full_bson_render(type_name, file) + + def _test_type_json_render(self, type_name, expected_file): + """Test individual type JSON schema render""" + # Load type and render + type_path = f"{self.config.INPUT_FOLDER}/types/{type_name}.yaml" + type_data = load_yaml(type_path) + type_instance = Type(type_name, type_data) + actual = type_instance.get_json_schema() + + # Load expected + expected_path = f"{self.config.INPUT_FOLDER}/verified_output/type_renders/json_schema/{expected_file}" + expected = load_yaml(expected_path) + + # Compare + self._assert_dict_equality(actual, expected, f"Type JSON schema for {type_name}") + + def _test_type_bson_render(self, type_name, expected_file): + """Test individual type BSON schema render""" + # Load type and render + type_path = f"{self.config.INPUT_FOLDER}/types/{type_name}.yaml" + type_data = load_yaml(type_path) + type_instance = Type(type_name, type_data) + actual = type_instance.get_bson_schema() + + # Load expected + expected_path = f"{self.config.INPUT_FOLDER}/verified_output/type_renders/bson_schema/{expected_file}" + expected = load_json(expected_path) + + # Compare + self._assert_dict_equality(actual, expected, f"Type BSON schema for {type_name}") + + def _test_full_json_render(self, type_name, expected_file): + """Test full JSON schema render with all references resolved""" + # Load type and render with full resolution + type_path = f"{self.config.INPUT_FOLDER}/types/{type_name}.yaml" + type_data = load_yaml(type_path) + type_instance = Type(type_name, type_data) + actual = type_instance.get_json_schema() + + # Load expected + expected_path = f"{self.config.INPUT_FOLDER}/verified_output/full_schemas/json_schema/{expected_file}" + expected = load_yaml(expected_path) + + # Compare + self._assert_dict_equality(actual, expected, f"Full JSON schema for {type_name}") + + def _test_full_bson_render(self, type_name, expected_file): + """Test full BSON schema render with all references resolved""" + # Load type and render with full resolution + type_path = f"{self.config.INPUT_FOLDER}/types/{type_name}.yaml" + type_data = load_yaml(type_path) + type_instance = Type(type_name, type_data) + actual = type_instance.get_bson_schema() + + # Load expected + expected_path = f"{self.config.INPUT_FOLDER}/verified_output/full_schemas/bson_schema/{expected_file}" + expected = load_json(expected_path) + + # Compare + self._assert_dict_equality(actual, expected, f"Full BSON schema for {type_name}") + + def _assert_dict_equality(self, actual, expected, context): + """Assert dictionary equality with detailed diff reporting""" + if actual != expected: + diff = self._dict_diff(actual, expected) + self.fail(f"{context} mismatch:\n{diff}") + + def _dict_diff(self, dict1, dict2): + """Generate a detailed diff between two dictionaries""" + def _diff_dict(d1, d2, path=""): + diff = [] + all_keys = set(d1.keys()) | set(d2.keys()) + + for key in sorted(all_keys): + current_path = f"{path}.{key}" if path else key + + if key not in d1: + diff.append(f"Missing in 
actual: {current_path}") + elif key not in d2: + diff.append(f"Extra in actual: {current_path} = {d1[key]}") + elif isinstance(d1[key], dict) and isinstance(d2[key], dict): + diff.extend(_diff_dict(d1[key], d2[key], current_path)) + elif d1[key] != d2[key]: + diff.append(f"Value mismatch at {current_path}:") + diff.append(f" Expected: {d2[key]}") + diff.append(f" Actual: {d1[key]}") + + return diff + + return "\n".join(_diff_dict(dict1, dict2)) + + +if __name__ == '__main__': + unittest.main() \ No newline at end of file diff --git a/tests/stepci/configurations.yaml b/tests/stepci/configurations.yaml new file mode 100644 index 0000000..ecd93e8 --- /dev/null +++ b/tests/stepci/configurations.yaml @@ -0,0 +1,246 @@ +version: "1.1" +name: Test Configurations Endpoints +env: + host: localhost:8081 + +tests: + configurations: + name: Test Configurations Endpoints + steps: + # Setup - Delete test file if it exists (no checks needed) + - name: Delete Test Configuration (Setup) + http: + url: http://${{env.host}}/api/configurations/test_config.yaml/ + method: DELETE + check: + status: /200|500/ + + # Create - PUT new configuration + - name: Create Test Configuration + http: + url: http://${{env.host}}/api/configurations/test_config.yaml/ + method: PUT + headers: + Content-Type: application/json + body: | + { + "description": "Test collection for stepCI testing", + "versions": [ + { + "version": "1.0.0.1", + "drop_indexes": [], + "migrations": [], + "add_indexes": [], + "test_data": "test_data.1.0.0.1.json" + } + ] + } + check: + status: /200/ + jsonpath: + file_name: test_config.yaml + _locked: false + schema: + created_at: + type: string + format: date-time + updated_at: + type: string + format: date-time + size: + type: number + + # Read - GET the created configuration + - name: Get Test Configuration + http: + url: http://${{env.host}}/api/configurations/test_config.yaml/ + method: GET + check: + status: /200/ + jsonpath: + description: "Test collection for stepCI testing" + versions[0].version: "1.0.0.1" + versions[0].drop_indexes: [] + versions[0].migrations: [] + versions[0].add_indexes: [] + versions[0].test_data: "test_data.1.0.0.1.json" + + # Update - PUT to modify the configuration + - name: Update Test Configuration + http: + url: http://${{env.host}}/api/configurations/test_config.yaml/ + method: PUT + headers: + Content-Type: application/json + body: | + { + "description": "Updated test collection for stepCI testing", + "versions": [ + { + "version": "1.0.0.1", + "drop_indexes": [], + "migrations": [], + "add_indexes": [], + "test_data": "test_data.1.0.0.1.json" + }, + { + "version": "1.0.1.2", + "drop_indexes": [], + "migrations": [], + "add_indexes": [], + "test_data": "test_data.1.0.1.2.json" + } + ] + } + check: + status: /200/ + jsonpath: + file_name: test_config.yaml + _locked: false + schema: + created_at: + type: string + format: date-time + updated_at: + type: string + format: date-time + size: + type: number + + # Lock - PUT to lock the configuration + - name: Lock Test Configuration + http: + url: http://${{env.host}}/api/configurations/test_config.yaml/ + method: PUT + headers: + Content-Type: application/json + body: | + { + "description": "Updated test collection for stepCI testing", + "versions": [ + { + "version": "1.0.0.1", + "drop_indexes": [], + "migrations": [], + "add_indexes": [], + "test_data": "test_data.1.0.0.1.json" + }, + { + "version": "1.0.1.2", + "drop_indexes": [], + "migrations": [], + "add_indexes": [], + "test_data": "test_data.1.0.1.2.json" + } + ], 
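The _dict_diff helper in the rendering tests above walks both dictionaries key-by-key and reports missing keys, extra keys, and value mismatches with dotted paths. A standalone sketch of the same logic with a worked example (the name dict_diff is illustrative; the tests use the private _diff_dict closure):

```python
def dict_diff(actual, expected, path=""):
    """Standalone sketch of the _diff_dict logic used by the rendering tests."""
    lines = []
    for key in sorted(set(actual) | set(expected)):
        current = f"{path}.{key}" if path else key
        if key not in actual:
            lines.append(f"Missing in actual: {current}")
        elif key not in expected:
            lines.append(f"Extra in actual: {current} = {actual[key]}")
        elif isinstance(actual[key], dict) and isinstance(expected[key], dict):
            lines.extend(dict_diff(actual[key], expected[key], current))
        elif actual[key] != expected[key]:
            lines.append(f"Value mismatch at {current}: expected {expected[key]!r}, got {actual[key]!r}")
    return lines

# Example: a single nested mismatch yields one dotted-path report.
print(dict_diff({"properties": {"name": {"type": "string"}}},
                {"properties": {"name": {"type": "word"}}}))
# -> ["Value mismatch at properties.name.type: expected 'word', got 'string'"]
```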
+ "_locked": true + } + check: + status: /200/ + jsonpath: + file_name: test_config.yaml + _locked: true + + # Try to Delete a locked configuration + - name: Delete a locked configuration - should fail + http: + url: http://${{env.host}}/api/configurations/test_config.yaml/ + method: DELETE + check: + status: /500/ + jsonpath: + status: FAILURE + + # Unlock - PUT to unlock the configuration + - name: Unlock Test Configuration + http: + url: http://${{env.host}}/api/configurations/test_config.yaml/ + method: PUT + headers: + Content-Type: application/json + body: | + { + "description": "Updated test collection for stepCI testing", + "versions": [ + { + "version": "1.0.0.1", + "drop_indexes": [], + "migrations": [], + "add_indexes": [], + "test_data": "test_data.1.0.0.1.json" + }, + { + "version": "1.0.1.2", + "drop_indexes": [], + "migrations": [], + "add_indexes": [], + "test_data": "test_data.1.0.1.2.json" + } + ], + "_locked": false + } + check: + status: /200/ + jsonpath: + file_name: test_config.yaml + _locked: false + + # Lock All - PATCH to lock all configurations + - name: Lock All Configurations + http: + url: http://${{env.host}}/api/configurations/ + method: PATCH + check: + status: /200/ + jsonpath: + status: SUCCESS + schema: + event_id: + type: string + event_type: + type: string + sub_events: + type: array + + # Unlock before cleanup + - name: Unlock Test Configuration (Cleanup) + http: + url: http://${{env.host}}/api/configurations/test_config.yaml/ + method: PUT + headers: + Content-Type: application/json + body: | + { + "description": "Updated test collection for stepCI testing", + "versions": [ + { + "version": "1.0.0.1", + "drop_indexes": [], + "migrations": [], + "add_indexes": [], + "test_data": "test_data.1.0.0.1.json" + }, + { + "version": "1.0.1.2", + "drop_indexes": [], + "migrations": [], + "add_indexes": [], + "test_data": "test_data.1.0.1.2.json" + } + ], + "_locked": false + } + check: + status: /200/ + jsonpath: + file_name: test_config.yaml + _locked: false + + # Cleanup - DELETE the test configuration + - name: Delete Test Configuration (Cleanup) + http: + url: http://${{env.host}}/api/configurations/test_config.yaml/ + method: DELETE + check: + status: /200/ + \ No newline at end of file diff --git a/tests/stepci/dictionaries.yaml b/tests/stepci/dictionaries.yaml new file mode 100644 index 0000000..7fbabec --- /dev/null +++ b/tests/stepci/dictionaries.yaml @@ -0,0 +1,228 @@ +version: "1.1" +name: Test Dictionaries Endpoints +env: + host: localhost:8081 + +tests: + dictionaries: + name: Test Dictionaries Endpoints + steps: +# # Setup - Delete test file if it exists (no checks needed) +# - name: Delete Test Dictionary (Setup) +# http: +# url: http://${{env.host}}/api/dictionaries/test_dict.yaml/ +# method: DELETE +# check: +# status: /200|500/ + + # Create - PUT new dictionary (simple, no properties) + - name: Create Test Dictionary + http: + url: http://${{env.host}}/api/dictionaries/test_dict.yaml/ + method: PUT + headers: + Content-Type: application/json + body: | + { + "description": "Test dictionary for stepCI testing" + } + check: + status: /200/ + jsonpath: + file_name: test_dict.yaml + _locked: false + schema: + created_at: + type: string + format: date-time + updated_at: + type: string + format: date-time + size: + type: number + + # Read - GET the created dictionary (expect no version, no properties) + - name: Get Test Dictionary + http: + url: http://${{env.host}}/api/dictionaries/test_dict.yaml/ + method: GET + check: + status: /200/ + jsonpath: + 
description: "Test dictionary for stepCI testing" + type: "void" + + # Update - PUT to modify the dictionary + - name: Update Test Dictionary + http: + url: http://${{env.host}}/api/dictionaries/test_dict.yaml/ + method: PUT + headers: + Content-Type: application/json + body: | + { + "description": "Updated test dictionary for stepCI testing", + "version": "1.0.1", + "properties": { + "name": { + "type": "string", + "description": "Name field" + }, + "email": { + "type": "email", + "description": "Email field" + }, + "phone": { + "type": "us_phone", + "description": "Phone field" + } + } + } + check: + status: /200/ + jsonpath: + file_name: test_dict.yaml + _locked: false + schema: + created_at: + type: string + format: date-time + updated_at: + type: string + format: date-time + size: + type: number + + # Lock - PUT to lock the dictionary + - name: Lock Test Dictionary + http: + url: http://${{env.host}}/api/dictionaries/test_dict.yaml/ + method: PUT + headers: + Content-Type: application/json + body: | + { + "description": "Updated test dictionary for stepCI testing", + "version": "1.0.1", + "properties": { + "name": { + "type": "string", + "description": "Name field" + }, + "email": { + "type": "email", + "description": "Email field" + }, + "phone": { + "type": "us_phone", + "description": "Phone field" + } + }, + "_locked": true + } + check: + status: /200/ + jsonpath: + file_name: test_dict.yaml + _locked: true + + # Try to Delete a locked dictionary + - name: Delete a locked dictionary - should fail + http: + url: http://${{env.host}}/api/dictionaries/test_dict.yaml/ + method: DELETE + check: + status: /500/ + jsonpath: + status: FAILURE + + # Unlock - PUT to unlock the dictionary + - name: Unlock Test Dictionary + http: + url: http://${{env.host}}/api/dictionaries/test_dict.yaml/ + method: PUT + headers: + Content-Type: application/json + body: | + { + "description": "Updated test dictionary for stepCI testing", + "version": "1.0.1", + "properties": { + "name": { + "type": "string", + "description": "Name field" + }, + "email": { + "type": "email", + "description": "Email field" + }, + "phone": { + "type": "us_phone", + "description": "Phone field" + } + }, + "_locked": false + } + check: + status: /200/ + jsonpath: + file_name: test_dict.yaml + _locked: false + + # Lock All - PATCH to lock all dictionaries + - name: Lock All Dictionaries + http: + url: http://${{env.host}}/api/dictionaries/ + method: PATCH + check: + status: /200/ + jsonpath: + status: SUCCESS + schema: + event_id: + type: string + event_type: + type: string + sub_events: + type: array + + # Unlock before cleanup + - name: Unlock Test Dictionary (Cleanup) + http: + url: http://${{env.host}}/api/dictionaries/test_dict.yaml/ + method: PUT + headers: + Content-Type: application/json + body: | + { + "description": "Updated test dictionary for stepCI testing", + "version": "1.0.1", + "properties": { + "name": { + "type": "string", + "description": "Name field" + }, + "email": { + "type": "email", + "description": "Email field" + }, + "phone": { + "type": "us_phone", + "description": "Phone field" + } + }, + "_locked": false + } + check: + status: /200/ + jsonpath: + file_name: test_dict.yaml + _locked: false + + # Cleanup - DELETE the test dictionary + - name: Delete Test Dictionary (Cleanup) + http: + url: http://${{env.host}}/api/dictionaries/test_dict.yaml/ + method: DELETE + check: + status: /200/ \ No newline at end of file diff --git a/tests/stepci/enumerators.yaml b/tests/stepci/enumerators.yaml new file mode 
100644 index 0000000..a7ccd6c --- /dev/null +++ b/tests/stepci/enumerators.yaml @@ -0,0 +1,109 @@ +version: "1.1" +name: Test Enumerators Endpoints +env: + host: localhost:8081 + +tests: + enumerators: + name: Test Enumerators Endpoints + steps: + # Test PUT with modified data (preserving existing content and adding new test values) + - name: Put Enumerators + http: + url: http://${{env.host}}/api/enumerators/ + method: PUT + headers: + Content-Type: application/json + body: | + [ + { + "version": 0, + "enumerators": {} + }, + { + "version": 1, + "enumerators": { + "default_status": { + "active": "Not Deleted", + "archived": "Soft Delete Indicator" + }, + "test_enum": { + "foo": "bar" + } + } + }, + { + "version": 2, + "enumerators": { + "default_status": { + "draft": "Draft", + "active": "Not Deleted", + "archived": "Soft Delete Indicator" + }, + "test_enum": { + "foo": "bar" + } + } + } + ] + check: + status: /200/ + jsonpath: + $[1].enumerators.test_enum.foo: "bar" + $[1].enumerators.default_status.active: "Not Deleted" + schema: + created_at: + type: string + format: date-time + updated_at: + type: string + format: date-time + size: + type: number + + # Verify the PUT worked + - name: Get Enumerators After Put + http: + url: http://${{env.host}}/api/enumerators/ + method: GET + check: + status: /200/ + jsonpath: + $[1].enumerators.test_enum.foo: "bar" + $[1].enumerators.default_status.active: "Not Deleted" + + # Restore to original state from enumerators.json + - name: Restore Original Enumerators + http: + url: http://${{env.host}}/api/enumerators/ + method: PUT + headers: + Content-Type: application/json + body: | + [ + { + "version": 0, + "enumerators": {} + }, + { + "version": 1, + "enumerators": { + "default_status": { + "active": "Not Deleted", + "archived": "Soft Delete Indicator" + } + } + }, + { + "version": 2, + "enumerators": { + "default_status": { + "draft": "Draft", + "active": "Not Deleted", + "archived": "Soft Delete Indicator" + } + } + } + ] + check: + status: /200/ \ No newline at end of file diff --git a/tests/stepci/large_sample.yaml b/tests/stepci/large_sample.yaml deleted file mode 100644 index bfa1626..0000000 --- a/tests/stepci/large_sample.yaml +++ /dev/null @@ -1,836 +0,0 @@ -name: mongodb_api_large_sample -version: "1.0" -env: - host: localhost:8081 - -tests: - collections: - name: Collection Processing - steps: - - name: Process A Collection - http: - url: http://${{env.host}}/api/collections/user/ - method: POST - headers: - Content-Type: application/json - check: - status: /200/ - - name: Process All Collections - http: - url: http://${{env.host}}/api/collections/ - method: POST - headers: - Content-Type: application/json - check: - status: /200/ - - name: List Collections - http: - url: http://${{env.host}}/api/collections/ - method: GET - check: - status: /200/ - schema: - type: "array" - items: - type: "object" - properties: - collection_name: - type: "string" - version: - type: "string" - targeted_version: - type: "string" - required: - - collection_name - - version - - targeted_version - - name: GET A Collection - http: - url: http://${{env.host}}/api/collections/user/ - method: GET - headers: - Content-Type: application/json - check: - status: /200/ - jsonpath: - name: "user" - title: "User Collection" - description: "Collection for managing users" - versions[0].version: "1.0.0.1" - versions[0].test_data: "user.1.0.0.1.json" - versions[0].add_indexes[0].name: "nameIndex" - versions[0].add_indexes[0].key.userName: 1 - 
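The enumerators payload above is a list of versioned snapshots, where each snapshot carries the complete enumerator set for its version rather than a delta. A small illustrative helper for reading one enumeration out of such a payload (function name and shape assumed from the test data):

```python
def get_enumeration(snapshots, version, name):
    """Return the {value: description} map for one enumerator at a version."""
    for snapshot in snapshots:
        if snapshot["version"] == version:
            return snapshot["enumerators"].get(name, {})
    raise KeyError(f"no enumerator snapshot for version {version}")

snapshots = [
    {"version": 0, "enumerators": {}},
    {"version": 1, "enumerators": {"default_status": {
        "active": "Not Deleted", "archived": "Soft Delete Indicator"}}},
]
assert get_enumeration(snapshots, 1, "default_status")["active"] == "Not Deleted"
```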
versions[0].add_indexes[0].options.unique: true - versions[0].add_indexes[1].name: "statusIndex" - versions[0].add_indexes[1].key.status: 1 - versions[0].add_indexes[1].options.unique: false - versions[0].add_indexes[2].name: "savedIndex" - versions[0].add_indexes[2].key["last_saved.saved_at"]: 1 - versions[0].add_indexes[2].options.unique: false - versions[1].version: "1.0.0.2" - versions[1].test_data: "user.1.0.0.2.json" - versions[1].drop_indexes[0]: "statusIndex" - versions[2].version: "1.0.1.3" - versions[2].test_data: "user.1.0.1.3.json" - versions[2].aggregations[0].name: "merge_name_fields" - versions[2].aggregations[0].pipeline[0]["$addFields"].full_name["$concat"][0]: "$first_name" - versions[2].aggregations[0].pipeline[0]["$addFields"].full_name["$concat"][1]: " " - versions[2].aggregations[0].pipeline[0]["$addFields"].full_name["$concat"][2]: "$last_name" - versions[2].aggregations[0].pipeline[1]["$unset"][0]: "first_name" - versions[2].aggregations[0].pipeline[1]["$unset"][1]: "last_name" - versions[2].aggregations[0].pipeline[2]["$out"]: "user" - bson: - name: Render BSON Schemas - steps: - - name: Render User BSON Schema - http: - url: http://${{env.host}}/api/render/bson_schema/user.1.0.0.1/ - method: GET - headers: - Content-Type: application/json - check: - status: /200/ - jsonpath: - title: "User Collection" - description: "A user collection for testing the schema system" - bsonType: "object" - additionalProperties: false - properties._id.bsonType: "objectId" - properties._id.description: "The unique identifier for a user" - properties.user_name.bsonType: "string" - properties.user_name.description: "Username" - properties.user_name.pattern: "^[^\\s]{4,40}$" - properties.first_name.bsonType: "string" - properties.first_name.description: "Users First Name" - properties.first_name.pattern: "^[^\\s]{4,40}$" - properties.last_name.bsonType: "string" - properties.last_name.description: "Users Last Name" - properties.last_name.pattern: "^[^\\s]{4,40}$" - properties.status.bsonType: "string" - properties.status.description: "document status" - properties.status.enum[0]: "active" - properties.status.enum[1]: "archived" - properties.last_saved.bsonType: "object" - properties.last_saved.title: "Breadcrumb" - properties.last_saved.description: "The last time this document was saved" - properties.last_saved.additionalProperties: false - properties.last_saved.properties.from_ip.bsonType: "string" - properties.last_saved.properties.from_ip.description: "Http Request remote IP address" - properties.last_saved.properties.from_ip.pattern: "^[^\\s]{4,40}$" - properties.last_saved.properties.by_user.bsonType: "string" - properties.last_saved.properties.by_user.description: "ID Of User" - properties.last_saved.properties.by_user.pattern: "^[^\\s]{4,40}$" - properties.last_saved.properties.at_time.bsonType: "date" - properties.last_saved.properties.at_time.description: "The date-time when last updated" - properties.last_saved.properties.correlation_id.bsonType: "string" - properties.last_saved.properties.correlation_id.description: "The logging correlation ID of the update transaction" - properties.last_saved.properties.correlation_id.pattern: "^[^\\s]{4,40}$" - properties.last_saved.required[0]: "from_ip" - properties.last_saved.required[1]: "by_user" - properties.last_saved.required[2]: "at_time" - properties.last_saved.required[3]: "correlation_id" - required[0]: "_id" - required[1]: "user_name" - required[2]: "status" - required[3]: "last_saved" - - name: Render media BSON Schema - http: 
- url: http://${{env.host}}/api/render/bson_schema/media.1.0.0.1 - method: GET - headers: - Content-Type: application/json - check: - status: /200/ - jsonpath: - title: "Media" - description: "A media item in the system" - bsonType: "object" - additionalProperties: false - properties._id.bsonType: "objectId" - properties._id.description: "The unique identifier for the media" - properties.title.bsonType: "string" - properties.title.description: "The title of the media" - properties.title.pattern: "^[a-zA-Z0-9\\s.,!?-]{4,255}$" - properties.status.bsonType: "string" - properties.status.description: "The current status of the media" - properties.status.enum[0]: "draft" - properties.status.enum[1]: "published" - properties.status.enum[2]: "archived" - properties.type.bsonType: "string" - properties.type.description: "The type of media" - properties.type.enum[0]: "movie" - properties.type.enum[1]: "tv_show" - properties.type.enum[2]: "documentary" - properties.type.enum[3]: "short" - properties.tags.bsonType: "array" - properties.tags.description: "Tags associated with the media" - properties.tags.items.enum[0]: "action" - properties.tags.items.enum[1]: "comedy" - properties.tags.items.enum[2]: "drama" - properties.tags.items.enum[3]: "sci_fi" - properties.tags.items.enum[4]: "documentary" - properties.metadata.bsonType: "object" - properties.metadata.description: "Additional metadata about the media" - properties.metadata.additionalProperties: false - properties.metadata.properties.duration.bsonType: "int" - properties.metadata.properties.duration.description: "Duration in minutes" - properties.metadata.properties.duration.minimum: 1 - properties.metadata.properties.format.bsonType: "string" - properties.metadata.properties.format.description: "Media format" - properties.metadata.properties.format.enum[0]: "dvd" - properties.metadata.properties.format.enum[1]: "bluray" - properties.metadata.properties.format.enum[2]: "digital" - properties.metadata.properties.format.enum[3]: "streaming" - properties.metadata.properties.quality.bsonType: "string" - properties.metadata.properties.quality.description: "Quality rating" - properties.metadata.properties.quality.enum[0]: "sd" - properties.metadata.properties.quality.enum[1]: "hd" - properties.metadata.properties.quality.enum[2]: "uhd" - properties.last_saved.bsonType: "object" - properties.last_saved.title: "Breadcrumb" - properties.last_saved.description: "When the media was last updated" - properties.last_saved.additionalProperties: false - properties.last_saved.properties.from_ip.bsonType: "string" - properties.last_saved.properties.from_ip.description: "Http Request remote IP address" - properties.last_saved.properties.from_ip.pattern: "^[^\\s]{4,40}$" - properties.last_saved.properties.by_user.bsonType: "string" - properties.last_saved.properties.by_user.description: "ID Of User" - properties.last_saved.properties.by_user.pattern: "^[^\\s]{4,40}$" - properties.last_saved.properties.at_time.bsonType: "date" - properties.last_saved.properties.at_time.description: "The date-time when last updated" - properties.last_saved.properties.correlation_id.bsonType: "string" - properties.last_saved.properties.correlation_id.description: "The logging correlation ID of the update transaction" - properties.last_saved.properties.correlation_id.pattern: "^[^\\s]{4,40}$" - properties.last_saved.required[0]: "from_ip" - properties.last_saved.required[1]: "by_user" - properties.last_saved.required[2]: "at_time" - properties.last_saved.required[3]: "correlation_id" - 
required[0]: "_id" - required[1]: "title" - required[2]: "status" - required[3]: "last_saved" - - name: Render organization BSON Schema - http: - url: http://${{env.host}}/api/render/bson_schema/organization.1.0.0.1/ - method: GET - headers: - Content-Type: application/json - check: - status: /200/ - jsonpath: - title: "Organization" - description: "An organization in the system" - bsonType: "object" - additionalProperties: false - properties._id.bsonType: "objectId" - properties._id.description: "The unique identifier for the organization" - properties.name.bsonType: "string" - properties.name.description: "The organization name" - properties.name.pattern: "^[a-zA-Z0-9\\s.,!?-]{4,255}$" - properties.status.bsonType: "string" - properties.status.description: "The current status of the organization" - properties.status.enum[0]: "active" - properties.status.enum[1]: "archived" - properties.website.bsonType: "string" - properties.website.description: "Organization website" - properties.website.pattern: "^https?://[\\w\\d\\-]+(\\.[\\w\\d\\-]+)+([\\w\\d\\-._~:/?#\\[\\]@!$&'()*+,;=]*)?$" - properties.users.bsonType: "array" - properties.users.description: "Users associated with this organization" - properties.users.items.bsonType: "objectId" - properties.users.items.description: "A user identifier" - properties.home_address.bsonType: "object" - properties.home_address.title: "Street Address" - properties.home_address.description: "Organization home address" - properties.home_address.additionalProperties: false - properties.home_address.properties.street.bsonType: "string" - properties.home_address.properties.street.description: "Street address" - properties.home_address.properties.street.pattern: "^[a-zA-Z0-9\\s.,!?-]{4,255}$" - properties.home_address.properties.city.bsonType: "string" - properties.home_address.properties.city.description: "City" - properties.home_address.properties.city.pattern: "^[^\\s]{4,40}$" - properties.home_address.properties.state.bsonType: "string" - properties.home_address.properties.state.description: "State or province" - properties.home_address.properties.state.pattern: "^[A-Z]{2}$" - properties.home_address.properties.postal_code.bsonType: "string" - properties.home_address.properties.postal_code.description: "Postal code" - properties.home_address.properties.postal_code.pattern: "^[^\\s]{4,40}$" - properties.home_address.required[0]: "postal_code" - properties.last_saved.bsonType: "object" - properties.last_saved.title: "Breadcrumb" - properties.last_saved.description: "When the organization document was last updated" - properties.last_saved.additionalProperties: false - properties.last_saved.properties.from_ip.bsonType: "string" - properties.last_saved.properties.from_ip.description: "Http Request remote IP address" - properties.last_saved.properties.from_ip.pattern: "^[^\\s]{4,40}$" - properties.last_saved.properties.by_user.bsonType: "string" - properties.last_saved.properties.by_user.description: "ID Of User" - properties.last_saved.properties.by_user.pattern: "^[^\\s]{4,40}$" - properties.last_saved.properties.at_time.bsonType: "date" - properties.last_saved.properties.at_time.description: "The date-time when last updated" - properties.last_saved.properties.correlation_id.bsonType: "string" - properties.last_saved.properties.correlation_id.description: "The logging correlation ID of the update transaction" - properties.last_saved.properties.correlation_id.pattern: "^[^\\s]{4,40}$" - properties.last_saved.required[0]: "from_ip" - 
properties.last_saved.required[1]: "by_user" - properties.last_saved.required[2]: "at_time" - properties.last_saved.required[3]: "correlation_id" - required[0]: "_id" - required[1]: "name" - required[2]: "status" - required[3]: "last_saved" - - name: Render Search BSON Schema - http: - url: http://${{env.host}}/api/render/bson_schema/search.1.0.0.1/ - method: GET - headers: - Content-Type: application/json - check: - status: /200/ - jsonpath: - title: "search" - description: "A search index that is used to support a elastic search polymorphic query service" - bsonType: "object" - additionalProperties: false - properties.collection_name.bsonType: "string" - properties.collection_name.description: "The name of the collection" - properties.collection_name.pattern: "^[^\\s]{4,40}$" - properties.collection_id.bsonType: "objectId" - properties.collection_id.description: "The unique identifier for this source document" - properties.media.bsonType: "object" - properties.media.title: "Media" - properties.media.description: "A media item in the system" - properties.media.additionalProperties: false - properties.media.properties._id.bsonType: "objectId" - properties.media.properties._id.description: "The unique identifier for the media" - properties.media.properties.title.bsonType: "string" - properties.media.properties.title.description: "The title of the media" - properties.media.properties.title.pattern: "^[a-zA-Z0-9\\s.,!?-]{4,255}$" - properties.media.properties.status.bsonType: "string" - properties.media.properties.status.description: "The current status of the media" - properties.media.properties.status.enum[0]: "draft" - properties.media.properties.status.enum[1]: "published" - properties.media.properties.status.enum[2]: "archived" - properties.media.properties.type.bsonType: "string" - properties.media.properties.type.description: "The type of media" - properties.media.properties.type.enum[0]: "movie" - properties.media.properties.type.enum[1]: "tv_show" - properties.media.properties.type.enum[2]: "documentary" - properties.media.properties.type.enum[3]: "short" - properties.media.properties.tags.bsonType: "array" - properties.media.properties.tags.description: "Tags associated with the media" - properties.media.properties.tags.items.enum[0]: "action" - properties.media.properties.tags.items.enum[1]: "comedy" - properties.media.properties.tags.items.enum[2]: "drama" - properties.media.properties.tags.items.enum[3]: "sci_fi" - properties.media.properties.tags.items.enum[4]: "documentary" - properties.media.properties.metadata.bsonType: "object" - properties.media.properties.metadata.description: "Additional metadata about the media" - properties.media.properties.metadata.additionalProperties: false - properties.media.properties.metadata.properties.duration.bsonType: "int" - properties.media.properties.metadata.properties.duration.description: "Duration in minutes" - properties.media.properties.metadata.properties.duration.minimum: 1 - properties.media.properties.metadata.properties.format.bsonType: "string" - properties.media.properties.metadata.properties.format.description: "Media format" - properties.media.properties.metadata.properties.format.enum[0]: "dvd" - properties.media.properties.metadata.properties.format.enum[1]: "bluray" - properties.media.properties.metadata.properties.format.enum[2]: "digital" - properties.media.properties.metadata.properties.format.enum[3]: "streaming" - properties.media.properties.metadata.properties.quality.bsonType: "string" - 
properties.media.properties.metadata.properties.quality.description: "Quality rating" - properties.media.properties.metadata.properties.quality.enum[0]: "sd" - properties.media.properties.metadata.properties.quality.enum[1]: "hd" - properties.media.properties.metadata.properties.quality.enum[2]: "uhd" - properties.media.properties.last_saved.bsonType: "object" - properties.media.properties.last_saved.title: "Breadcrumb" - properties.media.properties.last_saved.description: "When the media was last updated" - properties.media.properties.last_saved.additionalProperties: false - properties.media.properties.last_saved.properties.from_ip.bsonType: "string" - properties.media.properties.last_saved.properties.from_ip.description: "Http Request remote IP address" - properties.media.properties.last_saved.properties.from_ip.pattern: "^[^\\s]{4,40}$" - properties.media.properties.last_saved.properties.by_user.bsonType: "string" - properties.media.properties.last_saved.properties.by_user.description: "ID Of User" - properties.media.properties.last_saved.properties.by_user.pattern: "^[^\\s]{4,40}$" - properties.media.properties.last_saved.properties.at_time.bsonType: "date" - properties.media.properties.last_saved.properties.at_time.description: "The date-time when last updated" - properties.media.properties.last_saved.properties.correlation_id.bsonType: "string" - properties.media.properties.last_saved.properties.correlation_id.description: "The logging correlation ID of the update transaction" - properties.media.properties.last_saved.properties.correlation_id.pattern: "^[^\\s]{4,40}$" - properties.media.properties.last_saved.required[0]: "from_ip" - properties.media.properties.last_saved.required[1]: "by_user" - properties.media.properties.last_saved.required[2]: "at_time" - properties.media.properties.last_saved.required[3]: "correlation_id" - properties.media.required[0]: "_id" - properties.media.required[1]: "title" - properties.media.required[2]: "status" - properties.media.required[3]: "last_saved" - properties.organization.bsonType: "object" - properties.organization.title: "Organization" - properties.organization.description: "An organization in the system" - properties.organization.additionalProperties: false - properties.organization.properties._id.bsonType: "objectId" - properties.organization.properties._id.description: "The unique identifier for the organization" - properties.organization.properties.name.bsonType: "string" - properties.organization.properties.name.description: "The organization name" - properties.organization.properties.name.pattern: "^[a-zA-Z0-9\\s.,!?-]{4,255}$" - properties.organization.properties.status.bsonType: "string" - properties.organization.properties.status.description: "The current status of the organization" - properties.organization.properties.status.enum[0]: "active" - properties.organization.properties.status.enum[1]: "archived" - properties.organization.properties.website.bsonType: "string" - properties.organization.properties.website.description: "Organization website" - properties.organization.properties.website.pattern: "^https?://[\\w\\d\\-]+(\\.[\\w\\d\\-]+)+([\\w\\d\\-._~:/?#\\[\\]@!$&'()*+,;=]*)?$" - properties.organization.properties.users.bsonType: "array" - properties.organization.properties.users.description: "Users associated with this organization" - properties.organization.properties.users.items.bsonType: "objectId" - properties.organization.properties.users.items.description: "A user identifier" - 
properties.organization.properties.home_address.bsonType: "object" - properties.organization.properties.home_address.title: "Street Address" - properties.organization.properties.home_address.description: "Organization home address" - properties.organization.properties.home_address.additionalProperties: false - properties.organization.properties.home_address.properties.street.bsonType: "string" - properties.organization.properties.home_address.properties.street.description: "Street address" - properties.organization.properties.home_address.properties.street.pattern: "^[a-zA-Z0-9\\s.,!?-]{4,255}$" - properties.organization.properties.home_address.properties.city.bsonType: "string" - properties.organization.properties.home_address.properties.city.description: "City" - properties.organization.properties.home_address.properties.city.pattern: "^[^\\s]{4,40}$" - properties.organization.properties.home_address.properties.state.bsonType: "string" - properties.organization.properties.home_address.properties.state.description: "State or province" - properties.organization.properties.home_address.properties.state.pattern: "^[A-Z]{2}$" - properties.organization.properties.home_address.properties.postal_code.bsonType: "string" - properties.organization.properties.home_address.properties.postal_code.description: "Postal code" - properties.organization.properties.home_address.properties.postal_code.pattern: "^[^\\s]{4,40}$" - properties.organization.properties.home_address.required[0]: "postal_code" - properties.organization.properties.last_saved.bsonType: "object" - properties.organization.properties.last_saved.title: "Breadcrumb" - properties.organization.properties.last_saved.description: "When the organization document was last updated" - properties.organization.properties.last_saved.additionalProperties: false - properties.organization.properties.last_saved.properties.from_ip.bsonType: "string" - properties.organization.properties.last_saved.properties.from_ip.description: "Http Request remote IP address" - properties.organization.properties.last_saved.properties.from_ip.pattern: "^[^\\s]{4,40}$" - properties.organization.properties.last_saved.properties.by_user.bsonType: "string" - properties.organization.properties.last_saved.properties.by_user.description: "ID Of User" - properties.organization.properties.last_saved.properties.by_user.pattern: "^[^\\s]{4,40}$" - properties.organization.properties.last_saved.properties.at_time.bsonType: "date" - properties.organization.properties.last_saved.properties.at_time.description: "The date-time when last updated" - properties.organization.properties.last_saved.properties.correlation_id.bsonType: "string" - properties.organization.properties.last_saved.properties.correlation_id.description: "The logging correlation ID of the update transaction" - properties.organization.properties.last_saved.properties.correlation_id.pattern: "^[^\\s]{4,40}$" - properties.organization.properties.last_saved.required[0]: "from_ip" - properties.organization.properties.last_saved.required[1]: "by_user" - properties.organization.properties.last_saved.required[2]: "at_time" - properties.organization.properties.last_saved.required[3]: "correlation_id" - properties.organization.required[0]: "_id" - properties.organization.required[1]: "name" - properties.organization.required[2]: "status" - properties.organization.required[3]: "last_saved" - properties.user.bsonType: "object" - properties.user.title: "User Collection" - properties.user.description: "A user collection for testing 
the schema system" - properties.user.additionalProperties: false - properties.user.properties._id.bsonType: "objectId" - properties.user.properties._id.description: "The unique identifier for a user" - properties.user.properties.user_name.bsonType: "string" - properties.user.properties.user_name.description: "Username" - properties.user.properties.user_name.pattern: "^[^\\s]{4,40}$" - properties.user.properties.first_name.bsonType: "string" - properties.user.properties.first_name.description: "Users First Name" - properties.user.properties.first_name.pattern: "^[^\\s]{4,40}$" - properties.user.properties.last_name.bsonType: "string" - properties.user.properties.last_name.description: "Users Last Name" - properties.user.properties.last_name.pattern: "^[^\\s]{4,40}$" - properties.user.properties.status.bsonType: "string" - properties.user.properties.status.description: "document status" - properties.user.properties.status.enum[0]: "active" - properties.user.properties.status.enum[1]: "archived" - properties.user.properties.last_saved.bsonType: "object" - properties.user.properties.last_saved.title: "Breadcrumb" - properties.user.properties.last_saved.description: "The last time this document was saved" - properties.user.properties.last_saved.additionalProperties: false - properties.user.properties.last_saved.properties.from_ip.bsonType: "string" - properties.user.properties.last_saved.properties.from_ip.description: "Http Request remote IP address" - properties.user.properties.last_saved.properties.from_ip.pattern: "^[^\\s]{4,40}$" - properties.user.properties.last_saved.properties.by_user.bsonType: "string" - properties.user.properties.last_saved.properties.by_user.description: "ID Of User" - properties.user.properties.last_saved.properties.by_user.pattern: "^[^\\s]{4,40}$" - properties.user.properties.last_saved.properties.at_time.bsonType: "date" - properties.user.properties.last_saved.properties.at_time.description: "The date-time when last updated" - properties.user.properties.last_saved.properties.correlation_id.bsonType: "string" - properties.user.properties.last_saved.properties.correlation_id.description: "The logging correlation ID of the update transaction" - properties.user.properties.last_saved.properties.correlation_id.pattern: "^[^\\s]{4,40}$" - properties.user.properties.last_saved.required[0]: "from_ip" - properties.user.properties.last_saved.required[1]: "by_user" - properties.user.properties.last_saved.required[2]: "at_time" - properties.user.properties.last_saved.required[3]: "correlation_id" - properties.user.required[0]: "_id" - properties.user.required[1]: "user_name" - properties.user.required[2]: "status" - properties.user.required[3]: "last_saved" - required[0]: "collection_name" - required[1]: "collection_id" - json: - name: Render JSON Schemas - steps: - - name: Render User JSON Schema - http: - url: http://${{env.host}}/api/render/json_schema/user.1.0.1.3/ - method: GET - headers: - Content-Type: application/json - check: - status: /200/ - jsonpath: - title: "User Collection" - description: "A user collection for testing the schema system" - type: "object" - additionalProperties: false - properties._id.type: "string" - properties._id.description: "The unique identifier for a user" - properties._id.pattern: "^[0-9a-fA-F]{24}$" - properties.user_name.type: "string" - properties.user_name.description: "Username" - properties.user_name.pattern: "^[^\\s]{4,40}$" - properties.status.type: "string" - properties.status.description: "The status" - 
properties.status.enum[0]: "draft" - properties.status.enum[1]: "active" - properties.status.enum[2]: "archived" - properties.full_name.type: "string" - properties.full_name.description: "Users Full Name" - properties.full_name.pattern: "^[a-zA-Z0-9\\s.,!?-]{4,255}$" - properties.email.type: "string" - properties.email.description: "The person's email address" - properties.email.pattern: "^[a-zA-Z0-9._%+-]+@[a-zA-Z0-9.-]+\\.[a-zA-Z]{2,}$" - properties.phone.type: "string" - properties.phone.description: "The person's phone number" - properties.phone.pattern: "^\\+1[2-9][0-9]{9}$" - properties.categories.type: "array" - properties.categories.description: "A users list of categorized tags" - properties.categories.items.type: "object" - properties.categories.items.description: "A user category" - properties.categories.items.additionalProperties: false - properties.categories.items.properties.name.type: "string" - properties.categories.items.properties.name.description: "Category Name assigned by the user" - properties.categories.items.properties.name.pattern: "^[^\\s]{4,40}$" - properties.categories.items.properties.category.type: "string" - properties.categories.items.properties.category.description: "The category type assigned by the user" - properties.categories.items.properties.category.enum[0]: "work" - properties.categories.items.properties.category.enum[1]: "personal" - properties.categories.items.properties.category.enum[2]: "project" - properties.categories.items.properties.category.enum[3]: "reference" - properties.categories.items.properties.tags.type: "array" - properties.categories.items.properties.tags.description: "A list of enumerated values assigned by the user" - properties.categories.items.properties.tags.items.enum[0]: "urgent" - properties.categories.items.properties.tags.items.enum[1]: "important" - properties.categories.items.properties.tags.items.enum[2]: "normal" - properties.categories.items.properties.tags.items.enum[3]: "low" - properties.categories.items.properties.tags.items.enum[4]: "completed" - properties.categories.items.properties.tags.items.enum[5]: "in_progress" - properties.categories.items.properties.tags.items.enum[6]: "blocked" - properties.categories.items.properties.tags.items.enum[7]: "review" - properties.categories.items.required[0]: "name" - properties.categories.items.required[1]: "category" - properties.categories.items.required[2]: "tags" - properties.last_saved.type: "object" - properties.last_saved.title: "Breadcrumb" - properties.last_saved.description: "The last time this document was saved" - properties.last_saved.additionalProperties: false - properties.last_saved.properties.from_ip.type: "string" - properties.last_saved.properties.from_ip.description: "Http Request remote IP address" - properties.last_saved.properties.from_ip.pattern: "^[^\\s]{4,40}$" - properties.last_saved.properties.by_user.type: "string" - properties.last_saved.properties.by_user.description: "ID Of User" - properties.last_saved.properties.by_user.pattern: "^[^\\s]{4,40}$" - properties.last_saved.properties.at_time.type: "string" - properties.last_saved.properties.at_time.description: "The date-time when last updated" - properties.last_saved.properties.at_time.format: "date-time" - properties.last_saved.properties.correlation_id.type: "string" - properties.last_saved.properties.correlation_id.description: "The logging correlation ID of the update transaction" - properties.last_saved.properties.correlation_id.pattern: "^[^\\s]{4,40}$" - 
properties.last_saved.required[0]: "from_ip" - properties.last_saved.required[1]: "by_user" - properties.last_saved.required[2]: "at_time" - properties.last_saved.required[3]: "correlation_id" - required[0]: "_id" - required[1]: "user_name" - required[2]: "status" - required[3]: "last_saved" - - name: Render media JSON Schema - http: - url: http://${{env.host}}/api/render/json_schema/media.1.0.0.1/ - method: GET - headers: - Content-Type: application/json - check: - status: /200/ - jsonpath: - title: "Media" - description: "A media item in the system" - type: "object" - additionalProperties: false - properties._id.type: "string" - properties._id.description: "The unique identifier for the media" - properties.title.type: "string" - properties.title.description: "The title of the media" - properties.title.pattern: "^[a-zA-Z0-9\\s.,!?-]{4,255}$" - properties.status.type: "string" - properties.status.description: "The current status of the media" - properties.status.enum[0]: "draft" - properties.status.enum[1]: "published" - properties.status.enum[2]: "archived" - properties.type.type: "string" - properties.type.description: "The type of media" - properties.type.enum[0]: "movie" - properties.type.enum[1]: "tv_show" - properties.type.enum[2]: "documentary" - properties.type.enum[3]: "short" - properties.tags.type: "array" - properties.tags.description: "Tags associated with the media" - properties.tags.items.enum[0]: "action" - properties.tags.items.enum[1]: "comedy" - properties.tags.items.enum[2]: "drama" - properties.tags.items.enum[3]: "sci_fi" - properties.tags.items.enum[4]: "documentary" - properties.metadata.type: "object" - properties.metadata.description: "Additional metadata about the media" - properties.metadata.additionalProperties: false - properties.metadata.properties.duration.type: "number" - properties.metadata.properties.duration.description: "Duration in minutes" - properties.metadata.properties.duration.minimum: 1 - properties.metadata.properties.duration.multipleOf: 1 - properties.metadata.properties.format.type: "string" - properties.metadata.properties.format.description: "Media format" - properties.metadata.properties.format.enum[0]: "dvd" - properties.metadata.properties.format.enum[1]: "bluray" - properties.metadata.properties.format.enum[2]: "digital" - properties.metadata.properties.format.enum[3]: "streaming" - properties.metadata.properties.quality.type: "string" - properties.metadata.properties.quality.description: "Quality rating" - properties.metadata.properties.quality.enum[0]: "sd" - properties.metadata.properties.quality.enum[1]: "hd" - properties.metadata.properties.quality.enum[2]: "uhd" - properties.last_saved.type: "object" - properties.last_saved.title: "Breadcrumb" - properties.last_saved.description: "When the media was last updated" - properties.last_saved.additionalProperties: false - properties.last_saved.properties.from_ip.type: "string" - properties.last_saved.properties.from_ip.description: "Http Request remote IP address" - properties.last_saved.properties.from_ip.pattern: "^[^\\s]{4,40}$" - properties.last_saved.properties.by_user.type: "string" - properties.last_saved.properties.by_user.description: "ID Of User" - properties.last_saved.properties.by_user.pattern: "^[^\\s]{4,40}$" - properties.last_saved.properties.at_time.type: "string" - properties.last_saved.properties.at_time.description: "The date-time when last updated" - properties.last_saved.properties.at_time.format: "date-time" - properties.last_saved.properties.correlation_id.type: 
"string" - properties.last_saved.properties.correlation_id.description: "The logging correlation ID of the update transaction" - properties.last_saved.properties.correlation_id.pattern: "^[^\\s]{4,40}$" - properties.last_saved.required[0]: "from_ip" - properties.last_saved.required[1]: "by_user" - properties.last_saved.required[2]: "at_time" - properties.last_saved.required[3]: "correlation_id" - - name: Render organization JSON Schema - http: - url: http://${{env.host}}/api/render/json_schema/organization.1.0.0.1/ - method: GET - headers: - Content-Type: application/json - check: - status: /200/ - - name: Render Search JSON Schema - http: - url: http://${{env.host}}/api/render/json_schema/search.1.0.0.1/ - method: GET - headers: - Content-Type: application/json - check: - status: /200/ - jsonpath: - title: "search" - description: "A search index that is used to support a elastic search polymorphic query service" - type: "object" - additionalProperties: false - properties.collection_name.type: "string" - properties.collection_name.description: "The name of the collection" - properties.collection_name.pattern: "^[^\\s]{4,40}$" - properties.collection_id.type: "string" - properties.collection_id.description: "The unique identifier for this source document" - properties.collection_id.pattern: "^[0-9a-fA-F]{24}$" - properties.media.type: "object" - properties.media.title: "Media" - properties.media.description: "A media item in the system" - properties.media.additionalProperties: false - properties.media.properties._id.type: "string" - properties.media.properties._id.description: "The unique identifier for the media" - properties.media.properties._id.pattern: "^[0-9a-fA-F]{24}$" - properties.media.properties.title.type: "string" - properties.media.properties.title.description: "The title of the media" - properties.media.properties.title.pattern: "^[a-zA-Z0-9\\s.,!?-]{4,255}$" - properties.media.properties.status.type: "string" - properties.media.properties.status.description: "The current status of the media" - properties.media.properties.status.enum[0]: "draft" - properties.media.properties.status.enum[1]: "published" - properties.media.properties.status.enum[2]: "archived" - properties.media.properties.type.type: "string" - properties.media.properties.type.description: "The type of media" - properties.media.properties.type.enum[0]: "movie" - properties.media.properties.type.enum[1]: "tv_show" - properties.media.properties.type.enum[2]: "documentary" - properties.media.properties.type.enum[3]: "short" - properties.media.properties.tags.type: "array" - properties.media.properties.tags.description: "Tags associated with the media" - properties.media.properties.tags.items.enum[0]: "action" - properties.media.properties.tags.items.enum[1]: "comedy" - properties.media.properties.tags.items.enum[2]: "drama" - properties.media.properties.tags.items.enum[3]: "sci_fi" - properties.media.properties.tags.items.enum[4]: "documentary" - properties.media.properties.metadata.type: "object" - properties.media.properties.metadata.description: "Additional metadata about the media" - properties.media.properties.metadata.additionalProperties: false - properties.media.properties.metadata.properties.duration.type: "number" - properties.media.properties.metadata.properties.duration.description: "Duration in minutes" - properties.media.properties.metadata.properties.duration.minimum: 1 - properties.media.properties.metadata.properties.duration.multipleOf: 1 - 
properties.media.properties.metadata.properties.format.type: "string" - properties.media.properties.metadata.properties.format.description: "Media format" - properties.media.properties.metadata.properties.format.enum[0]: "dvd" - properties.media.properties.metadata.properties.format.enum[1]: "bluray" - properties.media.properties.metadata.properties.format.enum[2]: "digital" - properties.media.properties.metadata.properties.format.enum[3]: "streaming" - properties.media.properties.metadata.properties.quality.type: "string" - properties.media.properties.metadata.properties.quality.description: "Quality rating" - properties.media.properties.metadata.properties.quality.enum[0]: "sd" - properties.media.properties.metadata.properties.quality.enum[1]: "hd" - properties.media.properties.metadata.properties.quality.enum[2]: "uhd" - properties.media.properties.last_saved.type: "object" - properties.media.properties.last_saved.title: "Breadcrumb" - properties.media.properties.last_saved.description: "When the media was last updated" - properties.media.properties.last_saved.additionalProperties: false - properties.media.properties.last_saved.properties.from_ip.type: "string" - properties.media.properties.last_saved.properties.from_ip.description: "Http Request remote IP address" - properties.media.properties.last_saved.properties.from_ip.pattern: "^[^\\s]{4,40}$" - properties.media.properties.last_saved.properties.by_user.type: "string" - properties.media.properties.last_saved.properties.by_user.description: "ID Of User" - properties.media.properties.last_saved.properties.by_user.pattern: "^[^\\s]{4,40}$" - properties.media.properties.last_saved.properties.at_time.type: "string" - properties.media.properties.last_saved.properties.at_time.description: "The date-time when last updated" - properties.media.properties.last_saved.properties.at_time.format: "date-time" - properties.media.properties.last_saved.properties.correlation_id.type: "string" - properties.media.properties.last_saved.properties.correlation_id.description: "The logging correlation ID of the update transaction" - properties.media.properties.last_saved.properties.correlation_id.pattern: "^[^\\s]{4,40}$" - properties.media.properties.last_saved.required[0]: "from_ip" - properties.media.properties.last_saved.required[1]: "by_user" - properties.media.properties.last_saved.required[2]: "at_time" - properties.media.properties.last_saved.required[3]: "correlation_id" - properties.media.required[0]: "_id" - properties.media.required[1]: "title" - properties.media.required[2]: "status" - properties.media.required[3]: "last_saved" - properties.organization.type: "object" - properties.organization.title: "Organization" - properties.organization.description: "An organization in the system" - properties.organization.additionalProperties: false - properties.organization.properties._id.type: "string" - properties.organization.properties._id.description: "The unique identifier for the organization" - properties.organization.properties._id.pattern: "^[0-9a-fA-F]{24}$" - properties.organization.properties.name.type: "string" - properties.organization.properties.name.description: "The organization name" - properties.organization.properties.name.pattern: "^[a-zA-Z0-9\\s.,!?-]{4,255}$" - properties.organization.properties.status.type: "string" - properties.organization.properties.status.description: "The current status of the organization" - properties.organization.properties.status.enum[0]: "active" - properties.organization.properties.status.enum[1]: 
"archived" - properties.organization.properties.website.type: "string" - properties.organization.properties.website.description: "Organization website" - properties.organization.properties.website.pattern: "^https?://[\\w\\d\\-]+(\\.[\\w\\d\\-]+)+([\\w\\d\\-._~:/?#\\[\\]@!$&'()*+,;=]*)?$" - properties.organization.properties.users.type: "array" - properties.organization.properties.users.description: "Users associated with this organization" - properties.organization.properties.users.items.type: "string" - properties.organization.properties.users.items.description: "A user identifier" - properties.organization.properties.users.items.pattern: "^[0-9a-fA-F]{24}$" - properties.organization.properties.home_address.type: "object" - properties.organization.properties.home_address.title: "Street Address" - properties.organization.properties.home_address.description: "Organization home address" - properties.organization.properties.home_address.additionalProperties: false - properties.organization.properties.home_address.properties.street.type: "string" - properties.organization.properties.home_address.properties.street.description: "Street address" - properties.organization.properties.home_address.properties.street.pattern: "^[a-zA-Z0-9\\s.,!?-]{4,255}$" - properties.organization.properties.home_address.properties.city.type: "string" - properties.organization.properties.home_address.properties.city.description: "City" - properties.organization.properties.home_address.properties.city.pattern: "^[^\\s]{4,40}$" - properties.organization.properties.home_address.properties.state.type: "string" - properties.organization.properties.home_address.properties.state.description: "State or province" - properties.organization.properties.home_address.properties.state.pattern: "^[A-Z]{2}$" - properties.organization.properties.home_address.properties.postal_code.type: "string" - properties.organization.properties.home_address.properties.postal_code.description: "Postal code" - properties.organization.properties.home_address.properties.postal_code.pattern: "^[^\\s]{4,40}$" - properties.organization.properties.home_address.required[0]: "postal_code" - properties.organization.properties.last_saved.type: "object" - properties.organization.properties.last_saved.title: "Breadcrumb" - properties.organization.properties.last_saved.description: "When the organization document was last updated" - properties.organization.properties.last_saved.additionalProperties: false - properties.organization.properties.last_saved.properties.from_ip.type: "string" - properties.organization.properties.last_saved.properties.from_ip.description: "Http Request remote IP address" - properties.organization.properties.last_saved.properties.from_ip.pattern: "^[^\\s]{4,40}$" - properties.organization.properties.last_saved.properties.by_user.type: "string" - properties.organization.properties.last_saved.properties.by_user.description: "ID Of User" - properties.organization.properties.last_saved.properties.by_user.pattern: "^[^\\s]{4,40}$" - properties.organization.properties.last_saved.properties.at_time.type: "string" - properties.organization.properties.last_saved.properties.at_time.description: "The date-time when last updated" - properties.organization.properties.last_saved.properties.at_time.format: "date-time" - properties.organization.properties.last_saved.properties.correlation_id.type: "string" - properties.organization.properties.last_saved.properties.correlation_id.description: "The logging correlation ID of the update transaction" - 
properties.organization.properties.last_saved.properties.correlation_id.pattern: "^[^\\s]{4,40}$" - properties.organization.properties.last_saved.required[0]: "from_ip" - properties.organization.properties.last_saved.required[1]: "by_user" - properties.organization.properties.last_saved.required[2]: "at_time" - properties.organization.properties.last_saved.required[3]: "correlation_id" - properties.organization.required[0]: "_id" - properties.organization.required[1]: "name" - properties.organization.required[2]: "status" - properties.organization.required[3]: "last_saved" - properties.user.type: "object" - properties.user.title: "User Collection" - properties.user.description: "A user collection for testing the schema system" - properties.user.additionalProperties: false - properties.user.properties._id.type: "string" - properties.user.properties._id.description: "The unique identifier for a user" - properties.user.properties._id.pattern: "^[0-9a-fA-F]{24}$" - properties.user.properties.user_name.type: "string" - properties.user.properties.user_name.description: "Username" - properties.user.properties.user_name.pattern: "^[^\\s]{4,40}$" - properties.user.properties.first_name.type: "string" - properties.user.properties.first_name.description: "Users First Name" - properties.user.properties.first_name.pattern: "^[^\\s]{4,40}$" - properties.user.properties.last_name.type: "string" - properties.user.properties.last_name.description: "Users Last Name" - properties.user.properties.last_name.pattern: "^[^\\s]{4,40}$" - properties.user.properties.status.type: "string" - properties.user.properties.status.description: "document status" - properties.user.properties.status.enum[0]: "active" - properties.user.properties.status.enum[1]: "archived" - properties.user.properties.last_saved.type: "object" - properties.user.properties.last_saved.title: "Breadcrumb" - properties.user.properties.last_saved.description: "The last time this document was saved" - properties.user.properties.last_saved.additionalProperties: false - properties.user.properties.last_saved.properties.from_ip.type: "string" - properties.user.properties.last_saved.properties.from_ip.description: "Http Request remote IP address" - properties.user.properties.last_saved.properties.from_ip.pattern: "^[^\\s]{4,40}$" - properties.user.properties.last_saved.properties.by_user.type: "string" - properties.user.properties.last_saved.properties.by_user.description: "ID Of User" - properties.user.properties.last_saved.properties.by_user.pattern: "^[^\\s]{4,40}$" - properties.user.properties.last_saved.properties.at_time.type: "string" - properties.user.properties.last_saved.properties.at_time.description: "The date-time when last updated" - properties.user.properties.last_saved.properties.at_time.format: "date-time" - properties.user.properties.last_saved.properties.correlation_id.type: "string" - properties.user.properties.last_saved.properties.correlation_id.description: "The logging correlation ID of the update transaction" - properties.user.properties.last_saved.properties.correlation_id.pattern: "^[^\\s]{4,40}$" - properties.user.properties.last_saved.required[0]: "from_ip" - properties.user.properties.last_saved.required[1]: "by_user" - properties.user.properties.last_saved.required[2]: "at_time" - properties.user.properties.last_saved.required[3]: "correlation_id" - properties.user.required[0]: "_id" - properties.user.required[1]: "user_name" - properties.user.required[2]: "status" - properties.user.required[3]: "last_saved" - required[0]: 
"collection_name" - required[1]: "collection_id" diff --git a/tests/stepci/migrations.yaml b/tests/stepci/migrations.yaml new file mode 100644 index 0000000..96b90ca --- /dev/null +++ b/tests/stepci/migrations.yaml @@ -0,0 +1,111 @@ +version: "1.1" +name: Test Migrations Endpoints +env: + host: localhost:8081 + +tests: + migrations: + name: Test Migrations Endpoints + steps: + # Setup - Delete test file if it exists (no checks needed) + - name: Delete Test Migration (Setup) + http: + url: http://${{env.host}}/api/migrations/test_migration.json/ + method: DELETE + check: + status: /200|500/ + + # Create - PUT new migration + - name: Create Test Migration + http: + url: http://${{env.host}}/api/migrations/test_migration.json/ + method: PUT + headers: + Content-Type: application/json + body: | + [ + { + "$addFields": { + "test_field": "test_value" + } + }, + { + "$out": "test_collection" + } + ] + check: + status: /200/ + jsonpath: + file_name: test_migration.json + schema: + created_at: + type: string + format: date-time + updated_at: + type: string + format: date-time + size: + type: number + + # Read - GET the created migration + - name: Get Test Migration + http: + url: http://${{env.host}}/api/migrations/test_migration.json/ + method: GET + check: + status: /200/ + jsonpath: + $[0].$addFields.test_field: "test_value" + $[1].$out: "test_collection" + + # Update - PUT to modify the migration + - name: Update Test Migration + http: + url: http://${{env.host}}/api/migrations/test_migration.json/ + method: PUT + headers: + Content-Type: application/json + body: | + [ + { + "$addFields": { + "updated_field": "updated_value" + } + }, + { + "$out": "updated_collection" + } + ] + check: + status: /200/ + jsonpath: + file_name: test_migration.json + schema: + created_at: + type: string + format: date-time + updated_at: + type: string + format: date-time + size: + type: number + + # List all migrations + - name: List All Migrations + http: + url: http://${{env.host}}/api/migrations/ + method: GET + check: + status: /200/ + jsonpath: + $: /.*test_migration\.json.*/ + + # Cleanup - DELETE the test migration + - name: Delete Test Migration (Cleanup) + http: + url: http://${{env.host}}/api/migrations/test_migration.json/ + method: DELETE + check: + status: /200/ + jsonpath: + status: SUCCESS \ No newline at end of file diff --git a/tests/stepci/observability.yaml b/tests/stepci/observability.yaml index 8689bf1..e6cb96c 100644 --- a/tests/stepci/observability.yaml +++ b/tests/stepci/observability.yaml @@ -1,38 +1,47 @@ -name: mongodb_api_observability -version: "1.0" -env: - host: localhost:8081 - -tests: - observable: - name: Test Observability - steps: - - name: GET Config Values - http: - url: http://${{env.host}}/api/config - method: GET - check: - status: /200/ - schema: - type: object - properties: - config_items: - type: array - versions: - type: array - enumerators: - type: object - token: - type: object - required: - - config_items - - versions - - enumerators - - token - additionalProperties: false - - name: GET Health - http: - url: http://${{env.host}}/api/health - method: GET - check: - status: /200/ \ No newline at end of file +observable: + name: Test Observability Endpoints + steps: + - name: GET Config Values + http: + url: http://${{env.host}}/api/config + method: GET + check: + status: /200/ + schema: + type: object + properties: + config_items: + type: array + items: + type: object + properties: + name: + type: string + value: + type: string + from: + type: string + enum: + - file + 
diff --git a/tests/stepci/observability.yaml b/tests/stepci/observability.yaml
index 8689bf1..e6cb96c 100644
--- a/tests/stepci/observability.yaml
+++ b/tests/stepci/observability.yaml
@@ -1,38 +1,47 @@
-name: mongodb_api_observability
-version: "1.0"
-env:
-  host: localhost:8081
-
-tests:
-  observable:
-    name: Test Observability
-    steps:
-      - name: GET Config Values
-        http:
-          url: http://${{env.host}}/api/config
-          method: GET
-          check:
-            status: /200/
-            schema:
-              type: object
-              properties:
-                config_items:
-                  type: array
-                versions:
-                  type: array
-                enumerators:
-                  type: object
-                token:
-                  type: object
-              required:
-                - config_items
-                - versions
-                - enumerators
-                - token
-              additionalProperties: false
-      - name: GET Health
-        http:
-          url: http://${{env.host}}/api/health
-          method: GET
-          check:
-            status: /200/
\ No newline at end of file
+observable:
+  name: Test Observability Endpoints
+  steps:
+    - name: GET Config Values
+      http:
+        url: http://${{env.host}}/api/config
+        method: GET
+        check:
+          status: /200/
+          schema:
+            type: object
+            properties:
+              config_items:
+                type: array
+                items:
+                  type: object
+                  properties:
+                    name:
+                      type: string
+                    value:
+                      type: string
+                    from:
+                      type: string
+                      enum:
+                        - file
+                        - environment
+                        - default
+                  required:
+                    - name
+                    - value
+                    - from
+            required:
+              - config_items
+    - name: GET Health
+      http:
+        url: http://${{env.host}}/api/health
+        method: GET
+        check:
+          status: /200/
+    - name: GET Docs
+      http:
+        url: http://${{env.host}}/docs/index.html
+        method: GET
+        check:
+          status: /200/
+          schema:
+            type: string
\ No newline at end of file
diff --git a/tests/stepci/processing.yaml b/tests/stepci/processing.yaml
new file mode 100644
index 0000000..df347a0
--- /dev/null
+++ b/tests/stepci/processing.yaml
@@ -0,0 +1,9 @@
+processing:
+  name: Test Processing Endpoints
+  steps:
+    - name: Test Processing Endpoint
+      http:
+        url: http://${{env.host}}/api/configurations
+        method: GET
+        check:
+          status: /200/
diff --git a/tests/stepci/rendering.yaml b/tests/stepci/rendering.yaml
new file mode 100644
index 0000000..71e238f
--- /dev/null
+++ b/tests/stepci/rendering.yaml
@@ -0,0 +1,36 @@
+rendering:
+  name: Test Rendering Endpoints
+  steps:
+    - name: Render JSON Schema for sample.1.0.0.1
+      http:
+        url: http://${{env.host}}/api/configurations/json_schema/sample.yaml/1.0.0.1/
+        method: GET
+        check:
+          status: /200/
+          schema:
+            type: object
+            properties:
+              type:
+                type: string
+              properties:
+                type: object
+            required:
+              - type
+              - properties
+
+    - name: Render BSON Schema for sample.1.0.0.1
+      http:
+        url: http://${{env.host}}/api/configurations/bson_schema/sample.yaml/1.0.0.1/
+        method: GET
+        check:
+          status: /200/
+          schema:
+            type: object
+            properties:
+              bsonType:
+                type: string
+              properties:
+                type: object
+            required:
+              - bsonType
+              - properties
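Editor's note: these new rendering tests assert only the shape of the output (a `type`/`properties` pair for JSON Schema, a `bsonType`/`properties` pair for BSON). The exact-value contrast they replace is visible in the deleted `small_sample.yaml` just below; distilled, it looks roughly like this (illustrative fragments lifted from the deleted assertions, not from `sample.yaml`, which this diff does not show):

```yaml
# JSON Schema rendering: portable keywords; an ObjectId becomes a patterned string
json_schema:
  properties:
    _id: { type: "string", pattern: "^[0-9a-fA-F]{24}$" }
# BSON schema rendering: MongoDB keywords; an ObjectId stays a native type
bson_schema:
  properties:
    _id: { bsonType: "objectId" }
```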
diff --git a/tests/stepci/small_sample.yaml b/tests/stepci/small_sample.yaml
deleted file mode 100644
index 9c4b4e3..0000000
--- a/tests/stepci/small_sample.yaml
+++ /dev/null
@@ -1,159 +0,0 @@
-name: mongodb_api_small_sample
-version: "1.0"
-env:
-  host: localhost:8081
-
-tests:
-  collections:
-    name: Collections
-    steps:
-      - name: List Collections
-        http:
-          url: http://${{env.host}}/api/collections/
-          method: GET
-          check:
-            status: /200/
-            schema:
-              type: "array"
-              items:
-                type: "object"
-                properties:
-                  collection_name:
-                    type: "string"
-                  version:
-                    type: "string"
-                  targeted_version:
-                    type: "string"
-                required:
-                  - collection_name
-                  - version
-                  - targeted_version
-      - name: GET A Collection
-        http:
-          url: http://${{env.host}}/api/collections/simple
-          method: GET
-          headers:
-            Content-Type: application/json
-          check:
-            status: /200/
-            schema:
-              type: object
-              properties:
-                name:
-                  type: string
-                versions:
-                  type: array
-                  items:
-                    type: object
-                    properties:
-                      version:
-                        type: string
-                      test_data:
-                        type: string
-                      add_indexes:
-                        type: array
-                      drop_indexes:
-                        type: array
-                        items:
-                          type: string
-                      aggregations:
-                        type: array
-              required:
-                - name
-                - versions
-      - name: Process All Collections
-        http:
-          url: http://${{env.host}}/api/collections/
-          method: POST
-          headers:
-            Content-Type: application/json
-          check:
-            status: /200/
-            schema:
-              type: array
-              items:
-                type: object
-                properties:
-                  collection:
-                    type: string
-                  operations:
-                    type: array
-                  status:
-                    type: string
-                required:
-                  - collection
-                  - operations
-                  - status
-      - name: Process A Collection
-        http:
-          url: http://${{env.host}}/api/collections/simple
-          method: POST
-          headers:
-            Content-Type: application/json
-          check:
-            status: /200/
-            schema:
-              type: object
-              properties:
-                collection:
-                  type: string
-                operations:
-                  type: array
-                status:
-                  type: string
-              required:
-                - collection
-                - operations
-                - status
-  render:
-    name: Test Render endpoints
-    steps:
-      - name: Render JSON Schema
-        http:
-          url: http://${{env.host}}/api/render/json_schema/simple.1.0.0.1
-          method: GET
-          headers:
-            Content-Type: application/json
-          check:
-            status: /200/
-            jsonpath:
-              title: "Simple"
-              description: "A simple collection for testing"
-              type: "object"
-              properties._id.description: "The unique identifier for the media"
-              properties._id.pattern: "^[0-9a-fA-F]{24}$"
-              properties._id.type: "string"
-              properties.name.description: "The name of the document"
-              properties.name.pattern: "^[a-zA-Z0-9_-]{4,40}$"
-              properties.name.type: "string"
-              properties.status.description: "The current status of the document"
-              properties.status.enum[0]: "active"
-              properties.status.enum[1]: "archived"
-              properties.status.type: "string"
-              required[0]: "_id"
-              required[1]: "status"
-              additionalProperties: false
-      - name: Render BSON Schema
-        http:
-          url: http://${{env.host}}/api/render/bson_schema/simple.1.0.0.1
-          method: GET
-          headers:
-            Content-Type: application/json
-          check:
-            status: /200/
-            jsonpath:
-              title: "Simple"
-              description: "A simple collection for testing"
-              bsonType: "object"
-              properties._id.bsonType: "objectId"
-              properties._id.description: "The unique identifier for the media"
-              properties.name.bsonType: "string"
-              properties.name.description: "The name of the document"
-              properties.name.pattern: "^[a-zA-Z0-9_-]{4,40}$"
-              properties.status.bsonType: "string"
-              properties.status.description: "The current status of the document"
-              properties.status.enum[0]: "active"
-              properties.status.enum[1]: "archived"
-              required[0]: "_id"
-              required[1]: "status"
-              additionalProperties: false
diff --git a/tests/stepci/test_data.yaml b/tests/stepci/test_data.yaml
new file mode 100644
index 0000000..87e12f9
--- /dev/null
+++ b/tests/stepci/test_data.yaml
@@ -0,0 +1,169 @@
+version: "1.1"
+name: Test Test Data Endpoints
+env:
+  host: localhost:8081
+
+tests:
+  test_data:
+    name: Test Test Data Endpoints
+    steps:
+      # Setup - Delete test file if it exists (tolerates 200 or 500)
+      - name: Delete Test Data (Setup)
+        http:
+          url: http://${{env.host}}/api/test_data/test_data.json/
+          method: DELETE
+          check:
+            status: /200|500/
+
+      # Create - PUT new test data with MongoDB objects
+      - name: Create Test Data
+        http:
+          url: http://${{env.host}}/api/test_data/test_data.json/
+          method: PUT
+          headers:
+            Content-Type: application/json
+          body: |
+            [
+              {
+                "_id": {"$oid": "A00000000000000000000001"},
+                "name": "John Doe",
+                "email": "john.doe@example.com",
+                "created_at": {"$date": "2024-01-01T00:00:00Z"},
+                "status": "active",
+                "metadata": {
+                  "last_login": {"$date": "2024-01-15T10:30:00Z"},
+                  "user_id": {"$oid": "B00000000000000000000001"}
+                }
+              },
+              {
+                "_id": {"$oid": "A00000000000000000000002"},
+                "name": "Jane Smith",
+                "email": "jane.smith@example.com",
+                "created_at": {"$date": "2024-01-02T12:00:00Z"},
+                "status": "inactive",
+                "metadata": {
+                  "last_login": {"$date": "2024-01-10T14:45:00Z"},
+                  "user_id": {"$oid": "B00000000000000000000002"}
+                }
+              }
+            ]
+          check:
+            status: /200/
+            jsonpath:
+              file_name: test_data.json
+            schema:
+              created_at:
+                type: string
+                format: date-time
+              updated_at:
+                type: string
+                format: date-time
+              size:
+                type: number
+
+      # Read - GET the created test data
+      - name: Get Test Data
+        http:
+          url: http://${{env.host}}/api/test_data/test_data.json/
+          method: GET
+          check:
+            status: /200/
+            jsonpath:
+              $[0]._id.$oid: "A00000000000000000000001"
+              $[0].name: "John Doe"
+              $[0].email: "john.doe@example.com"
+              $[0].status: "active"
+              $[1]._id.$oid: "A00000000000000000000002"
+              $[1].name: "Jane Smith"
+              $[1].status: "inactive"
"inactive" + + # Update - PUT to modify the test data + - name: Update Test Data + http: + url: http://${{env.host}}/api/test_data/test_data.json/ + method: PUT + headers: + Content-Type: application/json + body: | + [ + { + "_id": {"$oid": "A00000000000000000000001"}, + "name": "John Doe Updated", + "email": "john.doe.updated@example.com", + "created_at": {"$date": "2024-01-01T00:00:00Z"}, + "status": "active", + "metadata": { + "last_login": {"$date": "2024-01-20T15:30:00Z"}, + "user_id": {"$oid": "B00000000000000000000001"}, + "new_field": "added value" + } + }, + { + "_id": {"$oid": "A00000000000000000000002"}, + "name": "Jane Smith", + "email": "jane.smith@example.com", + "created_at": {"$date": "2024-01-02T12:00:00Z"}, + "status": "active", + "metadata": { + "last_login": {"$date": "2024-01-18T09:15:00Z"}, + "user_id": {"$oid": "B00000000000000000000002"} + } + }, + { + "_id": {"$oid": "A00000000000000000000003"}, + "name": "Bob Wilson", + "email": "bob.wilson@example.com", + "created_at": {"$date": "2024-01-03T08:00:00Z"}, + "status": "pending", + "metadata": { + "last_login": {"$date": "2024-01-19T11:20:00Z"}, + "user_id": {"$oid": "B00000000000000000000003"} + } + } + ] + check: + status: /200/ + jsonpath: + file_name: test_data.json + schema: + created_at: + type: string + format: date-time + updated_at: + type: string + format: date-time + size: + type: number + + # Verify updated data + - name: Verify Updated Test Data + http: + url: http://${{env.host}}/api/test_data/test_data.json/ + method: GET + check: + status: /200/ + jsonpath: + $[0].name: "John Doe Updated" + $[0].email: "john.doe.updated@example.com" + $[0].metadata.new_field: "added value" + $[1].status: "active" + $[2].name: "Bob Wilson" + $[2].status: "pending" + + # Test list all test data files + - name: List All Test Data Files + http: + url: http://${{env.host}}/api/test_data/ + method: GET + check: + status: /200/ + jsonpath: + $[*].file_name: /.*\.json$/ + + # Cleanup - DELETE the test data + - name: Delete Test Data (Cleanup) + http: + url: http://${{env.host}}/api/test_data/test_data.json/ + method: DELETE + check: + status: /200/ \ No newline at end of file diff --git a/tests/stepci/types.yaml b/tests/stepci/types.yaml new file mode 100644 index 0000000..2de89d7 --- /dev/null +++ b/tests/stepci/types.yaml @@ -0,0 +1,179 @@ +version: "1.1" +name: Test Types Endpoints +env: + host: localhost:8081 + +tests: + types: + name: Test Types Endpoints + steps: + # Setup - Delete test file if it exists (no checks needed) + - name: Delete Test Type (Setup) + http: + url: http://${{env.host}}/api/types/test_type.yaml/ + method: DELETE + check: + status: /200|500/ + + # Create - PUT new type + - name: Create Test Type + http: + url: http://${{env.host}}/api/types/test_type.yaml/ + method: PUT + headers: + Content-Type: application/json + body: | + { + "description": "Test type for stepCI testing", + "type": "string", + "required": false + } + check: + status: /200/ + jsonpath: + file_name: test_type.yaml + _locked: false + schema: + created_at: + type: string + format: date-time + updated_at: + type: string + format: date-time + size: + type: number + + # Read - GET the created type + - name: Get Test Type + http: + url: http://${{env.host}}/api/types/test_type.yaml/ + method: GET + check: + status: /200/ + jsonpath: + description: "Test type for stepCI testing" + type: "string" + required: false + + # Update - PUT to modify the type + - name: Update Test Type + http: + url: http://${{env.host}}/api/types/test_type.yaml/ 
diff --git a/tests/stepci/types.yaml b/tests/stepci/types.yaml
new file mode 100644
index 0000000..2de89d7
--- /dev/null
+++ b/tests/stepci/types.yaml
@@ -0,0 +1,179 @@
+version: "1.1"
+name: Test Types Endpoints
+env:
+  host: localhost:8081
+
+tests:
+  types:
+    name: Test Types Endpoints
+    steps:
+      # Setup - Delete test file if it exists (tolerates 200 or 500)
+      - name: Delete Test Type (Setup)
+        http:
+          url: http://${{env.host}}/api/types/test_type.yaml/
+          method: DELETE
+          check:
+            status: /200|500/
+
+      # Create - PUT new type
+      - name: Create Test Type
+        http:
+          url: http://${{env.host}}/api/types/test_type.yaml/
+          method: PUT
+          headers:
+            Content-Type: application/json
+          body: |
+            {
+              "description": "Test type for stepCI testing",
+              "type": "string",
+              "required": false
+            }
+          check:
+            status: /200/
+            jsonpath:
+              file_name: test_type.yaml
+              _locked: false
+            schema:
+              created_at:
+                type: string
+                format: date-time
+              updated_at:
+                type: string
+                format: date-time
+              size:
+                type: number
+
+      # Read - GET the created type
+      - name: Get Test Type
+        http:
+          url: http://${{env.host}}/api/types/test_type.yaml/
+          method: GET
+          check:
+            status: /200/
+            jsonpath:
+              description: "Test type for stepCI testing"
+              type: "string"
+              required: false
+
+      # Update - PUT to modify the type
+      - name: Update Test Type
+        http:
+          url: http://${{env.host}}/api/types/test_type.yaml/
+          method: PUT
+          headers:
+            Content-Type: application/json
+          body: |
+            {
+              "description": "Updated test type for stepCI testing",
+              "type": "string",
+              "required": true
+            }
+          check:
+            status: /200/
+            jsonpath:
+              file_name: test_type.yaml
+              _locked: false
+            schema:
+              created_at:
+                type: string
+                format: date-time
+              updated_at:
+                type: string
+                format: date-time
+              size:
+                type: number
+
+      # Lock - PUT to lock the type
+      - name: Lock Test Type
+        http:
+          url: http://${{env.host}}/api/types/test_type.yaml/
+          method: PUT
+          headers:
+            Content-Type: application/json
+          body: |
+            {
+              "description": "Updated test type for stepCI testing",
+              "type": "string",
+              "required": true,
+              "_locked": true
+            }
+          check:
+            status: /200/
+            jsonpath:
+              file_name: test_type.yaml
+              _locked: true
+
+      # Try to Delete a locked type
+      - name: Delete a locked type - should fail
+        http:
+          url: http://${{env.host}}/api/types/test_type.yaml/
+          method: DELETE
+          check:
+            status: /500/
+            jsonpath:
+              status: FAILURE
+
+      # Unlock - PUT to unlock the type
+      - name: Unlock Test Type
+        http:
+          url: http://${{env.host}}/api/types/test_type.yaml/
+          method: PUT
+          headers:
+            Content-Type: application/json
+          body: |
+            {
+              "description": "Updated test type for stepCI testing",
+              "type": "string",
+              "required": true,
+              "_locked": false
+            }
+          check:
+            status: /200/
+            jsonpath:
+              file_name: test_type.yaml
+              _locked: false
+
+      # Lock All - PATCH to lock all types
+      - name: Lock All Types
+        http:
+          url: http://${{env.host}}/api/types/
+          method: PATCH
+          check:
+            status: /200/
+            jsonpath:
+              status: SUCCESS
+            schema:
+              event_id:
+                type: string
+              event_type:
+                type: string
+              sub_events:
+                type: array
+
+      # Unlock before cleanup
+      - name: Unlock Test Type (Cleanup)
+        http:
+          url: http://${{env.host}}/api/types/test_type.yaml/
+          method: PUT
+          headers:
+            Content-Type: application/json
+          body: |
+            {
+              "description": "Updated test type for stepCI testing",
+              "type": "string",
+              "required": true,
+              "_locked": false
+            }
+          check:
+            status: /200/
+            jsonpath:
+              file_name: test_type.yaml
+              _locked: false
+
+      # Cleanup - DELETE the test type
+      - name: Delete Test Type (Cleanup)
+        http:
+          url: http://${{env.host}}/api/types/test_type.yaml/
+          method: DELETE
+          check:
+            status: /200/
\ No newline at end of file
diff --git a/tests/stepci/workflow.yaml b/tests/stepci/workflow.yaml
new file mode 100644
index 0000000..521a5aa
--- /dev/null
+++ b/tests/stepci/workflow.yaml
@@ -0,0 +1,25 @@
+version: "1.1"
+name: Full Test Suite
+env:
+  host: localhost:8081
+
+tests:
+  observable:
+    $ref: ./tests/stepci/observability.yaml#/observable
+  configurations:
+    $ref: ./tests/stepci/configurations.yaml#/tests/configurations
+  rendering:
+    $ref: ./tests/stepci/rendering.yaml#/rendering
+  processing:
+    $ref: ./tests/stepci/processing.yaml#/processing
+  dictionaries:
+    $ref: ./tests/stepci/dictionaries.yaml#/tests/dictionaries
+  custom_types:
+    $ref: ./tests/stepci/types.yaml#/tests/types
+  test_data:
+    $ref: ./tests/stepci/test_data.yaml#/tests/test_data
+  migrations:
+    $ref: ./tests/stepci/migrations.yaml#/tests/migrations
+  enumerators:
+    $ref: ./tests/stepci/enumerators.yaml#/tests/enumerators
+
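Editor's note: `workflow.yaml` stitches the suite together with JSON-pointer `$ref`s, and the fragment paths differ because the referenced files differ in shape. `observability.yaml`, `rendering.yaml`, and `processing.yaml` are rewritten above as bare root-level fragments (hence `#/observable`, `#/rendering`, `#/processing`), while files like `types.yaml` keep the full StepCI workflow wrapper (hence `#/tests/types`). A sketch of the two shapes being referenced (abridged, not the literal files):

```yaml
# Bare fragment (rendering.yaml) - referenced as #/rendering
rendering:
  name: Test Rendering Endpoints
  steps: []

# Full workflow (types.yaml) - referenced as #/tests/types
version: "1.1"
tests:
  types:
    name: Test Types Endpoints
    steps: []
```

Presumably the suite is run from the repository root (e.g. `stepci run tests/stepci/workflow.yaml`); that is an assumption — no runner invocation appears in this diff — but it would explain the `./tests/stepci/...` relative paths.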
diff --git a/tests/test_cases/complex_refs/collections/workshop.yaml b/tests/test_cases/complex_refs/configurations/workshop.yaml
similarity index 74%
rename from tests/test_cases/complex_refs/collections/workshop.yaml
rename to tests/test_cases/complex_refs/configurations/workshop.yaml
index 851a92e..517a71e 100644
--- a/tests/test_cases/complex_refs/collections/workshop.yaml
+++ b/tests/test_cases/complex_refs/configurations/workshop.yaml
@@ -1,4 +1,3 @@
-title: Workshop Collection
 description: A record of a workshop
 name: user
 versions:
diff --git a/tests/test_cases/complex_refs/dictionary/observation.1.0.0.yaml b/tests/test_cases/complex_refs/dictionaries/observation.1.0.0.yaml
similarity index 81%
rename from tests/test_cases/complex_refs/dictionary/observation.1.0.0.yaml
rename to tests/test_cases/complex_refs/dictionaries/observation.1.0.0.yaml
index d6eb46b..1d8f04e 100644
--- a/tests/test_cases/complex_refs/dictionary/observation.1.0.0.yaml
+++ b/tests/test_cases/complex_refs/dictionaries/observation.1.0.0.yaml
@@ -1,4 +1,3 @@
-title: Observation
 description: A workshop observation
 type: object
 properties:
@@ -7,7 +6,6 @@ properties:
     type: word
     required: true
 one_of:
-  type_property: exercise
   schemas:
     stakeholders:
       description: Stakeholders Exercise
@@ -17,7 +15,7 @@ one_of:
         description: Stakeholder Observations
         type: array
         items:
-          $ref: observation_stakeholder.1.0.0
+          ref: observation_stakeholder.1.0.0
     empathy:
       description: Empathy Mapping exercise
      type: object
@@ -36,7 +34,7 @@ one_of:
         description: Empathy Observations
         type: array
         items:
-          $ref: observation_empathy.1.0.0
+          ref: observation_empathy.1.0.0
     hills:
       description: Hills of the exercise
       type: object
@@ -45,7 +43,7 @@ one_of:
         description: Hills Observations
         type: array
         items:
-          $ref: observation_hills.1.0.0
+          ref: observation_hills.1.0.0
     priority:
       description: Priority of the exercise
       type: object
@@ -54,9 +52,9 @@ one_of:
         description: Priority Observations
         type: array
         items:
-          $ref: observation_priority.1.0.0
-        outcome:
-          $ref: observation_ranking.1.0.0
+          ref: observation_priority.1.0.0
+      outcome:
+        ref: observation_ranking.1.0.0
     retrospective:
       description: Retrospective of the exercise
       type: object
@@ -65,4 +63,4 @@ one_of:
         description: Retrospective Observations
         type: array
         items:
-          $ref: observation_retrospective.1.0.0
+          ref: observation_retrospective.1.0.0
diff --git a/tests/test_cases/complex_refs/dictionary/observation_empathy.1.0.0.yaml b/tests/test_cases/complex_refs/dictionaries/observation_empathy.1.0.0.yaml
similarity index 94%
rename from tests/test_cases/complex_refs/dictionary/observation_empathy.1.0.0.yaml
rename to tests/test_cases/complex_refs/dictionaries/observation_empathy.1.0.0.yaml
index dc861b0..773c45a 100644
--- a/tests/test_cases/complex_refs/dictionary/observation_empathy.1.0.0.yaml
+++ b/tests/test_cases/complex_refs/dictionaries/observation_empathy.1.0.0.yaml
@@ -1,4 +1,3 @@
-title: Empathy Observation
 description: A empathy of a Empathy Mapping exercise
 type: object
 properties:
diff --git a/tests/test_cases/complex_refs/dictionary/observation_hills.1.0.0.yaml b/tests/test_cases/complex_refs/dictionaries/observation_hills.1.0.0.yaml
similarity index 93%
rename from tests/test_cases/complex_refs/dictionary/observation_hills.1.0.0.yaml
rename to tests/test_cases/complex_refs/dictionaries/observation_hills.1.0.0.yaml
index 25db2ee..e9441cd 100644
--- a/tests/test_cases/complex_refs/dictionary/observation_hills.1.0.0.yaml
+++ b/tests/test_cases/complex_refs/dictionaries/observation_hills.1.0.0.yaml
@@ -1,4 +1,3 @@
-title: Hills Exercise Observation
 description: A hill of a Hills exercise
 type: object
 properties:
diff --git a/tests/test_cases/complex_refs/dictionary/observation_priority.1.0.0.yaml b/tests/test_cases/complex_refs/dictionaries/observation_priority.1.0.0.yaml
similarity index 92%
rename from
tests/test_cases/complex_refs/dictionary/observation_priority.1.0.0.yaml rename to tests/test_cases/complex_refs/dictionaries/observation_priority.1.0.0.yaml index 6b7d84a..9d05f78 100644 --- a/tests/test_cases/complex_refs/dictionary/observation_priority.1.0.0.yaml +++ b/tests/test_cases/complex_refs/dictionaries/observation_priority.1.0.0.yaml @@ -1,4 +1,3 @@ -title: Priority Exercise Observation description: A priority of a Priority exercise type: object properties: diff --git a/tests/test_cases/complex_refs/dictionary/observation_ranking.1.0.0.yaml b/tests/test_cases/complex_refs/dictionaries/observation_ranking.1.0.0.yaml similarity index 96% rename from tests/test_cases/complex_refs/dictionary/observation_ranking.1.0.0.yaml rename to tests/test_cases/complex_refs/dictionaries/observation_ranking.1.0.0.yaml index 32699ff..2cba08c 100644 --- a/tests/test_cases/complex_refs/dictionary/observation_ranking.1.0.0.yaml +++ b/tests/test_cases/complex_refs/dictionaries/observation_ranking.1.0.0.yaml @@ -1,4 +1,3 @@ -title: Ranking Outcome description: The outcome of a Priority exercise type: object properties: diff --git a/tests/test_cases/complex_refs/dictionary/observation_retrospective.1.0.0.yaml b/tests/test_cases/complex_refs/dictionaries/observation_retrospective.1.0.0.yaml similarity index 94% rename from tests/test_cases/complex_refs/dictionary/observation_retrospective.1.0.0.yaml rename to tests/test_cases/complex_refs/dictionaries/observation_retrospective.1.0.0.yaml index ebb7d79..4b8b4ec 100644 --- a/tests/test_cases/complex_refs/dictionary/observation_retrospective.1.0.0.yaml +++ b/tests/test_cases/complex_refs/dictionaries/observation_retrospective.1.0.0.yaml @@ -1,4 +1,3 @@ -title: Retrospective Observation description: A observation for a Retrospective exercise type: object properties: diff --git a/tests/test_cases/complex_refs/dictionary/observation_stakeholder.1.0.0.yaml b/tests/test_cases/complex_refs/dictionaries/observation_stakeholder.1.0.0.yaml similarity index 94% rename from tests/test_cases/complex_refs/dictionary/observation_stakeholder.1.0.0.yaml rename to tests/test_cases/complex_refs/dictionaries/observation_stakeholder.1.0.0.yaml index e7ae2ba..3824688 100644 --- a/tests/test_cases/complex_refs/dictionary/observation_stakeholder.1.0.0.yaml +++ b/tests/test_cases/complex_refs/dictionaries/observation_stakeholder.1.0.0.yaml @@ -1,4 +1,3 @@ -title: Observation Stakeholder description: A stakeholder of a Stakeholder exercise type: object properties: diff --git a/tests/test_cases/complex_refs/dictionary/workshop.1.0.0.yaml b/tests/test_cases/complex_refs/dictionaries/workshop.1.0.0.yaml similarity index 97% rename from tests/test_cases/complex_refs/dictionary/workshop.1.0.0.yaml rename to tests/test_cases/complex_refs/dictionaries/workshop.1.0.0.yaml index f13e27e..16543e6 100644 --- a/tests/test_cases/complex_refs/dictionary/workshop.1.0.0.yaml +++ b/tests/test_cases/complex_refs/dictionaries/workshop.1.0.0.yaml @@ -1,4 +1,3 @@ -title: Workshop description: A record of a specific design thinking workshop. 
type: object properties: @@ -59,7 +58,7 @@ properties: type: array required: true items: - $ref: observation.1.0.0 + ref: observation.1.0.0 last_saved: description: Last Saved breadcrumb type: breadcrumb diff --git a/tests/test_cases/complex_refs/expected/json_schema/workshop.1.0.0.1.yaml b/tests/test_cases/complex_refs/expected/json_schema/workshop.1.0.0.1.yaml index 70ac563..92d6748 100644 --- a/tests/test_cases/complex_refs/expected/json_schema/workshop.1.0.0.1.yaml +++ b/tests/test_cases/complex_refs/expected/json_schema/workshop.1.0.0.1.yaml @@ -368,7 +368,6 @@ properties: - by_user - at_time - correlation_id -title: Workshop required: - _id - status diff --git a/tests/test_cases/complex_refs/data/enumerators.json b/tests/test_cases/complex_refs/test_data/enumerators.json similarity index 100% rename from tests/test_cases/complex_refs/data/enumerators.json rename to tests/test_cases/complex_refs/test_data/enumerators.json diff --git a/tests/test_cases/complex_refs/data/workshops.1.0.0.1.json b/tests/test_cases/complex_refs/test_data/workshops.1.0.0.1.json similarity index 100% rename from tests/test_cases/complex_refs/data/workshops.1.0.0.1.json rename to tests/test_cases/complex_refs/test_data/workshops.1.0.0.1.json diff --git a/tests/test_cases/complex_refs/types/appointment.yaml b/tests/test_cases/complex_refs/types/appointment.yaml index 45e427b..2becace 100644 --- a/tests/test_cases/complex_refs/types/appointment.yaml +++ b/tests/test_cases/complex_refs/types/appointment.yaml @@ -1,12 +1,11 @@ -title: Appointment description: A date/time range type: object properties: from: - description: Starting Date-Time + description: Starting Date/Time type: date-time required: true to: - description: Ending Date-Time + description: Ending Date/Time type: date-time - required: true + required: true \ No newline at end of file diff --git a/tests/test_cases/complex_refs/types/breadcrumb.yaml b/tests/test_cases/complex_refs/types/breadcrumb.yaml index f61bc50..eb40fa6 100644 --- a/tests/test_cases/complex_refs/types/breadcrumb.yaml +++ b/tests/test_cases/complex_refs/types/breadcrumb.yaml @@ -1,4 +1,3 @@ -title: Breadcrumb description: A tracking breadcrumb type: object properties: diff --git a/tests/test_cases/complex_refs/types/count.yaml b/tests/test_cases/complex_refs/types/count.yaml index 57bf4ea..05d049a 100644 --- a/tests/test_cases/complex_refs/types/count.yaml +++ b/tests/test_cases/complex_refs/types/count.yaml @@ -1,4 +1,3 @@ -title: Count description: A positive integer value json_type: type: number diff --git a/tests/test_cases/complex_refs/types/date-time.yaml b/tests/test_cases/complex_refs/types/date-time.yaml index cfed5a3..223a4bd 100644 --- a/tests/test_cases/complex_refs/types/date-time.yaml +++ b/tests/test_cases/complex_refs/types/date-time.yaml @@ -1,4 +1,3 @@ -title: DateTime description: An ISO 8601 formatted date-time string json_type: type: string diff --git a/tests/test_cases/complex_refs/types/email.yaml b/tests/test_cases/complex_refs/types/email.yaml index 53613a8..d771770 100644 --- a/tests/test_cases/complex_refs/types/email.yaml +++ b/tests/test_cases/complex_refs/types/email.yaml @@ -1,5 +1,4 @@ -title: Email description: A valid email address schema: type: string - pattern: "^[a-zA-Z0-9._%+-]+@[a-zA-Z0-9.-]+\\.[a-zA-Z]{2,}$" \ No newline at end of file + pattern: "^[^\\s@]+@[^\\s@]+\\.[^\\s@]+$" \ No newline at end of file diff --git a/tests/test_cases/complex_refs/types/identifier.yaml b/tests/test_cases/complex_refs/types/identifier.yaml index 
fc4df48..4950c75 100644 --- a/tests/test_cases/complex_refs/types/identifier.yaml +++ b/tests/test_cases/complex_refs/types/identifier.yaml @@ -1,4 +1,3 @@ -title: Identifier description: A unique identifier for a document json_type: type: string diff --git a/tests/test_cases/complex_refs/types/index.yaml b/tests/test_cases/complex_refs/types/index.yaml index 9eea5eb..1152e38 100644 --- a/tests/test_cases/complex_refs/types/index.yaml +++ b/tests/test_cases/complex_refs/types/index.yaml @@ -1,4 +1,3 @@ -title: Index description: A zero-based array index json_type: type: number diff --git a/tests/test_cases/complex_refs/types/ip_address.yaml b/tests/test_cases/complex_refs/types/ip_address.yaml index 80f7171..28ba551 100644 --- a/tests/test_cases/complex_refs/types/ip_address.yaml +++ b/tests/test_cases/complex_refs/types/ip_address.yaml @@ -1,5 +1,4 @@ -title: IP Address description: A valid IP Address schema: type: string - pattern: "^\\d{1,3}\\.\\d{1,3}\\.\\d{1,3}\\.\\d{1,3}$" + pattern: "^((25[0-5]|2[0-4][0-9]|[01]?[0-9][0-9]?)\\.){3}(25[0-5]|2[0-4][0-9]|[01]?[0-9][0-9]?)$" diff --git a/tests/test_cases/complex_refs/types/markdown.yaml b/tests/test_cases/complex_refs/types/markdown.yaml index 60fd1ee..bf39557 100644 --- a/tests/test_cases/complex_refs/types/markdown.yaml +++ b/tests/test_cases/complex_refs/types/markdown.yaml @@ -1,5 +1,4 @@ -title: Markdown description: A String of text, at least 1 and no more than 4k characters. May contain markdown, newlines, and tabs. schema: type: string - pattern: "[\\s\\S]{1,4096}" \ No newline at end of file + maxLength: 4096 \ No newline at end of file diff --git a/tests/test_cases/complex_refs/types/sentence.yaml b/tests/test_cases/complex_refs/types/sentence.yaml index 0a902a6..aad5494 100644 --- a/tests/test_cases/complex_refs/types/sentence.yaml +++ b/tests/test_cases/complex_refs/types/sentence.yaml @@ -1,5 +1,4 @@ -title: Sentence -description: A String of text, at least 1 and no more than 255 characters with no special characters +description: A String of text, 0 to 255 characters with no special characters schema: type: string - pattern: "^[^\\t\\n\\r]{1,255}$" \ No newline at end of file + pattern: "^[^\\t\\n\\r]{0,255}$" \ No newline at end of file diff --git a/tests/test_cases/complex_refs/types/state_code.yaml b/tests/test_cases/complex_refs/types/state_code.yaml index 2b8b3d6..b04f1c6 100644 --- a/tests/test_cases/complex_refs/types/state_code.yaml +++ b/tests/test_cases/complex_refs/types/state_code.yaml @@ -1,4 +1,3 @@ -title: State code description: A two character state code schema: type: string diff --git a/tests/test_cases/complex_refs/types/street_address.yaml b/tests/test_cases/complex_refs/types/street_address.yaml index 8ff59a9..4630dc1 100644 --- a/tests/test_cases/complex_refs/types/street_address.yaml +++ b/tests/test_cases/complex_refs/types/street_address.yaml @@ -1,4 +1,3 @@ -title: Street Address description: A street address type: object properties: diff --git a/tests/test_cases/complex_refs/types/url.yaml b/tests/test_cases/complex_refs/types/url.yaml index 1c35e86..f50de06 100644 --- a/tests/test_cases/complex_refs/types/url.yaml +++ b/tests/test_cases/complex_refs/types/url.yaml @@ -1,5 +1,4 @@ -title: URL description: A valid URL schema: type: string - pattern: "^https?://[\\w\\d\\-]+(\\.[\\w\\d\\-]+)+([\\w\\d\\-._~:/?#\\[\\]@!$&'()*+,;=]*)?$" + pattern: "^https?://[^\\s]+$" diff --git a/tests/test_cases/complex_refs/types/us_phone.yaml b/tests/test_cases/complex_refs/types/us_phone.yaml index 860e1fb..833ede6 
100644
--- a/tests/test_cases/complex_refs/types/us_phone.yaml
+++ b/tests/test_cases/complex_refs/types/us_phone.yaml
@@ -1,4 +1,3 @@
-title: US Phone
 description: A US phone number in E.164 format
 schema:
   type: string
diff --git a/tests/test_cases/complex_refs/types/word.yaml b/tests/test_cases/complex_refs/types/word.yaml
index 5d05367..af8dbd3 100644
--- a/tests/test_cases/complex_refs/types/word.yaml
+++ b/tests/test_cases/complex_refs/types/word.yaml
@@ -1,5 +1,4 @@
-title: Word
-description: A String of text, at least 1 and no more than 40 characters with no spaces, or special characters like /t or /n
+description: A String of text, 1 to 40 characters with no spaces or special characters like \t or \n
 schema:
   type: string
   pattern: "^\\S{1,40}$"
diff --git a/tests/test_cases/config_files/api_config/API_CONFIG_FOLDER b/tests/test_cases/config_files/api_config/API_CONFIG_FOLDER
new file mode 100644
index 0000000..7285e35
--- /dev/null
+++ b/tests/test_cases/config_files/api_config/API_CONFIG_FOLDER
@@ -0,0 +1 @@
+TEST_VALUE
\ No newline at end of file
diff --git a/tests/test_cases/config_files/api_config/API_PORT b/tests/test_cases/config_files/api_config/API_PORT
new file mode 100644
index 0000000..e83e892
--- /dev/null
+++ b/tests/test_cases/config_files/api_config/API_PORT
@@ -0,0 +1 @@
+9999
\ No newline at end of file
diff --git a/tests/test_cases/config_files/api_config/AUTO_PROCESS b/tests/test_cases/config_files/api_config/AUTO_PROCESS
new file mode 100644
index 0000000..f32a580
--- /dev/null
+++ b/tests/test_cases/config_files/api_config/AUTO_PROCESS
@@ -0,0 +1 @@
+true
\ No newline at end of file
diff --git a/tests/test_cases/config_files/api_config/CONFIGURATION_FOLDER b/tests/test_cases/config_files/api_config/CONFIGURATION_FOLDER
new file mode 100644
index 0000000..93befde
--- /dev/null
+++ b/tests/test_cases/config_files/api_config/CONFIGURATION_FOLDER
@@ -0,0 +1 @@
+TEST_VALUE
\ No newline at end of file
diff --git a/tests/test_cases/config_files/api_config/DICTIONARY_FOLDER b/tests/test_cases/config_files/api_config/DICTIONARY_FOLDER
new file mode 100644
index 0000000..93befde
--- /dev/null
+++ b/tests/test_cases/config_files/api_config/DICTIONARY_FOLDER
@@ -0,0 +1 @@
+TEST_VALUE
\ No newline at end of file
diff --git a/tests/test_cases/config_files/api_config/ENABLE_DROP_DATABASE b/tests/test_cases/config_files/api_config/ENABLE_DROP_DATABASE
new file mode 100644
index 0000000..f32a580
--- /dev/null
+++ b/tests/test_cases/config_files/api_config/ENABLE_DROP_DATABASE
@@ -0,0 +1 @@
+true
\ No newline at end of file
diff --git a/tests/test_cases/config_files/api_config/ENUMERATORS_COLLECTION_NAME b/tests/test_cases/config_files/api_config/ENUMERATORS_COLLECTION_NAME
new file mode 100644
index 0000000..93befde
--- /dev/null
+++ b/tests/test_cases/config_files/api_config/ENUMERATORS_COLLECTION_NAME
@@ -0,0 +1 @@
+TEST_VALUE
\ No newline at end of file
diff --git a/tests/test_cases/config_files/api_config/EXIT_AFTER_PROCESSING b/tests/test_cases/config_files/api_config/EXIT_AFTER_PROCESSING
new file mode 100644
index 0000000..f32a580
--- /dev/null
+++ b/tests/test_cases/config_files/api_config/EXIT_AFTER_PROCESSING
@@ -0,0 +1 @@
+true
\ No newline at end of file
diff --git a/tests/test_cases/config_files/api_config/LOAD_TEST_DATA b/tests/test_cases/config_files/api_config/LOAD_TEST_DATA
new file mode 100644
index 0000000..f32a580
--- /dev/null
+++ b/tests/test_cases/config_files/api_config/LOAD_TEST_DATA
@@ -0,0 +1 @@
+true
\ No newline at end of file
diff --git
a/tests/test_cases/config_files/api_config/MIGRATIONS_FOLDER b/tests/test_cases/config_files/api_config/MIGRATIONS_FOLDER new file mode 100644 index 0000000..7285e35 --- /dev/null +++ b/tests/test_cases/config_files/api_config/MIGRATIONS_FOLDER @@ -0,0 +1 @@ +TEST_VALUE \ No newline at end of file diff --git a/tests/test_cases/config_files/api_config/MONGO_CONNECTION_STRING b/tests/test_cases/config_files/api_config/MONGO_CONNECTION_STRING new file mode 100644 index 0000000..93befde --- /dev/null +++ b/tests/test_cases/config_files/api_config/MONGO_CONNECTION_STRING @@ -0,0 +1 @@ +TEST_VALUE \ No newline at end of file diff --git a/tests/test_cases/config_files/api_config/MONGO_DB_NAME b/tests/test_cases/config_files/api_config/MONGO_DB_NAME new file mode 100644 index 0000000..93befde --- /dev/null +++ b/tests/test_cases/config_files/api_config/MONGO_DB_NAME @@ -0,0 +1 @@ +TEST_VALUE \ No newline at end of file diff --git a/tests/test_cases/config_files/api_config/RENDER_STACK_MAX_DEPTH b/tests/test_cases/config_files/api_config/RENDER_STACK_MAX_DEPTH new file mode 100644 index 0000000..745d5a8 --- /dev/null +++ b/tests/test_cases/config_files/api_config/RENDER_STACK_MAX_DEPTH @@ -0,0 +1 @@ +9999 \ No newline at end of file diff --git a/tests/test_cases/config_files/api_config/SPA_PORT b/tests/test_cases/config_files/api_config/SPA_PORT new file mode 100644 index 0000000..e83e892 --- /dev/null +++ b/tests/test_cases/config_files/api_config/SPA_PORT @@ -0,0 +1 @@ +9999 \ No newline at end of file diff --git a/tests/test_cases/config_files/api_config/TEMPLATE_FOLDER b/tests/test_cases/config_files/api_config/TEMPLATE_FOLDER new file mode 100644 index 0000000..7285e35 --- /dev/null +++ b/tests/test_cases/config_files/api_config/TEMPLATE_FOLDER @@ -0,0 +1 @@ +TEST_VALUE \ No newline at end of file diff --git a/tests/test_cases/config_files/api_config/TEST_DATA_FOLDER b/tests/test_cases/config_files/api_config/TEST_DATA_FOLDER new file mode 100644 index 0000000..93befde --- /dev/null +++ b/tests/test_cases/config_files/api_config/TEST_DATA_FOLDER @@ -0,0 +1 @@ +TEST_VALUE \ No newline at end of file diff --git a/tests/test_cases/config_files/api_config/TYPE_FOLDER b/tests/test_cases/config_files/api_config/TYPE_FOLDER new file mode 100644 index 0000000..93befde --- /dev/null +++ b/tests/test_cases/config_files/api_config/TYPE_FOLDER @@ -0,0 +1 @@ +TEST_VALUE \ No newline at end of file diff --git a/tests/test_cases/config_files/api_config/VERSION_COLLECTION_NAME b/tests/test_cases/config_files/api_config/VERSION_COLLECTION_NAME new file mode 100644 index 0000000..93befde --- /dev/null +++ b/tests/test_cases/config_files/api_config/VERSION_COLLECTION_NAME @@ -0,0 +1 @@ +TEST_VALUE \ No newline at end of file diff --git a/tests/test_cases/large_sample/api_config/BUILT_AT b/tests/test_cases/large_sample/api_config/BUILT_AT new file mode 100644 index 0000000..28ca4ab --- /dev/null +++ b/tests/test_cases/large_sample/api_config/BUILT_AT @@ -0,0 +1 @@ +Local \ No newline at end of file diff --git a/tests/test_cases/large_sample/api_config/ENABLE_DROP_DATABASE b/tests/test_cases/large_sample/api_config/ENABLE_DROP_DATABASE new file mode 100644 index 0000000..6707ef5 --- /dev/null +++ b/tests/test_cases/large_sample/api_config/ENABLE_DROP_DATABASE @@ -0,0 +1 @@ +true \ No newline at end of file diff --git a/tests/test_cases/large_sample/api_config/LOAD_TEST_DATA b/tests/test_cases/large_sample/api_config/LOAD_TEST_DATA new file mode 100644 index 0000000..6707ef5 --- /dev/null +++ 
b/tests/test_cases/large_sample/api_config/LOAD_TEST_DATA @@ -0,0 +1 @@ +true \ No newline at end of file diff --git a/tests/test_cases/large_sample/configurations/content.yaml b/tests/test_cases/large_sample/configurations/content.yaml new file mode 100644 index 0000000..3d6f54a --- /dev/null +++ b/tests/test_cases/large_sample/configurations/content.yaml @@ -0,0 +1,30 @@ +title: Content Collection +description: Collection for managing various types of content +name: content +versions: + - version: "1.0.0.1" + add_indexes: + - name: contentTypeIndex + key: + content_type: 1 + options: + unique: false + - name: statusIndex + key: + status: 1 + options: + unique: false + test_data: content.1.0.0.1.json + - version: "1.0.0.2" + add_indexes: + - name: authorIndex + key: + author_id: 1 + options: + unique: false + test_data: content.1.0.0.2.json + - version: "1.0.1.3" + migrations: + - "content_merge_content_fields.json" + test_data: content.1.0.1.3.json + \ No newline at end of file diff --git a/tests/test_cases/large_sample/collections/media.yaml b/tests/test_cases/large_sample/configurations/media.yaml similarity index 100% rename from tests/test_cases/large_sample/collections/media.yaml rename to tests/test_cases/large_sample/configurations/media.yaml diff --git a/tests/test_cases/large_sample/configurations/notification.yaml b/tests/test_cases/large_sample/configurations/notification.yaml new file mode 100644 index 0000000..5a4e667 --- /dev/null +++ b/tests/test_cases/large_sample/configurations/notification.yaml @@ -0,0 +1,29 @@ +title: Notification Collection +description: Collection for managing user notifications +name: notification +versions: + - version: "1.0.0.1" + add_indexes: + - name: userIndex + key: + user_id: 1 + options: + unique: false + - name: statusIndex + key: + status: 1 + options: + unique: false + test_data: notification.1.0.0.1.json + - version: "1.0.0.2" + add_indexes: + - name: typeIndex + key: + notification_type: 1 + options: + unique: false + test_data: notification.1.0.0.2.json + - version: "1.0.1.3" + migrations: + - "notification_merge_notification_data.json" + test_data: notification.1.0.1.3.json \ No newline at end of file diff --git a/tests/test_cases/large_sample/collections/organization.yaml b/tests/test_cases/large_sample/configurations/organization.yaml similarity index 100% rename from tests/test_cases/large_sample/collections/organization.yaml rename to tests/test_cases/large_sample/configurations/organization.yaml diff --git a/tests/test_cases/large_sample/collections/search.yaml b/tests/test_cases/large_sample/configurations/search.yaml similarity index 100% rename from tests/test_cases/large_sample/collections/search.yaml rename to tests/test_cases/large_sample/configurations/search.yaml diff --git a/tests/test_cases/large_sample/collections/user.yaml b/tests/test_cases/large_sample/configurations/user.yaml similarity index 69% rename from tests/test_cases/large_sample/collections/user.yaml rename to tests/test_cases/large_sample/configurations/user.yaml index 5581b4e..c093015 100644 --- a/tests/test_cases/large_sample/collections/user.yaml +++ b/tests/test_cases/large_sample/configurations/user.yaml @@ -25,13 +25,6 @@ versions: - statusIndex test_data: user.1.0.0.2.json - version: "1.0.1.3" - aggregations: - - name: "merge_name_fields" - pipeline: - - $addFields: - full_name: { $concat: ["$first_name", " ", "$last_name"] } - - $unset: - - "first_name" - - "last_name" - - $out: "user" + migrations: + - "user_merge_name_fields.json" test_data: 
user.1.0.1.3.json diff --git a/tests/test_cases/large_sample/dictionaries/content.1.0.0.yaml b/tests/test_cases/large_sample/dictionaries/content.1.0.0.yaml new file mode 100644 index 0000000..e8e7e51 --- /dev/null +++ b/tests/test_cases/large_sample/dictionaries/content.1.0.0.yaml @@ -0,0 +1,99 @@ +title: Content Collection +description: A simple content collection for testing +type: object +properties: + _id: + description: The unique identifier for content + type: identity + required: true + title: + description: Content title + type: word + required: true + subtitle: + description: Content subtitle + type: sentence + content_type: + description: Type of content (article, video, podcast, etc.) + type: word + required: true + status: + description: Content status + type: enum + enums: default_status + required: true + author_id: + description: Reference to author in user collection + ref: user.1.0.0.yaml + content_data: + description: The actual content data + type: object + required: true + properties: + body: + description: Article body text + type: markdown + url: + description: Video URL + type: url + audio_url: + description: Audio file URL + type: url + transcript: + description: Podcast transcript + type: markdown + tags: + description: Content tags + type: array + items: + type: word + word_count: + description: Article word count + type: count + duration: + description: Video duration in seconds + type: count + quality: + description: Video quality + type: word + episode_number: + description: Episode number + type: count + metadata: + description: Additional metadata for the content + type: object + properties: + created_at: + description: When the content was created + type: date-time + required: true + updated_at: + description: When the content was last updated + type: date-time + published_at: + description: When the content was published + type: date-time + categories: + description: Content categories + type: array + items: + description: A content category + type: object + properties: + name: + description: Category name + type: word + required: true + type: + description: Category type + type: word + required: true + tags: + description: Category tags + type: array + items: + type: word + last_saved: + description: Last saved breadcrumb + type: breadcrumb + required: true \ No newline at end of file diff --git a/tests/test_cases/large_sample/dictionaries/content.1.0.1.yaml b/tests/test_cases/large_sample/dictionaries/content.1.0.1.yaml new file mode 100644 index 0000000..4b7e369 --- /dev/null +++ b/tests/test_cases/large_sample/dictionaries/content.1.0.1.yaml @@ -0,0 +1,120 @@ +title: Content Collection +description: A content collection for testing one_of structures and advanced schema features +type: object +properties: + _id: + description: The unique identifier for content + type: identity + required: true + title: + description: Content title + type: word + required: true + subtitle: + description: Content subtitle + type: sentence + content_type: + description: Type of content (article, video, podcast, etc.) 
+ type: enum + enums: content_type + required: true + status: + description: Content status + type: enum + enums: default_status + required: true + author_id: + description: Reference to author in user collection + ref: user.1.0.0.yaml + full_title: + description: Full title including subtitle + type: sentence + content_data: + description: The actual content data - varies by type + type: object + required: true + one_of: + type_property: content_type + schemas: + article: + description: Article content structure + type: object + properties: + body: + description: Article body text + type: markdown + required: true + tags: + description: Article tags + type: enum_array + enums: content_tags + word_count: + description: Article word count + type: count + video: + description: Video content structure + type: object + properties: + url: + description: Video URL + type: url + required: true + duration: + description: Video duration in seconds + type: count + quality: + description: Video quality + type: enum + enums: media_quality + podcast: + description: Podcast content structure + type: object + properties: + audio_url: + description: Audio file URL + type: url + required: true + transcript: + description: Podcast transcript + type: markdown + episode_number: + description: Episode number + type: count + metadata: + description: Additional metadata for the content + type: object + properties: + created_at: + description: When the content was created + type: date-time + required: true + updated_at: + description: When the content was last updated + type: date-time + published_at: + description: When the content was published + type: date-time + categories: + description: Content categories + type: array + items: + description: A content category + type: object + properties: + name: + description: Category name + type: word + required: true + type: + description: Category type + type: enum + enums: category_type + required: true + tags: + description: Category tags + type: enum_array + enums: category_tags + last_saved: + description: Last saved breadcrumb + type: breadcrumb + required: true \ No newline at end of file diff --git a/tests/test_cases/large_sample/dictionaries/media.1.0.0.yaml b/tests/test_cases/large_sample/dictionaries/media.1.0.0.yaml new file mode 100644 index 0000000..a7c8b43 --- /dev/null +++ b/tests/test_cases/large_sample/dictionaries/media.1.0.0.yaml @@ -0,0 +1,107 @@ +title: Media +description: A media item in the system +type: object +properties: + _id: + description: The unique identifier for the media + type: identity + required: true + title: + description: The title of the media + type: sentence + required: true + type: + description: The type of media + type: enum + enums: media_type + status: + description: The current status of the media + type: enum + enums: media_status + required: true + last_saved: + description: When the media was last updated + type: breadcrumb + required: true + tags: + description: Tags associated with the media + type: enum_array + enums: media_tags + metadata: + description: Additional metadata about the media + type: object + properties: + duration: + description: Duration in minutes + type: count + format: + description: Media format + type: enum + enums: media_format + quality: + description: Quality rating + type: enum + enums: media_quality + content_data: + description: Media-specific content data + type: object + one_of: + type_property: type + schemas: + movie: + description: Movie-specific metadata + type: object + 
properties: + director: + description: Movie director + type: word + required: true + cast: + description: Movie cast members + type: array + items: + description: Cast member + type: word + genre: + description: Movie genre + type: enum_array + enums: media_tags + tv_show: + description: TV show-specific metadata + type: object + properties: + season: + description: Season number + type: count + required: true + episode: + description: Episode number + type: count + required: true + network: + description: Broadcasting network + type: word + documentary: + description: Documentary-specific metadata + type: object + properties: + subject: + description: Documentary subject + type: sentence + required: true + narrator: + description: Documentary narrator + type: word + short: + description: Short film-specific metadata + type: object + properties: + festival: + description: Film festival + type: word + awards: + description: Awards received + type: array + items: + description: Award name + type: word \ No newline at end of file diff --git a/tests/test_cases/large_sample/dictionaries/notification.1.0.0.yaml b/tests/test_cases/large_sample/dictionaries/notification.1.0.0.yaml new file mode 100644 index 0000000..d68aaae --- /dev/null +++ b/tests/test_cases/large_sample/dictionaries/notification.1.0.0.yaml @@ -0,0 +1,29 @@ +title: Notification Collection +description: A simple notification collection for testing +type: object +properties: + _id: + description: The unique identifier for notification + type: identity + required: true + user_id: + description: Reference to user receiving the notification + ref: user.1.0.0.yaml + required: true + title: + description: Notification title + type: word + required: true + message: + description: Notification message + type: sentence + required: true + status: + description: Notification status + type: enum + enums: default_status + required: true + last_saved: + description: Last saved breadcrumb + type: breadcrumb + required: true \ No newline at end of file diff --git a/tests/test_cases/large_sample/dictionaries/notification.1.0.1.yaml b/tests/test_cases/large_sample/dictionaries/notification.1.0.1.yaml new file mode 100644 index 0000000..2662e8c --- /dev/null +++ b/tests/test_cases/large_sample/dictionaries/notification.1.0.1.yaml @@ -0,0 +1,85 @@ +title: Notification Collection +description: A notification collection for testing enum_array and cross-collection references +type: object +properties: + _id: + description: The unique identifier for notification + type: identity + required: true + user_id: + description: Reference to user receiving the notification + ref: user.1.0.0.yaml + required: true + title: + description: Notification title + type: word + required: true + message: + description: Notification message + type: sentence + required: true + notification_type: + description: Type of notification + type: enum + enums: notification_type + required: true + status: + description: Notification status + type: enum + enums: default_status + required: true + priority: + description: Notification priority level + type: enum + enums: priority_level + required: true + tags: + description: Notification tags for categorization + type: enum_array + enums: notification_tags + categories: + description: Notification categories + type: enum_array + enums: category_type + channels: + description: Delivery channels for this notification + type: enum_array + enums: delivery_channel + required: true + metadata: + description: Additional notification 
metadata + type: object + properties: + created_at: + description: When the notification was created + type: date-time + required: true + sent_at: + description: When the notification was sent + type: date-time + read_at: + description: When the notification was read + type: date-time + expires_at: + description: When the notification expires + type: date-time + source: + description: Source of the notification + type: object + properties: + collection: + description: Source collection name + type: word + required: true + document_id: + description: Source document ID + type: identifier + required: true + action: + description: Action that triggered the notification + type: enum + enums: notification_action + last_saved: + description: Last saved breadcrumb + type: breadcrumb + required: true \ No newline at end of file diff --git a/tests/test_cases/large_sample/dictionary/organization.1.0.0.yaml b/tests/test_cases/large_sample/dictionaries/organization.1.0.0.yaml similarity index 100% rename from tests/test_cases/large_sample/dictionary/organization.1.0.0.yaml rename to tests/test_cases/large_sample/dictionaries/organization.1.0.0.yaml diff --git a/tests/test_cases/large_sample/dictionary/search.1.0.0.yaml b/tests/test_cases/large_sample/dictionaries/search.1.0.0.yaml similarity index 78% rename from tests/test_cases/large_sample/dictionary/search.1.0.0.yaml rename to tests/test_cases/large_sample/dictionaries/search.1.0.0.yaml index af00cd3..5eacee5 100644 --- a/tests/test_cases/large_sample/dictionary/search.1.0.0.yaml +++ b/tests/test_cases/large_sample/dictionaries/search.1.0.0.yaml @@ -11,12 +11,11 @@ properties: type: identity required: true one_of: - type_property: collection_name schemas: media: - $ref: media.1.0.0 + ref: media.1.0.0.yaml organization: - $ref: organization.1.0.0 + ref: organization.1.0.0.yaml user: - $ref: user.1.0.0 + ref: user.1.0.0.yaml \ No newline at end of file diff --git a/tests/test_cases/large_sample/dictionary/search.1.0.1.yaml b/tests/test_cases/large_sample/dictionaries/search.1.0.1.yaml similarity index 78% rename from tests/test_cases/large_sample/dictionary/search.1.0.1.yaml rename to tests/test_cases/large_sample/dictionaries/search.1.0.1.yaml index 00ad4b8..fd69626 100644 --- a/tests/test_cases/large_sample/dictionary/search.1.0.1.yaml +++ b/tests/test_cases/large_sample/dictionaries/search.1.0.1.yaml @@ -11,12 +11,11 @@ properties: type: identity required: true one_of: - type_property: collection_name schemas: media: - $ref: media.1.0.0 + ref: media.1.0.0.yaml organization: - $ref: organization.1.0.0 + ref: organization.1.0.0.yaml user: - $ref: user.1.0.1 + ref: user.1.0.1.yaml \ No newline at end of file diff --git a/tests/test_cases/large_sample/dictionary/user.1.0.0.yaml b/tests/test_cases/large_sample/dictionaries/user.1.0.0.yaml similarity index 100% rename from tests/test_cases/large_sample/dictionary/user.1.0.0.yaml rename to tests/test_cases/large_sample/dictionaries/user.1.0.0.yaml diff --git a/tests/test_cases/large_sample/dictionary/user.1.0.1.yaml b/tests/test_cases/large_sample/dictionaries/user.1.0.1.yaml similarity index 64% rename from tests/test_cases/large_sample/dictionary/user.1.0.1.yaml rename to tests/test_cases/large_sample/dictionaries/user.1.0.1.yaml index 385b22b..3a6e6d4 100644 --- a/tests/test_cases/large_sample/dictionary/user.1.0.1.yaml +++ b/tests/test_cases/large_sample/dictionaries/user.1.0.1.yaml @@ -39,6 +39,28 @@ properties: type: enum_array enums: category_tags required: true + preferences: + description: User 
preferences and settings + type: object + properties: + notification_types: + description: Types of notifications the user wants to receive + type: enum_array + enums: notification_type + required: true + delivery_channels: + description: Preferred delivery channels for notifications + type: enum_array + enums: delivery_channel + required: true + content_tags: + description: Content tags the user is interested in + type: enum_array + enums: content_tags + priority_levels: + description: Priority levels the user wants to receive + type: enum_array + enums: priority_level email: description: The person's email address type: email diff --git a/tests/test_cases/large_sample/dictionary/media.1.0.0.yaml b/tests/test_cases/large_sample/dictionary/media.1.0.0.yaml deleted file mode 100644 index 6d3046c..0000000 --- a/tests/test_cases/large_sample/dictionary/media.1.0.0.yaml +++ /dev/null @@ -1,44 +0,0 @@ -title: Media -description: A media item in the system -type: object -properties: - _id: - description: The unique identifier for the media - type: identity - required: true - title: - description: The title of the media - type: sentence - required: true - type: - description: The type of media - type: enum - enums: media_type - status: - description: The current status of the media - type: enum - enums: media_status - required: true - last_saved: - description: When the media was last updated - type: breadcrumb - required: true - tags: - description: Tags associated with the media - type: enum_array - enums: media_tags - metadata: - description: Additional metadata about the media - type: object - properties: - duration: - description: Duration in minutes - type: count - format: - description: Media format - type: enum - enums: media_format - quality: - description: Quality rating - type: enum - enums: media_quality \ No newline at end of file diff --git a/tests/test_cases/large_sample/expected/bson_schema/media.1.0.0.1.json b/tests/test_cases/large_sample/expected/bson_schema/media.1.0.0.1.json deleted file mode 100644 index ad93943..0000000 --- a/tests/test_cases/large_sample/expected/bson_schema/media.1.0.0.1.json +++ /dev/null @@ -1,86 +0,0 @@ -{ - "description": "A media item in the system", - "bsonType": "object", - "additionalProperties": false, - "properties": { - "_id": { - "bsonType": "objectId", - "description": "The unique identifier for the media" - }, - "title": { - "pattern": "^[a-zA-Z0-9\\s.,!?-]{4,255}$", - "bsonType": "string", - "description": "The title of the media" - }, - "type": { - "description": "The type of media", - "bsonType": "string", - "enum": ["movie", "tv_show", "documentary", "short"] - }, - "status": { - "description": "The current status of the media", - "bsonType": "string", - "enum": ["draft", "published", "archived"] - }, - "last_saved": { - "title": "Breadcrumb", - "description": "When the media was last updated", - "bsonType": "object", - "additionalProperties": false, - "properties": { - "from_ip": { - "description": "Http Request remote IP address", - "bsonType": "string", - "pattern": "^[^\\s]{4,40}$" - }, - "by_user": { - "description": "ID Of User", - "bsonType": "string", - "pattern": "^[^\\s]{4,40}$" - }, - "at_time": { - "description": "The date-time when last updated", - "bsonType": "date" - }, - "correlation_id": { - "description": "The logging correlation ID of the update transaction", - "bsonType": "string", - "pattern": "^[^\\s]{4,40}$" - } - }, - "required": ["from_ip", "by_user", "at_time", "correlation_id"] - }, - "tags": { - "description": 
"Tags associated with the media", - "bsonType": "array", - "items": { - "bsonType": "string", - "enum": ["action", "comedy", "drama", "sci_fi", "documentary"] - } - }, - "metadata": { - "description": "Additional metadata about the media", - "bsonType": "object", - "properties": { - "duration": { - "description": "Duration in minutes", - "bsonType": "int", - "minimum": 1 - }, - "format": { - "description": "Media format", - "bsonType": "string", - "enum": ["dvd", "bluray", "digital", "streaming"] - }, - "quality": { - "description": "Quality rating", - "bsonType": "string", - "enum": ["sd", "hd", "uhd"] - } - }, - "additionalProperties": false - } - }, - "required": ["_id", "title", "status", "last_saved"], - "title": "Media" -} diff --git a/tests/test_cases/large_sample/expected/bson_schema/organization.1.0.0.1.json b/tests/test_cases/large_sample/expected/bson_schema/organization.1.0.0.1.json deleted file mode 100644 index e8794c3..0000000 --- a/tests/test_cases/large_sample/expected/bson_schema/organization.1.0.0.1.json +++ /dev/null @@ -1,93 +0,0 @@ -{ - "title": "Organization", - "description": "An organization in the system", - "bsonType": "object", - "properties": { - "_id": { - "description": "The unique identifier for the organization", - "bsonType": "objectId" - }, - "name": { - "description": "The organization name", - "bsonType": "string", - "pattern": "^[a-zA-Z0-9\\s.,!?-]{4,255}$" - }, - "status": { - "description": "The current status of the organization", - "bsonType": "string", - "enum": ["active", "archived"] - }, - "last_saved": { - "title": "Breadcrumb", - "description": "When the organization document was last updated", - "bsonType": "object", - "properties": { - "from_ip": { - "description": "Http Request remote IP address", - "bsonType": "string", - "pattern": "^[^\\s]{4,40}$" - }, - "by_user": { - "description": "ID Of User", - "bsonType": "string", - "pattern": "^[^\\s]{4,40}$" - }, - "at_time": { - "description": "The date-time when last updated", - "bsonType": "date" - }, - "correlation_id": { - "description": "The logging correlation ID of the update transaction", - "bsonType": "string", - "pattern": "^[^\\s]{4,40}$" - } - }, - "additionalProperties": false, - "required": ["from_ip", "by_user", "at_time", "correlation_id"] - }, - "users": { - "description": "Users associated with this organization", - "bsonType": "array", - "items": { - "description": "A user identifier", - "bsonType": "objectId" - } - }, - "website": { - "description": "Organization website", - "bsonType": "string", - "pattern": "^https?://[\\w\\d\\-]+(\\.[\\w\\d\\-]+)+([\\w\\d\\-._~:/?#\\[\\]@!$&'()*+,;=]*)?$" - }, - "home_address": { - "title": "Street Address", - "description": "Organization home address", - "bsonType": "object", - "properties": { - "street": { - "description": "Street address", - "bsonType": "string", - "pattern": "^[a-zA-Z0-9\\s.,!?-]{4,255}$" - }, - "city": { - "description": "City", - "bsonType": "string", - "pattern": "^[^\\s]{4,40}$" - }, - "state": { - "description": "State or province", - "bsonType": "string", - "pattern": "^[A-Z]{2}$" - }, - "postal_code": { - "description": "Postal code", - "bsonType": "string", - "pattern": "^[^\\s]{4,40}$" - } - }, - "additionalProperties": false, - "required": ["postal_code"] - } - }, - "additionalProperties": false, - "required": ["_id", "name", "status", "last_saved"] -} diff --git a/tests/test_cases/large_sample/expected/bson_schema/search.1.0.0.1.json b/tests/test_cases/large_sample/expected/bson_schema/search.1.0.0.1.json 
deleted file mode 100644 index 90d1bd9..0000000 --- a/tests/test_cases/large_sample/expected/bson_schema/search.1.0.0.1.json +++ /dev/null @@ -1,258 +0,0 @@ -{ - "title": "search", - "description": "A search index that is used to support a elastic search polymorphic query service", - "bsonType": "object", - "properties": { - "collection_name": { - "description": "The name of the collection", - "bsonType": "string", - "pattern": "^[^\\s]{4,40}$" - }, - "collection_id": { - "description": "The unique identifier for this source document", - "bsonType": "objectId" - }, - "media": { - "title": "Media", - "description": "A media item in the system", - "bsonType": "object", - "properties": { - "_id": { - "description": "The unique identifier for the media", - "bsonType": "objectId" - }, - "title": { - "description": "The title of the media", - "bsonType": "string", - "pattern": "^[a-zA-Z0-9\\s.,!?-]{4,255}$" - }, - "type": { - "description": "The type of media", - "bsonType": "string", - "enum": ["movie", "tv_show", "documentary", "short"] - }, - "status": { - "description": "The current status of the media", - "bsonType": "string", - "enum": ["draft", "published", "archived"] - }, - "last_saved": { - "title": "Breadcrumb", - "description": "When the media was last updated", - "bsonType": "object", - "properties": { - "from_ip": { - "description": "Http Request remote IP address", - "bsonType": "string", - "pattern": "^[^\\s]{4,40}$" - }, - "by_user": { - "description": "ID Of User", - "bsonType": "string", - "pattern": "^[^\\s]{4,40}$" - }, - "at_time": { - "description": "The date-time when last updated", - "bsonType": "date" - }, - "correlation_id": { - "description": "The logging correlation ID of the update transaction", - "bsonType": "string", - "pattern": "^[^\\s]{4,40}$" - } - }, - "additionalProperties": false, - "required": ["from_ip", "by_user", "at_time", "correlation_id"] - }, - "tags": { - "description": "Tags associated with the media", - "bsonType": "array", - "items": { - "bsonType": "string", - "enum": ["action", "comedy", "drama", "sci_fi", "documentary"] - } - }, - "metadata": { - "description": "Additional metadata about the media", - "bsonType": "object", - "properties": { - "duration": { - "bsonType": "int", - "minimum": 1, - "description": "Duration in minutes" - }, - "format": { - "description": "Media format", - "bsonType": "string", - "enum": ["dvd", "bluray", "digital", "streaming"] - }, - "quality": { - "description": "Quality rating", - "bsonType": "string", - "enum": ["sd", "hd", "uhd"] - } - }, - "additionalProperties": false - } - }, - "additionalProperties": false, - "required": ["_id", "title", "status", "last_saved"] - }, - "organization": { - "title": "Organization", - "description": "An organization in the system", - "bsonType": "object", - "properties": { - "_id": { - "description": "The unique identifier for the organization", - "bsonType": "objectId" - }, - "name": { - "description": "The organization name", - "bsonType": "string", - "pattern": "^[a-zA-Z0-9\\s.,!?-]{4,255}$" - }, - "status": { - "description": "The current status of the organization", - "bsonType": "string", - "enum": ["active", "archived"] - }, - "last_saved": { - "title": "Breadcrumb", - "description": "When the organization document was last updated", - "bsonType": "object", - "properties": { - "from_ip": { - "description": "Http Request remote IP address", - "bsonType": "string", - "pattern": "^[^\\s]{4,40}$" - }, - "by_user": { - "description": "ID Of User", - "bsonType": "string", - 
"pattern": "^[^\\s]{4,40}$" - }, - "at_time": { - "description": "The date-time when last updated", - "bsonType": "date" - }, - "correlation_id": { - "description": "The logging correlation ID of the update transaction", - "bsonType": "string", - "pattern": "^[^\\s]{4,40}$" - } - }, - "additionalProperties": false, - "required": ["from_ip", "by_user", "at_time", "correlation_id"] - }, - "users": { - "description": "Users associated with this organization", - "bsonType": "array", - "items": { - "description": "A user identifier", - "bsonType": "objectId" - } - }, - "website": { - "description": "Organization website", - "bsonType": "string", - "pattern": "^https?://[\\w\\d\\-]+(\\.[\\w\\d\\-]+)+([\\w\\d\\-._~:/?#\\[\\]@!$&'()*+,;=]*)?$" - }, - "home_address": { - "title": "Street Address", - "description": "Organization home address", - "bsonType": "object", - "properties": { - "street": { - "description": "Street address", - "bsonType": "string", - "pattern": "^[a-zA-Z0-9\\s.,!?-]{4,255}$" - }, - "city": { - "description": "City", - "bsonType": "string", - "pattern": "^[^\\s]{4,40}$" - }, - "state": { - "description": "State or province", - "bsonType": "string", - "pattern": "^[A-Z]{2}$" - }, - "postal_code": { - "description": "Postal code", - "bsonType": "string", - "pattern": "^[^\\s]{4,40}$" - } - }, - "additionalProperties": false, - "required": ["postal_code"] - } - }, - "additionalProperties": false, - "required": ["_id", "name", "status", "last_saved"] - }, - "user": { - "title": "User Collection", - "description": "A user collection for testing the schema system", - "bsonType": "object", - "properties": { - "_id": { - "description": "The unique identifier for a user", - "bsonType": "objectId" - }, - "user_name": { - "description": "Username", - "bsonType": "string", - "pattern": "^[^\\s]{4,40}$" - }, - "first_name": { - "description": "Users First Name", - "bsonType": "string", - "pattern": "^[^\\s]{4,40}$" - }, - "last_name": { - "description": "Users Last Name", - "bsonType": "string", - "pattern": "^[^\\s]{4,40}$" - }, - "status": { - "description": "document status", - "bsonType": "string", - "enum": ["active", "archived"] - }, - "last_saved": { - "title": "Breadcrumb", - "description": "The last time this document was saved", - "bsonType": "object", - "properties": { - "from_ip": { - "description": "Http Request remote IP address", - "bsonType": "string", - "pattern": "^[^\\s]{4,40}$" - }, - "by_user": { - "description": "ID Of User", - "bsonType": "string", - "pattern": "^[^\\s]{4,40}$" - }, - "at_time": { - "description": "The date-time when last updated", - "bsonType": "date" - }, - "correlation_id": { - "description": "The logging correlation ID of the update transaction", - "bsonType": "string", - "pattern": "^[^\\s]{4,40}$" - } - }, - "additionalProperties": false, - "required": ["from_ip", "by_user", "at_time", "correlation_id"] - } - }, - "additionalProperties": false, - "required": ["_id", "user_name", "status", "last_saved"] - } - }, - "additionalProperties": false, - "required": ["collection_name", "collection_id"] -} diff --git a/tests/test_cases/large_sample/expected/bson_schema/search.1.0.0.2.json b/tests/test_cases/large_sample/expected/bson_schema/search.1.0.0.2.json deleted file mode 100644 index 10c9c0e..0000000 --- a/tests/test_cases/large_sample/expected/bson_schema/search.1.0.0.2.json +++ /dev/null @@ -1,258 +0,0 @@ -{ - "title": "search", - "description": "A search index that is used to support a elastic search polymorphic query service", - "bsonType": 
"object", - "properties": { - "collection_name": { - "description": "The name of the collection", - "bsonType": "string", - "pattern": "^[^\\s]{4,40}$" - }, - "collection_id": { - "description": "The unique identifier for this source document", - "bsonType": "objectId" - }, - "media": { - "title": "Media", - "description": "A media item in the system", - "bsonType": "object", - "properties": { - "_id": { - "description": "The unique identifier for the media", - "bsonType": "objectId" - }, - "title": { - "description": "The title of the media", - "bsonType": "string", - "pattern": "^[a-zA-Z0-9\\s.,!?-]{4,255}$" - }, - "type": { - "description": "The type of media", - "bsonType": "string", - "enum": ["movie", "tv_show", "documentary", "short"] - }, - "status": { - "description": "The current status of the media", - "bsonType": "string", - "enum": ["draft", "published", "archived"] - }, - "last_saved": { - "title": "Breadcrumb", - "description": "When the media was last updated", - "bsonType": "object", - "properties": { - "from_ip": { - "description": "Http Request remote IP address", - "bsonType": "string", - "pattern": "^[^\\s]{4,40}$" - }, - "by_user": { - "description": "ID Of User", - "bsonType": "string", - "pattern": "^[^\\s]{4,40}$" - }, - "at_time": { - "description": "The date-time when last updated", - "bsonType": "date" - }, - "correlation_id": { - "description": "The logging correlation ID of the update transaction", - "bsonType": "string", - "pattern": "^[^\\s]{4,40}$" - } - }, - "additionalProperties": false, - "required": ["from_ip", "by_user", "at_time", "correlation_id"] - }, - "tags": { - "description": "Tags associated with the media", - "bsonType": "array", - "items": { - "bsonType": "string", - "enum": ["action", "comedy", "drama", "sci_fi", "documentary"] - } - }, - "metadata": { - "description": "Additional metadata about the media", - "bsonType": "object", - "properties": { - "duration": { - "bsonType": "int", - "minimum": 1, - "description": "Duration in minutes" - }, - "format": { - "description": "Media format", - "bsonType": "string", - "enum": ["dvd", "bluray", "digital", "streaming"] - }, - "quality": { - "description": "Quality rating", - "bsonType": "string", - "enum": ["sd", "hd", "uhd"] - } - }, - "additionalProperties": false - } - }, - "additionalProperties": false, - "required": ["_id", "title", "status", "last_saved"] - }, - "organization": { - "title": "Organization", - "description": "An organization in the system", - "bsonType": "object", - "properties": { - "_id": { - "description": "The unique identifier for the organization", - "bsonType": "objectId" - }, - "name": { - "description": "The organization name", - "bsonType": "string", - "pattern": "^[a-zA-Z0-9\\s.,!?-]{4,255}$" - }, - "status": { - "description": "The current status of the organization", - "bsonType": "string", - "enum": ["draft", "active", "archived"] - }, - "last_saved": { - "title": "Breadcrumb", - "description": "When the organization document was last updated", - "bsonType": "object", - "properties": { - "from_ip": { - "description": "Http Request remote IP address", - "bsonType": "string", - "pattern": "^[^\\s]{4,40}$" - }, - "by_user": { - "description": "ID Of User", - "bsonType": "string", - "pattern": "^[^\\s]{4,40}$" - }, - "at_time": { - "description": "The date-time when last updated", - "bsonType": "date" - }, - "correlation_id": { - "description": "The logging correlation ID of the update transaction", - "bsonType": "string", - "pattern": "^[^\\s]{4,40}$" - } - }, - 
"additionalProperties": false, - "required": ["from_ip", "by_user", "at_time", "correlation_id"] - }, - "users": { - "description": "Users associated with this organization", - "bsonType": "array", - "items": { - "description": "A user identifier", - "bsonType": "objectId" - } - }, - "website": { - "description": "Organization website", - "bsonType": "string", - "pattern": "^https?://[\\w\\d\\-]+(\\.[\\w\\d\\-]+)+([\\w\\d\\-._~:/?#\\[\\]@!$&'()*+,;=]*)?$" - }, - "home_address": { - "title": "Street Address", - "description": "Organization home address", - "bsonType": "object", - "properties": { - "street": { - "description": "Street address", - "bsonType": "string", - "pattern": "^[a-zA-Z0-9\\s.,!?-]{4,255}$" - }, - "city": { - "description": "City", - "bsonType": "string", - "pattern": "^[^\\s]{4,40}$" - }, - "state": { - "description": "State or province", - "bsonType": "string", - "pattern": "^[A-Z]{2}$" - }, - "postal_code": { - "description": "Postal code", - "bsonType": "string", - "pattern": "^[^\\s]{4,40}$" - } - }, - "additionalProperties": false, - "required": ["postal_code"] - } - }, - "additionalProperties": false, - "required": ["_id", "name", "status", "last_saved"] - }, - "user": { - "title": "User Collection", - "description": "A user collection for testing the schema system", - "bsonType": "object", - "properties": { - "_id": { - "description": "The unique identifier for a user", - "bsonType": "objectId" - }, - "user_name": { - "description": "Username", - "bsonType": "string", - "pattern": "^[^\\s]{4,40}$" - }, - "first_name": { - "description": "Users First Name", - "bsonType": "string", - "pattern": "^[^\\s]{4,40}$" - }, - "last_name": { - "description": "Users Last Name", - "bsonType": "string", - "pattern": "^[^\\s]{4,40}$" - }, - "status": { - "description": "document status", - "bsonType": "string", - "enum": ["draft", "active", "archived"] - }, - "last_saved": { - "title": "Breadcrumb", - "description": "The last time this document was saved", - "bsonType": "object", - "properties": { - "from_ip": { - "description": "Http Request remote IP address", - "bsonType": "string", - "pattern": "^[^\\s]{4,40}$" - }, - "by_user": { - "description": "ID Of User", - "bsonType": "string", - "pattern": "^[^\\s]{4,40}$" - }, - "at_time": { - "description": "The date-time when last updated", - "bsonType": "date" - }, - "correlation_id": { - "description": "The logging correlation ID of the update transaction", - "bsonType": "string", - "pattern": "^[^\\s]{4,40}$" - } - }, - "additionalProperties": false, - "required": ["from_ip", "by_user", "at_time", "correlation_id"] - } - }, - "additionalProperties": false, - "required": ["_id", "user_name", "status", "last_saved"] - } - }, - "additionalProperties": false, - "required": ["collection_name", "collection_id"] -} diff --git a/tests/test_cases/large_sample/expected/bson_schema/search.1.0.1.3.json b/tests/test_cases/large_sample/expected/bson_schema/search.1.0.1.3.json deleted file mode 100644 index fd79d42..0000000 --- a/tests/test_cases/large_sample/expected/bson_schema/search.1.0.1.3.json +++ /dev/null @@ -1,376 +0,0 @@ -{ - "title": "search", - "description": "A search index that is used to support a elastic search polymorphic query service", - "bsonType": "object", - "properties": { - "collection_name": { - "description": "The name of the collection", - "bsonType": "string", - "pattern": "^[^\\s]{4,40}$" - }, - "collection_id": { - "description": "The unique identifier for this source document", - "bsonType": "objectId" - }, - 
"media": { - "title": "Media", - "description": "A media item in the system", - "bsonType": "object", - "properties": { - "_id": { - "description": "The unique identifier for the media", - "bsonType": "objectId" - }, - "title": { - "description": "The title of the media", - "bsonType": "string", - "pattern": "^[a-zA-Z0-9\\s.,!?-]{4,255}$" - }, - "type": { - "description": "The type of media", - "bsonType": "string", - "enum": [ - "movie", - "tv_show", - "documentary", - "short" - ] - }, - "status": { - "description": "The current status of the media", - "bsonType": "string", - "enum": [ - "draft", - "published", - "archived" - ] - }, - "last_saved": { - "title": "Breadcrumb", - "description": "When the media was last updated", - "bsonType": "object", - "properties": { - "from_ip": { - "description": "Http Request remote IP address", - "bsonType": "string", - "pattern": "^[^\\s]{4,40}$" - }, - "by_user": { - "description": "ID Of User", - "bsonType": "string", - "pattern": "^[^\\s]{4,40}$" - }, - "at_time": { - "description": "The date-time when last updated", - "bsonType": "date" - }, - "correlation_id": { - "description": "The logging correlation ID of the update transaction", - "bsonType": "string", - "pattern": "^[^\\s]{4,40}$" - } - }, - "additionalProperties": false, - "required": [ - "from_ip", - "by_user", - "at_time", - "correlation_id" - ] - }, - "tags": { - "description": "Tags associated with the media", - "bsonType": "array", - "items": { - "bsonType": "string", - "enum": [ - "action", - "comedy", - "drama", - "sci_fi", - "documentary" - ] - } - }, - "metadata": { - "description": "Additional metadata about the media", - "bsonType": "object", - "properties": { - "duration": { - "bsonType": "int", - "minimum": 1, - "description": "Duration in minutes" - }, - "format": { - "description": "Media format", - "bsonType": "string", - "enum": [ - "dvd", - "bluray", - "digital", - "streaming" - ] - }, - "quality": { - "description": "Quality rating", - "bsonType": "string", - "enum": [ - "sd", - "hd", - "uhd" - ] - } - }, - "additionalProperties": false - } - }, - "additionalProperties": false, - "required": [ - "_id", - "title", - "status", - "last_saved" - ] - }, - "organization": { - "title": "Organization", - "description": "An organization in the system", - "bsonType": "object", - "properties": { - "_id": { - "description": "The unique identifier for the organization", - "bsonType": "objectId" - }, - "name": { - "description": "The organization name", - "bsonType": "string", - "pattern": "^[a-zA-Z0-9\\s.,!?-]{4,255}$" - }, - "status": { - "description": "The current status of the organization", - "bsonType": "string", - "enum": [ - "draft", - "active", - "archived" - ] - }, - "last_saved": { - "title": "Breadcrumb", - "description": "When the organization document was last updated", - "bsonType": "object", - "properties": { - "from_ip": { - "description": "Http Request remote IP address", - "bsonType": "string", - "pattern": "^[^\\s]{4,40}$" - }, - "by_user": { - "description": "ID Of User", - "bsonType": "string", - "pattern": "^[^\\s]{4,40}$" - }, - "at_time": { - "description": "The date-time when last updated", - "bsonType": "date" - }, - "correlation_id": { - "description": "The logging correlation ID of the update transaction", - "bsonType": "string", - "pattern": "^[^\\s]{4,40}$" - } - }, - "additionalProperties": false, - "required": [ - "from_ip", - "by_user", - "at_time", - "correlation_id" - ] - }, - "users": { - "description": "Users associated with this organization", 
- "bsonType": "array", - "items": { - "description": "A user identifier", - "bsonType": "objectId" - } - }, - "website": { - "description": "Organization website", - "bsonType": "string", - "pattern": "^https?://[\\w\\d\\-]+(\\.[\\w\\d\\-]+)+([\\w\\d\\-._~:/?#\\[\\]@!$&'()*+,;=]*)?$" - }, - "home_address": { - "title": "Street Address", - "description": "Organization home address", - "bsonType": "object", - "properties": { - "street": { - "description": "Street address", - "bsonType": "string", - "pattern": "^[a-zA-Z0-9\\s.,!?-]{4,255}$" - }, - "city": { - "description": "City", - "bsonType": "string", - "pattern": "^[^\\s]{4,40}$" - }, - "state": { - "description": "State or province", - "bsonType": "string", - "pattern": "^[A-Z]{2}$" - }, - "postal_code": { - "description": "Postal code", - "bsonType": "string", - "pattern": "^[^\\s]{4,40}$" - } - }, - "additionalProperties": false, - "required": ["postal_code"] - } - }, - "additionalProperties": false, - "required": [ - "_id", - "name", - "status", - "last_saved" - ] - }, - "user": { - "title": "User Collection", - "description": "A user collection for testing the schema system", - "bsonType": "object", - "properties": { - "_id": { - "description": "The unique identifier for a user", - "bsonType": "objectId" - }, - "user_name": { - "description": "Username", - "bsonType": "string", - "pattern": "^[^\\s]{4,40}$" - }, - "full_name": { - "description": "Users Full Name", - "bsonType": "string", - "pattern": "^[a-zA-Z0-9\\s.,!?-]{4,255}$" - }, - "status": { - "description": "The status", - "bsonType": "string", - "enum": [ - "draft", - "active", - "archived" - ] - }, - "categories": { - "description": "A users list of categorized tags", - "bsonType": "array", - "items": { - "description": "A user category", - "bsonType": "object", - "properties": { - "name": { - "description": "Category Name assigned by the user", - "bsonType": "string", - "pattern": "^[^\\s]{4,40}$" - }, - "category": { - "description": "The category type assigned by the user", - "bsonType": "string", - "enum": [ - "work", - "personal", - "project", - "reference" - ] - }, - "tags": { - "description": "A list of enumerated values assigned by the user", - "bsonType": "array", - "items": { - "bsonType": "string", - "enum": [ - "urgent", - "important", - "normal", - "low", - "completed", - "in_progress", - "blocked", - "review" - ] - } - } - }, - "required": [ - "name", - "category", - "tags" - ], - "additionalProperties": false - } - }, - "email": { - "description": "The person's email address", - "bsonType": "string", - "pattern": "^[a-zA-Z0-9._%+-]+@[a-zA-Z0-9.-]+\\.[a-zA-Z]{2,}$" - }, - "phone": { - "description": "The person's phone number", - "bsonType": "string", - "pattern": "^\\+1[2-9][0-9]{9}$" - }, - "last_saved": { - "title": "Breadcrumb", - "description": "The last time this document was saved", - "bsonType": "object", - "properties": { - "from_ip": { - "description": "Http Request remote IP address", - "bsonType": "string", - "pattern": "^[^\\s]{4,40}$" - }, - "by_user": { - "description": "ID Of User", - "bsonType": "string", - "pattern": "^[^\\s]{4,40}$" - }, - "at_time": { - "description": "The date-time when last updated", - "bsonType": "date" - }, - "correlation_id": { - "description": "The logging correlation ID of the update transaction", - "bsonType": "string", - "pattern": "^[^\\s]{4,40}$" - } - }, - "required": [ - "from_ip", - "by_user", - "at_time", - "correlation_id" - ], - "additionalProperties": false - } - }, - "required": [ - "_id", - 
"user_name", - "status", - "last_saved" - ], - "additionalProperties": false - } - }, - "additionalProperties": false, - "required": [ - "collection_name", - "collection_id" - ] -} diff --git a/tests/test_cases/large_sample/expected/bson_schema/user.1.0.0.1.json b/tests/test_cases/large_sample/expected/bson_schema/user.1.0.0.1.json deleted file mode 100644 index f6d565d..0000000 --- a/tests/test_cases/large_sample/expected/bson_schema/user.1.0.0.1.json +++ /dev/null @@ -1,61 +0,0 @@ -{ - "title": "User Collection", - "description": "A user collection for testing the schema system", - "bsonType": "object", - "properties": { - "_id": { - "description": "The unique identifier for a user", - "bsonType": "objectId" - }, - "user_name": { - "description": "Username", - "bsonType": "string", - "pattern": "^[^\\s]{4,40}$" - }, - "first_name": { - "description": "Users First Name", - "bsonType": "string", - "pattern": "^[^\\s]{4,40}$" - }, - "last_name": { - "description": "Users Last Name", - "bsonType": "string", - "pattern": "^[^\\s]{4,40}$" - }, - "status": { - "description": "document status", - "bsonType": "string", - "enum": ["active", "archived"] - }, - "last_saved": { - "title": "Breadcrumb", - "description": "The last time this document was saved", - "bsonType": "object", - "properties": { - "from_ip": { - "description": "Http Request remote IP address", - "bsonType": "string", - "pattern": "^[^\\s]{4,40}$" - }, - "by_user": { - "description": "ID Of User", - "bsonType": "string", - "pattern": "^[^\\s]{4,40}$" - }, - "at_time": { - "description": "The date-time when last updated", - "bsonType": "date" - }, - "correlation_id": { - "description": "The logging correlation ID of the update transaction", - "bsonType": "string", - "pattern": "^[^\\s]{4,40}$" - } - }, - "additionalProperties": false, - "required": ["from_ip", "by_user", "at_time", "correlation_id"] - } - }, - "additionalProperties": false, - "required": ["_id", "user_name", "status", "last_saved"] -} diff --git a/tests/test_cases/large_sample/expected/bson_schema/user.1.0.0.2.json b/tests/test_cases/large_sample/expected/bson_schema/user.1.0.0.2.json deleted file mode 100644 index d1a15f9..0000000 --- a/tests/test_cases/large_sample/expected/bson_schema/user.1.0.0.2.json +++ /dev/null @@ -1,61 +0,0 @@ -{ - "title": "User Collection", - "description": "A user collection for testing the schema system", - "bsonType": "object", - "properties": { - "_id": { - "description": "The unique identifier for a user", - "bsonType": "objectId" - }, - "user_name": { - "description": "Username", - "bsonType": "string", - "pattern": "^[^\\s]{4,40}$" - }, - "first_name": { - "description": "Users First Name", - "bsonType": "string", - "pattern": "^[^\\s]{4,40}$" - }, - "last_name": { - "description": "Users Last Name", - "bsonType": "string", - "pattern": "^[^\\s]{4,40}$" - }, - "status": { - "description": "document status", - "bsonType": "string", - "enum": ["draft", "active", "archived"] - }, - "last_saved": { - "title": "Breadcrumb", - "description": "The last time this document was saved", - "bsonType": "object", - "properties": { - "from_ip": { - "description": "Http Request remote IP address", - "bsonType": "string", - "pattern": "^[^\\s]{4,40}$" - }, - "by_user": { - "description": "ID Of User", - "bsonType": "string", - "pattern": "^[^\\s]{4,40}$" - }, - "at_time": { - "description": "The date-time when last updated", - "bsonType": "date" - }, - "correlation_id": { - "description": "The logging correlation ID of the update transaction", - 
"bsonType": "string", - "pattern": "^[^\\s]{4,40}$" - } - }, - "required": ["from_ip", "by_user", "at_time", "correlation_id"], - "additionalProperties": false - } - }, - "required": ["_id", "user_name", "status", "last_saved"], - "additionalProperties": false -} \ No newline at end of file diff --git a/tests/test_cases/large_sample/expected/bson_schema/user.1.0.1.3.json b/tests/test_cases/large_sample/expected/bson_schema/user.1.0.1.3.json deleted file mode 100644 index 946a60f..0000000 --- a/tests/test_cases/large_sample/expected/bson_schema/user.1.0.1.3.json +++ /dev/null @@ -1,97 +0,0 @@ -{ - "title": "User Collection", - "description": "A user collection for testing the schema system", - "bsonType": "object", - "properties": { - "_id": { - "description": "The unique identifier for a user", - "bsonType": "objectId" - }, - "user_name": { - "description": "Username", - "bsonType": "string", - "pattern": "^[^\\s]{4,40}$" - }, - "full_name": { - "description": "Users Full Name", - "bsonType": "string", - "pattern": "^[a-zA-Z0-9\\s.,!?-]{4,255}$" - }, - "status": { - "description": "The status", - "bsonType": "string", - "enum": ["draft", "active", "archived"] - }, - "categories": { - "description": "A users list of categorized tags", - "bsonType": "array", - "items": { - "description": "A user category", - "bsonType": "object", - "properties": { - "name": { - "description": "Category Name assigned by the user", - "bsonType": "string", - "pattern": "^[^\\s]{4,40}$" - }, - "category": { - "description": "The category type assigned by the user", - "bsonType": "string", - "enum": ["work", "personal", "project", "reference"] - }, - "tags": { - "description": "A list of enumerated values assigned by the user", - "bsonType": "array", - "items": { - "bsonType": "string", - "enum": ["urgent", "important", "normal", "low", - "completed", "in_progress", "blocked", "review"] - } - } - }, - "required": ["name", "category", "tags"], - "additionalProperties": false - } - }, - "email": { - "description": "The person's email address", - "bsonType": "string", - "pattern": "^[a-zA-Z0-9._%+-]+@[a-zA-Z0-9.-]+\\.[a-zA-Z]{2,}$" - }, - "phone": { - "description": "The person's phone number", - "bsonType": "string", - "pattern": "^\\+1[2-9][0-9]{9}$" - }, - "last_saved": { - "title": "Breadcrumb", - "description": "The last time this document was saved", - "bsonType": "object", - "properties": { - "from_ip": { - "description": "Http Request remote IP address", - "bsonType": "string", - "pattern": "^[^\\s]{4,40}$" - }, - "by_user": { - "description": "ID Of User", - "bsonType": "string", - "pattern": "^[^\\s]{4,40}$" - }, - "at_time": { - "description": "The date-time when last updated", - "bsonType": "date" - }, - "correlation_id": { - "description": "The logging correlation ID of the update transaction", - "bsonType": "string", - "pattern": "^[^\\s]{4,40}$" - } - }, - "required": ["from_ip", "by_user", "at_time", "correlation_id"], - "additionalProperties": false - } - }, - "required": ["_id", "user_name", "status", "last_saved"], - "additionalProperties": false -} diff --git a/tests/test_cases/large_sample/expected/json_schema/media.1.0.0.1.yaml b/tests/test_cases/large_sample/expected/json_schema/media.1.0.0.1.yaml deleted file mode 100644 index 3812615..0000000 --- a/tests/test_cases/large_sample/expected/json_schema/media.1.0.0.1.yaml +++ /dev/null @@ -1,96 +0,0 @@ -description: A media item in the system -type: object -additionalProperties: false -properties: - _id: - type: string - pattern: 
"^[0-9a-fA-F]{24}$" - description: The unique identifier for the media - title: - type: string - pattern: "^[a-zA-Z0-9\\s.,!?-]{4,255}$" - description: The title of the media - type: - description: The type of media - type: string - enum: - - movie - - tv_show - - documentary - - short - status: - description: The current status of the media - type: string - enum: - - draft - - published - - archived - last_saved: - title: Breadcrumb - description: When the media was last updated - type: object - additionalProperties: false - properties: - from_ip: - type: string - pattern: "^[^\\s]{4,40}$" - description: Http Request remote IP address - by_user: - type: string - pattern: "^[^\\s]{4,40}$" - description: ID Of User - at_time: - type: string - format: date-time - description: The date-time when last updated - correlation_id: - description: The logging correlation ID of the update transaction - type: string - pattern: "^[^\\s]{4,40}$" - required: - - from_ip - - by_user - - at_time - - correlation_id - tags: - type: array - description: Tags associated with the media - items: - type: string - enum: - - action - - comedy - - drama - - sci_fi - - documentary - metadata: - type: object - description: Additional metadata about the media - additionalProperties: false - properties: - duration: - description: Duration in minutes - type: number - minimum: 1 - multipleOf: 1 - format: - type: string - description: Media format - enum: - - dvd - - bluray - - digital - - streaming - quality: - type: string - description: Quality rating - enum: - - sd - - hd - - uhd -required: -- _id -- title -- status -- last_saved -title: Media diff --git a/tests/test_cases/large_sample/expected/json_schema/organization.1.0.0.1.yaml b/tests/test_cases/large_sample/expected/json_schema/organization.1.0.0.1.yaml deleted file mode 100644 index a22b65c..0000000 --- a/tests/test_cases/large_sample/expected/json_schema/organization.1.0.0.1.yaml +++ /dev/null @@ -1,86 +0,0 @@ -title: Organization -description: An organization in the system -type: object -properties: - _id: - description: The unique identifier for the organization - type: string - pattern: "^[0-9a-fA-F]{24}$" - name: - description: The organization name - type: string - pattern: "^[a-zA-Z0-9\\s.,!?-]{4,255}$" - status: - description: The current status of the organization - type: string - enum: - - active - - archived - last_saved: - title: Breadcrumb - description: When the organization document was last updated - type: object - properties: - from_ip: - description: Http Request remote IP address - type: string - pattern: "^[^\\s]{4,40}$" - by_user: - description: ID Of User - type: string - pattern: "^[^\\s]{4,40}$" - at_time: - description: The date-time when last updated - type: string - format: date-time - correlation_id: - description: The logging correlation ID of the update transaction - type: string - pattern: "^[^\\s]{4,40}$" - additionalProperties: false - required: - - from_ip - - by_user - - at_time - - correlation_id - users: - description: Users associated with this organization - type: array - items: - description: A user identifier - type: string - pattern: "^[0-9a-fA-F]{24}$" - website: - description: Organization website - type: string - pattern: "^https?://[\\w\\d\\-]+(\\.[\\w\\d\\-]+)+([\\w\\d\\-._~:/?#\\[\\]@!$&'()*+,;=]*)?$" - home_address: - title: Street Address - type: object - description: Organization home address - properties: - street: - description: Street address - type: string - pattern: "^[a-zA-Z0-9\\s.,!?-]{4,255}$" - city: - 
description: City - type: string - pattern: "^[^\\s]{4,40}$" - state: - description: State or province - type: string - pattern: "^[A-Z]{2}$" - postal_code: - description: Postal code - type: string - pattern: "^[^\\s]{4,40}$" - additionalProperties: false - required: - - postal_code -additionalProperties: false -required: - - _id - - name - - status - - last_saved diff --git a/tests/test_cases/large_sample/expected/json_schema/search.1.0.0.1.yaml b/tests/test_cases/large_sample/expected/json_schema/search.1.0.0.1.yaml deleted file mode 100644 index 9df43ab..0000000 --- a/tests/test_cases/large_sample/expected/json_schema/search.1.0.0.1.yaml +++ /dev/null @@ -1,260 +0,0 @@ -title: search -description: A search index that is used to support a elastic search polymorphic query service -type: object -properties: - collection_name: - description: The name of the collection - type: string - pattern: "^[^\\s]{4,40}$" - collection_id: - description: The unique identifier for this source document - type: string - pattern: "^[0-9a-fA-F]{24}$" - media: - title: Media - description: A media item in the system - type: object - properties: - _id: - description: The unique identifier for the media - type: string - pattern: "^[0-9a-fA-F]{24}$" - title: - description: The title of the media - type: string - pattern: "^[a-zA-Z0-9\\s.,!?-]{4,255}$" - type: - description: The type of media - type: string - enum: - - movie - - tv_show - - documentary - - short - status: - description: The current status of the media - type: string - enum: - - draft - - published - - archived - last_saved: - title: Breadcrumb - description: When the media was last updated - type: object - properties: - from_ip: - description: Http Request remote IP address - type: string - pattern: "^[^\\s]{4,40}$" - by_user: - description: ID Of User - type: string - pattern: "^[^\\s]{4,40}$" - at_time: - description: The date-time when last updated - type: string - format: date-time - correlation_id: - description: The logging correlation ID of the update transaction - type: string - pattern: "^[^\\s]{4,40}$" - required: - - from_ip - - by_user - - at_time - - correlation_id - additionalProperties: false - tags: - description: Tags associated with the media - type: array - items: - type: string - enum: - - action - - comedy - - drama - - sci_fi - - documentary - metadata: - description: Additional metadata about the media - type: object - properties: - duration: - description: Duration in minutes - type: number - minimum: 1 - multipleOf: 1 - format: - description: Media format - type: string - enum: - - dvd - - bluray - - digital - - streaming - quality: - description: Quality rating - type: string - enum: - - sd - - hd - - uhd - additionalProperties: false - required: - - _id - - title - - status - - last_saved - additionalProperties: false - organization: - title: Organization - description: An organization in the system - type: object - properties: - _id: - type: string - description: The unique identifier for the organization - pattern: "^[0-9a-fA-F]{24}$" - name: - description: The organization name - type: string - pattern: "^[a-zA-Z0-9\\s.,!?-]{4,255}$" - status: - description: The current status of the organization - type: string - enum: - - active - - archived - last_saved: - title: Breadcrumb - description: When the organization document was last updated - type: object - properties: - from_ip: - description: Http Request remote IP address - type: string - pattern: "^[^\\s]{4,40}$" - by_user: - description: ID Of User - type: string 
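# The json_schema fixtures deleted here pair with the bson_schema fixtures deleted earlier;
# the same dictionary entry renders differently per target. A side-by-side sketch taken from
# the fixtures in this patch:
_id:          # identity
  json: { type: string, pattern: "^[0-9a-fA-F]{24}$" }
  bson: { bsonType: objectId }
at_time:      # date-time
  json: { type: string, format: date-time }
  bson: { bsonType: date }
duration:     # count
  json: { type: number, minimum: 1, multipleOf: 1 }
  bson: { bsonType: int, minimum: 1 }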
- pattern: "^[^\\s]{4,40}$" - at_time: - description: The date-time when last updated - type: string - format: date-time - correlation_id: - description: The logging correlation ID of the update transaction - type: string - pattern: "^[^\\s]{4,40}$" - required: - - from_ip - - by_user - - at_time - - correlation_id - additionalProperties: false - users: - description: Users associated with this organization - type: array - items: - description: A user identifier - type: string - pattern: "^[0-9a-fA-F]{24}$" - website: - description: Organization website - type: string - pattern: "^https?://[\\w\\d\\-]+(\\.[\\w\\d\\-]+)+([\\w\\d\\-._~:/?#\\[\\]@!$&'()*+,;=]*)?$" - home_address: - title: Street Address - description: Organization home address - type: object - properties: - street: - description: Street address - type: string - pattern: "^[a-zA-Z0-9\\s.,!?-]{4,255}$" - city: - description: City - type: string - pattern: "^[^\\s]{4,40}$" - state: - description: State or province - type: string - pattern: "^[A-Z]{2}$" - postal_code: - description: Postal code - type: string - pattern: "^[^\\s]{4,40}$" - required: - - postal_code - additionalProperties: false - required: - - _id - - name - - status - - last_saved - additionalProperties: false - user: - title: User Collection - description: A user collection for testing the schema system - type: object - properties: - _id: - description: The unique identifier for a user - type: string - pattern: "^[0-9a-fA-F]{24}$" - user_name: - description: Username - type: string - pattern: "^[^\\s]{4,40}$" - first_name: - description: Users First Name - type: string - pattern: "^[^\\s]{4,40}$" - last_name: - description: Users Last Name - type: string - pattern: "^[^\\s]{4,40}$" - status: - description: document status - type: string - enum: - - active - - archived - last_saved: - title: Breadcrumb - type: object - description: The last time this document was saved - properties: - from_ip: - description: Http Request remote IP address - type: string - pattern: "^[^\\s]{4,40}$" - by_user: - description: ID Of User - type: string - pattern: "^[^\\s]{4,40}$" - at_time: - description: The date-time when last updated - type: string - format: date-time - correlation_id: - description: The logging correlation ID of the update transaction - type: string - pattern: "^[^\\s]{4,40}$" - required: - - from_ip - - by_user - - at_time - - correlation_id - additionalProperties: false - required: - - _id - - user_name - - status - - last_saved - additionalProperties: false -required: - - collection_name - - collection_id -additionalProperties: false \ No newline at end of file diff --git a/tests/test_cases/large_sample/expected/json_schema/search.1.0.0.2.yaml b/tests/test_cases/large_sample/expected/json_schema/search.1.0.0.2.yaml deleted file mode 100644 index 03e687b..0000000 --- a/tests/test_cases/large_sample/expected/json_schema/search.1.0.0.2.yaml +++ /dev/null @@ -1,262 +0,0 @@ -title: search -description: A search index that is used to support a elastic search polymorphic query service -type: object -properties: - collection_name: - description: The name of the collection - type: string - pattern: "^[^\\s]{4,40}$" - collection_id: - description: The unique identifier for this source document - type: string - pattern: "^[0-9a-fA-F]{24}$" - media: - title: Media - description: A media item in the system - type: object - properties: - _id: - description: The unique identifier for the media - type: string - pattern: "^[0-9a-fA-F]{24}$" - title: - description: The title of 
the media - type: string - pattern: "^[a-zA-Z0-9\\s.,!?-]{4,255}$" - type: - description: The type of media - type: string - enum: - - movie - - tv_show - - documentary - - short - status: - description: The current status of the media - type: string - enum: - - draft - - published - - archived - last_saved: - title: Breadcrumb - description: When the media was last updated - type: object - properties: - from_ip: - description: Http Request remote IP address - type: string - pattern: "^[^\\s]{4,40}$" - by_user: - description: ID Of User - type: string - pattern: "^[^\\s]{4,40}$" - at_time: - description: The date-time when last updated - type: string - format: date-time - correlation_id: - description: The logging correlation ID of the update transaction - type: string - pattern: "^[^\\s]{4,40}$" - required: - - from_ip - - by_user - - at_time - - correlation_id - additionalProperties: false - tags: - description: Tags associated with the media - type: array - items: - type: string - enum: - - action - - comedy - - drama - - sci_fi - - documentary - metadata: - description: Additional metadata about the media - type: object - properties: - duration: - description: Duration in minutes - type: number - minimum: 1 - multipleOf: 1 - format: - description: Media format - type: string - enum: - - dvd - - bluray - - digital - - streaming - quality: - description: Quality rating - type: string - enum: - - sd - - hd - - uhd - additionalProperties: false - required: - - _id - - title - - status - - last_saved - additionalProperties: false - organization: - title: Organization - description: An organization in the system - type: object - properties: - _id: - type: string - description: The unique identifier for the organization - pattern: "^[0-9a-fA-F]{24}$" - name: - description: The organization name - type: string - pattern: "^[a-zA-Z0-9\\s.,!?-]{4,255}$" - status: - description: The current status of the organization - type: string - enum: - - draft - - active - - archived - last_saved: - title: Breadcrumb - description: When the organization document was last updated - type: object - properties: - from_ip: - description: Http Request remote IP address - type: string - pattern: "^[^\\s]{4,40}$" - by_user: - description: ID Of User - type: string - pattern: "^[^\\s]{4,40}$" - at_time: - description: The date-time when last updated - type: string - format: date-time - correlation_id: - description: The logging correlation ID of the update transaction - type: string - pattern: "^[^\\s]{4,40}$" - required: - - from_ip - - by_user - - at_time - - correlation_id - additionalProperties: false - users: - description: Users associated with this organization - type: array - items: - description: A user identifier - type: string - pattern: "^[0-9a-fA-F]{24}$" - website: - description: Organization website - type: string - pattern: "^https?://[\\w\\d\\-]+(\\.[\\w\\d\\-]+)+([\\w\\d\\-._~:/?#\\[\\]@!$&'()*+,;=]*)?$" - home_address: - title: Street Address - description: Organization home address - type: object - properties: - street: - description: Street address - type: string - pattern: "^[a-zA-Z0-9\\s.,!?-]{4,255}$" - city: - description: City - type: string - pattern: "^[^\\s]{4,40}$" - state: - description: State or province - type: string - pattern: "^[A-Z]{2}$" - postal_code: - description: Postal code - type: string - pattern: "^[^\\s]{4,40}$" - required: - - postal_code - additionalProperties: false - required: - - _id - - name - - status - - last_saved - additionalProperties: false - user: - title: 
User Collection - description: A user collection for testing the schema system - type: object - properties: - _id: - description: The unique identifier for a user - type: string - pattern: "^[0-9a-fA-F]{24}$" - user_name: - description: Username - type: string - pattern: "^[^\\s]{4,40}$" - first_name: - description: Users First Name - type: string - pattern: "^[^\\s]{4,40}$" - last_name: - description: Users Last Name - type: string - pattern: "^[^\\s]{4,40}$" - status: - description: document status - type: string - enum: - - draft - - active - - archived - last_saved: - title: Breadcrumb - type: object - description: The last time this document was saved - properties: - from_ip: - description: Http Request remote IP address - type: string - pattern: "^[^\\s]{4,40}$" - by_user: - description: ID Of User - type: string - pattern: "^[^\\s]{4,40}$" - at_time: - description: The date-time when last updated - type: string - format: date-time - correlation_id: - description: The logging correlation ID of the update transaction - type: string - pattern: "^[^\\s]{4,40}$" - required: - - from_ip - - by_user - - at_time - - correlation_id - additionalProperties: false - required: - - _id - - user_name - - status - - last_saved - additionalProperties: false -required: - - collection_name - - collection_id -additionalProperties: false \ No newline at end of file diff --git a/tests/test_cases/large_sample/expected/json_schema/search.1.0.1.3.yaml b/tests/test_cases/large_sample/expected/json_schema/search.1.0.1.3.yaml deleted file mode 100644 index 872b74d..0000000 --- a/tests/test_cases/large_sample/expected/json_schema/search.1.0.1.3.yaml +++ /dev/null @@ -1,305 +0,0 @@ -title: search -description: A search index that is used to support a elastic search polymorphic query service -type: object -properties: - collection_name: - description: The name of the collection - type: string - pattern: "^[^\\s]{4,40}$" - collection_id: - description: The unique identifier for this source document - type: string - pattern: "^[0-9a-fA-F]{24}$" - media: - title: Media - description: A media item in the system - type: object - properties: - _id: - description: The unique identifier for the media - type: string - pattern: "^[0-9a-fA-F]{24}$" - title: - description: The title of the media - type: string - pattern: "^[a-zA-Z0-9\\s.,!?-]{4,255}$" - type: - description: The type of media - type: string - enum: - - movie - - tv_show - - documentary - - short - status: - description: The current status of the media - type: string - enum: - - draft - - published - - archived - last_saved: - title: Breadcrumb - description: When the media was last updated - type: object - properties: - from_ip: - description: Http Request remote IP address - type: string - pattern: "^[^\\s]{4,40}$" - by_user: - description: ID Of User - type: string - pattern: "^[^\\s]{4,40}$" - at_time: - description: The date-time when last updated - type: string - format: date-time - correlation_id: - description: The logging correlation ID of the update transaction - type: string - pattern: "^[^\\s]{4,40}$" - required: - - from_ip - - by_user - - at_time - - correlation_id - additionalProperties: false - tags: - description: Tags associated with the media - type: array - items: - type: string - enum: - - action - - comedy - - drama - - sci_fi - - documentary - metadata: - description: Additional metadata about the media - type: object - properties: - duration: - description: Duration in minutes - type: number - minimum: 1 - multipleOf: 1 - format: - 
description: Media format - type: string - enum: - - dvd - - bluray - - digital - - streaming - quality: - description: Quality rating - type: string - enum: - - sd - - hd - - uhd - additionalProperties: false - required: - - _id - - title - - status - - last_saved - additionalProperties: false - organization: - title: Organization - description: An organization in the system - type: object - properties: - _id: - type: string - description: The unique identifier for the organization - pattern: "^[0-9a-fA-F]{24}$" - name: - description: The organization name - type: string - pattern: "^[a-zA-Z0-9\\s.,!?-]{4,255}$" - status: - description: The current status of the organization - type: string - enum: - - draft - - active - - archived - last_saved: - title: Breadcrumb - description: When the organization document was last updated - type: object - properties: - from_ip: - description: Http Request remote IP address - type: string - pattern: "^[^\\s]{4,40}$" - by_user: - description: ID Of User - type: string - pattern: "^[^\\s]{4,40}$" - at_time: - description: The date-time when last updated - type: string - format: date-time - correlation_id: - description: The logging correlation ID of the update transaction - type: string - pattern: "^[^\\s]{4,40}$" - required: - - from_ip - - by_user - - at_time - - correlation_id - additionalProperties: false - users: - description: Users associated with this organization - type: array - items: - description: A user identifier - type: string - pattern: "^[0-9a-fA-F]{24}$" - website: - description: Organization website - type: string - pattern: "^https?://[\\w\\d\\-]+(\\.[\\w\\d\\-]+)+([\\w\\d\\-._~:/?#\\[\\]@!$&'()*+,;=]*)?$" - home_address: - title: Street Address - description: Organization home address - type: object - properties: - street: - description: Street address - type: string - pattern: "^[a-zA-Z0-9\\s.,!?-]{4,255}$" - city: - description: City - type: string - pattern: "^[^\\s]{4,40}$" - state: - description: State or province - type: string - pattern: "^[A-Z]{2}$" - postal_code: - description: Postal code - type: string - pattern: "^[^\\s]{4,40}$" - required: - - postal_code - additionalProperties: false - required: - - _id - - name - - status - - last_saved - additionalProperties: false - user: - title: User Collection - description: A user collection for testing the schema system - type: object - properties: - _id: - type: string - description: The unique identifier for a user - pattern: "^[0-9a-fA-F]{24}$" - user_name: - type: string - description: Username - pattern: "^[^\\s]{4,40}$" - full_name: - type: string - description: Users Full Name - pattern: "^[a-zA-Z0-9\\s.,!?-]{4,255}$" - status: - type: string - description: The status - enum: - - draft - - active - - archived - categories: - type: array - description: A users list of categorized tags - items: - type: object - description: A user category - properties: - name: - type: string - description: Category Name assigned by the user - pattern: "^[^\\s]{4,40}$" - category: - type: string - description: The category type assigned by the user - enum: - - work - - personal - - project - - reference - tags: - type: array - description: A list of enumerated values assigned by the user - items: - type: string - enum: - - urgent - - important - - normal - - low - - completed - - in_progress - - blocked - - review - required: - - name - - category - - tags - additionalProperties: false - email: - type: string - description: The person's email address - pattern: 
"^[a-zA-Z0-9._%+-]+@[a-zA-Z0-9.-]+\\.[a-zA-Z]{2,}$" - phone: - type: string - description: The person's phone number - pattern: "^\\+1[2-9][0-9]{9}$" - last_saved: - title: Breadcrumb - description: The last time this document was saved - type: object - properties: - from_ip: - description: Http Request remote IP address - type: string - pattern: "^[^\\s]{4,40}$" - by_user: - description: ID Of User - type: string - pattern: "^[^\\s]{4,40}$" - at_time: - description: The date-time when last updated - type: string - format: date-time - correlation_id: - description: The logging correlation ID of the update transaction - type: string - pattern: "^[^\\s]{4,40}$" - required: - - from_ip - - by_user - - at_time - - correlation_id - additionalProperties: false - required: - - _id - - user_name - - status - - last_saved - additionalProperties: false - -required: - - collection_name - - collection_id -additionalProperties: false \ No newline at end of file diff --git a/tests/test_cases/large_sample/expected/json_schema/user.1.0.0.1.yaml b/tests/test_cases/large_sample/expected/json_schema/user.1.0.0.1.yaml deleted file mode 100644 index c037ffd..0000000 --- a/tests/test_cases/large_sample/expected/json_schema/user.1.0.0.1.yaml +++ /dev/null @@ -1,59 +0,0 @@ -title: User Collection -description: A user collection for testing the schema system -type: object -properties: - _id: - description: The unique identifier for a user - type: string - pattern: "^[0-9a-fA-F]{24}$" - user_name: - description: Username - type: string - pattern: "^[^\\s]{4,40}$" - first_name: - description: Users First Name - type: string - pattern: "^[^\\s]{4,40}$" - last_name: - description: Users Last Name - type: string - pattern: "^[^\\s]{4,40}$" - status: - description: document status - type: string - enum: - - active - - archived - last_saved: - title: Breadcrumb - description: The last time this document was saved - type: object - properties: - from_ip: - description: Http Request remote IP address - type: string - pattern: "^[^\\s]{4,40}$" - by_user: - description: ID Of User - type: string - pattern: "^[^\\s]{4,40}$" - at_time: - description: The date-time when last updated - type: string - format: date-time - correlation_id: - description: The logging correlation ID of the update transaction - type: string - pattern: "^[^\\s]{4,40}$" - required: - - from_ip - - by_user - - at_time - - correlation_id - additionalProperties: false -required: - - _id - - user_name - - status - - last_saved -additionalProperties: false \ No newline at end of file diff --git a/tests/test_cases/large_sample/expected/json_schema/user.1.0.0.2.yaml b/tests/test_cases/large_sample/expected/json_schema/user.1.0.0.2.yaml deleted file mode 100644 index c89470d..0000000 --- a/tests/test_cases/large_sample/expected/json_schema/user.1.0.0.2.yaml +++ /dev/null @@ -1,60 +0,0 @@ -type: object -title: User Collection -description: A user collection for testing the schema system -properties: - _id: - type: string - description: The unique identifier for a user - pattern: "^[0-9a-fA-F]{24}$" - user_name: - type: string - description: Username - pattern: "^[^\\s]{4,40}$" - first_name: - type: string - description: Users First Name - pattern: "^[^\\s]{4,40}$" - last_name: - type: string - description: Users Last Name - pattern: "^[^\\s]{4,40}$" - status: - type: string - description: document status - enum: - - draft - - active - - archived - last_saved: - title: Breadcrumb - description: The last time this document was saved - type: object - properties: - 
from_ip: - description: Http Request remote IP address - type: string - pattern: "^[^\\s]{4,40}$" - by_user: - description: ID Of User - type: string - pattern: "^[^\\s]{4,40}$" - at_time: - description: The date-time when last updated - type: string - format: date-time - correlation_id: - description: The logging correlation ID of the update transaction - type: string - pattern: "^[^\\s]{4,40}$" - required: - - from_ip - - by_user - - at_time - - correlation_id - additionalProperties: false -required: - - _id - - user_name - - status - - last_saved -additionalProperties: false \ No newline at end of file diff --git a/tests/test_cases/large_sample/expected/json_schema/user.1.0.1.3.yaml b/tests/test_cases/large_sample/expected/json_schema/user.1.0.1.3.yaml deleted file mode 100644 index 2069a0f..0000000 --- a/tests/test_cases/large_sample/expected/json_schema/user.1.0.1.3.yaml +++ /dev/null @@ -1,102 +0,0 @@ -title: User Collection -description: A user collection for testing the schema system -type: object -properties: - _id: - type: string - description: The unique identifier for a user - pattern: "^[0-9a-fA-F]{24}$" - user_name: - type: string - description: Username - pattern: "^[^\\s]{4,40}$" - full_name: - type: string - description: Users Full Name - pattern: "^[a-zA-Z0-9\\s.,!?-]{4,255}$" - status: - type: string - description: The status - enum: - - draft - - active - - archived - categories: - type: array - description: A users list of categorized tags - items: - type: object - description: A user category - properties: - name: - type: string - description: Category Name assigned by the user - pattern: "^[^\\s]{4,40}$" - category: - type: string - description: The category type assigned by the user - enum: - - work - - personal - - project - - reference - tags: - type: array - description: A list of enumerated values assigned by the user - items: - type: string - enum: - - urgent - - important - - normal - - low - - completed - - in_progress - - blocked - - review - required: - - name - - category - - tags - additionalProperties: false - email: - type: string - description: The person's email address - pattern: "^[a-zA-Z0-9._%+-]+@[a-zA-Z0-9.-]+\\.[a-zA-Z]{2,}$" - phone: - type: string - description: The person's phone number - pattern: "^\\+1[2-9][0-9]{9}$" - last_saved: - title: Breadcrumb - description: The last time this document was saved - type: object - properties: - from_ip: - description: Http Request remote IP address - type: string - pattern: "^[^\\s]{4,40}$" - by_user: - description: ID Of User - type: string - pattern: "^[^\\s]{4,40}$" - at_time: - description: The date-time when last updated - type: string - format: date-time - correlation_id: - description: The logging correlation ID of the update transaction - type: string - pattern: "^[^\\s]{4,40}$" - required: - - from_ip - - by_user - - at_time - - correlation_id - additionalProperties: false -required: - - _id - - user_name - - status - - last_saved -additionalProperties: false \ No newline at end of file diff --git a/tests/test_cases/large_sample/migrations/content_merge_content_fields.json b/tests/test_cases/large_sample/migrations/content_merge_content_fields.json new file mode 100644 index 0000000..045b562 --- /dev/null +++ b/tests/test_cases/large_sample/migrations/content_merge_content_fields.json @@ -0,0 +1,17 @@ +[ + { + "$addFields": { + "full_title": { + "$concat": ["$title", " - ", "$subtitle"] + } + } + }, + { + "$unset": [ + "subtitle" + ] + }, + { + "$out": "content" + } +] \ No newline at end of 
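
The migration above is a plain MongoDB aggregation pipeline, so it can be exercised against a scratch database before being wired into a test run. A minimal sketch with pymongo follows; the connection string, database name, and file path are illustrative assumptions, not part of this change:

```python
import json
from pymongo import MongoClient

# Load the pipeline exactly as stored in the test case
# (path assumed relative to the repository root).
with open("tests/test_cases/large_sample/migrations/content_merge_content_fields.json") as f:
    pipeline = json.load(f)

client = MongoClient("mongodb://localhost:27017")  # assumed local mongod
db = client["test_database"]                       # assumed database name

# $addFields computes full_title from title and subtitle, $unset drops
# subtitle, and $out writes the transformed documents back over `content`.
db.content.aggregate(pipeline)
```
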
file diff --git a/tests/test_cases/large_sample/migrations/notification_merge_notification_data.json b/tests/test_cases/large_sample/migrations/notification_merge_notification_data.json new file mode 100644 index 0000000..50b01c1 --- /dev/null +++ b/tests/test_cases/large_sample/migrations/notification_merge_notification_data.json @@ -0,0 +1,15 @@ +[ + { + "$addFields": { + "full_message": { + "$concat": ["$title", ": ", "$message"] + } + } + }, + { + "$unset": ["message"] + }, + { + "$out": "notification" + } +] \ No newline at end of file diff --git a/tests/test_cases/large_sample/migrations/user_merge_name_fields.json b/tests/test_cases/large_sample/migrations/user_merge_name_fields.json new file mode 100644 index 0000000..bc80295 --- /dev/null +++ b/tests/test_cases/large_sample/migrations/user_merge_name_fields.json @@ -0,0 +1,18 @@ +[ + { + "$addFields": { + "full_name": { + "$concat": ["$first_name", " ", "$last_name"] + } + } + }, + { + "$unset": [ + "first_name", + "last_name" + ] + }, + { + "$out": "user" + } +] \ No newline at end of file diff --git a/tests/test_cases/large_sample/test_data/content.1.0.0.1.json b/tests/test_cases/large_sample/test_data/content.1.0.0.1.json new file mode 100644 index 0000000..5407af2 --- /dev/null +++ b/tests/test_cases/large_sample/test_data/content.1.0.0.1.json @@ -0,0 +1,94 @@ +[ + { + "_id": "content_001", + "title": "Introduction to MongoDB", + "subtitle": "A comprehensive guide for beginners", + "content_type": "article", + "status": "published", + "author_id": "A00000000000000000000001", + "content_data": { + "body": "# Introduction to MongoDB\n\nMongoDB is a popular NoSQL database...", + "tags": ["technology", "education"], + "word_count": 2500 + }, + "metadata": { + "created_at": "2024-01-15T10:00:00Z", + "updated_at": "2024-01-20T14:30:00Z", + "published_at": "2024-01-20T14:30:00Z", + "categories": [ + { + "name": "Database Tutorials", + "type": "education", + "tags": ["urgent", "important"] + } + ] + }, + "last_saved": { + "from_ip": "192.168.1.107", + "by_user": "admin", + "at_time": "2024-01-20T14:30:00Z", + "correlation_id": "corr_008" + } + }, + { + "_id": "content_002", + "title": "Advanced Schema Design", + "subtitle": "Best practices for complex data modeling", + "content_type": "video", + "status": "draft", + "author_id": "A00000000000000000000002", + "content_data": { + "url": "https://example.com/videos/advanced-schema-design.mp4", + "duration": 1800, + "quality": "hd" + }, + "metadata": { + "created_at": "2024-01-18T09:00:00Z", + "updated_at": "2024-01-19T16:45:00Z", + "categories": [ + { + "name": "Advanced Topics", + "type": "education", + "tags": ["important", "normal"] + } + ] + }, + "last_saved": { + "from_ip": "192.168.1.108", + "by_user": "user_002", + "at_time": "2024-01-19T16:45:00Z", + "correlation_id": "corr_009" + } + }, + { + "_id": "content_003", + "title": "Weekly Tech Roundup", + "subtitle": "Latest developments in the tech world", + "content_type": "podcast", + "status": "published", + "author_id": "A00000000000000000000003", + "content_data": { + "audio_url": "https://example.com/podcasts/weekly-tech-roundup-episode-15.mp3", + "transcript": "Welcome to this week's tech roundup...", + "episode_number": 15 + }, + "metadata": { + "created_at": "2024-01-22T08:00:00Z", + "updated_at": "2024-01-22T08:00:00Z", + "published_at": "2024-01-22T08:00:00Z", + "categories": [ + { + "name": "News and Updates", + "type": "news", + "tags": ["normal", "in_progress"] + } + ] + }, + "last_saved": { + "from_ip": 
"192.168.1.109", + "by_user": "user_003", + "at_time": "2024-01-22T08:00:00Z", + "correlation_id": "corr_010" + } + } +] \ No newline at end of file diff --git a/tests/test_cases/large_sample/test_data/content.1.0.0.2.json b/tests/test_cases/large_sample/test_data/content.1.0.0.2.json new file mode 100644 index 0000000..3743b9d --- /dev/null +++ b/tests/test_cases/large_sample/test_data/content.1.0.0.2.json @@ -0,0 +1,63 @@ +[ + { + "_id": "content_004", + "title": "MongoDB Performance Optimization", + "subtitle": "Techniques for improving database performance", + "content_type": "article", + "status": "published", + "author_id": "A00000000000000000000001", + "content_data": { + "body": "# MongoDB Performance Optimization\n\nPerformance optimization is crucial...", + "tags": ["technology", "business"], + "word_count": 3200 + }, + "metadata": { + "created_at": "2024-01-25T11:00:00Z", + "updated_at": "2024-01-26T15:20:00Z", + "published_at": "2024-01-26T15:20:00Z", + "categories": [ + { + "name": "Performance Guides", + "type": "education", + "tags": ["important", "urgent"] + } + ] + }, + "last_saved": { + "from_ip": "192.168.1.110", + "by_user": "admin", + "at_time": "2024-01-26T15:20:00Z", + "correlation_id": "corr_011" + } + }, + { + "_id": "content_005", + "title": "Real-time Data Processing", + "subtitle": "Building scalable real-time systems", + "content_type": "video", + "status": "draft", + "author_id": "A00000000000000000000002", + "content_data": { + "url": "https://example.com/videos/real-time-data-processing.mp4", + "duration": 2400, + "quality": "hd" + }, + "metadata": { + "created_at": "2024-01-28T09:00:00Z", + "updated_at": "2024-01-29T14:30:00Z", + "categories": [ + { + "name": "Real-time Systems", + "type": "technology", + "tags": ["important", "normal"] + } + ] + }, + "last_saved": { + "from_ip": "192.168.1.111", + "by_user": "user_002", + "at_time": "2024-01-29T14:30:00Z", + "correlation_id": "corr_012" + } + } +] \ No newline at end of file diff --git a/tests/test_cases/large_sample/test_data/content.1.0.1.3.json b/tests/test_cases/large_sample/test_data/content.1.0.1.3.json new file mode 100644 index 0000000..54b58e8 --- /dev/null +++ b/tests/test_cases/large_sample/test_data/content.1.0.1.3.json @@ -0,0 +1,66 @@ +[ + { + "_id": "content_006", + "title": "MongoDB Aggregation Framework", + "subtitle": "Advanced data processing techniques", + "content_type": "article", + "status": "published", + "author_id": "A00000000000000000000001", + "full_title": "MongoDB Aggregation Framework - Advanced data processing techniques", + "content_data": { + "body": "# MongoDB Aggregation Framework\n\nThe aggregation framework provides powerful data processing capabilities...", + "tags": ["technology", "education"], + "word_count": 4100 + }, + "metadata": { + "created_at": "2024-02-01T09:00:00Z", + "updated_at": "2024-02-02T14:30:00Z", + "published_at": "2024-02-02T14:30:00Z", + "categories": [ + { + "name": "Advanced Topics", + "type": "education", + "tags": ["important", "normal"] + } + ] + }, + "last_saved": { + "from_ip": "192.168.1.112", + "by_user": "admin", + "at_time": "2024-02-02T14:30:00Z", + "correlation_id": "corr_013" + } + }, + { + "_id": "content_007", + "title": "Real-time Data Processing", + "subtitle": "Building scalable real-time systems", + "content_type": "video", + "status": "published", + "author_id": "A00000000000000000000002", + "full_title": "Real-time Data Processing - Building scalable real-time systems", + "content_data": { + "url": 
"https://example.com/videos/real-time-data-processing.mp4", + "duration": 3600, + "quality": "uhd" + }, + "metadata": { + "created_at": "2024-02-05T10:00:00Z", + "updated_at": "2024-02-06T16:00:00Z", + "published_at": "2024-02-06T16:00:00Z", + "categories": [ + { + "name": "Real-time Systems", + "type": "technology", + "tags": ["urgent", "important"] + } + ] + }, + "last_saved": { + "from_ip": "192.168.1.113", + "by_user": "user_002", + "at_time": "2024-02-06T16:00:00Z", + "correlation_id": "corr_014" + } + } +] \ No newline at end of file diff --git a/tests/test_cases/large_sample/data/enumerators.json b/tests/test_cases/large_sample/test_data/enumerators.json similarity index 63% rename from tests/test_cases/large_sample/data/enumerators.json rename to tests/test_cases/large_sample/test_data/enumerators.json index 9ef52f9..41fbd31 100644 --- a/tests/test_cases/large_sample/data/enumerators.json +++ b/tests/test_cases/large_sample/test_data/enumerators.json @@ -42,6 +42,42 @@ "sd": "Standard definition", "hd": "High definition", "uhd": "Ultra high definition" + }, + "notification_type": { + "system": "System notification", + "user": "User notification", + "content": "Content notification", + "reminder": "Reminder notification" + }, + "priority_level": { + "critical": "Critical priority", + "high": "High priority", + "medium": "Medium priority", + "low": "Low priority" + }, + "notification_tags": { + "urgent": "Urgent notification", + "important": "Important notification", + "normal": "Normal notification", + "low": "Low priority notification" + }, + "category_type": { + "work": "Work related items", + "personal": "Personal items", + "project": "Project specific items", + "reference": "Reference materials" + }, + "delivery_channel": { + "email": "Email delivery", + "sms": "SMS delivery", + "push": "Push notification", + "in_app": "In-app notification" + }, + "notification_action": { + "created": "Document created", + "updated": "Document updated", + "deleted": "Document deleted", + "published": "Document published" } } }, @@ -150,6 +186,48 @@ "in_progress": "Currently being worked on", "blocked": "Cannot proceed", "review": "Needs review" + }, + "content_type": { + "article": "Written content", + "video": "Video content", + "podcast": "Audio content" + }, + "content_tags": { + "technology": "Technology related content", + "business": "Business related content", + "entertainment": "Entertainment content", + "education": "Educational content", + "news": "News content" + }, + "notification_type": { + "system": "System notification", + "user": "User notification", + "content": "Content notification", + "reminder": "Reminder notification" + }, + "notification_tags": { + "urgent": "Urgent notification", + "important": "Important notification", + "normal": "Normal notification", + "low": "Low priority notification" + }, + "priority_level": { + "critical": "Critical priority", + "high": "High priority", + "medium": "Medium priority", + "low": "Low priority" + }, + "delivery_channel": { + "email": "Email delivery", + "sms": "SMS delivery", + "push": "Push notification", + "in_app": "In-app notification" + }, + "notification_action": { + "created": "Document created", + "updated": "Document updated", + "deleted": "Document deleted", + "published": "Document published" } } } diff --git a/tests/test_cases/large_sample/data/media.1.0.0.1.json b/tests/test_cases/large_sample/test_data/media.1.0.0.1.json similarity index 100% rename from tests/test_cases/large_sample/data/media.1.0.0.1.json rename to 
tests/test_cases/large_sample/test_data/media.1.0.0.1.json diff --git a/tests/test_cases/large_sample/test_data/notification.1.0.0.1.json b/tests/test_cases/large_sample/test_data/notification.1.0.0.1.json new file mode 100644 index 0000000..f3dbd70 --- /dev/null +++ b/tests/test_cases/large_sample/test_data/notification.1.0.0.1.json @@ -0,0 +1,41 @@ +[ + { + "_id": "notification_001", + "user_id": "A00000000000000000000001", + "title": "Welcome to the Platform", + "message": "Thank you for joining our platform!", + "status": "active", + "last_saved": { + "from_ip": "192.168.1.100", + "by_user": "system", + "at_time": "2024-01-15T10:05:00Z", + "correlation_id": "corr_001" + } + }, + { + "_id": "notification_002", + "user_id": "A00000000000000000000002", + "title": "New Content Available", + "message": "A new article has been published that matches your interests.", + "status": "active", + "last_saved": { + "from_ip": "192.168.1.101", + "by_user": "system", + "at_time": "2024-01-20T14:35:00Z", + "correlation_id": "corr_002" + } + }, + { + "_id": "notification_003", + "user_id": "A00000000000000000000003", + "title": "Reminder: Weekly Meeting", + "message": "Your weekly team meeting starts in 30 minutes.", + "status": "active", + "last_saved": { + "from_ip": "192.168.1.102", + "by_user": "system", + "at_time": "2024-01-22T09:30:00Z", + "correlation_id": "corr_003" + } + } +] \ No newline at end of file diff --git a/tests/test_cases/large_sample/test_data/notification.1.0.0.2.json b/tests/test_cases/large_sample/test_data/notification.1.0.0.2.json new file mode 100644 index 0000000..d6e1438 --- /dev/null +++ b/tests/test_cases/large_sample/test_data/notification.1.0.0.2.json @@ -0,0 +1,28 @@ +[ + { + "_id": "notification_004", + "user_id": "A00000000000000000000001", + "title": "Profile Update Required", + "message": "Please update your profile information to continue using the platform.", + "status": "active", + "last_saved": { + "from_ip": "192.168.1.103", + "by_user": "system", + "at_time": "2024-01-25T11:05:00Z", + "correlation_id": "corr_004" + } + }, + { + "_id": "notification_005", + "user_id": "A00000000000000000000002", + "title": "New Video Available", + "message": "A new video tutorial has been published in your area of interest.", + "status": "active", + "last_saved": { + "from_ip": "192.168.1.104", + "by_user": "system", + "at_time": "2024-01-28T15:05:00Z", + "correlation_id": "corr_005" + } + } +] \ No newline at end of file diff --git a/tests/test_cases/large_sample/test_data/notification.1.0.1.3.json b/tests/test_cases/large_sample/test_data/notification.1.0.1.3.json new file mode 100644 index 0000000..0e39d8e --- /dev/null +++ b/tests/test_cases/large_sample/test_data/notification.1.0.1.3.json @@ -0,0 +1,59 @@ +[ + { + "_id": "notification_006", + "user_id": "A00000000000000000000001", + "title": "System Maintenance", + "message": "Scheduled maintenance will begin in 2 hours.", + "notification_type": "system", + "status": "active", + "priority": "critical", + "tags": ["urgent"], + "categories": ["work"], + "channels": ["email", "sms", "push", "in_app"], + "full_message": "System Maintenance: Scheduled maintenance will begin in 2 hours.", + "metadata": { + "created_at": "2024-02-01T08:00:00Z", + "sent_at": "2024-02-01T08:05:00Z", + "expires_at": "2024-02-01T10:00:00Z", + "source": { + "collection": "system", + "document_id": "maintenance_001", + "action": "created" + } + }, + "last_saved": { + "from_ip": "192.168.1.105", + "by_user": "system", + "at_time": 
"2024-02-01T08:05:00Z", + "correlation_id": "corr_006" + } + }, + { + "_id": "notification_007", + "user_id": "A00000000000000000000003", + "title": "New Podcast Episode", + "message": "Episode 16 of Weekly Tech Roundup is now available.", + "notification_type": "content", + "status": "active", + "priority": "medium", + "tags": ["normal"], + "categories": ["news"], + "channels": ["email", "push"], + "full_message": "New Podcast Episode: Episode 16 of Weekly Tech Roundup is now available.", + "metadata": { + "created_at": "2024-02-05T12:00:00Z", + "sent_at": "2024-02-05T12:05:00Z", + "source": { + "collection": "content", + "document_id": "content_007", + "action": "published" + } + }, + "last_saved": { + "from_ip": "192.168.1.106", + "by_user": "system", + "at_time": "2024-02-05T12:05:00Z", + "correlation_id": "corr_007" + } + } +] \ No newline at end of file diff --git a/tests/test_cases/large_sample/data/organization.1.0.0.1.json b/tests/test_cases/large_sample/test_data/organization.1.0.0.1.json similarity index 100% rename from tests/test_cases/large_sample/data/organization.1.0.0.1.json rename to tests/test_cases/large_sample/test_data/organization.1.0.0.1.json diff --git a/tests/test_cases/large_sample/data/user.1.0.0.1.json b/tests/test_cases/large_sample/test_data/user.1.0.0.1.json similarity index 100% rename from tests/test_cases/large_sample/data/user.1.0.0.1.json rename to tests/test_cases/large_sample/test_data/user.1.0.0.1.json diff --git a/tests/test_cases/large_sample/data/user.1.0.0.2.json b/tests/test_cases/large_sample/test_data/user.1.0.0.2.json similarity index 100% rename from tests/test_cases/large_sample/data/user.1.0.0.2.json rename to tests/test_cases/large_sample/test_data/user.1.0.0.2.json diff --git a/tests/test_cases/large_sample/data/user.1.0.1.3.json b/tests/test_cases/large_sample/test_data/user.1.0.1.3.json similarity index 85% rename from tests/test_cases/large_sample/data/user.1.0.1.3.json rename to tests/test_cases/large_sample/test_data/user.1.0.1.3.json index 3d71edc..c13e802 100644 --- a/tests/test_cases/large_sample/data/user.1.0.1.3.json +++ b/tests/test_cases/large_sample/test_data/user.1.0.1.3.json @@ -18,6 +18,12 @@ ], "email": "john.doe@example.com", "phone": "+15551234567", + "preferences": { + "notification_types": ["system", "content", "reminder"], + "delivery_channels": ["email", "in_app"], + "content_tags": ["technology", "education"], + "priority_levels": ["high", "medium"] + }, "last_saved": { "from_ip": "127.0.0.1", "by_user": "system", @@ -44,6 +50,12 @@ ], "email": "sarah.johnson@techcorp.com", "phone": "+15559876543", + "preferences": { + "notification_types": ["system", "user", "content"], + "delivery_channels": ["email", "push", "in_app"], + "content_tags": ["technology", "business"], + "priority_levels": ["critical", "high", "medium"] + }, "last_saved": { "from_ip": "192.168.1.50", "by_user": "admin", @@ -70,6 +82,12 @@ ], "email": "mike.chen@designstudio.com", "phone": "+15551234568", + "preferences": { + "notification_types": ["content", "reminder"], + "delivery_channels": ["email", "push"], + "content_tags": ["entertainment", "education"], + "priority_levels": ["medium", "low"] + }, "last_saved": { "from_ip": "10.0.0.25", "by_user": "system", diff --git a/tests/test_cases/large_sample/types/appointment.yaml b/tests/test_cases/large_sample/types/appointment.yaml new file mode 100644 index 0000000..2becace --- /dev/null +++ b/tests/test_cases/large_sample/types/appointment.yaml @@ -0,0 +1,11 @@ +description: A date/time range 
+type: object +properties: + from: + description: Starting Date/Time + type: date-time + required: true + to: + description: Ending Date/Time + type: date-time + required: true \ No newline at end of file diff --git a/tests/test_cases/large_sample/types/breadcrumb.yaml b/tests/test_cases/large_sample/types/breadcrumb.yaml index 3b4cc4d..eb40fa6 100644 --- a/tests/test_cases/large_sample/types/breadcrumb.yaml +++ b/tests/test_cases/large_sample/types/breadcrumb.yaml @@ -1,10 +1,9 @@ -title: Breadcrumb description: A tracking breadcrumb type: object properties: from_ip: description: Http Request remote IP address - type: word + type: ip_address required: true by_user: description: ID Of User diff --git a/tests/test_cases/large_sample/types/count.yaml b/tests/test_cases/large_sample/types/count.yaml index 57bf4ea..05d049a 100644 --- a/tests/test_cases/large_sample/types/count.yaml +++ b/tests/test_cases/large_sample/types/count.yaml @@ -1,4 +1,3 @@ -title: Count description: A positive integer value json_type: type: number diff --git a/tests/test_cases/large_sample/types/date-time.yaml b/tests/test_cases/large_sample/types/date-time.yaml index cfed5a3..223a4bd 100644 --- a/tests/test_cases/large_sample/types/date-time.yaml +++ b/tests/test_cases/large_sample/types/date-time.yaml @@ -1,4 +1,3 @@ -title: DateTime description: An ISO 8601 formatted date-time string json_type: type: string diff --git a/tests/test_cases/large_sample/types/email.yaml b/tests/test_cases/large_sample/types/email.yaml index 53613a8..d771770 100644 --- a/tests/test_cases/large_sample/types/email.yaml +++ b/tests/test_cases/large_sample/types/email.yaml @@ -1,5 +1,4 @@ -title: Email description: A valid email address schema: type: string - pattern: "^[a-zA-Z0-9._%+-]+@[a-zA-Z0-9.-]+\\.[a-zA-Z]{2,}$" \ No newline at end of file + pattern: "^[^\\s@]+@[^\\s@]+\\.[^\\s@]+$" \ No newline at end of file diff --git a/tests/test_cases/large_sample/types/identifier.yaml b/tests/test_cases/large_sample/types/identifier.yaml new file mode 100644 index 0000000..4950c75 --- /dev/null +++ b/tests/test_cases/large_sample/types/identifier.yaml @@ -0,0 +1,6 @@ +description: A unique identifier for a document +json_type: + type: string + pattern: "^[0-9a-fA-F]{24}$" +bson_type: + bsonType: objectId \ No newline at end of file diff --git a/tests/test_cases/large_sample/types/identity.yaml b/tests/test_cases/large_sample/types/identity.yaml index afd5299..4950c75 100644 --- a/tests/test_cases/large_sample/types/identity.yaml +++ b/tests/test_cases/large_sample/types/identity.yaml @@ -1,4 +1,3 @@ -title: Identity description: A unique identifier for a document json_type: type: string diff --git a/tests/test_cases/large_sample/types/index.yaml b/tests/test_cases/large_sample/types/index.yaml new file mode 100644 index 0000000..1152e38 --- /dev/null +++ b/tests/test_cases/large_sample/types/index.yaml @@ -0,0 +1,8 @@ +description: A zero-based array index +json_type: + type: number + minimum: 0 + multipleOf: 1 +bson_type: + bsonType: int + minimum: 0 \ No newline at end of file diff --git a/tests/test_cases/large_sample/types/ip_address.yaml b/tests/test_cases/large_sample/types/ip_address.yaml new file mode 100644 index 0000000..28ba551 --- /dev/null +++ b/tests/test_cases/large_sample/types/ip_address.yaml @@ -0,0 +1,4 @@ +description: A valid IP Address +schema: + type: string + pattern: "^((25[0-5]|2[0-4][0-9]|[01]?[0-9][0-9]?)\\.){3}(25[0-5]|2[0-4][0-9]|[01]?[0-9][0-9]?)$" diff --git a/tests/test_cases/large_sample/types/markdown.yaml 
b/tests/test_cases/large_sample/types/markdown.yaml new file mode 100644 index 0000000..bf39557 --- /dev/null +++ b/tests/test_cases/large_sample/types/markdown.yaml @@ -0,0 +1,4 @@ +description: A String of text, at least 1 and no more than 4k characters. May contain markdown, newlines, and tabs. +schema: + type: string + maxLength: 4096 \ No newline at end of file diff --git a/tests/test_cases/large_sample/types/sentence.yaml b/tests/test_cases/large_sample/types/sentence.yaml index 6d81348..aad5494 100644 --- a/tests/test_cases/large_sample/types/sentence.yaml +++ b/tests/test_cases/large_sample/types/sentence.yaml @@ -1,5 +1,4 @@ -title: Sentence -description: A String of text, at least 4 and no more than 255 characters with no special characters +description: A String of text, 0 to 255 characters with no special characters schema: type: string - pattern: "^[a-zA-Z0-9\\s.,!?-]{4,255}$" \ No newline at end of file + pattern: "^[^\\t\\n\\r]{0,255}$" \ No newline at end of file diff --git a/tests/test_cases/large_sample/types/state_code.yaml b/tests/test_cases/large_sample/types/state_code.yaml index ad985db..b04f1c6 100644 --- a/tests/test_cases/large_sample/types/state_code.yaml +++ b/tests/test_cases/large_sample/types/state_code.yaml @@ -1,6 +1,4 @@ -title: State Code -description: 2 character state code +description: A two character state code schema: type: string - pattern: "^[A-Z]{2}$" - + pattern: "^[A-Z]{2}$" \ No newline at end of file diff --git a/tests/test_cases/large_sample/types/street_address.yaml b/tests/test_cases/large_sample/types/street_address.yaml index dfaa527..4630dc1 100644 --- a/tests/test_cases/large_sample/types/street_address.yaml +++ b/tests/test_cases/large_sample/types/street_address.yaml @@ -1,10 +1,10 @@ -title: Street Address description: A street address type: object properties: street: description: Street address type: sentence + required: true city: description: City type: word diff --git a/tests/test_cases/large_sample/types/url.yaml b/tests/test_cases/large_sample/types/url.yaml index 1c35e86..f50de06 100644 --- a/tests/test_cases/large_sample/types/url.yaml +++ b/tests/test_cases/large_sample/types/url.yaml @@ -1,5 +1,4 @@ -title: URL description: A valid URL schema: type: string - pattern: "^https?://[\\w\\d\\-]+(\\.[\\w\\d\\-]+)+([\\w\\d\\-._~:/?#\\[\\]@!$&'()*+,;=]*)?$" + pattern: "^https?://[^\\s]+$" diff --git a/tests/test_cases/large_sample/types/us_phone.yaml b/tests/test_cases/large_sample/types/us_phone.yaml index 860e1fb..833ede6 100644 --- a/tests/test_cases/large_sample/types/us_phone.yaml +++ b/tests/test_cases/large_sample/types/us_phone.yaml @@ -1,4 +1,3 @@ -title: US Phone description: A US phone number in E.164 format schema: type: string diff --git a/tests/test_cases/large_sample/types/word.yaml b/tests/test_cases/large_sample/types/word.yaml index 4ca71db..af8dbd3 100644 --- a/tests/test_cases/large_sample/types/word.yaml +++ b/tests/test_cases/large_sample/types/word.yaml @@ -1,6 +1,5 @@ -title: Word -description: A String of text, at least 4 and no more than 40 characters with no spaces, or special characters like /t or /n +description: A String of text, 1 to 40 characters with no spaces, or special characters like /t or /n schema: type: string - pattern: "^[^\\s]{4,40}$" + pattern: "^\\S{1,40}$" diff --git a/tests/test_cases/large_sample/verified_output/bson_schema/content.1.0.0.1.json b/tests/test_cases/large_sample/verified_output/bson_schema/content.1.0.0.1.json new file mode 100644 index 0000000..9553113 --- /dev/null +++ 
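
The reworked simple types above trade the old blanket 4-to-40-character patterns for more precise ones: `word` now accepts 1 to 40 non-whitespace characters, `sentence` allows 0 to 255 characters of single-line text, and the breadcrumb's `from_ip` moves from `word` to a dedicated `ip_address` type with a dotted-quad IPv4 pattern. These patterns can be sanity-checked in isolation; a quick sketch using only the standard library, with made-up sample values:

```python
import re

# Patterns copied verbatim from word.yaml and ip_address.yaml above.
WORD = re.compile(r"^\S{1,40}$")
IP_ADDRESS = re.compile(
    r"^((25[0-5]|2[0-4][0-9]|[01]?[0-9][0-9]?)\.){3}"
    r"(25[0-5]|2[0-4][0-9]|[01]?[0-9][0-9]?)$"
)

assert WORD.match("admin")                 # short tokens now pass
assert not WORD.match("two words")         # whitespace still rejected
assert IP_ADDRESS.match("192.168.1.100")   # matches the test fixtures
assert not IP_ADDRESS.match("999.1.1.1")   # octets above 255 rejected
```
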
b/tests/test_cases/large_sample/verified_output/bson_schema/content.1.0.0.1.json @@ -0,0 +1,215 @@ +{ + "bsonType": "object", + "properties": { + "_id": { + "bsonType": "objectId" + }, + "title": { + "pattern": "^\\S{1,40}$", + "bsonType": "string" + }, + "subtitle": { + "pattern": "^[^\\t\\n\\r]{0,255}$", + "bsonType": "string" + }, + "content_type": { + "pattern": "^\\S{1,40}$", + "bsonType": "string" + }, + "status": { + "bsonType": "string", + "enum": [ + "active", + "archived" + ] + }, + "author_id": { + "bsonType": "object", + "properties": { + "_id": { + "bsonType": "objectId" + }, + "user_name": { + "pattern": "^\\S{1,40}$", + "bsonType": "string" + }, + "first_name": { + "pattern": "^\\S{1,40}$", + "bsonType": "string" + }, + "last_name": { + "pattern": "^\\S{1,40}$", + "bsonType": "string" + }, + "status": { + "bsonType": "string", + "enum": [ + "active", + "archived" + ] + }, + "last_saved": { + "bsonType": "object", + "properties": { + "from_ip": { + "pattern": "^((25[0-5]|2[0-4][0-9]|[01]?[0-9][0-9]?)\\.){3}(25[0-5]|2[0-4][0-9]|[01]?[0-9][0-9]?)$", + "bsonType": "string" + }, + "by_user": { + "pattern": "^\\S{1,40}$", + "bsonType": "string" + }, + "at_time": { + "bsonType": "date" + }, + "correlation_id": { + "pattern": "^\\S{1,40}$", + "bsonType": "string" + } + }, + "additionalProperties": false, + "required": [ + "from_ip", + "by_user", + "at_time", + "correlation_id" + ] + } + }, + "required": [ + "_id", + "user_name", + "status", + "last_saved" + ], + "additionalProperties": false + }, + "content_data": { + "bsonType": "object", + "properties": { + "body": { + "maxLength": 4096, + "bsonType": "string" + }, + "url": { + "pattern": "^https?://[^\\s]+$", + "bsonType": "string" + }, + "audio_url": { + "pattern": "^https?://[^\\s]+$", + "bsonType": "string" + }, + "transcript": { + "maxLength": 4096, + "bsonType": "string" + }, + "tags": { + "bsonType": "array", + "items": { + "pattern": "^\\S{1,40}$", + "bsonType": "string" + } + }, + "word_count": { + "bsonType": "int", + "minimum": 1 + }, + "duration": { + "bsonType": "int", + "minimum": 1 + }, + "quality": { + "pattern": "^\\S{1,40}$", + "bsonType": "string" + }, + "episode_number": { + "bsonType": "int", + "minimum": 1 + } + }, + "additionalProperties": false + }, + "metadata": { + "bsonType": "object", + "properties": { + "created_at": { + "bsonType": "date" + }, + "updated_at": { + "bsonType": "date" + }, + "published_at": { + "bsonType": "date" + }, + "categories": { + "bsonType": "array", + "items": { + "bsonType": "object", + "properties": { + "name": { + "pattern": "^\\S{1,40}$", + "bsonType": "string" + }, + "type": { + "pattern": "^\\S{1,40}$", + "bsonType": "string" + }, + "tags": { + "bsonType": "array", + "items": { + "pattern": "^\\S{1,40}$", + "bsonType": "string" + } + } + }, + "required": [ + "name", + "type" + ], + "additionalProperties": false + } + } + }, + "required": [ + "created_at" + ], + "additionalProperties": false + }, + "last_saved": { + "bsonType": "object", + "properties": { + "from_ip": { + "pattern": "^((25[0-5]|2[0-4][0-9]|[01]?[0-9][0-9]?)\\.){3}(25[0-5]|2[0-4][0-9]|[01]?[0-9][0-9]?)$", + "bsonType": "string" + }, + "by_user": { + "pattern": "^\\S{1,40}$", + "bsonType": "string" + }, + "at_time": { + "bsonType": "date" + }, + "correlation_id": { + "pattern": "^\\S{1,40}$", + "bsonType": "string" + } + }, + "additionalProperties": false, + "required": [ + "from_ip", + "by_user", + "at_time", + "correlation_id" + ] + } + }, + "required": [ + "_id", + "title", + "content_type", + "status", + 
"content_data", + "last_saved" + ], + "additionalProperties": false +} \ No newline at end of file diff --git a/tests/test_cases/large_sample/verified_output/bson_schema/content.1.0.0.2.json b/tests/test_cases/large_sample/verified_output/bson_schema/content.1.0.0.2.json new file mode 100644 index 0000000..c732692 --- /dev/null +++ b/tests/test_cases/large_sample/verified_output/bson_schema/content.1.0.0.2.json @@ -0,0 +1,217 @@ +{ + "bsonType": "object", + "properties": { + "_id": { + "bsonType": "objectId" + }, + "title": { + "pattern": "^\\S{1,40}$", + "bsonType": "string" + }, + "subtitle": { + "pattern": "^[^\\t\\n\\r]{0,255}$", + "bsonType": "string" + }, + "content_type": { + "pattern": "^\\S{1,40}$", + "bsonType": "string" + }, + "status": { + "bsonType": "string", + "enum": [ + "draft", + "active", + "archived" + ] + }, + "author_id": { + "bsonType": "object", + "properties": { + "_id": { + "bsonType": "objectId" + }, + "user_name": { + "pattern": "^\\S{1,40}$", + "bsonType": "string" + }, + "first_name": { + "pattern": "^\\S{1,40}$", + "bsonType": "string" + }, + "last_name": { + "pattern": "^\\S{1,40}$", + "bsonType": "string" + }, + "status": { + "bsonType": "string", + "enum": [ + "draft", + "active", + "archived" + ] + }, + "last_saved": { + "bsonType": "object", + "properties": { + "from_ip": { + "pattern": "^((25[0-5]|2[0-4][0-9]|[01]?[0-9][0-9]?)\\.){3}(25[0-5]|2[0-4][0-9]|[01]?[0-9][0-9]?)$", + "bsonType": "string" + }, + "by_user": { + "pattern": "^\\S{1,40}$", + "bsonType": "string" + }, + "at_time": { + "bsonType": "date" + }, + "correlation_id": { + "pattern": "^\\S{1,40}$", + "bsonType": "string" + } + }, + "additionalProperties": false, + "required": [ + "from_ip", + "by_user", + "at_time", + "correlation_id" + ] + } + }, + "required": [ + "_id", + "user_name", + "status", + "last_saved" + ], + "additionalProperties": false + }, + "content_data": { + "bsonType": "object", + "properties": { + "body": { + "maxLength": 4096, + "bsonType": "string" + }, + "url": { + "pattern": "^https?://[^\\s]+$", + "bsonType": "string" + }, + "audio_url": { + "pattern": "^https?://[^\\s]+$", + "bsonType": "string" + }, + "transcript": { + "maxLength": 4096, + "bsonType": "string" + }, + "tags": { + "bsonType": "array", + "items": { + "pattern": "^\\S{1,40}$", + "bsonType": "string" + } + }, + "word_count": { + "bsonType": "int", + "minimum": 1 + }, + "duration": { + "bsonType": "int", + "minimum": 1 + }, + "quality": { + "pattern": "^\\S{1,40}$", + "bsonType": "string" + }, + "episode_number": { + "bsonType": "int", + "minimum": 1 + } + }, + "additionalProperties": false + }, + "metadata": { + "bsonType": "object", + "properties": { + "created_at": { + "bsonType": "date" + }, + "updated_at": { + "bsonType": "date" + }, + "published_at": { + "bsonType": "date" + }, + "categories": { + "bsonType": "array", + "items": { + "bsonType": "object", + "properties": { + "name": { + "pattern": "^\\S{1,40}$", + "bsonType": "string" + }, + "type": { + "pattern": "^\\S{1,40}$", + "bsonType": "string" + }, + "tags": { + "bsonType": "array", + "items": { + "pattern": "^\\S{1,40}$", + "bsonType": "string" + } + } + }, + "required": [ + "name", + "type" + ], + "additionalProperties": false + } + } + }, + "required": [ + "created_at" + ], + "additionalProperties": false + }, + "last_saved": { + "bsonType": "object", + "properties": { + "from_ip": { + "pattern": "^((25[0-5]|2[0-4][0-9]|[01]?[0-9][0-9]?)\\.){3}(25[0-5]|2[0-4][0-9]|[01]?[0-9][0-9]?)$", + "bsonType": "string" + }, + "by_user": { + 
"pattern": "^\\S{1,40}$", + "bsonType": "string" + }, + "at_time": { + "bsonType": "date" + }, + "correlation_id": { + "pattern": "^\\S{1,40}$", + "bsonType": "string" + } + }, + "additionalProperties": false, + "required": [ + "from_ip", + "by_user", + "at_time", + "correlation_id" + ] + } + }, + "required": [ + "_id", + "title", + "content_type", + "status", + "content_data", + "last_saved" + ], + "additionalProperties": false +} \ No newline at end of file diff --git a/tests/test_cases/large_sample/verified_output/bson_schema/content.1.0.0.3.json b/tests/test_cases/large_sample/verified_output/bson_schema/content.1.0.0.3.json new file mode 100644 index 0000000..c732692 --- /dev/null +++ b/tests/test_cases/large_sample/verified_output/bson_schema/content.1.0.0.3.json @@ -0,0 +1,217 @@ +{ + "bsonType": "object", + "properties": { + "_id": { + "bsonType": "objectId" + }, + "title": { + "pattern": "^\\S{1,40}$", + "bsonType": "string" + }, + "subtitle": { + "pattern": "^[^\\t\\n\\r]{0,255}$", + "bsonType": "string" + }, + "content_type": { + "pattern": "^\\S{1,40}$", + "bsonType": "string" + }, + "status": { + "bsonType": "string", + "enum": [ + "draft", + "active", + "archived" + ] + }, + "author_id": { + "bsonType": "object", + "properties": { + "_id": { + "bsonType": "objectId" + }, + "user_name": { + "pattern": "^\\S{1,40}$", + "bsonType": "string" + }, + "first_name": { + "pattern": "^\\S{1,40}$", + "bsonType": "string" + }, + "last_name": { + "pattern": "^\\S{1,40}$", + "bsonType": "string" + }, + "status": { + "bsonType": "string", + "enum": [ + "draft", + "active", + "archived" + ] + }, + "last_saved": { + "bsonType": "object", + "properties": { + "from_ip": { + "pattern": "^((25[0-5]|2[0-4][0-9]|[01]?[0-9][0-9]?)\\.){3}(25[0-5]|2[0-4][0-9]|[01]?[0-9][0-9]?)$", + "bsonType": "string" + }, + "by_user": { + "pattern": "^\\S{1,40}$", + "bsonType": "string" + }, + "at_time": { + "bsonType": "date" + }, + "correlation_id": { + "pattern": "^\\S{1,40}$", + "bsonType": "string" + } + }, + "additionalProperties": false, + "required": [ + "from_ip", + "by_user", + "at_time", + "correlation_id" + ] + } + }, + "required": [ + "_id", + "user_name", + "status", + "last_saved" + ], + "additionalProperties": false + }, + "content_data": { + "bsonType": "object", + "properties": { + "body": { + "maxLength": 4096, + "bsonType": "string" + }, + "url": { + "pattern": "^https?://[^\\s]+$", + "bsonType": "string" + }, + "audio_url": { + "pattern": "^https?://[^\\s]+$", + "bsonType": "string" + }, + "transcript": { + "maxLength": 4096, + "bsonType": "string" + }, + "tags": { + "bsonType": "array", + "items": { + "pattern": "^\\S{1,40}$", + "bsonType": "string" + } + }, + "word_count": { + "bsonType": "int", + "minimum": 1 + }, + "duration": { + "bsonType": "int", + "minimum": 1 + }, + "quality": { + "pattern": "^\\S{1,40}$", + "bsonType": "string" + }, + "episode_number": { + "bsonType": "int", + "minimum": 1 + } + }, + "additionalProperties": false + }, + "metadata": { + "bsonType": "object", + "properties": { + "created_at": { + "bsonType": "date" + }, + "updated_at": { + "bsonType": "date" + }, + "published_at": { + "bsonType": "date" + }, + "categories": { + "bsonType": "array", + "items": { + "bsonType": "object", + "properties": { + "name": { + "pattern": "^\\S{1,40}$", + "bsonType": "string" + }, + "type": { + "pattern": "^\\S{1,40}$", + "bsonType": "string" + }, + "tags": { + "bsonType": "array", + "items": { + "pattern": "^\\S{1,40}$", + "bsonType": "string" + } + } + }, + "required": [ + "name", 
+ "type" + ], + "additionalProperties": false + } + } + }, + "required": [ + "created_at" + ], + "additionalProperties": false + }, + "last_saved": { + "bsonType": "object", + "properties": { + "from_ip": { + "pattern": "^((25[0-5]|2[0-4][0-9]|[01]?[0-9][0-9]?)\\.){3}(25[0-5]|2[0-4][0-9]|[01]?[0-9][0-9]?)$", + "bsonType": "string" + }, + "by_user": { + "pattern": "^\\S{1,40}$", + "bsonType": "string" + }, + "at_time": { + "bsonType": "date" + }, + "correlation_id": { + "pattern": "^\\S{1,40}$", + "bsonType": "string" + } + }, + "additionalProperties": false, + "required": [ + "from_ip", + "by_user", + "at_time", + "correlation_id" + ] + } + }, + "required": [ + "_id", + "title", + "content_type", + "status", + "content_data", + "last_saved" + ], + "additionalProperties": false +} \ No newline at end of file diff --git a/tests/test_cases/large_sample/verified_output/bson_schema/content.1.0.1.3.json b/tests/test_cases/large_sample/verified_output/bson_schema/content.1.0.1.3.json new file mode 100644 index 0000000..111d9a9 --- /dev/null +++ b/tests/test_cases/large_sample/verified_output/bson_schema/content.1.0.1.3.json @@ -0,0 +1,277 @@ +{ + "bsonType": "object", + "properties": { + "_id": { + "bsonType": "objectId" + }, + "title": { + "pattern": "^\\S{1,40}$", + "bsonType": "string" + }, + "subtitle": { + "pattern": "^[^\\t\\n\\r]{0,255}$", + "bsonType": "string" + }, + "content_type": { + "bsonType": "string", + "enum": [ + "article", + "video", + "podcast" + ] + }, + "status": { + "bsonType": "string", + "enum": [ + "draft", + "active", + "archived" + ] + }, + "author_id": { + "bsonType": "object", + "properties": { + "_id": { + "bsonType": "objectId" + }, + "user_name": { + "pattern": "^\\S{1,40}$", + "bsonType": "string" + }, + "first_name": { + "pattern": "^\\S{1,40}$", + "bsonType": "string" + }, + "last_name": { + "pattern": "^\\S{1,40}$", + "bsonType": "string" + }, + "status": { + "bsonType": "string", + "enum": [ + "draft", + "active", + "archived" + ] + }, + "last_saved": { + "bsonType": "object", + "properties": { + "from_ip": { + "pattern": "^((25[0-5]|2[0-4][0-9]|[01]?[0-9][0-9]?)\\.){3}(25[0-5]|2[0-4][0-9]|[01]?[0-9][0-9]?)$", + "bsonType": "string" + }, + "by_user": { + "pattern": "^\\S{1,40}$", + "bsonType": "string" + }, + "at_time": { + "bsonType": "date" + }, + "correlation_id": { + "pattern": "^\\S{1,40}$", + "bsonType": "string" + } + }, + "additionalProperties": false, + "required": [ + "from_ip", + "by_user", + "at_time", + "correlation_id" + ] + } + }, + "required": [ + "_id", + "user_name", + "status", + "last_saved" + ], + "additionalProperties": false + }, + "full_title": { + "pattern": "^[^\\t\\n\\r]{0,255}$", + "bsonType": "string" + }, + "content_data": { + "bsonType": "object", + "properties": {}, + "additionalProperties": false, + "oneOf": [ + { + "bsonType": "object", + "properties": { + "body": { + "maxLength": 4096, + "bsonType": "string" + }, + "tags": { + "bsonType": "array", + "items": { + "bsonType": "string", + "enum": [ + "technology", + "business", + "entertainment", + "education", + "news" + ] + } + }, + "word_count": { + "bsonType": "int", + "minimum": 1 + } + }, + "required": [ + "body" + ], + "additionalProperties": false + }, + { + "bsonType": "object", + "properties": { + "url": { + "pattern": "^https?://[^\\s]+$", + "bsonType": "string" + }, + "duration": { + "bsonType": "int", + "minimum": 1 + }, + "quality": { + "bsonType": "string", + "enum": [ + "sd", + "hd", + "uhd" + ] + } + }, + "required": [ + "url" + ], + "additionalProperties": 
false + }, + { + "bsonType": "object", + "properties": { + "audio_url": { + "pattern": "^https?://[^\\s]+$", + "bsonType": "string" + }, + "transcript": { + "maxLength": 4096, + "bsonType": "string" + }, + "episode_number": { + "bsonType": "int", + "minimum": 1 + } + }, + "required": [ + "audio_url" + ], + "additionalProperties": false + } + ] + }, + "metadata": { + "bsonType": "object", + "properties": { + "created_at": { + "bsonType": "date" + }, + "updated_at": { + "bsonType": "date" + }, + "published_at": { + "bsonType": "date" + }, + "categories": { + "bsonType": "array", + "items": { + "bsonType": "object", + "properties": { + "name": { + "pattern": "^\\S{1,40}$", + "bsonType": "string" + }, + "type": { + "bsonType": "string", + "enum": [ + "work", + "personal", + "project", + "reference" + ] + }, + "tags": { + "bsonType": "array", + "items": { + "bsonType": "string", + "enum": [ + "urgent", + "important", + "normal", + "low", + "completed", + "in_progress", + "blocked", + "review" + ] + } + } + }, + "required": [ + "name", + "type" + ], + "additionalProperties": false + } + } + }, + "required": [ + "created_at" + ], + "additionalProperties": false + }, + "last_saved": { + "bsonType": "object", + "properties": { + "from_ip": { + "pattern": "^((25[0-5]|2[0-4][0-9]|[01]?[0-9][0-9]?)\\.){3}(25[0-5]|2[0-4][0-9]|[01]?[0-9][0-9]?)$", + "bsonType": "string" + }, + "by_user": { + "pattern": "^\\S{1,40}$", + "bsonType": "string" + }, + "at_time": { + "bsonType": "date" + }, + "correlation_id": { + "pattern": "^\\S{1,40}$", + "bsonType": "string" + } + }, + "additionalProperties": false, + "required": [ + "from_ip", + "by_user", + "at_time", + "correlation_id" + ] + } + }, + "required": [ + "_id", + "title", + "content_type", + "status", + "content_data", + "last_saved" + ], + "additionalProperties": false +} \ No newline at end of file diff --git a/tests/test_cases/large_sample/verified_output/bson_schema/media.1.0.0.1.json b/tests/test_cases/large_sample/verified_output/bson_schema/media.1.0.0.1.json new file mode 100644 index 0000000..138277e --- /dev/null +++ b/tests/test_cases/large_sample/verified_output/bson_schema/media.1.0.0.1.json @@ -0,0 +1,199 @@ +{ + "bsonType": "object", + "properties": { + "_id": { + "bsonType": "objectId" + }, + "title": { + "pattern": "^[^\\t\\n\\r]{0,255}$", + "bsonType": "string" + }, + "type": { + "bsonType": "string", + "enum": [ + "movie", + "tv_show", + "documentary", + "short" + ] + }, + "status": { + "bsonType": "string", + "enum": [ + "draft", + "published", + "archived" + ] + }, + "last_saved": { + "bsonType": "object", + "properties": { + "from_ip": { + "pattern": "^((25[0-5]|2[0-4][0-9]|[01]?[0-9][0-9]?)\\.){3}(25[0-5]|2[0-4][0-9]|[01]?[0-9][0-9]?)$", + "bsonType": "string" + }, + "by_user": { + "pattern": "^\\S{1,40}$", + "bsonType": "string" + }, + "at_time": { + "bsonType": "date" + }, + "correlation_id": { + "pattern": "^\\S{1,40}$", + "bsonType": "string" + } + }, + "additionalProperties": false, + "required": [ + "from_ip", + "by_user", + "at_time", + "correlation_id" + ] + }, + "tags": { + "bsonType": "array", + "items": { + "bsonType": "string", + "enum": [ + "action", + "comedy", + "drama", + "sci_fi", + "documentary" + ] + } + }, + "metadata": { + "bsonType": "object", + "properties": { + "duration": { + "bsonType": "int", + "minimum": 1 + }, + "format": { + "bsonType": "string", + "enum": [ + "dvd", + "bluray", + "digital", + "streaming" + ] + }, + "quality": { + "bsonType": "string", + "enum": [ + "sd", + "hd", + "uhd" + ] + }, + 
"content_data": { + "bsonType": "object", + "properties": {}, + "additionalProperties": false, + "oneOf": [ + { + "bsonType": "object", + "properties": { + "director": { + "pattern": "^\\S{1,40}$", + "bsonType": "string" + }, + "cast": { + "bsonType": "array", + "items": { + "pattern": "^\\S{1,40}$", + "bsonType": "string" + } + }, + "genre": { + "bsonType": "array", + "items": { + "bsonType": "string", + "enum": [ + "action", + "comedy", + "drama", + "sci_fi", + "documentary" + ] + } + } + }, + "required": [ + "director" + ], + "additionalProperties": false + }, + { + "bsonType": "object", + "properties": { + "season": { + "bsonType": "int", + "minimum": 1 + }, + "episode": { + "bsonType": "int", + "minimum": 1 + }, + "network": { + "pattern": "^\\S{1,40}$", + "bsonType": "string" + } + }, + "required": [ + "season", + "episode" + ], + "additionalProperties": false + }, + { + "bsonType": "object", + "properties": { + "subject": { + "pattern": "^[^\\t\\n\\r]{0,255}$", + "bsonType": "string" + }, + "narrator": { + "pattern": "^\\S{1,40}$", + "bsonType": "string" + } + }, + "required": [ + "subject" + ], + "additionalProperties": false + }, + { + "bsonType": "object", + "properties": { + "festival": { + "pattern": "^\\S{1,40}$", + "bsonType": "string" + }, + "awards": { + "bsonType": "array", + "items": { + "pattern": "^\\S{1,40}$", + "bsonType": "string" + } + } + }, + "additionalProperties": false + } + ] + } + }, + "additionalProperties": false + } + }, + "required": [ + "_id", + "title", + "status", + "last_saved" + ], + "additionalProperties": false +} \ No newline at end of file diff --git a/tests/test_cases/large_sample/verified_output/bson_schema/media.1.0.0.2.json b/tests/test_cases/large_sample/verified_output/bson_schema/media.1.0.0.2.json new file mode 100644 index 0000000..138277e --- /dev/null +++ b/tests/test_cases/large_sample/verified_output/bson_schema/media.1.0.0.2.json @@ -0,0 +1,199 @@ +{ + "bsonType": "object", + "properties": { + "_id": { + "bsonType": "objectId" + }, + "title": { + "pattern": "^[^\\t\\n\\r]{0,255}$", + "bsonType": "string" + }, + "type": { + "bsonType": "string", + "enum": [ + "movie", + "tv_show", + "documentary", + "short" + ] + }, + "status": { + "bsonType": "string", + "enum": [ + "draft", + "published", + "archived" + ] + }, + "last_saved": { + "bsonType": "object", + "properties": { + "from_ip": { + "pattern": "^((25[0-5]|2[0-4][0-9]|[01]?[0-9][0-9]?)\\.){3}(25[0-5]|2[0-4][0-9]|[01]?[0-9][0-9]?)$", + "bsonType": "string" + }, + "by_user": { + "pattern": "^\\S{1,40}$", + "bsonType": "string" + }, + "at_time": { + "bsonType": "date" + }, + "correlation_id": { + "pattern": "^\\S{1,40}$", + "bsonType": "string" + } + }, + "additionalProperties": false, + "required": [ + "from_ip", + "by_user", + "at_time", + "correlation_id" + ] + }, + "tags": { + "bsonType": "array", + "items": { + "bsonType": "string", + "enum": [ + "action", + "comedy", + "drama", + "sci_fi", + "documentary" + ] + } + }, + "metadata": { + "bsonType": "object", + "properties": { + "duration": { + "bsonType": "int", + "minimum": 1 + }, + "format": { + "bsonType": "string", + "enum": [ + "dvd", + "bluray", + "digital", + "streaming" + ] + }, + "quality": { + "bsonType": "string", + "enum": [ + "sd", + "hd", + "uhd" + ] + }, + "content_data": { + "bsonType": "object", + "properties": {}, + "additionalProperties": false, + "oneOf": [ + { + "bsonType": "object", + "properties": { + "director": { + "pattern": "^\\S{1,40}$", + "bsonType": "string" + }, + "cast": { + "bsonType": "array", 
+ "items": { + "pattern": "^\\S{1,40}$", + "bsonType": "string" + } + }, + "genre": { + "bsonType": "array", + "items": { + "bsonType": "string", + "enum": [ + "action", + "comedy", + "drama", + "sci_fi", + "documentary" + ] + } + } + }, + "required": [ + "director" + ], + "additionalProperties": false + }, + { + "bsonType": "object", + "properties": { + "season": { + "bsonType": "int", + "minimum": 1 + }, + "episode": { + "bsonType": "int", + "minimum": 1 + }, + "network": { + "pattern": "^\\S{1,40}$", + "bsonType": "string" + } + }, + "required": [ + "season", + "episode" + ], + "additionalProperties": false + }, + { + "bsonType": "object", + "properties": { + "subject": { + "pattern": "^[^\\t\\n\\r]{0,255}$", + "bsonType": "string" + }, + "narrator": { + "pattern": "^\\S{1,40}$", + "bsonType": "string" + } + }, + "required": [ + "subject" + ], + "additionalProperties": false + }, + { + "bsonType": "object", + "properties": { + "festival": { + "pattern": "^\\S{1,40}$", + "bsonType": "string" + }, + "awards": { + "bsonType": "array", + "items": { + "pattern": "^\\S{1,40}$", + "bsonType": "string" + } + } + }, + "additionalProperties": false + } + ] + } + }, + "additionalProperties": false + } + }, + "required": [ + "_id", + "title", + "status", + "last_saved" + ], + "additionalProperties": false +} \ No newline at end of file diff --git a/tests/test_cases/large_sample/verified_output/bson_schema/media.1.0.0.3.json b/tests/test_cases/large_sample/verified_output/bson_schema/media.1.0.0.3.json new file mode 100644 index 0000000..138277e --- /dev/null +++ b/tests/test_cases/large_sample/verified_output/bson_schema/media.1.0.0.3.json @@ -0,0 +1,199 @@ +{ + "bsonType": "object", + "properties": { + "_id": { + "bsonType": "objectId" + }, + "title": { + "pattern": "^[^\\t\\n\\r]{0,255}$", + "bsonType": "string" + }, + "type": { + "bsonType": "string", + "enum": [ + "movie", + "tv_show", + "documentary", + "short" + ] + }, + "status": { + "bsonType": "string", + "enum": [ + "draft", + "published", + "archived" + ] + }, + "last_saved": { + "bsonType": "object", + "properties": { + "from_ip": { + "pattern": "^((25[0-5]|2[0-4][0-9]|[01]?[0-9][0-9]?)\\.){3}(25[0-5]|2[0-4][0-9]|[01]?[0-9][0-9]?)$", + "bsonType": "string" + }, + "by_user": { + "pattern": "^\\S{1,40}$", + "bsonType": "string" + }, + "at_time": { + "bsonType": "date" + }, + "correlation_id": { + "pattern": "^\\S{1,40}$", + "bsonType": "string" + } + }, + "additionalProperties": false, + "required": [ + "from_ip", + "by_user", + "at_time", + "correlation_id" + ] + }, + "tags": { + "bsonType": "array", + "items": { + "bsonType": "string", + "enum": [ + "action", + "comedy", + "drama", + "sci_fi", + "documentary" + ] + } + }, + "metadata": { + "bsonType": "object", + "properties": { + "duration": { + "bsonType": "int", + "minimum": 1 + }, + "format": { + "bsonType": "string", + "enum": [ + "dvd", + "bluray", + "digital", + "streaming" + ] + }, + "quality": { + "bsonType": "string", + "enum": [ + "sd", + "hd", + "uhd" + ] + }, + "content_data": { + "bsonType": "object", + "properties": {}, + "additionalProperties": false, + "oneOf": [ + { + "bsonType": "object", + "properties": { + "director": { + "pattern": "^\\S{1,40}$", + "bsonType": "string" + }, + "cast": { + "bsonType": "array", + "items": { + "pattern": "^\\S{1,40}$", + "bsonType": "string" + } + }, + "genre": { + "bsonType": "array", + "items": { + "bsonType": "string", + "enum": [ + "action", + "comedy", + "drama", + "sci_fi", + "documentary" + ] + } + } + }, + "required": [ + 
"director" + ], + "additionalProperties": false + }, + { + "bsonType": "object", + "properties": { + "season": { + "bsonType": "int", + "minimum": 1 + }, + "episode": { + "bsonType": "int", + "minimum": 1 + }, + "network": { + "pattern": "^\\S{1,40}$", + "bsonType": "string" + } + }, + "required": [ + "season", + "episode" + ], + "additionalProperties": false + }, + { + "bsonType": "object", + "properties": { + "subject": { + "pattern": "^[^\\t\\n\\r]{0,255}$", + "bsonType": "string" + }, + "narrator": { + "pattern": "^\\S{1,40}$", + "bsonType": "string" + } + }, + "required": [ + "subject" + ], + "additionalProperties": false + }, + { + "bsonType": "object", + "properties": { + "festival": { + "pattern": "^\\S{1,40}$", + "bsonType": "string" + }, + "awards": { + "bsonType": "array", + "items": { + "pattern": "^\\S{1,40}$", + "bsonType": "string" + } + } + }, + "additionalProperties": false + } + ] + } + }, + "additionalProperties": false + } + }, + "required": [ + "_id", + "title", + "status", + "last_saved" + ], + "additionalProperties": false +} \ No newline at end of file diff --git a/tests/test_cases/large_sample/verified_output/bson_schema/notification.1.0.0.1.json b/tests/test_cases/large_sample/verified_output/bson_schema/notification.1.0.0.1.json new file mode 100644 index 0000000..7bf9086 --- /dev/null +++ b/tests/test_cases/large_sample/verified_output/bson_schema/notification.1.0.0.1.json @@ -0,0 +1,120 @@ +{ + "bsonType": "object", + "properties": { + "_id": { + "bsonType": "objectId" + }, + "user_id": { + "bsonType": "object", + "properties": { + "_id": { + "bsonType": "objectId" + }, + "user_name": { + "pattern": "^\\S{1,40}$", + "bsonType": "string" + }, + "first_name": { + "pattern": "^\\S{1,40}$", + "bsonType": "string" + }, + "last_name": { + "pattern": "^\\S{1,40}$", + "bsonType": "string" + }, + "status": { + "bsonType": "string", + "enum": [ + "active", + "archived" + ] + }, + "last_saved": { + "bsonType": "object", + "properties": { + "from_ip": { + "pattern": "^((25[0-5]|2[0-4][0-9]|[01]?[0-9][0-9]?)\\.){3}(25[0-5]|2[0-4][0-9]|[01]?[0-9][0-9]?)$", + "bsonType": "string" + }, + "by_user": { + "pattern": "^\\S{1,40}$", + "bsonType": "string" + }, + "at_time": { + "bsonType": "date" + }, + "correlation_id": { + "pattern": "^\\S{1,40}$", + "bsonType": "string" + } + }, + "additionalProperties": false, + "required": [ + "from_ip", + "by_user", + "at_time", + "correlation_id" + ] + } + }, + "required": [ + "_id", + "user_name", + "status", + "last_saved" + ], + "additionalProperties": false + }, + "title": { + "pattern": "^\\S{1,40}$", + "bsonType": "string" + }, + "message": { + "pattern": "^[^\\t\\n\\r]{0,255}$", + "bsonType": "string" + }, + "status": { + "bsonType": "string", + "enum": [ + "active", + "archived" + ] + }, + "last_saved": { + "bsonType": "object", + "properties": { + "from_ip": { + "pattern": "^((25[0-5]|2[0-4][0-9]|[01]?[0-9][0-9]?)\\.){3}(25[0-5]|2[0-4][0-9]|[01]?[0-9][0-9]?)$", + "bsonType": "string" + }, + "by_user": { + "pattern": "^\\S{1,40}$", + "bsonType": "string" + }, + "at_time": { + "bsonType": "date" + }, + "correlation_id": { + "pattern": "^\\S{1,40}$", + "bsonType": "string" + } + }, + "additionalProperties": false, + "required": [ + "from_ip", + "by_user", + "at_time", + "correlation_id" + ] + } + }, + "required": [ + "_id", + "user_id", + "title", + "message", + "status", + "last_saved" + ], + "additionalProperties": false +} \ No newline at end of file diff --git 
a/tests/test_cases/large_sample/verified_output/bson_schema/notification.1.0.0.2.json b/tests/test_cases/large_sample/verified_output/bson_schema/notification.1.0.0.2.json new file mode 100644 index 0000000..ed0f89e --- /dev/null +++ b/tests/test_cases/large_sample/verified_output/bson_schema/notification.1.0.0.2.json @@ -0,0 +1,122 @@ +{ + "bsonType": "object", + "properties": { + "_id": { + "bsonType": "objectId" + }, + "user_id": { + "bsonType": "object", + "properties": { + "_id": { + "bsonType": "objectId" + }, + "user_name": { + "pattern": "^\\S{1,40}$", + "bsonType": "string" + }, + "first_name": { + "pattern": "^\\S{1,40}$", + "bsonType": "string" + }, + "last_name": { + "pattern": "^\\S{1,40}$", + "bsonType": "string" + }, + "status": { + "bsonType": "string", + "enum": [ + "draft", + "active", + "archived" + ] + }, + "last_saved": { + "bsonType": "object", + "properties": { + "from_ip": { + "pattern": "^((25[0-5]|2[0-4][0-9]|[01]?[0-9][0-9]?)\\.){3}(25[0-5]|2[0-4][0-9]|[01]?[0-9][0-9]?)$", + "bsonType": "string" + }, + "by_user": { + "pattern": "^\\S{1,40}$", + "bsonType": "string" + }, + "at_time": { + "bsonType": "date" + }, + "correlation_id": { + "pattern": "^\\S{1,40}$", + "bsonType": "string" + } + }, + "additionalProperties": false, + "required": [ + "from_ip", + "by_user", + "at_time", + "correlation_id" + ] + } + }, + "required": [ + "_id", + "user_name", + "status", + "last_saved" + ], + "additionalProperties": false + }, + "title": { + "pattern": "^\\S{1,40}$", + "bsonType": "string" + }, + "message": { + "pattern": "^[^\\t\\n\\r]{0,255}$", + "bsonType": "string" + }, + "status": { + "bsonType": "string", + "enum": [ + "draft", + "active", + "archived" + ] + }, + "last_saved": { + "bsonType": "object", + "properties": { + "from_ip": { + "pattern": "^((25[0-5]|2[0-4][0-9]|[01]?[0-9][0-9]?)\\.){3}(25[0-5]|2[0-4][0-9]|[01]?[0-9][0-9]?)$", + "bsonType": "string" + }, + "by_user": { + "pattern": "^\\S{1,40}$", + "bsonType": "string" + }, + "at_time": { + "bsonType": "date" + }, + "correlation_id": { + "pattern": "^\\S{1,40}$", + "bsonType": "string" + } + }, + "additionalProperties": false, + "required": [ + "from_ip", + "by_user", + "at_time", + "correlation_id" + ] + } + }, + "required": [ + "_id", + "user_id", + "title", + "message", + "status", + "last_saved" + ], + "additionalProperties": false +} \ No newline at end of file diff --git a/tests/test_cases/large_sample/verified_output/bson_schema/notification.1.0.0.3.json b/tests/test_cases/large_sample/verified_output/bson_schema/notification.1.0.0.3.json new file mode 100644 index 0000000..ed0f89e --- /dev/null +++ b/tests/test_cases/large_sample/verified_output/bson_schema/notification.1.0.0.3.json @@ -0,0 +1,122 @@ +{ + "bsonType": "object", + "properties": { + "_id": { + "bsonType": "objectId" + }, + "user_id": { + "bsonType": "object", + "properties": { + "_id": { + "bsonType": "objectId" + }, + "user_name": { + "pattern": "^\\S{1,40}$", + "bsonType": "string" + }, + "first_name": { + "pattern": "^\\S{1,40}$", + "bsonType": "string" + }, + "last_name": { + "pattern": "^\\S{1,40}$", + "bsonType": "string" + }, + "status": { + "bsonType": "string", + "enum": [ + "draft", + "active", + "archived" + ] + }, + "last_saved": { + "bsonType": "object", + "properties": { + "from_ip": { + "pattern": "^((25[0-5]|2[0-4][0-9]|[01]?[0-9][0-9]?)\\.){3}(25[0-5]|2[0-4][0-9]|[01]?[0-9][0-9]?)$", + "bsonType": "string" + }, + "by_user": { + "pattern": "^\\S{1,40}$", + "bsonType": "string" + }, + "at_time": { + "bsonType": "date" + }, 
+ "correlation_id": { + "pattern": "^\\S{1,40}$", + "bsonType": "string" + } + }, + "additionalProperties": false, + "required": [ + "from_ip", + "by_user", + "at_time", + "correlation_id" + ] + } + }, + "required": [ + "_id", + "user_name", + "status", + "last_saved" + ], + "additionalProperties": false + }, + "title": { + "pattern": "^\\S{1,40}$", + "bsonType": "string" + }, + "message": { + "pattern": "^[^\\t\\n\\r]{0,255}$", + "bsonType": "string" + }, + "status": { + "bsonType": "string", + "enum": [ + "draft", + "active", + "archived" + ] + }, + "last_saved": { + "bsonType": "object", + "properties": { + "from_ip": { + "pattern": "^((25[0-5]|2[0-4][0-9]|[01]?[0-9][0-9]?)\\.){3}(25[0-5]|2[0-4][0-9]|[01]?[0-9][0-9]?)$", + "bsonType": "string" + }, + "by_user": { + "pattern": "^\\S{1,40}$", + "bsonType": "string" + }, + "at_time": { + "bsonType": "date" + }, + "correlation_id": { + "pattern": "^\\S{1,40}$", + "bsonType": "string" + } + }, + "additionalProperties": false, + "required": [ + "from_ip", + "by_user", + "at_time", + "correlation_id" + ] + } + }, + "required": [ + "_id", + "user_id", + "title", + "message", + "status", + "last_saved" + ], + "additionalProperties": false +} \ No newline at end of file diff --git a/tests/test_cases/large_sample/verified_output/bson_schema/notification.1.0.1.1.json b/tests/test_cases/large_sample/verified_output/bson_schema/notification.1.0.1.1.json new file mode 100644 index 0000000..ac2881f --- /dev/null +++ b/tests/test_cases/large_sample/verified_output/bson_schema/notification.1.0.1.1.json @@ -0,0 +1,224 @@ +{ + "bsonType": "object", + "properties": { + "_id": { + "bsonType": "objectId" + }, + "user_id": { + "bsonType": "object", + "properties": { + "_id": { + "bsonType": "objectId" + }, + "user_name": { + "pattern": "^\\S{1,40}$", + "bsonType": "string" + }, + "first_name": { + "pattern": "^\\S{1,40}$", + "bsonType": "string" + }, + "last_name": { + "pattern": "^\\S{1,40}$", + "bsonType": "string" + }, + "status": { + "bsonType": "string", + "enum": [ + "active", + "archived" + ] + }, + "last_saved": { + "bsonType": "object", + "properties": { + "from_ip": { + "pattern": "^((25[0-5]|2[0-4][0-9]|[01]?[0-9][0-9]?)\\.){3}(25[0-5]|2[0-4][0-9]|[01]?[0-9][0-9]?)$", + "bsonType": "string" + }, + "by_user": { + "pattern": "^\\S{1,40}$", + "bsonType": "string" + }, + "at_time": { + "bsonType": "date" + }, + "correlation_id": { + "pattern": "^\\S{1,40}$", + "bsonType": "string" + } + }, + "additionalProperties": false, + "required": [ + "from_ip", + "by_user", + "at_time", + "correlation_id" + ] + } + }, + "required": [ + "_id", + "user_name", + "status", + "last_saved" + ], + "additionalProperties": false + }, + "title": { + "pattern": "^\\S{1,40}$", + "bsonType": "string" + }, + "message": { + "pattern": "^[^\\t\\n\\r]{0,255}$", + "bsonType": "string" + }, + "notification_type": { + "bsonType": "string", + "enum": [ + "system", + "user", + "content", + "reminder" + ] + }, + "status": { + "bsonType": "string", + "enum": [ + "active", + "archived" + ] + }, + "priority": { + "bsonType": "string", + "enum": [ + "critical", + "high", + "medium", + "low" + ] + }, + "tags": { + "bsonType": "array", + "items": { + "bsonType": "string", + "enum": [ + "urgent", + "important", + "normal", + "low" + ] + } + }, + "categories": { + "bsonType": "array", + "items": { + "bsonType": "string", + "enum": [ + "work", + "personal", + "project", + "reference" + ] + } + }, + "channels": { + "bsonType": "array", + "items": { + "bsonType": "string", + "enum": [ + "email", + 
"sms", + "push", + "in_app" + ] + } + }, + "metadata": { + "bsonType": "object", + "properties": { + "created_at": { + "bsonType": "date" + }, + "sent_at": { + "bsonType": "date" + }, + "read_at": { + "bsonType": "date" + }, + "expires_at": { + "bsonType": "date" + }, + "source": { + "bsonType": "object", + "properties": { + "collection": { + "pattern": "^\\S{1,40}$", + "bsonType": "string" + }, + "document_id": { + "bsonType": "objectId" + }, + "action": { + "bsonType": "string", + "enum": [ + "created", + "updated", + "deleted", + "published" + ] + } + }, + "required": [ + "collection", + "document_id" + ], + "additionalProperties": false + } + }, + "required": [ + "created_at" + ], + "additionalProperties": false + }, + "last_saved": { + "bsonType": "object", + "properties": { + "from_ip": { + "pattern": "^((25[0-5]|2[0-4][0-9]|[01]?[0-9][0-9]?)\\.){3}(25[0-5]|2[0-4][0-9]|[01]?[0-9][0-9]?)$", + "bsonType": "string" + }, + "by_user": { + "pattern": "^\\S{1,40}$", + "bsonType": "string" + }, + "at_time": { + "bsonType": "date" + }, + "correlation_id": { + "pattern": "^\\S{1,40}$", + "bsonType": "string" + } + }, + "additionalProperties": false, + "required": [ + "from_ip", + "by_user", + "at_time", + "correlation_id" + ] + } + }, + "required": [ + "_id", + "user_id", + "title", + "message", + "notification_type", + "status", + "priority", + "channels", + "last_saved" + ], + "additionalProperties": false +} \ No newline at end of file diff --git a/tests/test_cases/large_sample/verified_output/bson_schema/notification.1.0.1.3.json b/tests/test_cases/large_sample/verified_output/bson_schema/notification.1.0.1.3.json new file mode 100644 index 0000000..c6d8dcb --- /dev/null +++ b/tests/test_cases/large_sample/verified_output/bson_schema/notification.1.0.1.3.json @@ -0,0 +1,226 @@ +{ + "bsonType": "object", + "properties": { + "_id": { + "bsonType": "objectId" + }, + "user_id": { + "bsonType": "object", + "properties": { + "_id": { + "bsonType": "objectId" + }, + "user_name": { + "pattern": "^\\S{1,40}$", + "bsonType": "string" + }, + "first_name": { + "pattern": "^\\S{1,40}$", + "bsonType": "string" + }, + "last_name": { + "pattern": "^\\S{1,40}$", + "bsonType": "string" + }, + "status": { + "bsonType": "string", + "enum": [ + "draft", + "active", + "archived" + ] + }, + "last_saved": { + "bsonType": "object", + "properties": { + "from_ip": { + "pattern": "^((25[0-5]|2[0-4][0-9]|[01]?[0-9][0-9]?)\\.){3}(25[0-5]|2[0-4][0-9]|[01]?[0-9][0-9]?)$", + "bsonType": "string" + }, + "by_user": { + "pattern": "^\\S{1,40}$", + "bsonType": "string" + }, + "at_time": { + "bsonType": "date" + }, + "correlation_id": { + "pattern": "^\\S{1,40}$", + "bsonType": "string" + } + }, + "additionalProperties": false, + "required": [ + "from_ip", + "by_user", + "at_time", + "correlation_id" + ] + } + }, + "required": [ + "_id", + "user_name", + "status", + "last_saved" + ], + "additionalProperties": false + }, + "title": { + "pattern": "^\\S{1,40}$", + "bsonType": "string" + }, + "message": { + "pattern": "^[^\\t\\n\\r]{0,255}$", + "bsonType": "string" + }, + "notification_type": { + "bsonType": "string", + "enum": [ + "system", + "user", + "content", + "reminder" + ] + }, + "status": { + "bsonType": "string", + "enum": [ + "draft", + "active", + "archived" + ] + }, + "priority": { + "bsonType": "string", + "enum": [ + "critical", + "high", + "medium", + "low" + ] + }, + "tags": { + "bsonType": "array", + "items": { + "bsonType": "string", + "enum": [ + "urgent", + "important", + "normal", + "low" + ] + } + }, + 
"categories": { + "bsonType": "array", + "items": { + "bsonType": "string", + "enum": [ + "work", + "personal", + "project", + "reference" + ] + } + }, + "channels": { + "bsonType": "array", + "items": { + "bsonType": "string", + "enum": [ + "email", + "sms", + "push", + "in_app" + ] + } + }, + "metadata": { + "bsonType": "object", + "properties": { + "created_at": { + "bsonType": "date" + }, + "sent_at": { + "bsonType": "date" + }, + "read_at": { + "bsonType": "date" + }, + "expires_at": { + "bsonType": "date" + }, + "source": { + "bsonType": "object", + "properties": { + "collection": { + "pattern": "^\\S{1,40}$", + "bsonType": "string" + }, + "document_id": { + "bsonType": "objectId" + }, + "action": { + "bsonType": "string", + "enum": [ + "created", + "updated", + "deleted", + "published" + ] + } + }, + "required": [ + "collection", + "document_id" + ], + "additionalProperties": false + } + }, + "required": [ + "created_at" + ], + "additionalProperties": false + }, + "last_saved": { + "bsonType": "object", + "properties": { + "from_ip": { + "pattern": "^((25[0-5]|2[0-4][0-9]|[01]?[0-9][0-9]?)\\.){3}(25[0-5]|2[0-4][0-9]|[01]?[0-9][0-9]?)$", + "bsonType": "string" + }, + "by_user": { + "pattern": "^\\S{1,40}$", + "bsonType": "string" + }, + "at_time": { + "bsonType": "date" + }, + "correlation_id": { + "pattern": "^\\S{1,40}$", + "bsonType": "string" + } + }, + "additionalProperties": false, + "required": [ + "from_ip", + "by_user", + "at_time", + "correlation_id" + ] + } + }, + "required": [ + "_id", + "user_id", + "title", + "message", + "notification_type", + "status", + "priority", + "channels", + "last_saved" + ], + "additionalProperties": false +} \ No newline at end of file diff --git a/tests/test_cases/large_sample/verified_output/bson_schema/organization.1.0.0.1.json b/tests/test_cases/large_sample/verified_output/bson_schema/organization.1.0.0.1.json new file mode 100644 index 0000000..8e28ab7 --- /dev/null +++ b/tests/test_cases/large_sample/verified_output/bson_schema/organization.1.0.0.1.json @@ -0,0 +1,89 @@ +{ + "bsonType": "object", + "properties": { + "_id": { + "bsonType": "objectId" + }, + "name": { + "pattern": "^[^\\t\\n\\r]{0,255}$", + "bsonType": "string" + }, + "status": { + "bsonType": "string", + "enum": [ + "active", + "archived" + ] + }, + "last_saved": { + "bsonType": "object", + "properties": { + "from_ip": { + "pattern": "^((25[0-5]|2[0-4][0-9]|[01]?[0-9][0-9]?)\\.){3}(25[0-5]|2[0-4][0-9]|[01]?[0-9][0-9]?)$", + "bsonType": "string" + }, + "by_user": { + "pattern": "^\\S{1,40}$", + "bsonType": "string" + }, + "at_time": { + "bsonType": "date" + }, + "correlation_id": { + "pattern": "^\\S{1,40}$", + "bsonType": "string" + } + }, + "additionalProperties": false, + "required": [ + "from_ip", + "by_user", + "at_time", + "correlation_id" + ] + }, + "users": { + "bsonType": "array", + "items": { + "bsonType": "objectId" + } + }, + "website": { + "pattern": "^https?://[^\\s]+$", + "bsonType": "string" + }, + "home_address": { + "bsonType": "object", + "properties": { + "street": { + "pattern": "^[^\\t\\n\\r]{0,255}$", + "bsonType": "string" + }, + "city": { + "pattern": "^\\S{1,40}$", + "bsonType": "string" + }, + "state": { + "pattern": "^[A-Z]{2}$", + "bsonType": "string" + }, + "postal_code": { + "pattern": "^\\S{1,40}$", + "bsonType": "string" + } + }, + "additionalProperties": false, + "required": [ + "street", + "postal_code" + ] + } + }, + "required": [ + "_id", + "name", + "status", + "last_saved" + ], + "additionalProperties": false +} \ No newline at end 
of file diff --git a/tests/test_cases/large_sample/verified_output/bson_schema/organization.1.0.0.2.json b/tests/test_cases/large_sample/verified_output/bson_schema/organization.1.0.0.2.json new file mode 100644 index 0000000..a7bb95a --- /dev/null +++ b/tests/test_cases/large_sample/verified_output/bson_schema/organization.1.0.0.2.json @@ -0,0 +1,90 @@ +{ + "bsonType": "object", + "properties": { + "_id": { + "bsonType": "objectId" + }, + "name": { + "pattern": "^[^\\t\\n\\r]{0,255}$", + "bsonType": "string" + }, + "status": { + "bsonType": "string", + "enum": [ + "draft", + "active", + "archived" + ] + }, + "last_saved": { + "bsonType": "object", + "properties": { + "from_ip": { + "pattern": "^((25[0-5]|2[0-4][0-9]|[01]?[0-9][0-9]?)\\.){3}(25[0-5]|2[0-4][0-9]|[01]?[0-9][0-9]?)$", + "bsonType": "string" + }, + "by_user": { + "pattern": "^\\S{1,40}$", + "bsonType": "string" + }, + "at_time": { + "bsonType": "date" + }, + "correlation_id": { + "pattern": "^\\S{1,40}$", + "bsonType": "string" + } + }, + "additionalProperties": false, + "required": [ + "from_ip", + "by_user", + "at_time", + "correlation_id" + ] + }, + "users": { + "bsonType": "array", + "items": { + "bsonType": "objectId" + } + }, + "website": { + "pattern": "^https?://[^\\s]+$", + "bsonType": "string" + }, + "home_address": { + "bsonType": "object", + "properties": { + "street": { + "pattern": "^[^\\t\\n\\r]{0,255}$", + "bsonType": "string" + }, + "city": { + "pattern": "^\\S{1,40}$", + "bsonType": "string" + }, + "state": { + "pattern": "^[A-Z]{2}$", + "bsonType": "string" + }, + "postal_code": { + "pattern": "^\\S{1,40}$", + "bsonType": "string" + } + }, + "additionalProperties": false, + "required": [ + "street", + "postal_code" + ] + } + }, + "required": [ + "_id", + "name", + "status", + "last_saved" + ], + "additionalProperties": false +} \ No newline at end of file diff --git a/tests/test_cases/large_sample/verified_output/bson_schema/organization.1.0.0.3.json b/tests/test_cases/large_sample/verified_output/bson_schema/organization.1.0.0.3.json new file mode 100644 index 0000000..a7bb95a --- /dev/null +++ b/tests/test_cases/large_sample/verified_output/bson_schema/organization.1.0.0.3.json @@ -0,0 +1,90 @@ +{ + "bsonType": "object", + "properties": { + "_id": { + "bsonType": "objectId" + }, + "name": { + "pattern": "^[^\\t\\n\\r]{0,255}$", + "bsonType": "string" + }, + "status": { + "bsonType": "string", + "enum": [ + "draft", + "active", + "archived" + ] + }, + "last_saved": { + "bsonType": "object", + "properties": { + "from_ip": { + "pattern": "^((25[0-5]|2[0-4][0-9]|[01]?[0-9][0-9]?)\\.){3}(25[0-5]|2[0-4][0-9]|[01]?[0-9][0-9]?)$", + "bsonType": "string" + }, + "by_user": { + "pattern": "^\\S{1,40}$", + "bsonType": "string" + }, + "at_time": { + "bsonType": "date" + }, + "correlation_id": { + "pattern": "^\\S{1,40}$", + "bsonType": "string" + } + }, + "additionalProperties": false, + "required": [ + "from_ip", + "by_user", + "at_time", + "correlation_id" + ] + }, + "users": { + "bsonType": "array", + "items": { + "bsonType": "objectId" + } + }, + "website": { + "pattern": "^https?://[^\\s]+$", + "bsonType": "string" + }, + "home_address": { + "bsonType": "object", + "properties": { + "street": { + "pattern": "^[^\\t\\n\\r]{0,255}$", + "bsonType": "string" + }, + "city": { + "pattern": "^\\S{1,40}$", + "bsonType": "string" + }, + "state": { + "pattern": "^[A-Z]{2}$", + "bsonType": "string" + }, + "postal_code": { + "pattern": "^\\S{1,40}$", + "bsonType": "string" + } + }, + "additionalProperties": false, + 
"required": [ + "street", + "postal_code" + ] + } + }, + "required": [ + "_id", + "name", + "status", + "last_saved" + ], + "additionalProperties": false +} \ No newline at end of file diff --git a/tests/test_cases/large_sample/verified_output/bson_schema/search.1.0.0.1.json b/tests/test_cases/large_sample/verified_output/bson_schema/search.1.0.0.1.json new file mode 100644 index 0000000..ceb2e8d --- /dev/null +++ b/tests/test_cases/large_sample/verified_output/bson_schema/search.1.0.0.1.json @@ -0,0 +1,368 @@ +{ + "bsonType": "object", + "properties": { + "collection_name": { + "pattern": "^\\S{1,40}$", + "bsonType": "string" + }, + "collection_id": { + "bsonType": "objectId" + } + }, + "required": [ + "collection_name", + "collection_id" + ], + "additionalProperties": false, + "oneOf": [ + { + "bsonType": "object", + "properties": { + "_id": { + "bsonType": "objectId" + }, + "title": { + "pattern": "^[^\\t\\n\\r]{0,255}$", + "bsonType": "string" + }, + "type": { + "bsonType": "string", + "enum": [ + "movie", + "tv_show", + "documentary", + "short" + ] + }, + "status": { + "bsonType": "string", + "enum": [ + "draft", + "published", + "archived" + ] + }, + "last_saved": { + "bsonType": "object", + "properties": { + "from_ip": { + "pattern": "^((25[0-5]|2[0-4][0-9]|[01]?[0-9][0-9]?)\\.){3}(25[0-5]|2[0-4][0-9]|[01]?[0-9][0-9]?)$", + "bsonType": "string" + }, + "by_user": { + "pattern": "^\\S{1,40}$", + "bsonType": "string" + }, + "at_time": { + "bsonType": "date" + }, + "correlation_id": { + "pattern": "^\\S{1,40}$", + "bsonType": "string" + } + }, + "additionalProperties": false, + "required": [ + "from_ip", + "by_user", + "at_time", + "correlation_id" + ] + }, + "tags": { + "bsonType": "array", + "items": { + "bsonType": "string", + "enum": [ + "action", + "comedy", + "drama", + "sci_fi", + "documentary" + ] + } + }, + "metadata": { + "bsonType": "object", + "properties": { + "duration": { + "bsonType": "int", + "minimum": 1 + }, + "format": { + "bsonType": "string", + "enum": [ + "dvd", + "bluray", + "digital", + "streaming" + ] + }, + "quality": { + "bsonType": "string", + "enum": [ + "sd", + "hd", + "uhd" + ] + }, + "content_data": { + "bsonType": "object", + "properties": {}, + "additionalProperties": false, + "oneOf": [ + { + "bsonType": "object", + "properties": { + "director": { + "pattern": "^\\S{1,40}$", + "bsonType": "string" + }, + "cast": { + "bsonType": "array", + "items": { + "pattern": "^\\S{1,40}$", + "bsonType": "string" + } + }, + "genre": { + "bsonType": "array", + "items": { + "bsonType": "string", + "enum": [ + "action", + "comedy", + "drama", + "sci_fi", + "documentary" + ] + } + } + }, + "required": [ + "director" + ], + "additionalProperties": false + }, + { + "bsonType": "object", + "properties": { + "season": { + "bsonType": "int", + "minimum": 1 + }, + "episode": { + "bsonType": "int", + "minimum": 1 + }, + "network": { + "pattern": "^\\S{1,40}$", + "bsonType": "string" + } + }, + "required": [ + "season", + "episode" + ], + "additionalProperties": false + }, + { + "bsonType": "object", + "properties": { + "subject": { + "pattern": "^[^\\t\\n\\r]{0,255}$", + "bsonType": "string" + }, + "narrator": { + "pattern": "^\\S{1,40}$", + "bsonType": "string" + } + }, + "required": [ + "subject" + ], + "additionalProperties": false + }, + { + "bsonType": "object", + "properties": { + "festival": { + "pattern": "^\\S{1,40}$", + "bsonType": "string" + }, + "awards": { + "bsonType": "array", + "items": { + "pattern": "^\\S{1,40}$", + "bsonType": "string" + } + } + }, + 
"additionalProperties": false + } + ] + } + }, + "additionalProperties": false + } + }, + "required": [ + "_id", + "title", + "status", + "last_saved" + ], + "additionalProperties": false + }, + { + "bsonType": "object", + "properties": { + "_id": { + "bsonType": "objectId" + }, + "name": { + "pattern": "^[^\\t\\n\\r]{0,255}$", + "bsonType": "string" + }, + "status": { + "bsonType": "string", + "enum": [ + "active", + "archived" + ] + }, + "last_saved": { + "bsonType": "object", + "properties": { + "from_ip": { + "pattern": "^((25[0-5]|2[0-4][0-9]|[01]?[0-9][0-9]?)\\.){3}(25[0-5]|2[0-4][0-9]|[01]?[0-9][0-9]?)$", + "bsonType": "string" + }, + "by_user": { + "pattern": "^\\S{1,40}$", + "bsonType": "string" + }, + "at_time": { + "bsonType": "date" + }, + "correlation_id": { + "pattern": "^\\S{1,40}$", + "bsonType": "string" + } + }, + "additionalProperties": false, + "required": [ + "from_ip", + "by_user", + "at_time", + "correlation_id" + ] + }, + "users": { + "bsonType": "array", + "items": { + "bsonType": "objectId" + } + }, + "website": { + "pattern": "^https?://[^\\s]+$", + "bsonType": "string" + }, + "home_address": { + "bsonType": "object", + "properties": { + "street": { + "pattern": "^[^\\t\\n\\r]{0,255}$", + "bsonType": "string" + }, + "city": { + "pattern": "^\\S{1,40}$", + "bsonType": "string" + }, + "state": { + "pattern": "^[A-Z]{2}$", + "bsonType": "string" + }, + "postal_code": { + "pattern": "^\\S{1,40}$", + "bsonType": "string" + } + }, + "additionalProperties": false, + "required": [ + "street", + "postal_code" + ] + } + }, + "required": [ + "_id", + "name", + "status", + "last_saved" + ], + "additionalProperties": false + }, + { + "bsonType": "object", + "properties": { + "_id": { + "bsonType": "objectId" + }, + "user_name": { + "pattern": "^\\S{1,40}$", + "bsonType": "string" + }, + "first_name": { + "pattern": "^\\S{1,40}$", + "bsonType": "string" + }, + "last_name": { + "pattern": "^\\S{1,40}$", + "bsonType": "string" + }, + "status": { + "bsonType": "string", + "enum": [ + "active", + "archived" + ] + }, + "last_saved": { + "bsonType": "object", + "properties": { + "from_ip": { + "pattern": "^((25[0-5]|2[0-4][0-9]|[01]?[0-9][0-9]?)\\.){3}(25[0-5]|2[0-4][0-9]|[01]?[0-9][0-9]?)$", + "bsonType": "string" + }, + "by_user": { + "pattern": "^\\S{1,40}$", + "bsonType": "string" + }, + "at_time": { + "bsonType": "date" + }, + "correlation_id": { + "pattern": "^\\S{1,40}$", + "bsonType": "string" + } + }, + "additionalProperties": false, + "required": [ + "from_ip", + "by_user", + "at_time", + "correlation_id" + ] + } + }, + "required": [ + "_id", + "user_name", + "status", + "last_saved" + ], + "additionalProperties": false + } + ] +} \ No newline at end of file diff --git a/tests/test_cases/large_sample/verified_output/bson_schema/search.1.0.0.2.json b/tests/test_cases/large_sample/verified_output/bson_schema/search.1.0.0.2.json new file mode 100644 index 0000000..f71cb8e --- /dev/null +++ b/tests/test_cases/large_sample/verified_output/bson_schema/search.1.0.0.2.json @@ -0,0 +1,370 @@ +{ + "bsonType": "object", + "properties": { + "collection_name": { + "pattern": "^\\S{1,40}$", + "bsonType": "string" + }, + "collection_id": { + "bsonType": "objectId" + } + }, + "required": [ + "collection_name", + "collection_id" + ], + "additionalProperties": false, + "oneOf": [ + { + "bsonType": "object", + "properties": { + "_id": { + "bsonType": "objectId" + }, + "title": { + "pattern": "^[^\\t\\n\\r]{0,255}$", + "bsonType": "string" + }, + "type": { + "bsonType": "string", + "enum": [ 
+ "movie", + "tv_show", + "documentary", + "short" + ] + }, + "status": { + "bsonType": "string", + "enum": [ + "draft", + "published", + "archived" + ] + }, + "last_saved": { + "bsonType": "object", + "properties": { + "from_ip": { + "pattern": "^((25[0-5]|2[0-4][0-9]|[01]?[0-9][0-9]?)\\.){3}(25[0-5]|2[0-4][0-9]|[01]?[0-9][0-9]?)$", + "bsonType": "string" + }, + "by_user": { + "pattern": "^\\S{1,40}$", + "bsonType": "string" + }, + "at_time": { + "bsonType": "date" + }, + "correlation_id": { + "pattern": "^\\S{1,40}$", + "bsonType": "string" + } + }, + "additionalProperties": false, + "required": [ + "from_ip", + "by_user", + "at_time", + "correlation_id" + ] + }, + "tags": { + "bsonType": "array", + "items": { + "bsonType": "string", + "enum": [ + "action", + "comedy", + "drama", + "sci_fi", + "documentary" + ] + } + }, + "metadata": { + "bsonType": "object", + "properties": { + "duration": { + "bsonType": "int", + "minimum": 1 + }, + "format": { + "bsonType": "string", + "enum": [ + "dvd", + "bluray", + "digital", + "streaming" + ] + }, + "quality": { + "bsonType": "string", + "enum": [ + "sd", + "hd", + "uhd" + ] + }, + "content_data": { + "bsonType": "object", + "properties": {}, + "additionalProperties": false, + "oneOf": [ + { + "bsonType": "object", + "properties": { + "director": { + "pattern": "^\\S{1,40}$", + "bsonType": "string" + }, + "cast": { + "bsonType": "array", + "items": { + "pattern": "^\\S{1,40}$", + "bsonType": "string" + } + }, + "genre": { + "bsonType": "array", + "items": { + "bsonType": "string", + "enum": [ + "action", + "comedy", + "drama", + "sci_fi", + "documentary" + ] + } + } + }, + "required": [ + "director" + ], + "additionalProperties": false + }, + { + "bsonType": "object", + "properties": { + "season": { + "bsonType": "int", + "minimum": 1 + }, + "episode": { + "bsonType": "int", + "minimum": 1 + }, + "network": { + "pattern": "^\\S{1,40}$", + "bsonType": "string" + } + }, + "required": [ + "season", + "episode" + ], + "additionalProperties": false + }, + { + "bsonType": "object", + "properties": { + "subject": { + "pattern": "^[^\\t\\n\\r]{0,255}$", + "bsonType": "string" + }, + "narrator": { + "pattern": "^\\S{1,40}$", + "bsonType": "string" + } + }, + "required": [ + "subject" + ], + "additionalProperties": false + }, + { + "bsonType": "object", + "properties": { + "festival": { + "pattern": "^\\S{1,40}$", + "bsonType": "string" + }, + "awards": { + "bsonType": "array", + "items": { + "pattern": "^\\S{1,40}$", + "bsonType": "string" + } + } + }, + "additionalProperties": false + } + ] + } + }, + "additionalProperties": false + } + }, + "required": [ + "_id", + "title", + "status", + "last_saved" + ], + "additionalProperties": false + }, + { + "bsonType": "object", + "properties": { + "_id": { + "bsonType": "objectId" + }, + "name": { + "pattern": "^[^\\t\\n\\r]{0,255}$", + "bsonType": "string" + }, + "status": { + "bsonType": "string", + "enum": [ + "draft", + "active", + "archived" + ] + }, + "last_saved": { + "bsonType": "object", + "properties": { + "from_ip": { + "pattern": "^((25[0-5]|2[0-4][0-9]|[01]?[0-9][0-9]?)\\.){3}(25[0-5]|2[0-4][0-9]|[01]?[0-9][0-9]?)$", + "bsonType": "string" + }, + "by_user": { + "pattern": "^\\S{1,40}$", + "bsonType": "string" + }, + "at_time": { + "bsonType": "date" + }, + "correlation_id": { + "pattern": "^\\S{1,40}$", + "bsonType": "string" + } + }, + "additionalProperties": false, + "required": [ + "from_ip", + "by_user", + "at_time", + "correlation_id" + ] + }, + "users": { + "bsonType": "array", + "items": { + 
"bsonType": "objectId" + } + }, + "website": { + "pattern": "^https?://[^\\s]+$", + "bsonType": "string" + }, + "home_address": { + "bsonType": "object", + "properties": { + "street": { + "pattern": "^[^\\t\\n\\r]{0,255}$", + "bsonType": "string" + }, + "city": { + "pattern": "^\\S{1,40}$", + "bsonType": "string" + }, + "state": { + "pattern": "^[A-Z]{2}$", + "bsonType": "string" + }, + "postal_code": { + "pattern": "^\\S{1,40}$", + "bsonType": "string" + } + }, + "additionalProperties": false, + "required": [ + "street", + "postal_code" + ] + } + }, + "required": [ + "_id", + "name", + "status", + "last_saved" + ], + "additionalProperties": false + }, + { + "bsonType": "object", + "properties": { + "_id": { + "bsonType": "objectId" + }, + "user_name": { + "pattern": "^\\S{1,40}$", + "bsonType": "string" + }, + "first_name": { + "pattern": "^\\S{1,40}$", + "bsonType": "string" + }, + "last_name": { + "pattern": "^\\S{1,40}$", + "bsonType": "string" + }, + "status": { + "bsonType": "string", + "enum": [ + "draft", + "active", + "archived" + ] + }, + "last_saved": { + "bsonType": "object", + "properties": { + "from_ip": { + "pattern": "^((25[0-5]|2[0-4][0-9]|[01]?[0-9][0-9]?)\\.){3}(25[0-5]|2[0-4][0-9]|[01]?[0-9][0-9]?)$", + "bsonType": "string" + }, + "by_user": { + "pattern": "^\\S{1,40}$", + "bsonType": "string" + }, + "at_time": { + "bsonType": "date" + }, + "correlation_id": { + "pattern": "^\\S{1,40}$", + "bsonType": "string" + } + }, + "additionalProperties": false, + "required": [ + "from_ip", + "by_user", + "at_time", + "correlation_id" + ] + } + }, + "required": [ + "_id", + "user_name", + "status", + "last_saved" + ], + "additionalProperties": false + } + ] +} \ No newline at end of file diff --git a/tests/test_cases/large_sample/verified_output/bson_schema/search.1.0.0.3.json b/tests/test_cases/large_sample/verified_output/bson_schema/search.1.0.0.3.json new file mode 100644 index 0000000..f71cb8e --- /dev/null +++ b/tests/test_cases/large_sample/verified_output/bson_schema/search.1.0.0.3.json @@ -0,0 +1,370 @@ +{ + "bsonType": "object", + "properties": { + "collection_name": { + "pattern": "^\\S{1,40}$", + "bsonType": "string" + }, + "collection_id": { + "bsonType": "objectId" + } + }, + "required": [ + "collection_name", + "collection_id" + ], + "additionalProperties": false, + "oneOf": [ + { + "bsonType": "object", + "properties": { + "_id": { + "bsonType": "objectId" + }, + "title": { + "pattern": "^[^\\t\\n\\r]{0,255}$", + "bsonType": "string" + }, + "type": { + "bsonType": "string", + "enum": [ + "movie", + "tv_show", + "documentary", + "short" + ] + }, + "status": { + "bsonType": "string", + "enum": [ + "draft", + "published", + "archived" + ] + }, + "last_saved": { + "bsonType": "object", + "properties": { + "from_ip": { + "pattern": "^((25[0-5]|2[0-4][0-9]|[01]?[0-9][0-9]?)\\.){3}(25[0-5]|2[0-4][0-9]|[01]?[0-9][0-9]?)$", + "bsonType": "string" + }, + "by_user": { + "pattern": "^\\S{1,40}$", + "bsonType": "string" + }, + "at_time": { + "bsonType": "date" + }, + "correlation_id": { + "pattern": "^\\S{1,40}$", + "bsonType": "string" + } + }, + "additionalProperties": false, + "required": [ + "from_ip", + "by_user", + "at_time", + "correlation_id" + ] + }, + "tags": { + "bsonType": "array", + "items": { + "bsonType": "string", + "enum": [ + "action", + "comedy", + "drama", + "sci_fi", + "documentary" + ] + } + }, + "metadata": { + "bsonType": "object", + "properties": { + "duration": { + "bsonType": "int", + "minimum": 1 + }, + "format": { + "bsonType": "string", + "enum": [ 
+ "dvd", + "bluray", + "digital", + "streaming" + ] + }, + "quality": { + "bsonType": "string", + "enum": [ + "sd", + "hd", + "uhd" + ] + }, + "content_data": { + "bsonType": "object", + "properties": {}, + "additionalProperties": false, + "oneOf": [ + { + "bsonType": "object", + "properties": { + "director": { + "pattern": "^\\S{1,40}$", + "bsonType": "string" + }, + "cast": { + "bsonType": "array", + "items": { + "pattern": "^\\S{1,40}$", + "bsonType": "string" + } + }, + "genre": { + "bsonType": "array", + "items": { + "bsonType": "string", + "enum": [ + "action", + "comedy", + "drama", + "sci_fi", + "documentary" + ] + } + } + }, + "required": [ + "director" + ], + "additionalProperties": false + }, + { + "bsonType": "object", + "properties": { + "season": { + "bsonType": "int", + "minimum": 1 + }, + "episode": { + "bsonType": "int", + "minimum": 1 + }, + "network": { + "pattern": "^\\S{1,40}$", + "bsonType": "string" + } + }, + "required": [ + "season", + "episode" + ], + "additionalProperties": false + }, + { + "bsonType": "object", + "properties": { + "subject": { + "pattern": "^[^\\t\\n\\r]{0,255}$", + "bsonType": "string" + }, + "narrator": { + "pattern": "^\\S{1,40}$", + "bsonType": "string" + } + }, + "required": [ + "subject" + ], + "additionalProperties": false + }, + { + "bsonType": "object", + "properties": { + "festival": { + "pattern": "^\\S{1,40}$", + "bsonType": "string" + }, + "awards": { + "bsonType": "array", + "items": { + "pattern": "^\\S{1,40}$", + "bsonType": "string" + } + } + }, + "additionalProperties": false + } + ] + } + }, + "additionalProperties": false + } + }, + "required": [ + "_id", + "title", + "status", + "last_saved" + ], + "additionalProperties": false + }, + { + "bsonType": "object", + "properties": { + "_id": { + "bsonType": "objectId" + }, + "name": { + "pattern": "^[^\\t\\n\\r]{0,255}$", + "bsonType": "string" + }, + "status": { + "bsonType": "string", + "enum": [ + "draft", + "active", + "archived" + ] + }, + "last_saved": { + "bsonType": "object", + "properties": { + "from_ip": { + "pattern": "^((25[0-5]|2[0-4][0-9]|[01]?[0-9][0-9]?)\\.){3}(25[0-5]|2[0-4][0-9]|[01]?[0-9][0-9]?)$", + "bsonType": "string" + }, + "by_user": { + "pattern": "^\\S{1,40}$", + "bsonType": "string" + }, + "at_time": { + "bsonType": "date" + }, + "correlation_id": { + "pattern": "^\\S{1,40}$", + "bsonType": "string" + } + }, + "additionalProperties": false, + "required": [ + "from_ip", + "by_user", + "at_time", + "correlation_id" + ] + }, + "users": { + "bsonType": "array", + "items": { + "bsonType": "objectId" + } + }, + "website": { + "pattern": "^https?://[^\\s]+$", + "bsonType": "string" + }, + "home_address": { + "bsonType": "object", + "properties": { + "street": { + "pattern": "^[^\\t\\n\\r]{0,255}$", + "bsonType": "string" + }, + "city": { + "pattern": "^\\S{1,40}$", + "bsonType": "string" + }, + "state": { + "pattern": "^[A-Z]{2}$", + "bsonType": "string" + }, + "postal_code": { + "pattern": "^\\S{1,40}$", + "bsonType": "string" + } + }, + "additionalProperties": false, + "required": [ + "street", + "postal_code" + ] + } + }, + "required": [ + "_id", + "name", + "status", + "last_saved" + ], + "additionalProperties": false + }, + { + "bsonType": "object", + "properties": { + "_id": { + "bsonType": "objectId" + }, + "user_name": { + "pattern": "^\\S{1,40}$", + "bsonType": "string" + }, + "first_name": { + "pattern": "^\\S{1,40}$", + "bsonType": "string" + }, + "last_name": { + "pattern": "^\\S{1,40}$", + "bsonType": "string" + }, + "status": { + "bsonType": 
"string", + "enum": [ + "draft", + "active", + "archived" + ] + }, + "last_saved": { + "bsonType": "object", + "properties": { + "from_ip": { + "pattern": "^((25[0-5]|2[0-4][0-9]|[01]?[0-9][0-9]?)\\.){3}(25[0-5]|2[0-4][0-9]|[01]?[0-9][0-9]?)$", + "bsonType": "string" + }, + "by_user": { + "pattern": "^\\S{1,40}$", + "bsonType": "string" + }, + "at_time": { + "bsonType": "date" + }, + "correlation_id": { + "pattern": "^\\S{1,40}$", + "bsonType": "string" + } + }, + "additionalProperties": false, + "required": [ + "from_ip", + "by_user", + "at_time", + "correlation_id" + ] + } + }, + "required": [ + "_id", + "user_name", + "status", + "last_saved" + ], + "additionalProperties": false + } + ] +} \ No newline at end of file diff --git a/tests/test_cases/large_sample/verified_output/bson_schema/search.1.0.1.3.json b/tests/test_cases/large_sample/verified_output/bson_schema/search.1.0.1.3.json new file mode 100644 index 0000000..c8c5e80 --- /dev/null +++ b/tests/test_cases/large_sample/verified_output/bson_schema/search.1.0.1.3.json @@ -0,0 +1,476 @@ +{ + "bsonType": "object", + "properties": { + "collection_name": { + "pattern": "^\\S{1,40}$", + "bsonType": "string" + }, + "collection_id": { + "bsonType": "objectId" + } + }, + "required": [ + "collection_name", + "collection_id" + ], + "additionalProperties": false, + "oneOf": [ + { + "bsonType": "object", + "properties": { + "_id": { + "bsonType": "objectId" + }, + "title": { + "pattern": "^[^\\t\\n\\r]{0,255}$", + "bsonType": "string" + }, + "type": { + "bsonType": "string", + "enum": [ + "movie", + "tv_show", + "documentary", + "short" + ] + }, + "status": { + "bsonType": "string", + "enum": [ + "draft", + "published", + "archived" + ] + }, + "last_saved": { + "bsonType": "object", + "properties": { + "from_ip": { + "pattern": "^((25[0-5]|2[0-4][0-9]|[01]?[0-9][0-9]?)\\.){3}(25[0-5]|2[0-4][0-9]|[01]?[0-9][0-9]?)$", + "bsonType": "string" + }, + "by_user": { + "pattern": "^\\S{1,40}$", + "bsonType": "string" + }, + "at_time": { + "bsonType": "date" + }, + "correlation_id": { + "pattern": "^\\S{1,40}$", + "bsonType": "string" + } + }, + "additionalProperties": false, + "required": [ + "from_ip", + "by_user", + "at_time", + "correlation_id" + ] + }, + "tags": { + "bsonType": "array", + "items": { + "bsonType": "string", + "enum": [ + "action", + "comedy", + "drama", + "sci_fi", + "documentary" + ] + } + }, + "metadata": { + "bsonType": "object", + "properties": { + "duration": { + "bsonType": "int", + "minimum": 1 + }, + "format": { + "bsonType": "string", + "enum": [ + "dvd", + "bluray", + "digital", + "streaming" + ] + }, + "quality": { + "bsonType": "string", + "enum": [ + "sd", + "hd", + "uhd" + ] + }, + "content_data": { + "bsonType": "object", + "properties": {}, + "additionalProperties": false, + "oneOf": [ + { + "bsonType": "object", + "properties": { + "director": { + "pattern": "^\\S{1,40}$", + "bsonType": "string" + }, + "cast": { + "bsonType": "array", + "items": { + "pattern": "^\\S{1,40}$", + "bsonType": "string" + } + }, + "genre": { + "bsonType": "array", + "items": { + "bsonType": "string", + "enum": [ + "action", + "comedy", + "drama", + "sci_fi", + "documentary" + ] + } + } + }, + "required": [ + "director" + ], + "additionalProperties": false + }, + { + "bsonType": "object", + "properties": { + "season": { + "bsonType": "int", + "minimum": 1 + }, + "episode": { + "bsonType": "int", + "minimum": 1 + }, + "network": { + "pattern": "^\\S{1,40}$", + "bsonType": "string" + } + }, + "required": [ + "season", + "episode" + ], + 
"additionalProperties": false + }, + { + "bsonType": "object", + "properties": { + "subject": { + "pattern": "^[^\\t\\n\\r]{0,255}$", + "bsonType": "string" + }, + "narrator": { + "pattern": "^\\S{1,40}$", + "bsonType": "string" + } + }, + "required": [ + "subject" + ], + "additionalProperties": false + }, + { + "bsonType": "object", + "properties": { + "festival": { + "pattern": "^\\S{1,40}$", + "bsonType": "string" + }, + "awards": { + "bsonType": "array", + "items": { + "pattern": "^\\S{1,40}$", + "bsonType": "string" + } + } + }, + "additionalProperties": false + } + ] + } + }, + "additionalProperties": false + } + }, + "required": [ + "_id", + "title", + "status", + "last_saved" + ], + "additionalProperties": false + }, + { + "bsonType": "object", + "properties": { + "_id": { + "bsonType": "objectId" + }, + "name": { + "pattern": "^[^\\t\\n\\r]{0,255}$", + "bsonType": "string" + }, + "status": { + "bsonType": "string", + "enum": [ + "draft", + "active", + "archived" + ] + }, + "last_saved": { + "bsonType": "object", + "properties": { + "from_ip": { + "pattern": "^((25[0-5]|2[0-4][0-9]|[01]?[0-9][0-9]?)\\.){3}(25[0-5]|2[0-4][0-9]|[01]?[0-9][0-9]?)$", + "bsonType": "string" + }, + "by_user": { + "pattern": "^\\S{1,40}$", + "bsonType": "string" + }, + "at_time": { + "bsonType": "date" + }, + "correlation_id": { + "pattern": "^\\S{1,40}$", + "bsonType": "string" + } + }, + "additionalProperties": false, + "required": [ + "from_ip", + "by_user", + "at_time", + "correlation_id" + ] + }, + "users": { + "bsonType": "array", + "items": { + "bsonType": "objectId" + } + }, + "website": { + "pattern": "^https?://[^\\s]+$", + "bsonType": "string" + }, + "home_address": { + "bsonType": "object", + "properties": { + "street": { + "pattern": "^[^\\t\\n\\r]{0,255}$", + "bsonType": "string" + }, + "city": { + "pattern": "^\\S{1,40}$", + "bsonType": "string" + }, + "state": { + "pattern": "^[A-Z]{2}$", + "bsonType": "string" + }, + "postal_code": { + "pattern": "^\\S{1,40}$", + "bsonType": "string" + } + }, + "additionalProperties": false, + "required": [ + "street", + "postal_code" + ] + } + }, + "required": [ + "_id", + "name", + "status", + "last_saved" + ], + "additionalProperties": false + }, + { + "bsonType": "object", + "properties": { + "_id": { + "bsonType": "objectId" + }, + "user_name": { + "pattern": "^\\S{1,40}$", + "bsonType": "string" + }, + "full_name": { + "pattern": "^[^\\t\\n\\r]{0,255}$", + "bsonType": "string" + }, + "status": { + "bsonType": "string", + "enum": [ + "draft", + "active", + "archived" + ] + }, + "categories": { + "bsonType": "array", + "items": { + "bsonType": "object", + "properties": { + "name": { + "pattern": "^\\S{1,40}$", + "bsonType": "string" + }, + "category": { + "bsonType": "string", + "enum": [ + "work", + "personal", + "project", + "reference" + ] + }, + "tags": { + "bsonType": "array", + "items": { + "bsonType": "string", + "enum": [ + "urgent", + "important", + "normal", + "low", + "completed", + "in_progress", + "blocked", + "review" + ] + } + } + }, + "required": [ + "name", + "category", + "tags" + ], + "additionalProperties": false + } + }, + "preferences": { + "bsonType": "object", + "properties": { + "notification_types": { + "bsonType": "array", + "items": { + "bsonType": "string", + "enum": [ + "system", + "user", + "content", + "reminder" + ] + } + }, + "delivery_channels": { + "bsonType": "array", + "items": { + "bsonType": "string", + "enum": [ + "email", + "sms", + "push", + "in_app" + ] + } + }, + "content_tags": { + "bsonType": "array", + 
"items": { + "bsonType": "string", + "enum": [ + "technology", + "business", + "entertainment", + "education", + "news" + ] + } + }, + "priority_levels": { + "bsonType": "array", + "items": { + "bsonType": "string", + "enum": [ + "critical", + "high", + "medium", + "low" + ] + } + } + }, + "required": [ + "notification_types", + "delivery_channels" + ], + "additionalProperties": false + }, + "email": { + "pattern": "^[^\\s@]+@[^\\s@]+\\.[^\\s@]+$", + "bsonType": "string" + }, + "phone": { + "pattern": "^\\+1[2-9][0-9]{9}$", + "bsonType": "string" + }, + "last_saved": { + "bsonType": "object", + "properties": { + "from_ip": { + "pattern": "^((25[0-5]|2[0-4][0-9]|[01]?[0-9][0-9]?)\\.){3}(25[0-5]|2[0-4][0-9]|[01]?[0-9][0-9]?)$", + "bsonType": "string" + }, + "by_user": { + "pattern": "^\\S{1,40}$", + "bsonType": "string" + }, + "at_time": { + "bsonType": "date" + }, + "correlation_id": { + "pattern": "^\\S{1,40}$", + "bsonType": "string" + } + }, + "additionalProperties": false, + "required": [ + "from_ip", + "by_user", + "at_time", + "correlation_id" + ] + } + }, + "required": [ + "_id", + "user_name", + "status", + "last_saved" + ], + "additionalProperties": false + } + ] +} \ No newline at end of file diff --git a/tests/test_cases/large_sample/verified_output/bson_schema/user.1.0.0.1.json b/tests/test_cases/large_sample/verified_output/bson_schema/user.1.0.0.1.json new file mode 100644 index 0000000..dc8ff56 --- /dev/null +++ b/tests/test_cases/large_sample/verified_output/bson_schema/user.1.0.0.1.json @@ -0,0 +1,61 @@ +{ + "bsonType": "object", + "properties": { + "_id": { + "bsonType": "objectId" + }, + "user_name": { + "pattern": "^\\S{1,40}$", + "bsonType": "string" + }, + "first_name": { + "pattern": "^\\S{1,40}$", + "bsonType": "string" + }, + "last_name": { + "pattern": "^\\S{1,40}$", + "bsonType": "string" + }, + "status": { + "bsonType": "string", + "enum": [ + "active", + "archived" + ] + }, + "last_saved": { + "bsonType": "object", + "properties": { + "from_ip": { + "pattern": "^((25[0-5]|2[0-4][0-9]|[01]?[0-9][0-9]?)\\.){3}(25[0-5]|2[0-4][0-9]|[01]?[0-9][0-9]?)$", + "bsonType": "string" + }, + "by_user": { + "pattern": "^\\S{1,40}$", + "bsonType": "string" + }, + "at_time": { + "bsonType": "date" + }, + "correlation_id": { + "pattern": "^\\S{1,40}$", + "bsonType": "string" + } + }, + "additionalProperties": false, + "required": [ + "from_ip", + "by_user", + "at_time", + "correlation_id" + ] + } + }, + "required": [ + "_id", + "user_name", + "status", + "last_saved" + ], + "additionalProperties": false +} \ No newline at end of file diff --git a/tests/test_cases/large_sample/verified_output/bson_schema/user.1.0.0.2.json b/tests/test_cases/large_sample/verified_output/bson_schema/user.1.0.0.2.json new file mode 100644 index 0000000..3c134ec --- /dev/null +++ b/tests/test_cases/large_sample/verified_output/bson_schema/user.1.0.0.2.json @@ -0,0 +1,62 @@ +{ + "bsonType": "object", + "properties": { + "_id": { + "bsonType": "objectId" + }, + "user_name": { + "pattern": "^\\S{1,40}$", + "bsonType": "string" + }, + "first_name": { + "pattern": "^\\S{1,40}$", + "bsonType": "string" + }, + "last_name": { + "pattern": "^\\S{1,40}$", + "bsonType": "string" + }, + "status": { + "bsonType": "string", + "enum": [ + "draft", + "active", + "archived" + ] + }, + "last_saved": { + "bsonType": "object", + "properties": { + "from_ip": { + "pattern": "^((25[0-5]|2[0-4][0-9]|[01]?[0-9][0-9]?)\\.){3}(25[0-5]|2[0-4][0-9]|[01]?[0-9][0-9]?)$", + "bsonType": "string" + }, + "by_user": { + "pattern": 
"^\\S{1,40}$", + "bsonType": "string" + }, + "at_time": { + "bsonType": "date" + }, + "correlation_id": { + "pattern": "^\\S{1,40}$", + "bsonType": "string" + } + }, + "additionalProperties": false, + "required": [ + "from_ip", + "by_user", + "at_time", + "correlation_id" + ] + } + }, + "required": [ + "_id", + "user_name", + "status", + "last_saved" + ], + "additionalProperties": false +} \ No newline at end of file diff --git a/tests/test_cases/large_sample/verified_output/bson_schema/user.1.0.0.3.json b/tests/test_cases/large_sample/verified_output/bson_schema/user.1.0.0.3.json new file mode 100644 index 0000000..3c134ec --- /dev/null +++ b/tests/test_cases/large_sample/verified_output/bson_schema/user.1.0.0.3.json @@ -0,0 +1,62 @@ +{ + "bsonType": "object", + "properties": { + "_id": { + "bsonType": "objectId" + }, + "user_name": { + "pattern": "^\\S{1,40}$", + "bsonType": "string" + }, + "first_name": { + "pattern": "^\\S{1,40}$", + "bsonType": "string" + }, + "last_name": { + "pattern": "^\\S{1,40}$", + "bsonType": "string" + }, + "status": { + "bsonType": "string", + "enum": [ + "draft", + "active", + "archived" + ] + }, + "last_saved": { + "bsonType": "object", + "properties": { + "from_ip": { + "pattern": "^((25[0-5]|2[0-4][0-9]|[01]?[0-9][0-9]?)\\.){3}(25[0-5]|2[0-4][0-9]|[01]?[0-9][0-9]?)$", + "bsonType": "string" + }, + "by_user": { + "pattern": "^\\S{1,40}$", + "bsonType": "string" + }, + "at_time": { + "bsonType": "date" + }, + "correlation_id": { + "pattern": "^\\S{1,40}$", + "bsonType": "string" + } + }, + "additionalProperties": false, + "required": [ + "from_ip", + "by_user", + "at_time", + "correlation_id" + ] + } + }, + "required": [ + "_id", + "user_name", + "status", + "last_saved" + ], + "additionalProperties": false +} \ No newline at end of file diff --git a/tests/test_cases/large_sample/verified_output/bson_schema/user.1.0.1.3.json b/tests/test_cases/large_sample/verified_output/bson_schema/user.1.0.1.3.json new file mode 100644 index 0000000..fa09441 --- /dev/null +++ b/tests/test_cases/large_sample/verified_output/bson_schema/user.1.0.1.3.json @@ -0,0 +1,168 @@ +{ + "bsonType": "object", + "properties": { + "_id": { + "bsonType": "objectId" + }, + "user_name": { + "pattern": "^\\S{1,40}$", + "bsonType": "string" + }, + "full_name": { + "pattern": "^[^\\t\\n\\r]{0,255}$", + "bsonType": "string" + }, + "status": { + "bsonType": "string", + "enum": [ + "draft", + "active", + "archived" + ] + }, + "categories": { + "bsonType": "array", + "items": { + "bsonType": "object", + "properties": { + "name": { + "pattern": "^\\S{1,40}$", + "bsonType": "string" + }, + "category": { + "bsonType": "string", + "enum": [ + "work", + "personal", + "project", + "reference" + ] + }, + "tags": { + "bsonType": "array", + "items": { + "bsonType": "string", + "enum": [ + "urgent", + "important", + "normal", + "low", + "completed", + "in_progress", + "blocked", + "review" + ] + } + } + }, + "required": [ + "name", + "category", + "tags" + ], + "additionalProperties": false + } + }, + "preferences": { + "bsonType": "object", + "properties": { + "notification_types": { + "bsonType": "array", + "items": { + "bsonType": "string", + "enum": [ + "system", + "user", + "content", + "reminder" + ] + } + }, + "delivery_channels": { + "bsonType": "array", + "items": { + "bsonType": "string", + "enum": [ + "email", + "sms", + "push", + "in_app" + ] + } + }, + "content_tags": { + "bsonType": "array", + "items": { + "bsonType": "string", + "enum": [ + "technology", + "business", + "entertainment", + 
"education", + "news" + ] + } + }, + "priority_levels": { + "bsonType": "array", + "items": { + "bsonType": "string", + "enum": [ + "critical", + "high", + "medium", + "low" + ] + } + } + }, + "required": [ + "notification_types", + "delivery_channels" + ], + "additionalProperties": false + }, + "email": { + "pattern": "^[^\\s@]+@[^\\s@]+\\.[^\\s@]+$", + "bsonType": "string" + }, + "phone": { + "pattern": "^\\+1[2-9][0-9]{9}$", + "bsonType": "string" + }, + "last_saved": { + "bsonType": "object", + "properties": { + "from_ip": { + "pattern": "^((25[0-5]|2[0-4][0-9]|[01]?[0-9][0-9]?)\\.){3}(25[0-5]|2[0-4][0-9]|[01]?[0-9][0-9]?)$", + "bsonType": "string" + }, + "by_user": { + "pattern": "^\\S{1,40}$", + "bsonType": "string" + }, + "at_time": { + "bsonType": "date" + }, + "correlation_id": { + "pattern": "^\\S{1,40}$", + "bsonType": "string" + } + }, + "additionalProperties": false, + "required": [ + "from_ip", + "by_user", + "at_time", + "correlation_id" + ] + } + }, + "required": [ + "_id", + "user_name", + "status", + "last_saved" + ], + "additionalProperties": false +} \ No newline at end of file diff --git a/tests/test_cases/large_sample/verified_output/json_schema/content.1.0.0.1.yaml b/tests/test_cases/large_sample/verified_output/json_schema/content.1.0.0.1.yaml new file mode 100644 index 0000000..2883e02 --- /dev/null +++ b/tests/test_cases/large_sample/verified_output/json_schema/content.1.0.0.1.yaml @@ -0,0 +1,213 @@ +description: A simple content collection for testing +type: object +properties: + _id: + description: The unique identifier for content + type: string + pattern: ^[0-9a-fA-F]{24}$ + title: + description: Content title + type: string + pattern: ^\S{1,40}$ + subtitle: + description: Content subtitle + type: string + pattern: ^[^\t\n\r]{0,255}$ + content_type: + description: Type of content (article, video, podcast, etc.) 
+ type: string + pattern: ^\S{1,40}$ + status: + description: Content status + type: string + enum: + - active + - archived + author_id: + description: A user collection for testing the schema system + type: object + properties: + _id: + description: The unique identifier for a user + type: string + pattern: ^[0-9a-fA-F]{24}$ + user_name: + description: Username + type: string + pattern: ^\S{1,40}$ + first_name: + description: Users First Name + type: string + pattern: ^\S{1,40}$ + last_name: + description: Users Last Name + type: string + pattern: ^\S{1,40}$ + status: + description: document status + type: string + enum: + - active + - archived + last_saved: + description: The last time this document was saved + type: object + properties: + from_ip: + description: A valid IP Address + type: string + pattern: ^((25[0-5]|2[0-4][0-9]|[01]?[0-9][0-9]?)\.){3}(25[0-5]|2[0-4][0-9]|[01]?[0-9][0-9]?)$ + by_user: + description: A String of text, 1 to 40 characters with no spaces, or special + characters like /t or /n + type: string + pattern: ^\S{1,40}$ + at_time: + description: An ISO 8601 formatted date-time string + type: string + format: date-time + correlation_id: + description: A String of text, 1 to 40 characters with no spaces, or special + characters like /t or /n + type: string + pattern: ^\S{1,40}$ + additionalProperties: false + required: + - from_ip + - by_user + - at_time + - correlation_id + required: + - _id + - user_name + - status + - last_saved + additionalProperties: false + content_data: + description: The actual content data + type: object + properties: + body: + description: Article body text + type: string + maxLength: 4096 + url: + description: Video URL + type: string + pattern: ^https?://[^\s]+$ + audio_url: + description: Audio file URL + type: string + pattern: ^https?://[^\s]+$ + transcript: + description: Podcast transcript + type: string + maxLength: 4096 + tags: + description: Content tags + type: array + items: + description: Missing Required Description + type: string + pattern: ^\S{1,40}$ + word_count: + description: Article word count + type: number + minimum: 1 + multipleOf: 1 + duration: + description: Video duration in seconds + type: number + minimum: 1 + multipleOf: 1 + quality: + description: Video quality + type: string + pattern: ^\S{1,40}$ + episode_number: + description: Episode number + type: number + minimum: 1 + multipleOf: 1 + additionalProperties: false + metadata: + description: Additional metadata for the content + type: object + properties: + created_at: + description: When the content was created + type: string + format: date-time + updated_at: + description: When the content was last updated + type: string + format: date-time + published_at: + description: When the content was published + type: string + format: date-time + categories: + description: Content categories + type: array + items: + description: A content category + type: object + properties: + name: + description: Category name + type: string + pattern: ^\S{1,40}$ + type: + description: Category type + type: string + pattern: ^\S{1,40}$ + tags: + description: Category tags + type: array + items: + description: Missing Required Description + type: string + pattern: ^\S{1,40}$ + required: + - name + - type + additionalProperties: false + required: + - created_at + additionalProperties: false + last_saved: + description: Last saved breadcrumb + type: object + properties: + from_ip: + description: A valid IP Address + type: string + pattern: 
^((25[0-5]|2[0-4][0-9]|[01]?[0-9][0-9]?)\.){3}(25[0-5]|2[0-4][0-9]|[01]?[0-9][0-9]?)$ + by_user: + description: A String of text, 1 to 40 characters with no spaces, or special + characters like /t or /n + type: string + pattern: ^\S{1,40}$ + at_time: + description: An ISO 8601 formatted date-time string + type: string + format: date-time + correlation_id: + description: A String of text, 1 to 40 characters with no spaces, or special + characters like /t or /n + type: string + pattern: ^\S{1,40}$ + additionalProperties: false + required: + - from_ip + - by_user + - at_time + - correlation_id +required: +- _id +- title +- content_type +- status +- content_data +- last_saved +additionalProperties: false diff --git a/tests/test_cases/large_sample/verified_output/json_schema/content.1.0.0.2.yaml b/tests/test_cases/large_sample/verified_output/json_schema/content.1.0.0.2.yaml new file mode 100644 index 0000000..7c33b94 --- /dev/null +++ b/tests/test_cases/large_sample/verified_output/json_schema/content.1.0.0.2.yaml @@ -0,0 +1,215 @@ +description: A simple content collection for testing +type: object +properties: + _id: + description: The unique identifier for content + type: string + pattern: ^[0-9a-fA-F]{24}$ + title: + description: Content title + type: string + pattern: ^\S{1,40}$ + subtitle: + description: Content subtitle + type: string + pattern: ^[^\t\n\r]{0,255}$ + content_type: + description: Type of content (article, video, podcast, etc.) + type: string + pattern: ^\S{1,40}$ + status: + description: Content status + type: string + enum: + - draft + - active + - archived + author_id: + description: A user collection for testing the schema system + type: object + properties: + _id: + description: The unique identifier for a user + type: string + pattern: ^[0-9a-fA-F]{24}$ + user_name: + description: Username + type: string + pattern: ^\S{1,40}$ + first_name: + description: Users First Name + type: string + pattern: ^\S{1,40}$ + last_name: + description: Users Last Name + type: string + pattern: ^\S{1,40}$ + status: + description: document status + type: string + enum: + - draft + - active + - archived + last_saved: + description: The last time this document was saved + type: object + properties: + from_ip: + description: A valid IP Address + type: string + pattern: ^((25[0-5]|2[0-4][0-9]|[01]?[0-9][0-9]?)\.){3}(25[0-5]|2[0-4][0-9]|[01]?[0-9][0-9]?)$ + by_user: + description: A String of text, 1 to 40 characters with no spaces, or special + characters like /t or /n + type: string + pattern: ^\S{1,40}$ + at_time: + description: An ISO 8601 formatted date-time string + type: string + format: date-time + correlation_id: + description: A String of text, 1 to 40 characters with no spaces, or special + characters like /t or /n + type: string + pattern: ^\S{1,40}$ + additionalProperties: false + required: + - from_ip + - by_user + - at_time + - correlation_id + required: + - _id + - user_name + - status + - last_saved + additionalProperties: false + content_data: + description: The actual content data + type: object + properties: + body: + description: Article body text + type: string + maxLength: 4096 + url: + description: Video URL + type: string + pattern: ^https?://[^\s]+$ + audio_url: + description: Audio file URL + type: string + pattern: ^https?://[^\s]+$ + transcript: + description: Podcast transcript + type: string + maxLength: 4096 + tags: + description: Content tags + type: array + items: + description: Missing Required Description + type: string + pattern: ^\S{1,40}$ + word_count: + 
description: Article word count + type: number + minimum: 1 + multipleOf: 1 + duration: + description: Video duration in seconds + type: number + minimum: 1 + multipleOf: 1 + quality: + description: Video quality + type: string + pattern: ^\S{1,40}$ + episode_number: + description: Episode number + type: number + minimum: 1 + multipleOf: 1 + additionalProperties: false + metadata: + description: Additional metadata for the content + type: object + properties: + created_at: + description: When the content was created + type: string + format: date-time + updated_at: + description: When the content was last updated + type: string + format: date-time + published_at: + description: When the content was published + type: string + format: date-time + categories: + description: Content categories + type: array + items: + description: A content category + type: object + properties: + name: + description: Category name + type: string + pattern: ^\S{1,40}$ + type: + description: Category type + type: string + pattern: ^\S{1,40}$ + tags: + description: Category tags + type: array + items: + description: Missing Required Description + type: string + pattern: ^\S{1,40}$ + required: + - name + - type + additionalProperties: false + required: + - created_at + additionalProperties: false + last_saved: + description: Last saved breadcrumb + type: object + properties: + from_ip: + description: A valid IP Address + type: string + pattern: ^((25[0-5]|2[0-4][0-9]|[01]?[0-9][0-9]?)\.){3}(25[0-5]|2[0-4][0-9]|[01]?[0-9][0-9]?)$ + by_user: + description: A String of text, 1 to 40 characters with no spaces, or special + characters like /t or /n + type: string + pattern: ^\S{1,40}$ + at_time: + description: An ISO 8601 formatted date-time string + type: string + format: date-time + correlation_id: + description: A String of text, 1 to 40 characters with no spaces, or special + characters like /t or /n + type: string + pattern: ^\S{1,40}$ + additionalProperties: false + required: + - from_ip + - by_user + - at_time + - correlation_id +required: +- _id +- title +- content_type +- status +- content_data +- last_saved +additionalProperties: false diff --git a/tests/test_cases/large_sample/verified_output/json_schema/content.1.0.0.3.yaml b/tests/test_cases/large_sample/verified_output/json_schema/content.1.0.0.3.yaml new file mode 100644 index 0000000..7c33b94 --- /dev/null +++ b/tests/test_cases/large_sample/verified_output/json_schema/content.1.0.0.3.yaml @@ -0,0 +1,215 @@ +description: A simple content collection for testing +type: object +properties: + _id: + description: The unique identifier for content + type: string + pattern: ^[0-9a-fA-F]{24}$ + title: + description: Content title + type: string + pattern: ^\S{1,40}$ + subtitle: + description: Content subtitle + type: string + pattern: ^[^\t\n\r]{0,255}$ + content_type: + description: Type of content (article, video, podcast, etc.) 
+ type: string + pattern: ^\S{1,40}$ + status: + description: Content status + type: string + enum: + - draft + - active + - archived + author_id: + description: A user collection for testing the schema system + type: object + properties: + _id: + description: The unique identifier for a user + type: string + pattern: ^[0-9a-fA-F]{24}$ + user_name: + description: Username + type: string + pattern: ^\S{1,40}$ + first_name: + description: Users First Name + type: string + pattern: ^\S{1,40}$ + last_name: + description: Users Last Name + type: string + pattern: ^\S{1,40}$ + status: + description: document status + type: string + enum: + - draft + - active + - archived + last_saved: + description: The last time this document was saved + type: object + properties: + from_ip: + description: A valid IP Address + type: string + pattern: ^((25[0-5]|2[0-4][0-9]|[01]?[0-9][0-9]?)\.){3}(25[0-5]|2[0-4][0-9]|[01]?[0-9][0-9]?)$ + by_user: + description: A String of text, 1 to 40 characters with no spaces, or special + characters like /t or /n + type: string + pattern: ^\S{1,40}$ + at_time: + description: An ISO 8601 formatted date-time string + type: string + format: date-time + correlation_id: + description: A String of text, 1 to 40 characters with no spaces, or special + characters like /t or /n + type: string + pattern: ^\S{1,40}$ + additionalProperties: false + required: + - from_ip + - by_user + - at_time + - correlation_id + required: + - _id + - user_name + - status + - last_saved + additionalProperties: false + content_data: + description: The actual content data + type: object + properties: + body: + description: Article body text + type: string + maxLength: 4096 + url: + description: Video URL + type: string + pattern: ^https?://[^\s]+$ + audio_url: + description: Audio file URL + type: string + pattern: ^https?://[^\s]+$ + transcript: + description: Podcast transcript + type: string + maxLength: 4096 + tags: + description: Content tags + type: array + items: + description: Missing Required Description + type: string + pattern: ^\S{1,40}$ + word_count: + description: Article word count + type: number + minimum: 1 + multipleOf: 1 + duration: + description: Video duration in seconds + type: number + minimum: 1 + multipleOf: 1 + quality: + description: Video quality + type: string + pattern: ^\S{1,40}$ + episode_number: + description: Episode number + type: number + minimum: 1 + multipleOf: 1 + additionalProperties: false + metadata: + description: Additional metadata for the content + type: object + properties: + created_at: + description: When the content was created + type: string + format: date-time + updated_at: + description: When the content was last updated + type: string + format: date-time + published_at: + description: When the content was published + type: string + format: date-time + categories: + description: Content categories + type: array + items: + description: A content category + type: object + properties: + name: + description: Category name + type: string + pattern: ^\S{1,40}$ + type: + description: Category type + type: string + pattern: ^\S{1,40}$ + tags: + description: Category tags + type: array + items: + description: Missing Required Description + type: string + pattern: ^\S{1,40}$ + required: + - name + - type + additionalProperties: false + required: + - created_at + additionalProperties: false + last_saved: + description: Last saved breadcrumb + type: object + properties: + from_ip: + description: A valid IP Address + type: string + pattern: 
^((25[0-5]|2[0-4][0-9]|[01]?[0-9][0-9]?)\.){3}(25[0-5]|2[0-4][0-9]|[01]?[0-9][0-9]?)$ + by_user: + description: A String of text, 1 to 40 characters with no spaces, or special + characters like /t or /n + type: string + pattern: ^\S{1,40}$ + at_time: + description: An ISO 8601 formatted date-time string + type: string + format: date-time + correlation_id: + description: A String of text, 1 to 40 characters with no spaces, or special + characters like /t or /n + type: string + pattern: ^\S{1,40}$ + additionalProperties: false + required: + - from_ip + - by_user + - at_time + - correlation_id +required: +- _id +- title +- content_type +- status +- content_data +- last_saved +additionalProperties: false diff --git a/tests/test_cases/large_sample/verified_output/json_schema/content.1.0.1.3.yaml b/tests/test_cases/large_sample/verified_output/json_schema/content.1.0.1.3.yaml new file mode 100644 index 0000000..cd3c4bf --- /dev/null +++ b/tests/test_cases/large_sample/verified_output/json_schema/content.1.0.1.3.yaml @@ -0,0 +1,260 @@ +description: A content collection for testing one_of structures and advanced schema + features +type: object +properties: + _id: + description: The unique identifier for content + type: string + pattern: ^[0-9a-fA-F]{24}$ + title: + description: Content title + type: string + pattern: ^\S{1,40}$ + subtitle: + description: Content subtitle + type: string + pattern: ^[^\t\n\r]{0,255}$ + content_type: + description: Type of content (article, video, podcast, etc.) + type: string + enum: + - article + - video + - podcast + status: + description: Content status + type: string + enum: + - draft + - active + - archived + author_id: + description: A user collection for testing the schema system + type: object + properties: + _id: + description: The unique identifier for a user + type: string + pattern: ^[0-9a-fA-F]{24}$ + user_name: + description: Username + type: string + pattern: ^\S{1,40}$ + first_name: + description: Users First Name + type: string + pattern: ^\S{1,40}$ + last_name: + description: Users Last Name + type: string + pattern: ^\S{1,40}$ + status: + description: document status + type: string + enum: + - draft + - active + - archived + last_saved: + description: The last time this document was saved + type: object + properties: + from_ip: + description: A valid IP Address + type: string + pattern: ^((25[0-5]|2[0-4][0-9]|[01]?[0-9][0-9]?)\.){3}(25[0-5]|2[0-4][0-9]|[01]?[0-9][0-9]?)$ + by_user: + description: A String of text, 1 to 40 characters with no spaces, or special + characters like /t or /n + type: string + pattern: ^\S{1,40}$ + at_time: + description: An ISO 8601 formatted date-time string + type: string + format: date-time + correlation_id: + description: A String of text, 1 to 40 characters with no spaces, or special + characters like /t or /n + type: string + pattern: ^\S{1,40}$ + additionalProperties: false + required: + - from_ip + - by_user + - at_time + - correlation_id + required: + - _id + - user_name + - status + - last_saved + additionalProperties: false + full_title: + description: Full title including subtitle + type: string + pattern: ^[^\t\n\r]{0,255}$ + content_data: + description: The actual content data - varies by type + type: object + properties: {} + additionalProperties: false + oneOf: + - description: Article content structure + type: object + properties: + body: + description: Article body text + type: string + maxLength: 4096 + tags: + description: Article tags + type: array + items: + type: string + enum: + - technology + - 
business + - entertainment + - education + - news + word_count: + description: Article word count + type: number + minimum: 1 + multipleOf: 1 + required: + - body + additionalProperties: false + - description: Video content structure + type: object + properties: + url: + description: Video URL + type: string + pattern: ^https?://[^\s]+$ + duration: + description: Video duration in seconds + type: number + minimum: 1 + multipleOf: 1 + quality: + description: Video quality + type: string + enum: + - sd + - hd + - uhd + required: + - url + additionalProperties: false + - description: Podcast content structure + type: object + properties: + audio_url: + description: Audio file URL + type: string + pattern: ^https?://[^\s]+$ + transcript: + description: Podcast transcript + type: string + maxLength: 4096 + episode_number: + description: Episode number + type: number + minimum: 1 + multipleOf: 1 + required: + - audio_url + additionalProperties: false + metadata: + description: Additional metadata for the content + type: object + properties: + created_at: + description: When the content was created + type: string + format: date-time + updated_at: + description: When the content was last updated + type: string + format: date-time + published_at: + description: When the content was published + type: string + format: date-time + categories: + description: Content categories + type: array + items: + description: A content category + type: object + properties: + name: + description: Category name + type: string + pattern: ^\S{1,40}$ + type: + description: Category type + type: string + enum: + - work + - personal + - project + - reference + tags: + description: Category tags + type: array + items: + type: string + enum: + - urgent + - important + - normal + - low + - completed + - in_progress + - blocked + - review + required: + - name + - type + additionalProperties: false + required: + - created_at + additionalProperties: false + last_saved: + description: Last saved breadcrumb + type: object + properties: + from_ip: + description: A valid IP Address + type: string + pattern: ^((25[0-5]|2[0-4][0-9]|[01]?[0-9][0-9]?)\.){3}(25[0-5]|2[0-4][0-9]|[01]?[0-9][0-9]?)$ + by_user: + description: A String of text, 1 to 40 characters with no spaces, or special + characters like /t or /n + type: string + pattern: ^\S{1,40}$ + at_time: + description: An ISO 8601 formatted date-time string + type: string + format: date-time + correlation_id: + description: A String of text, 1 to 40 characters with no spaces, or special + characters like /t or /n + type: string + pattern: ^\S{1,40}$ + additionalProperties: false + required: + - from_ip + - by_user + - at_time + - correlation_id +required: +- _id +- title +- content_type +- status +- content_data +- last_saved +additionalProperties: false diff --git a/tests/test_cases/large_sample/verified_output/json_schema/media.1.0.0.1.yaml b/tests/test_cases/large_sample/verified_output/json_schema/media.1.0.0.1.yaml new file mode 100644 index 0000000..6ee517c --- /dev/null +++ b/tests/test_cases/large_sample/verified_output/json_schema/media.1.0.0.1.yaml @@ -0,0 +1,180 @@ +description: A media item in the system +type: object +properties: + _id: + description: The unique identifier for the media + type: string + pattern: ^[0-9a-fA-F]{24}$ + title: + description: The title of the media + type: string + pattern: ^[^\t\n\r]{0,255}$ + type: + description: The type of media + type: string + enum: + - movie + - tv_show + - documentary + - short + status: + description: The current 
status of the media + type: string + enum: + - draft + - published + - archived + last_saved: + description: When the media was last updated + type: object + properties: + from_ip: + description: A valid IP Address + type: string + pattern: ^((25[0-5]|2[0-4][0-9]|[01]?[0-9][0-9]?)\.){3}(25[0-5]|2[0-4][0-9]|[01]?[0-9][0-9]?)$ + by_user: + description: A String of text, 1 to 40 characters with no spaces, or special + characters like /t or /n + type: string + pattern: ^\S{1,40}$ + at_time: + description: An ISO 8601 formatted date-time string + type: string + format: date-time + correlation_id: + description: A String of text, 1 to 40 characters with no spaces, or special + characters like /t or /n + type: string + pattern: ^\S{1,40}$ + additionalProperties: false + required: + - from_ip + - by_user + - at_time + - correlation_id + tags: + description: Tags associated with the media + type: array + items: + type: string + enum: + - action + - comedy + - drama + - sci_fi + - documentary + metadata: + description: Additional metadata about the media + type: object + properties: + duration: + description: Duration in minutes + type: number + minimum: 1 + multipleOf: 1 + format: + description: Media format + type: string + enum: + - dvd + - bluray + - digital + - streaming + quality: + description: Quality rating + type: string + enum: + - sd + - hd + - uhd + content_data: + description: Media-specific content data + type: object + properties: {} + additionalProperties: false + oneOf: + - description: Movie-specific metadata + type: object + properties: + director: + description: Movie director + type: string + pattern: ^\S{1,40}$ + cast: + description: Movie cast members + type: array + items: + description: Cast member + type: string + pattern: ^\S{1,40}$ + genre: + description: Movie genre + type: array + items: + type: string + enum: + - action + - comedy + - drama + - sci_fi + - documentary + required: + - director + additionalProperties: false + - description: TV show-specific metadata + type: object + properties: + season: + description: Season number + type: number + minimum: 1 + multipleOf: 1 + episode: + description: Episode number + type: number + minimum: 1 + multipleOf: 1 + network: + description: Broadcasting network + type: string + pattern: ^\S{1,40}$ + required: + - season + - episode + additionalProperties: false + - description: Documentary-specific metadata + type: object + properties: + subject: + description: Documentary subject + type: string + pattern: ^[^\t\n\r]{0,255}$ + narrator: + description: Documentary narrator + type: string + pattern: ^\S{1,40}$ + required: + - subject + additionalProperties: false + - description: Short film-specific metadata + type: object + properties: + festival: + description: Film festival + type: string + pattern: ^\S{1,40}$ + awards: + description: Awards received + type: array + items: + description: Award name + type: string + pattern: ^\S{1,40}$ + additionalProperties: false + additionalProperties: false +required: +- _id +- title +- status +- last_saved +additionalProperties: false diff --git a/tests/test_cases/large_sample/verified_output/json_schema/media.1.0.0.2.yaml b/tests/test_cases/large_sample/verified_output/json_schema/media.1.0.0.2.yaml new file mode 100644 index 0000000..6ee517c --- /dev/null +++ b/tests/test_cases/large_sample/verified_output/json_schema/media.1.0.0.2.yaml @@ -0,0 +1,180 @@ +description: A media item in the system +type: object +properties: + _id: + description: The unique identifier for the media + type: 
string + pattern: ^[0-9a-fA-F]{24}$ + title: + description: The title of the media + type: string + pattern: ^[^\t\n\r]{0,255}$ + type: + description: The type of media + type: string + enum: + - movie + - tv_show + - documentary + - short + status: + description: The current status of the media + type: string + enum: + - draft + - published + - archived + last_saved: + description: When the media was last updated + type: object + properties: + from_ip: + description: A valid IP Address + type: string + pattern: ^((25[0-5]|2[0-4][0-9]|[01]?[0-9][0-9]?)\.){3}(25[0-5]|2[0-4][0-9]|[01]?[0-9][0-9]?)$ + by_user: + description: A String of text, 1 to 40 characters with no spaces, or special + characters like /t or /n + type: string + pattern: ^\S{1,40}$ + at_time: + description: An ISO 8601 formatted date-time string + type: string + format: date-time + correlation_id: + description: A String of text, 1 to 40 characters with no spaces, or special + characters like /t or /n + type: string + pattern: ^\S{1,40}$ + additionalProperties: false + required: + - from_ip + - by_user + - at_time + - correlation_id + tags: + description: Tags associated with the media + type: array + items: + type: string + enum: + - action + - comedy + - drama + - sci_fi + - documentary + metadata: + description: Additional metadata about the media + type: object + properties: + duration: + description: Duration in minutes + type: number + minimum: 1 + multipleOf: 1 + format: + description: Media format + type: string + enum: + - dvd + - bluray + - digital + - streaming + quality: + description: Quality rating + type: string + enum: + - sd + - hd + - uhd + content_data: + description: Media-specific content data + type: object + properties: {} + additionalProperties: false + oneOf: + - description: Movie-specific metadata + type: object + properties: + director: + description: Movie director + type: string + pattern: ^\S{1,40}$ + cast: + description: Movie cast members + type: array + items: + description: Cast member + type: string + pattern: ^\S{1,40}$ + genre: + description: Movie genre + type: array + items: + type: string + enum: + - action + - comedy + - drama + - sci_fi + - documentary + required: + - director + additionalProperties: false + - description: TV show-specific metadata + type: object + properties: + season: + description: Season number + type: number + minimum: 1 + multipleOf: 1 + episode: + description: Episode number + type: number + minimum: 1 + multipleOf: 1 + network: + description: Broadcasting network + type: string + pattern: ^\S{1,40}$ + required: + - season + - episode + additionalProperties: false + - description: Documentary-specific metadata + type: object + properties: + subject: + description: Documentary subject + type: string + pattern: ^[^\t\n\r]{0,255}$ + narrator: + description: Documentary narrator + type: string + pattern: ^\S{1,40}$ + required: + - subject + additionalProperties: false + - description: Short film-specific metadata + type: object + properties: + festival: + description: Film festival + type: string + pattern: ^\S{1,40}$ + awards: + description: Awards received + type: array + items: + description: Award name + type: string + pattern: ^\S{1,40}$ + additionalProperties: false + additionalProperties: false +required: +- _id +- title +- status +- last_saved +additionalProperties: false diff --git a/tests/test_cases/large_sample/verified_output/json_schema/media.1.0.0.3.yaml b/tests/test_cases/large_sample/verified_output/json_schema/media.1.0.0.3.yaml new file mode 
100644 index 0000000..6ee517c --- /dev/null +++ b/tests/test_cases/large_sample/verified_output/json_schema/media.1.0.0.3.yaml @@ -0,0 +1,180 @@ +description: A media item in the system +type: object +properties: + _id: + description: The unique identifier for the media + type: string + pattern: ^[0-9a-fA-F]{24}$ + title: + description: The title of the media + type: string + pattern: ^[^\t\n\r]{0,255}$ + type: + description: The type of media + type: string + enum: + - movie + - tv_show + - documentary + - short + status: + description: The current status of the media + type: string + enum: + - draft + - published + - archived + last_saved: + description: When the media was last updated + type: object + properties: + from_ip: + description: A valid IP Address + type: string + pattern: ^((25[0-5]|2[0-4][0-9]|[01]?[0-9][0-9]?)\.){3}(25[0-5]|2[0-4][0-9]|[01]?[0-9][0-9]?)$ + by_user: + description: A String of text, 1 to 40 characters with no spaces, or special + characters like /t or /n + type: string + pattern: ^\S{1,40}$ + at_time: + description: An ISO 8601 formatted date-time string + type: string + format: date-time + correlation_id: + description: A String of text, 1 to 40 characters with no spaces, or special + characters like /t or /n + type: string + pattern: ^\S{1,40}$ + additionalProperties: false + required: + - from_ip + - by_user + - at_time + - correlation_id + tags: + description: Tags associated with the media + type: array + items: + type: string + enum: + - action + - comedy + - drama + - sci_fi + - documentary + metadata: + description: Additional metadata about the media + type: object + properties: + duration: + description: Duration in minutes + type: number + minimum: 1 + multipleOf: 1 + format: + description: Media format + type: string + enum: + - dvd + - bluray + - digital + - streaming + quality: + description: Quality rating + type: string + enum: + - sd + - hd + - uhd + content_data: + description: Media-specific content data + type: object + properties: {} + additionalProperties: false + oneOf: + - description: Movie-specific metadata + type: object + properties: + director: + description: Movie director + type: string + pattern: ^\S{1,40}$ + cast: + description: Movie cast members + type: array + items: + description: Cast member + type: string + pattern: ^\S{1,40}$ + genre: + description: Movie genre + type: array + items: + type: string + enum: + - action + - comedy + - drama + - sci_fi + - documentary + required: + - director + additionalProperties: false + - description: TV show-specific metadata + type: object + properties: + season: + description: Season number + type: number + minimum: 1 + multipleOf: 1 + episode: + description: Episode number + type: number + minimum: 1 + multipleOf: 1 + network: + description: Broadcasting network + type: string + pattern: ^\S{1,40}$ + required: + - season + - episode + additionalProperties: false + - description: Documentary-specific metadata + type: object + properties: + subject: + description: Documentary subject + type: string + pattern: ^[^\t\n\r]{0,255}$ + narrator: + description: Documentary narrator + type: string + pattern: ^\S{1,40}$ + required: + - subject + additionalProperties: false + - description: Short film-specific metadata + type: object + properties: + festival: + description: Film festival + type: string + pattern: ^\S{1,40}$ + awards: + description: Awards received + type: array + items: + description: Award name + type: string + pattern: ^\S{1,40}$ + additionalProperties: false + 
additionalProperties: false +required: +- _id +- title +- status +- last_saved +additionalProperties: false diff --git a/tests/test_cases/large_sample/verified_output/json_schema/notification.1.0.0.1.yaml b/tests/test_cases/large_sample/verified_output/json_schema/notification.1.0.0.1.yaml new file mode 100644 index 0000000..6873860 --- /dev/null +++ b/tests/test_cases/large_sample/verified_output/json_schema/notification.1.0.0.1.yaml @@ -0,0 +1,117 @@ +description: A simple notification collection for testing +type: object +properties: + _id: + description: The unique identifier for notification + type: string + pattern: ^[0-9a-fA-F]{24}$ + user_id: + description: A user collection for testing the schema system + type: object + properties: + _id: + description: The unique identifier for a user + type: string + pattern: ^[0-9a-fA-F]{24}$ + user_name: + description: Username + type: string + pattern: ^\S{1,40}$ + first_name: + description: Users First Name + type: string + pattern: ^\S{1,40}$ + last_name: + description: Users Last Name + type: string + pattern: ^\S{1,40}$ + status: + description: document status + type: string + enum: + - active + - archived + last_saved: + description: The last time this document was saved + type: object + properties: + from_ip: + description: A valid IP Address + type: string + pattern: ^((25[0-5]|2[0-4][0-9]|[01]?[0-9][0-9]?)\.){3}(25[0-5]|2[0-4][0-9]|[01]?[0-9][0-9]?)$ + by_user: + description: A String of text, 1 to 40 characters with no spaces, or special + characters like /t or /n + type: string + pattern: ^\S{1,40}$ + at_time: + description: An ISO 8601 formatted date-time string + type: string + format: date-time + correlation_id: + description: A String of text, 1 to 40 characters with no spaces, or special + characters like /t or /n + type: string + pattern: ^\S{1,40}$ + additionalProperties: false + required: + - from_ip + - by_user + - at_time + - correlation_id + required: + - _id + - user_name + - status + - last_saved + additionalProperties: false + title: + description: Notification title + type: string + pattern: ^\S{1,40}$ + message: + description: Notification message + type: string + pattern: ^[^\t\n\r]{0,255}$ + status: + description: Notification status + type: string + enum: + - active + - archived + last_saved: + description: Last saved breadcrumb + type: object + properties: + from_ip: + description: A valid IP Address + type: string + pattern: ^((25[0-5]|2[0-4][0-9]|[01]?[0-9][0-9]?)\.){3}(25[0-5]|2[0-4][0-9]|[01]?[0-9][0-9]?)$ + by_user: + description: A String of text, 1 to 40 characters with no spaces, or special + characters like /t or /n + type: string + pattern: ^\S{1,40}$ + at_time: + description: An ISO 8601 formatted date-time string + type: string + format: date-time + correlation_id: + description: A String of text, 1 to 40 characters with no spaces, or special + characters like /t or /n + type: string + pattern: ^\S{1,40}$ + additionalProperties: false + required: + - from_ip + - by_user + - at_time + - correlation_id +required: +- _id +- user_id +- title +- message +- status +- last_saved +additionalProperties: false diff --git a/tests/test_cases/large_sample/verified_output/json_schema/notification.1.0.0.2.yaml b/tests/test_cases/large_sample/verified_output/json_schema/notification.1.0.0.2.yaml new file mode 100644 index 0000000..9d315ed --- /dev/null +++ b/tests/test_cases/large_sample/verified_output/json_schema/notification.1.0.0.2.yaml @@ -0,0 +1,119 @@ +description: A simple notification collection for testing 
+type: object +properties: + _id: + description: The unique identifier for notification + type: string + pattern: ^[0-9a-fA-F]{24}$ + user_id: + description: A user collection for testing the schema system + type: object + properties: + _id: + description: The unique identifier for a user + type: string + pattern: ^[0-9a-fA-F]{24}$ + user_name: + description: Username + type: string + pattern: ^\S{1,40}$ + first_name: + description: Users First Name + type: string + pattern: ^\S{1,40}$ + last_name: + description: Users Last Name + type: string + pattern: ^\S{1,40}$ + status: + description: document status + type: string + enum: + - draft + - active + - archived + last_saved: + description: The last time this document was saved + type: object + properties: + from_ip: + description: A valid IP Address + type: string + pattern: ^((25[0-5]|2[0-4][0-9]|[01]?[0-9][0-9]?)\.){3}(25[0-5]|2[0-4][0-9]|[01]?[0-9][0-9]?)$ + by_user: + description: A String of text, 1 to 40 characters with no spaces, or special + characters like /t or /n + type: string + pattern: ^\S{1,40}$ + at_time: + description: An ISO 8601 formatted date-time string + type: string + format: date-time + correlation_id: + description: A String of text, 1 to 40 characters with no spaces, or special + characters like /t or /n + type: string + pattern: ^\S{1,40}$ + additionalProperties: false + required: + - from_ip + - by_user + - at_time + - correlation_id + required: + - _id + - user_name + - status + - last_saved + additionalProperties: false + title: + description: Notification title + type: string + pattern: ^\S{1,40}$ + message: + description: Notification message + type: string + pattern: ^[^\t\n\r]{0,255}$ + status: + description: Notification status + type: string + enum: + - draft + - active + - archived + last_saved: + description: Last saved breadcrumb + type: object + properties: + from_ip: + description: A valid IP Address + type: string + pattern: ^((25[0-5]|2[0-4][0-9]|[01]?[0-9][0-9]?)\.){3}(25[0-5]|2[0-4][0-9]|[01]?[0-9][0-9]?)$ + by_user: + description: A String of text, 1 to 40 characters with no spaces, or special + characters like /t or /n + type: string + pattern: ^\S{1,40}$ + at_time: + description: An ISO 8601 formatted date-time string + type: string + format: date-time + correlation_id: + description: A String of text, 1 to 40 characters with no spaces, or special + characters like /t or /n + type: string + pattern: ^\S{1,40}$ + additionalProperties: false + required: + - from_ip + - by_user + - at_time + - correlation_id +required: +- _id +- user_id +- title +- message +- status +- last_saved +additionalProperties: false diff --git a/tests/test_cases/large_sample/verified_output/json_schema/notification.1.0.0.3.yaml b/tests/test_cases/large_sample/verified_output/json_schema/notification.1.0.0.3.yaml new file mode 100644 index 0000000..9d315ed --- /dev/null +++ b/tests/test_cases/large_sample/verified_output/json_schema/notification.1.0.0.3.yaml @@ -0,0 +1,119 @@ +description: A simple notification collection for testing +type: object +properties: + _id: + description: The unique identifier for notification + type: string + pattern: ^[0-9a-fA-F]{24}$ + user_id: + description: A user collection for testing the schema system + type: object + properties: + _id: + description: The unique identifier for a user + type: string + pattern: ^[0-9a-fA-F]{24}$ + user_name: + description: Username + type: string + pattern: ^\S{1,40}$ + first_name: + description: Users First Name + type: string + pattern: ^\S{1,40}$ + 
last_name: + description: Users Last Name + type: string + pattern: ^\S{1,40}$ + status: + description: document status + type: string + enum: + - draft + - active + - archived + last_saved: + description: The last time this document was saved + type: object + properties: + from_ip: + description: A valid IP Address + type: string + pattern: ^((25[0-5]|2[0-4][0-9]|[01]?[0-9][0-9]?)\.){3}(25[0-5]|2[0-4][0-9]|[01]?[0-9][0-9]?)$ + by_user: + description: A String of text, 1 to 40 characters with no spaces, or special + characters like /t or /n + type: string + pattern: ^\S{1,40}$ + at_time: + description: An ISO 8601 formatted date-time string + type: string + format: date-time + correlation_id: + description: A String of text, 1 to 40 characters with no spaces, or special + characters like /t or /n + type: string + pattern: ^\S{1,40}$ + additionalProperties: false + required: + - from_ip + - by_user + - at_time + - correlation_id + required: + - _id + - user_name + - status + - last_saved + additionalProperties: false + title: + description: Notification title + type: string + pattern: ^\S{1,40}$ + message: + description: Notification message + type: string + pattern: ^[^\t\n\r]{0,255}$ + status: + description: Notification status + type: string + enum: + - draft + - active + - archived + last_saved: + description: Last saved breadcrumb + type: object + properties: + from_ip: + description: A valid IP Address + type: string + pattern: ^((25[0-5]|2[0-4][0-9]|[01]?[0-9][0-9]?)\.){3}(25[0-5]|2[0-4][0-9]|[01]?[0-9][0-9]?)$ + by_user: + description: A String of text, 1 to 40 characters with no spaces, or special + characters like /t or /n + type: string + pattern: ^\S{1,40}$ + at_time: + description: An ISO 8601 formatted date-time string + type: string + format: date-time + correlation_id: + description: A String of text, 1 to 40 characters with no spaces, or special + characters like /t or /n + type: string + pattern: ^\S{1,40}$ + additionalProperties: false + required: + - from_ip + - by_user + - at_time + - correlation_id +required: +- _id +- user_id +- title +- message +- status +- last_saved +additionalProperties: false diff --git a/tests/test_cases/large_sample/verified_output/json_schema/notification.1.0.1.1.yaml b/tests/test_cases/large_sample/verified_output/json_schema/notification.1.0.1.1.yaml new file mode 100644 index 0000000..a52a3ce --- /dev/null +++ b/tests/test_cases/large_sample/verified_output/json_schema/notification.1.0.1.1.yaml @@ -0,0 +1,214 @@ +description: A notification collection for testing enum_array and cross-collection + references +type: object +properties: + _id: + description: The unique identifier for notification + type: string + pattern: ^[0-9a-fA-F]{24}$ + user_id: + description: A user collection for testing the schema system + type: object + properties: + _id: + description: The unique identifier for a user + type: string + pattern: ^[0-9a-fA-F]{24}$ + user_name: + description: Username + type: string + pattern: ^\S{1,40}$ + first_name: + description: Users First Name + type: string + pattern: ^\S{1,40}$ + last_name: + description: Users Last Name + type: string + pattern: ^\S{1,40}$ + status: + description: document status + type: string + enum: + - active + - archived + last_saved: + description: The last time this document was saved + type: object + properties: + from_ip: + description: A valid IP Address + type: string + pattern: ^((25[0-5]|2[0-4][0-9]|[01]?[0-9][0-9]?)\.){3}(25[0-5]|2[0-4][0-9]|[01]?[0-9][0-9]?)$ + by_user: + description: A String of 
text, 1 to 40 characters with no spaces, or special + characters like /t or /n + type: string + pattern: ^\S{1,40}$ + at_time: + description: An ISO 8601 formatted date-time string + type: string + format: date-time + correlation_id: + description: A String of text, 1 to 40 characters with no spaces, or special + characters like /t or /n + type: string + pattern: ^\S{1,40}$ + additionalProperties: false + required: + - from_ip + - by_user + - at_time + - correlation_id + required: + - _id + - user_name + - status + - last_saved + additionalProperties: false + title: + description: Notification title + type: string + pattern: ^\S{1,40}$ + message: + description: Notification message + type: string + pattern: ^[^\t\n\r]{0,255}$ + notification_type: + description: Type of notification + type: string + enum: + - system + - user + - content + - reminder + status: + description: Notification status + type: string + enum: + - active + - archived + priority: + description: Notification priority level + type: string + enum: + - critical + - high + - medium + - low + tags: + description: Notification tags for categorization + type: array + items: + type: string + enum: + - urgent + - important + - normal + - low + categories: + description: Notification categories + type: array + items: + type: string + enum: + - work + - personal + - project + - reference + channels: + description: Delivery channels for this notification + type: array + items: + type: string + enum: + - email + - sms + - push + - in_app + metadata: + description: Additional notification metadata + type: object + properties: + created_at: + description: When the notification was created + type: string + format: date-time + sent_at: + description: When the notification was sent + type: string + format: date-time + read_at: + description: When the notification was read + type: string + format: date-time + expires_at: + description: When the notification expires + type: string + format: date-time + source: + description: Source of the notification + type: object + properties: + collection: + description: Source collection name + type: string + pattern: ^\S{1,40}$ + document_id: + description: Source document ID + type: string + pattern: ^[0-9a-fA-F]{24}$ + action: + description: Action that triggered the notification + type: string + enum: + - created + - updated + - deleted + - published + required: + - collection + - document_id + additionalProperties: false + required: + - created_at + additionalProperties: false + last_saved: + description: Last saved breadcrumb + type: object + properties: + from_ip: + description: A valid IP Address + type: string + pattern: ^((25[0-5]|2[0-4][0-9]|[01]?[0-9][0-9]?)\.){3}(25[0-5]|2[0-4][0-9]|[01]?[0-9][0-9]?)$ + by_user: + description: A String of text, 1 to 40 characters with no spaces, or special + characters like /t or /n + type: string + pattern: ^\S{1,40}$ + at_time: + description: An ISO 8601 formatted date-time string + type: string + format: date-time + correlation_id: + description: A String of text, 1 to 40 characters with no spaces, or special + characters like /t or /n + type: string + pattern: ^\S{1,40}$ + additionalProperties: false + required: + - from_ip + - by_user + - at_time + - correlation_id +required: +- _id +- user_id +- title +- message +- notification_type +- status +- priority +- channels +- last_saved +additionalProperties: false diff --git a/tests/test_cases/large_sample/verified_output/json_schema/notification.1.0.1.3.yaml 
b/tests/test_cases/large_sample/verified_output/json_schema/notification.1.0.1.3.yaml new file mode 100644 index 0000000..fa1773f --- /dev/null +++ b/tests/test_cases/large_sample/verified_output/json_schema/notification.1.0.1.3.yaml @@ -0,0 +1,216 @@ +description: A notification collection for testing enum_array and cross-collection + references +type: object +properties: + _id: + description: The unique identifier for notification + type: string + pattern: ^[0-9a-fA-F]{24}$ + user_id: + description: A user collection for testing the schema system + type: object + properties: + _id: + description: The unique identifier for a user + type: string + pattern: ^[0-9a-fA-F]{24}$ + user_name: + description: Username + type: string + pattern: ^\S{1,40}$ + first_name: + description: Users First Name + type: string + pattern: ^\S{1,40}$ + last_name: + description: Users Last Name + type: string + pattern: ^\S{1,40}$ + status: + description: document status + type: string + enum: + - draft + - active + - archived + last_saved: + description: The last time this document was saved + type: object + properties: + from_ip: + description: A valid IP Address + type: string + pattern: ^((25[0-5]|2[0-4][0-9]|[01]?[0-9][0-9]?)\.){3}(25[0-5]|2[0-4][0-9]|[01]?[0-9][0-9]?)$ + by_user: + description: A String of text, 1 to 40 characters with no spaces, or special + characters like /t or /n + type: string + pattern: ^\S{1,40}$ + at_time: + description: An ISO 8601 formatted date-time string + type: string + format: date-time + correlation_id: + description: A String of text, 1 to 40 characters with no spaces, or special + characters like /t or /n + type: string + pattern: ^\S{1,40}$ + additionalProperties: false + required: + - from_ip + - by_user + - at_time + - correlation_id + required: + - _id + - user_name + - status + - last_saved + additionalProperties: false + title: + description: Notification title + type: string + pattern: ^\S{1,40}$ + message: + description: Notification message + type: string + pattern: ^[^\t\n\r]{0,255}$ + notification_type: + description: Type of notification + type: string + enum: + - system + - user + - content + - reminder + status: + description: Notification status + type: string + enum: + - draft + - active + - archived + priority: + description: Notification priority level + type: string + enum: + - critical + - high + - medium + - low + tags: + description: Notification tags for categorization + type: array + items: + type: string + enum: + - urgent + - important + - normal + - low + categories: + description: Notification categories + type: array + items: + type: string + enum: + - work + - personal + - project + - reference + channels: + description: Delivery channels for this notification + type: array + items: + type: string + enum: + - email + - sms + - push + - in_app + metadata: + description: Additional notification metadata + type: object + properties: + created_at: + description: When the notification was created + type: string + format: date-time + sent_at: + description: When the notification was sent + type: string + format: date-time + read_at: + description: When the notification was read + type: string + format: date-time + expires_at: + description: When the notification expires + type: string + format: date-time + source: + description: Source of the notification + type: object + properties: + collection: + description: Source collection name + type: string + pattern: ^\S{1,40}$ + document_id: + description: Source document ID + type: string + pattern: 
^[0-9a-fA-F]{24}$ + action: + description: Action that triggered the notification + type: string + enum: + - created + - updated + - deleted + - published + required: + - collection + - document_id + additionalProperties: false + required: + - created_at + additionalProperties: false + last_saved: + description: Last saved breadcrumb + type: object + properties: + from_ip: + description: A valid IP Address + type: string + pattern: ^((25[0-5]|2[0-4][0-9]|[01]?[0-9][0-9]?)\.){3}(25[0-5]|2[0-4][0-9]|[01]?[0-9][0-9]?)$ + by_user: + description: A String of text, 1 to 40 characters with no spaces, or special + characters like /t or /n + type: string + pattern: ^\S{1,40}$ + at_time: + description: An ISO 8601 formatted date-time string + type: string + format: date-time + correlation_id: + description: A String of text, 1 to 40 characters with no spaces, or special + characters like /t or /n + type: string + pattern: ^\S{1,40}$ + additionalProperties: false + required: + - from_ip + - by_user + - at_time + - correlation_id +required: +- _id +- user_id +- title +- message +- notification_type +- status +- priority +- channels +- last_saved +additionalProperties: false diff --git a/tests/test_cases/large_sample/verified_output/json_schema/organization.1.0.0.1.yaml b/tests/test_cases/large_sample/verified_output/json_schema/organization.1.0.0.1.yaml new file mode 100644 index 0000000..be0410d --- /dev/null +++ b/tests/test_cases/large_sample/verified_output/json_schema/organization.1.0.0.1.yaml @@ -0,0 +1,88 @@ +description: An organization in the system +type: object +properties: + _id: + description: The unique identifier for the organization + type: string + pattern: ^[0-9a-fA-F]{24}$ + name: + description: The organization name + type: string + pattern: ^[^\t\n\r]{0,255}$ + status: + description: The current status of the organization + type: string + enum: + - active + - archived + last_saved: + description: When the organization document was last updated + type: object + properties: + from_ip: + description: A valid IP Address + type: string + pattern: ^((25[0-5]|2[0-4][0-9]|[01]?[0-9][0-9]?)\.){3}(25[0-5]|2[0-4][0-9]|[01]?[0-9][0-9]?)$ + by_user: + description: A String of text, 1 to 40 characters with no spaces, or special + characters like /t or /n + type: string + pattern: ^\S{1,40}$ + at_time: + description: An ISO 8601 formatted date-time string + type: string + format: date-time + correlation_id: + description: A String of text, 1 to 40 characters with no spaces, or special + characters like /t or /n + type: string + pattern: ^\S{1,40}$ + additionalProperties: false + required: + - from_ip + - by_user + - at_time + - correlation_id + users: + description: Users associated with this organization + type: array + items: + description: A user identifier + type: string + pattern: ^[0-9a-fA-F]{24}$ + website: + description: Organization website + type: string + pattern: ^https?://[^\s]+$ + home_address: + description: Organization home address + type: object + properties: + street: + description: A String of text, 0 to 255 characters with no special characters + type: string + pattern: ^[^\t\n\r]{0,255}$ + city: + description: A String of text, 1 to 40 characters with no spaces, or special + characters like /t or /n + type: string + pattern: ^\S{1,40}$ + state: + description: A two character state code + type: string + pattern: ^[A-Z]{2}$ + postal_code: + description: A String of text, 1 to 40 characters with no spaces, or special + characters like /t or /n + type: string + pattern: 
^\S{1,40}$ + additionalProperties: false + required: + - street + - postal_code +required: +- _id +- name +- status +- last_saved +additionalProperties: false diff --git a/tests/test_cases/large_sample/verified_output/json_schema/organization.1.0.0.2.yaml b/tests/test_cases/large_sample/verified_output/json_schema/organization.1.0.0.2.yaml new file mode 100644 index 0000000..c881b9b --- /dev/null +++ b/tests/test_cases/large_sample/verified_output/json_schema/organization.1.0.0.2.yaml @@ -0,0 +1,89 @@ +description: An organization in the system +type: object +properties: + _id: + description: The unique identifier for the organization + type: string + pattern: ^[0-9a-fA-F]{24}$ + name: + description: The organization name + type: string + pattern: ^[^\t\n\r]{0,255}$ + status: + description: The current status of the organization + type: string + enum: + - draft + - active + - archived + last_saved: + description: When the organization document was last updated + type: object + properties: + from_ip: + description: A valid IP Address + type: string + pattern: ^((25[0-5]|2[0-4][0-9]|[01]?[0-9][0-9]?)\.){3}(25[0-5]|2[0-4][0-9]|[01]?[0-9][0-9]?)$ + by_user: + description: A String of text, 1 to 40 characters with no spaces, or special + characters like /t or /n + type: string + pattern: ^\S{1,40}$ + at_time: + description: An ISO 8601 formatted date-time string + type: string + format: date-time + correlation_id: + description: A String of text, 1 to 40 characters with no spaces, or special + characters like /t or /n + type: string + pattern: ^\S{1,40}$ + additionalProperties: false + required: + - from_ip + - by_user + - at_time + - correlation_id + users: + description: Users associated with this organization + type: array + items: + description: A user identifier + type: string + pattern: ^[0-9a-fA-F]{24}$ + website: + description: Organization website + type: string + pattern: ^https?://[^\s]+$ + home_address: + description: Organization home address + type: object + properties: + street: + description: A String of text, 0 to 255 characters with no special characters + type: string + pattern: ^[^\t\n\r]{0,255}$ + city: + description: A String of text, 1 to 40 characters with no spaces, or special + characters like /t or /n + type: string + pattern: ^\S{1,40}$ + state: + description: A two character state code + type: string + pattern: ^[A-Z]{2}$ + postal_code: + description: A String of text, 1 to 40 characters with no spaces, or special + characters like /t or /n + type: string + pattern: ^\S{1,40}$ + additionalProperties: false + required: + - street + - postal_code +required: +- _id +- name +- status +- last_saved +additionalProperties: false diff --git a/tests/test_cases/large_sample/verified_output/json_schema/organization.1.0.0.3.yaml b/tests/test_cases/large_sample/verified_output/json_schema/organization.1.0.0.3.yaml new file mode 100644 index 0000000..c881b9b --- /dev/null +++ b/tests/test_cases/large_sample/verified_output/json_schema/organization.1.0.0.3.yaml @@ -0,0 +1,89 @@ +description: An organization in the system +type: object +properties: + _id: + description: The unique identifier for the organization + type: string + pattern: ^[0-9a-fA-F]{24}$ + name: + description: The organization name + type: string + pattern: ^[^\t\n\r]{0,255}$ + status: + description: The current status of the organization + type: string + enum: + - draft + - active + - archived + last_saved: + description: When the organization document was last updated + type: object + properties: + from_ip: + 
description: A valid IP Address + type: string + pattern: ^((25[0-5]|2[0-4][0-9]|[01]?[0-9][0-9]?)\.){3}(25[0-5]|2[0-4][0-9]|[01]?[0-9][0-9]?)$ + by_user: + description: A String of text, 1 to 40 characters with no spaces, or special + characters like /t or /n + type: string + pattern: ^\S{1,40}$ + at_time: + description: An ISO 8601 formatted date-time string + type: string + format: date-time + correlation_id: + description: A String of text, 1 to 40 characters with no spaces, or special + characters like /t or /n + type: string + pattern: ^\S{1,40}$ + additionalProperties: false + required: + - from_ip + - by_user + - at_time + - correlation_id + users: + description: Users associated with this organization + type: array + items: + description: A user identifier + type: string + pattern: ^[0-9a-fA-F]{24}$ + website: + description: Organization website + type: string + pattern: ^https?://[^\s]+$ + home_address: + description: Organization home address + type: object + properties: + street: + description: A String of text, 0 to 255 characters with no special characters + type: string + pattern: ^[^\t\n\r]{0,255}$ + city: + description: A String of text, 1 to 40 characters with no spaces, or special + characters like /t or /n + type: string + pattern: ^\S{1,40}$ + state: + description: A two character state code + type: string + pattern: ^[A-Z]{2}$ + postal_code: + description: A String of text, 1 to 40 characters with no spaces, or special + characters like /t or /n + type: string + pattern: ^\S{1,40}$ + additionalProperties: false + required: + - street + - postal_code +required: +- _id +- name +- status +- last_saved +additionalProperties: false diff --git a/tests/test_cases/large_sample/verified_output/json_schema/search.1.0.0.1.yaml b/tests/test_cases/large_sample/verified_output/json_schema/search.1.0.0.1.yaml new file mode 100644 index 0000000..0f0363f --- /dev/null +++ b/tests/test_cases/large_sample/verified_output/json_schema/search.1.0.0.1.yaml @@ -0,0 +1,344 @@ +description: A search index that is used to support a elastic search polymorphic query + service +type: object +properties: + collection_name: + description: The name of the collection + type: string + pattern: ^\S{1,40}$ + collection_id: + description: The unique identifier for this source document + type: string + pattern: ^[0-9a-fA-F]{24}$ +required: +- collection_name +- collection_id +additionalProperties: false +oneOf: +- description: A media item in the system + type: object + properties: + _id: + description: The unique identifier for the media + type: string + pattern: ^[0-9a-fA-F]{24}$ + title: + description: The title of the media + type: string + pattern: ^[^\t\n\r]{0,255}$ + type: + description: The type of media + type: string + enum: + - movie + - tv_show + - documentary + - short + status: + description: The current status of the media + type: string + enum: + - draft + - published + - archived + last_saved: + description: When the media was last updated + type: object + properties: + from_ip: + description: A valid IP Address + type: string + pattern: ^((25[0-5]|2[0-4][0-9]|[01]?[0-9][0-9]?)\.){3}(25[0-5]|2[0-4][0-9]|[01]?[0-9][0-9]?)$ + by_user: + description: A String of text, 1 to 40 characters with no spaces, or special + characters like /t or /n + type: string + pattern: ^\S{1,40}$ + at_time: + description: An ISO 8601 formatted date-time string + type: string + format: date-time + correlation_id: + description: A String of text, 1 to 40 characters with no spaces, or special + characters like /t or 
/n + type: string + pattern: ^\S{1,40}$ + additionalProperties: false + required: + - from_ip + - by_user + - at_time + - correlation_id + tags: + description: Tags associated with the media + type: array + items: + type: string + enum: + - action + - comedy + - drama + - sci_fi + - documentary + metadata: + description: Additional metadata about the media + type: object + properties: + duration: + description: Duration in minutes + type: number + minimum: 1 + multipleOf: 1 + format: + description: Media format + type: string + enum: + - dvd + - bluray + - digital + - streaming + quality: + description: Quality rating + type: string + enum: + - sd + - hd + - uhd + content_data: + description: Media-specific content data + type: object + properties: {} + additionalProperties: false + oneOf: + - description: Movie-specific metadata + type: object + properties: + director: + description: Movie director + type: string + pattern: ^\S{1,40}$ + cast: + description: Movie cast members + type: array + items: + description: Cast member + type: string + pattern: ^\S{1,40}$ + genre: + description: Movie genre + type: array + items: + type: string + enum: + - action + - comedy + - drama + - sci_fi + - documentary + required: + - director + additionalProperties: false + - description: TV show-specific metadata + type: object + properties: + season: + description: Season number + type: number + minimum: 1 + multipleOf: 1 + episode: + description: Episode number + type: number + minimum: 1 + multipleOf: 1 + network: + description: Broadcasting network + type: string + pattern: ^\S{1,40}$ + required: + - season + - episode + additionalProperties: false + - description: Documentary-specific metadata + type: object + properties: + subject: + description: Documentary subject + type: string + pattern: ^[^\t\n\r]{0,255}$ + narrator: + description: Documentary narrator + type: string + pattern: ^\S{1,40}$ + required: + - subject + additionalProperties: false + - description: Short film-specific metadata + type: object + properties: + festival: + description: Film festival + type: string + pattern: ^\S{1,40}$ + awards: + description: Awards received + type: array + items: + description: Award name + type: string + pattern: ^\S{1,40}$ + additionalProperties: false + additionalProperties: false + required: + - _id + - title + - status + - last_saved + additionalProperties: false +- description: An organization in the system + type: object + properties: + _id: + description: The unique identifier for the organization + type: string + pattern: ^[0-9a-fA-F]{24}$ + name: + description: The organization name + type: string + pattern: ^[^\t\n\r]{0,255}$ + status: + description: The current status of the organization + type: string + enum: + - active + - archived + last_saved: + description: When the organization document was last updated + type: object + properties: + from_ip: + description: A valid IP Address + type: string + pattern: ^((25[0-5]|2[0-4][0-9]|[01]?[0-9][0-9]?)\.){3}(25[0-5]|2[0-4][0-9]|[01]?[0-9][0-9]?)$ + by_user: + description: A String of text, 1 to 40 characters with no spaces, or special + characters like /t or /n + type: string + pattern: ^\S{1,40}$ + at_time: + description: An ISO 8601 formatted date-time string + type: string + format: date-time + correlation_id: + description: A String of text, 1 to 40 characters with no spaces, or special + characters like /t or /n + type: string + pattern: ^\S{1,40}$ + additionalProperties: false + required: + - from_ip + - by_user + - at_time + - 
correlation_id + users: + description: Users associated with this organization + type: array + items: + description: A user identifier + type: string + pattern: ^[0-9a-fA-F]{24}$ + website: + description: Organization website + type: string + pattern: ^https?://[^\s]+$ + home_address: + description: Organization home address + type: object + properties: + street: + description: A String of text, 0 to 255 characters with no special characters + type: string + pattern: ^[^\t\n\r]{0,255}$ + city: + description: A String of text, 1 to 40 characters with no spaces, or special + characters like /t or /n + type: string + pattern: ^\S{1,40}$ + state: + description: A two character state code + type: string + pattern: ^[A-Z]{2}$ + postal_code: + description: A String of text, 1 to 40 characters with no spaces, or special + characters like /t or /n + type: string + pattern: ^\S{1,40}$ + additionalProperties: false + required: + - street + - postal_code + required: + - _id + - name + - status + - last_saved + additionalProperties: false +- description: A user collection for testing the schema system + type: object + properties: + _id: + description: The unique identifier for a user + type: string + pattern: ^[0-9a-fA-F]{24}$ + user_name: + description: Username + type: string + pattern: ^\S{1,40}$ + first_name: + description: Users First Name + type: string + pattern: ^\S{1,40}$ + last_name: + description: Users Last Name + type: string + pattern: ^\S{1,40}$ + status: + description: document status + type: string + enum: + - active + - archived + last_saved: + description: The last time this document was saved + type: object + properties: + from_ip: + description: A valid IP Address + type: string + pattern: ^((25[0-5]|2[0-4][0-9]|[01]?[0-9][0-9]?)\.){3}(25[0-5]|2[0-4][0-9]|[01]?[0-9][0-9]?)$ + by_user: + description: A String of text, 1 to 40 characters with no spaces, or special + characters like /t or /n + type: string + pattern: ^\S{1,40}$ + at_time: + description: An ISO 8601 formatted date-time string + type: string + format: date-time + correlation_id: + description: A String of text, 1 to 40 characters with no spaces, or special + characters like /t or /n + type: string + pattern: ^\S{1,40}$ + additionalProperties: false + required: + - from_ip + - by_user + - at_time + - correlation_id + required: + - _id + - user_name + - status + - last_saved + additionalProperties: false diff --git a/tests/test_cases/large_sample/verified_output/json_schema/search.1.0.0.2.yaml b/tests/test_cases/large_sample/verified_output/json_schema/search.1.0.0.2.yaml new file mode 100644 index 0000000..812864c --- /dev/null +++ b/tests/test_cases/large_sample/verified_output/json_schema/search.1.0.0.2.yaml @@ -0,0 +1,346 @@ +description: A search index that is used to support a elastic search polymorphic query + service +type: object +properties: + collection_name: + description: The name of the collection + type: string + pattern: ^\S{1,40}$ + collection_id: + description: The unique identifier for this source document + type: string + pattern: ^[0-9a-fA-F]{24}$ +required: +- collection_name +- collection_id +additionalProperties: false +oneOf: +- description: A media item in the system + type: object + properties: + _id: + description: The unique identifier for the media + type: string + pattern: ^[0-9a-fA-F]{24}$ + title: + description: The title of the media + type: string + pattern: ^[^\t\n\r]{0,255}$ + type: + description: The type of media + type: string + enum: + - movie + - tv_show + - documentary + - short 
+ status: + description: The current status of the media + type: string + enum: + - draft + - published + - archived + last_saved: + description: When the media was last updated + type: object + properties: + from_ip: + description: A valid IP Address + type: string + pattern: ^((25[0-5]|2[0-4][0-9]|[01]?[0-9][0-9]?)\.){3}(25[0-5]|2[0-4][0-9]|[01]?[0-9][0-9]?)$ + by_user: + description: A String of text, 1 to 40 characters with no spaces, or special + characters like /t or /n + type: string + pattern: ^\S{1,40}$ + at_time: + description: An ISO 8601 formatted date-time string + type: string + format: date-time + correlation_id: + description: A String of text, 1 to 40 characters with no spaces, or special + characters like /t or /n + type: string + pattern: ^\S{1,40}$ + additionalProperties: false + required: + - from_ip + - by_user + - at_time + - correlation_id + tags: + description: Tags associated with the media + type: array + items: + type: string + enum: + - action + - comedy + - drama + - sci_fi + - documentary + metadata: + description: Additional metadata about the media + type: object + properties: + duration: + description: Duration in minutes + type: number + minimum: 1 + multipleOf: 1 + format: + description: Media format + type: string + enum: + - dvd + - bluray + - digital + - streaming + quality: + description: Quality rating + type: string + enum: + - sd + - hd + - uhd + content_data: + description: Media-specific content data + type: object + properties: {} + additionalProperties: false + oneOf: + - description: Movie-specific metadata + type: object + properties: + director: + description: Movie director + type: string + pattern: ^\S{1,40}$ + cast: + description: Movie cast members + type: array + items: + description: Cast member + type: string + pattern: ^\S{1,40}$ + genre: + description: Movie genre + type: array + items: + type: string + enum: + - action + - comedy + - drama + - sci_fi + - documentary + required: + - director + additionalProperties: false + - description: TV show-specific metadata + type: object + properties: + season: + description: Season number + type: number + minimum: 1 + multipleOf: 1 + episode: + description: Episode number + type: number + minimum: 1 + multipleOf: 1 + network: + description: Broadcasting network + type: string + pattern: ^\S{1,40}$ + required: + - season + - episode + additionalProperties: false + - description: Documentary-specific metadata + type: object + properties: + subject: + description: Documentary subject + type: string + pattern: ^[^\t\n\r]{0,255}$ + narrator: + description: Documentary narrator + type: string + pattern: ^\S{1,40}$ + required: + - subject + additionalProperties: false + - description: Short film-specific metadata + type: object + properties: + festival: + description: Film festival + type: string + pattern: ^\S{1,40}$ + awards: + description: Awards received + type: array + items: + description: Award name + type: string + pattern: ^\S{1,40}$ + additionalProperties: false + additionalProperties: false + required: + - _id + - title + - status + - last_saved + additionalProperties: false +- description: An organization in the system + type: object + properties: + _id: + description: The unique identifier for the organization + type: string + pattern: ^[0-9a-fA-F]{24}$ + name: + description: The organization name + type: string + pattern: ^[^\t\n\r]{0,255}$ + status: + description: The current status of the organization + type: string + enum: + - draft + - active + - archived + last_saved: + 
description: When the organization document was last updated + type: object + properties: + from_ip: + description: A valid IP Address + type: string + pattern: ^((25[0-5]|2[0-4][0-9]|[01]?[0-9][0-9]?)\.){3}(25[0-5]|2[0-4][0-9]|[01]?[0-9][0-9]?)$ + by_user: + description: A String of text, 1 to 40 characters with no spaces, or special + characters like /t or /n + type: string + pattern: ^\S{1,40}$ + at_time: + description: An ISO 8601 formatted date-time string + type: string + format: date-time + correlation_id: + description: A String of text, 1 to 40 characters with no spaces, or special + characters like /t or /n + type: string + pattern: ^\S{1,40}$ + additionalProperties: false + required: + - from_ip + - by_user + - at_time + - correlation_id + users: + description: Users associated with this organization + type: array + items: + description: A user identifier + type: string + pattern: ^[0-9a-fA-F]{24}$ + website: + description: Organization website + type: string + pattern: ^https?://[^\s]+$ + home_address: + description: Organization home address + type: object + properties: + street: + description: A String of text, 0 to 255 characters with no special characters + type: string + pattern: ^[^\t\n\r]{0,255}$ + city: + description: A String of text, 1 to 40 characters with no spaces, or special + characters like /t or /n + type: string + pattern: ^\S{1,40}$ + state: + description: A two character state code + type: string + pattern: ^[A-Z]{2}$ + postal_code: + description: A String of text, 1 to 40 characters with no spaces, or special + characters like /t or /n + type: string + pattern: ^\S{1,40}$ + additionalProperties: false + required: + - street + - postal_code + required: + - _id + - name + - status + - last_saved + additionalProperties: false +- description: A user collection for testing the schema system + type: object + properties: + _id: + description: The unique identifier for a user + type: string + pattern: ^[0-9a-fA-F]{24}$ + user_name: + description: Username + type: string + pattern: ^\S{1,40}$ + first_name: + description: Users First Name + type: string + pattern: ^\S{1,40}$ + last_name: + description: Users Last Name + type: string + pattern: ^\S{1,40}$ + status: + description: document status + type: string + enum: + - draft + - active + - archived + last_saved: + description: The last time this document was saved + type: object + properties: + from_ip: + description: A valid IP Address + type: string + pattern: ^((25[0-5]|2[0-4][0-9]|[01]?[0-9][0-9]?)\.){3}(25[0-5]|2[0-4][0-9]|[01]?[0-9][0-9]?)$ + by_user: + description: A String of text, 1 to 40 characters with no spaces, or special + characters like /t or /n + type: string + pattern: ^\S{1,40}$ + at_time: + description: An ISO 8601 formatted date-time string + type: string + format: date-time + correlation_id: + description: A String of text, 1 to 40 characters with no spaces, or special + characters like /t or /n + type: string + pattern: ^\S{1,40}$ + additionalProperties: false + required: + - from_ip + - by_user + - at_time + - correlation_id + required: + - _id + - user_name + - status + - last_saved + additionalProperties: false diff --git a/tests/test_cases/large_sample/verified_output/json_schema/search.1.0.0.3.yaml b/tests/test_cases/large_sample/verified_output/json_schema/search.1.0.0.3.yaml new file mode 100644 index 0000000..812864c --- /dev/null +++ b/tests/test_cases/large_sample/verified_output/json_schema/search.1.0.0.3.yaml @@ -0,0 +1,346 @@ +description: A search index that is used to support a 
elastic search polymorphic query + service +type: object +properties: + collection_name: + description: The name of the collection + type: string + pattern: ^\S{1,40}$ + collection_id: + description: The unique identifier for this source document + type: string + pattern: ^[0-9a-fA-F]{24}$ +required: +- collection_name +- collection_id +additionalProperties: false +oneOf: +- description: A media item in the system + type: object + properties: + _id: + description: The unique identifier for the media + type: string + pattern: ^[0-9a-fA-F]{24}$ + title: + description: The title of the media + type: string + pattern: ^[^\t\n\r]{0,255}$ + type: + description: The type of media + type: string + enum: + - movie + - tv_show + - documentary + - short + status: + description: The current status of the media + type: string + enum: + - draft + - published + - archived + last_saved: + description: When the media was last updated + type: object + properties: + from_ip: + description: A valid IP Address + type: string + pattern: ^((25[0-5]|2[0-4][0-9]|[01]?[0-9][0-9]?)\.){3}(25[0-5]|2[0-4][0-9]|[01]?[0-9][0-9]?)$ + by_user: + description: A String of text, 1 to 40 characters with no spaces, or special + characters like /t or /n + type: string + pattern: ^\S{1,40}$ + at_time: + description: An ISO 8601 formatted date-time string + type: string + format: date-time + correlation_id: + description: A String of text, 1 to 40 characters with no spaces, or special + characters like /t or /n + type: string + pattern: ^\S{1,40}$ + additionalProperties: false + required: + - from_ip + - by_user + - at_time + - correlation_id + tags: + description: Tags associated with the media + type: array + items: + type: string + enum: + - action + - comedy + - drama + - sci_fi + - documentary + metadata: + description: Additional metadata about the media + type: object + properties: + duration: + description: Duration in minutes + type: number + minimum: 1 + multipleOf: 1 + format: + description: Media format + type: string + enum: + - dvd + - bluray + - digital + - streaming + quality: + description: Quality rating + type: string + enum: + - sd + - hd + - uhd + content_data: + description: Media-specific content data + type: object + properties: {} + additionalProperties: false + oneOf: + - description: Movie-specific metadata + type: object + properties: + director: + description: Movie director + type: string + pattern: ^\S{1,40}$ + cast: + description: Movie cast members + type: array + items: + description: Cast member + type: string + pattern: ^\S{1,40}$ + genre: + description: Movie genre + type: array + items: + type: string + enum: + - action + - comedy + - drama + - sci_fi + - documentary + required: + - director + additionalProperties: false + - description: TV show-specific metadata + type: object + properties: + season: + description: Season number + type: number + minimum: 1 + multipleOf: 1 + episode: + description: Episode number + type: number + minimum: 1 + multipleOf: 1 + network: + description: Broadcasting network + type: string + pattern: ^\S{1,40}$ + required: + - season + - episode + additionalProperties: false + - description: Documentary-specific metadata + type: object + properties: + subject: + description: Documentary subject + type: string + pattern: ^[^\t\n\r]{0,255}$ + narrator: + description: Documentary narrator + type: string + pattern: ^\S{1,40}$ + required: + - subject + additionalProperties: false + - description: Short film-specific metadata + type: object + properties: + festival: + 
description: Film festival + type: string + pattern: ^\S{1,40}$ + awards: + description: Awards received + type: array + items: + description: Award name + type: string + pattern: ^\S{1,40}$ + additionalProperties: false + additionalProperties: false + required: + - _id + - title + - status + - last_saved + additionalProperties: false +- description: An organization in the system + type: object + properties: + _id: + description: The unique identifier for the organization + type: string + pattern: ^[0-9a-fA-F]{24}$ + name: + description: The organization name + type: string + pattern: ^[^\t\n\r]{0,255}$ + status: + description: The current status of the organization + type: string + enum: + - draft + - active + - archived + last_saved: + description: When the organization document was last updated + type: object + properties: + from_ip: + description: A valid IP Address + type: string + pattern: ^((25[0-5]|2[0-4][0-9]|[01]?[0-9][0-9]?)\.){3}(25[0-5]|2[0-4][0-9]|[01]?[0-9][0-9]?)$ + by_user: + description: A String of text, 1 to 40 characters with no spaces, or special + characters like /t or /n + type: string + pattern: ^\S{1,40}$ + at_time: + description: An ISO 8601 formatted date-time string + type: string + format: date-time + correlation_id: + description: A String of text, 1 to 40 characters with no spaces, or special + characters like /t or /n + type: string + pattern: ^\S{1,40}$ + additionalProperties: false + required: + - from_ip + - by_user + - at_time + - correlation_id + users: + description: Users associated with this organization + type: array + items: + description: A user identifier + type: string + pattern: ^[0-9a-fA-F]{24}$ + website: + description: Organization website + type: string + pattern: ^https?://[^\s]+$ + home_address: + description: Organization home address + type: object + properties: + street: + description: A String of text, 0 to 255 characters with no special characters + type: string + pattern: ^[^\t\n\r]{0,255}$ + city: + description: A String of text, 1 to 40 characters with no spaces, or special + characters like /t or /n + type: string + pattern: ^\S{1,40}$ + state: + description: A two character state code + type: string + pattern: ^[A-Z]{2}$ + postal_code: + description: A String of text, 1 to 40 characters with no spaces, or special + characters like /t or /n + type: string + pattern: ^\S{1,40}$ + additionalProperties: false + required: + - street + - postal_code + required: + - _id + - name + - status + - last_saved + additionalProperties: false +- description: A user collection for testing the schema system + type: object + properties: + _id: + description: The unique identifier for a user + type: string + pattern: ^[0-9a-fA-F]{24}$ + user_name: + description: Username + type: string + pattern: ^\S{1,40}$ + first_name: + description: Users First Name + type: string + pattern: ^\S{1,40}$ + last_name: + description: Users Last Name + type: string + pattern: ^\S{1,40}$ + status: + description: document status + type: string + enum: + - draft + - active + - archived + last_saved: + description: The last time this document was saved + type: object + properties: + from_ip: + description: A valid IP Address + type: string + pattern: ^((25[0-5]|2[0-4][0-9]|[01]?[0-9][0-9]?)\.){3}(25[0-5]|2[0-4][0-9]|[01]?[0-9][0-9]?)$ + by_user: + description: A String of text, 1 to 40 characters with no spaces, or special + characters like /t or /n + type: string + pattern: ^\S{1,40}$ + at_time: + description: An ISO 8601 formatted date-time string + type: string + 
format: date-time + correlation_id: + description: A String of text, 1 to 40 characters with no spaces, or special + characters like /t or /n + type: string + pattern: ^\S{1,40}$ + additionalProperties: false + required: + - from_ip + - by_user + - at_time + - correlation_id + required: + - _id + - user_name + - status + - last_saved + additionalProperties: false diff --git a/tests/test_cases/large_sample/verified_output/json_schema/search.1.0.1.3.yaml b/tests/test_cases/large_sample/verified_output/json_schema/search.1.0.1.3.yaml new file mode 100644 index 0000000..9d019c1 --- /dev/null +++ b/tests/test_cases/large_sample/verified_output/json_schema/search.1.0.1.3.yaml @@ -0,0 +1,437 @@ +description: A search index that is used to support a elastic search polymorphic query + service +type: object +properties: + collection_name: + description: The name of the collection + type: string + pattern: ^\S{1,40}$ + collection_id: + description: The unique identifier for this source document + type: string + pattern: ^[0-9a-fA-F]{24}$ +required: +- collection_name +- collection_id +additionalProperties: false +oneOf: +- description: A media item in the system + type: object + properties: + _id: + description: The unique identifier for the media + type: string + pattern: ^[0-9a-fA-F]{24}$ + title: + description: The title of the media + type: string + pattern: ^[^\t\n\r]{0,255}$ + type: + description: The type of media + type: string + enum: + - movie + - tv_show + - documentary + - short + status: + description: The current status of the media + type: string + enum: + - draft + - published + - archived + last_saved: + description: When the media was last updated + type: object + properties: + from_ip: + description: A valid IP Address + type: string + pattern: ^((25[0-5]|2[0-4][0-9]|[01]?[0-9][0-9]?)\.){3}(25[0-5]|2[0-4][0-9]|[01]?[0-9][0-9]?)$ + by_user: + description: A String of text, 1 to 40 characters with no spaces, or special + characters like /t or /n + type: string + pattern: ^\S{1,40}$ + at_time: + description: An ISO 8601 formatted date-time string + type: string + format: date-time + correlation_id: + description: A String of text, 1 to 40 characters with no spaces, or special + characters like /t or /n + type: string + pattern: ^\S{1,40}$ + additionalProperties: false + required: + - from_ip + - by_user + - at_time + - correlation_id + tags: + description: Tags associated with the media + type: array + items: + type: string + enum: + - action + - comedy + - drama + - sci_fi + - documentary + metadata: + description: Additional metadata about the media + type: object + properties: + duration: + description: Duration in minutes + type: number + minimum: 1 + multipleOf: 1 + format: + description: Media format + type: string + enum: + - dvd + - bluray + - digital + - streaming + quality: + description: Quality rating + type: string + enum: + - sd + - hd + - uhd + content_data: + description: Media-specific content data + type: object + properties: {} + additionalProperties: false + oneOf: + - description: Movie-specific metadata + type: object + properties: + director: + description: Movie director + type: string + pattern: ^\S{1,40}$ + cast: + description: Movie cast members + type: array + items: + description: Cast member + type: string + pattern: ^\S{1,40}$ + genre: + description: Movie genre + type: array + items: + type: string + enum: + - action + - comedy + - drama + - sci_fi + - documentary + required: + - director + additionalProperties: false + - description: TV show-specific 
metadata + type: object + properties: + season: + description: Season number + type: number + minimum: 1 + multipleOf: 1 + episode: + description: Episode number + type: number + minimum: 1 + multipleOf: 1 + network: + description: Broadcasting network + type: string + pattern: ^\S{1,40}$ + required: + - season + - episode + additionalProperties: false + - description: Documentary-specific metadata + type: object + properties: + subject: + description: Documentary subject + type: string + pattern: ^[^\t\n\r]{0,255}$ + narrator: + description: Documentary narrator + type: string + pattern: ^\S{1,40}$ + required: + - subject + additionalProperties: false + - description: Short film-specific metadata + type: object + properties: + festival: + description: Film festival + type: string + pattern: ^\S{1,40}$ + awards: + description: Awards received + type: array + items: + description: Award name + type: string + pattern: ^\S{1,40}$ + additionalProperties: false + additionalProperties: false + required: + - _id + - title + - status + - last_saved + additionalProperties: false +- description: An organization in the system + type: object + properties: + _id: + description: The unique identifier for the organization + type: string + pattern: ^[0-9a-fA-F]{24}$ + name: + description: The organization name + type: string + pattern: ^[^\t\n\r]{0,255}$ + status: + description: The current status of the organization + type: string + enum: + - draft + - active + - archived + last_saved: + description: When the organization document was last updated + type: object + properties: + from_ip: + description: A valid IP Address + type: string + pattern: ^((25[0-5]|2[0-4][0-9]|[01]?[0-9][0-9]?)\.){3}(25[0-5]|2[0-4][0-9]|[01]?[0-9][0-9]?)$ + by_user: + description: A String of text, 1 to 40 characters with no spaces, or special + characters like /t or /n + type: string + pattern: ^\S{1,40}$ + at_time: + description: An ISO 8601 formatted date-time string + type: string + format: date-time + correlation_id: + description: A String of text, 1 to 40 characters with no spaces, or special + characters like /t or /n + type: string + pattern: ^\S{1,40}$ + additionalProperties: false + required: + - from_ip + - by_user + - at_time + - correlation_id + users: + description: Users associated with this organization + type: array + items: + description: A user identifier + type: string + pattern: ^[0-9a-fA-F]{24}$ + website: + description: Organization website + type: string + pattern: ^https?://[^\s]+$ + home_address: + description: Organization home address + type: object + properties: + street: + description: A String of text, 0 to 255 characters with no special characters + type: string + pattern: ^[^\t\n\r]{0,255}$ + city: + description: A String of text, 1 to 40 characters with no spaces, or special + characters like /t or /n + type: string + pattern: ^\S{1,40}$ + state: + description: A two character state code + type: string + pattern: ^[A-Z]{2}$ + postal_code: + description: A String of text, 1 to 40 characters with no spaces, or special + characters like /t or /n + type: string + pattern: ^\S{1,40}$ + additionalProperties: false + required: + - street + - postal_code + required: + - _id + - name + - status + - last_saved + additionalProperties: false +- description: A user collection for testing the schema system + type: object + properties: + _id: + description: The unique identifier for a user + type: string + pattern: ^[0-9a-fA-F]{24}$ + user_name: + description: Username + type: string + pattern: ^\S{1,40}$ + 
full_name: + description: Users Full Name + type: string + pattern: ^[^\t\n\r]{0,255}$ + status: + description: The status + type: string + enum: + - draft + - active + - archived + categories: + description: A users list of categorized tags + type: array + items: + description: A user category + type: object + properties: + name: + description: Category Name assigned by the user + type: string + pattern: ^\S{1,40}$ + category: + description: The category type assigned by the user + type: string + enum: + - work + - personal + - project + - reference + tags: + description: A list of enumerated values assigned by the user + type: array + items: + type: string + enum: + - urgent + - important + - normal + - low + - completed + - in_progress + - blocked + - review + required: + - name + - category + - tags + additionalProperties: false + preferences: + description: User preferences and settings + type: object + properties: + notification_types: + description: Types of notifications the user wants to receive + type: array + items: + type: string + enum: + - system + - user + - content + - reminder + delivery_channels: + description: Preferred delivery channels for notifications + type: array + items: + type: string + enum: + - email + - sms + - push + - in_app + content_tags: + description: Content tags the user is interested in + type: array + items: + type: string + enum: + - technology + - business + - entertainment + - education + - news + priority_levels: + description: Priority levels the user wants to receive + type: array + items: + type: string + enum: + - critical + - high + - medium + - low + required: + - notification_types + - delivery_channels + additionalProperties: false + email: + description: The person's email address + type: string + pattern: ^[^\s@]+@[^\s@]+\.[^\s@]+$ + phone: + description: The person's phone number + type: string + pattern: ^\+1[2-9][0-9]{9}$ + last_saved: + description: The last time this document was saved + type: object + properties: + from_ip: + description: A valid IP Address + type: string + pattern: ^((25[0-5]|2[0-4][0-9]|[01]?[0-9][0-9]?)\.){3}(25[0-5]|2[0-4][0-9]|[01]?[0-9][0-9]?)$ + by_user: + description: A String of text, 1 to 40 characters with no spaces, or special + characters like /t or /n + type: string + pattern: ^\S{1,40}$ + at_time: + description: An ISO 8601 formatted date-time string + type: string + format: date-time + correlation_id: + description: A String of text, 1 to 40 characters with no spaces, or special + characters like /t or /n + type: string + pattern: ^\S{1,40}$ + additionalProperties: false + required: + - from_ip + - by_user + - at_time + - correlation_id + required: + - _id + - user_name + - status + - last_saved + additionalProperties: false diff --git a/tests/test_cases/large_sample/verified_output/json_schema/user.1.0.0.1.yaml b/tests/test_cases/large_sample/verified_output/json_schema/user.1.0.0.1.yaml new file mode 100644 index 0000000..243e711 --- /dev/null +++ b/tests/test_cases/large_sample/verified_output/json_schema/user.1.0.0.1.yaml @@ -0,0 +1,59 @@ +description: A user collection for testing the schema system +type: object +properties: + _id: + description: The unique identifier for a user + type: string + pattern: ^[0-9a-fA-F]{24}$ + user_name: + description: Username + type: string + pattern: ^\S{1,40}$ + first_name: + description: Users First Name + type: string + pattern: ^\S{1,40}$ + last_name: + description: Users Last Name + type: string + pattern: ^\S{1,40}$ + status: + description: document 
status + type: string + enum: + - active + - archived + last_saved: + description: The last time this document was saved + type: object + properties: + from_ip: + description: A valid IP Address + type: string + pattern: ^((25[0-5]|2[0-4][0-9]|[01]?[0-9][0-9]?)\.){3}(25[0-5]|2[0-4][0-9]|[01]?[0-9][0-9]?)$ + by_user: + description: A String of text, 1 to 40 characters with no spaces, or special + characters like /t or /n + type: string + pattern: ^\S{1,40}$ + at_time: + description: An ISO 8601 formatted date-time string + type: string + format: date-time + correlation_id: + description: A String of text, 1 to 40 characters with no spaces, or special + characters like /t or /n + type: string + pattern: ^\S{1,40}$ + additionalProperties: false + required: + - from_ip + - by_user + - at_time + - correlation_id +required: +- _id +- user_name +- status +- last_saved +additionalProperties: false diff --git a/tests/test_cases/large_sample/verified_output/json_schema/user.1.0.0.2.yaml b/tests/test_cases/large_sample/verified_output/json_schema/user.1.0.0.2.yaml new file mode 100644 index 0000000..fb46047 --- /dev/null +++ b/tests/test_cases/large_sample/verified_output/json_schema/user.1.0.0.2.yaml @@ -0,0 +1,60 @@ +description: A user collection for testing the schema system +type: object +properties: + _id: + description: The unique identifier for a user + type: string + pattern: ^[0-9a-fA-F]{24}$ + user_name: + description: Username + type: string + pattern: ^\S{1,40}$ + first_name: + description: Users First Name + type: string + pattern: ^\S{1,40}$ + last_name: + description: Users Last Name + type: string + pattern: ^\S{1,40}$ + status: + description: document status + type: string + enum: + - draft + - active + - archived + last_saved: + description: The last time this document was saved + type: object + properties: + from_ip: + description: A valid IP Address + type: string + pattern: ^((25[0-5]|2[0-4][0-9]|[01]?[0-9][0-9]?)\.){3}(25[0-5]|2[0-4][0-9]|[01]?[0-9][0-9]?)$ + by_user: + description: A String of text, 1 to 40 characters with no spaces, or special + characters like /t or /n + type: string + pattern: ^\S{1,40}$ + at_time: + description: An ISO 8601 formatted date-time string + type: string + format: date-time + correlation_id: + description: A String of text, 1 to 40 characters with no spaces, or special + characters like /t or /n + type: string + pattern: ^\S{1,40}$ + additionalProperties: false + required: + - from_ip + - by_user + - at_time + - correlation_id +required: +- _id +- user_name +- status +- last_saved +additionalProperties: false diff --git a/tests/test_cases/large_sample/verified_output/json_schema/user.1.0.0.3.yaml b/tests/test_cases/large_sample/verified_output/json_schema/user.1.0.0.3.yaml new file mode 100644 index 0000000..fb46047 --- /dev/null +++ b/tests/test_cases/large_sample/verified_output/json_schema/user.1.0.0.3.yaml @@ -0,0 +1,60 @@ +description: A user collection for testing the schema system +type: object +properties: + _id: + description: The unique identifier for a user + type: string + pattern: ^[0-9a-fA-F]{24}$ + user_name: + description: Username + type: string + pattern: ^\S{1,40}$ + first_name: + description: Users First Name + type: string + pattern: ^\S{1,40}$ + last_name: + description: Users Last Name + type: string + pattern: ^\S{1,40}$ + status: + description: document status + type: string + enum: + - draft + - active + - archived + last_saved: + description: The last time this document was saved + type: object + properties: + from_ip: 
+ description: A valid IP Address + type: string + pattern: ^((25[0-5]|2[0-4][0-9]|[01]?[0-9][0-9]?)\.){3}(25[0-5]|2[0-4][0-9]|[01]?[0-9][0-9]?)$ + by_user: + description: A String of text, 1 to 40 characters with no spaces, or special + characters like /t or /n + type: string + pattern: ^\S{1,40}$ + at_time: + description: An ISO 8601 formatted date-time string + type: string + format: date-time + correlation_id: + description: A String of text, 1 to 40 characters with no spaces, or special + characters like /t or /n + type: string + pattern: ^\S{1,40}$ + additionalProperties: false + required: + - from_ip + - by_user + - at_time + - correlation_id +required: +- _id +- user_name +- status +- last_saved +additionalProperties: false diff --git a/tests/test_cases/large_sample/verified_output/json_schema/user.1.0.1.3.yaml b/tests/test_cases/large_sample/verified_output/json_schema/user.1.0.1.3.yaml new file mode 100644 index 0000000..e828891 --- /dev/null +++ b/tests/test_cases/large_sample/verified_output/json_schema/user.1.0.1.3.yaml @@ -0,0 +1,151 @@ +description: A user collection for testing the schema system +type: object +properties: + _id: + description: The unique identifier for a user + type: string + pattern: ^[0-9a-fA-F]{24}$ + user_name: + description: Username + type: string + pattern: ^\S{1,40}$ + full_name: + description: Users Full Name + type: string + pattern: ^[^\t\n\r]{0,255}$ + status: + description: The status + type: string + enum: + - draft + - active + - archived + categories: + description: A users list of categorized tags + type: array + items: + description: A user category + type: object + properties: + name: + description: Category Name assigned by the user + type: string + pattern: ^\S{1,40}$ + category: + description: The category type assigned by the user + type: string + enum: + - work + - personal + - project + - reference + tags: + description: A list of enumerated values assigned by the user + type: array + items: + type: string + enum: + - urgent + - important + - normal + - low + - completed + - in_progress + - blocked + - review + required: + - name + - category + - tags + additionalProperties: false + preferences: + description: User preferences and settings + type: object + properties: + notification_types: + description: Types of notifications the user wants to receive + type: array + items: + type: string + enum: + - system + - user + - content + - reminder + delivery_channels: + description: Preferred delivery channels for notifications + type: array + items: + type: string + enum: + - email + - sms + - push + - in_app + content_tags: + description: Content tags the user is interested in + type: array + items: + type: string + enum: + - technology + - business + - entertainment + - education + - news + priority_levels: + description: Priority levels the user wants to receive + type: array + items: + type: string + enum: + - critical + - high + - medium + - low + required: + - notification_types + - delivery_channels + additionalProperties: false + email: + description: The person's email address + type: string + pattern: ^[^\s@]+@[^\s@]+\.[^\s@]+$ + phone: + description: The person's phone number + type: string + pattern: ^\+1[2-9][0-9]{9}$ + last_saved: + description: The last time this document was saved + type: object + properties: + from_ip: + description: A valid IP Address + type: string + pattern: ^((25[0-5]|2[0-4][0-9]|[01]?[0-9][0-9]?)\.){3}(25[0-5]|2[0-4][0-9]|[01]?[0-9][0-9]?)$ + by_user: + description: A String of text, 1 to 40 
characters with no spaces, or special + characters like /t or /n + type: string + pattern: ^\S{1,40}$ + at_time: + description: An ISO 8601 formatted date-time string + type: string + format: date-time + correlation_id: + description: A String of text, 1 to 40 characters with no spaces, or special + characters like /t or /n + type: string + pattern: ^\S{1,40}$ + additionalProperties: false + required: + - from_ip + - by_user + - at_time + - correlation_id +required: +- _id +- user_name +- status +- last_saved +additionalProperties: false diff --git a/tests/test_cases/large_sample/test_database/CollectionVersions.json b/tests/test_cases/large_sample/verified_output/test_database/CollectionVersions.json similarity index 65% rename from tests/test_cases/large_sample/test_database/CollectionVersions.json rename to tests/test_cases/large_sample/verified_output/test_database/CollectionVersions.json index d99afb8..a5e16f4 100644 --- a/tests/test_cases/large_sample/test_database/CollectionVersions.json +++ b/tests/test_cases/large_sample/verified_output/test_database/CollectionVersions.json @@ -26,5 +26,19 @@ }, "collection_name": "user", "current_version": "user.1.0.1.3" + }, + { + "_id": { + "$oid": "68571f0d11401f03ab9100e5" + }, + "collection_name": "content", + "current_version": "content.1.0.1.3" + }, + { + "_id": { + "$oid": "68571f0d11401f03ab9100f9" + }, + "collection_name": "notification", + "current_version": "notification.1.0.1.3" } ] \ No newline at end of file diff --git a/tests/test_cases/large_sample/verified_output/test_database/DatabaseEnumerators.json b/tests/test_cases/large_sample/verified_output/test_database/DatabaseEnumerators.json new file mode 100644 index 0000000..7368d2f --- /dev/null +++ b/tests/test_cases/large_sample/verified_output/test_database/DatabaseEnumerators.json @@ -0,0 +1,246 @@ +[ + { + "_id": { + "$oid": "a00000000000000000000001" + }, + "name": "Enumerations", + "status": "Deprecated", + "version": 0, + "enumerators": {} + }, + { + "_id": { + "$oid": "a00000000000000000000002" + }, + "name": "Enumerations", + "status": "Active", + "version": 1, + "enumerators": { + "default_status": { + "active": "Not Deleted", + "archived": "Soft Delete Indicator" + }, + "media_type": { + "movie": "A motion picture", + "tv_show": "A television series", + "documentary": "A non-fiction film", + "short": "A short film" + }, + "media_status": { + "draft": "Not yet published", + "published": "Available to users", + "archived": "No longer available" + }, + "media_tags": { + "action": "Action genre", + "comedy": "Comedy genre", + "drama": "Drama genre", + "sci_fi": "Science fiction genre", + "documentary": "Documentary genre" + }, + "media_format": { + "dvd": "DVD format", + "bluray": "Blu-ray format", + "digital": "Digital format", + "streaming": "Streaming format" + }, + "media_quality": { + "sd": "Standard definition", + "hd": "High definition", + "uhd": "Ultra high definition" + }, + "notification_type": { + "system": "System notification", + "user": "User notification", + "content": "Content notification", + "reminder": "Reminder notification" + }, + "priority_level": { + "critical": "Critical priority", + "high": "High priority", + "medium": "Medium priority", + "low": "Low priority" + }, + "notification_tags": { + "urgent": "Urgent notification", + "important": "Important notification", + "normal": "Normal notification", + "low": "Low priority notification" + }, + "category_type": { + "work": "Work related items", + "personal": "Personal items", + "project": "Project 
specific items", + "reference": "Reference materials" + }, + "delivery_channel": { + "email": "Email delivery", + "sms": "SMS delivery", + "push": "Push notification", + "in_app": "In-app notification" + }, + "notification_action": { + "created": "Document created", + "updated": "Document updated", + "deleted": "Document deleted", + "published": "Document published" + } + } + }, + { + "_id": { + "$oid": "a00000000000000000000003" + }, + "name": "Enumerations", + "status": "Active", + "version": 2, + "enumerators": { + "default_status": { + "draft": "Not finalized", + "active": "Not deleted", + "archived": "Soft delete indicator" + }, + "media_type": { + "movie": "A motion picture", + "tv_show": "A television series", + "documentary": "A non-fiction film", + "short": "A short film" + }, + "media_status": { + "draft": "Not yet published", + "published": "Available to users", + "archived": "No longer available" + }, + "media_tags": { + "action": "Action genre", + "comedy": "Comedy genre", + "drama": "Drama genre", + "sci_fi": "Science fiction genre", + "documentary": "Documentary genre" + }, + "media_format": { + "dvd": "DVD format", + "bluray": "Blu-ray format", + "digital": "Digital format", + "streaming": "Streaming format" + }, + "media_quality": { + "sd": "Standard definition", + "hd": "High definition", + "uhd": "Ultra high definition" + } + } + }, + { + "_id": { + "$oid": "a00000000000000000000004" + }, + "name": "Enumerations", + "status": "Active", + "version": 3, + "enumerators": { + "default_status": { + "draft": "Not finalized", + "active": "Not deleted", + "archived": "Soft delete indicator" + }, + "media_type": { + "movie": "A motion picture", + "tv_show": "A television series", + "documentary": "A non-fiction film", + "short": "A short film" + }, + "media_status": { + "draft": "Not yet published", + "published": "Available to users", + "archived": "No longer available" + }, + "media_tags": { + "action": "Action genre", + "comedy": "Comedy genre", + "drama": "Drama genre", + "sci_fi": "Science fiction genre", + "documentary": "Documentary genre" + }, + "media_format": { + "dvd": "DVD format", + "bluray": "Blu-ray format", + "digital": "Digital format", + "streaming": "Streaming format" + }, + "media_quality": { + "sd": "Standard definition", + "hd": "High definition", + "uhd": "Ultra high definition" + }, + "type": { + "radio": "Select one option", + "check": "Select multiple options", + "text": "Enter a text string" + }, + "tags": { + "user": "A User", + "admin": "An administrator", + "super": "A super user" + }, + "category_type": { + "work": "Work related items", + "personal": "Personal items", + "project": "Project specific items", + "reference": "Reference materials" + }, + "category_tags": { + "urgent": "Requires immediate attention", + "important": "High priority", + "normal": "Standard priority", + "low": "Low priority", + "completed": "Task is done", + "in_progress": "Currently being worked on", + "blocked": "Cannot proceed", + "review": "Needs review" + }, + "content_type": { + "article": "Written content", + "video": "Video content", + "podcast": "Audio content" + }, + "content_tags": { + "technology": "Technology related content", + "business": "Business related content", + "entertainment": "Entertainment content", + "education": "Educational content", + "news": "News content" + }, + "notification_type": { + "system": "System notification", + "user": "User notification", + "content": "Content notification", + "reminder": "Reminder notification" + }, + 
"notification_tags": { + "urgent": "Urgent notification", + "important": "Important notification", + "normal": "Normal notification", + "low": "Low priority notification" + }, + "priority_level": { + "critical": "Critical priority", + "high": "High priority", + "medium": "Medium priority", + "low": "Low priority" + }, + "delivery_channel": { + "email": "Email delivery", + "sms": "SMS delivery", + "push": "Push notification", + "in_app": "In-app notification" + }, + "notification_action": { + "created": "Document created", + "updated": "Document updated", + "deleted": "Document deleted", + "published": "Document published" + } + } + } +] \ No newline at end of file diff --git a/tests/test_cases/large_sample/verified_output/test_database/content.json b/tests/test_cases/large_sample/verified_output/test_database/content.json new file mode 100644 index 0000000..130e4e4 --- /dev/null +++ b/tests/test_cases/large_sample/verified_output/test_database/content.json @@ -0,0 +1 @@ +[] \ No newline at end of file diff --git a/tests/test_cases/large_sample/test_database/media.json b/tests/test_cases/large_sample/verified_output/test_database/media.json similarity index 100% rename from tests/test_cases/large_sample/test_database/media.json rename to tests/test_cases/large_sample/verified_output/test_database/media.json diff --git a/tests/test_cases/large_sample/verified_output/test_database/notification.json b/tests/test_cases/large_sample/verified_output/test_database/notification.json new file mode 100644 index 0000000..130e4e4 --- /dev/null +++ b/tests/test_cases/large_sample/verified_output/test_database/notification.json @@ -0,0 +1 @@ +[] \ No newline at end of file diff --git a/tests/test_cases/large_sample/test_database/organization.json b/tests/test_cases/large_sample/verified_output/test_database/organization.json similarity index 100% rename from tests/test_cases/large_sample/test_database/organization.json rename to tests/test_cases/large_sample/verified_output/test_database/organization.json diff --git a/tests/test_cases/large_sample/test_database/search.json b/tests/test_cases/large_sample/verified_output/test_database/search.json similarity index 100% rename from tests/test_cases/large_sample/test_database/search.json rename to tests/test_cases/large_sample/verified_output/test_database/search.json diff --git a/tests/test_cases/large_sample/test_database/user.json b/tests/test_cases/large_sample/verified_output/test_database/user.json similarity index 92% rename from tests/test_cases/large_sample/test_database/user.json rename to tests/test_cases/large_sample/verified_output/test_database/user.json index e8b108a..ccc7775 100644 --- a/tests/test_cases/large_sample/test_database/user.json +++ b/tests/test_cases/large_sample/verified_output/test_database/user.json @@ -185,6 +185,12 @@ ], "email": "john.doe@example.com", "phone": "+15551234567", + "preferences": { + "notification_types": ["system", "content", "reminder"], + "delivery_channels": ["email", "in_app"], + "content_tags": ["technology", "education"], + "priority_levels": ["high", "medium"] + }, "last_saved": { "from_ip": "127.0.0.1", "by_user": "system", @@ -220,6 +226,12 @@ ], "email": "sarah.johnson@techcorp.com", "phone": "+15559876543", + "preferences": { + "notification_types": ["system", "user", "content"], + "delivery_channels": ["email", "push", "in_app"], + "content_tags": ["technology", "business"], + "priority_levels": ["critical", "high", "medium"] + }, "last_saved": { "from_ip": "192.168.1.50", "by_user": "admin", @@ 
-255,6 +267,12 @@ ], "email": "mike.chen@designstudio.com", "phone": "+15551234568", + "preferences": { + "notification_types": ["content", "reminder"], + "delivery_channels": ["email", "push"], + "content_tags": ["entertainment", "education"], + "priority_levels": ["medium", "low"] + }, "last_saved": { "from_ip": "10.0.0.25", "by_user": "system", diff --git a/tests/test_cases/minimum_valid/data/enumerators.json b/tests/test_cases/minimum_valid/test_data/enumerators.json similarity index 100% rename from tests/test_cases/minimum_valid/data/enumerators.json rename to tests/test_cases/minimum_valid/test_data/enumerators.json diff --git a/tests/test_cases/playground/api_config/BUILT_AT b/tests/test_cases/playground/api_config/BUILT_AT new file mode 100644 index 0000000..28ca4ab --- /dev/null +++ b/tests/test_cases/playground/api_config/BUILT_AT @@ -0,0 +1 @@ +Local \ No newline at end of file diff --git a/tests/test_cases/playground/api_config/ENABLE_DROP_DATABASE b/tests/test_cases/playground/api_config/ENABLE_DROP_DATABASE new file mode 100644 index 0000000..f32a580 --- /dev/null +++ b/tests/test_cases/playground/api_config/ENABLE_DROP_DATABASE @@ -0,0 +1 @@ +true \ No newline at end of file diff --git a/tests/test_cases/playground/api_config/LOAD_TEST_DATA b/tests/test_cases/playground/api_config/LOAD_TEST_DATA new file mode 100644 index 0000000..f32a580 --- /dev/null +++ b/tests/test_cases/playground/api_config/LOAD_TEST_DATA @@ -0,0 +1 @@ +true \ No newline at end of file diff --git a/tests/test_cases/playground/configurations/sample.yaml b/tests/test_cases/playground/configurations/sample.yaml new file mode 100644 index 0000000..a2ff473 --- /dev/null +++ b/tests/test_cases/playground/configurations/sample.yaml @@ -0,0 +1,33 @@ +file_name: sample.yaml +title: Sample Collection +description: A collection for testing +versions: +- version: 1.0.0.1 + drop_indexes: [] + add_indexes: + - name: nameIndex + key: + first_name: 1 + last_name: 1 + options: + unique: true + - name: statusIndex + key: + status: 1 + options: + unique: false + migrations: [] + test_data: sample.1.0.0.1.json +- version: 1.0.1.2 + drop_indexes: + - name: nameIndex + add_indexes: + - name: fullNameIndex + key: + full_name: 1 + options: + unique: false + migrations: + - first_last_to_full_name.json + test_data: sample.1.0.1.2.json +_locked: true diff --git a/tests/test_cases/playground/dictionaries/sample.1.0.0.yaml b/tests/test_cases/playground/dictionaries/sample.1.0.0.yaml new file mode 100644 index 0000000..b5fa094 --- /dev/null +++ b/tests/test_cases/playground/dictionaries/sample.1.0.0.yaml @@ -0,0 +1,24 @@ +description: A simple collection for testing +type: object +required: false +properties: + _id: + description: The unique identifier for the media + type: identity + required: true + first_name: + description: First Name + type: word + required: false + last_name: + description: Family Name + type: word + required: false + status: + description: The current status of the document + type: enum + required: true + enums: default_status +additionalProperties: false +file_name: sample.1.0.0.yaml +_locked: true diff --git a/tests/test_cases/playground/dictionaries/sample.1.0.1.yaml b/tests/test_cases/playground/dictionaries/sample.1.0.1.yaml new file mode 100644 index 0000000..7443822 --- /dev/null +++ b/tests/test_cases/playground/dictionaries/sample.1.0.1.yaml @@ -0,0 +1,20 @@ +description: A simple collection for testing +type: object +required: false +properties: + _id: + description: The unique identifier for the 
media + type: identity + required: true + full_name: + description: Full Name + type: sentence + required: false + status: + description: The current status of the document + type: enum + required: true + enums: default_status +additionalProperties: false +file_name: sample.1.0.1.yaml +_locked: true diff --git a/tests/test_cases/playground/migrations/first_last_to_full_name.json b/tests/test_cases/playground/migrations/first_last_to_full_name.json new file mode 100644 index 0000000..bc80295 --- /dev/null +++ b/tests/test_cases/playground/migrations/first_last_to_full_name.json @@ -0,0 +1,18 @@ +[ + { + "$addFields": { + "full_name": { + "$concat": ["$first_name", " ", "$last_name"] + } + } + }, + { + "$unset": [ + "first_name", + "last_name" + ] + }, + { + "$out": "sample" + } +] \ No newline at end of file diff --git a/tests/test_cases/playground/test_data/enumerators.json b/tests/test_cases/playground/test_data/enumerators.json new file mode 100644 index 0000000..a82ab39 --- /dev/null +++ b/tests/test_cases/playground/test_data/enumerators.json @@ -0,0 +1,25 @@ +[ + { + "version": 0, + "enumerators": {} + }, + { + "version": 1, + "enumerators": { + "default_status": { + "active": "Not Deleted", + "archived": "Soft Delete Indicator" + } + } + }, + { + "version": 2, + "enumerators": { + "default_status": { + "draft": "Draft", + "active": "Not Deleted", + "archived": "Soft Delete Indicator" + } + } + } +] \ No newline at end of file diff --git a/tests/test_cases/playground/test_data/sample.1.0.0.1.json b/tests/test_cases/playground/test_data/sample.1.0.0.1.json new file mode 100644 index 0000000..4f50799 --- /dev/null +++ b/tests/test_cases/playground/test_data/sample.1.0.0.1.json @@ -0,0 +1,14 @@ +[ + { + "_id": {"$oid": "A00000000000000000000001"}, + "first_name": "Joe", + "last_name": "Smith", + "status": "active" + }, + { + "_id": {"$oid": "A00000000000000000000002"}, + "first_name": "Jane", + "last_name": "Doe", + "status": "archived" + } +] diff --git a/tests/test_cases/playground/test_data/sample.1.0.1.2.json b/tests/test_cases/playground/test_data/sample.1.0.1.2.json new file mode 100644 index 0000000..959dfa6 --- /dev/null +++ b/tests/test_cases/playground/test_data/sample.1.0.1.2.json @@ -0,0 +1,7 @@ +[ + { + "_id": {"$oid": "A00000000000000000000003"}, + "full_name": "Dr. 
James Earl Ray II", + "status": "draft" + } +] diff --git a/tests/test_cases/playground/types/appointment.yaml b/tests/test_cases/playground/types/appointment.yaml new file mode 100644 index 0000000..96cd219 --- /dev/null +++ b/tests/test_cases/playground/types/appointment.yaml @@ -0,0 +1,15 @@ +file_name: appointment.yaml +_locked: true +description: A date/time range +required: false +type: object +properties: + from: + description: Starting Date/Time + required: true + type: date-time + to: + description: Ending Date/Time + required: true + type: date-time +additionalProperties: false diff --git a/tests/test_cases/playground/types/breadcrumb.yaml b/tests/test_cases/playground/types/breadcrumb.yaml new file mode 100644 index 0000000..09a61c4 --- /dev/null +++ b/tests/test_cases/playground/types/breadcrumb.yaml @@ -0,0 +1,23 @@ +file_name: breadcrumb.yaml +_locked: true +description: A tracking breadcrumb +required: false +type: object +properties: + from_ip: + description: Http Request remote IP address + required: true + type: ip_address + by_user: + description: ID Of User + required: true + type: word + at_time: + description: The date-time when last updated + required: true + type: date-time + correlation_id: + description: The logging correlation ID of the update transaction + required: true + type: word +additionalProperties: false diff --git a/tests/test_cases/playground/types/count.yaml b/tests/test_cases/playground/types/count.yaml new file mode 100644 index 0000000..708c6b1 --- /dev/null +++ b/tests/test_cases/playground/types/count.yaml @@ -0,0 +1,11 @@ +file_name: count.yaml +_locked: true +description: A positive integer value +required: false +json_type: + type: number + minimum: 1 + multipleOf: 1 +bson_type: + bsonType: int + minimum: 1 diff --git a/tests/test_cases/playground/types/date-time.yaml b/tests/test_cases/playground/types/date-time.yaml new file mode 100644 index 0000000..11bfed3 --- /dev/null +++ b/tests/test_cases/playground/types/date-time.yaml @@ -0,0 +1,9 @@ +file_name: date-time.yaml +_locked: true +description: An ISO 8601 formatted date-time string +required: false +json_type: + type: string + format: date-time +bson_type: + bsonType: date diff --git a/tests/test_cases/playground/types/email.yaml b/tests/test_cases/playground/types/email.yaml new file mode 100644 index 0000000..87c0f28 --- /dev/null +++ b/tests/test_cases/playground/types/email.yaml @@ -0,0 +1,7 @@ +file_name: email.yaml +_locked: true +description: A valid email address +required: false +schema: + type: string + pattern: ^[^\s@]+@[^\s@]+\.[^\s@]+$ diff --git a/tests/test_cases/playground/types/identifier.yaml b/tests/test_cases/playground/types/identifier.yaml new file mode 100644 index 0000000..63299e8 --- /dev/null +++ b/tests/test_cases/playground/types/identifier.yaml @@ -0,0 +1,9 @@ +file_name: identifier.yaml +_locked: true +description: A unique identifier for a document +required: false +json_type: + type: string + pattern: ^[0-9a-fA-F]{24}$ +bson_type: + bsonType: objectId diff --git a/tests/test_cases/small_sample/types/identity.yaml b/tests/test_cases/playground/types/identity.yaml similarity index 89% rename from tests/test_cases/small_sample/types/identity.yaml rename to tests/test_cases/playground/types/identity.yaml index afd5299..4950c75 100644 --- a/tests/test_cases/small_sample/types/identity.yaml +++ b/tests/test_cases/playground/types/identity.yaml @@ -1,4 +1,3 @@ -title: Identity description: A unique identifier for a document json_type: type: string diff --git 
a/tests/test_cases/playground/types/index.yaml b/tests/test_cases/playground/types/index.yaml new file mode 100644 index 0000000..f534514 --- /dev/null +++ b/tests/test_cases/playground/types/index.yaml @@ -0,0 +1,11 @@ +file_name: index.yaml +_locked: true +description: A zero-based array index +required: false +json_type: + type: number + minimum: 0 + multipleOf: 1 +bson_type: + bsonType: int + minimum: 0 diff --git a/tests/test_cases/playground/types/ip_address.yaml b/tests/test_cases/playground/types/ip_address.yaml new file mode 100644 index 0000000..596a182 --- /dev/null +++ b/tests/test_cases/playground/types/ip_address.yaml @@ -0,0 +1,7 @@ +file_name: ip_address.yaml +_locked: true +description: A valid IP Address +required: false +schema: + type: string + pattern: ^((25[0-5]|2[0-4][0-9]|[01]?[0-9][0-9]?)\.){3}(25[0-5]|2[0-4][0-9]|[01]?[0-9][0-9]?)$ diff --git a/tests/test_cases/playground/types/markdown.yaml b/tests/test_cases/playground/types/markdown.yaml new file mode 100644 index 0000000..60eae65 --- /dev/null +++ b/tests/test_cases/playground/types/markdown.yaml @@ -0,0 +1,8 @@ +file_name: markdown.yaml +_locked: true +description: A String of text, at least 1 and no more than 4k characters. May contain + markdown, newlines, and tabs. +required: false +schema: + type: string + maxLength: 4096 diff --git a/tests/test_cases/playground/types/sentence.yaml b/tests/test_cases/playground/types/sentence.yaml new file mode 100644 index 0000000..3899fee --- /dev/null +++ b/tests/test_cases/playground/types/sentence.yaml @@ -0,0 +1,7 @@ +file_name: sentence.yaml +_locked: true +description: A String of text, 0 to 255 characters with no special characters +required: false +schema: + type: string + pattern: ^[^\t\n\r]{0,255}$ diff --git a/tests/test_cases/playground/types/state_code.yaml b/tests/test_cases/playground/types/state_code.yaml new file mode 100644 index 0000000..d6df0e1 --- /dev/null +++ b/tests/test_cases/playground/types/state_code.yaml @@ -0,0 +1,7 @@ +file_name: state_code.yaml +_locked: true +description: A two character state code +required: false +schema: + type: string + pattern: ^[A-Z]{2}$ diff --git a/tests/test_cases/playground/types/street_address.yaml b/tests/test_cases/playground/types/street_address.yaml new file mode 100644 index 0000000..45d4ab7 --- /dev/null +++ b/tests/test_cases/playground/types/street_address.yaml @@ -0,0 +1,23 @@ +file_name: street_address.yaml +_locked: true +description: A street address +required: false +type: object +properties: + street: + description: Street address + required: true + type: sentence + city: + description: City + required: false + type: word + state: + description: State or province + required: false + type: state_code + postal_code: + description: Postal code + required: true + type: word +additionalProperties: false diff --git a/tests/test_cases/playground/types/url.yaml b/tests/test_cases/playground/types/url.yaml new file mode 100644 index 0000000..63a288a --- /dev/null +++ b/tests/test_cases/playground/types/url.yaml @@ -0,0 +1,7 @@ +file_name: url.yaml +_locked: true +description: A valid URL +required: false +schema: + type: string + pattern: ^https?://[^\s]+$ diff --git a/tests/test_cases/playground/types/us_phone.yaml b/tests/test_cases/playground/types/us_phone.yaml new file mode 100644 index 0000000..8e83f63 --- /dev/null +++ b/tests/test_cases/playground/types/us_phone.yaml @@ -0,0 +1,7 @@ +file_name: us_phone.yaml +_locked: true +description: A US phone number in E.164 format +required: false +schema: + 
+  type: string
+  pattern: ^\+1[2-9][0-9]{9}$
diff --git a/tests/test_cases/playground/types/word.yaml b/tests/test_cases/playground/types/word.yaml
new file mode 100644
index 0000000..64d9246
--- /dev/null
+++ b/tests/test_cases/playground/types/word.yaml
@@ -0,0 +1,8 @@
+file_name: word.yaml
+_locked: true
+description: A String of text, 1 to 40 characters with no spaces, or special characters
+  like /t or /n
+required: false
+schema:
+  type: string
+  pattern: ^\S{1,40}$
diff --git a/tests/test_cases/ref_load_errors/collections/circular_ref.yaml b/tests/test_cases/ref_load_errors/collections/circular_ref.yaml
index f770238..78c0384 100644
--- a/tests/test_cases/ref_load_errors/collections/circular_ref.yaml
+++ b/tests/test_cases/ref_load_errors/collections/circular_ref.yaml
@@ -1,4 +1,3 @@
-title: Circular Ref Collection
 description: Collection for managing circular ref items
 name: circular_ref
 versions:
diff --git a/tests/test_cases/ref_load_errors/collections/missing_ref.yaml b/tests/test_cases/ref_load_errors/collections/missing_ref.yaml
index b8ff788..54c6f62 100644
--- a/tests/test_cases/ref_load_errors/collections/missing_ref.yaml
+++ b/tests/test_cases/ref_load_errors/collections/missing_ref.yaml
@@ -1,4 +1,3 @@
-title: Missing Ref Collection
 description: Collection for managing missing ref items
 name: missing_ref
 versions:
diff --git a/tests/test_cases/ref_load_errors/collections/test.yaml b/tests/test_cases/ref_load_errors/collections/test.yaml
index 01315f0..b5f2775 100644
--- a/tests/test_cases/ref_load_errors/collections/test.yaml
+++ b/tests/test_cases/ref_load_errors/collections/test.yaml
@@ -1,4 +1,3 @@
-title: Test Collection
 name: test
 versions:
   - version: 1.0.0
diff --git a/tests/test_cases/ref_load_errors/dictionary/circular_ref.1.0.0.yaml b/tests/test_cases/ref_load_errors/dictionary/circular_ref.1.0.0.yaml
index c411a57..ed7cfcb 100644
--- a/tests/test_cases/ref_load_errors/dictionary/circular_ref.1.0.0.yaml
+++ b/tests/test_cases/ref_load_errors/dictionary/circular_ref.1.0.0.yaml
@@ -1,6 +1,5 @@
-title: Circular Reference Test
 description: A schema with a circular reference
 type: object
 properties:
   self_ref:
-    $ref: circular_ref.1.0.0
\ No newline at end of file
+    ref: circular_ref.1.0.0
\ No newline at end of file
diff --git a/tests/test_cases/ref_load_errors/dictionary/missing_ref.1.0.0.yaml b/tests/test_cases/ref_load_errors/dictionary/missing_ref.1.0.0.yaml
index d17035a..1fc436a 100644
--- a/tests/test_cases/ref_load_errors/dictionary/missing_ref.1.0.0.yaml
+++ b/tests/test_cases/ref_load_errors/dictionary/missing_ref.1.0.0.yaml
@@ -1,6 +1,5 @@
-title: Missing Reference Test
 description: A schema with a missing reference
 type: object
 properties:
   missing:
-    $ref: does_not_exist.1.0.0
\ No newline at end of file
+    ref: does_not_exist.1.0.0
\ No newline at end of file
diff --git a/tests/test_cases/small_sample/api_config/BUILT_AT b/tests/test_cases/small_sample/api_config/BUILT_AT
new file mode 100644
index 0000000..7434aa1
--- /dev/null
+++ b/tests/test_cases/small_sample/api_config/BUILT_AT
@@ -0,0 +1 @@
+Local
\ No newline at end of file
diff --git a/tests/test_cases/small_sample/api_config/ENABLE_DROP_DATABASE b/tests/test_cases/small_sample/api_config/ENABLE_DROP_DATABASE
new file mode 100644
index 0000000..6707ef5
--- /dev/null
+++ b/tests/test_cases/small_sample/api_config/ENABLE_DROP_DATABASE
@@ -0,0 +1 @@
+true
\ No newline at end of file
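The ref_load_errors fixtures above exercise the two failure modes of `ref:` resolution: a dictionary that refs itself and a ref to a file that does not exist. A minimal sketch of how a loader might detect both conditions, with a path-based cycle check (hypothetical function and names, not the project's actual implementation):

```python
# Hypothetical sketch of circular/missing ref detection; names are
# illustrative, not the configurator's actual API.
def resolve_refs(name, dictionaries, path=None):
    """Recursively follow `ref:` links, failing on cycles and missing targets."""
    path = set() if path is None else path
    if name in path:
        raise ValueError(f"Circular reference detected: {name}")
    if name not in dictionaries:
        raise ValueError(f"Missing reference: {name}")
    path.add(name)
    schema = dictionaries[name]
    for prop in schema.get("properties", {}).values():
        if "ref" in prop:
            resolve_refs(prop["ref"], dictionaries, path)
    path.remove(name)  # a DAG may share refs; only a cycle on the current path fails
    return schema

# A self-referencing dictionary like circular_ref.1.0.0 raises immediately:
dicts = {"circular_ref.1.0.0": {"properties": {"self_ref": {"ref": "circular_ref.1.0.0"}}}}
# resolve_refs("circular_ref.1.0.0", dicts) -> ValueError: Circular reference detected
```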
diff --git a/tests/test_cases/small_sample/collections/simple.yaml b/tests/test_cases/small_sample/collections/simple.yaml
deleted file mode 100644
index 21e4657..0000000
--- a/tests/test_cases/small_sample/collections/simple.yaml
+++ /dev/null
@@ -1,6 +0,0 @@
-title: Simple Collection
-description: Collection for testing
-name: simple
-versions:
-  - version: "1.0.0.1"
-    test_data: simple.1.0.0.1.json
\ No newline at end of file
diff --git a/tests/test_cases/small_sample/configurations/sample.yaml b/tests/test_cases/small_sample/configurations/sample.yaml
new file mode 100644
index 0000000..e89c4b9
--- /dev/null
+++ b/tests/test_cases/small_sample/configurations/sample.yaml
@@ -0,0 +1,5 @@
+description: A test collection for integration testing
+name: sample
+versions:
+  - version: "1.0.0.1"
+    test_data: sample.1.0.0.1.json
diff --git a/tests/test_cases/small_sample/dictionaries/sample.1.0.0.yaml b/tests/test_cases/small_sample/dictionaries/sample.1.0.0.yaml
new file mode 100644
index 0000000..f164685
--- /dev/null
+++ b/tests/test_cases/small_sample/dictionaries/sample.1.0.0.yaml
@@ -0,0 +1,18 @@
+description: A simple collection for testing
+type: object
+required: false
+properties:
+  _id:
+    description: The unique identifier for the media
+    type: identifier
+    required: true
+  name:
+    description: The name of the document
+    type: word
+    required: false
+  status:
+    description: The current status of the document
+    type: enum
+    required: true
+    enums: default_status
+additionalProperties: false
diff --git a/tests/test_cases/small_sample/expected/bson_schema/simple.1.0.0.1.json b/tests/test_cases/small_sample/expected/bson_schema/simple.1.0.0.1.json
deleted file mode 100644
index daa245a..0000000
--- a/tests/test_cases/small_sample/expected/bson_schema/simple.1.0.0.1.json
+++ /dev/null
@@ -1,23 +0,0 @@
-{
-  "description": "A simple collection for testing",
-  "bsonType": "object",
-  "additionalProperties": false,
-  "properties": {
-    "_id": {
-      "description": "The unique identifier for the media",
-      "bsonType": "objectId"
-    },
-    "name": {
-      "bsonType": "string",
-      "pattern": "^[a-zA-Z0-9_-]{4,40}$",
-      "description": "The name of the document"
-    },
-    "status": {
-      "description": "The current status of the document",
-      "bsonType": "string",
-      "enum": ["active", "archived"]
-    }
-  },
-  "title": "Simple",
-  "required": ["_id", "status"]
-}
\ No newline at end of file
diff --git a/tests/test_cases/small_sample/templates/configuration.yaml b/tests/test_cases/small_sample/templates/configuration.yaml
new file mode 100644
index 0000000..bf19096
--- /dev/null
+++ b/tests/test_cases/small_sample/templates/configuration.yaml
@@ -0,0 +1,5 @@
+description: Collection for managing {{collection_name}}
+name: {{collection_name}}
+versions:
+  - version: "0.0.1"
+    test_data: {{collection_name}}.0.0.1.json
\ No newline at end of file
diff --git a/tests/test_cases/small_sample/templates/dictionary.yaml b/tests/test_cases/small_sample/templates/dictionary.yaml
new file mode 100644
index 0000000..3a7ad84
--- /dev/null
+++ b/tests/test_cases/small_sample/templates/dictionary.yaml
@@ -0,0 +1,20 @@
+description: A {{collection_name}} collection for testing the schema system
+type: object
+properties:
+  _id:
+    description: The unique identifier for a {{collection_name}}
+    type: identifier
+    required: true
+  name:
+    description: The name of the {{collection_name}}
+    type: word
+    required: true
+  status:
+    description: The current status of the {{collection_name}}
+    type: enum
+    enums: default_status
+    required: true
+  last_saved:
+    description: The last time this document was saved
+    type: breadcrumb
+    required: true
\ No newline at end of file
diff --git a/tests/test_cases/small_sample/data/enumerators.json b/tests/test_cases/small_sample/test_data/enumerators.json
similarity index 100%
rename from tests/test_cases/small_sample/data/enumerators.json
rename to tests/test_cases/small_sample/test_data/enumerators.json
diff --git a/tests/test_cases/small_sample/data/simple.1.0.0.1.json b/tests/test_cases/small_sample/test_data/sample.1.0.0.1.json
similarity index 100%
rename from tests/test_cases/small_sample/data/simple.1.0.0.1.json
rename to tests/test_cases/small_sample/test_data/sample.1.0.0.1.json
diff --git a/tests/test_cases/small_sample/test_database/CollectionVersions.json b/tests/test_cases/small_sample/test_database/CollectionVersions.json
deleted file mode 100644
index bb73b2e..0000000
--- a/tests/test_cases/small_sample/test_database/CollectionVersions.json
+++ /dev/null
@@ -1,9 +0,0 @@
-[
-  {
-    "_id": {
-      "$oid": "68571db811401f03ab90f71e"
-    },
-    "collection_name": "simple",
-    "current_version": "simple.1.0.0.1"
-  }
-]
\ No newline at end of file
diff --git a/tests/test_cases/small_sample/types/appointment.yaml b/tests/test_cases/small_sample/types/appointment.yaml
new file mode 100644
index 0000000..2becace
--- /dev/null
+++ b/tests/test_cases/small_sample/types/appointment.yaml
@@ -0,0 +1,11 @@
+description: A date/time range
+type: object
+properties:
+  from:
+    description: Starting Date/Time
+    type: date-time
+    required: true
+  to:
+    description: Ending Date/Time
+    type: date-time
+    required: true
\ No newline at end of file
diff --git a/tests/test_cases/small_sample/types/breadcrumb.yaml b/tests/test_cases/small_sample/types/breadcrumb.yaml
new file mode 100644
index 0000000..eb40fa6
--- /dev/null
+++ b/tests/test_cases/small_sample/types/breadcrumb.yaml
@@ -0,0 +1,19 @@
+description: A tracking breadcrumb
+type: object
+properties:
+  from_ip:
+    description: Http Request remote IP address
+    type: ip_address
+    required: true
+  by_user:
+    description: ID Of User
+    type: word
+    required: true
+  at_time:
+    description: The date-time when last updated
+    type: date-time
+    required: true
+  correlation_id:
+    description: The logging correlation ID of the update transaction
+    type: word
+    required: true
diff --git a/tests/test_cases/small_sample/types/count.yaml b/tests/test_cases/small_sample/types/count.yaml
new file mode 100644
index 0000000..05d049a
--- /dev/null
+++ b/tests/test_cases/small_sample/types/count.yaml
@@ -0,0 +1,8 @@
+description: A positive integer value
+json_type:
+  type: number
+  minimum: 1
+  multipleOf: 1
+bson_type:
+  bsonType: int
+  minimum: 1
\ No newline at end of file
diff --git a/tests/test_cases/small_sample/types/date-time.yaml b/tests/test_cases/small_sample/types/date-time.yaml
new file mode 100644
index 0000000..223a4bd
--- /dev/null
+++ b/tests/test_cases/small_sample/types/date-time.yaml
@@ -0,0 +1,6 @@
+description: An ISO 8601 formatted date-time string
+json_type:
+  type: string
+  format: date-time
+bson_type:
+  bsonType: date
\ No newline at end of file
diff --git a/tests/test_cases/small_sample/types/email.yaml b/tests/test_cases/small_sample/types/email.yaml
new file mode 100644
index 0000000..d771770
--- /dev/null
+++ b/tests/test_cases/small_sample/types/email.yaml
@@ -0,0 +1,4 @@
+description: A valid email address
+schema:
+  type: string
+  pattern: "^[^\\s@]+@[^\\s@]+\\.[^\\s@]+$"
\ No newline at end of file
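Types like count and date-time above carry separate json_type and bson_type blocks, while simple string types carry a single schema block that serves both targets. A rough sketch of how a renderer might pick the right block per output format (hypothetical names; the project's actual renderer may be structured differently):

```python
# Hypothetical sketch of dual-format type rendering; illustrative only.
def render_type(type_def: dict, target: str) -> dict:
    """Render a custom type definition for 'json' or 'bson' output."""
    if "schema" in type_def:
        # One universal block: JSON Schema keywords pass through, but BSON
        # validation spells the type keyword as bsonType.
        rendered = dict(type_def["schema"])
        if target == "bson" and "type" in rendered:
            rendered["bsonType"] = rendered.pop("type")
        return rendered
    key = "json_type" if target == "json" else "bson_type"
    return dict(type_def[key])

date_time = {"json_type": {"type": "string", "format": "date-time"},
             "bson_type": {"bsonType": "date"}}
assert render_type(date_time, "bson") == {"bsonType": "date"}
```

This is consistent with the verified_output renders later in the diff, where date-time becomes `format: date-time` in JSON Schema but a bare `"bsonType": "date"` in the BSON render.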
diff --git a/tests/test_cases/small_sample/types/identifier.yaml b/tests/test_cases/small_sample/types/identifier.yaml
new file mode 100644
index 0000000..4950c75
--- /dev/null
+++ b/tests/test_cases/small_sample/types/identifier.yaml
@@ -0,0 +1,6 @@
+description: A unique identifier for a document
+json_type:
+  type: string
+  pattern: "^[0-9a-fA-F]{24}$"
+bson_type:
+  bsonType: objectId
\ No newline at end of file
diff --git a/tests/test_cases/small_sample/types/index.yaml b/tests/test_cases/small_sample/types/index.yaml
new file mode 100644
index 0000000..1152e38
--- /dev/null
+++ b/tests/test_cases/small_sample/types/index.yaml
@@ -0,0 +1,8 @@
+description: A zero-based array index
+json_type:
+  type: number
+  minimum: 0
+  multipleOf: 1
+bson_type:
+  bsonType: int
+  minimum: 0
\ No newline at end of file
diff --git a/tests/test_cases/small_sample/types/ip_address.yaml b/tests/test_cases/small_sample/types/ip_address.yaml
new file mode 100644
index 0000000..28ba551
--- /dev/null
+++ b/tests/test_cases/small_sample/types/ip_address.yaml
@@ -0,0 +1,4 @@
+description: A valid IP Address
+schema:
+  type: string
+  pattern: "^((25[0-5]|2[0-4][0-9]|[01]?[0-9][0-9]?)\\.){3}(25[0-5]|2[0-4][0-9]|[01]?[0-9][0-9]?)$"
diff --git a/tests/test_cases/small_sample/types/markdown.yaml b/tests/test_cases/small_sample/types/markdown.yaml
new file mode 100644
index 0000000..bf39557
--- /dev/null
+++ b/tests/test_cases/small_sample/types/markdown.yaml
@@ -0,0 +1,4 @@
+description: A String of text, at least 1 and no more than 4k characters. May contain markdown, newlines, and tabs.
+schema:
+  type: string
+  maxLength: 4096
\ No newline at end of file
diff --git a/tests/test_cases/small_sample/types/sentence.yaml b/tests/test_cases/small_sample/types/sentence.yaml
new file mode 100644
index 0000000..aad5494
--- /dev/null
+++ b/tests/test_cases/small_sample/types/sentence.yaml
@@ -0,0 +1,4 @@
+description: A String of text, 0 to 255 characters with no special characters
+schema:
+  type: string
+  pattern: "^[^\\t\\n\\r]{0,255}$"
\ No newline at end of file
diff --git a/tests/test_cases/small_sample/types/state_code.yaml b/tests/test_cases/small_sample/types/state_code.yaml
new file mode 100644
index 0000000..b04f1c6
--- /dev/null
+++ b/tests/test_cases/small_sample/types/state_code.yaml
@@ -0,0 +1,4 @@
+description: A two character state code
+schema:
+  type: string
+  pattern: "^[A-Z]{2}$"
\ No newline at end of file
diff --git a/tests/test_cases/small_sample/types/street_address.yaml b/tests/test_cases/small_sample/types/street_address.yaml
new file mode 100644
index 0000000..4630dc1
--- /dev/null
+++ b/tests/test_cases/small_sample/types/street_address.yaml
@@ -0,0 +1,17 @@
+description: A street address
+type: object
+properties:
+  street:
+    description: Street address
+    type: sentence
+    required: true
+  city:
+    description: City
+    type: word
+  state:
+    description: State or province
+    type: state_code
+  postal_code:
+    description: Postal code
+    type: word
+    required: true
diff --git a/tests/test_cases/small_sample/types/url.yaml b/tests/test_cases/small_sample/types/url.yaml
new file mode 100644
index 0000000..f50de06
--- /dev/null
+++ b/tests/test_cases/small_sample/types/url.yaml
@@ -0,0 +1,4 @@
+description: A valid URL
+schema:
+  type: string
+  pattern: "^https?://[^\\s]+$"
diff --git a/tests/test_cases/small_sample/types/us_phone.yaml b/tests/test_cases/small_sample/types/us_phone.yaml
new file mode 100644
index 0000000..833ede6
--- /dev/null
+++ b/tests/test_cases/small_sample/types/us_phone.yaml
@@ -0,0 +1,4 @@
+description: A US phone number in E.164 format
+schema: + type: string + pattern: "^\\+1[2-9][0-9]{9}$" \ No newline at end of file diff --git a/tests/test_cases/small_sample/types/word.yaml b/tests/test_cases/small_sample/types/word.yaml index e3da35d..af8dbd3 100644 --- a/tests/test_cases/small_sample/types/word.yaml +++ b/tests/test_cases/small_sample/types/word.yaml @@ -1,6 +1,5 @@ -title: Word -description: A String of text, at least 4 and no more than 40 characters with no spaces, or special characters like /t or /n +description: A String of text, 1 to 40 characters with no spaces, or special characters like /t or /n schema: type: string - pattern: "^[a-zA-Z0-9_-]{4,40}$" + pattern: "^\\S{1,40}$" diff --git a/tests/test_cases/small_sample/verified_output/bson_schema/sample.1.0.0.1.json b/tests/test_cases/small_sample/verified_output/bson_schema/sample.1.0.0.1.json new file mode 100644 index 0000000..0ee352d --- /dev/null +++ b/tests/test_cases/small_sample/verified_output/bson_schema/sample.1.0.0.1.json @@ -0,0 +1,24 @@ +{ + "bsonType": "object", + "properties": { + "_id": { + "bsonType": "objectId" + }, + "name": { + "pattern": "^\\S{1,40}$", + "bsonType": "string" + }, + "status": { + "bsonType": "string", + "enum": [ + "active", + "archived" + ] + } + }, + "required": [ + "_id", + "status" + ], + "additionalProperties": false +} \ No newline at end of file diff --git a/tests/test_cases/small_sample/expected/json_schema/simple.1.0.0.1.yaml b/tests/test_cases/small_sample/verified_output/json_schema/sample.1.0.0.1.yaml similarity index 73% rename from tests/test_cases/small_sample/expected/json_schema/simple.1.0.0.1.yaml rename to tests/test_cases/small_sample/verified_output/json_schema/sample.1.0.0.1.yaml index 45c1572..b915f91 100644 --- a/tests/test_cases/small_sample/expected/json_schema/simple.1.0.0.1.yaml +++ b/tests/test_cases/small_sample/verified_output/json_schema/sample.1.0.0.1.yaml @@ -1,4 +1,3 @@ -title: Simple description: A simple collection for testing type: object properties: @@ -9,12 +8,14 @@ properties: name: description: The name of the document type: string - pattern: "^[a-zA-Z0-9_-]{4,40}$" + pattern: "^\\S{1,40}$" status: description: The current status of the document type: string - enum: ["active", "archived"] -additionalProperties: false + enum: + - active + - archived required: - _id - - status \ No newline at end of file + - status +additionalProperties: false \ No newline at end of file diff --git a/tests/test_cases/small_sample/verified_output/test_database/CollectionVersions.json b/tests/test_cases/small_sample/verified_output/test_database/CollectionVersions.json new file mode 100644 index 0000000..178f9bb --- /dev/null +++ b/tests/test_cases/small_sample/verified_output/test_database/CollectionVersions.json @@ -0,0 +1,9 @@ +[ + { + "_id": { + "$oid": "68571db811401f03ab90f71e" + }, + "collection_name": "sample", + "current_version": "sample.1.0.0.1" + } +] \ No newline at end of file diff --git a/tests/test_cases/small_sample/verified_output/test_database/DatabaseEnumerators.json b/tests/test_cases/small_sample/verified_output/test_database/DatabaseEnumerators.json new file mode 100644 index 0000000..172912b --- /dev/null +++ b/tests/test_cases/small_sample/verified_output/test_database/DatabaseEnumerators.json @@ -0,0 +1,21 @@ +[ + { + "_id": { + "$oid": "a00000000000000000000001" + }, + "version": 0, + "enumerators": {} + }, + { + "_id": { + "$oid": "a00000000000000000000002" + }, + "version": 1, + "enumerators": { + "default_status": { + "active": "Not Deleted", + "archived": "Soft Delete 
diff --git a/tests/test_cases/small_sample/verified_output/test_database/DatabaseEnumerators.json b/tests/test_cases/small_sample/verified_output/test_database/DatabaseEnumerators.json
new file mode 100644
index 0000000..172912b
--- /dev/null
+++ b/tests/test_cases/small_sample/verified_output/test_database/DatabaseEnumerators.json
@@ -0,0 +1,21 @@
+[
+  {
+    "_id": {
+      "$oid": "a00000000000000000000001"
+    },
+    "version": 0,
+    "enumerators": {}
+  },
+  {
+    "_id": {
+      "$oid": "a00000000000000000000002"
+    },
+    "version": 1,
+    "enumerators": {
+      "default_status": {
+        "active": "Not Deleted",
+        "archived": "Soft Delete Indicator"
+      }
+    }
+  }
+]
\ No newline at end of file
diff --git a/tests/test_cases/small_sample/test_database/simple.json b/tests/test_cases/small_sample/verified_output/test_database/sample.json
similarity index 100%
rename from tests/test_cases/small_sample/test_database/simple.json
rename to tests/test_cases/small_sample/verified_output/test_database/sample.json
diff --git a/tests/test_cases/stepci/api_config/BUILT_AT b/tests/test_cases/stepci/api_config/BUILT_AT
new file mode 100644
index 0000000..28ca4ab
--- /dev/null
+++ b/tests/test_cases/stepci/api_config/BUILT_AT
@@ -0,0 +1 @@
+Local
\ No newline at end of file
diff --git a/tests/test_cases/stepci/api_config/ENABLE_DROP_DATABASE b/tests/test_cases/stepci/api_config/ENABLE_DROP_DATABASE
new file mode 100644
index 0000000..f32a580
--- /dev/null
+++ b/tests/test_cases/stepci/api_config/ENABLE_DROP_DATABASE
@@ -0,0 +1 @@
+true
\ No newline at end of file
diff --git a/tests/test_cases/stepci/api_config/LOAD_TEST_DATA b/tests/test_cases/stepci/api_config/LOAD_TEST_DATA
new file mode 100644
index 0000000..f32a580
--- /dev/null
+++ b/tests/test_cases/stepci/api_config/LOAD_TEST_DATA
@@ -0,0 +1 @@
+true
\ No newline at end of file
diff --git a/tests/test_cases/stepci/configurations/sample.yaml b/tests/test_cases/stepci/configurations/sample.yaml
new file mode 100644
index 0000000..a2ff473
--- /dev/null
+++ b/tests/test_cases/stepci/configurations/sample.yaml
@@ -0,0 +1,33 @@
+file_name: sample.yaml
+title: Sample Collection
+description: A collection for testing
+versions:
+- version: 1.0.0.1
+  drop_indexes: []
+  add_indexes:
+  - name: nameIndex
+    key:
+      first_name: 1
+      last_name: 1
+    options:
+      unique: true
+  - name: statusIndex
+    key:
+      status: 1
+    options:
+      unique: false
+  migrations: []
+  test_data: sample.1.0.0.1.json
+- version: 1.0.1.2
+  drop_indexes:
+  - name: nameIndex
+  add_indexes:
+  - name: fullNameIndex
+    key:
+      full_name: 1
+    options:
+      unique: false
+  migrations:
+  - first_last_to_full_name.json
+  test_data: sample.1.0.1.2.json
+_locked: true
diff --git a/tests/test_cases/stepci/dictionaries/sample.1.0.0.yaml b/tests/test_cases/stepci/dictionaries/sample.1.0.0.yaml
new file mode 100644
index 0000000..b5fa094
--- /dev/null
+++ b/tests/test_cases/stepci/dictionaries/sample.1.0.0.yaml
@@ -0,0 +1,24 @@
+description: A simple collection for testing
+type: object
+required: false
+properties:
+  _id:
+    description: The unique identifier for the media
+    type: identity
+    required: true
+  first_name:
+    description: First Name
+    type: word
+    required: false
+  last_name:
+    description: Family Name
+    type: word
+    required: false
+  status:
+    description: The current status of the document
+    type: enum
+    required: true
+    enums: default_status
+additionalProperties: false
+file_name: sample.1.0.0.yaml
+_locked: true
diff --git a/tests/test_cases/stepci/dictionaries/sample.1.0.1.yaml b/tests/test_cases/stepci/dictionaries/sample.1.0.1.yaml
new file mode 100644
index 0000000..7443822
--- /dev/null
+++ b/tests/test_cases/stepci/dictionaries/sample.1.0.1.yaml
@@ -0,0 +1,20 @@
+description: A simple collection for testing
+type: object
+required: false
+properties:
+  _id:
+    description: The unique identifier for the media
+    type: identity
+    required: true
+  full_name:
+    description: Full Name
+    type: sentence
+    required: false
+  status:
+    description: The current status of the document
+    type: enum
+    required: true
+    enums: default_status
+additionalProperties: false
+file_name: sample.1.0.1.yaml
+_locked: true
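Each version block in the stepci configuration above declares add_indexes and drop_indexes. A minimal pymongo sketch of how an IndexManager-style step could apply them (the data shapes follow the fixture; the function itself is an assumption, not the project's exact code):

```python
# Illustrative index application for one version block.
from pymongo import MongoClient

db = MongoClient("mongodb://localhost:27017")["test_database"]  # assumed

def apply_version_indexes(collection_name: str, version: dict) -> None:
    coll = db[collection_name]
    for spec in version.get("drop_indexes", []):
        coll.drop_index(spec["name"])
    for spec in version.get("add_indexes", []):
        # key dicts like {"first_name": 1, "last_name": 1} map directly to
        # pymongo's list-of-(field, direction) key specification
        keys = list(spec["key"].items())
        coll.create_index(keys, name=spec["name"], **spec.get("options", {}))

version_1_0_0_1 = {
    "drop_indexes": [],
    "add_indexes": [{"name": "nameIndex",
                     "key": {"first_name": 1, "last_name": 1},
                     "options": {"unique": True}}],
}
# apply_version_indexes("sample", version_1_0_0_1)
```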
diff --git a/tests/test_cases/stepci/migrations/first_last_to_full_name.json b/tests/test_cases/stepci/migrations/first_last_to_full_name.json
new file mode 100644
index 0000000..bc80295
--- /dev/null
+++ b/tests/test_cases/stepci/migrations/first_last_to_full_name.json
@@ -0,0 +1,18 @@
+[
+  {
+    "$addFields": {
+      "full_name": {
+        "$concat": ["$first_name", " ", "$last_name"]
+      }
+    }
+  },
+  {
+    "$unset": [
+      "first_name",
+      "last_name"
+    ]
+  },
+  {
+    "$out": "user"
+  }
+]
\ No newline at end of file
diff --git a/tests/test_cases/stepci/test_data/enumerators.json b/tests/test_cases/stepci/test_data/enumerators.json
new file mode 100644
index 0000000..a82ab39
--- /dev/null
+++ b/tests/test_cases/stepci/test_data/enumerators.json
@@ -0,0 +1,25 @@
+[
+  {
+    "version": 0,
+    "enumerators": {}
+  },
+  {
+    "version": 1,
+    "enumerators": {
+      "default_status": {
+        "active": "Not Deleted",
+        "archived": "Soft Delete Indicator"
+      }
+    }
+  },
+  {
+    "version": 2,
+    "enumerators": {
+      "default_status": {
+        "draft": "Draft",
+        "active": "Not Deleted",
+        "archived": "Soft Delete Indicator"
+      }
+    }
+  }
+]
\ No newline at end of file
diff --git a/tests/test_cases/stepci/test_data/sample.1.0.0.1.json b/tests/test_cases/stepci/test_data/sample.1.0.0.1.json
new file mode 100644
index 0000000..4f50799
--- /dev/null
+++ b/tests/test_cases/stepci/test_data/sample.1.0.0.1.json
@@ -0,0 +1,14 @@
+[
+  {
+    "_id": {"$oid": "A00000000000000000000001"},
+    "first_name": "Joe",
+    "last_name": "Smith",
+    "status": "active"
+  },
+  {
+    "_id": {"$oid": "A00000000000000000000002"},
+    "first_name": "Jane",
+    "last_name": "Doe",
+    "status": "archived"
+  }
+]
diff --git a/tests/test_cases/stepci/test_data/sample.1.0.1.2.json b/tests/test_cases/stepci/test_data/sample.1.0.1.2.json
new file mode 100644
index 0000000..959dfa6
--- /dev/null
+++ b/tests/test_cases/stepci/test_data/sample.1.0.1.2.json
@@ -0,0 +1,7 @@
+[
+  {
+    "_id": {"$oid": "A00000000000000000000003"},
+    "full name": "Dr. James Earl Ray II",
+    "status": "draft"
+  }
+]
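The migration above is a plain MongoDB aggregation pipeline, so a MigrationManager-style step can hand it to the server as-is. A hedged sketch of running it (the real manager presumably wraps this with version checks and error handling; note the fixture's $out stage targets the user collection):

```python
# Illustrative migration run against a local test database.
import json
from pymongo import MongoClient

db = MongoClient("mongodb://localhost:27017")["test_database"]  # assumed

with open("tests/test_cases/stepci/migrations/first_last_to_full_name.json") as f:
    pipeline = json.load(f)

# Stages execute in order: $addFields builds full_name from first_name and
# last_name, $unset drops the source fields, and $out writes the result set
# to the collection named in the stage.
db["sample"].aggregate(pipeline)
```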
diff --git a/tests/test_cases/stepci/types/appointment.yaml b/tests/test_cases/stepci/types/appointment.yaml
new file mode 100644
index 0000000..96cd219
--- /dev/null
+++ b/tests/test_cases/stepci/types/appointment.yaml
@@ -0,0 +1,15 @@
+file_name: appointment.yaml
+_locked: true
+description: A date/time range
+required: false
+type: object
+properties:
+  from:
+    description: Starting Date/Time
+    required: true
+    type: date-time
+  to:
+    description: Ending Date/Time
+    required: true
+    type: date-time
+additionalProperties: false
diff --git a/tests/test_cases/stepci/types/breadcrumb.yaml b/tests/test_cases/stepci/types/breadcrumb.yaml
new file mode 100644
index 0000000..09a61c4
--- /dev/null
+++ b/tests/test_cases/stepci/types/breadcrumb.yaml
@@ -0,0 +1,23 @@
+file_name: breadcrumb.yaml
+_locked: true
+description: A tracking breadcrumb
+required: false
+type: object
+properties:
+  from_ip:
+    description: Http Request remote IP address
+    required: true
+    type: ip_address
+  by_user:
+    description: ID Of User
+    required: true
+    type: word
+  at_time:
+    description: The date-time when last updated
+    required: true
+    type: date-time
+  correlation_id:
+    description: The logging correlation ID of the update transaction
+    required: true
+    type: word
+additionalProperties: false
diff --git a/tests/test_cases/stepci/types/count.yaml b/tests/test_cases/stepci/types/count.yaml
new file mode 100644
index 0000000..708c6b1
--- /dev/null
+++ b/tests/test_cases/stepci/types/count.yaml
@@ -0,0 +1,11 @@
+file_name: count.yaml
+_locked: true
+description: A positive integer value
+required: false
+json_type:
+  type: number
+  minimum: 1
+  multipleOf: 1
+bson_type:
+  bsonType: int
+  minimum: 1
diff --git a/tests/test_cases/stepci/types/date-time.yaml b/tests/test_cases/stepci/types/date-time.yaml
new file mode 100644
index 0000000..11bfed3
--- /dev/null
+++ b/tests/test_cases/stepci/types/date-time.yaml
@@ -0,0 +1,9 @@
+file_name: date-time.yaml
+_locked: true
+description: An ISO 8601 formatted date-time string
+required: false
+json_type:
+  type: string
+  format: date-time
+bson_type:
+  bsonType: date
diff --git a/tests/test_cases/stepci/types/email.yaml b/tests/test_cases/stepci/types/email.yaml
new file mode 100644
index 0000000..87c0f28
--- /dev/null
+++ b/tests/test_cases/stepci/types/email.yaml
@@ -0,0 +1,7 @@
+file_name: email.yaml
+_locked: true
+description: A valid email address
+required: false
+schema:
+  type: string
+  pattern: ^[^\s@]+@[^\s@]+\.[^\s@]+$
diff --git a/tests/test_cases/stepci/types/identifier.yaml b/tests/test_cases/stepci/types/identifier.yaml
new file mode 100644
index 0000000..63299e8
--- /dev/null
+++ b/tests/test_cases/stepci/types/identifier.yaml
@@ -0,0 +1,9 @@
+file_name: identifier.yaml
+_locked: true
+description: A unique identifier for a document
+required: false
+json_type:
+  type: string
+  pattern: ^[0-9a-fA-F]{24}$
+bson_type:
+  bsonType: objectId
diff --git a/tests/test_cases/stepci/types/identity.yaml b/tests/test_cases/stepci/types/identity.yaml
new file mode 100644
index 0000000..4950c75
--- /dev/null
+++ b/tests/test_cases/stepci/types/identity.yaml
@@ -0,0 +1,6 @@
+description: A unique identifier for a document
+json_type:
+  type: string
+  pattern: "^[0-9a-fA-F]{24}$"
+bson_type:
+  bsonType: objectId
\ No newline at end of file
diff --git a/tests/test_cases/stepci/types/index.yaml b/tests/test_cases/stepci/types/index.yaml
new file mode 100644
index 0000000..f534514
--- /dev/null
+++ b/tests/test_cases/stepci/types/index.yaml
@@ -0,0 +1,11 @@
+file_name: index.yaml
+_locked: true
+description: A zero-based array index
+required: false
+json_type:
+  type: number
+  minimum: 0
+  multipleOf: 1
+bson_type:
+  bsonType: int
+  minimum: 0
diff --git a/tests/test_cases/stepci/types/ip_address.yaml b/tests/test_cases/stepci/types/ip_address.yaml
new file mode 100644
index 0000000..596a182
--- /dev/null
+++ b/tests/test_cases/stepci/types/ip_address.yaml
@@ -0,0 +1,7 @@
+file_name: ip_address.yaml
+_locked: true
+description: A valid IP Address
+required: false
+schema:
+  type: string
+  pattern: ^((25[0-5]|2[0-4][0-9]|[01]?[0-9][0-9]?)\.){3}(25[0-5]|2[0-4][0-9]|[01]?[0-9][0-9]?)$
diff --git a/tests/test_cases/stepci/types/markdown.yaml b/tests/test_cases/stepci/types/markdown.yaml
new file mode 100644
index 0000000..60eae65
--- /dev/null
+++ b/tests/test_cases/stepci/types/markdown.yaml
@@ -0,0 +1,8 @@
+file_name: markdown.yaml
+_locked: true
+description: A String of text, at least 1 and no more than 4k characters. May contain
+  markdown, newlines, and tabs.
+required: false
+schema:
+  type: string
+  maxLength: 4096
diff --git a/tests/test_cases/stepci/types/sentence.yaml b/tests/test_cases/stepci/types/sentence.yaml
new file mode 100644
index 0000000..3899fee
--- /dev/null
+++ b/tests/test_cases/stepci/types/sentence.yaml
@@ -0,0 +1,7 @@
+file_name: sentence.yaml
+_locked: true
+description: A String of text, 0 to 255 characters with no special characters
+required: false
+schema:
+  type: string
+  pattern: ^[^\t\n\r]{0,255}$
diff --git a/tests/test_cases/stepci/types/state_code.yaml b/tests/test_cases/stepci/types/state_code.yaml
new file mode 100644
index 0000000..d6df0e1
--- /dev/null
+++ b/tests/test_cases/stepci/types/state_code.yaml
@@ -0,0 +1,7 @@
+file_name: state_code.yaml
+_locked: true
+description: A two character state code
+required: false
+schema:
+  type: string
+  pattern: ^[A-Z]{2}$
diff --git a/tests/test_cases/stepci/types/street_address.yaml b/tests/test_cases/stepci/types/street_address.yaml
new file mode 100644
index 0000000..45d4ab7
--- /dev/null
+++ b/tests/test_cases/stepci/types/street_address.yaml
@@ -0,0 +1,23 @@
+file_name: street_address.yaml
+_locked: true
+description: A street address
+required: false
+type: object
+properties:
+  street:
+    description: Street address
+    required: true
+    type: sentence
+  city:
+    description: City
+    required: false
+    type: word
+  state:
+    description: State or province
+    required: false
+    type: state_code
+  postal_code:
+    description: Postal code
+    required: true
+    type: word
+additionalProperties: false
diff --git a/tests/test_cases/stepci/types/url.yaml b/tests/test_cases/stepci/types/url.yaml
new file mode 100644
index 0000000..63a288a
--- /dev/null
+++ b/tests/test_cases/stepci/types/url.yaml
@@ -0,0 +1,7 @@
+file_name: url.yaml
+_locked: true
+description: A valid URL
+required: false
+schema:
+  type: string
+  pattern: ^https?://[^\s]+$
diff --git a/tests/test_cases/stepci/types/us_phone.yaml b/tests/test_cases/stepci/types/us_phone.yaml
new file mode 100644
index 0000000..8e83f63
--- /dev/null
+++ b/tests/test_cases/stepci/types/us_phone.yaml
@@ -0,0 +1,7 @@
+file_name: us_phone.yaml
+_locked: true
+description: A US phone number in E.164 format
+required: false
+schema:
+  type: string
+  pattern: ^\+1[2-9][0-9]{9}$
diff --git a/tests/test_cases/stepci/types/word.yaml b/tests/test_cases/stepci/types/word.yaml
new file mode 100644
index 0000000..64d9246
--- /dev/null
+++ b/tests/test_cases/stepci/types/word.yaml
@@ -0,0 +1,8 @@
+file_name: word.yaml
+_locked: true
+description: A String of text, 1 to 40 characters with no spaces, or special characters
+  like /t or /n
+required: false
+schema:
+  type: string
+  pattern: ^\S{1,40}$
diff --git a/tests/test_cases/template_sample/api_config/LOAD_TEST_DATA b/tests/test_cases/template_sample/api_config/LOAD_TEST_DATA
new file mode 100644
index 0000000..f32a580
--- /dev/null
+++ b/tests/test_cases/template_sample/api_config/LOAD_TEST_DATA
@@ -0,0 +1 @@
+true
\ No newline at end of file
diff --git a/tests/test_cases/template_sample/configurations/sample.yaml b/tests/test_cases/template_sample/configurations/sample.yaml
new file mode 100644
index 0000000..9d3230a
--- /dev/null
+++ b/tests/test_cases/template_sample/configurations/sample.yaml
@@ -0,0 +1,30 @@
+title: Sample Collection
+description: A collection for testing
+name: sample
+versions:
+  - version: "1.0.0.1"
+    add_indexes:
+      - name: nameIndex
+        key:
+          first_name: 1
+          last_name: 1
+        options:
+          unique: true
+      - name: statusIndex
+        key:
+          status: 1
+        options:
+          unique: false
+    test_data: sample.1.0.0.1.json
+  - version: "1.0.1.2"
+    drop_indexes:
+      - name: nameIndex
+    migrations:
+      - first_last_to_full_name.json
+    add_indexes:
+      - name: fullNameIndex
+        key:
+          full_name: 1
+        options:
+          unique: false
+    test_data: sample.1.0.1.2.json
\ No newline at end of file
diff --git a/tests/test_cases/template_sample/dictionaries/sample.1.0.0.yaml b/tests/test_cases/template_sample/dictionaries/sample.1.0.0.yaml
new file mode 100644
index 0000000..f58f672
--- /dev/null
+++ b/tests/test_cases/template_sample/dictionaries/sample.1.0.0.yaml
@@ -0,0 +1,18 @@
+description: A simple collection for testing
+type: object
+properties:
+  _id:
+    description: The unique identifier for the media
+    type: identity
+    required: true
+  first_name:
+    description: First Name
+    type: word
+  last_name:
+    description: Family Name
+    type: word
+  status:
+    description: The current status of the document
+    type: enum
+    enums: default_status
+    required: true
\ No newline at end of file
diff --git a/tests/test_cases/small_sample/dictionary/simple.1.0.0.yaml b/tests/test_cases/template_sample/dictionaries/sample.1.0.1.yaml
similarity index 78%
rename from tests/test_cases/small_sample/dictionary/simple.1.0.0.yaml
rename to tests/test_cases/template_sample/dictionaries/sample.1.0.1.yaml
index a1e3dad..52016b1 100644
--- a/tests/test_cases/small_sample/dictionary/simple.1.0.0.yaml
+++ b/tests/test_cases/template_sample/dictionaries/sample.1.0.1.yaml
@@ -1,4 +1,3 @@
-title: Simple
 description: A simple collection for testing
 type: object
 properties:
@@ -6,9 +5,9 @@ properties:
     description: The unique identifier for the media
     type: identity
     required: true
-  name:
-    description: The name of the document
-    type: word
+  full_name:
+    description: Full Name
+    type: sentence
   status:
     description: The current status of the document
     type: enum
diff --git a/tests/test_cases/template_sample/migrations/first_last_to_full_name.json b/tests/test_cases/template_sample/migrations/first_last_to_full_name.json
new file mode 100644
index 0000000..bc80295
--- /dev/null
+++ b/tests/test_cases/template_sample/migrations/first_last_to_full_name.json
@@ -0,0 +1,18 @@
+[
+  {
+    "$addFields": {
+      "full_name": {
+        "$concat": ["$first_name", " ", "$last_name"]
+      }
+    }
+  },
+  {
+    "$unset": [
+      "first_name",
+      "last_name"
+    ]
+  },
+  {
+    "$out": "user"
+  }
+]
\ No newline at end of file
diff --git a/tests/test_cases/template_sample/test_data/enumerators.json b/tests/test_cases/template_sample/test_data/enumerators.json
new file mode 100644
index 0000000..270037a
--- /dev/null
+++ b/tests/test_cases/template_sample/test_data/enumerators.json
@@ -0,0 +1,25 @@
+[
+  {
+    "version": 0,
+    "enumerators": {}
+  },
+  {
+    "version": 1,
+    "enumerators": {
+      "default_status": {
+        "active": "Not Deleted",
+        "archived": "Soft Delete Indicator"
+      }
+    }
+  },
+  {
+    "version": 2,
+    "enumerators": {
+      "default_status": {
+        "draft": "Draft",
+        "active": "Not Deleted",
+        "archived": "Soft Delete Indicator"
+      }
+    }
+  }
+]
\ No newline at end of file
diff --git a/tests/test_cases/template_sample/test_data/sample.1.0.0.1.json b/tests/test_cases/template_sample/test_data/sample.1.0.0.1.json
new file mode 100644
index 0000000..4f50799
--- /dev/null
+++ b/tests/test_cases/template_sample/test_data/sample.1.0.0.1.json
@@ -0,0 +1,14 @@
+[
+  {
+    "_id": {"$oid": "A00000000000000000000001"},
+    "first_name": "Joe",
+    "last_name": "Smith",
+    "status": "active"
+  },
+  {
+    "_id": {"$oid": "A00000000000000000000002"},
+    "first_name": "Jane",
+    "last_name": "Doe",
+    "status": "archived"
+  }
+]
diff --git a/tests/test_cases/template_sample/test_data/sample.1.0.1.2.json b/tests/test_cases/template_sample/test_data/sample.1.0.1.2.json
new file mode 100644
index 0000000..959dfa6
--- /dev/null
+++ b/tests/test_cases/template_sample/test_data/sample.1.0.1.2.json
@@ -0,0 +1,7 @@
+[
+  {
+    "_id": {"$oid": "A00000000000000000000003"},
+    "full name": "Dr. James Earl Ray II",
+    "status": "draft"
+  }
+]
diff --git a/tests/test_cases/template_sample/types/appointment.yaml b/tests/test_cases/template_sample/types/appointment.yaml
new file mode 100644
index 0000000..2becace
--- /dev/null
+++ b/tests/test_cases/template_sample/types/appointment.yaml
@@ -0,0 +1,11 @@
+description: A date/time range
+type: object
+properties:
+  from:
+    description: Starting Date/Time
+    type: date-time
+    required: true
+  to:
+    description: Ending Date/Time
+    type: date-time
+    required: true
\ No newline at end of file
diff --git a/tests/test_cases/template_sample/types/breadcrumb.yaml b/tests/test_cases/template_sample/types/breadcrumb.yaml
new file mode 100644
index 0000000..eb40fa6
--- /dev/null
+++ b/tests/test_cases/template_sample/types/breadcrumb.yaml
@@ -0,0 +1,19 @@
+description: A tracking breadcrumb
+type: object
+properties:
+  from_ip:
+    description: Http Request remote IP address
+    type: ip_address
+    required: true
+  by_user:
+    description: ID Of User
+    type: word
+    required: true
+  at_time:
+    description: The date-time when last updated
+    type: date-time
+    required: true
+  correlation_id:
+    description: The logging correlation ID of the update transaction
+    type: word
+    required: true
diff --git a/tests/test_cases/template_sample/types/count.yaml b/tests/test_cases/template_sample/types/count.yaml
new file mode 100644
index 0000000..05d049a
--- /dev/null
+++ b/tests/test_cases/template_sample/types/count.yaml
@@ -0,0 +1,8 @@
+description: A positive integer value
+json_type:
+  type: number
+  minimum: 1
+  multipleOf: 1
+bson_type:
+  bsonType: int
+  minimum: 1
\ No newline at end of file
diff --git a/tests/test_cases/template_sample/types/date-time.yaml b/tests/test_cases/template_sample/types/date-time.yaml
new file mode 100644
index 0000000..223a4bd
--- /dev/null
+++ b/tests/test_cases/template_sample/types/date-time.yaml
@@ -0,0 +1,6 @@
+description: An ISO 8601 formatted date-time string
+json_type:
+  type: string
+  format: date-time
+bson_type:
+  bsonType: date
\ No newline at end of file
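The versioned enumerators file above is what lets a dictionary property declare `type: enum` with `enums: default_status` and have the renderer inline the allowed values for whichever enumerators version is being processed. A rough sketch of that lookup (a hypothetical helper, not the project's exact code):

```python
# Hypothetical enum resolution; mirrors the enumerators.json shape above.
import json

def resolve_enum(enumerators_path: str, version: int, enum_name: str) -> list:
    """Return the allowed values for an enum at a given enumerators version."""
    with open(enumerators_path) as f:
        versions = json.load(f)
    by_version = {v["version"]: v["enumerators"] for v in versions}
    return sorted(by_version[version][enum_name].keys())

# Version 1 knows two statuses; version 2 adds "draft":
# resolve_enum(".../enumerators.json", 1, "default_status") -> ["active", "archived"]
# resolve_enum(".../enumerators.json", 2, "default_status") -> ["active", "archived", "draft"]
```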
diff --git a/tests/test_cases/template_sample/types/email.yaml b/tests/test_cases/template_sample/types/email.yaml
new file mode 100644
index 0000000..d771770
--- /dev/null
+++ b/tests/test_cases/template_sample/types/email.yaml
@@ -0,0 +1,4 @@
+description: A valid email address
+schema:
+  type: string
+  pattern: "^[^\\s@]+@[^\\s@]+\\.[^\\s@]+$"
\ No newline at end of file
diff --git a/tests/test_cases/template_sample/types/identifier.yaml b/tests/test_cases/template_sample/types/identifier.yaml
new file mode 100644
index 0000000..4950c75
--- /dev/null
+++ b/tests/test_cases/template_sample/types/identifier.yaml
@@ -0,0 +1,6 @@
+description: A unique identifier for a document
+json_type:
+  type: string
+  pattern: "^[0-9a-fA-F]{24}$"
+bson_type:
+  bsonType: objectId
\ No newline at end of file
diff --git a/tests/test_cases/template_sample/types/index.yaml b/tests/test_cases/template_sample/types/index.yaml
new file mode 100644
index 0000000..1152e38
--- /dev/null
+++ b/tests/test_cases/template_sample/types/index.yaml
@@ -0,0 +1,8 @@
+description: A zero-based array index
+json_type:
+  type: number
+  minimum: 0
+  multipleOf: 1
+bson_type:
+  bsonType: int
+  minimum: 0
\ No newline at end of file
diff --git a/tests/test_cases/template_sample/types/ip_address.yaml b/tests/test_cases/template_sample/types/ip_address.yaml
new file mode 100644
index 0000000..28ba551
--- /dev/null
+++ b/tests/test_cases/template_sample/types/ip_address.yaml
@@ -0,0 +1,4 @@
+description: A valid IP Address
+schema:
+  type: string
+  pattern: "^((25[0-5]|2[0-4][0-9]|[01]?[0-9][0-9]?)\\.){3}(25[0-5]|2[0-4][0-9]|[01]?[0-9][0-9]?)$"
diff --git a/tests/test_cases/template_sample/types/markdown.yaml b/tests/test_cases/template_sample/types/markdown.yaml
new file mode 100644
index 0000000..bf39557
--- /dev/null
+++ b/tests/test_cases/template_sample/types/markdown.yaml
@@ -0,0 +1,4 @@
+description: A String of text, at least 1 and no more than 4k characters. May contain markdown, newlines, and tabs.
+schema:
+  type: string
+  maxLength: 4096
\ No newline at end of file
diff --git a/tests/test_cases/template_sample/types/sentence.yaml b/tests/test_cases/template_sample/types/sentence.yaml
new file mode 100644
index 0000000..aad5494
--- /dev/null
+++ b/tests/test_cases/template_sample/types/sentence.yaml
@@ -0,0 +1,4 @@
+description: A String of text, 0 to 255 characters with no special characters
+schema:
+  type: string
+  pattern: "^[^\\t\\n\\r]{0,255}$"
\ No newline at end of file
diff --git a/tests/test_cases/template_sample/types/state_code.yaml b/tests/test_cases/template_sample/types/state_code.yaml
new file mode 100644
index 0000000..b04f1c6
--- /dev/null
+++ b/tests/test_cases/template_sample/types/state_code.yaml
@@ -0,0 +1,4 @@
+description: A two character state code
+schema:
+  type: string
+  pattern: "^[A-Z]{2}$"
\ No newline at end of file
diff --git a/tests/test_cases/template_sample/types/street_address.yaml b/tests/test_cases/template_sample/types/street_address.yaml
new file mode 100644
index 0000000..4630dc1
--- /dev/null
+++ b/tests/test_cases/template_sample/types/street_address.yaml
@@ -0,0 +1,17 @@
+description: A street address
+type: object
+properties:
+  street:
+    description: Street address
+    type: sentence
+    required: true
+  city:
+    description: City
+    type: word
+  state:
+    description: State or province
+    type: state_code
+  postal_code:
+    description: Postal code
+    type: word
+    required: true
diff --git a/tests/test_cases/template_sample/types/url.yaml b/tests/test_cases/template_sample/types/url.yaml
new file mode 100644
index 0000000..f50de06
--- /dev/null
+++ b/tests/test_cases/template_sample/types/url.yaml
@@ -0,0 +1,4 @@
+description: A valid URL
+schema:
+  type: string
+  pattern: "^https?://[^\\s]+$"
diff --git a/tests/test_cases/template_sample/types/us_phone.yaml b/tests/test_cases/template_sample/types/us_phone.yaml
new file mode 100644
index 0000000..833ede6
--- /dev/null
+++ b/tests/test_cases/template_sample/types/us_phone.yaml
@@ -0,0 +1,4 @@
+description: A US phone number in E.164 format
+schema:
+  type: string
+  pattern: "^\\+1[2-9][0-9]{9}$"
\ No newline at end of file
diff --git a/tests/test_cases/template_sample/types/word.yaml b/tests/test_cases/template_sample/types/word.yaml
new file mode 100644
index 0000000..af8dbd3
--- /dev/null
+++ b/tests/test_cases/template_sample/types/word.yaml
@@ -0,0 +1,5 @@
+description: A String of text, 1 to 40 characters with no spaces, or special characters like /t or /n
+schema:
+  type: string
+  pattern: "^\\S{1,40}$"
+
diff --git a/tests/test_cases/type_unit_test/types/appointment.yaml b/tests/test_cases/type_unit_test/types/appointment.yaml
new file mode 100644
index 0000000..c2ac577
--- /dev/null
+++ b/tests/test_cases/type_unit_test/types/appointment.yaml
@@ -0,0 +1,12 @@
+description: A date/time range
+type: object
+properties:
+  from:
+    description: Starting Date/Time
+    type: date-time
+    required: true
+  to:
+    description: Ending Date/Time
+    type: date-time
+    required: true
+additionalProperties: false
\ No newline at end of file
diff --git a/tests/test_cases/type_unit_test/types/date-time.yaml b/tests/test_cases/type_unit_test/types/date-time.yaml
new file mode 100644
index 0000000..223a4bd
--- /dev/null
+++ b/tests/test_cases/type_unit_test/types/date-time.yaml
@@ -0,0 +1,6 @@
+description: An ISO 8601 formatted date-time string
+json_type:
+  type: string
+  format: date-time
+bson_type:
+  bsonType: date
\ No newline at end of file
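The type_unit_test case exercises type composition: appointment nests the date-time primitive, and schedule (just below) wraps appointment in an array. A very rough sketch of the recursive expansion a renderer might perform; the real renderer also merges descriptions and required flags, which this omits, and all names here are assumptions:

```python
# Hypothetical recursive type resolution for this fixture's data shape.
def expand(node: dict, types: dict) -> dict:
    name = node.get("type")
    if name in types:
        # Replace the custom-type name with its definition, keeping the
        # property's own keys, then keep expanding the merged result.
        merged = {**types[name], **{k: v for k, v in node.items() if k != "type"}}
        return expand(merged, types)
    out = dict(node)
    if "properties" in node:
        out["properties"] = {k: expand(v, types) for k, v in node["properties"].items()}
    if "items" in node:
        out["items"] = expand(node["items"], types)
    return out
```

Compare the verified_output renders further down: schedule expands to an array whose items are the fully resolved appointment object.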
diff --git a/tests/test_cases/type_unit_test/types/email.yaml b/tests/test_cases/type_unit_test/types/email.yaml
new file mode 100644
index 0000000..d771770
--- /dev/null
+++ b/tests/test_cases/type_unit_test/types/email.yaml
@@ -0,0 +1,4 @@
+description: A valid email address
+schema:
+  type: string
+  pattern: "^[^\\s@]+@[^\\s@]+\\.[^\\s@]+$"
\ No newline at end of file
diff --git a/tests/test_cases/type_unit_test/types/identifier.yaml b/tests/test_cases/type_unit_test/types/identifier.yaml
new file mode 100644
index 0000000..4950c75
--- /dev/null
+++ b/tests/test_cases/type_unit_test/types/identifier.yaml
@@ -0,0 +1,6 @@
+description: A unique identifier for a document
+json_type:
+  type: string
+  pattern: "^[0-9a-fA-F]{24}$"
+bson_type:
+  bsonType: objectId
\ No newline at end of file
diff --git a/tests/test_cases/type_unit_test/types/schedule.yaml b/tests/test_cases/type_unit_test/types/schedule.yaml
new file mode 100644
index 0000000..fea89cd
--- /dev/null
+++ b/tests/test_cases/type_unit_test/types/schedule.yaml
@@ -0,0 +1,5 @@
+description: A list of appointments
+type: array
+items:
+  description: An appointment
+  type: appointment
diff --git a/tests/test_cases/type_unit_test/types/sentence.yaml b/tests/test_cases/type_unit_test/types/sentence.yaml
new file mode 100644
index 0000000..aad5494
--- /dev/null
+++ b/tests/test_cases/type_unit_test/types/sentence.yaml
@@ -0,0 +1,4 @@
+description: A String of text, 0 to 255 characters with no special characters
+schema:
+  type: string
+  pattern: "^[^\\t\\n\\r]{0,255}$"
\ No newline at end of file
diff --git a/tests/test_cases/type_unit_test/types/word.yaml b/tests/test_cases/type_unit_test/types/word.yaml
new file mode 100644
index 0000000..af8dbd3
--- /dev/null
+++ b/tests/test_cases/type_unit_test/types/word.yaml
@@ -0,0 +1,5 @@
+description: A String of text, 1 to 40 characters with no spaces, or special characters like /t or /n
+schema:
+  type: string
+  pattern: "^\\S{1,40}$"
+
diff --git a/tests/test_cases/type_unit_test/verified_output/type_renders/bson_schema/appointment.json b/tests/test_cases/type_unit_test/verified_output/type_renders/bson_schema/appointment.json
new file mode 100644
index 0000000..bc4010a
--- /dev/null
+++ b/tests/test_cases/type_unit_test/verified_output/type_renders/bson_schema/appointment.json
@@ -0,0 +1,16 @@
+{
+  "bsonType": "object",
+  "properties": {
+    "from": {
+      "bsonType": "date"
+    },
+    "to": {
+      "bsonType": "date"
+    }
+  },
+  "additionalProperties": false,
+  "required": [
+    "from",
+    "to"
+  ]
+}
\ No newline at end of file
diff --git a/tests/test_cases/type_unit_test/verified_output/type_renders/bson_schema/date-time.json b/tests/test_cases/type_unit_test/verified_output/type_renders/bson_schema/date-time.json
new file mode 100644
index 0000000..dd0b6f9
--- /dev/null
+++ b/tests/test_cases/type_unit_test/verified_output/type_renders/bson_schema/date-time.json
@@ -0,0 +1,3 @@
+{
+  "bsonType": "date"
+}
\ No newline at end of file
diff --git a/tests/test_cases/type_unit_test/verified_output/type_renders/bson_schema/email.json b/tests/test_cases/type_unit_test/verified_output/type_renders/bson_schema/email.json
new file mode 100644
index 0000000..216d305
--- /dev/null
+++ b/tests/test_cases/type_unit_test/verified_output/type_renders/bson_schema/email.json
@@ -0,0 +1,4 @@
+{
+  "pattern": "^[^\\s@]+@[^\\s@]+\\.[^\\s@]+$",
+  "bsonType": "string"
+}
\ No newline at end of file
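These bson_schema renders are already in the shape MongoDB expects for $jsonSchema validation. A hedged sketch of attaching one to a collection (the connection details and the "appointments" collection name are assumptions; the real SchemaManager may drive collMod differently):

```python
# Illustrative: install a rendered BSON schema as a collection validator.
import json
from pymongo import MongoClient

db = MongoClient("mongodb://localhost:27017")["test_database"]  # assumed

with open("tests/test_cases/type_unit_test/verified_output/"
          "type_renders/bson_schema/appointment.json") as f:
    bson_schema = json.load(f)

# collMod applies the validator to an existing collection; inserts and
# updates that violate it are rejected by the server.
db.command("collMod", "appointments",
           validator={"$jsonSchema": bson_schema})
```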
diff --git a/tests/test_cases/type_unit_test/verified_output/type_renders/bson_schema/identifier.json b/tests/test_cases/type_unit_test/verified_output/type_renders/bson_schema/identifier.json
new file mode 100644
index 0000000..42a42c4
--- /dev/null
+++ b/tests/test_cases/type_unit_test/verified_output/type_renders/bson_schema/identifier.json
@@ -0,0 +1,3 @@
+{
+  "bsonType": "objectId"
+}
\ No newline at end of file
diff --git a/tests/test_cases/type_unit_test/verified_output/type_renders/bson_schema/schedule.json b/tests/test_cases/type_unit_test/verified_output/type_renders/bson_schema/schedule.json
new file mode 100644
index 0000000..231bfea
--- /dev/null
+++ b/tests/test_cases/type_unit_test/verified_output/type_renders/bson_schema/schedule.json
@@ -0,0 +1,19 @@
+{
+  "bsonType": "array",
+  "items": {
+    "bsonType": "object",
+    "properties": {
+      "from": {
+        "bsonType": "date"
+      },
+      "to": {
+        "bsonType": "date"
+      }
+    },
+    "additionalProperties": false,
+    "required": [
+      "from",
+      "to"
+    ]
+  }
+}
\ No newline at end of file
diff --git a/tests/test_cases/type_unit_test/verified_output/type_renders/bson_schema/sentence.json b/tests/test_cases/type_unit_test/verified_output/type_renders/bson_schema/sentence.json
new file mode 100644
index 0000000..b0d983f
--- /dev/null
+++ b/tests/test_cases/type_unit_test/verified_output/type_renders/bson_schema/sentence.json
@@ -0,0 +1,4 @@
+{
+  "pattern": "^[^\\t\\n\\r]{0,255}$",
+  "bsonType": "string"
+}
\ No newline at end of file
diff --git a/tests/test_cases/type_unit_test/verified_output/type_renders/bson_schema/word.json b/tests/test_cases/type_unit_test/verified_output/type_renders/bson_schema/word.json
new file mode 100644
index 0000000..47e58eb
--- /dev/null
+++ b/tests/test_cases/type_unit_test/verified_output/type_renders/bson_schema/word.json
@@ -0,0 +1,4 @@
+{
+  "pattern": "^\\S{1,40}$",
+  "bsonType": "string"
+}
\ No newline at end of file
diff --git a/tests/test_cases/type_unit_test/verified_output/type_renders/json_schema/appointment.yaml b/tests/test_cases/type_unit_test/verified_output/type_renders/json_schema/appointment.yaml
new file mode 100644
index 0000000..eaa6522
--- /dev/null
+++ b/tests/test_cases/type_unit_test/verified_output/type_renders/json_schema/appointment.yaml
@@ -0,0 +1,15 @@
+description: A date/time range
+type: object
+properties:
+  from:
+    description: An ISO 8601 formatted date-time string
+    type: string
+    format: date-time
+  to:
+    description: An ISO 8601 formatted date-time string
+    type: string
+    format: date-time
+additionalProperties: false
+required:
+- from
+- to
diff --git a/tests/test_cases/type_unit_test/verified_output/type_renders/json_schema/date-time.yaml b/tests/test_cases/type_unit_test/verified_output/type_renders/json_schema/date-time.yaml
new file mode 100644
index 0000000..3c456cf
--- /dev/null
+++ b/tests/test_cases/type_unit_test/verified_output/type_renders/json_schema/date-time.yaml
@@ -0,0 +1,3 @@
+description: An ISO 8601 formatted date-time string
+type: string
+format: date-time
diff --git a/tests/test_cases/type_unit_test/verified_output/type_renders/json_schema/email.yaml b/tests/test_cases/type_unit_test/verified_output/type_renders/json_schema/email.yaml
new file mode 100644
index 0000000..c026ced
--- /dev/null
+++ b/tests/test_cases/type_unit_test/verified_output/type_renders/json_schema/email.yaml
@@ -0,0 +1,3 @@
+description: A valid email address
+type: string
+pattern: ^[^\s@]+@[^\s@]+\.[^\s@]+$
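The json_schema renders, in turn, can be fed to any standard JSON Schema validator. A small sketch using the jsonschema package, which is assumed here as a dev dependency rather than confirmed from the project's Pipfile:

```python
# Illustrative validation of a document against a rendered JSON schema.
import yaml
from jsonschema import validate, ValidationError

with open("tests/test_cases/type_unit_test/verified_output/"
          "type_renders/json_schema/appointment.yaml") as f:
    schema = yaml.safe_load(f)

good = {"from": "2024-01-01T09:00:00Z", "to": "2024-01-01T10:00:00Z"}
validate(instance=good, schema=schema)  # passes silently

try:
    validate(instance={"from": "2024-01-01T09:00:00Z"}, schema=schema)
except ValidationError as e:
    print(e.message)  # 'to' is a required property
```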
diff --git a/tests/test_cases/type_unit_test/verified_output/type_renders/json_schema/identifier.yaml b/tests/test_cases/type_unit_test/verified_output/type_renders/json_schema/identifier.yaml
new file mode 100644
index 0000000..d9cc13c
--- /dev/null
+++ b/tests/test_cases/type_unit_test/verified_output/type_renders/json_schema/identifier.yaml
@@ -0,0 +1,3 @@
+description: A unique identifier for a document
+type: string
+pattern: ^[0-9a-fA-F]{24}$
diff --git a/tests/test_cases/type_unit_test/verified_output/type_renders/json_schema/schedule.yaml b/tests/test_cases/type_unit_test/verified_output/type_renders/json_schema/schedule.yaml
new file mode 100644
index 0000000..f896bf6
--- /dev/null
+++ b/tests/test_cases/type_unit_test/verified_output/type_renders/json_schema/schedule.yaml
@@ -0,0 +1,18 @@
+description: A list of appointments
+type: array
+items:
+  description: A date/time range
+  type: object
+  properties:
+    from:
+      description: An ISO 8601 formatted date-time string
+      type: string
+      format: date-time
+    to:
+      description: An ISO 8601 formatted date-time string
+      type: string
+      format: date-time
+  additionalProperties: false
+  required:
+  - from
+  - to
diff --git a/tests/test_cases/type_unit_test/verified_output/type_renders/json_schema/sentence.yaml b/tests/test_cases/type_unit_test/verified_output/type_renders/json_schema/sentence.yaml
new file mode 100644
index 0000000..9eb03d8
--- /dev/null
+++ b/tests/test_cases/type_unit_test/verified_output/type_renders/json_schema/sentence.yaml
@@ -0,0 +1,3 @@
+description: A String of text, 0 to 255 characters with no special characters
+type: string
+pattern: ^[^\t\n\r]{0,255}$
diff --git a/tests/test_cases/type_unit_test/verified_output/type_renders/json_schema/word.yaml b/tests/test_cases/type_unit_test/verified_output/type_renders/json_schema/word.yaml
new file mode 100644
index 0000000..565a510
--- /dev/null
+++ b/tests/test_cases/type_unit_test/verified_output/type_renders/json_schema/word.yaml
@@ -0,0 +1,4 @@
+description: A String of text, 1 to 40 characters with no spaces, or special characters
+  like /t or /n
+type: string
+pattern: ^\S{1,40}$
diff --git a/tests/test_cases/validation_errors/collections/media.yaml b/tests/test_cases/validation_errors/collections/media.yaml
index 2935753..6d1a705 100644
--- a/tests/test_cases/validation_errors/collections/media.yaml
+++ b/tests/test_cases/validation_errors/collections/media.yaml
@@ -1,4 +1,3 @@
-title: Media Collection
 description: Collection for managing media items
 name: media
 not_versions:
diff --git a/tests/test_cases/validation_errors/collections/missing_ref.yaml b/tests/test_cases/validation_errors/collections/missing_ref.yaml
index 3e20706..3213a9c 100644
--- a/tests/test_cases/validation_errors/collections/missing_ref.yaml
+++ b/tests/test_cases/validation_errors/collections/missing_ref.yaml
@@ -1,4 +1,3 @@
-title: MissingRef
 name: missing_ref
 versions:
   - version: 1.0.0
diff --git a/tests/test_cases/validation_errors/collections/name_missing.yaml b/tests/test_cases/validation_errors/collections/name_missing.yaml
index 7ed0854..5ab7f81 100644
--- a/tests/test_cases/validation_errors/collections/name_missing.yaml
+++ b/tests/test_cases/validation_errors/collections/name_missing.yaml
@@ -1,4 +1,3 @@
-title: Name Missing
 description: Config without a name
 versions:
   - version: "1.0.0.1"
diff --git a/tests/test_cases/validation_errors/collections/organization.yaml b/tests/test_cases/validation_errors/collections/organization.yaml
index e10fcd3..a7195ea 100644
--- a/tests/test_cases/validation_errors/collections/organization.yaml
+++ b/tests/test_cases/validation_errors/collections/organization.yaml
@@ -1,4 +1,3 @@
-title: Organization Collection
 description: Collection for managing organizations
 name: organization
 versions:
diff --git a/tests/test_cases/validation_errors/collections/search.yaml b/tests/test_cases/validation_errors/collections/search.yaml
index 677a511..3feb31a 100644
--- a/tests/test_cases/validation_errors/collections/search.yaml
+++ b/tests/test_cases/validation_errors/collections/search.yaml
@@ -1,4 +1,3 @@
-title: Search Collection
 description: Collection for managing search operations
 name: search
 versions:
diff --git a/tests/test_cases/validation_errors/collections/test.yaml b/tests/test_cases/validation_errors/collections/test.yaml
index a746f88..63114da 100644
--- a/tests/test_cases/validation_errors/collections/test.yaml
+++ b/tests/test_cases/validation_errors/collections/test.yaml
@@ -1,4 +1,3 @@
-title: Test Collection
 description: Collection for testing
 name: test
 versions:
diff --git a/tests/test_cases/validation_errors/collections/user.yaml b/tests/test_cases/validation_errors/collections/user.yaml
index 3d18552..243261f 100644
--- a/tests/test_cases/validation_errors/collections/user.yaml
+++ b/tests/test_cases/validation_errors/collections/user.yaml
@@ -1,4 +1,3 @@
-title: User Collection
 description: Collection for managing users
 name: user
 versions:
diff --git a/tests/test_cases/validation_errors/dictionary/media.1.0.0.yaml b/tests/test_cases/validation_errors/dictionary/media.1.0.0.yaml
index e39d0ff..8056279 100644
--- a/tests/test_cases/validation_errors/dictionary/media.1.0.0.yaml
+++ b/tests/test_cases/validation_errors/dictionary/media.1.0.0.yaml
@@ -1,4 +1,3 @@
-title: Media
 description: A media item in the system
 type: object
 properties:
@@ -11,7 +10,7 @@ properties:
     required: true
   type:
     description: The type of media
-    $ref: bad_file_ref
+    ref: bad_file_ref
   status:
     description: The current status of the media
     type: word
diff --git a/tests/test_cases/validation_errors/dictionary/missing_ref.1.0.0.yaml b/tests/test_cases/validation_errors/dictionary/missing_ref.1.0.0.yaml
index 639df57..cca8413 100644
--- a/tests/test_cases/validation_errors/dictionary/missing_ref.1.0.0.yaml
+++ b/tests/test_cases/validation_errors/dictionary/missing_ref.1.0.0.yaml
@@ -1,7 +1,6 @@
-title: MissingRef
-description: This schema references a missing $ref
+description: This schema references a missing ref
 type: object
 properties:
   missing:
     description: This property references a missing schema
-    $ref: does_not_exist.1.0.0
\ No newline at end of file
+    ref: does_not_exist.1.0.0
\ No newline at end of file
diff --git a/tests/test_cases/validation_errors/dictionary/organization.1.0.0.yaml b/tests/test_cases/validation_errors/dictionary/organization.1.0.0.yaml
index 6721cf4..e08513c 100644
--- a/tests/test_cases/validation_errors/dictionary/organization.1.0.0.yaml
+++ b/tests/test_cases/validation_errors/dictionary/organization.1.0.0.yaml
@@ -1,4 +1,3 @@
-title: Organization
 description: An organization in the system
 type: object
 properties:
diff --git a/tests/test_cases/validation_errors/dictionary/search.1.0.0.yaml b/tests/test_cases/validation_errors/dictionary/search.1.0.0.yaml
index 20f37d2..f4cd3d3 100644
--- a/tests/test_cases/validation_errors/dictionary/search.1.0.0.yaml
+++ b/tests/test_cases/validation_errors/dictionary/search.1.0.0.yaml
@@ -1,4 +1,3 @@
-title: Search
 description: A search index that is used to support an elastic search polymorphic query service
 type: object
 properties:
@@ -11,12 +10,11 @@ properties:
     type: identity
     required: true
   one_of:
-    type_property: collection_name
     schemas:
       media:
-        $ref: media.1.0.0
+        ref: media.1.0.0
      organization:
-        $ref: organization.1.0.0
+        ref: organization.1.0.0
      user:
-        $ref: user.1.0.0
+        ref: user.1.0.0
\ No newline at end of file
ref: organization.1.0.0 user: - $ref: user.1.0.0 + ref: user.1.0.0 \ No newline at end of file diff --git a/tests/test_cases/validation_errors/dictionary/test.1.0.0.yaml b/tests/test_cases/validation_errors/dictionary/test.1.0.0.yaml index 35d50df..254d6e9 100644 --- a/tests/test_cases/validation_errors/dictionary/test.1.0.0.yaml +++ b/tests/test_cases/validation_errors/dictionary/test.1.0.0.yaml @@ -1,4 +1,3 @@ -title: Media description: A media item in the system type: object properties: @@ -6,7 +5,7 @@ properties: description: The unique identifier for the media type: identity type: - $ref: bad_file_ref + ref: bad_file_ref missing_type: description: A property missing the type field bad_type: diff --git a/tests/test_cases/validation_errors/dictionary/user.1.0.0.yaml b/tests/test_cases/validation_errors/dictionary/user.1.0.0.yaml index ed423b7..e42064a 100644 --- a/tests/test_cases/validation_errors/dictionary/user.1.0.0.yaml +++ b/tests/test_cases/validation_errors/dictionary/user.1.0.0.yaml @@ -1,4 +1,3 @@ -title: User Collection description: A user collection for testing the schema system type: object properties: diff --git a/tests/test_cases/validation_errors/dictionary/user.1.0.1.yaml b/tests/test_cases/validation_errors/dictionary/user.1.0.1.yaml index 1038703..50bc21c 100644 --- a/tests/test_cases/validation_errors/dictionary/user.1.0.1.yaml +++ b/tests/test_cases/validation_errors/dictionary/user.1.0.1.yaml @@ -1,4 +1,3 @@ -title: User Collection description: A user collection for testing the schema system type: object properties: diff --git a/tests/test_cases/validation_errors/types/acount.yaml b/tests/test_cases/validation_errors/types/acount.yaml index 6d891b2..147986a 100644 --- a/tests/test_cases/validation_errors/types/acount.yaml +++ b/tests/test_cases/validation_errors/types/acount.yaml @@ -1,4 +1,3 @@ -title: Count description: A positive integer value json_type: type: number diff --git a/tests/test_cases/validation_errors/types/breadcrumb.yaml b/tests/test_cases/validation_errors/types/breadcrumb.yaml index 5a4d66c..ab8ceb2 100644 --- a/tests/test_cases/validation_errors/types/breadcrumb.yaml +++ b/tests/test_cases/validation_errors/types/breadcrumb.yaml @@ -1,4 +1,3 @@ -title: Breadcrumb description: A tracking breadcrumb type: object properties: diff --git a/tests/test_cases/validation_errors/types/date-time.yaml b/tests/test_cases/validation_errors/types/date-time.yaml index 288b056..8802192 100644 --- a/tests/test_cases/validation_errors/types/date-time.yaml +++ b/tests/test_cases/validation_errors/types/date-time.yaml @@ -1,4 +1,3 @@ -title: DateTime description: An ISO 8601 formatted date-time string schema: type: string diff --git a/tests/test_cases/validation_errors/types/email.yaml b/tests/test_cases/validation_errors/types/email.yaml index 0b06d14..59fa5be 100644 --- a/tests/test_cases/validation_errors/types/email.yaml +++ b/tests/test_cases/validation_errors/types/email.yaml @@ -1,3 +1,2 @@ -title: Email description: A valid email address schema: not_a_dictionary diff --git a/tests/test_cases/validation_errors/types/identity.yaml b/tests/test_cases/validation_errors/types/identity.yaml index b0910b1..acd4e47 100644 --- a/tests/test_cases/validation_errors/types/identity.yaml +++ b/tests/test_cases/validation_errors/types/identity.yaml @@ -1,4 +1,3 @@ -title: Identity description: A unique identifier for a document json_type: "not_a_dictionary" bson_type: diff --git a/tests/test_cases/validation_errors/types/sentence.yaml 
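A pattern worth noting across these dictionary fixtures: every `$ref` key becomes a bare `ref`, and targets such as `bad_file_ref` and `does_not_exist.1.0.0` deliberately point at schemas that are absent, so the validator has something to report. A hypothetical sketch of how dangling `ref` targets might be collected from a parsed dictionary; the function name and error shape are illustrative, not the configurator's actual validation API:

```python
# Hypothetical helper: walk a parsed dictionary document and collect every
# "ref" value that does not name a known schema.
def find_unresolved_refs(node, known_schemas, path="root"):
    errors = []
    if isinstance(node, dict):
        ref = node.get("ref")
        if ref is not None and ref not in known_schemas:
            errors.append({"path": path, "ref": ref, "error": "unresolved ref"})
        for key, value in node.items():
            errors.extend(find_unresolved_refs(value, known_schemas, f"{path}.{key}"))
    elif isinstance(node, list):
        for i, value in enumerate(node):
            errors.extend(find_unresolved_refs(value, known_schemas, f"{path}[{i}]"))
    return errors

doc = {"properties": {"missing": {"ref": "does_not_exist.1.0.0"}}}
print(find_unresolved_refs(doc, known_schemas={"media.1.0.0"}))
# [{'path': 'root.properties.missing', 'ref': 'does_not_exist.1.0.0', 'error': 'unresolved ref'}]
```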
b/tests/test_cases/validation_errors/types/sentence.yaml index 6d81348..4975590 100644 --- a/tests/test_cases/validation_errors/types/sentence.yaml +++ b/tests/test_cases/validation_errors/types/sentence.yaml @@ -1,4 +1,3 @@ -title: Sentence description: A String of text, at least 4 and no more than 255 characters with no special characters schema: type: string diff --git a/tests/test_cases/validation_errors/types/street_address.yaml b/tests/test_cases/validation_errors/types/street_address.yaml index 3ef1bd6..6028ee7 100644 --- a/tests/test_cases/validation_errors/types/street_address.yaml +++ b/tests/test_cases/validation_errors/types/street_address.yaml @@ -1,4 +1,3 @@ -title: Street Address description: A street address type: object properties: diff --git a/tests/test_cases/validation_errors/types/url.yaml b/tests/test_cases/validation_errors/types/url.yaml index 1c35e86..b85b130 100644 --- a/tests/test_cases/validation_errors/types/url.yaml +++ b/tests/test_cases/validation_errors/types/url.yaml @@ -1,4 +1,3 @@ -title: URL description: A valid URL schema: type: string diff --git a/tests/test_cases/validation_errors/types/us_phone.yaml b/tests/test_cases/validation_errors/types/us_phone.yaml index 860e1fb..833ede6 100644 --- a/tests/test_cases/validation_errors/types/us_phone.yaml +++ b/tests/test_cases/validation_errors/types/us_phone.yaml @@ -1,4 +1,3 @@ -title: US Phone description: A US phone number in E.164 format schema: type: string diff --git a/tests/test_cases/validation_errors/types/word.yaml b/tests/test_cases/validation_errors/types/word.yaml index e3da35d..3a20b69 100644 --- a/tests/test_cases/validation_errors/types/word.yaml +++ b/tests/test_cases/validation_errors/types/word.yaml @@ -1,4 +1,3 @@ -title: Word description: A String of text, at least 4 and no more than 40 characters with no spaces, or special characters like /t or /n schema: type: string diff --git a/tests/test_server.py b/tests/test_server.py index e580f79..2efe0c2 100644 --- a/tests/test_server.py +++ b/tests/test_server.py @@ -1,37 +1,30 @@ import unittest -from unittest.mock import patch, MagicMock -import signal -import sys - -# Mock Config to prevent auto-processing during tests -mock_config = MagicMock() -mock_config.AUTO_PROCESS = False -mock_config.EXIT_AFTER_PROCESSING = False -mock_config.MONGODB_API_PORT = 8081 -mock_config.BUILT_AT = "test" - -# Patch both Config and MongoIO before importing server -with patch('stage0_py_utils.Config.get_instance', return_value=mock_config), \ - patch('stage0_py_utils.MongoIO.get_instance') as mock_get_instance: - mock_get_instance.return_value = MagicMock() - from stage0_mongodb_api.server import app, handle_exit +from configurator.server import app +from configurator.utils.config import Config class TestServer(unittest.TestCase): - """Test suite for server initialization and configuration.""" + """Test suite for server initialization and configuration. + NOTE: Config is never mocked in these tests. The real Config singleton is used, and config values are set/reset in setUp/tearDown. 
+ """ def setUp(self): """Set up test fixtures.""" self.app = app.test_client() - # Patch MongoIO for every test to ensure no real DB connection - patcher = patch('stage0_py_utils.MongoIO.get_instance', return_value=MagicMock()) - self.addCleanup(patcher.stop) - self.mock_mongo = patcher.start() + self.config = Config.get_instance() + self._original_api_port = self.config.API_PORT + self._original_built_at = self.config.BUILT_AT + self.config.API_PORT = 8081 + self.config.BUILT_AT = "test" + + def tearDown(self): + self.config.API_PORT = self._original_api_port + self.config.BUILT_AT = self._original_built_at def test_app_initialization(self): """Test Flask app initialization.""" # Assert self.assertIsNotNone(app) - self.assertEqual(app.name, 'stage0_mongodb_api.server') + self.assertEqual(app.name, 'configurator.server') def test_health_endpoint(self): """Test health check endpoint.""" @@ -49,21 +42,53 @@ def test_config_routes_registered(self): # Assert self.assertNotEqual(response.status_code, 404) - def test_collection_routes_registered(self): - """Test collection routes are registered.""" - with patch('stage0_mongodb_api.routes.collection_routes.CollectionService.list_collections', return_value=[{"collection_name": "dummy", "version": "1.0.0"}]): - # Act - response = self.app.get('/api/collections/') - # Assert - self.assertEqual(response.status_code, 200) + def test_configuration_routes_registered(self): + """Test configuration routes are registered.""" + # Act + response = self.app.get('/api/configurations') + + # Assert + self.assertNotEqual(response.status_code, 404) + + def test_dictionary_routes_registered(self): + """Test dictionary routes are registered.""" + # Act + response = self.app.get('/api/dictionaries') + + # Assert + self.assertNotEqual(response.status_code, 404) - def test_render_routes_registered(self): - """Test render routes are registered.""" + def test_type_routes_registered(self): + """Test type routes are registered.""" # Act - response = self.app.get('/api/render/json_schema/users') + response = self.app.get('/api/types') # Assert self.assertNotEqual(response.status_code, 404) + def test_database_routes_registered(self): + """Test database routes are registered.""" + # Act + response = self.app.get('/api/database/') + + # Assert + self.assertNotEqual(response.status_code, 404) + + def test_enumerator_routes_registered(self): + """Test enumerator routes are registered.""" + # Act + response = self.app.get('/api/enumerators/') + + # Assert + # The route is registered, but may return 500 if file doesn't exist + # This is expected behavior - the route exists but the file is missing + self.assertIn(response.status_code, [200, 500]) + if response.status_code == 500: + # If 500, verify it's a proper error response + self.assertIsInstance(response.json, dict) + else: + # If 200, verify it returns valid JSON + self.assertIsInstance(response.json, list) + if __name__ == '__main__': unittest.main() diff --git a/tests/uitls/__init__.py b/tests/uitls/__init__.py new file mode 100644 index 0000000..e69de29 diff --git a/tests/uitls/test_config_defaults.py b/tests/uitls/test_config_defaults.py new file mode 100644 index 0000000..6b94c78 --- /dev/null +++ b/tests/uitls/test_config_defaults.py @@ -0,0 +1,58 @@ +import json +import unittest +from configurator.utils.config import Config + +class TestConfigDefaults(unittest.TestCase): + + def setUp(self): + """Re-initialize the config for each test.""" + self.config = Config.get_instance() + self.config.initialize() + + def 
test_default_string_properties(self): + for key, default in self.config.config_strings.items(): + # Skip LOGGING_LEVEL if it's set by environment variable + if key == 'LOGGING_LEVEL' and any(item['name'] == key and item['from'] == 'environment' for item in self.config.config_items): + continue + self.assertEqual(getattr(self.config, key), default) + + def test_default_int_properties(self): + for key, default in self.config.config_ints.items(): + self.assertEqual(getattr(self.config, key), int(default)) + + def test_default_boolean_properties(self): + for key, default in self.config.config_booleans.items(): + self.assertEqual(getattr(self.config, key), (default.lower() == "true")) + + def test_default_string_secret_properties(self): + for key, default in self.config.config_string_secrets.items(): + self.assertEqual(getattr(self.config, key), default) + + def test_to_dict(self): + """Test the to_dict method of the Config class.""" + # Convert the config object to a dictionary + result_dict = self.config.to_dict() + self.assertIsInstance(result_dict["config_items"], list) + + def test_default_string_ci(self): + for key, default in {**self.config.config_strings, **self.config.config_ints}.items(): + # Skip LOGGING_LEVEL if it's set by environment variable + if key == 'LOGGING_LEVEL' and any(item['name'] == key and item['from'] == 'environment' for item in self.config.config_items): + continue + self._test_config_default_value(key, default) + + def test_default_secret_ci(self): + for key, default in self.config.config_string_secrets.items(): + self._test_config_default_value(key, "secret") + + def _test_config_default_value(self, config_name, expected_value): + """Helper function to check default values.""" + items = self.config.config_items + item = next((i for i in items if i['name'] == config_name), None) + self.assertIsNotNone(item) + self.assertEqual(item['name'], config_name) + self.assertEqual(item['from'], "default") + self.assertEqual(item['value'], expected_value) + +if __name__ == '__main__': + unittest.main() \ No newline at end of file diff --git a/tests/uitls/test_config_env.py b/tests/uitls/test_config_env.py new file mode 100644 index 0000000..bef6c5e --- /dev/null +++ b/tests/uitls/test_config_env.py @@ -0,0 +1,71 @@ +import unittest +import os +from configurator.utils.config import Config + +class TestConfigEnvironment(unittest.TestCase): + """Test Config environment variable loading. + NOTE: Config is never mocked in these tests. The real Config singleton is used, and config values are set/reset in setUp/tearDown. 
+ """ + + def setUp(self): + """Re-initialize the config for each test.""" + self.config = Config.get_instance() + self.config.initialize() + + # Set all environment variables to "ENV_VALUE" + for key, default in {**self.config.config_strings, **self.config.config_string_secrets}.items(): + if key != "BUILT_AT" and key != "INPUT_FOLDER": + os.environ[key] = "ENV_VALUE" + + for key, default in self.config.config_ints.items(): + os.environ[key] = "1234" + + for key, default in self.config.config_booleans.items(): + os.environ[key] = "true" + + # Initialize the Config object + self.config._instance = None + self.config.initialize() + + # Reset environment variables + for key, default in {**self.config.config_strings, **self.config.config_ints, **self.config.config_string_secrets}.items(): + if key != "BUILT_AT" and key != "INPUT_FOLDER": + del os.environ[key] + + def test_env_string_properties(self): + for key, default in {**self.config.config_strings, **self.config.config_string_secrets}.items(): + if key != "BUILT_AT" and key != "INPUT_FOLDER": + self.assertEqual(getattr(self.config, key), "ENV_VALUE") + + def test_env_int_properties(self): + for key, default in self.config.config_ints.items(): + self.assertEqual(getattr(self.config, key), 1234) + + def test_env_boolean_properties(self): + for key, default in self.config.config_booleans.items(): + self.assertEqual(getattr(self.config, key), True) + + def test_env_string_ci(self): + for key, default in self.config.config_strings.items(): + if key != "BUILT_AT" and key != "INPUT_FOLDER": + self._test_config_environment_value(key, "ENV_VALUE") + + def test_env_int_ci(self): + for key, default in self.config.config_ints.items(): + self._test_config_environment_value(key, "1234") + + def test_env_secret_ci(self): + for key, default in self.config.config_string_secrets.items(): + self._test_config_environment_value(key, "secret") + + def _test_config_environment_value(self, ci_name, value): + """Helper function to check environment values.""" + items = self.config.config_items + item = next((i for i in items if i['name'] == ci_name), None) + self.assertIsNotNone(item) + self.assertEqual(item['name'], ci_name) + self.assertEqual(item['value'], value) + self.assertEqual(item['from'], "environment") + +if __name__ == '__main__': + unittest.main() \ No newline at end of file diff --git a/tests/uitls/test_config_file.py b/tests/uitls/test_config_file.py new file mode 100644 index 0000000..3854d84 --- /dev/null +++ b/tests/uitls/test_config_file.py @@ -0,0 +1,63 @@ +import unittest +import os +from configurator.utils.config import Config + +class TestConfigFiles(unittest.TestCase): + """Test Config file loading using api_config feature. + NOTE: Config is never mocked in these tests. The real Config singleton is used. 
+ """ + + def setUp(self): + # Reset the Config singleton to ensure clean state + Config._instance = None + + # Set INPUT_FOLDER to point to the test config files + os.environ["INPUT_FOLDER"] = "./tests/test_cases/config_files/" + + # Initialize the Config object + self.config = Config.get_instance() + self.config.initialize() + + # Clean up environment variable + del os.environ["INPUT_FOLDER"] + + def test_file_string_properties(self): + """Test that string properties are loaded from api_config files.""" + self.assertEqual(self.config.MONGO_DB_NAME, "TEST_VALUE") + + def test_file_int_properties(self): + """Test that integer properties are loaded from api_config files.""" + self.assertEqual(self.config.API_PORT, 9999) + + def test_file_boolean_properties(self): + """Test that boolean properties are loaded from api_config files.""" + self.assertEqual(self.config.AUTO_PROCESS, True) + + def test_file_secret_properties(self): + """Test that secret properties are loaded from api_config files.""" + self.assertEqual(self.config.MONGO_CONNECTION_STRING, "TEST_VALUE") + + def test_config_items_source(self): + """Test that config items show correct source.""" + # Test that our test values show as coming from "file" + test_configs = { + "MONGO_DB_NAME": "TEST_VALUE", + "API_PORT": "9999", + "AUTO_PROCESS": "true", + "MONGO_CONNECTION_STRING": "secret" # Secret fields show "secret" not actual value + } + + for config_name, expected_value in test_configs.items(): + item = next((i for i in self.config.config_items if i['name'] == config_name), None) + self.assertIsNotNone(item, f"Config item {config_name} not found") + self.assertEqual(item['from'], "file") + self.assertEqual(item['value'], expected_value) + + def test_default_values_preserved(self): + """Test that default values are preserved for non-test configs.""" + # Test that some default values are still used + self.assertEqual(self.config.SPA_PORT, 9999) # From file + self.assertEqual(self.config.LOAD_TEST_DATA, True) # From file + +if __name__ == '__main__': + unittest.main() \ No newline at end of file diff --git a/tests/uitls/test_configurator_event.py b/tests/uitls/test_configurator_event.py new file mode 100644 index 0000000..76135e5 --- /dev/null +++ b/tests/uitls/test_configurator_event.py @@ -0,0 +1,130 @@ +import unittest +from datetime import datetime +from configurator.utils.configurator_exception import ConfiguratorEvent + +class TestConfiguratorEvent(unittest.TestCase): + def setUp(self): + self.test_event_id = "test_event_123" + self.test_event_type = "test_type" + self.test_event_data = {"key": "value", "number": 42} + + def test_configurator_event_initialization(self): + event = ConfiguratorEvent(self.test_event_id, self.test_event_type) + self.assertEqual(event.id, self.test_event_id) + self.assertEqual(event.type, self.test_event_type) + self.assertIsNone(event.data) + self.assertIsInstance(event.starts, datetime) + self.assertIsNone(event.ends) + self.assertEqual(event.status, "PENDING") + self.assertEqual(event.sub_events, []) + + def test_configurator_event_initialization_with_data(self): + event = ConfiguratorEvent(self.test_event_id, self.test_event_type, self.test_event_data) + self.assertEqual(event.id, self.test_event_id) + self.assertEqual(event.type, self.test_event_type) + self.assertEqual(event.data, self.test_event_data) + self.assertIsInstance(event.starts, datetime) + self.assertIsNone(event.ends) + self.assertEqual(event.status, "PENDING") + self.assertEqual(event.sub_events, []) + + def 
test_append_events(self): + event = ConfiguratorEvent(self.test_event_id, self.test_event_type) + event.append_events([]) + self.assertEqual(event.sub_events, []) + sub_event = ConfiguratorEvent("sub_event_1", "sub_type") + event.append_events([sub_event]) + self.assertEqual(len(event.sub_events), 1) + self.assertEqual(event.sub_events[0], sub_event) + sub_event_2 = ConfiguratorEvent("sub_event_2", "sub_type_2") + sub_event_3 = ConfiguratorEvent("sub_event_3", "sub_type_3") + event.append_events([sub_event_2, sub_event_3]) + self.assertEqual(len(event.sub_events), 3) + self.assertIn(sub_event_2, event.sub_events) + self.assertIn(sub_event_3, event.sub_events) + + def test_record_success(self): + event = ConfiguratorEvent(self.test_event_id, self.test_event_type) + event.record_success() + self.assertEqual(event.status, "SUCCESS") + self.assertIsInstance(event.ends, datetime) + self.assertLessEqual(event.starts, event.ends) + + def test_record_failure(self): + event = ConfiguratorEvent(self.test_event_id, self.test_event_type) + failure_message = "Something went wrong" + failure_data = {"code": 500} + event.record_failure(failure_message, failure_data) + self.assertEqual(event.status, "FAILURE") + self.assertIsInstance(event.ends, datetime) + expected_data = {"error": failure_message, "code": 500} + self.assertEqual(event.data, expected_data) + self.assertLessEqual(event.starts, event.ends) + + def test_record_failure_overwrites_existing_data(self): + initial_data = {"initial": "data"} + event = ConfiguratorEvent(self.test_event_id, self.test_event_type, initial_data) + failure_message = "Something went wrong" + event.record_failure(failure_message) + expected_data = {"error": failure_message} + self.assertEqual(event.data, expected_data) + self.assertNotEqual(event.data, initial_data) + + def test_to_dict_with_minimal_data(self): + event = ConfiguratorEvent(self.test_event_id, self.test_event_type) + result = event.to_dict() + self.assertEqual(result["id"], self.test_event_id) + self.assertEqual(result["type"], self.test_event_type) + self.assertIsNone(result["data"]) + self.assertIsInstance(result["starts"], datetime) + self.assertIsNone(result["ends"]) + self.assertEqual(result["status"], "PENDING") + self.assertEqual(result["sub_events"], []) + + def test_to_dict_with_complete_data(self): + event = ConfiguratorEvent(self.test_event_id, self.test_event_type, self.test_event_data) + sub_event = ConfiguratorEvent("sub_event", "sub_type") + event.sub_events = [sub_event] + event.record_success() + result = event.to_dict() + self.assertEqual(result["id"], self.test_event_id) + self.assertEqual(result["type"], self.test_event_type) + self.assertEqual(result["data"], self.test_event_data) + self.assertIsInstance(result["starts"], datetime) + self.assertIsInstance(result["ends"], datetime) + self.assertEqual(result["status"], "SUCCESS") + self.assertEqual(len(result["sub_events"]), 1) + self.assertEqual(result["sub_events"][0]["id"], "sub_event") + self.assertEqual(result["sub_events"][0]["type"], "sub_type") + + def test_to_dict_after_failure(self): + event = ConfiguratorEvent(self.test_event_id, self.test_event_type) + failure_message = "Test failure" + event.record_failure(failure_message) + result = event.to_dict() + self.assertEqual(result["id"], self.test_event_id) + self.assertEqual(result["type"], self.test_event_type) + expected_data = {"error": failure_message} + self.assertEqual(result["data"], expected_data) + self.assertIsInstance(result["starts"], datetime) + 
self.assertIsInstance(result["ends"], datetime) + self.assertEqual(result["status"], "FAILURE") + self.assertEqual(result["sub_events"], []) + + def test_event_lifecycle(self): + event = ConfiguratorEvent(self.test_event_id, self.test_event_type, self.test_event_data) + self.assertEqual(event.status, "PENDING") + self.assertIsNone(event.ends) + sub_event = ConfiguratorEvent("sub_event", "sub_type") + event.append_events([sub_event]) + event.record_success() + self.assertEqual(event.status, "SUCCESS") + self.assertIsInstance(event.ends, datetime) + self.assertEqual(len(event.sub_events), 1) + result = event.to_dict() + self.assertEqual(result["status"], "SUCCESS") + self.assertIsInstance(result["ends"], datetime) + self.assertEqual(len(result["sub_events"]), 1) + +if __name__ == '__main__': + unittest.main() \ No newline at end of file diff --git a/tests/uitls/test_configurator_exception.py b/tests/uitls/test_configurator_exception.py new file mode 100644 index 0000000..c22f907 --- /dev/null +++ b/tests/uitls/test_configurator_exception.py @@ -0,0 +1,186 @@ +import unittest +from datetime import datetime +from configurator.utils.configurator_exception import ConfiguratorException, ConfiguratorEvent + + +class TestConfiguratorException(unittest.TestCase): + """Test cases for ConfiguratorException class""" + + def test_configurator_exception_initialization(self): + """Test ConfiguratorException initialization with message and event""" + event = ConfiguratorEvent("test_id", "test_type") + exception = ConfiguratorException("Test error message", event) + + self.assertEqual(exception.message, "Test error message") + self.assertEqual(exception.event, event) + self.assertIsInstance(exception, Exception) + + def test_configurator_exception_str_representation(self): + """Test string representation of ConfiguratorException""" + event = ConfiguratorEvent("test_id", "test_type") + exception = ConfiguratorException("Test error message", event) + + self.assertEqual(str(exception), "Test error message") + + def test_configurator_exception_with_empty_message(self): + """Test ConfiguratorException with empty message""" + event = ConfiguratorEvent("test_id", "test_type") + exception = ConfiguratorException("", event) + + self.assertEqual(exception.message, "") + self.assertEqual(str(exception), "") + + +class TestConfiguratorEvent(unittest.TestCase): + """Test cases for ConfiguratorEvent class""" + + def setUp(self): + """Set up test fixtures""" + self.test_event_id = "test_event_123" + self.test_event_type = "test_type" + self.test_event_data = {"key": "value", "number": 42} + + def test_configurator_event_initialization(self): + """Test ConfiguratorEvent initialization with required parameters""" + event = ConfiguratorEvent(self.test_event_id, self.test_event_type) + + self.assertEqual(event.id, self.test_event_id) + self.assertEqual(event.type, self.test_event_type) + self.assertIsNone(event.data) + self.assertIsInstance(event.starts, datetime) + self.assertIsNone(event.ends) + self.assertEqual(event.status, "PENDING") + self.assertEqual(event.sub_events, []) + + def test_configurator_event_initialization_with_data(self): + """Test ConfiguratorEvent initialization with optional data parameter""" + event = ConfiguratorEvent(self.test_event_id, self.test_event_type, self.test_event_data) + + self.assertEqual(event.id, self.test_event_id) + self.assertEqual(event.type, self.test_event_type) + self.assertEqual(event.data, self.test_event_data) + self.assertIsInstance(event.starts, datetime) + 
self.assertIsNone(event.ends) + self.assertEqual(event.status, "PENDING") + self.assertEqual(event.sub_events, []) + + def test_append_events(self): + """Test append_events method""" + event = ConfiguratorEvent(self.test_event_id, self.test_event_type) + + # Test with empty list + event.append_events([]) + self.assertEqual(event.sub_events, []) + + # Test with single event + sub_event = ConfiguratorEvent("sub_event_1", "sub_type") + event.append_events([sub_event]) + self.assertEqual(len(event.sub_events), 1) + self.assertEqual(event.sub_events[0], sub_event) + + # Test with multiple events + sub_event_2 = ConfiguratorEvent("sub_event_2", "sub_type_2") + sub_event_3 = ConfiguratorEvent("sub_event_3", "sub_type_3") + event.append_events([sub_event_2, sub_event_3]) + self.assertEqual(len(event.sub_events), 3) + self.assertIn(sub_event_2, event.sub_events) + self.assertIn(sub_event_3, event.sub_events) + + def test_record_success(self): + """Test record_success method""" + event = ConfiguratorEvent(self.test_event_id, self.test_event_type) + event.record_success() + self.assertEqual(event.status, "SUCCESS") + self.assertIsInstance(event.ends, datetime) + self.assertLessEqual(event.starts, event.ends) + + def test_record_failure(self): + """Test record_failure method""" + event = ConfiguratorEvent(self.test_event_id, self.test_event_type) + failure_message = "Something went wrong" + failure_data = {"code": 500} + event.record_failure(failure_message, failure_data) + self.assertEqual(event.status, "FAILURE") + self.assertIsInstance(event.ends, datetime) + expected_data = {"error": failure_message, "code": 500} + self.assertEqual(event.data, expected_data) + self.assertLessEqual(event.starts, event.ends) + + def test_record_failure_overwrites_existing_data(self): + """Test that record_failure overwrites existing data""" + initial_data = {"initial": "data"} + event = ConfiguratorEvent(self.test_event_id, self.test_event_type, initial_data) + + failure_message = "Something went wrong" + event.record_failure(failure_message) + + expected_data = {"error": failure_message} + self.assertEqual(event.data, expected_data) + self.assertNotEqual(event.data, initial_data) + + def test_to_dict_with_minimal_data(self): + """Test to_dict method with minimal event data""" + event = ConfiguratorEvent(self.test_event_id, self.test_event_type) + result = event.to_dict() + + self.assertEqual(result["id"], self.test_event_id) + self.assertEqual(result["type"], self.test_event_type) + self.assertIsNone(result["data"]) + self.assertIsInstance(result["starts"], datetime) + self.assertIsNone(result["ends"]) + self.assertEqual(result["status"], "PENDING") + self.assertEqual(result["sub_events"], []) + + def test_to_dict_with_complete_data(self): + """Test to_dict method with complete event data""" + event = ConfiguratorEvent(self.test_event_id, self.test_event_type, self.test_event_data) + sub_event = ConfiguratorEvent("sub_event", "sub_type") + event.sub_events = [sub_event] + event.record_success() + result = event.to_dict() + + self.assertEqual(result["id"], self.test_event_id) + self.assertEqual(result["type"], self.test_event_type) + self.assertEqual(result["data"], self.test_event_data) + self.assertIsInstance(result["starts"], datetime) + self.assertIsInstance(result["ends"], datetime) + self.assertEqual(result["status"], "SUCCESS") + self.assertEqual(len(result["sub_events"]), 1) + self.assertEqual(result["sub_events"][0]["id"], "sub_event") + self.assertEqual(result["sub_events"][0]["type"], "sub_type") + + def 
test_to_dict_after_failure(self): + """Test to_dict method after recording failure""" + event = ConfiguratorEvent(self.test_event_id, self.test_event_type) + failure_message = "Test failure" + event.record_failure(failure_message) + result = event.to_dict() + + self.assertEqual(result["id"], self.test_event_id) + self.assertEqual(result["type"], self.test_event_type) + expected_data = {"error": failure_message} + self.assertEqual(result["data"], expected_data) + self.assertIsInstance(result["starts"], datetime) + self.assertIsInstance(result["ends"], datetime) + self.assertEqual(result["status"], "FAILURE") + self.assertEqual(result["sub_events"], []) + + def test_event_lifecycle(self): + """Test complete event lifecycle from creation to completion""" + event = ConfiguratorEvent(self.test_event_id, self.test_event_type, self.test_event_data) + self.assertEqual(event.status, "PENDING") + self.assertIsNone(event.ends) + sub_event = ConfiguratorEvent("sub_event", "sub_type") + event.append_events([sub_event]) + event.record_success() + self.assertEqual(event.status, "SUCCESS") + self.assertIsInstance(event.ends, datetime) + self.assertEqual(len(event.sub_events), 1) + result = event.to_dict() + self.assertEqual(result["status"], "SUCCESS") + self.assertIsInstance(result["ends"], datetime) + self.assertEqual(len(result["sub_events"]), 1) + + +if __name__ == '__main__': + unittest.main() diff --git a/tests/uitls/test_ejson_encoder.py b/tests/uitls/test_ejson_encoder.py new file mode 100644 index 0000000..9e71068 --- /dev/null +++ b/tests/uitls/test_ejson_encoder.py @@ -0,0 +1,146 @@ +import unittest +from unittest.mock import Mock +import datetime +from bson.objectid import ObjectId +from configurator.utils.ejson_encoder import MongoJSONEncoder + + +class TestMongoJSONEncoder(unittest.TestCase): + """Test suite for MongoJSONEncoder.""" + + def setUp(self): + """Set up test fixtures.""" + # Create a mock Flask app for the encoder + mock_app = Mock() + self.encoder = MongoJSONEncoder(mock_app) + + def test_objectid_encoding(self): + """Test ObjectId encoding.""" + # Create a test ObjectId + test_id = ObjectId() + + # Encode it + result = self.encoder.default(test_id) + + # Should be converted to string + self.assertIsInstance(result, str) + self.assertEqual(result, str(test_id)) + + def test_datetime_encoding(self): + """Test datetime encoding.""" + # Create a test datetime + test_datetime = datetime.datetime(2023, 1, 15, 12, 30, 45) + + # Encode it + result = self.encoder.default(test_datetime) + + # Should be converted to string + self.assertIsInstance(result, str) + self.assertEqual(result, str(test_datetime)) + + def test_date_encoding(self): + """Test date encoding.""" + # Create a test date + test_date = datetime.date(2023, 1, 15) + + # Encode it + result = self.encoder.default(test_date) + + # Should be converted to string + self.assertIsInstance(result, str) + self.assertEqual(result, str(test_date)) + + def test_object_with_isoformat_method(self): + """Test object with isoformat method.""" + # Create a mock object with isoformat method + mock_obj = Mock() + mock_obj.isoformat.return_value = "2023-01-15T12:30:45" + + # Encode it + result = self.encoder.default(mock_obj) + + # Should be converted to string + self.assertIsInstance(result, str) + self.assertEqual(result, str(mock_obj)) + + def test_regular_object_falls_back_to_parent(self): + """Test that regular objects fall back to parent default method.""" + # Create a regular object + regular_obj = {"key": "value"} + + # This should 
raise TypeError (parent's default behavior for dict) + with self.assertRaises(TypeError): + self.encoder.default(regular_obj) + + def test_string_object(self): + """Test string object (should fall back to parent).""" + # String should fall back to parent + with self.assertRaises(TypeError): + self.encoder.default("test string") + + def test_int_object(self): + """Test int object (should fall back to parent).""" + # Int should fall back to parent + with self.assertRaises(TypeError): + self.encoder.default(42) + + def test_list_object(self): + """Test list object (should fall back to parent).""" + # List should fall back to parent + with self.assertRaises(TypeError): + self.encoder.default([1, 2, 3]) + + def test_none_object(self): + """Test None object (should fall back to parent).""" + # None should fall back to parent + with self.assertRaises(TypeError): + self.encoder.default(None) + + def test_multiple_objectid_encoding(self): + """Test multiple ObjectId encoding.""" + # Create multiple ObjectIds + ids = [ObjectId() for _ in range(3)] + + for obj_id in ids: + result = self.encoder.default(obj_id) + self.assertIsInstance(result, str) + self.assertEqual(result, str(obj_id)) + + def test_edge_case_datetime(self): + """Test edge case datetime (epoch time).""" + # Test epoch time + epoch_datetime = datetime.datetime(1970, 1, 1, 0, 0, 0) + + result = self.encoder.default(epoch_datetime) + self.assertIsInstance(result, str) + self.assertEqual(result, str(epoch_datetime)) + + def test_future_datetime(self): + """Test future datetime.""" + # Test future date + future_datetime = datetime.datetime(2030, 12, 31, 23, 59, 59) + + result = self.encoder.default(future_datetime) + self.assertIsInstance(result, str) + self.assertEqual(result, str(future_datetime)) + + def test_custom_object_with_isoformat(self): + """Test custom object with isoformat method.""" + class CustomDate: + def __init__(self, year, month, day): + self.year = year + self.month = month + self.day = day + + def isoformat(self): + return f"{self.year:04d}-{self.month:02d}-{self.day:02d}" + + custom_date = CustomDate(2023, 1, 15) + + result = self.encoder.default(custom_date) + self.assertIsInstance(result, str) + self.assertEqual(result, str(custom_date)) + + +if __name__ == '__main__': + unittest.main() \ No newline at end of file diff --git a/tests/uitls/test_file_io.py b/tests/uitls/test_file_io.py new file mode 100644 index 0000000..9ed9a8e --- /dev/null +++ b/tests/uitls/test_file_io.py @@ -0,0 +1,227 @@ +import os +import unittest +from datetime import datetime +from configurator.utils.config import Config +from configurator.utils.file_io import FileIO, File +import tempfile +import json +import yaml +from unittest.mock import Mock, patch, mock_open +from configurator.utils.configurator_exception import ConfiguratorException + +class TestFile(unittest.TestCase): + """Test cases for File class""" + + def setUp(self): + """Set up test fixtures""" + self.temp_dir = tempfile.mkdtemp() + self.test_file_path = os.path.join(self.temp_dir, "test_file.txt") + self.test_yaml_path = os.path.join(self.temp_dir, "test.yaml") + self.test_json_path = os.path.join(self.temp_dir, "test.json") + + def tearDown(self): + """Clean up test fixtures""" + import shutil + shutil.rmtree(self.temp_dir, ignore_errors=True) + + def test_file_initialization_existing_file(self): + """Test File initialization with existing file""" + # Create a test file + with open(self.test_file_path, 'w') as f: + f.write("test content") + + file_obj = 
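The encoder tests above reduce `MongoJSONEncoder` to one rule: anything Mongo-ish or date-like is stringified, and everything else defers to the stock encoder, whose `default` raises `TypeError`. A short sketch of that tested behavior (the mock app mirrors the test fixture; a real Flask app would work the same way):

```python
import datetime
from unittest.mock import Mock
from bson.objectid import ObjectId
from configurator.utils.ejson_encoder import MongoJSONEncoder

encoder = MongoJSONEncoder(Mock())  # the tests pass a mock Flask app

print(encoder.default(ObjectId()))                  # 24-char hex string
print(encoder.default(datetime.date(2023, 1, 15)))  # '2023-01-15'

try:
    encoder.default({"plain": "dict"})
except TypeError:
    print("non-Mongo types fall through to json.JSONEncoder")
```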
File(self.test_file_path) + + self.assertEqual(file_obj.file_name, "test_file.txt") + self.assertGreater(file_obj.size, 0) + self.assertIsInstance(file_obj.created_at, str) + self.assertIsInstance(file_obj.updated_at, str) + + def test_file_initialization_nonexistent_file(self): + """Test File initialization with non-existent file""" + with self.assertRaises(ConfiguratorException) as context: + file_obj = File(self.test_file_path) + + self.assertIn("Failed to get file properties", str(context.exception)) + + def test_file_read_only_property(self): + """Test file read-only property detection - removed as no longer supported""" + # This test is no longer applicable as we removed read_only functionality + pass + + def test_file_to_dict(self): + """Test file to_dict method""" + # Create a test file + with open(self.test_file_path, 'w') as f: + f.write("test content") + + file_obj = File(self.test_file_path) + file_dict = file_obj.to_dict() + + expected_keys = ["file_name", "created_at", "updated_at", "size"] + for key in expected_keys: + self.assertIn(key, file_dict) + + self.assertEqual(file_dict["file_name"], "test_file.txt") + self.assertGreater(file_dict["size"], 0) + + def test_file_with_different_extensions(self): + """Test File class with different file extensions""" + extensions = [".yaml", ".yml", ".json", ".txt", ".md", ".py"] + + for ext in extensions: + file_path = os.path.join(self.temp_dir, f"test{ext}") + with open(file_path, 'w') as f: + f.write("content") + + file_obj = File(file_path) + self.assertEqual(file_obj.file_name, f"test{ext}") + + +class TestFileIO(unittest.TestCase): + """Test cases for FileIO class + NOTE: Config is never mocked in these tests. The real Config singleton is used, and INPUT_FOLDER is set to a temp directory in setUp and restored in tearDown. 
+ """ + + def setUp(self): + """Set up test fixtures""" + self.temp_dir = tempfile.mkdtemp() + self.file_io = FileIO() + self.config = Config.get_instance() + self._original_input_folder = self.config.INPUT_FOLDER + self.config.INPUT_FOLDER = self.temp_dir + # Create test files + self.test_yaml_path = os.path.join(self.temp_dir, "test.yaml") + self.test_json_path = os.path.join(self.temp_dir, "test.json") + self.test_txt_path = os.path.join(self.temp_dir, "test.txt") + # Create test data + self.yaml_data = {"name": "test", "value": 42} + self.json_data = {"key": "value", "number": 123} + + def tearDown(self): + """Clean up test fixtures""" + import shutil + shutil.rmtree(self.temp_dir, ignore_errors=True) + self.config.INPUT_FOLDER = self._original_input_folder + + def test_get_documents_empty_folder(self): + """Test get_documents with empty folder""" + with self.assertRaises(ConfiguratorException) as context: + self.file_io.get_documents("nonexistent_folder") + self.assertIn("Folder not found", str(context.exception)) + + def test_get_documents_with_files(self): + """Test get_documents with existing files""" + # Create test files + with open(self.test_yaml_path, 'w') as f: + yaml.dump(self.yaml_data, f) + with open(self.test_json_path, 'w') as f: + json.dump(self.json_data, f) + + files = self.file_io.get_documents("") + self.assertEqual(len(files), 2) + + file_names = [f.file_name for f in files] + self.assertIn("test.yaml", file_names) + self.assertIn("test.json", file_names) + + def test_get_document_yaml(self): + """Test get_document with YAML file""" + # Create test YAML file + with open(self.test_yaml_path, 'w') as f: + yaml.dump(self.yaml_data, f) + + result = self.file_io.get_document("", "test.yaml") + self.assertEqual(result, self.yaml_data) + + def test_get_document_json(self): + """Test get_document with JSON file""" + # Create test JSON file + with open(self.test_json_path, 'w') as f: + json.dump(self.json_data, f) + + result = self.file_io.get_document("", "test.json") + self.assertEqual(result, self.json_data) + + def test_get_document_unsupported_type(self): + """Test get_document with unsupported file type""" + # Create test file with unsupported extension + with open(self.test_txt_path, 'w') as f: + f.write("test content") + + with self.assertRaises(ConfiguratorException) as context: + self.file_io.get_document("", "test.txt") + + self.assertEqual(context.exception.event.type, "UNSUPPORTED_FILE_TYPE") + + def test_get_document_file_not_found(self): + """Test get_document with non-existent file""" + with self.assertRaises(ConfiguratorException) as context: + self.file_io.get_document("", "nonexistent.yaml") + + self.assertIn("File not found", str(context.exception)) + + def test_put_document_yaml(self): + """Test put_document with YAML file""" + result = self.file_io.put_document("", "test.yaml", self.yaml_data) + + self.assertIsInstance(result, File) + self.assertEqual(result.file_name, "test.yaml") + self.assertGreaterEqual(result.size, 0) + + # Verify file content + with open(self.test_yaml_path, 'r') as f: + content = yaml.safe_load(f) + self.assertEqual(content, self.yaml_data) + + def test_put_document_json(self): + """Test put_document with JSON file""" + result = self.file_io.put_document("", "test.json", self.json_data) + + self.assertIsInstance(result, File) + self.assertEqual(result.file_name, "test.json") + self.assertGreaterEqual(result.size, 0) + + # Verify file content + with open(self.test_json_path, 'r') as f: + content = json.load(f) + 
self.assertEqual(content, self.json_data) + + def test_put_document_unsupported_type(self): + """Test put_document with unsupported file type""" + with self.assertRaises(ConfiguratorException) as context: + self.file_io.put_document("", "test.txt", {"data": "test"}) + + self.assertEqual(context.exception.event.type, "UNSUPPORTED_FILE_TYPE") + + def test_delete_document_success(self): + """Test delete_document with existing file""" + # Create test file + with open(self.test_yaml_path, 'w') as f: + f.write("test content") + + result = self.file_io.delete_document("", "test.yaml") + + self.assertEqual(result.status, "SUCCESS") + self.assertFalse(os.path.exists(self.test_yaml_path)) + + def test_delete_document_file_not_found(self): + """Test delete_document with non-existent file""" + result = self.file_io.delete_document("", "nonexistent.yaml") + + self.assertEqual(result.status, "FAILURE") + self.assertIn("File not found", str(result.data)) + + def test_lock_unlock_success(self): + """Test lock_unlock functionality - removed as no longer supported""" + # This test is no longer applicable as we removed lock/unlock functionality + pass + + def test_lock_unlock_file_not_found(self): + """Test lock_unlock with non-existent file - removed as no longer supported""" + # This test is no longer applicable as we removed lock/unlock functionality + pass + + +if __name__ == '__main__': + unittest.main() \ No newline at end of file diff --git a/tests/uitls/test_mongo_io.py b/tests/uitls/test_mongo_io.py new file mode 100644 index 0000000..e9b2a8f --- /dev/null +++ b/tests/uitls/test_mongo_io.py @@ -0,0 +1,223 @@ +""" +MongoIO Integration Tests + +These tests require a running MongoDB instance accessible via the configured connection string. +Tests will create, modify, and delete documents and indexes within a test collection. 
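The `FileIO` tests above establish a small folder-relative API: paths resolve under `Config.INPUT_FOLDER`, YAML and JSON round-trip, and deletes return an event rather than raising. A usage sketch following the same setup the tests use (pointing `INPUT_FOLDER` at a scratch directory first):

```python
import tempfile
from configurator.utils.config import Config
from configurator.utils.file_io import FileIO

# As in the tests: point INPUT_FOLDER at a scratch directory before any I/O.
config = Config.get_instance()
config.INPUT_FOLDER = tempfile.mkdtemp()

file_io = FileIO()
file_io.put_document("", "sample.yaml", {"name": "sample", "value": 42})

print([f.file_name for f in file_io.get_documents("")])   # ['sample.yaml']
print(file_io.get_document("", "sample.yaml"))            # {'name': 'sample', 'value': 42}
print(file_io.delete_document("", "sample.yaml").status)  # SUCCESS
```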
+""" +import unittest +import tempfile +import json +import os +from datetime import datetime +from unittest.mock import patch, Mock +from pymongo import ASCENDING, DESCENDING +from configurator.utils.config import Config +from configurator.utils.mongo_io import MongoIO +from configurator.utils.configurator_exception import ConfiguratorException + + +class TestMongoIO(unittest.TestCase): + + def setUp(self): + """Set up test fixtures.""" + # Reset Config singleton to ensure we get default values, not test file values + Config._instance = None + + os.environ['ENABLE_DROP_DATABASE'] = 'true' + os.environ['BUILT_AT'] = 'Local' + self.config = Config.get_instance() + self.config.initialize() + self.test_collection_name = "test_collection" + + # Create MongoIO instance using config values + self.mongo_io = MongoIO( + self.config.MONGO_CONNECTION_STRING, + self.config.MONGO_DB_NAME + ) + + # Clear any existing test data by dropping the database + try: + self.mongo_io.drop_database() + except Exception as e: + # Database might not exist, which is fine for testing + pass + + # Insert test documents + docs = [ + {"name": "Alpha", "sort_value": 1, "status": "active"}, + {"name": "Bravo", "sort_value": 2, "status": "active"}, + {"name": "Charlie", "sort_value": 3, "status": "inactive"}, + {"name": "Delta", "sort_value": 4, "status": "archived"}, + {"name": "Echo", "sort_value": 5, "status": "active"} + ] + + collection = self.mongo_io.get_collection(self.test_collection_name) + collection.insert_many(docs) + + def tearDown(self): + """Clean up test fixtures.""" + if hasattr(self, 'mongo_io'): + try: + # Drop the test database + self.mongo_io.drop_database() + except: + pass # Database might not drop.. + self.mongo_io.disconnect() + + # Clean up environment variables set in setUp + if 'ENABLE_DROP_DATABASE' in os.environ: + del os.environ['ENABLE_DROP_DATABASE'] + if 'BUILT_AT' in os.environ: + del os.environ['BUILT_AT'] + + def test_connection_and_disconnect(self): + """Test MongoDB connection and disconnection.""" + # Connection is tested in setUp + self.assertIsNotNone(self.mongo_io.client) + self.assertIsNotNone(self.mongo_io.db) + + def test_get_collection(self): + """Test getting a collection.""" + collection = self.mongo_io.get_collection(self.test_collection_name) + self.assertIsNotNone(collection) + self.assertEqual(collection.name, self.test_collection_name) + + def test_get_documents(self): + """Test retrieving documents.""" + # Get all documents + docs = self.mongo_io.get_documents(self.test_collection_name) + self.assertEqual(len(docs), 5) + + # Get documents with match + active_docs = self.mongo_io.get_documents( + self.test_collection_name, + match={"status": "active"} + ) + self.assertEqual(len(active_docs), 3) + + # Get documents with projection + projected_docs = self.mongo_io.get_documents( + self.test_collection_name, + project={"name": 1, "_id": 0} + ) + self.assertEqual(len(projected_docs), 5) + self.assertIn("name", projected_docs[0]) + self.assertNotIn("_id", projected_docs[0]) + + # Get documents with sorting + sorted_docs = self.mongo_io.get_documents( + self.test_collection_name, + sort_by=[("sort_value", DESCENDING)] + ) + self.assertEqual(sorted_docs[0]["sort_value"], 5) + + def test_upsert(self): + """Test upserting documents.""" + # Insert new document + result = self.mongo_io.upsert( + self.test_collection_name, + {"name": "Foxtrot"}, + {"name": "Foxtrot", "sort_value": 6, "status": "active"} + ) + self.assertEqual(result["name"], "Foxtrot") + + # Update existing 
document + result = self.mongo_io.upsert( + self.test_collection_name, + {"name": "Alpha"}, + {"name": "Alpha", "sort_value": 1, "status": "updated"} + ) + self.assertEqual(result["status"], "updated") + + def test_remove_schema_validation(self): + """Test removing schema validation.""" + events = self.mongo_io.remove_schema_validation(self.test_collection_name) + self.assertEqual(len(events), 1) + self.assertEqual(events[0].status, "SUCCESS") + + def test_remove_index(self): + """Test removing an index.""" + # First create an index + collection = self.mongo_io.get_collection(self.test_collection_name) + collection.create_index("name", name="test_index") + + # Then remove it + events = self.mongo_io.remove_index(self.test_collection_name, "test_index") + self.assertEqual(len(events), 1) + self.assertEqual(events[0].status, "SUCCESS") + + def test_execute_migration(self): + """Test executing a migration pipeline.""" + pipeline = [ + {"$match": {"status": "active"}}, + {"$count": "active_count"} + ] + + events = self.mongo_io.execute_migration(self.test_collection_name, pipeline) + self.assertEqual(len(events), 1) + self.assertEqual(events[0].status, "SUCCESS") + + def test_add_index(self): + """Test adding an index.""" + index_spec = { + "name": "test_index", + "key": [("name", ASCENDING)] + } + + events = self.mongo_io.add_index(self.test_collection_name, index_spec) + self.assertEqual(len(events), 1) + self.assertEqual(events[0].status, "SUCCESS") + + def test_apply_schema_validation(self): + """Test applying schema validation.""" + # Create a simple test schema + test_schema = { + "bsonType": "object", + "required": ["name"], + "properties": { + "name": {"bsonType": "string"}, + "sort_value": {"bsonType": "int"}, + "status": {"bsonType": "string"} + } + } + + events = self.mongo_io.apply_schema_validation(self.test_collection_name, test_schema) + self.assertEqual(len(events), 1) + self.assertEqual(events[0].status, "SUCCESS") + + # Verify the event data is the schema dictionary + self.assertEqual(events[0].data, test_schema) + + def test_load_json_data(self): + """Test loading JSON data from file.""" + # Create a temporary JSON file + test_data = [ + {"name": "Test1", "value": 1}, + {"name": "Test2", "value": 2} + ] + + with tempfile.NamedTemporaryFile(mode='w', suffix='.json', delete=False) as f: + json.dump(test_data, f) + temp_file = f.name + + try: + events = self.mongo_io.load_json_data("test_load_collection", temp_file) + self.assertEqual(len(events), 1) + self.assertEqual(events[0].status, "SUCCESS") + self.assertEqual(events[0].data["documents_loaded"], 2) + + # Verify the event data includes the full insert result + self.assertIn("insert_many_result", events[0].data) + self.assertIn("collection", events[0].data) + self.assertIn("data_file", events[0].data) + self.assertIn("documents_loaded", events[0].data) + self.assertEqual(events[0].data["collection"], "test_load_collection") + self.assertEqual(events[0].data["documents_loaded"], 2) + self.assertIn("inserted_ids", events[0].data["insert_many_result"]) + self.assertIn("acknowledged", events[0].data["insert_many_result"]) + finally: + import os + os.unlink(temp_file) + +if __name__ == '__main__': + unittest.main() \ No newline at end of file diff --git a/tests/uitls/test_route_decorator.py b/tests/uitls/test_route_decorator.py new file mode 100644 index 0000000..502248b --- /dev/null +++ b/tests/uitls/test_route_decorator.py @@ -0,0 +1,41 @@ +import unittest +from flask import Flask, jsonify +from 
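As the module docstring warns, these `MongoIO` tests need a live database. A condensed sketch of the round trip the integration tests perform, using only calls the tests themselves make; it assumes a reachable MongoDB at the configured connection string:

```python
from pymongo import DESCENDING
from configurator.utils.config import Config
from configurator.utils.mongo_io import MongoIO

# Requires a reachable MongoDB, exactly like the integration tests above.
config = Config.get_instance()
mongo = MongoIO(config.MONGO_CONNECTION_STRING, config.MONGO_DB_NAME)

mongo.upsert("demo", {"name": "Alpha"}, {"name": "Alpha", "sort_value": 1})
mongo.upsert("demo", {"name": "Bravo"}, {"name": "Bravo", "sort_value": 2})

docs = mongo.get_documents("demo", sort_by=[("sort_value", DESCENDING)])
print(docs[0]["name"])  # Bravo

mongo.disconnect()
```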
configurator.utils.route_decorators import event_route +from configurator.utils.configurator_exception import ConfiguratorException, ConfiguratorEvent + +class TestRouteDecorator(unittest.TestCase): + def setUp(self): + app = Flask(__name__) + self.app = app + + @app.route('/config-exception') + @event_route("TEST-01", "TEST_EVENT", "test config exception") + def config_exception(): + event = ConfiguratorEvent(event_id="TEST-01", event_type="TEST_EVENT") + raise ConfiguratorException("Test error", event) + + @app.route('/generic-exception') + @event_route("TEST-02", "TEST_EVENT", "test generic exception") + def generic_exception(): + raise Exception("Generic error") + + self.client = app.test_client() + + def test_configurator_exception(self): + resp = self.client.get('/config-exception') + self.assertEqual(resp.status_code, 500) + data = resp.get_json() + self.assertIn('id', data) + self.assertEqual(data['id'], 'TEST-01') + self.assertEqual(data['type'], 'TEST_EVENT') + + def test_generic_exception(self): + resp = self.client.get('/generic-exception') + self.assertEqual(resp.status_code, 500) + data = resp.get_json() + self.assertIn('id', data) + self.assertEqual(data['id'], 'TEST-02') + self.assertEqual(data['type'], 'TEST_EVENT') + +if __name__ == '__main__': + unittest.main() \ No newline at end of file diff --git a/tests/uitls/test_version_manager.py b/tests/uitls/test_version_manager.py new file mode 100644 index 0000000..f742777 --- /dev/null +++ b/tests/uitls/test_version_manager.py @@ -0,0 +1,215 @@ +import unittest +from unittest.mock import Mock, patch +from configurator.utils.version_manager import VersionManager +from configurator.utils.configurator_exception import ConfiguratorException +from configurator.utils.mongo_io import MongoIO +from configurator.utils.config import Config +from configurator.utils.version_number import VersionNumber + + +class TestVersionManager(unittest.TestCase): + """Test cases for VersionManager class + NOTE: Config is never mocked in these tests. The real Config singleton is used, and config values are set/reset in setUp/tearDown. 
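The decorator tests above show `event_route` turning any uncaught exception into a 500 whose JSON body is an event dict carrying the decorator's id and type. A usage sketch of that tested behavior (the route path and messages are illustrative):

```python
from flask import Flask
from configurator.utils.route_decorators import event_route

app = Flask(__name__)

@app.route("/api/demo")
@event_route("DEMO-01", "DEMO_EVENT", "demo endpoint")
def demo():
    # Any uncaught exception becomes a 500 with an event-dict body.
    raise Exception("boom")

resp = app.test_client().get("/api/demo")
print(resp.status_code)        # 500
print(resp.get_json()["id"])   # DEMO-01
print(resp.get_json()["type"]) # DEMO_EVENT
```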
+ """ + + def setUp(self): + """Set up test fixtures""" + self.mock_mongo_io = Mock(spec=MongoIO) + self.collection_name = "test_collection" + # Get the actual config value + self.config = Config.get_instance() + self.version_collection_name = self.config.VERSION_COLLECTION_NAME + + def test_get_current_version_no_versions_found(self): + """Test get_current_version when no versions are found in database""" + # Mock empty result from database + self.mock_mongo_io.get_documents.return_value = [] + + result = VersionManager.get_current_version(self.mock_mongo_io, self.collection_name) + + # Verify the expected default version is returned + self.assertIsInstance(result, VersionNumber) + self.assertEqual(str(result), f"{self.collection_name}.0.0.0.yaml") + + # Verify mongo_io.get_documents was called correctly + self.mock_mongo_io.get_documents.assert_called_once_with( + self.version_collection_name, + match={"collection_name": self.collection_name} + ) + + def test_get_current_version_single_version_found(self): + """Test get_current_version when a single version is found""" + # Mock single version document from database + version_doc = {"collection_name": self.collection_name, "current_version": f"{self.collection_name}.1.2.3.4"} + self.mock_mongo_io.get_documents.return_value = [version_doc] + + result = VersionManager.get_current_version(self.mock_mongo_io, self.collection_name) + + # Verify the version is returned as a VersionNumber + self.assertIsInstance(result, VersionNumber) + self.assertEqual(str(result), f"{self.collection_name}.1.2.3.yaml") + + # Verify mongo_io.get_documents was called correctly + self.mock_mongo_io.get_documents.assert_called_once_with( + self.version_collection_name, + match={"collection_name": self.collection_name} + ) + + def test_get_current_version_multiple_versions_found(self): + """Test get_current_version when multiple versions are found (should raise exception)""" + # Mock multiple version documents from database + version_docs = [ + {"collection_name": self.collection_name, "current_version": f"{self.collection_name}.1.2.3.4"}, + {"collection_name": self.collection_name, "current_version": f"{self.collection_name}.1.2.3.5"} + ] + self.mock_mongo_io.get_documents.return_value = version_docs + + # Verify exception is raised + with self.assertRaises(ConfiguratorException) as context: + VersionManager.get_current_version(self.mock_mongo_io, self.collection_name) + + # Verify exception message + self.assertIn(f"Multiple versions found for collection: {self.collection_name}", str(context.exception)) + + # Verify the exception has the correct event data + self.assertEqual(context.exception.event.type, "GET_CURRENT_VERSION") + self.assertEqual(context.exception.event.data, version_docs) + + def test_get_current_version_none_result(self): + """Test get_current_version when database returns None""" + # Mock None result from database + self.mock_mongo_io.get_documents.return_value = None + + result = VersionManager.get_current_version(self.mock_mongo_io, self.collection_name) + + # Verify the expected default version is returned + self.assertIsInstance(result, VersionNumber) + self.assertEqual(str(result), f"{self.collection_name}.0.0.0.yaml") + + def test_update_version_new_version(self): + """Test update_version for a new version""" + # Mock upsert to return a document + mock_version_doc = {"collection_name": self.collection_name, "current_version": f"{self.collection_name}.1.2.3.4"} + self.mock_mongo_io.upsert.return_value = mock_version_doc + + # Mock 
get_current_version to return the updated version + self.mock_mongo_io.get_documents.return_value = [{"current_version": f"{self.collection_name}.1.2.3.4"}] + + result = VersionManager.update_version(self.mock_mongo_io, self.collection_name, f"{self.collection_name}.1.2.3.4") + + # Verify the result + self.assertIsInstance(result, VersionNumber) + self.assertEqual(str(result), f"{self.collection_name}.1.2.3.yaml") + + # Verify upsert was called correctly + self.mock_mongo_io.upsert.assert_called_once_with( + self.version_collection_name, + match={"collection_name": self.collection_name}, + data={"collection_name": self.collection_name, "current_version": f"{self.collection_name}.1.2.3.4"} + ) + + def test_update_version_existing_version(self): + """Test update_version for an existing version""" + # Mock upsert to return a document + mock_version_doc = {"collection_name": self.collection_name, "current_version": f"{self.collection_name}.2.0.0.1"} + self.mock_mongo_io.upsert.return_value = mock_version_doc + + # Mock get_current_version to return the updated version + self.mock_mongo_io.get_documents.return_value = [{"current_version": f"{self.collection_name}.2.0.0.1"}] + + result = VersionManager.update_version(self.mock_mongo_io, self.collection_name, f"{self.collection_name}.2.0.0.1") + + # Verify the result + self.assertIsInstance(result, VersionNumber) + self.assertEqual(str(result), f"{self.collection_name}.2.0.0.yaml") + + # Verify upsert was called correctly + self.mock_mongo_io.upsert.assert_called_once_with( + self.version_collection_name, + match={"collection_name": self.collection_name}, + data={"collection_name": self.collection_name, "current_version": f"{self.collection_name}.2.0.0.1"} + ) + + def test_update_version_invalid_version_format(self): + """Test update_version with invalid version format""" + # Mock upsert to raise an exception due to invalid version + self.mock_mongo_io.upsert.side_effect = Exception("Invalid version format") + + # Verify exception is raised when trying to create VersionNumber with invalid format + with self.assertRaises(ConfiguratorException): + VersionManager.update_version(self.mock_mongo_io, self.collection_name, f"{self.collection_name}.invalid.version") + + def test_get_current_version_with_different_collection_names(self): + """Test get_current_version with different collection names""" + collection_names = ["users", "products", "orders", "inventory"] + + for collection_name in collection_names: + with self.subTest(collection_name=collection_name): + # Mock empty result for each collection + self.mock_mongo_io.get_documents.return_value = [] + + result = VersionManager.get_current_version(self.mock_mongo_io, collection_name) + + # Verify the expected default version is returned + self.assertIsInstance(result, VersionNumber) + self.assertEqual(str(result), f"{collection_name}.0.0.0.yaml") + + # Verify the correct collection name was used in the query + self.mock_mongo_io.get_documents.assert_called_with( + self.version_collection_name, + match={"collection_name": collection_name} + ) + + def test_version_lifecycle(self): + """Test complete version lifecycle: get current, update, get updated""" + # Step 1: Get current version (no versions exist) + self.mock_mongo_io.get_documents.return_value = [] + current_version = VersionManager.get_current_version(self.mock_mongo_io, self.collection_name) + self.assertIsInstance(current_version, VersionNumber) + self.assertEqual(str(current_version), f"{self.collection_name}.0.0.0.yaml") + + # Step 2: 
+    def test_version_lifecycle(self):
+        """Test complete version lifecycle: get current, update, get updated"""
+        # Step 1: Get current version (no versions exist)
+        self.mock_mongo_io.get_documents.return_value = []
+        current_version = VersionManager.get_current_version(self.mock_mongo_io, self.collection_name)
+        self.assertIsInstance(current_version, VersionNumber)
+        self.assertEqual(str(current_version), f"{self.collection_name}.0.0.0.yaml")
+
+        # Step 2: Update to a new version
+        self.mock_mongo_io.upsert.return_value = {"collection_name": self.collection_name, "current_version": f"{self.collection_name}.1.0.0.1"}
+        self.mock_mongo_io.get_documents.return_value = [{"current_version": f"{self.collection_name}.1.0.0.1"}]
+
+        updated_version = VersionManager.update_version(self.mock_mongo_io, self.collection_name, f"{self.collection_name}.1.0.0.1")
+        self.assertIsInstance(updated_version, VersionNumber)
+        self.assertEqual(str(updated_version), f"{self.collection_name}.1.0.0.yaml")
+
+        # Step 3: Get current version again (should return the updated version)
+        self.mock_mongo_io.get_documents.return_value = [{"current_version": f"{self.collection_name}.1.0.0.1"}]
+        final_version = VersionManager.get_current_version(self.mock_mongo_io, self.collection_name)
+        self.assertIsInstance(final_version, VersionNumber)
+        self.assertEqual(str(final_version), f"{self.collection_name}.1.0.0.yaml")
+
+    def test_mongo_io_method_calls(self):
+        """Test that MongoIO methods are called with the correct parameters"""
+        # Test the get_documents call
+        self.mock_mongo_io.get_documents.return_value = []
+        VersionManager.get_current_version(self.mock_mongo_io, self.collection_name)
+
+        # Verify get_documents was called with the correct parameters
+        self.mock_mongo_io.get_documents.assert_called_once_with(
+            self.version_collection_name,
+            match={"collection_name": self.collection_name}
+        )
+
+        # Reset the mock for the next test
+        self.mock_mongo_io.reset_mock()
+
+        # Test the upsert call
+        self.mock_mongo_io.upsert.return_value = {"collection_name": self.collection_name, "current_version": "1.0.0.1"}
+        self.mock_mongo_io.get_documents.return_value = [{"current_version": "1.0.0.1"}]
+
+        VersionManager.update_version(self.mock_mongo_io, self.collection_name, "1.0.0.1")
+
+        # Verify upsert was called with the correct parameters
+        self.mock_mongo_io.upsert.assert_called_once_with(
+            self.version_collection_name,
+            match={"collection_name": self.collection_name},
+            data={"collection_name": self.collection_name, "current_version": "1.0.0.1"}
+        )
+
+
+if __name__ == '__main__':
+    unittest.main()
\ No newline at end of file
diff --git a/tests/uitls/test_version_number.py b/tests/uitls/test_version_number.py
new file mode 100644
index 0000000..31a621b
--- /dev/null
+++ b/tests/uitls/test_version_number.py
@@ -0,0 +1,132 @@
+import unittest
+from configurator.utils.version_number import VersionNumber
+from configurator.utils.configurator_exception import ConfiguratorException
+
+class TestVersionNumber(unittest.TestCase):
+    def test_valid_version(self):
+        """Test valid version string parsing and access."""
+        # Test with collection name and enumerator
+        version = VersionNumber("user.1.2.3.4")
+        self.assertEqual(version.parts[0], "user")  # Collection name
+        self.assertEqual(version.parts[1:], [1, 2, 3, 4])  # Numeric parts
+        self.assertEqual(str(version), "user.1.2.3.yaml")
+        self.assertEqual(version.get_schema_filename(), "user.1.2.3.yaml")
+        self.assertEqual(version.get_enumerator_version(), 4)
+        self.assertEqual(version.get_version_str(), "1.2.3.4")
+
+        # Test with collection name and no enumerator (defaults to 0)
+        version_no_enum = VersionNumber("user.1.2.3")
+        self.assertEqual(version_no_enum.parts[0], "user")  # Collection name
+        self.assertEqual(version_no_enum.parts[1:], [1, 2, 3, 0])  # Numeric parts with default enumerator
+        self.assertEqual(str(version_no_enum), "user.1.2.3.yaml")
+        self.assertEqual(version_no_enum.get_schema_filename(), "user.1.2.3.yaml")
+        self.assertEqual(version_no_enum.get_enumerator_version(), 0)
+        self.assertEqual(version_no_enum.get_version_str(), "1.2.3.0")
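+    # Version strings take the form <collection>.<major>.<minor>.<patch>[.<enumerator>];
+    # a missing enumerator defaults to 0, and str() yields the schema filename.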
+    def test_invalid_format(self):
+        """Test invalid version string formats."""
+        invalid_versions = [
+            "",               # Empty string
+            "1.2.3",          # Too few components (needs at least 4)
+            "1.2.3.4.5.6",    # Too many components
+            "user.a.2.3.4",   # Non-numeric part
+            "user.1.2.3.4.",  # Trailing dot
+            "user..1.2.3.4",  # Double dot
+            ".user.1.2.3.4",  # Leading dot
+            "user.1.2.a.4",   # Non-numeric part in middle
+            "user.1.2.3.a",   # Non-numeric enumerator
+        ]
+
+        for version in invalid_versions:
+            with self.assertRaises(ConfiguratorException, msg=f"Should fail for: {version}"):
+                VersionNumber(version)
+
+    def test_version_comparison(self):
+        """Test version comparison."""
+        v1 = VersionNumber("user.1.2.3.4")
+        v2 = VersionNumber("user.1.2.3.5")
+        v3 = VersionNumber("user.1.2.3.4")
+        v4 = VersionNumber("other.1.2.3.4")
+        v5 = VersionNumber("user.1.2.3")  # Default enumerator 0
+
+        # Test less than
+        self.assertTrue(v1 < v2)
+        self.assertFalse(v2 < v1)
+
+        # Test equality (ignores collection name)
+        self.assertTrue(v1 == v3)
+        self.assertTrue(v1 == v4)  # Same numeric parts, different collection
+        self.assertFalse(v1 == v2)
+
+        # Test with default enumerator
+        self.assertTrue(v5 < v1)  # 1.2.3.0 < 1.2.3.4
+        self.assertTrue(v1 > v5)  # 1.2.3.4 > 1.2.3.0
+
+        # Test greater than
+        self.assertTrue(v2 > v1)
+        self.assertFalse(v1 > v2)
+
+        # Test string comparison
+        self.assertTrue(v1 < "user.1.2.3.5")
+        self.assertTrue(v1 == "other.1.2.3.4")  # Same numeric parts
+
+    def test_get_schema_filename(self):
+        """Test schema filename generation."""
+        version = VersionNumber("user.1.2.3.4")
+        self.assertEqual(version.get_schema_filename(), "user.1.2.3.yaml")
+
+        version2 = VersionNumber("collection.10.20.30.40")
+        self.assertEqual(version2.get_schema_filename(), "collection.10.20.30.yaml")
+
+        # Test with default enumerator
+        version3 = VersionNumber("user.1.2.3")
+        self.assertEqual(version3.get_schema_filename(), "user.1.2.3.yaml")
+
+    def test_get_enumerator_version(self):
+        """Test enumerator version extraction."""
+        version = VersionNumber("user.1.2.3.4")
+        self.assertEqual(version.get_enumerator_version(), 4)
+
+        version2 = VersionNumber("collection.10.20.30.40")
+        self.assertEqual(version2.get_enumerator_version(), 40)
+
+        # Test with default enumerator
+        version3 = VersionNumber("user.1.2.3")
+        self.assertEqual(version3.get_enumerator_version(), 0)
+
+    def test_get_version_str(self):
+        """Test version string without collection name."""
+        version = VersionNumber("user.1.2.3.4")
+        self.assertEqual(version.get_version_str(), "1.2.3.4")
+
+        version2 = VersionNumber("collection.10.20.30.40")
+        self.assertEqual(version2.get_version_str(), "10.20.30.40")
+
+        # Test with default enumerator
+        version3 = VersionNumber("user.1.2.3")
+        self.assertEqual(version3.get_version_str(), "1.2.3.0")
+
+    def test_str_representation(self):
+        """Test string representation."""
+        version = VersionNumber("user.1.2.3.4")
+        self.assertEqual(str(version), "user.1.2.3.yaml")
+
+        # Test with default enumerator
+        version2 = VersionNumber("user.1.2.3")
+        self.assertEqual(str(version2), "user.1.2.3.yaml")
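+    # parts is a five-element list: the collection name (str) followed by
+    # [major, minor, patch, enumerator] as ints, as verified below.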
+    def test_parts_structure(self):
+        """Test the internal parts structure."""
+        version = VersionNumber("user.1.2.3.4")
+        self.assertEqual(len(version.parts), 5)
+        self.assertEqual(version.parts[0], "user")  # Collection name (string)
+        self.assertEqual(version.parts[1:], [1, 2, 3, 4])  # Numeric parts
+
+        # Test with default enumerator
+        version2 = VersionNumber("user.1.2.3")
+        self.assertEqual(len(version2.parts), 5)
+        self.assertEqual(version2.parts[0], "user")  # Collection name (string)
+        self.assertEqual(version2.parts[1:], [1, 2, 3, 0])  # Numeric parts with default enumerator
+
+if __name__ == '__main__':
+    unittest.main()
\ No newline at end of file