{"payload":{"feedbackUrl":"https://github.com/orgs/community/discussions/53140","repo":{"id":245530539,"defaultBranch":"main","name":"operations","ownerLogin":"clearlydefined","currentUserCanPush":false,"isFork":false,"isEmpty":false,"createdAt":"2020-03-06T22:48:41.000Z","ownerAvatar":"https://avatars.githubusercontent.com/u/34137144?v=4","public":true,"private":false,"isOrgOwned":true},"refInfo":{"name":"","listCacheKey":"v0:1726660544.0","currentOid":""},"activityList":{"items":[{"before":null,"after":"6107258e03d2c8b4e1a59525994be9b9fcff9ef6","ref":"refs/heads/roman/regression_testing","pushedAt":"2024-09-18T11:55:44.000Z","pushType":"branch_creation","commitsCount":0,"pusher":{"login":"RomanIakovlev","name":"Roman Iakovlev","path":"/RomanIakovlev","primaryAvatarUrl":"https://avatars.githubusercontent.com/u/2363458?s=80&v=4"},"commit":{"message":"Initial version of harvester regression testing","shortMessageHtmlLink":"Initial version of harvester regression testing"}},{"before":"2873e624237007fad23b017a3e727529bd0a6270","after":"5fcbb12404ebe97941a1bce8a087344f96ae7808","ref":"refs/heads/tests-e2e","pushedAt":"2024-09-10T13:56:42.000Z","pushType":"push","commitsCount":1,"pusher":{"login":"ljones140","name":"Lewis Jones","path":"/ljones140","primaryAvatarUrl":"https://avatars.githubusercontent.com/u/7063028?s=80&v=4"},"commit":{"message":"only have one package to speed up test","shortMessageHtmlLink":"only have one package to speed up test"}},{"before":"eb8bfee8f73a8a49c8da926d694455a753b2a407","after":"2873e624237007fad23b017a3e727529bd0a6270","ref":"refs/heads/tests-e2e","pushedAt":"2024-09-09T14:22:13.000Z","pushType":"push","commitsCount":1,"pusher":{"login":"ljones140","name":"Lewis Jones","path":"/ljones140","primaryAvatarUrl":"https://avatars.githubusercontent.com/u/7063028?s=80&v=4"},"commit":{"message":"prettier","shortMessageHtmlLink":"prettier"}},{"before":"93d8b81218d90a58855dda6c7d61279836f5f2e1","after":"eb8bfee8f73a8a49c8da926d694455a753b2a407","ref":"refs/heads/tests-e2e","pushedAt":"2024-09-09T14:09:01.000Z","pushType":"push","commitsCount":1,"pusher":{"login":"ljones140","name":"Lewis Jones","path":"/ljones140","primaryAvatarUrl":"https://avatars.githubusercontent.com/u/7063028?s=80&v=4"},"commit":{"message":"Just have 2 for quick testing","shortMessageHtmlLink":"Just have 2 for quick testing"}},{"before":null,"after":"93d8b81218d90a58855dda6c7d61279836f5f2e1","ref":"refs/heads/tests-e2e","pushedAt":"2024-09-09T13:30:00.000Z","pushType":"branch_creation","commitsCount":0,"pusher":{"login":"ljones140","name":"Lewis Jones","path":"/ljones140","primaryAvatarUrl":"https://avatars.githubusercontent.com/u/7063028?s=80&v=4"},"commit":{"message":"Remove numpy\n\nGetting differences on the verification","shortMessageHtmlLink":"Remove numpy"}},{"before":"2b1c2cd25f67a7acbfe8954320cefdf3f38f8cda","after":"5d9b5a7d6b5d4db4b4f5322e5cefcd8a13dec6b9","ref":"refs/heads/main","pushedAt":"2024-09-03T15:08:05.000Z","pushType":"pr_merge","commitsCount":2,"pusher":{"login":"elrayle","name":"E. 
Lynette Rayle","path":"/elrayle","primaryAvatarUrl":"https://avatars.githubusercontent.com/u/6855473?s=80&v=4"},"commit":{"message":"Merge pull request #88 from ljones140/patch-1\n\nAdd documentation for new queue connection string env var","shortMessageHtmlLink":"Merge pull request #88 from ljones140/patch-1"}},{"before":"9868ee7c7a3d1da042b11104490153bfe9fa72f4","after":"2b1c2cd25f67a7acbfe8954320cefdf3f38f8cda","ref":"refs/heads/main","pushedAt":"2024-07-24T16:13:22.000Z","pushType":"pr_merge","commitsCount":11,"pusher":{"login":"qtomlinson","name":"Qing Tomlinson","path":"/qtomlinson","primaryAvatarUrl":"https://avatars.githubusercontent.com/u/92891533?s=80&v=4"},"commit":{"message":"Merge pull request #83 from yashkohli88/yk/notices-tests\n\nExpanded Integration Test suite with New Test Cases for Notices API","shortMessageHtmlLink":"Merge pull request #83 from yashkohli88/yk/notices-tests"}},{"before":"f2be76de72b365cac5d0edcd23cebf77e15cdcdf","after":"9868ee7c7a3d1da042b11104490153bfe9fa72f4","ref":"refs/heads/main","pushedAt":"2024-07-24T16:12:42.000Z","pushType":"pr_merge","commitsCount":4,"pusher":{"login":"qtomlinson","name":"Qing Tomlinson","path":"/qtomlinson","primaryAvatarUrl":"https://avatars.githubusercontent.com/u/92891533?s=80&v=4"},"commit":{"message":"Merge pull request #87 from qtomlinson/qt/add_fixture\n\nAdd fixture for pypi package sdbus 0.12.0","shortMessageHtmlLink":"Merge pull request #87 from qtomlinson/qt/add_fixture"}},{"before":"392f8f616c58af838e1bf106e59e2e25da55cfa7","after":"d5cd05e141e61927c0cf73600b2da8dc7c90fae2","ref":"refs/heads/elr/sync-check","pushedAt":"2024-07-24T13:55:27.000Z","pushType":"force_push","commitsCount":0,"pusher":{"login":"elrayle","name":"E. Lynette Rayle","path":"/elrayle","primaryAvatarUrl":"https://avatars.githubusercontent.com/u/6855473?s=80&v=4"},"commit":{"message":"print as CSV when DRYRUN; add total count during range","shortMessageHtmlLink":"print as CSV when DRYRUN; add total count during range"}},{"before":"825987141c310a5611036eecc85f9cc3bbb4b3b6","after":"392f8f616c58af838e1bf106e59e2e25da55cfa7","ref":"refs/heads/elr/sync-check","pushedAt":"2024-07-24T13:48:01.000Z","pushType":"force_push","commitsCount":0,"pusher":{"login":"elrayle","name":"E. Lynette Rayle","path":"/elrayle","primaryAvatarUrl":"https://avatars.githubusercontent.com/u/6855473?s=80&v=4"},"commit":{"message":"print as CSV when DRYRUN; add total count during range","shortMessageHtmlLink":"print as CSV when DRYRUN; add total count during range"}},{"before":"d14d2a6c3771f28517f0df5bb3bc3f841521207b","after":"825987141c310a5611036eecc85f9cc3bbb4b3b6","ref":"refs/heads/elr/sync-check","pushedAt":"2024-07-24T13:45:25.000Z","pushType":"force_push","commitsCount":0,"pusher":{"login":"elrayle","name":"E. Lynette Rayle","path":"/elrayle","primaryAvatarUrl":"https://avatars.githubusercontent.com/u/6855473?s=80&v=4"},"commit":{"message":"print as CSV when DRYRUN; add total count during range","shortMessageHtmlLink":"print as CSV when DRYRUN; add total count during range"}},{"before":"291234c549565967b87555d9c2bde955cf365f49","after":"d14d2a6c3771f28517f0df5bb3bc3f841521207b","ref":"refs/heads/elr/sync-check","pushedAt":"2024-07-24T13:41:13.000Z","pushType":"push","commitsCount":1,"pusher":{"login":"elrayle","name":"E. 
Lynette Rayle","path":"/elrayle","primaryAvatarUrl":"https://avatars.githubusercontent.com/u/6855473?s=80&v=4"},"commit":{"message":"print as CSV when DRYRUN; add total count during range","shortMessageHtmlLink":"print as CSV when DRYRUN; add total count during range"}},{"before":"4165126b8f93d3769356f7a120d0c634fce874e3","after":"291234c549565967b87555d9c2bde955cf365f49","ref":"refs/heads/elr/sync-check","pushedAt":"2024-07-24T12:42:05.000Z","pushType":"force_push","commitsCount":0,"pusher":{"login":"elrayle","name":"E. Lynette Rayle","path":"/elrayle","primaryAvatarUrl":"https://avatars.githubusercontent.com/u/6855473?s=80&v=4"},"commit":{"message":"process updates in batches of 500\n\nBatch processing:\n* updates just the declared license in the DB documents using `collection.bulk_write()`\n* updates denitions using service API `POST /definitions?force=true`\n\n_NOTE: Updating the DB makes the fix of the declared license immediately available. When the `POST /definitions` request completes, the full DB document will be updated to be in sync with the blob definition._\n\nAdditional changes:\n* moves global variable definitions based on .env to the initialize() function\n* adds DRYRUN flag to check what would run and how many records would be evaluated\n* add estimated time to complete\n* adds script and function level documentation\n* includes timestamps to make it easier to estimate how long it will take to complete a run\n* generate filename based on date range and offset to avoid overwriting output files\n\n_NOTE: Azure only supports fetching one blob at a time. Not able to optimize that part of the code. _\n\n_NOTE: Batch size of 500 was selected because that is the max number of coordinates supported in calls to service API `POST /definitions`._","shortMessageHtmlLink":"process updates in batches of 500"}},{"before":"482e8a22ecff3e041020be5a24f716d2bd25c403","after":"4165126b8f93d3769356f7a120d0c634fce874e3","ref":"refs/heads/elr/sync-check","pushedAt":"2024-07-24T12:39:52.000Z","pushType":"force_push","commitsCount":0,"pusher":{"login":"elrayle","name":"E. Lynette Rayle","path":"/elrayle","primaryAvatarUrl":"https://avatars.githubusercontent.com/u/6855473?s=80&v=4"},"commit":{"message":"process updates in batches of 500\n\nBatch processing:\n* updates just the declared license in the DB documents using `collection.bulk_write()`\n* updates denitions using service API `POST /definitions?force=true`\n\n_NOTE: Updating the DB makes the fix of the declared license immediately available. When the `POST /definitions` request completes, the full DB document will be updated to be in sync with the blob definition._\n\nAdditional changes:\n* moves global variable definitions based on .env to the initialize() function\n* adds DRYRUN flag to check what would run and how many records would be evaluated\n* add estimated time to complete\n* adds script and function level documentation\n* includes timestamps to make it easier to estimate how long it will take to complete a run\n* generate filename based on date range and offset to avoid overwriting output files\n\n_NOTE: Azure only supports fetching one blob at a time. Not able to optimize that part of the code. 
* 2024-07-24, 11:56 to 12:39 UTC: E. Lynette Rayle force-pushed `elr/sync-check` five more times (12:39, 12:27, 12:18, 12:01, 11:56) with the same "process updates in batches of 500" commit.
* 2024-07-24 11:42 UTC: E. Lynette Rayle pushed 1 commit to `elr/sync-check`: "Add estimated time to complete".
* 2024-07-24 11:24 UTC and 2024-07-23 18:01 UTC: E. Lynette Rayle force-pushed `elr/sync-check` with an earlier revision of the "process updates in batches of 500" commit (same message, without the estimated-time change).
* 2024-07-23 18:00 UTC: E. Lynette Rayle pushed 1 commit to `elr/sync-check`: "process updates in batches of 500".
* 2024-07-23 12:40 UTC: E. Lynette Rayle pushed 1 commit to `elr/sync-check`: "process using pagination and optionally repair out-of-sync data".
* 2024-07-12 11:32 UTC: E. Lynette Rayle force-pushed `elr/sync-check`: "add ability to have start and stop dates". Per the commit message, this allows a check of a single week, continues to support processing a month at a time, expands support for controlling the function through a .env file, and provides an example .env file (an illustrative configuration sketch appears at the end of this log).
* 2024-07-11 21:55 UTC: Qing Tomlinson merged pull request #80 from qtomlinson/qt/auto-detect-schema-versions into `main` (5 commits): "Add auto detect schema versions".
* 2024-07-11 21:52 UTC: E. Lynette Rayle merged pull request #86 from clearlydefined/elr/deploy-scripts into `main` (2 commits), "move workflow scripts to separate file to allow for testing", and then deleted the `elr/deploy-scripts` branch.
* 2024-07-11 19:49 UTC: E. Lynette Rayle force-pushed `elr/deploy-scripts`: "move workflow scripts to separate file to allow for testing" (update version to v2.0.0).
Lynette Rayle","path":"/elrayle","primaryAvatarUrl":"https://avatars.githubusercontent.com/u/6855473?s=80&v=4"},"commit":{"message":"move workflow scripts to separate file to allow for testing\n\nupdate version to v1.1.1","shortMessageHtmlLink":"move workflow scripts to separate file to allow for testing"}}],"hasNextPage":true,"hasPreviousPage":false,"activityType":"all","actor":null,"timePeriod":"all","sort":"DESC","perPage":30,"cursor":"Y3Vyc29yOnYyOpK7MjAyNC0wOS0xOFQxMTo1NTo0NC4wMDAwMDBazwAAAAS5nuZh","startCursor":"Y3Vyc29yOnYyOpK7MjAyNC0wOS0xOFQxMTo1NTo0NC4wMDAwMDBazwAAAAS5nuZh","endCursor":"Y3Vyc29yOnYyOpK7MjAyNC0wNy0xMVQxOToxNjo1MS4wMDAwMDBazwAAAAR9LhK8"}},"title":"Activity ยท clearlydefined/operations"}