diff --git a/.eslintignore b/.eslintignore new file mode 100644 index 0000000..5a19e8a --- /dev/null +++ b/.eslintignore @@ -0,0 +1,3 @@ +node_modules +dist +coverage \ No newline at end of file diff --git a/.gitignore b/.gitignore new file mode 100644 index 0000000..fde2952 --- /dev/null +++ b/.gitignore @@ -0,0 +1,491 @@ +# Created by .ignore support plugin (hsz.mobi) +### Node template +# Logs +logs +*.log +npm-debug.log* +yarn-debug.log* +yarn-error.log* + +# Runtime data +pids +*.pid +*.seed +*.pid.lock + +# Directory for instrumented libs generated by jscoverage/JSCover +lib-cov + +# Coverage directory used by tools like istanbul +coverage + +# nyc test coverage +.nyc_output + +# Grunt intermediate storage (http://gruntjs.com/creating-plugins#storing-task-files) +.grunt + +# Bower dependency directory (https://bower.io/) +bower_components + +# node-waf configuration +.lock-wscript + +# Compiled binary addons (https://nodejs.org/api/addons.html) +build/Release + +# Dependency directories +node_modules/ +jspm_packages/ + +# Typescript v1 declaration files +typings/ + +# Optional npm cache directory +.npm + +# Optional eslint cache +.eslintcache + +# Optional REPL history +.node_repl_history + +# Output of 'npm pack' +*.tgz + +# Yarn Integrity file +.yarn-integrity + +# dotenv environment variables file +.env +.env.*.local + +# next.js build output +.next +### macOS template +# General +.DS_Store +.AppleDouble +.LSOverride + +# Icon must end with two \r +Icon + +# Thumbnails +._* + +# Files that might appear in the root of a volume +.DocumentRevisions-V100 +.fseventsd +.Spotlight-V100 +.TemporaryItems +.Trashes +.VolumeIcon.icns +.com.apple.timemachine.donotpresent + +# Directories potentially created on remote AFP share +.AppleDB +.AppleDesktop +Network Trash Folder +Temporary Items +.apdisk +### JetBrains template +# Covers JetBrains IDEs: IntelliJ, RubyMine, PhpStorm, AppCode, PyCharm, CLion, Android Studio and Webstorm +# Reference: https://intellij-support.jetbrains.com/hc/en-us/articles/206544839 + +# User-specific stuff: +.idea/**/workspace.xml +.idea/**/tasks.xml +.idea/dictionaries + +# Sensitive or high-churn files: +.idea/**/dataSources/ +.idea/**/dataSources.ids +.idea/**/dataSources.xml +.idea/**/dataSources.local.xml +.idea/**/sqlDataSources.xml +.idea/**/dynamic.xml +.idea/**/uiDesigner.xml + +# Gradle: +.idea/**/gradle.xml +.idea/**/libraries + +# CMake +cmake-build-debug/ +cmake-build-release/ + +# Mongo Explorer plugin: +.idea/**/mongoSettings.xml + +## File-based project format: +*.iws + +## Plugin-specific files: + +# IntelliJ +out/ + +# mpeltonen/sbt-idea plugin +.idea_modules/ + +# JIRA plugin +atlassian-ide-plugin.xml + +# Cursive Clojure plugin +.idea/replstate.xml + +# Crashlytics plugin (for Android Studio and IntelliJ) +com_crashlytics_export_strings.xml +crashlytics.properties +crashlytics-build.properties +fabric.properties +### Windows template +# Windows thumbnail cache files +Thumbs.db +ehthumbs.db +ehthumbs_vista.db + +# Dump file +*.stackdump + +# Folder config file +[Dd]esktop.ini + +# Recycle Bin used on file shares +$RECYCLE.BIN/ + +# Windows Installer files +*.cab +*.msi +*.msm +*.msp + +# Windows shortcuts +*.lnk +### VisualStudio template +## Ignore Visual Studio temporary files, build results, and +## files generated by popular Visual Studio add-ons. 
+## +## Get latest from https://github.com/github/gitignore/blob/master/VisualStudio.gitignore + +# User-specific files +*.suo +*.user +*.userosscache +*.sln.docstates + +# User-specific files (MonoDevelop/Xamarin Studio) +*.userprefs + +# Build results +[Dd]ebug/ +[Dd]ebugPublic/ +[Rr]elease/ +[Rr]eleases/ +x64/ +x86/ +bld/ +[Bb]in/ +[Oo]bj/ +[Ll]og/ + +# Visual Studio 2015/2017 cache/options directory +.vs/ +# Uncomment if you have tasks that create the project's static files in wwwroot +#wwwroot/ + +# Visual Studio 2017 auto generated files +Generated\ Files/ + +# MSTest test Results +[Tt]est[Rr]esult*/ +[Bb]uild[Ll]og.* + +# NUNIT +*.VisualState.xml +TestResult.xml + +# Build Results of an ATL Project +[Dd]ebugPS/ +[Rr]eleasePS/ +dlldata.c + +# Benchmark Results +BenchmarkDotNet.Artifacts/ + +# .NET Core +project.lock.json +project.fragment.lock.json +artifacts/ +**/Properties/launchSettings.json + +# StyleCop +StyleCopReport.xml + +# Files built by Visual Studio +*_i.c +*_p.c +*_i.h +*.ilk +*.meta +*.obj +*.pch +*.pdb +*.pgc +*.pgd +*.rsp +*.sbr +*.tlb +*.tli +*.tlh +*.tmp +*.tmp_proj +*.vspscc +*.vssscc +.builds +*.pidb +*.svclog +*.scc + +# Chutzpah Test files +_Chutzpah* + +# Visual C++ cache files +ipch/ +*.aps +*.ncb +*.opendb +*.opensdf +*.sdf +*.cachefile +*.VC.db +*.VC.VC.opendb + +# Visual Studio profiler +*.psess +*.vsp +*.vspx +*.sap + +# Visual Studio Trace Files +*.e2e + +# TFS 2012 Local Workspace +$tf/ + +# Guidance Automation Toolkit +*.gpState + +# ReSharper is a .NET coding add-in +_ReSharper*/ +*.[Rr]e[Ss]harper +*.DotSettings.user + +# JustCode is a .NET coding add-in +.JustCode + +# TeamCity is a build add-in +_TeamCity* + +# DotCover is a Code Coverage Tool +*.dotCover + +# AxoCover is a Code Coverage Tool +.axoCover/* +!.axoCover/settings.json + +# Visual Studio code coverage results +*.coverage +*.coveragexml + +# NCrunch +_NCrunch_* +.*crunch*.local.xml +nCrunchTemp_* + +# MightyMoose +*.mm.* +AutoTest.Net/ + +# Web workbench (sass) +.sass-cache/ + +# Installshield output folder +[Ee]xpress/ + +# DocProject is a documentation generator add-in +DocProject/buildhelp/ +DocProject/Help/*.HxT +DocProject/Help/*.HxC +DocProject/Help/*.hhc +DocProject/Help/*.hhk +DocProject/Help/*.hhp +DocProject/Help/Html2 +DocProject/Help/html + +# Click-Once directory +publish/ + +# Publish Web Output +*.[Pp]ublish.xml +*.azurePubxml +# Note: Comment the next line if you want to checkin your web deploy settings, +# but database connection strings (with potential passwords) will be unencrypted +*.pubxml +*.publishproj + +# Microsoft Azure Web App publish settings. Comment the next line if you want to +# checkin your Azure Web App publish settings, but sensitive information contained +# in these scripts will be unencrypted +PublishScripts/ + +# NuGet Packages +*.nupkg +# The packages folder can be ignored because of Package Restore +**/[Pp]ackages/* +# except build/, which is used as an MSBuild target. 
+!**/[Pp]ackages/build/ +# Uncomment if necessary however generally it will be regenerated when needed +#!**/[Pp]ackages/repositories.config +# NuGet v3's project.json files produces more ignorable files +*.nuget.props +*.nuget.targets + +# Microsoft Azure Build Output +csx/ +*.build.csdef + +# Microsoft Azure Emulator +ecf/ +rcf/ + +# Windows Store app package directories and files +AppPackages/ +BundleArtifacts/ +Package.StoreAssociation.xml +_pkginfo.txt +*.appx + +# Visual Studio cache files +# files ending in .cache can be ignored +*.[Cc]ache +# but keep track of directories ending in .cache +!*.[Cc]ache/ + +# Others +ClientBin/ +~$* +*~ +*.dbmdl +*.dbproj.schemaview +*.jfm +*.pfx +*.publishsettings +orleans.codegen.cs + +# Including strong name files can present a security risk +# (https://github.com/github/gitignore/pull/2483#issue-259490424) +#*.snk + +# Since there are multiple workflows, uncomment next line to ignore bower_components +# (https://github.com/github/gitignore/pull/1529#issuecomment-104372622) +#bower_components/ + +# RIA/Silverlight projects +Generated_Code/ + +# Backup & report files from converting an old project file +# to a newer Visual Studio version. Backup files are not needed, +# because we have git ;-) +_UpgradeReport_Files/ +Backup*/ +UpgradeLog*.XML +UpgradeLog*.htm + +# SQL Server files +*.mdf +*.ldf +*.ndf + +# Business Intelligence projects +*.rdl.data +*.bim.layout +*.bim_*.settings + +# Microsoft Fakes +FakesAssemblies/ + +# GhostDoc plugin setting file +*.GhostDoc.xml + +# Node.js Tools for Visual Studio +.ntvs_analysis.dat + +# TypeScript v1 declaration files + +# Visual Studio 6 build log +*.plg + +# Visual Studio 6 workspace options file +*.opt + +# Visual Studio 6 auto-generated workspace file (contains which files were open etc.) +*.vbw + +# Visual Studio LightSwitch build output +**/*.HTMLClient/GeneratedArtifacts +**/*.DesktopClient/GeneratedArtifacts +**/*.DesktopClient/ModelManifest.xml +**/*.Server/GeneratedArtifacts +**/*.Server/ModelManifest.xml +_Pvt_Extensions + +# Paket dependency manager +.paket/paket.exe +paket-files/ + +# FAKE - F# Make +.fake/ + +# JetBrains Rider +.idea/ +*.sln.iml + +# CodeRush +.cr/ + +# Python Tools for Visual Studio (PTVS) +__pycache__/ +*.pyc + +# Cake - Uncomment if you are using it +# tools/** +# !tools/packages.config + +# Tabs Studio +*.tss + +# Telerik's JustMock configuration file +*.jmconfig + +# BizTalk build output +*.btp.cs +*.btm.cs +*.odx.cs +*.xsd.cs + +# OpenCover UI analysis results +OpenCover/ + +# Azure Stream Analytics local run output +ASALocalRun/ + +# MSBuild Binary and Structured Log +*.binlog + +.vscode + +.npmrc + +dist/ \ No newline at end of file diff --git a/.prettierignore b/.prettierignore new file mode 100644 index 0000000..53c37a1 --- /dev/null +++ b/.prettierignore @@ -0,0 +1 @@ +dist \ No newline at end of file diff --git a/CONTRIBUTING.md b/CONTRIBUTING.md new file mode 100644 index 0000000..f915048 --- /dev/null +++ b/CONTRIBUTING.md @@ -0,0 +1,31 @@ +# Contributing to the Diia project +We're pleased that you're interested in contributing to the Diia project. At the moment we're welcoming contributions in various forms and we want to make contributing as easy and transparent as possible. You're welcome to contribute in any of the following ways: + +- Reporting a bug +- Discussing the current state of the code +- Proposing new features or ideas + +In the future we'll be considering welcoming code contributions and expanding our contributor community. 
+ +## Report using Issues +We use GitHub issues to track public bugs. Report a bug, feature, idea or open a discussion point by [opening a new issue](../../issues/new); it's that easy! + +For bugs related to vulnerabilities or security concerns please feel free to contact us directly at [modt.opensource@thedigital.gov.ua](mailto:modt.opensource@thedigital.gov.ua). + +We'd also request that you detail bug reports with detail, background and sample code. Typically a great bug report includes: + +- A quick summary and/or background +- Steps to reproduce + - Be specific and provide sample code if you can. +- What you expected would happen +- What actually happens +- Notes (possibly including why you think this might be happening, or stuff you tried that didn't work) + +For ideas, suggestions and discussion you're free to use any format that you find suitable. + +## Licensing +By contributing, you agree that your contributions will be licensed under the EUPL. + +You may obtain a copy of the License at [https://joinup.ec.europa.eu/collection/eupl/eupl-text-eupl-12](https://joinup.ec.europa.eu/collection/eupl/eupl-text-eupl-12). + +Questions regarding the Diia project, the License and any re-use should be directed to [modt.opensource@thedigital.gov.ua](mailto:modt.opensource@thedigital.gov.ua). \ No newline at end of file diff --git a/LICENCE.md b/LICENCE.md new file mode 100644 index 0000000..7dfc687 --- /dev/null +++ b/LICENCE.md @@ -0,0 +1,287 @@ + EUROPEAN UNION PUBLIC LICENCE v. 1.2 + EUPL © the European Union 2007, 2016 + +This European Union Public Licence (the ‘EUPL’) applies to the Work (as defined +below) which is provided under the terms of this Licence. Any use of the Work, +other than as authorised under this Licence is prohibited (to the extent such +use is covered by a right of the copyright holder of the Work). + +The Work is provided under the terms of this Licence when the Licensor (as +defined below) has placed the following notice immediately following the +copyright notice for the Work: + + Licensed under the EUPL + +or has expressed by any other means his willingness to license under the EUPL. + +1. Definitions + +In this Licence, the following terms have the following meaning: + +- ‘The Licence’: this Licence. + +- ‘The Original Work’: the work or software distributed or communicated by the + Licensor under this Licence, available as Source Code and also as Executable + Code as the case may be. + +- ‘Derivative Works’: the works or software that could be created by the + Licensee, based upon the Original Work or modifications thereof. This Licence + does not define the extent of modification or dependence on the Original Work + required in order to classify a work as a Derivative Work; this extent is + determined by copyright law applicable in the country mentioned in Article 15. + +- ‘The Work’: the Original Work or its Derivative Works. + +- ‘The Source Code’: the human-readable form of the Work which is the most + convenient for people to study and modify. + +- ‘The Executable Code’: any code which has generally been compiled and which is + meant to be interpreted by a computer as a program. + +- ‘The Licensor’: the natural or legal person that distributes or communicates + the Work under the Licence. + +- ‘Contributor(s)’: any natural or legal person who modifies the Work under the + Licence, or otherwise contributes to the creation of a Derivative Work. 
+ +- ‘The Licensee’ or ‘You’: any natural or legal person who makes any usage of + the Work under the terms of the Licence. + +- ‘Distribution’ or ‘Communication’: any act of selling, giving, lending, + renting, distributing, communicating, transmitting, or otherwise making + available, online or offline, copies of the Work or providing access to its + essential functionalities at the disposal of any other natural or legal + person. + +2. Scope of the rights granted by the Licence + +The Licensor hereby grants You a worldwide, royalty-free, non-exclusive, +sublicensable licence to do the following, for the duration of copyright vested +in the Original Work: + +- use the Work in any circumstance and for all usage, +- reproduce the Work, +- modify the Work, and make Derivative Works based upon the Work, +- communicate to the public, including the right to make available or display + the Work or copies thereof to the public and perform publicly, as the case may + be, the Work, +- distribute the Work or copies thereof, +- lend and rent the Work or copies thereof, +- sublicense rights in the Work or copies thereof. + +Those rights can be exercised on any media, supports and formats, whether now +known or later invented, as far as the applicable law permits so. + +In the countries where moral rights apply, the Licensor waives his right to +exercise his moral right to the extent allowed by law in order to make effective +the licence of the economic rights here above listed. + +The Licensor grants to the Licensee royalty-free, non-exclusive usage rights to +any patents held by the Licensor, to the extent necessary to make use of the +rights granted on the Work under this Licence. + +3. Communication of the Source Code + +The Licensor may provide the Work either in its Source Code form, or as +Executable Code. If the Work is provided as Executable Code, the Licensor +provides in addition a machine-readable copy of the Source Code of the Work +along with each copy of the Work that the Licensor distributes or indicates, in +a notice following the copyright notice attached to the Work, a repository where +the Source Code is easily and freely accessible for as long as the Licensor +continues to distribute or communicate the Work. + +4. Limitations on copyright + +Nothing in this Licence is intended to deprive the Licensee of the benefits from +any exception or limitation to the exclusive rights of the rights owners in the +Work, of the exhaustion of those rights or of other applicable limitations +thereto. + +5. Obligations of the Licensee + +The grant of the rights mentioned above is subject to some restrictions and +obligations imposed on the Licensee. Those obligations are the following: + +Attribution right: The Licensee shall keep intact all copyright, patent or +trademarks notices and all notices that refer to the Licence and to the +disclaimer of warranties. The Licensee must include a copy of such notices and a +copy of the Licence with every copy of the Work he/she distributes or +communicates. The Licensee must cause any Derivative Work to carry prominent +notices stating that the Work has been modified and the date of modification. + +Copyleft clause: If the Licensee distributes or communicates copies of the +Original Works or Derivative Works, this Distribution or Communication will be +done under the terms of this Licence or of a later version of this Licence +unless the Original Work is expressly distributed only under this version of the +Licence — for example by communicating ‘EUPL v. 
1.2 only’. The Licensee +(becoming Licensor) cannot offer or impose any additional terms or conditions on +the Work or Derivative Work that alter or restrict the terms of the Licence. + +Compatibility clause: If the Licensee Distributes or Communicates Derivative +Works or copies thereof based upon both the Work and another work licensed under +a Compatible Licence, this Distribution or Communication can be done under the +terms of this Compatible Licence. For the sake of this clause, ‘Compatible +Licence’ refers to the licences listed in the appendix attached to this Licence. +Should the Licensee's obligations under the Compatible Licence conflict with +his/her obligations under this Licence, the obligations of the Compatible +Licence shall prevail. + +Provision of Source Code: When distributing or communicating copies of the Work, +the Licensee will provide a machine-readable copy of the Source Code or indicate +a repository where this Source will be easily and freely available for as long +as the Licensee continues to distribute or communicate the Work. + +Legal Protection: This Licence does not grant permission to use the trade names, +trademarks, service marks, or names of the Licensor, except as required for +reasonable and customary use in describing the origin of the Work and +reproducing the content of the copyright notice. + +6. Chain of Authorship + +The original Licensor warrants that the copyright in the Original Work granted +hereunder is owned by him/her or licensed to him/her and that he/she has the +power and authority to grant the Licence. + +Each Contributor warrants that the copyright in the modifications he/she brings +to the Work are owned by him/her or licensed to him/her and that he/she has the +power and authority to grant the Licence. + +Each time You accept the Licence, the original Licensor and subsequent +Contributors grant You a licence to their contributions to the Work, under the +terms of this Licence. + +7. Disclaimer of Warranty + +The Work is a work in progress, which is continuously improved by numerous +Contributors. It is not a finished work and may therefore contain defects or +‘bugs’ inherent to this type of development. + +For the above reason, the Work is provided under the Licence on an ‘as is’ basis +and without warranties of any kind concerning the Work, including without +limitation merchantability, fitness for a particular purpose, absence of defects +or errors, accuracy, non-infringement of intellectual property rights other than +copyright as stated in Article 6 of this Licence. + +This disclaimer of warranty is an essential part of the Licence and a condition +for the grant of any rights to the Work. + +8. Disclaimer of Liability + +Except in the cases of wilful misconduct or damages directly caused to natural +persons, the Licensor will in no event be liable for any direct or indirect, +material or moral, damages of any kind, arising out of the Licence or of the use +of the Work, including without limitation, damages for loss of goodwill, work +stoppage, computer failure or malfunction, loss of data or any commercial +damage, even if the Licensor has been advised of the possibility of such damage. +However, the Licensor will be liable under statutory product liability laws as +far such laws apply to the Work. + +9. Additional agreements + +While distributing the Work, You may choose to conclude an additional agreement, +defining obligations or services consistent with this Licence. 
However, if +accepting obligations, You may act only on your own behalf and on your sole +responsibility, not on behalf of the original Licensor or any other Contributor, +and only if You agree to indemnify, defend, and hold each Contributor harmless +for any liability incurred by, or claims asserted against such Contributor by +the fact You have accepted any warranty or additional liability. + +10. Acceptance of the Licence + +The provisions of this Licence can be accepted by clicking on an icon ‘I agree’ +placed under the bottom of a window displaying the text of this Licence or by +affirming consent in any other similar way, in accordance with the rules of +applicable law. Clicking on that icon indicates your clear and irrevocable +acceptance of this Licence and all of its terms and conditions. + +Similarly, you irrevocably accept this Licence and all of its terms and +conditions by exercising any rights granted to You by Article 2 of this Licence, +such as the use of the Work, the creation by You of a Derivative Work or the +Distribution or Communication by You of the Work or copies thereof. + +11. Information to the public + +In case of any Distribution or Communication of the Work by means of electronic +communication by You (for example, by offering to download the Work from a +remote location) the distribution channel or media (for example, a website) must +at least provide to the public the information requested by the applicable law +regarding the Licensor, the Licence and the way it may be accessible, concluded, +stored and reproduced by the Licensee. + +12. Termination of the Licence + +The Licence and the rights granted hereunder will terminate automatically upon +any breach by the Licensee of the terms of the Licence. + +Such a termination will not terminate the licences of any person who has +received the Work from the Licensee under the Licence, provided such persons +remain in full compliance with the Licence. + +13. Miscellaneous + +Without prejudice of Article 9 above, the Licence represents the complete +agreement between the Parties as to the Work. + +If any provision of the Licence is invalid or unenforceable under applicable +law, this will not affect the validity or enforceability of the Licence as a +whole. Such provision will be construed or reformed so as necessary to make it +valid and enforceable. + +The European Commission may publish other linguistic versions or new versions of +this Licence or updated versions of the Appendix, so far this is required and +reasonable, without reducing the scope of the rights granted by the Licence. New +versions of the Licence will be published with a unique version number. + +All linguistic versions of this Licence, approved by the European Commission, +have identical value. Parties can take advantage of the linguistic version of +their choice. + +14. Jurisdiction + +Without prejudice to specific agreement between parties, + +- any litigation resulting from the interpretation of this License, arising + between the European Union institutions, bodies, offices or agencies, as a + Licensor, and any Licensee, will be subject to the jurisdiction of the Court + of Justice of the European Union, as laid down in article 272 of the Treaty on + the Functioning of the European Union, + +- any litigation arising between other parties and resulting from the + interpretation of this License, will be subject to the exclusive jurisdiction + of the competent court where the Licensor resides or conducts its primary + business. + +15. 
Applicable Law + +Without prejudice to specific agreement between parties, + +- this Licence shall be governed by the law of the European Union Member State + where the Licensor has his seat, resides or has his registered office, + +- this licence shall be governed by Belgian law if the Licensor has no seat, + residence or registered office inside a European Union Member State. + +Appendix + +‘Compatible Licences’ according to Article 5 EUPL are: + +- GNU General Public License (GPL) v. 2, v. 3 +- GNU Affero General Public License (AGPL) v. 3 +- Open Software License (OSL) v. 2.1, v. 3.0 +- Eclipse Public License (EPL) v. 1.0 +- CeCILL v. 2.0, v. 2.1 +- Mozilla Public Licence (MPL) v. 2 +- GNU Lesser General Public Licence (LGPL) v. 2.1, v. 3 +- Creative Commons Attribution-ShareAlike v. 3.0 Unported (CC BY-SA 3.0) for + works other than software +- European Union Public Licence (EUPL) v. 1.1, v. 1.2 +- Québec Free and Open-Source Licence — Reciprocity (LiLiQ-R) or Strong + Reciprocity (LiLiQ-R+). + +The European Commission may update this Appendix to later versions of the above +licences without producing a new version of the EUPL, as long as they provide +the rights granted in Article 2 of this Licence and protect the covered Source +Code from exclusive appropriation. + +All other changes or additions to this Appendix require the production of a new +EUPL version. diff --git a/README.md b/README.md index 8f15017..71d5e72 100644 --- a/README.md +++ b/README.md @@ -1 +1,94 @@ -# be-pkg-redis \ No newline at end of file +# Diia + +This repository provides an overview over the flagship product [**Diia**](https://diia.gov.ua/) developed by the [**Ministry of Digital Transformation of Ukraine**](https://thedigital.gov.ua/). + +**Diia** is an app with access to citizen’s digital documents and government services. + +The application was created so that Ukrainians could interact with the state in a few clicks, without spending their time on queues and paperwork - **Diia** open source application will help countries, companies and communities build a foundation for long-term relationships. At the heart of these relations are openness, efficiency and humanity. + +We're pleased to share the **Diia** project with you. + +## Useful Links + +| Topic | Link | Description | +| --------------------------------------------- | -------------------------- | -------------------------------------------------------------------------- | +| Ministry of Digital Transformation of Ukraine | https://thedigital.gov.ua/ | The Official homepage of the Ministry of Digital Transformation of Ukraine | +| Diia App | https://diia.gov.ua/ | The Official website for the Diia application | + +## Getting Started + +This repository contains the package with redis services - redlock, pubsub, cache and store. + +## Build Process + +### **1. Clone codebase via `git clone` command** + +Example: + +``` +git clone https://github.com/diia-open-source/pkg-redis.git +``` + +--- + +### **2. Go to code base root directory** + +``` +cd ./pkg-redis +``` + +--- + +### **3. Install npm dependencies** + +The installation of dependencies consists of the following 2 steps: + +#### **1. Manually clone, build and link dependencies from `@diia-inhouse` scope** + +Each Diia service depends on dependencies from `@diia-inhouse/` scope which are distributed across different repositories, are built separately, and aren't published into public npm registry. 
+
+The full list of such dependencies can be found in the target service's `package.json` file, in the `dependencies` and `devDependencies` sections respectively.
+
+Detailed instructions on how to link dependencies from the `@diia-inhouse/` scope are described in `LINKDEPS.md`, which can be found here:
+https://github.com/diia-open-source/diia-setup-howto/tree/main/backend
+
+#### **2. Install public npm dependencies and use those linked from `@diia-inhouse` scope**
+
+To install and use the linked dependencies for `pkg-redis`, run the following commands:
+
+```
+$ cd ./pkg-redis
+$ npm link @diia-inhouse/db @diia-inhouse/redis ... @diia-inhouse/<dependency-name>
+```
+
+Once all dependencies from the `@diia-inhouse` scope are linked and can be resolved, the complete set of dependencies for the package code base is installed.
+
+---
+
+### **4. Build the package**
+
+To build the package, run `npm run build` from the root directory of the code base:
+
+```
+$ cd ./pkg-redis
+$ npm run build
+```
+
+---
+
+## How to contribute
+
+The Diia project welcomes contributions to this solution; please refer to the CONTRIBUTING.md file for details.
+
+## Licensing
+
+Copyright (C) Diia and all other contributors.
+
+Licensed under the **EUPL** (the "License"); you may not use this file except in compliance with the License. Re-use is permitted, although not encouraged, under the EUPL, with the exception of source files that contain a different license.
+
+You may obtain a copy of the License at [https://joinup.ec.europa.eu/collection/eupl/eupl-text-eupl-12](https://joinup.ec.europa.eu/collection/eupl/eupl-text-eupl-12).
+
+Questions regarding the Diia project, the License and any re-use should be directed to [modt.opensource@thedigital.gov.ua](mailto:modt.opensource@thedigital.gov.ua).
+
+This project incorporates third-party material. In all cases the original copyright notices and the license under which these third-party dependencies were provided remain in place. In relation to the TypeScript dependency you should also review the [TypeScript Third Party Notices](
+https://github.com/microsoft/TypeScript/blob/9684ba6b0d73c37546ada901e5d0a5324de7fc1d/ThirdPartyNoticeText.txt).
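For orientation, the sketch below shows how the store and redlock services introduced by this change could be wired together once the package is built and linked. It is illustrative only: the `{ port: 6379 }` connection options mirror the integration tests in this diff (a local single-node Redis), whereas real deployments would pass sentinel options through `RedisConfig`; the key names and the `example` function are made up for the demonstration.

```typescript
import DiiaLogger from '@diia-inhouse/diia-logger'

import { RedisConfig, RedlockService, StoreService, StoreTag } from '@diia-inhouse/redis'

const logger = new DiiaLogger()

// Assumed local single-node setup, as in the integration tests; production configs normally use sentinels
const config: RedisConfig = { readWrite: { port: 6379 }, readOnly: { port: 6379 } }

async function example(): Promise<void> {
    const store = new StoreService(config, logger)

    // Store a value for one minute (ttl is in milliseconds) and tag it so it can be invalidated later
    await store.set('faq:main', JSON.stringify({ items: [] }), { ttl: 60_000, tags: [StoreTag.Faq] })
    const cached = await store.getUsingTags('faq:main')
    logger.info(`cached value: ${cached}`)

    // Bumping a tag invalidates every key that was stored with that tag
    await store.bumpTags([StoreTag.Faq])

    // Distributed lock around a critical section
    const redlock = new RedlockService(config, logger)
    const lock = await redlock.lock('jobs:sync', 30_000)
    try {
        // ... work that must not run concurrently goes here
    } finally {
        await lock.release()
    }
}

example().catch((err) => logger.error('example failed', { err }))
```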
diff --git a/package.json b/package.json
new file mode 100644
index 0000000..5cdcf5c
--- /dev/null
+++ b/package.json
@@ -0,0 +1,91 @@
+{
+    "name": "@diia-inhouse/redis",
+    "version": "2.4.0",
+    "description": "Redis services - redlock, pubsub, cache and store",
+    "main": "dist/index.js",
+    "types": "dist/types/index.d.ts",
+    "repository": "https://github.com/diia-open-source/be-pkg-redis.git",
+    "author": "Diia",
+    "license": "SEE LICENCE IN LICENCE.md",
+    "files": [
+        "dist"
+    ],
+    "engines": {
+        "node": ">=18"
+    },
+    "scripts": {
+        "prebuild": "rimraf dist",
+        "build": "tsc",
+        "semantic-release": "semantic-release",
+        "start": "npm run build && node dist/index.js",
+        "lint": "eslint --ext .ts . && prettier --check .",
+        "lint-fix": "eslint '*/**/*.{js,ts}' --fix && prettier --write .",
+        "lint:lockfile": "lockfile-lint --path package-lock.json --allowed-hosts registry.npmjs.org --validate-https",
+        "prepare": "npm run build",
+        "test": "jest",
+        "test:coverage": "jest --coverage",
+        "test:unit": "npm run test --selectProjects unit --",
+        "test:integration": "npm run test --selectProjects integration --",
+        "find-circulars": "madge --circular --extensions ts ./"
+    },
+    "resolutions": {
+        "@babel/traverse": "7.23.2"
+    },
+    "dependencies": {
+        "ioredis": "5.3.2",
+        "redis-semaphore": "5.5.0"
+    },
+    "peerDependencies": {
+        "@diia-inhouse/env": ">=1.2.0",
+        "@diia-inhouse/errors": ">=1.4.2",
+        "@diia-inhouse/types": ">=3.50.0",
+        "@diia-inhouse/utils": ">=2.22.1",
+        "@diia-inhouse/validators": ">=1.6.1"
+    },
+    "devDependencies": {
+        "@diia-inhouse/configs": "1.26.3",
+        "@diia-inhouse/crypto": "1.7.0",
+        "@diia-inhouse/diia-logger": "2.10.0",
+        "@diia-inhouse/env": "1.4.0",
+        "@diia-inhouse/errors": "1.6.1",
+        "@diia-inhouse/eslint-config": "3.4.0",
+        "@diia-inhouse/test": "5.1.0",
+        "@diia-inhouse/types": "4.25.0",
+        "@diia-inhouse/utils": "2.33.1",
+        "@diia-inhouse/validators": "1.12.1",
+        "@types/node": "20.8.6",
+        "protobufjs": "7.2.5"
+    },
+    "release": {
+        "extends": "@diia-inhouse/configs/dist/semantic-release/package",
+        "branches": [
+            "main"
+        ]
+    },
+    "commitlint": {
+        "extends": "@diia-inhouse/configs/dist/commitlint"
+    },
+    "eslintConfig": {
+        "extends": "@diia-inhouse/eslint-config",
+        "overrides": [
+            {
+                "files": [
+                    "*.ts"
+                ],
+                "parserOptions": {
+                    "project": [
+                        "./tsconfig.json",
+                        "./tests/tsconfig.json"
+                    ]
+                }
+            }
+        ]
+    },
+    "jest": {
+        "preset": "@diia-inhouse/configs/dist/jest"
+    },
+    "prettier": "@diia-inhouse/eslint-config/prettier",
+    "madge": {
+        "tsConfig": "./tsconfig.json"
+    }
+}
diff --git a/src/index.ts b/src/index.ts
new file mode 100644
index 0000000..e06e839
--- /dev/null
+++ b/src/index.ts
@@ -0,0 +1,3 @@
+export * from './interfaces'
+
+export * from './services'
diff --git a/src/interfaces/cache.ts b/src/interfaces/cache.ts
new file mode 100644
index 0000000..bf9b8c3
--- /dev/null
+++ b/src/interfaces/cache.ts
@@ -0,0 +1,14 @@
+import { RedisKey, RedisValue } from 'ioredis'
+
+import { CacheStatus } from './redis'
+
+export interface CacheProvider {
+    get(key: RedisKey): Promise<string | null>
+    set(key: RedisKey, data: RedisValue, expiration: number): Promise<string>
+    getKeysByPattern(pattern: string): Promise<string[]>
+    getByKeys(keys: string[]): Promise<(null | string)[]>
+    remove(...key: string[]): Promise<number>
+    getStatus(): CacheStatus
+}
+
+export type CacheStatusResult = { redis: CacheStatus }
diff --git a/src/interfaces/deps.ts b/src/interfaces/deps.ts
new file mode 100644
index 0000000..85f2666
--- /dev/null
+++ b/src/interfaces/deps.ts
@@ -0,0 +1,8 @@
+import { CacheService, PubSubService, RedlockService, StoreService } from '../services'
+
+export type RedisDeps = {
+    cache?: CacheService
+    pubsub?: PubSubService
+    redlock?: RedlockService
+    store?: StoreService
+}
diff --git a/src/interfaces/index.ts b/src/interfaces/index.ts
new file mode 100644
index 0000000..2f664f4
--- /dev/null
+++ b/src/interfaces/index.ts
@@ -0,0 +1,11 @@
+export * from './deps'
+
+export * from './cache'
+
+export * from './pubsub'
+
+export * from './redis'
+
+export * from './mutex'
+
+export * from './store'
diff --git a/src/interfaces/mutex.ts b/src/interfaces/mutex.ts
new file mode 100644
index 0000000..1372d30
--- /dev/null
+++ b/src/interfaces/mutex.ts
@@ -0,0 +1,3 @@
+import { RedlockMutex } from 'redis-semaphore'
+
+export type Lock = RedlockMutex
diff --git a/src/interfaces/pubsub.ts b/src/interfaces/pubsub.ts
new file mode 100644
index 0000000..5993a91
--- /dev/null
+++ b/src/interfaces/pubsub.ts
@@ -0,0 +1,17 @@
+import { RedisStatus } from './redis'
+
+export interface PubSubStatus {
+    pub: RedisStatus
+    sub: RedisStatus
+}
+
+export type PubSubStatusResult = { redis: PubSubStatus }
+
+export type MessageHandler = (message: string) => Promise<void>
+
+export interface PubSubServiceProvider {
+    unsubscribe(channel: string): Promise<unknown>
+    publish(channel: string, data: unknown): Promise<number>
+    onceChannelMessage(channel: string, handler: MessageHandler): Promise<void>
+    getStatus(): PubSubStatus
+}
diff --git a/src/interfaces/redis.ts b/src/interfaces/redis.ts
new file mode 100644
index 0000000..4a7ec2a
--- /dev/null
+++ b/src/interfaces/redis.ts
@@ -0,0 +1,18 @@
+import { RedisOptions } from 'ioredis'
+
+export enum RedisStatusValue {
+    Ready = 'ready',
+}
+
+export type RedisStatus = RedisStatusValue | string
+
+export interface RedisConfig {
+    readWrite: RedisOptions
+    readOnly: RedisOptions
+    enablePubsub?: boolean
+}
+
+export interface CacheStatus {
+    readWrite: RedisStatus
+    readOnly: RedisStatus
+}
diff --git a/src/interfaces/store.ts b/src/interfaces/store.ts
new file mode 100644
index 0000000..4cc7a56
--- /dev/null
+++ b/src/interfaces/store.ts
@@ -0,0 +1,26 @@
+import { CacheStatus } from './redis'
+
+export enum StoreTag {
+    PublicService = 'publicService',
+    PublicServiceCategory = 'publicServiceCategory',
+    Faq = 'faq',
+    ErrorTemplate = 'errorTemplate',
+    MilitaryBondsName = 'militaryBondsName',
+}
+
+export type TagsConfig = {
+    [tag in StoreTag]?: number
+}
+
+export interface TaggedStoreValue {
+    data: string
+    timestamp: number
+    tags: StoreTag[]
+}
+
+export interface SetValueOptions {
+    ttl?: number
+    tags?: StoreTag[]
+}
+
+export type StoreStatusResult = { store: CacheStatus }
diff --git a/src/services/cache.ts b/src/services/cache.ts
new file mode 100644
index 0000000..efb3adb
--- /dev/null
+++ b/src/services/cache.ts
@@ -0,0 +1,89 @@
+import { RedisKey, RedisValue } from 'ioredis'
+
+import { EnvService } from '@diia-inhouse/env'
+import { HealthCheckResult, HttpStatusCode, Logger, OnHealthCheck } from '@diia-inhouse/types'
+
+import { CacheProvider, CacheStatusResult } from '../interfaces/cache'
+import { CacheStatus, RedisConfig, RedisStatusValue } from '../interfaces/redis'
+
+import { RedisCacheProvider } from './providers/cache'
+
+/**
+ * @deprecated StoreService class should be used instead of this one
+ */
+export class CacheService implements OnHealthCheck {
+    private readonly defaultExpiration: number = 60 * 60 * 3 // 3 hours
+
+    private readonly provider: CacheProvider
+
+    constructor(
+        private readonly redisConfig: RedisConfig,
+
+        private readonly envService: EnvService,
+        private readonly logger: Logger,
+    ) {
+        this.provider = new RedisCacheProvider(this.redisConfig, logger)
+    }
+
+    async get(key: RedisKey): Promise<string | null> {
+        const mappedKey: string = this.addPrefix(key)
+
+        try {
+            const result: string | null = await this.provider.get(mappedKey)
+
+            return result
+        } catch (err) {
+            this.logger.error('Failed to get cached value from a provider', { err })
+
+            throw err
+        }
+    }
+
+    async set(key: RedisKey, data: RedisValue, expiration: number = this.defaultExpiration): Promise<string> {
+        const mappedKey: string = this.addPrefix(key)
+        const result: string = await this.provider.set(mappedKey, data, expiration)
+
+        return result
+    }
+
+    async
getKeysByPattern(pattern: string): Promise { + return await this.provider.getKeysByPattern(this.addPrefix(pattern)) + } + + async getByKeys(keys: string[]): Promise<(string | null)[]> { + try { + return await this.provider.getByKeys(keys) + } catch (err) { + this.logger.error('Failed to get cached value from a provider by keys', { err }) + + throw err + } + } + + async remove(key: string): Promise { + const result: number = await this.provider.remove(this.addPrefix(key)) + + return result + } + + async onHealthCheck(): Promise> { + const cacheStatus: CacheStatus = this.provider.getStatus() + + const status: HttpStatusCode = Object.values(cacheStatus).some((s) => s !== RedisStatusValue.Ready) + ? HttpStatusCode.SERVICE_UNAVAILABLE + : HttpStatusCode.OK + + return { + status, + details: { redis: cacheStatus }, + } + } + + private addPrefix(key: RedisKey): string { + if (this.envService.isTest()) { + return `test.${key}` + } + + return key.toString() + } +} diff --git a/src/services/index.ts b/src/services/index.ts new file mode 100644 index 0000000..e4ad2ca --- /dev/null +++ b/src/services/index.ts @@ -0,0 +1,9 @@ +export * from './cache' + +export * from './pubsub' + +export * from './redis' + +export * from './store' + +export * from './mutex' diff --git a/src/services/mutex.ts b/src/services/mutex.ts new file mode 100644 index 0000000..4138f72 --- /dev/null +++ b/src/services/mutex.ts @@ -0,0 +1,40 @@ +import Redis from 'ioredis' +import { RedlockMutex } from 'redis-semaphore' + +import { Logger } from '@diia-inhouse/types' + +import { RedisConfig } from '../interfaces/redis' + +import { RedisService } from './redis' + +export class RedlockService { + private clientRW: Redis + + constructor( + private readonly storeConfig: RedisConfig, + + private readonly logger: Logger, + ) { + const { readWrite } = this.storeConfig + + this.clientRW = RedisService.createClient(readWrite) + + this.clientRW.on('connect', () => { + this.logger.info(`Redis REDLOCK READ-WRITE connection open to ${JSON.stringify(readWrite.sentinels)}`) + }) + + this.clientRW.on('error', (err: Error) => { + this.logger.info('Redis REDLOCK READ-WRITE connection error ', { err }) + this.logger.info(`Redis Path ${JSON.stringify(readWrite.sentinels)}`) + }) + } + + async lock(resource: string, ttl = 60000): Promise { + this.logger.info(`Start LOCK resource [${resource}] for ttl [${ttl}]`) + const mutex = new RedlockMutex([this.clientRW], resource, { lockTimeout: ttl, acquireTimeout: ttl * 2 }) + + await mutex.acquire() + + return mutex + } +} diff --git a/src/services/providers/cache.ts b/src/services/providers/cache.ts new file mode 100644 index 0000000..b1cdc62 --- /dev/null +++ b/src/services/providers/cache.ts @@ -0,0 +1,70 @@ +import Redis, { RedisKey, RedisValue } from 'ioredis' + +import { Logger } from '@diia-inhouse/types' + +import { CacheProvider } from '../../interfaces/cache' +import { CacheStatus, RedisConfig } from '../../interfaces/redis' +import { RedisService } from '../redis' + +export class RedisCacheProvider implements CacheProvider { + private clientRW: Redis + + private clientRO: Redis + + constructor( + { readWrite, readOnly }: RedisConfig, + + private readonly logger: Logger, + ) { + this.clientRW = RedisService.createClient(readWrite) + this.clientRO = RedisService.createClient(readOnly) + + this.clientRW.on('connect', () => { + this.logger.info(`Redis READ-WRITE connection open to ${JSON.stringify(readWrite.sentinels)}`) + }) + + this.clientRW.on('error', (err: Error) => { + this.logger.error('Redis 
READ-WRITE connection error ', { err }) + }) + + this.clientRO.on('connect', () => { + this.logger.info(`Redis READ-ONLY connection open to ${JSON.stringify(readOnly.sentinels)}`) + }) + + this.clientRO.on('error', (err: Error) => { + this.logger.error('Redis READ-ONLY connection error ', { err }) + }) + } + + async get(key: string): Promise { + return await this.clientRO.get(key) + } + + async set(key: RedisKey, data: RedisValue, expiration: number): Promise { + const result: string = await this.clientRW.set(key, data) + if (expiration !== -1) { + await this.clientRW.expire(key, expiration) + } + + return result + } + + async getKeysByPattern(pattern: string): Promise { + return await this.clientRO.keys(pattern) + } + + async getByKeys(keys: string[]): Promise<(string | null)[]> { + return await this.clientRO.mget(keys) + } + + async remove(...key: string[]): Promise { + return await this.clientRW.del(...key) + } + + getStatus(): CacheStatus { + return { + readWrite: this.clientRW.status, + readOnly: this.clientRO.status, + } + } +} diff --git a/src/services/providers/pubsub.ts b/src/services/providers/pubsub.ts new file mode 100644 index 0000000..94c5a4d --- /dev/null +++ b/src/services/providers/pubsub.ts @@ -0,0 +1,83 @@ +import Redis from 'ioredis' + +import { Logger } from '@diia-inhouse/types' + +import { MessageHandler, PubSubServiceProvider, PubSubStatus } from '../../interfaces/pubsub' +import { RedisConfig } from '../../interfaces/redis' +import { RedisService } from '../redis' + +export class PubSubProvider implements PubSubServiceProvider { + private pub: Redis + + private sub: Redis + + private readonly handlerByChannel: Record = {} + + constructor( + { readWrite, readOnly }: RedisConfig, + + private readonly logger: Logger, + ) { + this.pub = RedisService.createClient(readWrite) + this.sub = RedisService.createClient({ ...readOnly, autoResubscribe: true }) + + this.pub.on('connect', () => { + this.logger.info(`Redis READ-WRITE pub connection open to ${JSON.stringify(readWrite.sentinels)}`) + }) + + this.pub.on('error', (err: Error) => { + this.logger.error('Redis READ-WRITE pub connection error ', { err }) + }) + + this.sub.on('connect', () => { + this.logger.info(`Redis READ-ONLY sub connection open to ${JSON.stringify(readOnly.sentinels)}`) + }) + + this.sub.on('error', (err: Error) => { + this.logger.error('Redis READ-ONLY sub connection error ', { err }) + }) + + this.sub.on('message', async (channel: string, message: string) => { + const handler = this.handlerByChannel[channel] + if (!handler) { + return this.logger.error(`Could not find a message handler for the channel ${channel}`) + } + + delete this.handlerByChannel[channel] + + await this.sub.unsubscribe(channel) + + try { + await handler(message) + } catch (err) { + this.logger.error(`Failed to handle message from the channel ${channel}`, { err }) + } + }) + } + + async unsubscribe(channel: string): Promise { + delete this.handlerByChannel[channel] + + return await this.sub.unsubscribe(channel) + } + + async publish(channel: string, data: unknown): Promise { + return await this.pub.publish(channel, JSON.stringify(data)) + } + + async onceChannelMessage(channel: string, handler: MessageHandler): Promise { + if (Object.keys(this.handlerByChannel).includes(channel)) { + throw new Error(`Handler already exists by the provided channel ${channel}`) + } + + this.handlerByChannel[channel] = handler + await this.sub.subscribe(channel) + } + + getStatus(): PubSubStatus { + return { + pub: this.pub.status, + sub: 
this.sub.status, + } + } +} diff --git a/src/services/pubsub.ts b/src/services/pubsub.ts new file mode 100644 index 0000000..d2624b1 --- /dev/null +++ b/src/services/pubsub.ts @@ -0,0 +1,43 @@ +import { HealthCheckResult, HttpStatusCode, Logger, OnHealthCheck } from '@diia-inhouse/types' + +import { MessageHandler, PubSubStatus, PubSubStatusResult } from '../interfaces/pubsub' +import { RedisConfig, RedisStatusValue } from '../interfaces/redis' + +import { PubSubProvider } from './providers/pubsub' + +export class PubSubService implements OnHealthCheck { + private readonly provider: PubSubProvider + + constructor( + private readonly redisConfig: RedisConfig, + + private readonly logger: Logger, + ) { + this.provider = new PubSubProvider(this.redisConfig, this.logger) + } + + async unsubscribe(channel: string): Promise { + return await this.provider.unsubscribe(channel) + } + + async publish(channel: string, data: unknown): Promise { + return await this.provider.publish(channel, data) + } + + onceChannelMessage(channel: string, handler: MessageHandler): Promise { + return this.provider.onceChannelMessage(channel, handler) + } + + async onHealthCheck(): Promise> { + const pubSubStatus: PubSubStatus = this.provider.getStatus() + + const status: HttpStatusCode = Object.values(pubSubStatus).some((s) => s !== RedisStatusValue.Ready) + ? HttpStatusCode.SERVICE_UNAVAILABLE + : HttpStatusCode.OK + + return { + status, + details: { redis: pubSubStatus }, + } + } +} diff --git a/src/services/redis.ts b/src/services/redis.ts new file mode 100644 index 0000000..be16db8 --- /dev/null +++ b/src/services/redis.ts @@ -0,0 +1,12 @@ +import Redis, { RedisOptions } from 'ioredis' + +export class RedisService { + static createClient(options: RedisOptions): Redis { + const redisOptions: RedisOptions = { + enableAutoPipelining: true, + ...options, + } + + return new Redis(redisOptions) + } +} diff --git a/src/services/store.ts b/src/services/store.ts new file mode 100644 index 0000000..8bbade1 --- /dev/null +++ b/src/services/store.ts @@ -0,0 +1,169 @@ +import Redis from 'ioredis' + +import { ServiceUnavailableError } from '@diia-inhouse/errors' +import { HealthCheckResult, HttpStatusCode, Logger, OnHealthCheck } from '@diia-inhouse/types' + +import { CacheStatus, RedisConfig, RedisStatusValue } from '../interfaces/redis' +import { SetValueOptions, StoreStatusResult, StoreTag, TaggedStoreValue, TagsConfig } from '../interfaces/store' + +import { RedisService } from './redis' + +export class StoreService implements OnHealthCheck { + private clientRW: Redis + + private clientRO: Redis + + private tagsKey = '_tags' + + constructor( + private readonly storeConfig: RedisConfig, + + private readonly logger: Logger, + ) { + const { readWrite, readOnly } = this.storeConfig + + this.clientRW = RedisService.createClient(readWrite) + this.clientRO = RedisService.createClient(readOnly) + + this.clientRW.on('connect', () => { + this.logger.info(`Store READ-WRITE connection open to ${JSON.stringify(readWrite.sentinels)}`) + }) + + this.clientRW.on('error', (err: Error) => { + this.logger.info('Store READ-WRITE connection error ', { err }) + this.logger.info(`Store Path ${JSON.stringify(readWrite.sentinels)}`) + }) + + this.clientRO.on('connect', () => { + this.logger.info(`Store READ-ONLY connection open to ${JSON.stringify(readOnly.sentinels)}`) + }) + + this.clientRO.on('error', (err: Error) => { + this.logger.info('Store READ-ONLY connection error ', { err }) + this.logger.info(`Store Path 
${JSON.stringify(readOnly.sentinels)}`) + }) + } + + async get(key: string): Promise { + return await this.clientRO.get(key) + } + + async mget(...keys: string[]): Promise<(string | null)[]> { + return await this.clientRO.mget(keys) + } + + async getUsingTags(key: string): Promise { + const [cachedValue, tagsValue] = await this.clientRO.mget(key, this.tagsKey) + if (!cachedValue) { + return null + } + + const tagsConfig: TagsConfig = tagsValue ? JSON.parse(tagsValue) : {} + + try { + const item: TaggedStoreValue = JSON.parse(cachedValue) + if (Array.isArray(item?.tags)) { + const isValid: boolean = this.validate(item, tagsConfig) + if (isValid) { + return item.data + } + } + + return null + } catch (err) { + if (err instanceof Error) { + this.logger.error('Failed when parse value with tags', { err }) + } + + throw new ServiceUnavailableError() + } + } + + async set(key: string, value: string, options: SetValueOptions = {}): Promise<'OK' | null> { + const { ttl, tags } = options + + if (tags?.length) { + value = await this.wrapValueWithMetadata(value, tags) + } + + if (ttl) { + return await this.clientRW.set(key, value, 'PX', ttl) // milliseconds + } + + return await this.clientRW.set(key, value) + } + + async keys(pattern: string): Promise { + return await this.clientRW.keys(pattern) + } + + async remember(key: string, closure: () => Promise, options: SetValueOptions = {}): Promise { + const cachedValue = await this.get(key) + if (cachedValue) { + return cachedValue + } + + const result = await closure() + + await this.set(key, result || '', options) + + return result + } + + async remove(...keys: string[]): Promise { + return await this.clientRW.del(...keys) + } + + async onHealthCheck(): Promise> { + const storeStatus: CacheStatus = { + readWrite: this.clientRW.status, + readOnly: this.clientRO.status, + } + + const status: HttpStatusCode = Object.values(storeStatus).some((s) => s !== RedisStatusValue.Ready) + ? HttpStatusCode.SERVICE_UNAVAILABLE + : HttpStatusCode.OK + + return { + status, + details: { store: storeStatus }, + } + } + + async bumpTags(tags: StoreTag[]): Promise<'OK' | null> { + const tagsValue = await this.clientRO.get(this.tagsKey) + const tagsConfig: TagsConfig = tagsValue ? JSON.parse(tagsValue) : {} + const timestamp: number = Date.now() + + tags.forEach((tagKey) => (tagsConfig[tagKey] = timestamp)) + + return await this.clientRW.set(this.tagsKey, JSON.stringify(tagsConfig)) + } + + async flushDb(): Promise<'OK'> { + return await this.clientRW.flushdb() + } + + private validate({ tags, timestamp }: TaggedStoreValue, tagsConfig: TagsConfig): boolean { + const tagTimestamps: number[] = Object.entries(tagsConfig) + .filter(([tag]) => tags.includes(tag)) + .map(([, tagTimestamp]) => tagTimestamp) + + return tagTimestamps.every((tagTimestamp) => tagTimestamp <= timestamp) + } + + private async wrapValueWithMetadata(data: string, tags: StoreTag[]): Promise { + const tagsValue = await this.clientRO.get(this.tagsKey) + const tagsConfig: TagsConfig = tagsValue ? JSON.parse(tagsValue) : {} + + const tagTimestamps: number[] = Object.entries(tagsConfig) + .filter(([tag]) => tags.includes(tag)) + .map(([, tagTimestamp]) => tagTimestamp) + + const timestamp: number = tagTimestamps.length ? 
Math.max(...tagTimestamps) : 0 + + const wrappedValue: TaggedStoreValue = { data, tags, timestamp } + + return JSON.stringify(wrappedValue) + } +} diff --git a/tests/integration/store.spec.ts b/tests/integration/store.spec.ts new file mode 100644 index 0000000..4af6fff --- /dev/null +++ b/tests/integration/store.spec.ts @@ -0,0 +1,193 @@ +import { randomUUID } from 'crypto' + +import DiiaLogger from '@diia-inhouse/diia-logger' +import { ServiceUnavailableError } from '@diia-inhouse/errors' + +import { StoreService, StoreTag } from '../../src/index' + +let store: StoreService + +describe(`${StoreService.name} service`, () => { + beforeEach(async () => { + const logger = new DiiaLogger() + + store = new StoreService({ readWrite: { port: 6379 }, readOnly: { port: 6379 } }, logger) + }) + + const key = 'key' + const value = 'value' + + afterEach(async () => { + await store.flushDb() + }) + + describe('Basic operations with store', () => { + describe('get value', () => { + it('gets null if key does not exist', async () => { + // Act + const res = await store.get(key) + + expect(res).toBeNull() + }) + + it('gets value if key exists', async () => { + await store.set(key, value) + + // Act + const res = await store.get(key) + + expect(res).toEqual(value) + }) + }) + + describe('set value', () => { + it('successfully sets value', async () => { + // Act + await store.set(key, value) + + const res = await store.get(key) + + expect(res).toEqual(value) + }) + + it('sets value with expiration', async () => { + const ttl = 100 + + // Act + await store.set(key, value, { ttl }) + + const res = await store.get(key) + + expect(res).toEqual(value) + }) + + it('fails to get value when value expires', async () => { + const ttl = 100 + + // Act + await store.set(key, value, { ttl }) + + await new Promise((resolve) => setTimeout(resolve, ttl + 10)) + + const res = await store.get(key) + + expect(res).toBeNull() + }) + }) + + describe('remove value', () => { + it('ignores remove operation if key not exist', async () => { + // Act + const res: number = await store.remove(key) + + expect(res).toBe(0) + }) + it('removes value if key exists', async () => { + await store.set(key, value) + + // Act + const res: number = await store.remove(key) + + expect(res).toBe(1) + expect(await store.get(key)).toBeNull() + }) + + it('removes multiple keys', async () => { + const key1 = 'key1' + const key2 = 'key2' + const nonExistingKey = 'nonExistingKey' + + await store.set(key1, value) + await store.set(key2, value) + + // Act + const res: number = await store.remove(key1, key2, nonExistingKey) + + expect(res).toBe(2) + expect(await store.get(key1)).toBeNull() + expect(await store.get(key2)).toBeNull() + }) + }) + + describe('get & Set value with remember method', () => { + it('should return value from closure and save in cache if key does not exist', async () => { + // Act + const rememberValue = await store.remember(key, async () => { + return value + }) + + const valueFromCache = await store.get(key) + + // Assert + expect(valueFromCache).toEqual(rememberValue) + }) + + it('should return value from cache if key exists', async () => { + // Arrange + const randomValue = randomUUID().toString() + + await store.set(key, randomValue) + + // Act + const rememberValue = await store.remember(key, async () => { + return value + }) + + // Assert + expect(rememberValue).toEqual(randomValue) + }) + }) + }) + + describe('Operations with tagged values', () => { + it('fails to get untagged value', async () => { + await store.set(key, value) + 
+ // Act + await expect(store.getUsingTags(key)).rejects.toThrow(ServiceUnavailableError) + }) + + it('gets tagged value without tag in cache', async () => { + await store.set(key, value, { tags: [StoreTag.PublicService] }) + + // Act + const res = await store.getUsingTags(key) + + expect(res).toEqual(value) + }) + + it('gets tagged value when value is older than tag', async () => { + await store.bumpTags([StoreTag.PublicService]) + await store.set(key, value, { tags: [StoreTag.PublicService] }) + + // Act + const res = await store.getUsingTags(key) + + expect(res).toEqual(value) + }) + + it('key becomes invalid when at least one tag is bumped', async () => { + await store.set(key, value, { tags: [StoreTag.PublicService, StoreTag.PublicServiceCategory] }) + await store.bumpTags([StoreTag.PublicService]) + + // Act + const res = await store.getUsingTags(key) + + expect(res).toBeNull() + }) + + it('all keys becomes invalid when common tag is bumped', async () => { + const key1 = 'key1' + const key2 = 'key2' + + await store.set(key1, value, { tags: [StoreTag.PublicService] }) + await store.set(key2, value, { tags: [StoreTag.PublicService] }) + + // Act + await store.bumpTags([StoreTag.PublicService]) + + expect(await store.getUsingTags(key1)).toBeNull() + expect(await store.getUsingTags(key2)).toBeNull() + }) + }) +}) diff --git a/tests/mocks/randomData.ts b/tests/mocks/randomData.ts new file mode 100644 index 0000000..80ce25d --- /dev/null +++ b/tests/mocks/randomData.ts @@ -0,0 +1,9 @@ +import { randomBytes, randomUUID } from 'crypto' + +export function generateUuid(): string { + return randomUUID() +} + +export function generateIdentifier(length = 12): string { + return randomBytes(length).toString('hex') +} diff --git a/tests/mocks/services/cache.ts b/tests/mocks/services/cache.ts new file mode 100644 index 0000000..6d14cb2 --- /dev/null +++ b/tests/mocks/services/cache.ts @@ -0,0 +1 @@ +export { config } from './providers/cache' diff --git a/tests/mocks/services/providers/cache.ts b/tests/mocks/services/providers/cache.ts new file mode 100644 index 0000000..db44845 --- /dev/null +++ b/tests/mocks/services/providers/cache.ts @@ -0,0 +1,6 @@ +import { RedisConfig } from '../../../../src/interfaces/redis' + +export const config: RedisConfig = { + readOnly: { sentinels: [{ host: 'read.only.redis.sentinel' }] }, + readWrite: { sentinels: [{ host: 'read.write.redis.sentinel' }] }, +} diff --git a/tests/mocks/services/providers/pubsub.ts b/tests/mocks/services/providers/pubsub.ts new file mode 100644 index 0000000..cb874bc --- /dev/null +++ b/tests/mocks/services/providers/pubsub.ts @@ -0,0 +1 @@ +export { config } from './cache' diff --git a/tests/mocks/services/pubsub.ts b/tests/mocks/services/pubsub.ts new file mode 100644 index 0000000..6d14cb2 --- /dev/null +++ b/tests/mocks/services/pubsub.ts @@ -0,0 +1 @@ +export { config } from './providers/cache' diff --git a/tests/mocks/services/redlock.ts b/tests/mocks/services/redlock.ts new file mode 100644 index 0000000..6d14cb2 --- /dev/null +++ b/tests/mocks/services/redlock.ts @@ -0,0 +1 @@ +export { config } from './providers/cache' diff --git a/tests/mocks/services/store.ts b/tests/mocks/services/store.ts new file mode 100644 index 0000000..6d14cb2 --- /dev/null +++ b/tests/mocks/services/store.ts @@ -0,0 +1 @@ +export { config } from './providers/cache' diff --git a/tests/tsconfig.json b/tests/tsconfig.json new file mode 100644 index 0000000..08f4d3e --- /dev/null +++ b/tests/tsconfig.json @@ -0,0 +1,9 @@ +{ + "extends": 
"@diia-inhouse/configs/tsconfig", + "compilerOptions": { + "baseUrl": "../", + "noEmit": true, + "strict": true + }, + "include": ["./**/*"] +} diff --git a/tests/unit/services/cache.spec.ts b/tests/unit/services/cache.spec.ts new file mode 100644 index 0000000..b7cae4f --- /dev/null +++ b/tests/unit/services/cache.spec.ts @@ -0,0 +1,184 @@ +const redisCacheProviderMock = { + get: jest.fn(), + set: jest.fn(), + getKeysByPattern: jest.fn(), + getByKeys: jest.fn(), + remove: jest.fn(), + getStatus: jest.fn(), +} + +class RedisCacheProviderMock { + get(...args: unknown[]): unknown { + return redisCacheProviderMock.get(...args) + } + + set(...args: unknown[]): unknown { + return redisCacheProviderMock.set(...args) + } + + getKeysByPattern(...args: unknown[]): unknown { + return redisCacheProviderMock.getKeysByPattern(...args) + } + + getByKeys(...args: unknown[]): unknown { + return redisCacheProviderMock.getByKeys(...args) + } + + remove(...args: unknown[]): unknown { + return redisCacheProviderMock.remove(...args) + } + + getStatus(...args: unknown[]): unknown { + return redisCacheProviderMock.getStatus(...args) + } +} + +jest.mock('@services/providers/cache', () => ({ RedisCacheProvider: RedisCacheProviderMock })) + +import Logger from '@diia-inhouse/diia-logger' +import { EnvService } from '@diia-inhouse/env' +import { mockClass } from '@diia-inhouse/test' +import { HttpStatusCode } from '@diia-inhouse/types' + +import { CacheService, CacheStatus, RedisStatusValue } from '../../../src/index' +import { generateUuid } from '../../mocks/randomData' +import { config } from '../../mocks/services/cache' + +const LoggerMock = mockClass(Logger) +const EnvServiceMock = mockClass(EnvService) + +describe('CacheService', () => { + const logger = new LoggerMock() + const envService = new EnvServiceMock() + const cacheService = new CacheService(config, envService, logger) + + describe('method: `get`', () => { + it('should successfully get item from cache', async () => { + const key = generateUuid() + const expectedValue = 'value' + + redisCacheProviderMock.get.mockResolvedValue(expectedValue) + jest.spyOn(envService, 'isTest').mockReturnValue(false) + + expect(await cacheService.get(key)).toEqual(expectedValue) + expect(redisCacheProviderMock.get).toHaveBeenCalledWith(key) + }) + + it('should fail to get item from cache in case there is error occured', async () => { + const key = generateUuid() + const expectedError = new Error('Unable to get item from cache') + + redisCacheProviderMock.get.mockRejectedValue(expectedError) + jest.spyOn(envService, 'isTest').mockReturnValue(true) + + await expect(async () => { + await cacheService.get(key) + }).rejects.toEqual(expectedError) + + expect(logger.error).toHaveBeenCalledWith('Failed to get cached value from a provider', { err: expectedError }) + expect(redisCacheProviderMock.get).toHaveBeenCalledWith(`test.${key}`) + }) + }) + + describe('method: `set`', () => { + it('should successfully set value in cache with provided expiration', async () => { + const key = generateUuid() + const value = 'value' + const expiration = 1800 + + redisCacheProviderMock.set.mockResolvedValue('OK') + jest.spyOn(envService, 'isTest').mockReturnValue(false) + + expect(await cacheService.set(key, value, expiration)).toBe('OK') + expect(redisCacheProviderMock.set).toHaveBeenCalledWith(key, value, expiration) + }) + + it('should successfully set value in cache with default expiration', async () => { + const key = generateUuid() + const value = 'value' + const expiration = 60 * 60 * 3 
+ + redisCacheProviderMock.set.mockResolvedValue('OK') + jest.spyOn(envService, 'isTest').mockReturnValue(false) + + expect(await cacheService.set(key, value)).toBe('OK') + expect(redisCacheProviderMock.set).toHaveBeenCalledWith(key, value, expiration) + }) + }) + + describe('method: `getKeysByPattern`', () => { + it('should successfully get item from cache', async () => { + const pattern = '*' + const expectedValue = ['key'] + + redisCacheProviderMock.getKeysByPattern.mockResolvedValue(expectedValue) + jest.spyOn(envService, 'isTest').mockReturnValue(false) + + expect(await cacheService.getKeysByPattern(pattern)).toEqual(expectedValue) + expect(redisCacheProviderMock.getKeysByPattern).toHaveBeenCalledWith(pattern) + }) + }) + + describe('method: `getByKeys`', () => { + it('should successfully get list of items from cache by keys', async () => { + const keys = [generateUuid()] + const expectedValues = ['value'] + + redisCacheProviderMock.getByKeys.mockResolvedValue(expectedValues) + jest.spyOn(envService, 'isTest').mockReturnValue(false) + + expect(await cacheService.getByKeys(keys)).toEqual(expectedValues) + expect(redisCacheProviderMock.getByKeys).toHaveBeenCalledWith(keys) + }) + + it('should fail to get list of items from cache in case there is error occured', async () => { + const keys = [generateUuid()] + const expectedError = new Error('Unable to get items from cache') + + redisCacheProviderMock.getByKeys.mockRejectedValue(expectedError) + jest.spyOn(envService, 'isTest').mockReturnValue(true) + + await expect(async () => { + await cacheService.getByKeys(keys) + }).rejects.toEqual(expectedError) + + expect(logger.error).toHaveBeenCalledWith('Failed to get cached value from a provider by keys', { err: expectedError }) + expect(redisCacheProviderMock.getByKeys).toHaveBeenCalledWith(keys) + }) + }) + + describe('method: `remove`', () => { + it('should successfully remove value from cache', async () => { + const key = generateUuid() + + redisCacheProviderMock.remove.mockResolvedValue(1) + jest.spyOn(envService, 'isTest').mockReturnValue(false) + + expect(await cacheService.remove(key)).toBe(1) + expect(redisCacheProviderMock.remove).toHaveBeenCalledWith(key) + }) + }) + + describe('method: `onHealthCheck`', () => { + it.each([ + [ + 'OK', + { + status: HttpStatusCode.OK, + details: { redis: { readOnly: RedisStatusValue.Ready, readWrite: RedisStatusValue.Ready } }, + }, + ], + [ + 'SERVICE UNAVAILABLE', + { + status: HttpStatusCode.SERVICE_UNAVAILABLE, + details: { redis: { readOnly: 'connecting', readWrite: RedisStatusValue.Ready } }, + }, + ], + ])('should return `%s` status', async (_httpStatus, expectedStatus) => { + redisCacheProviderMock.getStatus.mockReturnValue(expectedStatus.details.redis) + + expect(await cacheService.onHealthCheck()).toEqual(expectedStatus) + }) + }) +}) diff --git a/tests/unit/services/providers/cache.spec.ts b/tests/unit/services/providers/cache.spec.ts new file mode 100644 index 0000000..dd0c071 --- /dev/null +++ b/tests/unit/services/providers/cache.spec.ts @@ -0,0 +1,142 @@ +const redisClientRoMock = { + on: jest.fn(), + get: jest.fn(), + keys: jest.fn(), + mget: jest.fn(), + status: 'ready', +} + +const redisClientRwMock = { + on: jest.fn(), + set: jest.fn(), + expire: jest.fn(), + del: jest.fn(), + status: 'ready', +} + +const createClient = jest.fn() + +class RedisServiceMock { + static createClient = createClient +} + +jest.mock('@services/redis', () => ({ RedisService: RedisServiceMock })) + +import Logger from '@diia-inhouse/diia-logger' +import { 
mockClass } from '@diia-inhouse/test'
+
+import { RedisCacheProvider } from '../../../../src/services/providers/cache'
+import { config } from '../../../mocks/services/providers/cache'
+
+const LoggerMock = mockClass(Logger)
+
+describe('RedisCacheProvider', () => {
+    describe('method: `get`', () => {
+        it('should successfully get item from cache', async () => {
+            createClient.mockReturnValueOnce(redisClientRwMock)
+            createClient.mockReturnValueOnce(redisClientRoMock)
+
+            const expectedValue = 'value'
+            const logger = new LoggerMock()
+            const redisCacheProvider = new RedisCacheProvider(config, logger)
+
+            redisClientRoMock.get.mockResolvedValue(expectedValue)
+
+            expect(await redisCacheProvider.get('key')).toEqual(expectedValue)
+            expect(redisClientRoMock.get).toHaveBeenCalledWith('key')
+        })
+    })
+
+    describe('method: `set`', () => {
+        it('should successfully set value with expiration in cache', async () => {
+            createClient.mockReturnValueOnce(redisClientRwMock)
+            createClient.mockReturnValueOnce(redisClientRoMock)
+
+            const logger = new LoggerMock()
+            const redisCacheProvider = new RedisCacheProvider(config, logger)
+
+            redisClientRwMock.set.mockResolvedValue('OK')
+
+            expect(await redisCacheProvider.set('key', 'value', 1800)).toBe('OK')
+            expect(redisClientRwMock.set).toHaveBeenCalledWith('key', 'value')
+            expect(redisClientRwMock.expire).toHaveBeenCalledWith('key', 1800)
+        })
+    })
+
+    describe('method: `getKeysByPattern`', () => {
+        it('should successfully get keys list by pattern', async () => {
+            createClient.mockReturnValueOnce(redisClientRwMock)
+            createClient.mockReturnValueOnce(redisClientRoMock)
+
+            const expectedKeys = ['key1', 'key2']
+            const logger = new LoggerMock()
+            const redisCacheProvider = new RedisCacheProvider(config, logger)
+
+            redisClientRoMock.keys.mockResolvedValue(expectedKeys)
+
+            expect(await redisCacheProvider.getKeysByPattern('*')).toEqual(expectedKeys)
+            expect(redisClientRoMock.keys).toHaveBeenCalledWith('*')
+        })
+    })
+
+    describe('method: `getByKeys`', () => {
+        it('should successfully get list of items', async () => {
+            createClient.mockReturnValueOnce(redisClientRwMock)
+            createClient.mockReturnValueOnce(redisClientRoMock)
+
+            const expectedValues = ['value1', 'value2']
+            const logger = new LoggerMock()
+            const redisCacheProvider = new RedisCacheProvider(config, logger)
+
+            redisClientRoMock.mget.mockResolvedValue(expectedValues)
+
+            expect(await redisCacheProvider.getByKeys(['key1', 'key2'])).toEqual(expectedValues)
+            expect(redisClientRoMock.mget).toHaveBeenCalledWith(['key1', 'key2'])
+        })
+    })
+
+    describe('method: `remove`', () => {
+        it('should successfully remove key from cache', async () => {
+            createClient.mockReturnValueOnce(redisClientRwMock)
+            createClient.mockReturnValueOnce(redisClientRoMock)
+
+            const logger = new LoggerMock()
+            const redisCacheProvider = new RedisCacheProvider(config, logger)
+
+            redisClientRwMock.del.mockResolvedValue(1)
+
+            expect(await redisCacheProvider.remove('key1')).toBe(1)
+            expect(redisClientRwMock.del).toHaveBeenCalledWith('key1')
+        })
+    })
+
+    describe('method: `getStatus`', () => {
+        it('should return status for both clients', () => {
+            const connError = new Error('Conn error')
+
+            createClient.mockReturnValueOnce(redisClientRwMock)
+            createClient.mockReturnValueOnce(redisClientRoMock)
+
+            redisClientRwMock.on.mockImplementationOnce((_connectEvent, cb) => {
+                cb()
+            })
+            redisClientRwMock.on.mockImplementationOnce((_errorEvent, cb) => {
+                cb(connError)
+            })
+
+            
redisClientRoMock.on.mockImplementationOnce((_connectEvent, cb) => { + cb() + }) + redisClientRoMock.on.mockImplementationOnce((_errorEvent, cb) => { + cb(connError) + }) + + const logger = new LoggerMock() + const redisCacheProvider = new RedisCacheProvider(config, logger) + + expect(redisCacheProvider.getStatus()).toEqual({ readWrite: 'ready', readOnly: 'ready' }) + expect(logger.info).toHaveBeenCalledWith(`Redis READ-WRITE connection open to ${JSON.stringify(config.readWrite.sentinels)}`) + expect(logger.info).toHaveBeenCalledWith(`Redis READ-ONLY connection open to ${JSON.stringify(config.readOnly.sentinels)}`) + }) + }) +}) diff --git a/tests/unit/services/providers/pubsub.spec.ts b/tests/unit/services/providers/pubsub.spec.ts new file mode 100644 index 0000000..3193814 --- /dev/null +++ b/tests/unit/services/providers/pubsub.spec.ts @@ -0,0 +1,177 @@ +/* eslint-disable jest/no-conditional-in-test */ +const redisClientSubMock = { + on: jest.fn(), + subscribe: jest.fn(), + unsubscribe: jest.fn(), + status: 'ready', +} + +const redisClientPubMock = { + on: jest.fn(), + publish: jest.fn(), + status: 'ready', +} + +const createClient = jest.fn() + +class RedisServiceMock { + static createClient = createClient +} + +jest.mock('@services/redis', () => ({ RedisService: RedisServiceMock })) + +import Logger from '@diia-inhouse/diia-logger' +import { mockClass } from '@diia-inhouse/test' + +import { PubSubProvider } from '../../../../src/services/providers/pubsub' +import { generateUuid } from '../../../mocks/randomData' +import { config } from '../../../mocks/services/providers/pubsub' + +const LoggerMock = mockClass(Logger) + +describe('PubSubProvider', () => { + describe('method: `unsubscribe`', () => { + it('should successfully unsubscribe handler and log in case handler does not exist', async () => { + const channel = generateUuid() + + createClient.mockReturnValueOnce(redisClientPubMock) + createClient.mockReturnValueOnce(redisClientSubMock) + + redisClientSubMock.on.mockImplementation((_event, cb) => { + cb(channel, '{}') + }) + + const logger = new LoggerMock() + const pubSubProvider = new PubSubProvider(config, logger) + + redisClientSubMock.subscribe.mockResolvedValue(null) + + await pubSubProvider.onceChannelMessage(channel, async () => {}) + await pubSubProvider.unsubscribe(channel) + + expect(logger.error).toHaveBeenCalledWith(`Could not find a message handler for the channel ${channel}`) + expect(logger.info).toHaveBeenCalledWith(`Redis READ-ONLY sub connection open to ${JSON.stringify(config.readOnly.sentinels)}`) + }) + }) + + describe('method: `publish`', () => { + it('should successfully publish message', async () => { + const channel = generateUuid() + + createClient.mockReturnValueOnce(redisClientPubMock) + createClient.mockReturnValueOnce(redisClientSubMock) + + const logger = new LoggerMock() + const pubSubProvider = new PubSubProvider(config, logger) + + redisClientPubMock.publish.mockResolvedValue(1) + + expect(await pubSubProvider.publish(channel, {})).toBe(1) + }) + }) + + describe('method: `onceChannelMessage`', () => { + it('should successfully register handler for channel and then handle received message', async () => { + let onMessageHandler: CallableFunction = async () => {} + + const channel = generateUuid() + + createClient.mockReturnValueOnce(redisClientPubMock) + createClient.mockReturnValueOnce(redisClientSubMock) + + redisClientSubMock.on.mockImplementation((event, cb) => { + if (event === 'message') { + onMessageHandler = cb + } + }) + + const logger = new 
LoggerMock() + const pubSubProvider = new PubSubProvider(config, logger) + + redisClientSubMock.subscribe.mockResolvedValue(null) + + await pubSubProvider.onceChannelMessage(channel, async (message) => { + expect(message).toBe('{}') + }) + + await onMessageHandler(channel, '{}') + }) + + it('should fail to register handler for channel in case it was already registered', async () => { + const channel = generateUuid() + + createClient.mockReturnValueOnce(redisClientPubMock) + createClient.mockReturnValueOnce(redisClientSubMock) + + const logger = new LoggerMock() + const pubSubProvider = new PubSubProvider(config, logger) + + redisClientSubMock.subscribe.mockResolvedValue(null) + + await pubSubProvider.onceChannelMessage(channel, async () => {}) + + await expect(async () => { + await pubSubProvider.onceChannelMessage(channel, async () => {}) + }).rejects.toEqual(new Error(`Handler already exists by the provided channel ${channel}`)) + }) + + it('should successfully register handler for channel and then only log error in case handler rejects', async () => { + let onMessageHandler: CallableFunction = async () => {} + + const channel = generateUuid() + const expectedError = new Error('Unable to handle message') + + createClient.mockReturnValueOnce(redisClientPubMock) + createClient.mockReturnValueOnce(redisClientSubMock) + + redisClientSubMock.on.mockImplementation((event, cb) => { + if (event === 'message') { + onMessageHandler = cb + } + }) + + const logger = new LoggerMock() + const pubSubProvider = new PubSubProvider(config, logger) + + redisClientSubMock.subscribe.mockResolvedValue(null) + + await pubSubProvider.onceChannelMessage(channel, async () => { + throw expectedError + }) + + await onMessageHandler(channel, '{}') + + expect(logger.error).toHaveBeenCalledWith(`Failed to handle message from the channel ${channel}`, { err: expectedError }) + }) + }) + + describe('method: `getStatus`', () => { + it('should return status for both pub/sub', () => { + createClient.mockReturnValueOnce(redisClientPubMock) + createClient.mockReturnValueOnce(redisClientSubMock) + + redisClientPubMock.on.mockImplementationOnce((_connectEvent, cb) => { + cb() + }) + redisClientPubMock.on.mockImplementationOnce((_errorEvent, cb) => { + cb(new Error('Some error')) + }) + + redisClientSubMock.on.mockImplementationOnce((_connectEvent, cb) => { + cb() + }) + redisClientSubMock.on.mockImplementationOnce((_errorEvent, cb) => { + cb(new Error('Some error')) + }) + + const logger = new LoggerMock() + const pubSubProvider = new PubSubProvider(config, logger) + + expect(pubSubProvider.getStatus()).toEqual({ pub: 'ready', sub: 'ready' }) + expect(logger.info).toHaveBeenCalledWith( + `Redis READ-WRITE pub connection open to ${JSON.stringify(config.readWrite.sentinels)}`, + ) + expect(logger.info).toHaveBeenCalledWith(`Redis READ-ONLY sub connection open to ${JSON.stringify(config.readOnly.sentinels)}`) + }) + }) +}) diff --git a/tests/unit/services/pubsub.spec.ts b/tests/unit/services/pubsub.spec.ts new file mode 100644 index 0000000..81dff61 --- /dev/null +++ b/tests/unit/services/pubsub.spec.ts @@ -0,0 +1,98 @@ +const pubSubProviderMock = { + unsubscribe: jest.fn(), + publish: jest.fn(), + onceChannelMessage: jest.fn(), + getStatus: jest.fn(), +} + +class PubSubProviderMock { + unsubscribe(...args: unknown[]): unknown { + return pubSubProviderMock.unsubscribe(...args) + } + + publish(...args: unknown[]): unknown { + return pubSubProviderMock.publish(...args) + } + + onceChannelMessage(...args: unknown[]): unknown { + 
return pubSubProviderMock.onceChannelMessage(...args) + } + + getStatus(...args: unknown[]): unknown { + return pubSubProviderMock.getStatus(...args) + } +} + +jest.mock('@services/providers/pubsub', () => ({ PubSubProvider: PubSubProviderMock })) + +import Logger from '@diia-inhouse/diia-logger' +import { mockClass } from '@diia-inhouse/test' +import { HttpStatusCode } from '@diia-inhouse/types' + +import { PubSubService, PubSubStatus, RedisStatusValue } from '../../../src/index' +import { generateUuid } from '../../mocks/randomData' +import { config } from '../../mocks/services/pubsub' + +const LoggerMock = mockClass(Logger) + +describe('PubSubService', () => { + const logger = new LoggerMock() + const pubSubService = new PubSubService(config, logger) + + describe('method: `unsubscribe`', () => { + it('should successfully unsubscribe', async () => { + const channel = generateUuid() + + pubSubProviderMock.unsubscribe.mockResolvedValue(true) + + expect(await pubSubService.unsubscribe(channel)).toBe(true) + expect(pubSubProviderMock.unsubscribe).toHaveBeenCalledWith(channel) + }) + }) + + describe('method: `publish`', () => { + it('should successfully publish message', async () => { + const channel = generateUuid() + + pubSubProviderMock.publish.mockResolvedValue(1) + + expect(await pubSubService.publish(channel, 'message')).toBe(1) + expect(pubSubProviderMock.publish).toHaveBeenCalledWith(channel, 'message') + }) + }) + + describe('method: `onceChannelMessage`', () => { + it('should successfully publish message', async () => { + const channel = generateUuid() + const handler = async (): Promise => {} + + pubSubProviderMock.onceChannelMessage.mockResolvedValue(true) + + expect(await pubSubService.onceChannelMessage(channel, handler)).toBe(true) + expect(pubSubProviderMock.onceChannelMessage).toHaveBeenCalledWith(channel, handler) + }) + }) + + describe('method: `onHealthCheck`', () => { + it.each([ + [ + 'OK', + { + status: HttpStatusCode.OK, + details: { redis: { pub: RedisStatusValue.Ready, sub: RedisStatusValue.Ready } }, + }, + ], + [ + 'SERVICE UNAVAILABLE', + { + status: HttpStatusCode.SERVICE_UNAVAILABLE, + details: { redis: { pub: 'connecting', sub: RedisStatusValue.Ready } }, + }, + ], + ])('should return `%s` status', async (_httpStatus, expectedStatus) => { + pubSubProviderMock.getStatus.mockReturnValue(expectedStatus.details.redis) + + expect(await pubSubService.onHealthCheck()).toEqual(expectedStatus) + }) + }) +}) diff --git a/tests/unit/services/redis.spec.ts b/tests/unit/services/redis.spec.ts new file mode 100644 index 0000000..fe68067 --- /dev/null +++ b/tests/unit/services/redis.spec.ts @@ -0,0 +1,24 @@ +const constructorMock = jest.fn() + +class RedisMock { + options: unknown[] + + constructor(...args: unknown[]) { + this.options = [...args] + + constructorMock(...args) + } +} + +jest.mock('ioredis', () => RedisMock) + +import { RedisService } from '../../../src/index' + +describe('RedisService', () => { + describe('method: `createClient`', () => { + it('should successfully create redis client', () => { + expect(RedisService.createClient({})).toEqual(new RedisMock({ enableAutoPipelining: true })) + expect(constructorMock).toHaveBeenCalledWith({ enableAutoPipelining: true }) + }) + }) +}) diff --git a/tests/unit/services/redlock.spec.ts b/tests/unit/services/redlock.spec.ts new file mode 100644 index 0000000..6f6843b --- /dev/null +++ b/tests/unit/services/redlock.spec.ts @@ -0,0 +1,99 @@ +const redlockMock = { + constructor: jest.fn(), + acquire: jest.fn(), +} + +const 
redisClientRwMock = { + on: jest.fn(), +} + +class RedisServiceMock { + static createClient = jest.fn() +} + +class RedlockMutexMock { + constructor(...args: unknown[]) { + redlockMock.constructor(...args) + } + + acquire(...args: unknown[]): unknown { + return redlockMock.acquire(...args) + } +} + +jest.mock('redis-semaphore', () => ({ RedlockMutex: RedlockMutexMock })) +jest.mock('@services/redis', () => ({ RedisService: RedisServiceMock })) + +import Logger from '@diia-inhouse/diia-logger' +import { mockClass } from '@diia-inhouse/test' + +import { RedlockService } from '../../../src/index' +import { config } from '../../mocks/services/redlock' + +const LoggerMock = mockClass(Logger) + +describe('RedlockService', () => { + const logger = new LoggerMock() + + describe('event handlers', () => { + it('should properly react on different events', () => { + const expectedRedisError = new Error('Unable to instantiate redis client') + + RedisServiceMock.createClient.mockReturnValue(redisClientRwMock) + + redisClientRwMock.on.mockImplementationOnce((_event, cb) => { + cb() + }) + + redisClientRwMock.on.mockImplementationOnce((_event, cb) => { + cb(expectedRedisError) + }) + + new RedlockService(config, logger) + + expect(logger.info).toHaveBeenCalledWith( + `Redis REDLOCK READ-WRITE connection open to ${JSON.stringify(config.readWrite.sentinels)}`, + ) + expect(logger.info).toHaveBeenCalledWith('Redis REDLOCK READ-WRITE connection error ', { err: expectedRedisError }) + expect(logger.info).toHaveBeenCalledWith(`Redis Path ${JSON.stringify(config.readWrite.sentinels)}`) + }) + }) + + describe('method: `lock`', () => { + it('should lock resource with provided ttl', async () => { + const resource = 'resource' + const ttl = 1800 + + RedisServiceMock.createClient.mockReturnValue(redisClientRwMock) + redlockMock.acquire.mockResolvedValue({}) + + const redlockService = new RedlockService(config, logger) + + expect(await redlockService.lock(resource, ttl)).toEqual({}) + expect(redlockMock.constructor).toHaveBeenCalledWith([redisClientRwMock], resource, { + acquireTimeout: 3600, + lockTimeout: 1800, + }) + expect(redlockMock.acquire).toHaveBeenCalled() + expect(logger.info).toHaveBeenCalledWith(`Start LOCK resource [${resource}] for ttl [${ttl}]`) + }) + + it('should lock resource with default ttl', async () => { + const resource = 'resource' + const defaultTtl = 60000 + + RedisServiceMock.createClient.mockReturnValue(redisClientRwMock) + redlockMock.acquire.mockResolvedValue({}) + + const redlockService = new RedlockService(config, logger) + + expect(await redlockService.lock(resource)).toEqual({}) + expect(redlockMock.constructor).toHaveBeenCalledWith([redisClientRwMock], resource, { + acquireTimeout: 120_000, + lockTimeout: 60_000, + }) + expect(redlockMock.acquire).toHaveBeenCalled() + expect(logger.info).toHaveBeenCalledWith(`Start LOCK resource [${resource}] for ttl [${defaultTtl}]`) + }) + }) +}) diff --git a/tests/unit/services/store.spec.ts b/tests/unit/services/store.spec.ts new file mode 100644 index 0000000..91657d1 --- /dev/null +++ b/tests/unit/services/store.spec.ts @@ -0,0 +1,325 @@ +const redisClientRoMock = { + on: jest.fn(), + get: jest.fn(), + keys: jest.fn(), + mget: jest.fn(), + pttl: jest.fn(), + status: 'ready', +} + +const redisClientRwMock = { + on: jest.fn(), + set: jest.fn(), + expire: jest.fn(), + pexpire: jest.fn(), + del: jest.fn(), + flushdb: jest.fn(), + status: 'ready', +} + +class RedisServiceMock { + static createClient = jest.fn() +} + +jest.mock('@services/redis', () 
=> ({ RedisService: RedisServiceMock })) + +import Logger from '@diia-inhouse/diia-logger' +import { ServiceUnavailableError } from '@diia-inhouse/errors' +import { mockClass } from '@diia-inhouse/test' +import { HttpStatusCode } from '@diia-inhouse/types' + +import { CacheStatus, SetValueOptions, StoreService, StoreTag, TaggedStoreValue, TagsConfig } from '../../../src/index' +import { generateUuid } from '../../mocks/randomData' +import { config } from '../../mocks/services/store' + +const LoggerMock = mockClass(Logger) + +describe('StoreService', () => { + const now = Date.now() + const logger = new LoggerMock() + + beforeAll(() => { + jest.useFakeTimers({ now }) + }) + + afterAll(() => { + jest.useRealTimers() + }) + + describe('event handlers', () => { + it('should properly react on different events', () => { + const connError = new Error('Connn error') + + RedisServiceMock.createClient.mockReturnValueOnce(redisClientRwMock) + RedisServiceMock.createClient.mockReturnValueOnce(redisClientRoMock) + + redisClientRoMock.on.mockImplementationOnce((_connectEvent, cb) => { + cb() + }) + redisClientRoMock.on.mockImplementationOnce((_errorEvent, cb) => { + cb(connError) + }) + + redisClientRwMock.on.mockImplementationOnce((_connectEvent, cb) => { + cb() + }) + redisClientRwMock.on.mockImplementationOnce((_errorEvent, cb) => { + cb(connError) + }) + + new StoreService(config, logger) + + expect(logger.info).toHaveBeenCalledWith(`Store READ-WRITE connection open to ${JSON.stringify(config.readWrite.sentinels)}`) + expect(logger.info).toHaveBeenCalledWith('Store READ-WRITE connection error ', { err: connError }) + expect(logger.info).toHaveBeenCalledWith(`Store Path ${JSON.stringify(config.readWrite.sentinels)}`) + expect(logger.info).toHaveBeenCalledWith(`Store READ-ONLY connection open to ${JSON.stringify(config.readOnly.sentinels)}`) + expect(logger.info).toHaveBeenCalledWith('Store READ-ONLY connection error ', { err: connError }) + expect(logger.info).toHaveBeenCalledWith(`Store Path ${JSON.stringify(config.readOnly.sentinels)}`) + }) + }) + + describe('method: `get`', () => { + it('should successfully get value from store', async () => { + const key = generateUuid() + const expectedValue = 'value' + + RedisServiceMock.createClient.mockReturnValueOnce(redisClientRwMock) + RedisServiceMock.createClient.mockReturnValueOnce(redisClientRoMock) + redisClientRoMock.get.mockResolvedValue(expectedValue) + + const storeService = new StoreService(config, logger) + + expect(await storeService.get(key)).toEqual(expectedValue) + expect(redisClientRoMock.get).toHaveBeenCalledWith(key) + }) + }) + + describe('method: `getUsingTags`', () => { + it.each([ + ['cached value is null', generateUuid(), null, null, null], + [ + 'there are no tags', + generateUuid(), + JSON.stringify({ data: 'value', tags: [], timestamp: 1800 }), + null, + 'value', + ], + [ + 'invalid tags', + generateUuid(), + JSON.stringify({ data: 'value', tags: [StoreTag.PublicService], timestamp: 1800 }), + JSON.stringify({ publicService: 1900 }), + null, + ], + [ + 'valid tags', + generateUuid(), + JSON.stringify({ data: 'value', tags: [StoreTag.PublicService], timestamp: 1800 }), + JSON.stringify({ publicService: 1800 }), + 'value', + ], + ])( + 'should successfully get value using tags in case %s', + async (_msg: string, key: string, cachedValue: string | null, tagsConfig: string | null, expectedValue: string | null) => { + RedisServiceMock.createClient.mockReturnValueOnce(redisClientRwMock) + 
RedisServiceMock.createClient.mockReturnValueOnce(redisClientRoMock) + redisClientRoMock.mget.mockResolvedValue([cachedValue, tagsConfig]) + + const storeService = new StoreService(config, logger) + + expect(await storeService.getUsingTags(key)).toEqual(expectedValue) + expect(redisClientRoMock.mget).toHaveBeenCalledWith(key, '_tags') + }, + ) + + it('should fail to get value from store using tags when invalid json value received', async () => { + const key = generateUuid() + const expectedError = new ServiceUnavailableError() + + RedisServiceMock.createClient.mockReturnValueOnce(redisClientRwMock) + RedisServiceMock.createClient.mockReturnValueOnce(redisClientRoMock) + redisClientRoMock.mget.mockResolvedValue(['invalid-json-string', null]) + + const storeService = new StoreService(config, logger) + + await expect(async () => { + await storeService.getUsingTags(key) + }).rejects.toEqual(expectedError) + expect(redisClientRoMock.mget).toHaveBeenCalledWith(key, '_tags') + expect(logger.error).toHaveBeenCalledWith('Failed when parse value with tags', { + err: new SyntaxError('Unexpected token i in JSON at position 0'), + }) + }) + }) + + describe('method: `remove`', () => { + it('should successfully remove value from store', async () => { + const key = generateUuid() + + RedisServiceMock.createClient.mockReturnValueOnce(redisClientRwMock) + RedisServiceMock.createClient.mockReturnValueOnce(redisClientRoMock) + redisClientRwMock.del.mockResolvedValue(1) + + const storeService = new StoreService(config, logger) + + expect(await storeService.remove(key)).toBe(1) + expect(redisClientRwMock.del).toHaveBeenCalledWith(key) + }) + }) + + describe('method: `set`', () => { + it('should set value in store with provided ttl', async () => { + const key = generateUuid() + const value = 'value' + const options: SetValueOptions = { tags: [StoreTag.PublicService], ttl: 1800 } + + RedisServiceMock.createClient.mockReturnValueOnce(redisClientRwMock) + RedisServiceMock.createClient.mockReturnValueOnce(redisClientRoMock) + redisClientRoMock.get.mockResolvedValue(JSON.stringify({ publicService: 1800 })) + redisClientRwMock.set.mockResolvedValue('OK') + + const storeService = new StoreService(config, logger) + + expect(await storeService.set(key, value, options)).toBe('OK') + expect(redisClientRoMock.get).toHaveBeenCalledWith('_tags') + expect(redisClientRwMock.set).toHaveBeenCalledWith( + key, + JSON.stringify({ data: value, tags: [StoreTag.PublicService], timestamp: 1800 }), + 'PX', + options.ttl, + ) + }) + + it('should set value in store without ttl', async () => { + const key = generateUuid() + const value = 'value' + const options: SetValueOptions = { tags: [StoreTag.PublicService] } + + RedisServiceMock.createClient.mockReturnValueOnce(redisClientRwMock) + RedisServiceMock.createClient.mockReturnValueOnce(redisClientRoMock) + redisClientRoMock.get.mockResolvedValue(null) + redisClientRwMock.set.mockResolvedValue('OK') + + const storeService = new StoreService(config, logger) + + expect(await storeService.set(key, value, options)).toBe('OK') + expect(redisClientRoMock.get).toHaveBeenCalledWith('_tags') + expect(redisClientRwMock.set).toHaveBeenCalledWith( + key, + JSON.stringify({ data: value, tags: [StoreTag.PublicService], timestamp: 0 }), + ) + }) + + it('should set value in store without any options', async () => { + const key = generateUuid() + const value = 'value' + + RedisServiceMock.createClient.mockReturnValueOnce(redisClientRwMock) + 
RedisServiceMock.createClient.mockReturnValueOnce(redisClientRoMock)
+            redisClientRwMock.set.mockResolvedValue('OK')
+
+            const storeService = new StoreService(config, logger)
+
+            expect(await storeService.set(key, value)).toBe('OK')
+            expect(redisClientRwMock.set).toHaveBeenCalledWith(key, value)
+        })
+    })
+
+    describe('method: `remember`', () => {
+        it('should just return item from cache', async () => {
+            const key = generateUuid()
+            const expectedValue = 'value'
+            const closure = jest.fn()
+
+            RedisServiceMock.createClient.mockReturnValueOnce(redisClientRwMock)
+            RedisServiceMock.createClient.mockReturnValueOnce(redisClientRoMock)
+            redisClientRoMock.get.mockResolvedValue(expectedValue)
+            closure.mockResolvedValueOnce(expectedValue)
+
+            const storeService = new StoreService(config, logger)
+
+            expect(await storeService.remember(key, closure)).toEqual(expectedValue)
+            expect(redisClientRoMock.get).toHaveBeenCalledWith(key)
+            expect(closure).not.toHaveBeenCalled()
+        })
+
+        it.each(['value', ''])('should set item `%s` received from closure in store', async (expectedValue) => {
+            const key = generateUuid()
+            const closure = jest.fn()
+
+            RedisServiceMock.createClient.mockReturnValueOnce(redisClientRwMock)
+            RedisServiceMock.createClient.mockReturnValueOnce(redisClientRoMock)
+            redisClientRoMock.get.mockResolvedValue(null)
+            closure.mockResolvedValueOnce(expectedValue)
+
+            const storeService = new StoreService(config, logger)
+
+            expect(await storeService.remember(key, closure, {})).toEqual(expectedValue)
+            expect(redisClientRoMock.get).toHaveBeenCalledWith(key)
+            expect(closure).toHaveBeenCalledWith()
+            expect(redisClientRwMock.set).toHaveBeenCalledWith(key, expectedValue)
+        })
+    })
+
+    describe('method: `onHealthCheck`', () => {
+        it.each([
+            [
+                'OK',
+                ['ready', 'ready'],
+                { status: HttpStatusCode.OK, details: { store: { readOnly: 'ready', readWrite: 'ready' } } },
+            ],
+            [
+                'SERVICE_UNAVAILABLE',
+                ['connecting', 'ready'],
+                {
+                    status: HttpStatusCode.SERVICE_UNAVAILABLE,
+                    details: { store: { readOnly: 'ready', readWrite: 'connecting' } },
+                },
+            ],
+        ])('should return health status `%s`', async (_status, [rw, ro], expectedStatus) => {
+            RedisServiceMock.createClient.mockReturnValueOnce({ ...redisClientRwMock, status: rw })
+            RedisServiceMock.createClient.mockReturnValueOnce({ ...redisClientRoMock, status: ro })
+
+            const storeService = new StoreService(config, logger)
+
+            expect(await storeService.onHealthCheck()).toEqual(expectedStatus)
+        })
+    })
+
+    describe('method: `bumpTags`', () => {
+        it.each([
+            [
+                'tags config exists in store',
+                JSON.stringify({ [StoreTag.Faq]: now }),
+                JSON.stringify({ [StoreTag.Faq]: now, [StoreTag.PublicService]: now }),
+            ],
+            ['tags config does not exist in store', null, JSON.stringify({ [StoreTag.PublicService]: now })],
+        ])('should successfully bump tags when %s', async (_msg, tagsConfig: string | null, expectedTagsConfig: string) => {
+            const tags: StoreTag[] = [StoreTag.PublicService]
+
+            RedisServiceMock.createClient.mockReturnValueOnce(redisClientRwMock)
+            RedisServiceMock.createClient.mockReturnValueOnce(redisClientRoMock)
+            redisClientRoMock.get.mockResolvedValue(tagsConfig)
+            redisClientRwMock.set.mockResolvedValue('OK')
+
+            const storeService = new StoreService(config, logger)
+
+            expect(await storeService.bumpTags(tags)).toBe('OK')
+            expect(redisClientRoMock.get).toHaveBeenCalledWith('_tags')
+            expect(redisClientRwMock.set).toHaveBeenCalledWith('_tags', expectedTagsConfig)
+        })
+    })
+
+    describe('method: `flushDb`', () => {
+        it('should successfully 
flush entire store', async () => { + RedisServiceMock.createClient.mockReturnValueOnce(redisClientRwMock) + RedisServiceMock.createClient.mockReturnValueOnce(redisClientRoMock) + redisClientRwMock.flushdb.mockResolvedValue('OK') + + const storeService = new StoreService(config, logger) + + expect(await storeService.flushDb()).toBe('OK') + expect(redisClientRwMock.flushdb).toHaveBeenCalledWith() + }) + }) +}) diff --git a/tsconfig.json b/tsconfig.json new file mode 100644 index 0000000..585bba1 --- /dev/null +++ b/tsconfig.json @@ -0,0 +1,12 @@ +{ + "extends": "@diia-inhouse/configs/tsconfig", + "compilerOptions": { + "outDir": "dist", + "declaration": true, + "declarationDir": "dist/types", + "baseUrl": ".", + "strict": true + }, + "include": ["src/**/*"], + "exclude": ["node_modules"] +}
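
Usage sketch: the tests above exercise StoreService's tag-based invalidation (set with tags, getUsingTags, bumpTags) and the remember helper. The snippet below is a minimal illustration of that flow and is not part of the diff; it assumes a local Redis on port 6379, exactly as tests/integration/store.spec.ts does, the key names and payloads are made up for the example, the TTL is in milliseconds (the store applies expirations with PX), and the import path is written relative to the repository root.

import DiiaLogger from '@diia-inhouse/diia-logger'

import { StoreService, StoreTag } from './src/index'

async function demo(): Promise<void> {
    const logger = new DiiaLogger()
    // Same connection shape the integration tests use: a single local Redis on port 6379
    const store = new StoreService({ readWrite: { port: 6379 }, readOnly: { port: 6379 } }, logger)

    // Write a value tagged with PublicService and a 60-second TTL (milliseconds)
    await store.set('public-service:menu', 'cached-payload', { tags: [StoreTag.PublicService], ttl: 60_000 })

    // Tag-aware read: returns the value while none of its tags have been bumped since it was written
    const cached = await store.getUsingTags('public-service:menu') // 'cached-payload'

    // Invalidate every value tagged with PublicService in one call
    await store.bumpTags([StoreTag.PublicService])

    const afterBump = await store.getUsingTags('public-service:menu') // null

    // remember() returns the cached value if the key exists, otherwise runs the closure and stores its result
    const recomputed = await store.remember('public-service:menu:v2', async () => 'recomputed-payload')

    logger.info('store demo finished', { cached, afterBump, recomputed })
}

void demo()

The design this relies on, as the unit tests show, is that bumpTags only rewrites the shared '_tags' entry with a fresh timestamp per tag; getUsingTags then treats any value whose stored timestamp is older than one of its tags as stale, which is exactly what the 'key becomes invalid when at least one tag is bumped' integration test asserts.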