diff --git a/mkdocs/.gitignore b/mkdocs/.gitignore
new file mode 100644
index 000000000..b6e47617d
--- /dev/null
+++ b/mkdocs/.gitignore
@@ -0,0 +1,129 @@
+# Byte-compiled / optimized / DLL files
+__pycache__/
+*.py[cod]
+*$py.class
+
+# C extensions
+*.so
+
+# Distribution / packaging
+.Python
+build/
+develop-eggs/
+dist/
+downloads/
+eggs/
+.eggs/
+lib/
+lib64/
+parts/
+sdist/
+var/
+wheels/
+pip-wheel-metadata/
+share/python-wheels/
+*.egg-info/
+.installed.cfg
+*.egg
+MANIFEST
+
+# PyInstaller
+# Usually these files are written by a python script from a template
+# before PyInstaller builds the exe, so as to inject date/other infos into it.
+*.manifest
+*.spec
+
+# Installer logs
+pip-log.txt
+pip-delete-this-directory.txt
+
+# Unit test / coverage reports
+htmlcov/
+.tox/
+.nox/
+.coverage
+.coverage.*
+.cache
+nosetests.xml
+coverage.xml
+*.cover
+*.py,cover
+.hypothesis/
+.pytest_cache/
+
+# Translations
+*.mo
+*.pot
+
+# Django stuff:
+*.log
+local_settings.py
+db.sqlite3
+db.sqlite3-journal
+
+# Flask stuff:
+instance/
+.webassets-cache
+
+# Scrapy stuff:
+.scrapy
+
+# Sphinx documentation
+docs/_build/
+
+# PyBuilder
+target/
+
+# Jupyter Notebook
+.ipynb_checkpoints
+
+# IPython
+profile_default/
+ipython_config.py
+
+# pyenv
+.python-version
+
+# pipenv
+# According to pypa/pipenv#598, it is recommended to include Pipfile.lock in version control.
+# However, in case of collaboration, if having platform-specific dependencies or dependencies
+# having no cross-platform support, pipenv may install dependencies that don't work, or not
+# install all needed dependencies.
+#Pipfile.lock
+
+# PEP 582; used by e.g. github.com/David-OConnor/pyflow
+__pypackages__/
+
+# Celery stuff
+celerybeat-schedule
+celerybeat.pid
+
+# SageMath parsed files
+*.sage.py
+
+# Environments
+.env
+.venv
+env/
+venv/
+ENV/
+env.bak/
+venv.bak/
+
+# Spyder project settings
+.spyderproject
+.spyproject
+
+# Rope project settings
+.ropeproject
+
+# mkdocs documentation
+/site
+
+# mypy
+.mypy_cache/
+.dmypy.json
+dmypy.json
+
+# Pyre type checker
+.pyre/
diff --git a/mkdocs/README.md b/mkdocs/README.md
new file mode 100644
index 000000000..153ff427c
--- /dev/null
+++ b/mkdocs/README.md
@@ -0,0 +1,28 @@
+# tank-docs
+This is the new manifestation of Tank's documentation, leveraging the very powerful docs solution "Material for MkDocs" (mkdocs-material)
+
+# What does this new doc site fulfill?
+
+This new doc site, being built on mkdocs-material,
+
+* Ships with a very user-friendly documentation ecosystem.
+* Ships with a powerful Search feature to easily search any content within the docs.
+* Provides a streamlined Navigation experience, being organised for the best understanding experience
+* The documentation itself has been reviewed and revamped at places, for better understandability
+* There are code blocks and content tabs, which keep the reader focused with its concise layout.
+* Last but not the least, this doc site is version controlled on Git, which is a plus.
+
+# Installation and ops
+## Running on local
+Get into the `mkdocs` folder.
+Install dependencies from Requirements file: `pip install -r requirements.txt`
+Run locally using command: `python3 -m mkdocs serve`
+
+## Publishing the docs site
+Publish docs site using command: `mkdocs gh-deploy --force`
+
+## References
+Detailed documentation for mkdocs-material is available under https://squidfunk.github.io/mkdocs-material/
+
+# Credits
+Made with ❤️ using `squidfunk/mkdocs-material`
\ No newline at end of file
diff --git a/mkdocs/docs/.pages b/mkdocs/docs/.pages
new file mode 100644
index 000000000..ebf886326
--- /dev/null
+++ b/mkdocs/docs/.pages
@@ -0,0 +1,7 @@
+nav:
+ - index.md
+ - Key Concepts: key-concepts
+ - roles.md
+ - administration.md
+ - supplemental-tools.md
+ - Set-up and Installation: installation-guide
\ No newline at end of file
diff --git a/mkdocs/docs/administration.md b/mkdocs/docs/administration.md
new file mode 100644
index 000000000..b8d1c0e5f
--- /dev/null
+++ b/mkdocs/docs/administration.md
@@ -0,0 +1,31 @@
+# Administration
+If you have administrator rights, you will see the `Admin` link at the top of the page. The `Admin` section allows you to view log files, administer users, and set log levels.
+
+## User Management
+
+As discussed in the `Users and Groups` section, Tank users have specific roles that dictate which actions they're allowed to perform on which entities. Users may have multiple roles, and any user may edit any entity which they own.
+
+
+
+### Assigning Roles
+
+To assign a role to a user, navigate to the `Admin` section and follow the link to `Administer Users`. Once you've selected a user to administer, change their roles by moving the desired options from the `Available Groups` column to the `Member Groups` column. When finished, click the `Save` button.
+
+
+
+
+# Log Viewer
+The log viewer allows you to see the server logs. It works like the `tail` Linux command.
+
+
+
+
diff --git a/mkdocs/docs/assets/TankLogo.svg b/mkdocs/docs/assets/TankLogo.svg
new file mode 100644
index 000000000..b61470939
--- /dev/null
+++ b/mkdocs/docs/assets/TankLogo.svg
@@ -0,0 +1,84 @@
+
+
+
+
diff --git a/mkdocs/docs/assets/admin/admin_1.png b/mkdocs/docs/assets/admin/admin_1.png
new file mode 100644
index 000000000..06a847d7e
Binary files /dev/null and b/mkdocs/docs/assets/admin/admin_1.png differ
diff --git a/mkdocs/docs/assets/admin/admin_2.png b/mkdocs/docs/assets/admin/admin_2.png
new file mode 100644
index 000000000..7a0612aec
Binary files /dev/null and b/mkdocs/docs/assets/admin/admin_2.png differ
diff --git a/mkdocs/docs/assets/admin/admin_3.png b/mkdocs/docs/assets/admin/admin_3.png
new file mode 100644
index 000000000..3e9ac488e
Binary files /dev/null and b/mkdocs/docs/assets/admin/admin_3.png differ
diff --git a/mkdocs/docs/assets/admin/admin_4.png b/mkdocs/docs/assets/admin/admin_4.png
new file mode 100644
index 000000000..7f0aa3011
Binary files /dev/null and b/mkdocs/docs/assets/admin/admin_4.png differ
diff --git a/mkdocs/docs/assets/datafiles/datafiles_1.png b/mkdocs/docs/assets/datafiles/datafiles_1.png
new file mode 100644
index 000000000..2964941ba
Binary files /dev/null and b/mkdocs/docs/assets/datafiles/datafiles_1.png differ
diff --git a/mkdocs/docs/assets/datafiles/datafiles_2.png b/mkdocs/docs/assets/datafiles/datafiles_2.png
new file mode 100644
index 000000000..bad2c937b
Binary files /dev/null and b/mkdocs/docs/assets/datafiles/datafiles_2.png differ
diff --git a/mkdocs/docs/assets/datafiles/datafiles_3.png b/mkdocs/docs/assets/datafiles/datafiles_3.png
new file mode 100644
index 000000000..abc5817d7
Binary files /dev/null and b/mkdocs/docs/assets/datafiles/datafiles_3.png differ
diff --git a/mkdocs/docs/assets/datafiles/datafiles_4.png b/mkdocs/docs/assets/datafiles/datafiles_4.png
new file mode 100644
index 000000000..c2f53d41a
Binary files /dev/null and b/mkdocs/docs/assets/datafiles/datafiles_4.png differ
diff --git a/mkdocs/docs/assets/datafiles/datafiles_5.png b/mkdocs/docs/assets/datafiles/datafiles_5.png
new file mode 100644
index 000000000..f6b50db0e
Binary files /dev/null and b/mkdocs/docs/assets/datafiles/datafiles_5.png differ
diff --git a/mkdocs/docs/assets/filters/filters_1.png b/mkdocs/docs/assets/filters/filters_1.png
new file mode 100644
index 000000000..f2b62e1c6
Binary files /dev/null and b/mkdocs/docs/assets/filters/filters_1.png differ
diff --git a/mkdocs/docs/assets/filters/filters_2.png b/mkdocs/docs/assets/filters/filters_2.png
new file mode 100644
index 000000000..6dd7c3031
Binary files /dev/null and b/mkdocs/docs/assets/filters/filters_2.png differ
diff --git a/mkdocs/docs/assets/filters/filters_3.png b/mkdocs/docs/assets/filters/filters_3.png
new file mode 100644
index 000000000..97e202aa0
Binary files /dev/null and b/mkdocs/docs/assets/filters/filters_3.png differ
diff --git a/mkdocs/docs/assets/filters/filters_4.png b/mkdocs/docs/assets/filters/filters_4.png
new file mode 100644
index 000000000..e7c798dfd
Binary files /dev/null and b/mkdocs/docs/assets/filters/filters_4.png differ
diff --git a/mkdocs/docs/assets/filters/filters_5.png b/mkdocs/docs/assets/filters/filters_5.png
new file mode 100644
index 000000000..073f85f38
Binary files /dev/null and b/mkdocs/docs/assets/filters/filters_5.png differ
diff --git a/mkdocs/docs/assets/filters/filters_6.png b/mkdocs/docs/assets/filters/filters_6.png
new file mode 100644
index 000000000..4063f4595
Binary files /dev/null and b/mkdocs/docs/assets/filters/filters_6.png differ
diff --git a/mkdocs/docs/assets/intuit_tank_projects_1.png b/mkdocs/docs/assets/intuit_tank_projects_1.png
new file mode 100644
index 000000000..e0afc3fd3
Binary files /dev/null and b/mkdocs/docs/assets/intuit_tank_projects_1.png differ
diff --git a/mkdocs/docs/assets/projects/projects_1.png b/mkdocs/docs/assets/projects/projects_1.png
new file mode 100644
index 000000000..e0afc3fd3
Binary files /dev/null and b/mkdocs/docs/assets/projects/projects_1.png differ
diff --git a/mkdocs/docs/assets/projects/projects_10.png b/mkdocs/docs/assets/projects/projects_10.png
new file mode 100644
index 000000000..660adf94f
Binary files /dev/null and b/mkdocs/docs/assets/projects/projects_10.png differ
diff --git a/mkdocs/docs/assets/projects/projects_11.png b/mkdocs/docs/assets/projects/projects_11.png
new file mode 100644
index 000000000..36c9bdcce
Binary files /dev/null and b/mkdocs/docs/assets/projects/projects_11.png differ
diff --git a/mkdocs/docs/assets/projects/projects_12.png b/mkdocs/docs/assets/projects/projects_12.png
new file mode 100644
index 000000000..42282eb1f
Binary files /dev/null and b/mkdocs/docs/assets/projects/projects_12.png differ
diff --git a/mkdocs/docs/assets/projects/projects_13.png b/mkdocs/docs/assets/projects/projects_13.png
new file mode 100644
index 000000000..1746418d1
Binary files /dev/null and b/mkdocs/docs/assets/projects/projects_13.png differ
diff --git a/mkdocs/docs/assets/projects/projects_14.png b/mkdocs/docs/assets/projects/projects_14.png
new file mode 100644
index 000000000..c79904a48
Binary files /dev/null and b/mkdocs/docs/assets/projects/projects_14.png differ
diff --git a/mkdocs/docs/assets/projects/projects_2.png b/mkdocs/docs/assets/projects/projects_2.png
new file mode 100644
index 000000000..9bf4b2bdb
Binary files /dev/null and b/mkdocs/docs/assets/projects/projects_2.png differ
diff --git a/mkdocs/docs/assets/projects/projects_3.png b/mkdocs/docs/assets/projects/projects_3.png
new file mode 100644
index 000000000..cfdd69b56
Binary files /dev/null and b/mkdocs/docs/assets/projects/projects_3.png differ
diff --git a/mkdocs/docs/assets/projects/projects_4.png b/mkdocs/docs/assets/projects/projects_4.png
new file mode 100644
index 000000000..876440480
Binary files /dev/null and b/mkdocs/docs/assets/projects/projects_4.png differ
diff --git a/mkdocs/docs/assets/projects/projects_5.png b/mkdocs/docs/assets/projects/projects_5.png
new file mode 100644
index 000000000..65b92a718
Binary files /dev/null and b/mkdocs/docs/assets/projects/projects_5.png differ
diff --git a/mkdocs/docs/assets/projects/projects_6.png b/mkdocs/docs/assets/projects/projects_6.png
new file mode 100644
index 000000000..cca5ef468
Binary files /dev/null and b/mkdocs/docs/assets/projects/projects_6.png differ
diff --git a/mkdocs/docs/assets/projects/projects_7.png b/mkdocs/docs/assets/projects/projects_7.png
new file mode 100644
index 000000000..6f693f140
Binary files /dev/null and b/mkdocs/docs/assets/projects/projects_7.png differ
diff --git a/mkdocs/docs/assets/projects/projects_8.png b/mkdocs/docs/assets/projects/projects_8.png
new file mode 100644
index 000000000..efc1252bd
Binary files /dev/null and b/mkdocs/docs/assets/projects/projects_8.png differ
diff --git a/mkdocs/docs/assets/projects/projects_9.png b/mkdocs/docs/assets/projects/projects_9.png
new file mode 100644
index 000000000..34512f6b0
Binary files /dev/null and b/mkdocs/docs/assets/projects/projects_9.png differ
diff --git a/mkdocs/docs/assets/scripts/scripts_1.0.png b/mkdocs/docs/assets/scripts/scripts_1.0.png
new file mode 100644
index 000000000..4749d0fa5
Binary files /dev/null and b/mkdocs/docs/assets/scripts/scripts_1.0.png differ
diff --git a/mkdocs/docs/assets/scripts/scripts_1.png b/mkdocs/docs/assets/scripts/scripts_1.png
new file mode 100644
index 000000000..e0a6b43b2
Binary files /dev/null and b/mkdocs/docs/assets/scripts/scripts_1.png differ
diff --git a/mkdocs/docs/assets/scripts/scripts_2.png b/mkdocs/docs/assets/scripts/scripts_2.png
new file mode 100644
index 000000000..d54192972
Binary files /dev/null and b/mkdocs/docs/assets/scripts/scripts_2.png differ
diff --git a/mkdocs/docs/assets/scripts/scripts_3.png b/mkdocs/docs/assets/scripts/scripts_3.png
new file mode 100644
index 000000000..4d24464d8
Binary files /dev/null and b/mkdocs/docs/assets/scripts/scripts_3.png differ
diff --git a/mkdocs/docs/assets/scripts/scripts_4.png b/mkdocs/docs/assets/scripts/scripts_4.png
new file mode 100644
index 000000000..48dd4242e
Binary files /dev/null and b/mkdocs/docs/assets/scripts/scripts_4.png differ
diff --git a/mkdocs/docs/assets/scripts/scripts_5.png b/mkdocs/docs/assets/scripts/scripts_5.png
new file mode 100644
index 000000000..200cc2f6f
Binary files /dev/null and b/mkdocs/docs/assets/scripts/scripts_5.png differ
diff --git a/mkdocs/docs/assets/scripts/scripts_6.png b/mkdocs/docs/assets/scripts/scripts_6.png
new file mode 100644
index 000000000..55b710f5d
Binary files /dev/null and b/mkdocs/docs/assets/scripts/scripts_6.png differ
diff --git a/mkdocs/docs/assets/scripts/scripts_7.png b/mkdocs/docs/assets/scripts/scripts_7.png
new file mode 100644
index 000000000..ff375ddb7
Binary files /dev/null and b/mkdocs/docs/assets/scripts/scripts_7.png differ
diff --git a/mkdocs/docs/assets/scripts/scripts_8.png b/mkdocs/docs/assets/scripts/scripts_8.png
new file mode 100644
index 000000000..10b21eff4
Binary files /dev/null and b/mkdocs/docs/assets/scripts/scripts_8.png differ
diff --git a/mkdocs/docs/assets/tools/tools_1.png b/mkdocs/docs/assets/tools/tools_1.png
new file mode 100644
index 000000000..d9ec5ed02
Binary files /dev/null and b/mkdocs/docs/assets/tools/tools_1.png differ
diff --git a/mkdocs/docs/assets/tools/tools_2.png b/mkdocs/docs/assets/tools/tools_2.png
new file mode 100644
index 000000000..531a10282
Binary files /dev/null and b/mkdocs/docs/assets/tools/tools_2.png differ
diff --git a/mkdocs/docs/gatlings-vs-tank.md b/mkdocs/docs/gatlings-vs-tank.md
new file mode 100644
index 000000000..826b9babd
--- /dev/null
+++ b/mkdocs/docs/gatlings-vs-tank.md
@@ -0,0 +1,37 @@
+# Ideology: Gatling vs Tank
+
+
+- :material-clock-fast:{ .lg .middle } __Set up in 5 minutes__
+
+ ---
+
+ Install [`mkdocs-material`](#) with [`pip`](#) and get up
+ and running in minutes
+
+ [:octicons-arrow-right-24: Getting started](#)
+
+- :fontawesome-brands-markdown:{ .lg .middle } __It's just Markdown__
+
+ ---
+
+ Focus on your content and generate a responsive and searchable static site
+
+ [:octicons-arrow-right-24: Reference](#)
+
+- :material-format-font:{ .lg .middle } __Made to measure__
+
+ ---
+
+ Change the colors, fonts, language, icons, logo and more with a few lines
+
+ [:octicons-arrow-right-24: Customization](#)
+
+- :material-scale-balance:{ .lg .middle } __Open Source, MIT__
+
+ ---
+
+ Material for MkDocs is licensed under MIT and available on [GitHub]
+
+ [:octicons-arrow-right-24: License](#)
+
+
diff --git a/mkdocs/docs/index.md b/mkdocs/docs/index.md
new file mode 100644
index 000000000..5395b9815
--- /dev/null
+++ b/mkdocs/docs/index.md
@@ -0,0 +1,21 @@
+# Welcome to Intuit Tank
+
+Intuit Tank (called `The Tank` for brevity) is an open-source load test platform from Intuit that runs in a cloud environment. It currently supports Amazon web interfaces and utilizes services from `EC2`, `S3` and `DynamoDB`.
+
+## Constituents
+Tank has two main components: A Controller Interface and (one or many) agent(s).
+
+### Controller
+The Controller is the central hub/Control Plane for Tank. It supports the GUI for managing tests and for orchestrating.
+It utilizes Apache Tomcat for a web container and ActiveMQ as a message queue for communication.
+It exposes a `REST`ful interface for invoking services. It stores data in a `SQL` database, and uses an `AWS S3` bucket as a shared filesystem.
+The Controller also interfaces with an instance of a `JMS` Queue (`ActiveMQ`), usually on the same instance as the controller. The Message Queue is used to coordinate starting and coordination of the load tests.
+The Controller can be accessed at the context root of `http://[baseUrlOfAmazonInstance]/tank`
+
+### Agent(s)
+Agent(s) are instantiated on demand and exist for the duration of a test. They communicate with the controller via a combination of RESTful interfaces as well as the Message Queue.
+
+### Optional Components
+Optional components can include a *log aggregator* as well as other instances to be started at the beginning of a test. These are configured in the `settings.xml` and can be stopped and re-used or terminated at the end of a test.
+
+The next few sections will delve deep into the Key Concepts of Tank and Tools that assist in making Load testing with Tank a seamless experience.
\ No newline at end of file
diff --git a/mkdocs/docs/installation-guide/.pages b/mkdocs/docs/installation-guide/.pages
new file mode 100644
index 000000000..57f9f4727
--- /dev/null
+++ b/mkdocs/docs/installation-guide/.pages
@@ -0,0 +1,2 @@
+nav:
+ - installation.md
\ No newline at end of file
diff --git a/mkdocs/docs/installation-guide/installation.md b/mkdocs/docs/installation-guide/installation.md
new file mode 100644
index 000000000..7f1325300
--- /dev/null
+++ b/mkdocs/docs/installation-guide/installation.md
@@ -0,0 +1,569 @@
+# Set-up & Installation
+
+## Preface
+
+The purpose of this guide is to describe the IT concepts and strategies employed by Tank.
+
+Tank is meant to be an integrated testing solution for driving load against production systems. It includes a central controller for coordinating test agents and driving the UI for setting up and reporting on the tests.
+The agents are instantiated on demand and terminated when they are no longer needed. This helps keep the costs of running large tests to a minimum.
+
+Tank is meant to be a load test solution and is not currently optimized for functional testing. This does not mean that it is not useful for functional testing, but many of the paradigms and toolsets needed for functional testing may be lacking.
+
+### Cloud Services Used
+Tank uses a gamut of Cloud Services to fulfill the mission of Load Testing. The below section (expand for more info) gives a brief on each of the cloud services.
+
+??? abstract "Cloud Services used"
+
+ === "EC2"
+
+ Elastic Compute Cloud (`EC2`) is the service that is used to create compute resources to drive load (agents). There are several parts of this service used.
+
+ * `Instances` are the actual compute resources used to run load.
+ * `Security Groups` are used to control access to and from the agent instances and the controller instance.
+ * `AMIs` are used as the templates for instantiating new resources.
+ * `Load balancers` are used to distribute load to the controller.
+ * `Elastic IPs` are used to bind specific IP addresses to agent instances and can be used in firewall rules to allow access from agents to the datacenter.
+ * `Volumes` are used for persisted data that is not stored in `S3`.
+
+ === "S3"
+ Simple Storage Service is used to store document-based items such as data files and response time reports.
+
+ === "DynamoDB"
+ DynamoDb is a key value data store used to collect response metrics during the execution of a test. They are typically aggregated after the test is run.
+
+ === "RDS"
+ Relational Data Service (`MySQL`) is used to store the metadata about the projects including scripts, filters, and project configuration used by the controller.
+
+ === "IAM"
+ Identity and Access Management is used to control access and permissions to cloud resources.
+
+
+
+## Environment
+
+??? tip "Payment set-up for creating Cloud Account"
+ You will need a Credit Card or arrange payment in order to complete the following steps.
+
+ * Go to http://aws.amazon.com/ and click on the "Sign Up" button.
+ * Complete all steps for creating an account.
+
+### Retrieve Security Credentials
+There are two methods of setting up access to resources from the controller and agent instances.
+
+!!! abstract "Access Mechanism"
+
+ === "IAM role"
+
+ Use IAM roles to start the instances giving them access to the needed resources. If you use this method, just make sure you start your resources with the appropriate roles.
+
+ === "Access Keys"
+
+ * Log in to the amazon console from `http://aws.amazon.com/`
+ * In the top right corner where the account name shows up, click it and select `Security Credentials` from the drop down menu.
+ * Scroll down to the `Access Keys` section and click `Create a new Access Key`.
+ * Copy the `Access Key ID` from the newly created key.
+ * Click on `Show` section and copy the `Secret Access Key`.
+ * Store these values in a file on your computer somewhere, you will need them for configuration later.
+
+
+### Creating Essential Resources
+
+
+!!! abstract "Creating Essential Resources"
+
+ === "Key Pair"
+
+        The keypair is used to ssh into the instances that get started. Do not lose these keys as they cannot be retrieved from AWS.
+
+ * Log in to the amazon console from `http://aws.amazon.com/`
+ * Go to `EC2` tab
+ * Click on `Key Pairs` link under the `Network & Security` section in left navigation panel
+ * Create a Key Pair and save the downloaded `pem` file somewhere safe `ssh-keygen -y -f [path/to/pem/file].pem [path/to/key/file].pub`
+ * Change the Mod settings on the key file to 600 `sudo chmod 600 [path/to/pem/file].pem [path/to/key/file].pub`
+
+ !!! tip "Tip"
+ Optionally create an `environment variable` for the key. We will use the environment variable `AWS_KEY` in the rest of this documentation to refer to the key file. You could add the following command into your `bashrc` or `profile` file as well. `export AWS_KEY=/Home/aws_keys/tank_key.pem`
+
+ === "Security Group"
+
+ * Log in to the amazon console `from http://aws.amazon.com/`
+ * Go to `EC2` tab
+ * Click on `Security Groups` link under the `Network & Security` section in left navigation panel
+ * Click the `Create Security Group` button in the tool bar area
+ * Name the security group something appropriate like tank.
+ * Select the new Group and click the `Inbound tab` in the lower panel.
+ * Add appropriate rules. At a minimum, you will need `SSH`, `HTTP` (80 and 8080), `HTTPS`(443 and 8443), and `8090`(agent)
+
+
+ === "RDS Instance"
+
+        If you wish, you can create your own `MySQL` instance and point to it instead.
+
+ * Log in to the amazon console from `http://aws.amazon.com/`
+ * Go to `RDS` tab
+        * Click on `Instances` link in left navigation panel
+ * Click the `Launch DB Instance` button in the tool panel
+ * Select `mysql` instance
+ * Set all options appropriately.
+
+
+
+ === "Base AMI"
+ Agent AMIs must be created in each of the Amazon regions that you wish to run tests from. You can use `Cloudformation` or `chef` to provision your instances if you wish.
+
+ !!! tip "Tip"
+ There is a Cloudformation script and resources in aws-config in the root of this project's directory that will create the controller and the agent instances that can be used as a starting point for creating the AMIs.
+
+
+ * Log in to the amazon console from http://aws.amazon.com/
+ * Go to EC2 tab
+ * Click on AMIs link under the Images section in left navigation panel
+ * Find a suitable 64-bit base image backed by EBS and launch an instance. It does not matter which size or zone you start it in as it will only be used to create a base AMI.
+ * After it is started, connect via ssh
+ * Download and install the latest version of Java and add an entry in /etc/profile for a JAVA_HOME variable pointing to the base java directory.
+ * Ensure that java is installed and that the JAVA_HOME environment variable is set in /etc/profile
+ * Set the maximum number of open files limit to at least 50000 `echo ulimit -n 50000 >>/etc/profile`
+ * Select the instance and choose Create Image (EBS AMI) from the Instance Actions dropdown in the toolbar area.
+ * Give it an appropriate name such as Intuit Tank Base and click OK.
+ * After the instance is ready, terminate the instance you started.
+
+ === "Controller Instance and its AMI"
+ * Log in to the amazon console from http://aws.amazon.com/
+ * Go to EC2 tab
+ * Click on AMIs link under the Images section in left navigation panel
+ * Launch an instance from your Intuit Tank Base AMI. Select m1.xlarge and the zone your EBS volume is installed in.
+ * Click on Elastic IPs link under the Network and Security section in left navigation panel
+ * Click the Allocate New Address button.
+ * Click the new address and click the Associate Address button.
+ * Select the new instance you just launched and select ok.
+ * Connect to the instance `ssh -i $AWS_KEY root@[instance]`
+ * Download and Install Tomcat 6x from apache `wget http://tomcat.apache.org/download-60.cgi`
+ * Download mysql connector and install in TOMCAT_HOME/lib `wget http://dev.mysql.com/get/Downloads/Connector-J/mysql-connector-java-5.1.30.tar.gz` or `or wget http://dev.mysql.com/get/Downloads/Connector-J/mysql-connector-java-5.1.30.zip`
+ * Uncompress and move the jar file mysql-connector-java-5.1.30-bin.jar to TOMCAT_HOME/lib
+ * Download Weld support jar and install in TOMCAT_HOME/lib `wget -O TOMCAT_HOME/lib http://central.maven.org/maven2/org/jboss/weld/servlet/weld-tomcat-support/1.0.1-Final/weld-tomcat-support-1.0.1-Final.jar`
+ * Upload Intuit Tank war file from your build machine `upload PROJECT_ROOT/web/web_ui/target/tank.war from your local machine.`
+ * Move it to the webapps dir `For context of /tankmv tank.war TOMCAT_HOME/webapps/` and `For context of /mv tank.war TOMCAT_HOME/webapps/ROOT.war`
+ * Create the tank directories. `mkdir /mnt/ebs/wats; mkdir /mnt/ebs/wats/conf; mkdir /mnt/ebs/wats/jars`
+ * Add Datasource definition to server.xml ``
+
+ === "Agent AMI"
+
+ * Log in to the amazon console from http://aws.amazon.com/
+ * Go to EC2 tab
+ * Click on AMIs link under the Images section in left navigation panel
+ * Launch an instance from your Intuit Tank Base AMI.
+ * Connect to the instance ssh -i AWS_KEY root@[instance]
+ * Upload the agent startup zip file from your build machine PROJECT_ROOT/agent/agent_startup_pkg/target/agent-startup-pkg.zip
+ * unzip the file to /opt cd /opt ; unzip ~/agent-startup-pkg.zip
+ * Move startup script to /etc/init.d mv tank_agent/tank-agent /etc/init.d/
+ * Set it to start on startup chkconfig tank-agent on
+ * Select the instance and choose Create Image (EBS AMI) from the Instance Actions dropdown in the toolbar area.
+ * Give it an appropriate name such as Intuit Tank Agent and click OK.
+ * After the instance is ready, terminate the instance you started.
+
+## Standalone Deployment
+For standalone deployments, you will need to install the controller, database, and as many agent machines as needed. These steps assume that you have java installed on all machines needed.
+
+??? abstract "Standalone deployment"
+
+ === "Install Server Components"
+        On the machine that will serve as the controller.
+
+ * Download and Install `Tomcat 6x` from apache `http://tomcat.apache.org/download-60.cgi`
+ * Download mysql connector and install in `TOMCAT_HOME/lib`
+ `wget http://dev.mysql.com/get/Downloads/Connector-J/mysql-connector-java-5.1.30.tar.gz`
+ * Uncompress and move the jar file `mysql-connector-java-nnnn.jar` to `TOMCAT_HOME/lib`
+ * Download Weld support jar and install in TOMCAT_HOME/lib `wget -O TOMCAT_HOME/lib http://central.maven.org/maven2/org/jboss/weld/servlet/weld-tomcat-support/1.0.1-Final/weld-tomcat-support-1.0.1-Final.jar`
+ * Build the project `mvn clean install -P release`
+ * Move the war to the tomcat webapp directory as tank.war or ROOT.war `For context /tank mv PROJECT_ROOT/web/web_ui/target/tank.war /opt/tomcat/webapps/tank.war` AND `For context /mv PROJECT_ROOT/web/web_ui/target/tank.war /opt/tomcat/webapps/ROOT.war`
+ * Install `MySQL` server
+ * Change the root password `mysqladmin -u root password 'NEW_PASSWORD'`
+ * Start `MySQL` client `mysql –u root –p`
+ * Create tank schema `CREATE SCHEMA wats DEFAULT CHARACTER SET utf8;`
+ * Create the tank directories. `mkdir [TS_HOME]; mkdir [TS_HOME]/conf; mkdir [TS_HOME]/jars`
+ * Add variable declarations to `/etc/profile` and add the following `export WATS_PROPERTIES=[TS_HOME]/conf`
+ * Start and stop tomcat to initialize the system. Ensure that the settings file is created in $WATS_PROPERTIES and that the database tables are created.
+ * Edit the `settings.xml` file. Change the standalone entry to true and change any other settings.
+ * Add Datasource definition to `server.xml`. Edit the file `[TOMCAT_HOME]/conf/server.xml`. Inside the tag add the following replacing values appropriately.
+ ```
+ ```
+ * Add Datasource definition to context.xml. Edit the file [TOMCAT_HOME]/conf/context.xml. Inside the tag add the following:
+ ```
+ ```
+
+ === "Create Agents"
+ Agents will typically run on separate machines. The default is `4000` users per agent but that number can be adjusted by editing the `run.sh` script.
+
+ * Upload the agent standalone zip file `PROJECT_ROOT/agent/agent_standalone_pkg/target/agent-standalone-pkg.zip`
+ * unzip the file `unzip agent-standalone-pkg.zip`
+ * Read the `README.txt` for further instructions.
+
+## Deployment Strategy
+Build the project from the root using the release profile.
+
+`mvn clean install -P release`
+
+Deployment is currently done uploading the war to the TOMCAT_HOME/webapps directory and restarting the server.
+
+Deployment can also be done via Cloudformation scripts that create new controller resources and then switching the load balancer to point at the new resource. This is the preferred method as it requires no downtime.
+
+## Configuration
+Configuration is achieved via an XML file called `settings.xml`. The default is `[TOMCAT_HOME]/settings.xml`. The directory location can be specified by setting the environment variable `WATS_PROPERTIES`.
+
+!!! tip "Tip"
+
+ * To get the default settings file, start the server and then stop it. If the `settings.xml` file is not available, the default settings will be created. You can then edit it.
+ * This configuration file is broken into several different sections. Each section is independent of the other sections and can occur in any order within the document.
+ * Relative paths are from the `tomcat` home.
+
+
+### Configuration Entries
+
+??? abstract "Configuration Entries"
+
+ === "Global Configuration Entries"
+
+ ``` xml
+
+ datafiles
+
+
+ timing
+
+
+ jars
+
+
+ tmpfiles
+
+
+ tank
+
+
+ http://localhost:8080/tank
+
+
+ false
+
+
+ true
+
+
+
+ All Products
+ My Product
+
+
+
+
+
+
+
+
+
+
+ localhost
+ 25
+ do_not_reply@myCompany.com
+
+
+
+
+
+ com.intuit.tank.reporting.db.DatabaseResultsReporter
+ com.intuit.tank.reporting.db.DatabaseResultsReader
+
+
+
+
+
+
+
+ ```
+
+ === "Agent Configuration Entries"
+
+ ``` xml
+
+
+
+
+
+
+
+
+
+ /tmp
+
+
+ 8090
+
+
+ 5000
+
+
+ 360000
+
+
+ 15000
+
+ 30000
+
+
+ 5000
+
+
+ 180000
+
+
+ 7200000
+
+
+ false
+
+
+ false
+
+
+ false
+
+
+
+ .*text.*
+ .*json.*
+ .*xml.*
+
+
+
+
+ test_flag
+
+
+
+
+ com.intuit.tank.httpclient3.TankHttpClient3
+ com.intuit.tank.httpclient4.TankHttpClient4
+ com.intuit.tank.okhttpclient.TankOkHttpClient
+
+ Apache HttpClient 4.5
+
+
+
+ ```
+
+ === "VM Manager Entries"
+
+ ``` xml
+
+ US_EAST
+
+
+ false
+
+
+
+
+
+
+
+ security_group
+ myKey
+
+
+
+
+ false
+
+
+
+
+
+ AWS_SECRET_KEY_ID
+ AWS_SECRET_KEY
+
+
+
+
+
+
+
+
+
+ [AMI-ID]
+ [KEYPAIR]
+
+
+
+
+
+
+
+
+
+ [AMI-ID]
+ [KEYPAIR]
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+ 3m
+
+ 5m
+
+ 2
+
+ 30s
+
+
+
+
+
+ 50
+ 10
+
+ com.intuit.tank.persistence.databases.AmazonDynamoDatabaseDocApi
+
+
+
+
+
+ ```
+
+
+ === "Security and Access control Entries"
+
+ ``` xml
+
+
+
+ admin
+ user
+ script-manager
+ project-manager
+ job-manager
+ guest
+
+
+
+
+
+
+ user
+ project-manager
+
+
+
+
+ project-manager
+
+
+
+
+ project-manager
+
+
+
+
+
+ user
+ script-manager
+
+
+
+
+ script-manager
+
+
+
+
+ script-manager
+
+
+
+
+
+ user
+ script-manager
+
+
+
+
+ script-manager
+
+
+
+
+ script-manager
+
+
+
+
+
+ user
+ script-manager
+ project-manager
+
+
+
+
+ script-manager
+ project-manager
+
+
+
+
+
+ job-manager
+ project-manager
+
+
+
+
+
+
+ admin
+ admin
+ email@company.com
+ admin
+
+
+
+ ```
diff --git a/mkdocs/docs/installation.md b/mkdocs/docs/installation.md
new file mode 100644
index 000000000..d1abd52ff
--- /dev/null
+++ b/mkdocs/docs/installation.md
@@ -0,0 +1,9 @@
+# Installation Guide
+
+## Local Installation
+
+
+* `mkdocs new [dir-name]` - Create a new project.
+* `mkdocs serve` - Start the live-reloading docs server.
+* `mkdocs build` - Build the documentation site.
+* `mkdocs -h` - Print help message and exit.
\ No newline at end of file
diff --git a/mkdocs/docs/key-concepts/.pages b/mkdocs/docs/key-concepts/.pages
new file mode 100644
index 000000000..484a24345
--- /dev/null
+++ b/mkdocs/docs/key-concepts/.pages
@@ -0,0 +1,13 @@
+nav:
+ - projects.md
+ - scripts.md
+ - filters.md
+ - functions_variables.md
+ - Types Of In-built Functions:
+ - io-functions.md
+ - string-functions.md
+ - date-functions.md
+ - monetary-functions.md
+ - numeric-functions.md
+ - tax-functions.md
+ - datafiles.md
\ No newline at end of file
diff --git a/mkdocs/docs/key-concepts/datafiles.md b/mkdocs/docs/key-concepts/datafiles.md
new file mode 100644
index 000000000..4285a0398
--- /dev/null
+++ b/mkdocs/docs/key-concepts/datafiles.md
@@ -0,0 +1,61 @@
+# Data Files
+Data Files are external sources of data for your scripts. The lines will get evenly divided up among the agents for use.
+
+
+## Uploading Data Files
+Clicking the `Data Files` tab takes the user to the main screen, which lists all the previously created Data Files.
+
+
+
+!!! info "Note"
+ Currently, Tank supports data files with `CSV` (Comma Separated Values), and `XML` format only.
+
+The data file is read into shared memory only once, even if it is read by multiple virtual users.
+
+Usually you will use one of the `functions` below only once in your script for each file you load.
+The following two functions can be used in Tank to read a file:
+
+* `#{ioFunctions.getCSVData(COLUMN_NUMBER)}` - This function is used to read a default data file associated with the project. A Data File is default if there is only one in a particular job.
+* `#{ioFunctions.getCSVData('FILE_NAME', COLUMN_NUMBER)}` - This function can be used to read a specific data file when you have to read from multiple data files used for the same script or multiple scripts in a project (i.e. separate data files for personal information and login credentials).
+
+!!! abstract "Tip"
+ * Column Number 0 represents the 1st column in the file.
+ * It is important that all the user data files needed by your test script are added to your project.
+
+
+Clicking on the `Magnify` icon will allow you to see the contents of the data file.
+
+
+
+## Steps to Data File Upload
+1. In the `Data Files` Tab, click the `Upload` link to add data files. The `Upload Data Files` dialog box opens.
+
+2. From the `Upload Data Files` dialog box, click `Choose` to navigate and select the data file(s) or zip archives to add to the project.
+If a zip archive is uploaded, Tank will recursively upload all files with the `.csv` extension.
+
+3. Click either `Upload` or the `arrow` next to the box to upload the data file. You can also click the `Cancel` button if you choose not to upload the data file.
+
+
+
+
+
+
+
+
+
+
diff --git a/mkdocs/docs/key-concepts/date-functions.md b/mkdocs/docs/key-concepts/date-functions.md
new file mode 100644
index 000000000..122ae002f
--- /dev/null
+++ b/mkdocs/docs/key-concepts/date-functions.md
@@ -0,0 +1,7 @@
+# Date Functions
+
+| Method Definition | Description | Parameters | Example |
+| :---------------------------------------| :----------------------------------: | :----:| :--------:|
+| `addDays(int days, String format)`| Adds days to the current date. | 1. `days`: Integer The number of days. Pass in a negative value to subtract days. 2. `format`: String The date format string to use. If empty, will use default for locale. | `#{dateFunctions.addDays(5,'yyyy-MM-dd')}` produces `2011-11-20` on `November 15, 2011` |
+| `currentDate(String format, String timeZone)`| Current Date. Get the current date. | 1. `format`: String The date format string to use. If empty, will use default for locale. 2. `timeZone`: String (optional) The timezone to use. (e.g. 'PST', 'America/Los_Angeles', or 'GMT'). If empty, will use default for locale. | `#{dateFunctions.currentDate('yyyy-MM-dd', 'PST')}` produces `2011-11-15` on `November 15, 2011` |
+| `currentTimeMilis()`| Gets the current Time in milliseconds since January 1, 1970. (Unix epoch time) | - | `#{dateFunctions.currentTimeMilis()}` produces a long number like `1357842009812` |
\ No newline at end of file
diff --git a/mkdocs/docs/key-concepts/filters.md b/mkdocs/docs/key-concepts/filters.md
new file mode 100644
index 000000000..766c776b8
--- /dev/null
+++ b/mkdocs/docs/key-concepts/filters.md
@@ -0,0 +1,61 @@
+# Filters and Filter Groups
+
+`Filters` are enhancements made to a `pre-recorded` script, which allow the script to behave more like a real user. Filters enable the script to be more dynamic, where it can behave a certain way when certain conditions are met.
+Filters can add `functions`, `thinktime`, and change script behavior, etc.
+
+`Filter Groups` are created to contain multiple filters that are unique to that group. For e.g, A `TTO Production` Filter group will have only filters that are unique to that type of user.
+
+When uploading a script, you will be given the option to choose the filters and groups you want to process the script with.
+Selecting a group will select all the filters associated with that group automatically by default. You may choose to remove those that are not required.
+
+
+
+### Creating a Filter
+
+1. Click on the `New Filter` icon to open the `New Script Filter` form.
+
+
+2. A filter can be `internal` or `external` (`Javascript`). Select the type on the radio button and the form will change depending on the mode.
+
+
+ - `Internal Filters`
+        - Internal filters consist of one or more Conditions and one or more Actions. Conditions determine if this filter matches a particular request and Actions define the transformation to apply when the conditions match.
+ - Set the `Name`, `Product` and whether the conditions are `match all` or `match any`.
+ - Click on `Add Condition` and `Add Action` and set the properties appropriately.
+
+
+ - `External Filters`
+ - External filters consist of a reference to an external javascript file.
+ - Set the Name, Product and Script.
+
+
+
+
+### Creating a Filter Group
+1. Click on the `New Filter Group` icon to open the `Filter Group` selection form.
+2. Give the Filter Group a unique `name`, select the `Product`, and select from the list of `filters` that belong to that group.
+
+
+
+
+
+
+
diff --git a/mkdocs/docs/key-concepts/functions_variables.md b/mkdocs/docs/key-concepts/functions_variables.md
new file mode 100644
index 000000000..80adc49b4
--- /dev/null
+++ b/mkdocs/docs/key-concepts/functions_variables.md
@@ -0,0 +1,28 @@
+# Functions and Variables
+
+Functions can be used to manipulate data during test execution. They can be used anywhere in place of a Value.
+
+Functions and Variables both use the same format. They start with either a `#` or `$` and then are enclosed in curly braces. `e.g. #{functionType.function(param, param2)}` or `${functionType.function(param, param2)}`. Variables have no parentheses. `e.g. #{varName}`
+
+Functions or variables can be nested in other functions.
+`e.g. #{stringFunctions.toBase64(ioFunctions.getFileData(fileName))}`
+In this example the variable is `fileName` which is passed to the `getFileDataFunction` which is passed to the `toBase64` function.
+
+??? info "Example: Using Functions"
+ * Declaring the function `#{stringFunctions.concat('Intuit ', 'Tank ',' Rocks')}` would yield `Intuit Tank Rocks`.
+    * Declaring the function `#{numericFunctions.mod(authId, 10)}` with the variable `${authId}` defined as `51` would result in `51 % 10 = 1`.
+
+
+## Types of Functions
+There are 6 built in function types.
+
+- `ioFunctions`
+- `stringFunctions`
+- `dateFunctions`
+- `monetaryFunctions`
+- `numericFunctions`
+- `taxFunctions`
+
+
+??? info "Tip: Types of Functions and their usage"
+    Check out the sub-section `Types of Functions` on the left navigation bar for information on each Function Type and their usage.
\ No newline at end of file
diff --git a/mkdocs/docs/key-concepts/io-functions.md b/mkdocs/docs/key-concepts/io-functions.md
new file mode 100644
index 000000000..2d0ffa4b0
--- /dev/null
+++ b/mkdocs/docs/key-concepts/io-functions.md
@@ -0,0 +1,9 @@
+# IO Functions
+
+| Method Definition | Description | Parameters | Example |
+| :----------------------------------| :---------------------------------- | :----------| :----------|
+| `getCSVData(String fileName)` | Gets the value of the first `columnNumber` from the specified csv file. equivalent of calling `getCSVData(fileName, 0, false)` | `fileName`: String indicating the name of the csv file to read data from. | `#{ioFunctions.getCSVData('myData.csv')}` returns the first column in the csv file `myData.csv`.|
+| getCSVData(String fileName, int columnIndex) | Gets the value of the specified `columnNumber` from the specified csv file. equivalent of calling `getCSVData(fileName, columnIndex, false)` | 1. `fileName`: String indicating the name of the csv file to read data from. 2. `columnIndex`: Integer Zero indexed column number.| `#{ioFunctions.getCSVData('myData.csv', 1)}` returns the second column in the csv file `myData.csv`.|
+| getCSVData(String fileName, int columnIndex, boolean loop) | Gets the value of the specified columnNumber from the specified csv file. | `fileName`: String indicating the name of the csv file to read data from.`columnIndex`: Integer Zero indexed column number.`loop`: Boolean true to go back to the first line after all lines are read. | `#{ioFunctions.getCSVData('myData.csv', 1, true)}` returns the second column in the csv file `myData.csv`.|
+| getFileData(String fileName) | Reads the specified file and returns it as a String. | `fileName`: String indicating the name of the csv file to read data from.| `#{ioFunctions.getFileData('myData.txt')}` returns the contents of the file `myData.txt` as a String.|
+| getFileBytes(String fileName) | Reads the specified file and returns it as a byte array (`byte[]`). This function can only be used as input to another function that returns a String such as toBase64. | `fileName`: String indicating the name of the csv file to read data from.| `#{ioFunctions.getFileBytes('myData.txt')}` returns the contents of the file myData.txt as a String|
\ No newline at end of file
diff --git a/mkdocs/docs/key-concepts/monetary-functions.md b/mkdocs/docs/key-concepts/monetary-functions.md
new file mode 100644
index 000000000..487c0ff4e
--- /dev/null
+++ b/mkdocs/docs/key-concepts/monetary-functions.md
@@ -0,0 +1,6 @@
+# Monetary Functions
+
+| Method Definition | Description | Parameters | Example |
+| :---------------------------------------| :----------------------------------: | :----:| :--------:|
+| `randomPositive(int length)`| Gets a random positive money amount. | 1. `length`: Integer (required) The number of digits in the whole amount. | `#{monetaryFunctions.randomPositive(3)}` produces a random monetary amount between `100.00` and `999.99`|
+| `randomNegative(int length)`| Gets a random negative money amount.| 1. `length`: Integer (required) The number of digits in the whole amount. | `#{monetaryFunctions.randomNegative(3)}` produces a random monetary amount between `-100.00` and `-999.99`|
diff --git a/mkdocs/docs/key-concepts/numeric-functions.md b/mkdocs/docs/key-concepts/numeric-functions.md
new file mode 100644
index 000000000..4fbbb0458
--- /dev/null
+++ b/mkdocs/docs/key-concepts/numeric-functions.md
@@ -0,0 +1,13 @@
+# Numeric Functions
+
+| Method Definition | Description | Parameters | Example |
+| :---------------------------------------| :----------------------------------: | :----:| :--------:|
+| `add(double... values)`| Adds a list of values | `values`: Doubles (required) Variable number of number to add together. |`#{numericFunctions.add(3,2,6)}` results in `3 + 2 + 6 = 11`|
+| `subtract(double... values)`| Subtracts a list of values from the first value.| 1. `values`: Doubles (required) Variable number of number to subtract. | `#{numericFunctions.subtract(10,2,3)}` results in `10 - 2 - 3 = 5`|
+| `mod(int value, int modulo)`| Performs a modulo operation, or the whole remainder of a division operation.| 1. `value`: Integer (required) The number to modulo. 2. `modulo`: Integer (required). | `#{numericFunctions.mod(5,4)}` results in `5 % 4 = 1`|
+| `randomPositiveWhole(int length)`| Gets a random positive integer value.| `length`: Integer (required) The number of digits in the whole amount. |`#{numericFunctions.randomPositiveWhole(3)}` produces a random value between `100` and `999`|
+| `randomNegativeWhole(int length)`| Gets a random negative integer value.| `length`: Integer (required) The number of digits in the whole amount. | `#{numericFunctions.randomNegativeWhole(3)}` produces a random value between `-100` and `-999`|
+| `randomPositiveFloat(int length, int decimalPlaces)`| Gets a random positive float value.| 1.`length`: Integer (required) The number of digits in the whole amount. 2. `decimalPlaces`: Integer (required) The number of digits after the decimal.|`#{numericFunctions.randomPositiveFloat(3,2)}` produces a random value between `100.00` and `999.99`|
+| `randomNegativeFloat(int length, int decimalPlaces)`| Gets a random negative float value.| 1.`length`: Integer (required) The number of digits in the whole amount. 2. `decimalPlaces`: Integer (required) The number of digits after the decimal. | `#{numericFunctions.randomNegativeFloat(3,2)}` produces a random value between `-100.00` and `-999.99`|
+| `random(int min, int max)`| Gets a random integer value between min and max.| 1.`min`: Integer (optional) The minimum value to return. default 0. 2. `max`: Integer (required) maximum value to return. | `#{numericFunctions.random(4,10)}` produces a random value between `4` and `10`|
+
diff --git a/mkdocs/docs/key-concepts/projects.md b/mkdocs/docs/key-concepts/projects.md
new file mode 100644
index 000000000..3e27eb483
--- /dev/null
+++ b/mkdocs/docs/key-concepts/projects.md
@@ -0,0 +1,187 @@
+# Projects
+
+Projects define how tests are executed.
+
+### Creating a Project
+#### Configuring Top-level Properties
+The main properties that must be specified include:
+
+* `Number of Users` - The Number of test users.
+* `Ramp time` - The Ramp time.
+* `Test length` - The test length.
+* `Scripts` - Which scripts to use.
+* `Data Read` - Whether to read from data.
+
+#### Steps to creation
+Clicking the `Project` tab takes the user to the main screen, which lists all the previously created projects. From this screen, the user can click the `New Project` icon or edit an existing project by clicking the name in the list.
+
+
+1. After clicking the `New Project` icon, the user will be presented with a screen where they can name the project.
+2. Select the `Product` this test belongs to and optionally add comments.
+3. After clicking `Save` in the toolbar, the project will be created and you will be taken to the `Edit project` screen.
+
+
+
+### Editing a Project
+You can click on the `Edit` icon at the far right of the project row or click the name of the Project to edit a project. From the edit screens, you can set the properties of the project as well as run a test.
+
+#### Configuring Top-level Properties
+The main properties that must be specified include:
+
+
+
+* `Name` - The name of the Project. This must be unique.
+* `Simulation Time` - This is the length of the test and should be filled in if the `Simulation Time Reached` option is chosen under the `Users and Times` tab. If `Scripts Loops Completed` is chosen on the `Users and Times` tab , this field can be ignored.
+* `Ramp Time` - This is how quickly test users should be added. For example, if the total number of Users is 3000, the Ramp Time is 15 minutes, and the User Increment is 1, then 3.33 users will be added every second.
+* `Owner` - The owner of this Project. Owners and Administrators have specific rights to Projects within Intuit Tank. See Security for more information.
+* `Total Users` - This is a `read-only` reference field that will total the users for all the regions configured on the `Users And Times` tab.
+
+#### Configuring Additional Properties
+There are additional properties that can be configured for a Project. Expand the section below to explore further.
+??? abstract "Additional Project Properties"
+
+ === "Users and Times "
+
+ `Users and Times` is the tab within a Project where you configure the number of users that will be simulated in your test and how the load will be generated and terminated.
+
+
+ * `Users` - Enter the number of users for each Amazon EC2 region to simulate. Regions are the Amazon data center locations. Available regions are configured by the administrator.
+        * `Run Scripts Until` - Simulation Time Reached - Run scripts until simulation time has been reached. Will run through the script once in its entirety regardless of the simulation time and then continue running the last script group until the simulation time has been reached.
+ Scripts Loops Completed - Run the script once then terminate the user.
+
+ * `Workload Type` - Increasing - Users will be added for the length of time specified in the Ramp Time field, after which the test will run at steady state with the number of Total Users until Simulation Time or Script Loops are completed.
+ Steady State - Start all the users at once.
+
+ * `Initial Users` - The number of users to simulate immediately when the test starts. The typical value is 0.
+ * `User Increment` - The number of users to add at a time during the Ramp Time. The typical value is 1.
+
+
+
+ === "Scripts and Test Plans"
+
+        The `Scripts` tab is where the flow of the test is configured. Some key terminology to review here would be:
+
+ * `Tests and Test Plans` - Tests are a collection of `Test Plans`. A Test Plan consists of one or more `Script Groups`. Test Plans run a percentage of the users for the test.
+        * `Script Groups` - Script Groups contain one or more Scripts. Script groups are intended to be self contained flows for a user to execute. They are the demarcation unit for stopping a test as well as for looping after a test has met the simulation time.
+ * `Scripts` - Scripts are a collection of steps to run. Looping can be configured at either level.
+
+ === "Adding a Test Plan"
+ 1. Click on `Add Test Plan` to add a Test Plan to the project.
+ 2. Fill in the name and the percentage of users this plan will run. You are responsible for making sure that the percentages add up to 100%.
+ 3. Click the `Add` button to save the Test Plan. The new Test Plan will show up as a new tab in the `Test Groups` tab view.
+
+ === "Adding a Script Group"
+ 1. Click on `Insert Script Group` to add a Script Group at the end of the list or the `Insert` icon at the far right of any existing Script Group to add one before the one selected.
+ 2. Fill in the name and number of loops.
+ 3. Click the `Add` button to save the Script Group.
+
+ === "Editing a Script Group"
+ 1. Click on name of Script Group or the `Edit` icon at the far right of any existing Script Group.
+        2. Move scripts from the available box to the selected box by using either the buttons or by dragging the scripts from one to the other. _(Refer to the `Move Script Groups` figure below)_
+ 3. You can reorder the Selected Scripts by dragging them up or down in the list. Click the `Add Scripts` button to add the scripts to the script group (lower panel on the `Move Script Groups` figure).
+ 4. You may set the number of loops of the individual script by clicking the `loop number` and changing the value.
+        5. When you are done, close the `Edit Script Group` Dialog.
+ !!! info "Pro Tip"
+ A user can specify multiple Script Groups and Scripts within each Group. This gives the user a great deal of control over how their test is run.
+ For example, `Script Group 1` might be looped twice and contain `Script1` (looped once) and `Script2` (looped twice).
+ Running the test would result in this sequence: `Script1, Script2, Script2, Script1, Script2, Script2`
+
+
+
+
+ === "Data Files"
+ Data Files are `csv` files that can be accessed by the scripts.
+ Each of the lines in a csv file will get divided evenly among the agents. So if there are 6000 lines in the csv file and there are two agents running (600 users) each agent will get 3000 lines.
+ When individual user threads access the csv file, the lines are locked as they get doled out so that lines are only given out once.
+ Use the `Upload Data Files` button to upload Data Files.
+ Move Data Files from the available to the selected panels using either the buttons or by dragging.
+ You can view the contents of a Data File by clicking on the `magnifying glass` :material-magnify-scan: icon.
+
+
+ === "Notifications"
+ Email Notifications can be attached to different lifecycle events. An email will be sent when the event happens.
+ Use the `Add Notification` button to add a notification.
+        Enter the email address(es) separated by commas. Then select the lifecycle events that this notification should listen for.
+
+
+ === "Variables"
+ A Project can be configured with `Global variables`.
+
+ !!! info "Pro Tip"
+ * You can either allow these variables to be overridden by the scripts (or) to be immutable (default).
+ * You can cause the agent to use a Proxy Server for requests by setting a variable named `TANK_HTTP_PROXY` in the format `proxyHost:proxyport`. e.g. `myProxyserver:80`
+
+ Use the `Add Variable` button and fill in the key and the value.
+ Check the `Allow Scripts to Override Project Variables` checkbox to allow scripts to override the variable values.
+
+
+ === "Job scheduling"
+ When you schedule a job, you snapshot it off at that point so that you can then run it exactly as it is set up at the time. A pop up will show the details of the job you are going to run.
+ ### Configuring Scheduling Properties
+
+ * `Name` - The name of the job.
+ * `Logging Profile` - How verbose the agent logs should be.
+ * `Tank HTTP Client` - The client implementation you want to use to drive load.
+ * `Stop Behavior` - Where you want the script to exit when stop command is issued.
+ * `Location` - Where the test is running. Configured by the administrator.
+ * `Agent Instance Type` - The instance type for the agent.
+ * `Max Users per Agent` - The number of users each agent should run.
+ * `Assign Elastic IPs` - Whether the agents should use an elastic ip that you have reserved. If no ips are available, then the agent will not get an Elastic IP.
+
+
+
+ After you schedule a job you will automatically be taken to the `Job Queue` tab.
+ !!! warning "Note"
+ It is important that you examine the details of the job, to make sure that everything is as you want it.
+
+
+
+
+ === "Job queue"
+        The Job Queue shows all the jobs that have been scheduled or have run. It allows you to control the execution of the test and monitor the current state of the test.
+
+
+
+
\ No newline at end of file
diff --git a/mkdocs/docs/key-concepts/scripts.md b/mkdocs/docs/key-concepts/scripts.md
new file mode 100644
index 000000000..6f54beaf4
--- /dev/null
+++ b/mkdocs/docs/key-concepts/scripts.md
@@ -0,0 +1,424 @@
+# Scripts
+
+!!! note "Tip"
+ Scripts are accessed from the `Scripts` Top navigation link.
+
+A Tank performance script is a preset of steps that `virtual`(mocked for load) users will execute during a load test.
+A `step` can be composed from one of the following step types:
+
+- [x] HTTP Request
+- [x] Think Time
+- [x] Sleep Time
+- [x] Variable
+- [x] Clear Session
+- [x] Logic
+
+### Importing a recorded Script
+There are two ways to generate a script.
+
+* Importing from a `Tank-Proxy recording`.
+* Manually creating a script.
+
+This section will mainly focus on the `Importing` method.
+
+!!! warning "Note"
+ Currently Tank only supports Intuit Tank's own `Proxy Recording` tool. More info on the Recording tool is covered in the later sections.
+
+#### Steps to Importing a Script
+1. Click on the `New Script` icon which takes you to the `Create Script` form.
+2. Fill in the `Script Name` and select from the appropriate `Product name`.
+3. Click on the `Browse` button to select the recording file or the `zip archive` that contains the recording file. _(If Creating a blank script just select the `Blank Script` radio button and skip this step.)_
+4. Select the appropriate `Filter Groups` (AND/OR) individual `Filters`. The Filters are used to transform the imported recording to a final Tank script. _(Refer to the `Filters` section for more details on `Filters` and `Filters Group`.)_
+5. Click on the `Save` button. Once the Intuit Tank script is created, you will be taken back to the `Scripts` section and your script will appear at the top of the list.
+
+
+
+
+
+!!! warning "Warning"
+    If uploading a zipped recording, Tank will only extract the first recording from the zip, even if the uploaded zip contains more than one.
+
+
+### Modifying a Script
+Click on the name of the script (or) the `Edit` icon corresponding to the script you wish to edit. This will take you to the `Edit Script` form.
+
+The attributes that can be edited include:
+
+* `Name` - The name of the script. This is an `in-place` editor. Simply click the name once to enter the `Edit` mode.
+* `Product` - The name of the product this script belongs to.
+* `Owner` - The user who owns the script.
+* `Comments` - Comments are accessed by the `Comments` button on the toolbar.
+
+!!! error "TODO: Edit verbiage for comments heading"
+
+#### Step Selection
+Selection of script steps is modeled differently from other sections.
+
+Instead of check boxes, it uses a standard selection method, and highlights to indicate selection.
+
+
+This toolbar has actions for inserting, deleting, and changing the order of the steps.
+
+### Re-ordering Execution steps of a Script
+Below are the instructions to re-order the Script steps.
+
+1. To re-order/move a step you will need to click on the button labeled `Reorder Steps` . A pop-up window labeled `Reorder Steps` will appear.
+2. Use the buttons to move steps up or down in the list. You can also drag the steps to the new order.
+3. Click on the `Save` button to save the new ordering.
+
+
+
+!!! warning "Warning"
+ The script is not saved to the database until you click `Save` in the main toolbar. Changes can be discarded at any time by clicking `Cancel` or by navigating to another panel. In general, it is recommended that you periodically save your updates.
+
+### Variables
+Variables are used to store various values that can be used to parameterize requests (i.e. `hostnames`, `query string`, `POST data`, etc.).
+You can define a variable to be set to a `literal` value or `function`.
+A variable can also be set by parsing the `response` or `headers` of a given request. When using variables in requests, you need to add the `@` symbol in front of the variable. This tells Tank to replace the variable with the actual value.
+
+#### Adding a Variable
+To add a Variable,
+
+1. Click on the button labeled `Variable` . The Variable dialog will pop up.
+2. In the `Key` field you will define your variable name.
+3. The `Value` field is where you define the actual value or `function` you will associate with the variable.
+4. Click on the `Add` or `Save` button when done.
+
+
+
+
+!!! info "Note"
+ You can cause the agent to use a `Proxy Server` for requests by setting a variable named `TANK_HTTP_PROXY` in the format `proxyHost:proxyport`. e.g. `myProxyserver:80`
+
+
+### Think Time and Sleep Time
+`Think Time` is used to simulate a real user's page to page transitions in an application. In Tank, a random think time is generated based on the `min` and `max` values provided.
+`Sleep time` is used when you wish to wait for a specific amount of time.
+
+!!! info "Note"
+ Think Time and Sleep Time are entered in `milliseconds`.
+
+#### Adding Think Time and Sleep Time
+
+1. To add a `Think Time` or `Sleep Time`, click on the appropriate button or the link to edit an existing one. The `Insert or Edit` dialog will pop up.
+2. In the `Key` field you will define your variable name.
+3. For `Think Time` you will need to enter the `Minimum` and `Maximum` time in `milliseconds`. For `Sleep Time` you just need to enter a single value.
+4. Click on the `Add` or `Save` button when done.
+
+
+
+
+### HTTP Request
+In Tank, a request is either a `HTTP(S)` `GET` or `POST` request. Before we go through the steps of adding a new request, we will go over what makes up the basic request.
+
+First, we will start with the fields that can make up a request:
+
+* `Host` - You can enter either the `FQDN`, `IP address` or variable for the hostname. When you use a variable, you need to add the `@` symbol at the beginning of the variable. (e.g. `@webserver`) You can specify the port by appending a `:[port]` to the host. e.g. `server.domain.com:9001`
+* `Protocol` - Supported protocols `HTTP` or `HTTPS`.
+* `Path` - The path to the requested resource. The path can also be parameterized with variables. For e.g, `#{rootContext}/ajax/logEmail`
+* `Method` - Supported methods are `GET` or `POST`
+* `Query String` - The query parameters expressed as key value pairs that get appended to the `url`.
+* `POST data` - Parameters expressed as key value pairs that get posted in the `body` of the request.
+* `Group` - Allows you to group requests under a `label`. Grouping is used with `On Failure` field below. _`Note`: This field is optional._
+* `Logging` - This field is used to gather response times on a specific request. _`Note`: This field is `optional`._
+* `On Failure` - Defines what action to take next when a response validation has failed.
+
+ The following are the 5 actions that can be taken by the tool when validation has failed:
+
+    * `Abort Script, goto next Script (Default)` – The virtual user will skip to the next script defined in the project. If there is only one script, the virtual user will start over from the top of the script.
+ * `Continue to next request` – Execute next request.
+ * `Skip remaining requests in a group` – Will execute the next step after the last request with the group label.
+ * `Goto Group` – Jumps to the first step with the group label.
+ * `Terminate user` – The virtual user is terminated.
+
+* `Name` - Allows a user to uniquely label a request. (i.e. `Login request`, `Signout request` or `Print request`). _`Note`: This field is optional._
+
+#### Adding a HTTP Request
+
+ To add a HTTP request,
+
+1. Click on the appropriate button or the link to edit an existing one. The `Insert` or `Edit` dialog will pop up.
+2. Fill in the appropriate top level attributes such as `Host`, `Path`, `Name`, etc.
+3. For each of the tabs, you can add or modify the key value pairs.
+
+
+
+!!! info "Note"
+ * Key and Value are in-place edit components. click on them to change their value and click the check mark to accept the change or the 'X' to revert.
+    * Not all Headers are re-played. The following are filtered out:
+ - Host
+ - Cookie
+ - Connection
+ - If-None-Match
+ - If-Modified-Since
+ - Content* (Content-Type, Content-Length)
+
+
+#### Supplemental Properties
+The following Supplemental properties can be added to the HTTP Request.
+
+* `Response Header` - Response Headers are read only. They are here to help you in creating validation or assignments.
+* `Request Cookies` - Cookies lets you see which cookies were set when the request was made. However, only cookies that are set from variables are replayed.
+* `Response Cookies` - Response Cookies are read only.
+* `Query String` - Used primarily with `GET` requests.
+* `Post Data` - Only valid for POST requests.
+* `Validation` - Perform validation specified. If validation fails, the On Fail action is performed. Key is the xpath or expression for the value.
+* `Assignments` - Assign values to variables for use in other script steps. Key is the variable name and value is the xpath or expression for the value.
+
+!!! info "Note"
+ Two Synthetic headers can be validated as well:
+
+ - `HTTPRESPONSEMESSAGE` - The http response message. e.g. `OK` or `Not Found`
+ - `HTTPRESPONSECODE` - The http response code. e.g. `200` or `404`
+
+Finally, click on the `Save` button when done.
+
+
+### Clear Session
+Inserting this step clears the session, which can be thought of as comparable to closing and re-opening the browser.
+
+This accomplishes the following:
+
+- Clears cookies
+- Clears the cache
+
+### Logic
+Inserting this step allows the user to use `Javascript` to control the flow of the script, change variable values, or perform any custom logic that is too complex for the normal operations.
+
+Logic step scripts are written in `Javascript`. They have access to the following objects:
+
+* `Variables` - The current variables in the test plan execution.
+* `Request` - The previous request in the test plan execution.
+* `Response` - The previous response in the test plan execution.
+
+!!! info "Note"
+ There are functions that are added to each script for accessing these values.
+
+Control flow is controlled by setting the `action` output parameter in the `ioBean`. Handled values are:
+
+* `goto:[groupName]` - Jump to a group within the script with the specified name.
+* `restartPlan` - Restart the test plan from the start.
+* `abortScriptGroup` - Aborts the current script group and proceeds to the next script group.
+* `abortScript` - Aborts the current script and proceeds to the next script.
+* `abortGroup` - Aborts the current group within a script and proceeds to the next request after the current group.
+* `terminateUser` - Terminates the current user.
+
+!!! info "Tip"
+ There are functions that are added to each script for accessing these values.
+
+The following functions are added to each script before it is run and are available for use.
+
+??? abstract "Functions added at each Script"
+
+ === "Logging functions"
+ ``` js
+ /**
+    * prints the line to the output, prefixed with the current date; the line will be logged.
+ *
+ * @param line
+ * the line to print
+ */
+ function logWithDate(line) {
+      ioBean.println(new Date().toString() + ": " + line);
+ }
+
+ /**
+ * prints the line in info context.
+ *
+ * @param line
+ * the line to print
+ */
+ function log(line) {
+ ioBean.println(line);
+ }
+ /**
+ * prints the line in error context.
+ *
+ * @param line
+ * the line to print
+ */
+ function error(line) {
+ ioBean.error(line);
+ }
+
+ /**
+ * prints the line in debug context. Will be logged to console or test but not
+ * to logs when in production.
+ *
+ * @param line
+ * the line to print
+ */
+ function debug(line) {
+ ioBean.debug(line);
+ }
+ ```
+ === "Data Transformational functions"
+ ``` js
+ function toJsonObj(text) {
+ try {
+ return JSON.parse(text);
+ } catch(e) {
+ error("Error parsing json: " + e);
+ }
+ return text;
+ }
+ function toJsonString(jsonObj) {
+ try {
+ return JSON.stringify(jsonObj, null, '\t');
+ } catch(e) {
+ error("Error converting json to string json: " + e);
+ }
+ return jsonObj;
+ }
+ ```
+ === "Data Retrieval functions"
+ ``` js
+ /**
+ * gets the com.intuit.tank.http.BaseRequest object of the last call made.
+ *
+ * @return the request or null if no requests have been made
+ */
+ function getRequest() {
+ return ioBean.getInput("request");
+ }
+
+ /**
+ * gets the com.intuit.tank.http.BaseResponse object of the last call made.
+ *
+ * @return the response or null if no requests have been made
+ *
+ */
+ function getResponse() {
+ return ioBean.getInput("response");
+ }
+
+ /**
+ * gets the request body as string if not binary
+ *
+ * @return the body or empty string if null or binary
+ */
+ function getResquestBody() {
+ if (getRequest() != null) {
+ return getRequest().getBody();
+ }
+ return "";
+ }
+
+ /**
+ * gets the response body as string if not binary
+ *
+ * @return the body or empty string if null or binary
+ */
+ function getResponseBody() {
+ if (getResponse() != null) {
+ return getResponse().getBody();
+ }
+ return "";
+ }
+
+ /**
+ * Returns the value of the variable or null if variable does not exist
+ *
+ * @param key
+ * the variable name
+ * @returns the variable value.
+ */
+ function getVariable(key) {
+ return ioBean.getInput("variables").getVariable(key);
+ }
+
+ /**
+ * Sets the value of the variable.
+ *
+ * @param key
+ * the variable name to set
+ * @param value
+ * the value to set it to
+ */
+ function setVariable(key, value) {
+ ioBean.getInput("variables").addVariable(key, value);
+ }
+ ```
+ === "Result functions"
+ ```js
+ /**
+ * jump to a group within the script with the specified name.
+ *
+ * @param groupName
+ * the name of the group to go to. if the group does not exist, it
+ * skips to next script.
+ */
+ function gotoGroup(groupName) {
+ setAction("goto " + groupName);
+ }
+
+ /**
+ * restart the TestPlan from the start.
+ */
+ function restartPlan() {
+ setAction("restartPlan");
+ }
+
+ /**
+ * aborts the current ScriptGroup and proceeds to the next.
+ *
+ * @returns
+ */
+ function abortScriptGroup() {
+ setAction("abortScriptGroup");
+ }
+
+ /**
+ * aborts the current Script and proceeds to the next.
+ */
+ function abortScript() {
+ setAction("abortScript");
+ }
+
+ /**
+ * aborts the current group and proceeds to the next request after the current
+ * group.
+ */
+ function abortGroup() {
+ setAction("abortGroup");
+ }
+
+ /**
+ * terminates this user.
+ */
+ function terminateUser() {
+ setAction("terminateUser");
+ }
+
+ /**
+ * sets the action in the output.
+ */
+ function setAction(action) {
+ ioBean.setOutput("action", action);
+ }
+
+ ```
+
+
+
diff --git a/mkdocs/docs/key-concepts/string-functions.md b/mkdocs/docs/key-concepts/string-functions.md
new file mode 100644
index 000000000..3a5b143e5
--- /dev/null
+++ b/mkdocs/docs/key-concepts/string-functions.md
@@ -0,0 +1,22 @@
+# String Functions
+
+| Method Definition | Description | Parameters | Example |
+| :---------------------------------------| :----------------------------------: | :----:| :--------:|
+| `concat(String ... values)`| Concatenates the given strings | `values`: Strings (comma separated) The strings to concatenate | `#{stringFunctions.concat('Turbo', 'Scale', ' Rocks')}` will return `Turbo Scale Rocks` |
+| `substring(String subject, int start, int stop) ` | Returns a new string that is a substring of subject. The substring begins at the specified start and extends to the character at index stop - 1. Thus the length of the substring is stop-start. | 1. `subject`: String the string from which the substring is to be found (required) 2. `start`: Integer the start index for the substring`(inclusive) (required)` 3. `stop`: Integer the index at which to end the substring`(exclusive) (optional)`| `#{stringFunctions.substring('hello world', 6)}` returns `world` and `#{stringFunctions.substring('hello world', 0, 5)}` returns `hello`|
+| `substringBetween(String subject, String open, String close, int index)` | Returns the String that is nested in between two Strings. | 1. `subject`: String the string from which the substring is to be found (required) 2. `open`: the String before the substring, may be null (if null will return the substring before the first occurrence of the close param) (required) 3. `close`: the String after the substring, may be null (if null will return the substring after the last occurrence of the open param) (required) 4. `index`: the zero based index of the string to return. (optional) | `#{stringFunctions.substringBetween('yabcz', 'y', 'z')}` returns `abc` and `#{stringFunctions.substringBetween('yabcz ydefz', 'y', 'z', 1)}` returns `def` |
+| `randomAlphaLower(int length)`| Generates a random string consisting of lower case alphabets of given length | `length`: Integer the length of the random string| - |
+| `randomAlphaUpper(int length)`| Generates a random string consisting of upper case alphabets of given length | `length`: Integer the length of the random string | - |
+| `randomAlphaMixed(int length)`| Generates a random string consisting of lower and upper case alphabets of given length | `length`: Integer the length of the random string | - |
+| `randomAlphaNumeric(int length)` | Generates a random string consisting of numerals of given length | `length`: Integer the length of the random string | - |
+| `randomAlphaSpecial(int length)`| Generates a random string consisting of special characters of given length | `length`: Integer the length of the random string | - |
+| `randomAlphaMixedNumeric(int length)`| Generates a random string consisting of lower and upper case alphabets and numerals of given length | `length`: Integer the length of the random string | - |
+| `randomAlphaMixedSpecial(int length)`| Generates a random string consisting of lower and upper case alphabets and special characters of given length | `length`: Integer the length of the random string | - |
+| `randomAlphaMixedNumericSpecial(int length)`| Generates a random string consisting of lower and upper case alphabets, numerals and special characters of given length | `length`: Integer the length of the random string | - |
+| `userIdDate(int prefixLength, String format)`| Generates a Random String suitable for a user ID by combining a random character string and a date | 1. `prefixLength`: Integer (required) The number of characters to use for the prefix. 2. `format`: String (required) The date format string to use.| `#{stringFunctions.userIdDate(4,'yyyy-MM-dd')}` produces `GdGE2011-11-15` on `November 15, 2011`|
+| `userIdFromDate(int prefixLength, String format)`| Generates a Random String suitable for a user ID by combining a random character string and a date | 1. `prefixLength`: Integer (required) The number of characters to use for the prefix. 2. `format`: String (required) The date format string to use.| `#{stringFunctions.userIdFromDate(4,'yyyy-MM-dd')}` produces `GdGE2011-11-15` on `November 15, 2011`|
+| `userIdFromRange(int minId, int maxId)`| Generates an Integer user Id from the given range. Will distribute these ids equally among the different agents. | 1. `minId`: Integer (required) The minimum id of the range. 2. `maxId`: Integer (required) The maximum id of the range.| `#{stringFunctions.userIdFromRange(1,1000)}` produces a unique integer between `1` and `1000`|
+| `toBase64(String toEncode)`| Will encode the given string to base64 format | `toEncode`: String the string to base 64 encoding| -|
+| `fromBase64(String toDecode)`| Will decode the given string from base64 format | `toDecode`: String the base64 string to decode| -|
+| `urlEncode(String toEncode)`| Will encode the given string using URLEncoder | `toEncode`: String the string to encode| -|
+| `urlDecode(String toDecode)`| Will decode the given string Using URLDecoder | `toDecode`: String the encoded string to decode| -|
\ No newline at end of file
diff --git a/mkdocs/docs/key-concepts/tax-functions.md b/mkdocs/docs/key-concepts/tax-functions.md
new file mode 100644
index 000000000..57bab987f
--- /dev/null
+++ b/mkdocs/docs/key-concepts/tax-functions.md
@@ -0,0 +1,5 @@
+# Tax Functions
+
+| Method Definition | Description | Parameters | Example |
+| :---------------------------------------| :----------------------------------: | :----:| :--------:|
+| `getSsn(long startSSN)`| Gets a valid Social Security Number starting at a specific number. Each user will get a unique ssn. The range is divided evenly across all agents. | `startSSN`: Long The starting number to use as a ssn. |`#{taxFunctions.getSsn(562000000)}` returns the next ssn requested after the given number.|
\ No newline at end of file
diff --git a/mkdocs/docs/roles.md b/mkdocs/docs/roles.md
new file mode 100644
index 000000000..7f1833323
--- /dev/null
+++ b/mkdocs/docs/roles.md
@@ -0,0 +1,45 @@
+# Roles and Permissions
+
+## Users and groups
+Intuit Tank uses user roles to dictate which users can perform a given action. The administrator determines which roles are assigned to which users via the Admin section. The default role assigned to users is "User" but each Intuit Tank user may have more than one role.
+
+??? tip "Tip: Assigning Groups"
+ `Administration` section below has detailed instructions on Assigning groups.
+
+
+## Roles
+
+| Role Name | Role Description |
+| :---------| :----------------|
+| Guest | `Guest` is a default role assigned to all users. Guests are capable of viewing any type of entity on Intuit Tank, however they are unable to create or edit any entities.|
+| User | `User` is another default role assigned to all users. Users can create Scripts, Projects, Filters, and Filter Groups, as well as upload Data Files; however, Users may only edit entities of which they are the owner.|
+| Project Manager | Intuit Tank users with the `Project Manager` role are responsible for maintaining Intuit Tank Projects. While they are unable to create Scripts or Filters/Filter Groups, they have the ability to make edits to other users' Projects as well as control that Project's Job Queue.|
+| Script Manager | Intuit Tank users with the `Script Manager` role are responsible for upkeep of Scripts, Filters, and Filter Groups. Script Managers are unable to create Projects or control jobs; however, they have permission to edit other users' Scripts and Filters/Filter Groups.|
+| Job Manager | Intuit Tank users with the `Job Manager` role are responsible for controlling jobs in the Agent Tracker/Job Queue. For example, they can choose to run or delete a job in the Job Queue, however, they may not create or edit the Project.|
+
+## Permissions Matrix
+
+
+
+| Capability | Member Role | Create | Delete | Modify | Read |
+| :---------:| :---------: | :-------: |:-------: |:-------: |:-------: |
+| Projects | Guest | :crossed_swords:|:crossed_swords:|:crossed_swords:|:white_check_mark: |
+| Projects | User | :white_check_mark: |:crossed_swords:|:crossed_swords:|:crossed_swords: |
+| Projects | Script Manager | :crossed_swords:|:crossed_swords:|:crossed_swords:|:white_check_mark: |
+| Projects | Project Manager | :white_check_mark: |:white_check_mark: |:white_check_mark: |:white_check_mark: |
+| Projects | Job Manager | :crossed_swords:|:crossed_swords:|:crossed_swords:|:white_check_mark: |
+| Scripts | Guest | :crossed_swords:|:crossed_swords:|:crossed_swords:|:white_check_mark: |
+| Scripts | User | :white_check_mark: |:crossed_swords:|:crossed_swords:|:white_check_mark: |
+| Scripts | Script Manager | :white_check_mark:|:white_check_mark:|:white_check_mark:|:white_check_mark: |
+| Scripts | Project Manager | :crossed_swords:|:crossed_swords:|:crossed_swords:|:white_check_mark: |
+| Scripts | Job Manager | :crossed_swords:|:crossed_swords:|:crossed_swords:|:white_check_mark: |
+| Filters | Guest | :crossed_swords:|:crossed_swords:|:crossed_swords:|:white_check_mark: |
+| Filters | User | :white_check_mark: |:crossed_swords:|:crossed_swords:|:white_check_mark: |
+| Filters | Script Manager | :white_check_mark:|:white_check_mark:|:white_check_mark:|:white_check_mark: |
+| Filters | Project Manager | :crossed_swords:|:crossed_swords:|:crossed_swords:|:white_check_mark: |
+| Filters | Job Manager | :crossed_swords:|:crossed_swords:|:crossed_swords:|:white_check_mark: |
+| Data Files | Guest | :crossed_swords:|:crossed_swords:|:material-null:|:white_check_mark: |
+| Data Files | User | :white_check_mark: |:crossed_swords:|:material-null:|:white_check_mark: |
+| Data Files | Script Manager | :white_check_mark: |:white_check_mark: |:material-null:|:white_check_mark: |
+| Data Files | Project Manager | :white_check_mark: |:white_check_mark: |:material-null:|:white_check_mark: |
+| Data Files | Job Manager | :crossed_swords:|:crossed_swords:|:material-null:|:white_check_mark: |
diff --git a/mkdocs/docs/supplemental-tools.md b/mkdocs/docs/supplemental-tools.md
new file mode 100644
index 000000000..4a22d9c6c
--- /dev/null
+++ b/mkdocs/docs/supplemental-tools.md
@@ -0,0 +1,46 @@
+# Supplemental Tools
+
+Tank includes several external/supplemental tools that can be used in conjunction with it for a comprehensive load testing experience.
+
+??? tip "Tip: Navigating to the Tools page"
+    Clicking the `Tools` tab takes the user to the main screen, which has links to `downloads` and pages for launching the tools as `java applets`.
+
+There are two sections of links for the tools, `Applets` and `Downloadable` Tools.
+
+
+
+## Java "Applet" Tools
+Applets are rich clients that are run using `Java Applet` technology.
+They access system resources, and so they are signed using a `self-signed certificate`. This means you will be prompted to trust the applet when you launch it.
+
+
+
+### List of available Applets
+#### Tank Agent Visual Debugger
+Allows for the debugging of scripts or projects in a visual manner.
+
+#### Tank Script Filter Editor
+Allows for the editing, creation, and testing of filter scripts (external scripts) in a visual environment.
+
+## Downloadable Tools
+Downloadable Tools are meant to be run from your local machine. They require that java is installed on your local machine.
+
+### List of downloadable Tools
+
+#### Tank Agent Visual Debugger
+The Tank Agent Visual Debugger is downloaded as an executable jar file. It may be run by double clicking on the jar or by running from the command line. If run from the command line, you can additionally specify the url of Intuit Tank you wish to connect to. e.g. `java -jar Tank-Debugger-all.jar http://my.tank/tank`
+
+#### Tank Script Filter Editor
+The Tank Script Filter Editor allows for the editing, creation, and testing of filter scripts (external scripts) in a visual environment.
+
+#### Tank Proxy Package
+The proxy package is a zip file that contains the proxy recorder. See the readme.txt file for instructions on setup.
+
+#### API Test Harness Debugger Package
+Command line debugger for agent scripts.
diff --git a/mkdocs/mkdocs.yml b/mkdocs/mkdocs.yml
new file mode 100644
index 000000000..4997ef238
--- /dev/null
+++ b/mkdocs/mkdocs.yml
@@ -0,0 +1,56 @@
+site_name: Tank docs
+repo_url: https://github.com/intuit/Tank
+repo_name: intuit/Tank
+plugins:
+ - search
+ - awesome-pages:
+ collapse_single_pages: true
+markdown_extensions:
+ - md_in_html
+ - attr_list
+ - def_list
+ - pymdownx.highlight:
+ anchor_linenums: true
+ - pymdownx.inlinehilite
+ - pymdownx.snippets
+ - pymdownx.tasklist:
+ custom_checkbox: true
+ - pymdownx.emoji:
+ emoji_index: !!python/name:materialx.emoji.twemoji
+ emoji_generator: !!python/name:materialx.emoji.to_svg
+ - admonition
+ - pymdownx.details
+ - pymdownx.superfences
+ - pymdownx.tabbed:
+ alternate_style: true
+theme:
+ favicon: assets/TankLogo.svg
+ features:
+ - navigation.top
+ - navigation.tabs
+ - navigation.tracking
+ - navigation.tabs.sticky
+ - header.autohide
+ name: material
+
+ palette:
+ - scheme: default
+ primary: orange
+ accent: teal
+ toggle:
+ icon: material/toggle-switch-off-outline
+ name: Switch to dark mode
+ - scheme: slate
+ primary: teal
+ accent: orange
+ toggle:
+ icon: material/toggle-switch
+ name: Switch to light mode
+ font:
+ code: Roboto Mono
+ logo: assets/TankLogo.svg
+
+extra:
+ social:
+ - icon: fontawesome/brands/slack
+ link: https://twitter.com/squidfunk
\ No newline at end of file
diff --git a/mkdocs/requirements.txt b/mkdocs/requirements.txt
new file mode 100644
index 000000000..4430a32a1
--- /dev/null
+++ b/mkdocs/requirements.txt
@@ -0,0 +1,2 @@
+mkdocs-material==8.2.13
+mkdocs-awesome-pages-plugin==2.7.0
\ No newline at end of file
diff --git a/web/web_ui/src/main/webapp/docs/index.xhtml b/web/web_ui/src/main/webapp/docs/index.xhtml
index 6211d2f46..6a347ef2f 100644
--- a/web/web_ui/src/main/webapp/docs/index.xhtml
+++ b/web/web_ui/src/main/webapp/docs/index.xhtml
@@ -16,6 +16,12 @@