diff --git a/.github/workflows/docs.yaml b/.github/workflows/docs.yaml
new file mode 100644
index 0000000..221b87e
--- /dev/null
+++ b/.github/workflows/docs.yaml
@@ -0,0 +1,81 @@
+name: Build and Deploy Docs
+
+on:
+ workflow_dispatch:
+
+permissions:
+ contents: read
+ id-token: write
+ pages: write
+
+env:
+ INSTANCE: Writerside/hi
+ ARTIFACT: webHelpHI2-all.zip
+ DOCS_FOLDER: ./docs
+
+jobs:
+ build:
+ runs-on: ubuntu-latest
+
+ steps:
+ - name: Checkout repository
+ uses: actions/checkout@v3
+
+ - name: Build Writerside docs using Docker
+ uses: JetBrains/writerside-github-action@v4
+ with:
+ instance: ${{ env.INSTANCE }}
+ artifact: ${{ env.ARTIFACT }}
+ location: ${{ env.DOCS_FOLDER }}
+
+ - name: Upload documentation
+ uses: actions/upload-artifact@v3
+ with:
+ name: docs
+ path: |
+ artifacts/${{ env.ARTIFACT }}
+ artifacts/report.json
+ retention-days: 7
+
+ test:
+ needs: build
+ runs-on: ubuntu-latest
+
+ steps:
+ - name: Download artifacts
+ uses: actions/download-artifact@v3
+ with:
+ name: docs
+ path: artifacts
+
+ - name: Test documentation
+ uses: JetBrains/writerside-checker-action@v1
+ with:
+ instance: ${{ env.INSTANCE }}
+
+ deploy:
+ environment:
+ name: github-pages
+ url: ${{ steps.deployment.outputs.page_url }}
+ needs: test
+ runs-on: ubuntu-latest
+ steps:
+ - name: Download artifact
+ uses: actions/download-artifact@v3
+ with:
+ name: docs
+
+ - name: Unzip artifact
+ run: unzip -O UTF-8 -qq ${{ env.ARTIFACT }} -d dir
+
+ - name: Setup Pages
+ uses: actions/configure-pages@v2
+
+ - name: Upload artifact
+ uses: actions/upload-pages-artifact@v1
+ with:
+ path: dir
+
+ - name: Deploy to GitHub Pages
+ id: deployment
+ uses: actions/deploy-pages@v1
\ No newline at end of file
diff --git a/README.md b/README.md
index 8058231..c2003e2 100644
--- a/README.md
+++ b/README.md
@@ -1,322 +1,10 @@
-# Zilean
+# What is Zilean
-DMM sourced arr-less searching for [Riven](https://github.com/rivenmedia/riven)
+
-## What is Zilean?
-Zilean is a service that allows you to search for [DebridMediaManager](https://github.com/debridmediamanager/debrid-media-manager) sourced arr-less content.
-When the service is started, it will automatically download and index all the DMM shared hashlists and index them using Lucene.
-The service provides a search endpoint that allows you to search for content using a query string, and returns a list of filenames and infohashes.
-There is no clean filtering applied to the search results, the idea behind this endpoint is Riven performs that using [RTN](https://pypi.org/project/rank-torrent-name/).
-The DMM import reruns on missing pages every hour.
+Zilean is a service that allows you to search for [DebridMediaManager](https://github.com/debridmediamanager/debrid-media-manager) sourced content shared by users.
+This can then be configured as a Torznab indexer in your favorite content application.
+Newly added is the ability for Zilean to scrape from your running Zurg instance, and from other running Zilean instances.
-## Configuration
+Documentation for Zilean can be viewed at [https://ipromknight.github.io/zilean/](https://ipromknight.github.io/zilean/)
-```json
-{
- "Zilean": {
- "ApiKey": "69f72d7eb22e48938fd889206ffcf911a514bcc2e3824b2e9e7549122fb16849",
- "FirstRun": true,
- "Dmm": {
- "EnableScraping": true,
- "EnableEndpoint": true,
- "ScrapeSchedule": "0 * * * *",
- "MinimumReDownloadIntervalMinutes": 30,
- "MaxFilteredResults": 200,
- "MinimumScoreMatch": 0.85
- },
- "Torznab": {
- "EnableEndpoint": true
- },
- "Database": {
- "ConnectionString": "Host=localhost;Database=zilean;Username=postgres;Password=postgres;Include Error Detail=true;Timeout=30;CommandTimeout=3600;"
- },
- "Torrents": {
- "EnableEndpoint": false
- },
- "Imdb": {
- "EnableImportMatching": true,
- "EnableEndpoint": true,
- "MinimumScoreMatch": 0.85
- },
- "Ingestion": {
- "ZurgInstances": [],
- "ZileanInstances": [],
- "EnableScraping": false,
- "Kubernetes": {
- "EnableServiceDiscovery": false,
- "KubernetesSelectors": [],
- "KubeConfigFile": "/$HOME/.kube/config",
- "AuthenticationType": 0
- },
- "ScrapeSchedule": "0 * * * *",
- "ZurgEndpointSuffix": "/debug/torrents",
- "ZileanEndpointSuffix": "/torrents/all",
- "RequestTimeout": 10000
- },
- "Parsing": {
- "IncludeAdult": false,
- "IncludeTrash": true,
- "BatchSize": 5000
- }
- }
-}
-```
-### This file can be edited on your disk and mounted as a volume into the container at `/app/data/settings.json`.
-
-Every option you see can be set as an env variable, the env variable name is the same as the json path with double underscores instead of dots.
-For example, `Zilean__Dmm__EnableScraping` would be the env variable for `Zilean.Dmm.EnableScraping`.
-
-A breakdown of all configuration options:
-
-- `Zilean__Dmm__EnableScraping`: Whether to enable the DMM scraping service.
-- `Zilean__Dmm__EnableEndpoint`: Whether to enable the DMM search endpoint.
-- `Zilean__Dmm__ScrapeSchedule`: The cron schedule for the DMM scraping service.
-- `Zilean__Dmm__MaxFilteredResults`: The maximum number of results to return from the DMM search endpoint.
-- `Zilean__Dmm__MinimumScoreMatch`: The minimum score required for a search result to be returned. Values between 0 and 1. Defaults to 0.85.
-- `Zilean__Dmm__ImportBatched`: Whether to import DMM pages in batches. This is for low end systems. Defaults to false. Will make the initial import take longer. A lot longer.
-- `Zilean__Database__ConnectionString`: The connection string for the database (Postgres).
-- `Zilean__Prowlarr__EnableEndpoint`: Whether to enable the Prowlarr search endpoint. (Unused currently).
-- `Zilean__Imdb__EnableImportMatching`: Whether to enable the IMDB import matching service. Defaults to true. Disabling this will improve import speed at the cost of not having IMDB data.
-- `Zilean__Imdb__EnableEndpoint`: Whether to enable the IMDB search endpoint.
-- `Zilean__Imdb__MinimumScoreMatch`: The minimum score required for a search result to be returned. Values between 0 and 1. Defaults to 0.85.
-- `Zilean__Torznab__EnableEndpoint`: Whether to enable the Torznab endpoints.
----
-
-## Compose Example
-See the file [compose.yaml](https://github.com/iPromKnight/zilean/blob/main/compose.yaml) for an example of how to run Zilean.
-
----
-
-## API
-
-The Api can be accessed at `http://localhost:8181/scalar/v2` by default, which allows you to execute
-any of the available endpoints directly in a [Scalar](https://github.com/ScalaR/ScalaR) dashboard.
-
----
-
-
-# Generic Ingestion Setup
-
-Zilean now has new **Generic Ingestion** functionality added. This setup provides a flexible mechanism to discover and process torrent data from multiple endpoints, including Kubernetes services and direct zurg and other zilean endpoint URLs.
-
----
-
-## Ingestion Configuration
-
-The `Ingestion` section in the JSON configuration defines the behavior and options for the generic ingestion process.
-
-### Example Configuration
-
-```json
-"Ingestion": {
- "ZurgInstances": [],
- "ZileanInstances": [],
- "EnableScraping": false,
- "Kubernetes": {
- "EnableServiceDiscovery": false,
- "KubernetesSelectors": [
- {
- "UrlTemplate": "http://zurg.{0}:9999",
- "LabelSelector": "app.elfhosted.com/name=zurg",
- "EndpointType": 1
- }
- ],
- "KubeConfigFile": "/$HOME/.kube/config",
- "AuthenticationType": 0
- },
- "ScrapeSchedule": "0 * * * *",
- "ZurgEndpointSuffix": "/debug/torrents",
- "ZileanEndpointSuffix": "/torrents/all",
- "RequestTimeout": 10000
-}
-```
-
----
-
-## Key Fields
-
-### `ZurgInstances`
-- **Type**: `GenericEndpoint[]`
-- **Description**: A list of direct endpoints for Zurg instances.
-- **Structure**:
- ```json
- {
- "Url": "http://zurg.example.com:19999",
- "EndpointType": 1
- }
- ```
-- **Example**:
- ```json
- "ZurgInstances": [
- {
- "Url": "http://zurg.prod.cluster.local:19999",
- "EndpointType": 1
- }
- ]
- ```
-
-### `ZileanInstances`
-- **Type**: `GenericEndpoint[]`
-- **Description**: A list of direct endpoints for Zilean instances.
-- **Structure**:
- ```json
- {
- "Url": "http://zilean.example.com:8181",
- "EndpointType": 0
- }
- ```
-- **Example**:
- ```json
- "ZileanInstances": [
- {
- "Url": "http://zilean.prod.cluster.local:8181",
- "EndpointType": 0
- }
- ]
- ```
-
-### `EnableScraping`
-- **Type**: `bool`
-- **Description**: Enables or disables automated scraping functionality for ingestion.
-
-### `Kubernetes`
-- **Type**: `object`
-- **Description**: Configuration for Kubernetes-based service discovery.
-- **Fields**:
- - **`EnableServiceDiscovery`**: Enables Kubernetes service discovery.
- - **`KubernetesSelectors`**:
- - **`UrlTemplate`**: Template for constructing URLs from Kubernetes service metadata.
- - **`LabelSelector`**: Label selector to filter Kubernetes services.
- - **`EndpointType`**: Indicates the type of endpoint (0 = Zilean, 1 = Zurg).
- - **`KubeConfigFile`**: Path to the Kubernetes configuration file.
- - **`AuthenticationType`**: Authentication type for Kubernetes service discovery (0 = ConfigFile, 1 = RoleBased).
-
-### `ScrapeSchedule`
-- **Type**: `string` (CRON format)
-- **Description**: Schedule for automated scraping tasks.
-
-### `ZurgEndpointSuffix`
-- **Type**: `string`
-- **Description**: Default suffix appended to Zurg instance URLs for ingestion.
-
-### `ZileanEndpointSuffix`
-- **Type**: `string`
-- **Description**: Default suffix appended to Zilean instance URLs for ingestion.
-
-### `RequestTimeout`
-- **Type**: `int`
-- **Description**: Timeout for HTTP requests in milliseconds.
-
----
-
-## `GenericEndpoint` and `GenericEndpointType`
-
-### `GenericEndpoint`
-Represents a generic endpoint configuration.
-
-```csharp
-public class GenericEndpoint
-{
- public required string Url { get; set; }
- public required GenericEndpointType EndpointType { get; set; }
-}
-```
-
-### `GenericEndpointType`
-Defines the type of an endpoint.
-
-```csharp
-public enum GenericEndpointType
-{
- Zilean = 0,
- Zurg = 1
-}
-```
-
----
-
-## New Torrents Configuration
-
-### Example
-
-```json
-"Torrents": {
- "EnableEndpoint": false
-}
-```
-
-- **`EnableEndpoint`**:
- - **Type**: `bool`
- - **Description**: Enables or disables the Torrents API endpoint `/torrents/all` which allows this zilean instance to be scraped by another.
-
----
-
-## Kubernetes Service Discovery
-
-If `EnableServiceDiscovery` is set to `true` in the Kubernetes section, the application will use the Kubernetes API to discover services matching the provided `LabelSelector`. The discovered services will be processed using the specified `UrlTemplate` and their `EndpointType`.
-
-### Example Service Discovery Configuration
-
-```json
-"Kubernetes": {
- "EnableServiceDiscovery": true,
- "KubernetesSelectors": [
- {
- "UrlTemplate": "http://zurg.{0}:9999",
- "LabelSelector": "app.elfhosted.com/name=zurg",
- "EndpointType": 1
- }
- ],
- "KubeConfigFile": "/$HOME/.kube/config",
- "AuthenticationType": 0
-}
-```
-### `AuthenticationType`
-Defines the Types of authentication to use when connecting to the kubernetes service host.
-
-```csharp
-public enum KubernetesAuthenticationType
-{
- ConfigFile = 0,
- RoleBased = 1
-}
-```
-note: In order for RBAC to work, the service account must have the correct permissions to list services in the namespace, and the `KUBERNETES_SERVICE_HOST` and `KUBERNETES_SERVICE_PORT` environment variables must be set.
-
-### Behavior
-1. The application uses the Kubernetes client to list services matching the `LabelSelector`.
-2. It generates service URLs using the `UrlTemplate`, where `{0}` is replaced by the namespace.
-3. These URLs are passed to the ingestion pipeline for processing.
-
----
-
-## Integration with Ingestion Pipeline
-
-The ingestion pipeline combines direct endpoints (`ZurgInstances` and `ZileanInstances`) and Kubernetes-discovered services (if enabled) to create a unified list of URLs. These URLs are then processed in batches, filtering out torrents already stored in the database.
-
----
-
-## Logging and Monitoring
-
-Key events in the ingestion process are logged:
-- Discovered URLs.
-- Filtered torrents (existing in the database).
-- Processed torrents (new and valid).
-- Errors during processing or service discovery.
-
----
-
-## Blacklisting
-
-The ingestion pipeline supports blacklisting infohashes to prevent them from being processed. This feature is useful for filtering out unwanted torrents or duplicates.
-See the `/blacklist` endpoints for more information in scalar.
-These endpoints are protected by the ApiKey that will be generated on first run of the application and stored in the settings.json file as well as a one time print to application logs on startup.
-Blacklisting an item also removes it from the database.
-
----
-
-## Api Key
-
-The ApiKey is generated on first run of the application and stored in the settings.json file as well as a one time print to application logs on startup.
-The key can also be cycled to a new key if you set the environment variable `ZILEAN__NEW__API__KEY` to `true` and restart the application.
-To authenticate with the API, you must include the `ApiKey` in the request headers. The header key is `X-Api-Key` and will automatically be configured in scalar.
\ No newline at end of file
diff --git a/Zilean.sln b/Zilean.sln
index 2bc7c1b..e0ee7a7 100644
--- a/Zilean.sln
+++ b/Zilean.sln
@@ -13,7 +13,6 @@ Project("{2150E333-8FDC-42A3-9474-1A3956D46DE8}") = "eng", "eng", "{49EC8AF3-9A9
Directory.Build.targets = Directory.Build.targets
Directory.Packages.props = Directory.Packages.props
Dockerfile = Dockerfile
- compose.yaml = compose.yaml
README.md = README.md
eng\compose-dev.yaml = eng\compose-dev.yaml
eng\.dockerignore = eng\.dockerignore
diff --git a/compose.yaml b/compose.yaml
deleted file mode 100644
index 4a9a875..0000000
--- a/compose.yaml
+++ /dev/null
@@ -1,92 +0,0 @@
-volumes:
- zilean_data:
- zilean_tmp:
- pg-data:
- pgadmin-data:
-
-services:
- zilean:
- image: ipromknight/zilean:latest
- restart: unless-stopped
- container_name: zilean
- tty: true
- ports:
- - "8181:8181"
- volumes:
- - zilean_data:/app/data
- - zilean_tmp:/tmp
- environment:
- Zilean__Database__ConnectionString: "Host=postgres;Port=5432;Database=zilean;Username=postgres;Password=postgres;Include Error Detail=true;Timeout=300;CommandTimeout=300;"
- # Zilean__Dmm__ImportBatched: "true" Allows enabling batched import - this is for low-end systems.
- # Zilean__Dmm__MaxFilteredResults: 200 Allows changing the maximum number of filtered results returned by the DMM API. 200 is the default.
- # Zilean__Dmm__MinimumScoreMatch: 0.85 Allows changing the minimum score match for the DMM API. 0.85 is the default. Values between 0 and 1 are accepted.
- # Zilean__Imdb__MinimumScoreMatch: 0.85 Allows changing the minimum score match for Imdb Matching API. 0.85 is the default. Values between 0 and 1 are accepted.
- # Zilean__Dmm__MinimumReDownloadIntervalMinutes: 30 Minimum number of minutes between downloads from the DMM Repo - defaults to `30`
- # Zilean__Imdb__EnableImportMatching: true Should IMDB functionality be enabled, true/false, defaults to `true`. Disabling will lead to drastically improved import speed on initial run however you will have no internal imdb matching within zilean, so its up to the upstream project utilising zilean to implement that.
- healthcheck:
- test: curl --connect-timeout 10 --silent --show-error --fail http://localhost:8181/healthchecks/ping
- timeout: 60s
- interval: 30s
- retries: 10
- depends_on:
- postgres:
- condition: service_healthy
-
- postgres:
- image: postgres:16.3-alpine3.20
- container_name: postgres
- restart: unless-stopped
- environment:
- PGDATA: /var/lib/postgresql/data/pgdata
- POSTGRES_USER: postgres
- POSTGRES_PASSWORD: postgres
- POSTGRES_DB: zilean
- ports:
- - "5432:5432"
- volumes:
- - pg-data:/var/lib/postgresql/data/pgdata
- healthcheck:
- test: [ "CMD-SHELL", "pg_isready -U postgres" ]
- interval: 10s
- timeout: 5s
- retries: 5
-
- pgadmin:
- image: dpage/pgadmin4
- ports:
- - 6001:80
- environment:
- - PGADMIN_DEFAULT_EMAIL=postgres@example.com
- - PGADMIN_DEFAULT_PASSWORD=postgres
- - PGADMIN_CONFIG_SERVER_MODE=False
- - PGADMIN_CONFIG_MASTER_PASSWORD_REQUIRED=False
- entrypoint: /bin/sh -c "chmod 600 /pgpass; /entrypoint.sh;"
- user: root
- configs:
- - source: pgadmin_servers.json
- target: /pgadmin4/servers.json
- - source: pgadmin_pgpass
- target: /pgpass
- depends_on:
- postgres:
- condition: service_healthy
-
-configs:
- pgadmin_pgpass:
- content: postgres:5432:*:postgres:postgres
- pgadmin_servers.json:
- content: |
- {
- "Servers": {
- "1": {
- "Group": "Servers",
- "Name": "Zilean Database",
- "Host": "postgres",
- "Port": 5432,
- "MaintenanceDB": "zilean",
- "Username": "postgres",
- "PassFile": "/pgpass",
- "SSLMode": "prefer"
- }
- }
- }
\ No newline at end of file
diff --git a/docs/Writerside/c.list b/docs/Writerside/c.list
new file mode 100644
index 0000000..c4c77a2
--- /dev/null
+++ b/docs/Writerside/c.list
@@ -0,0 +1,6 @@
+
+
+
+
+
\ No newline at end of file
diff --git a/docs/Writerside/hi.tree b/docs/Writerside/hi.tree
new file mode 100644
index 0000000..5842b93
--- /dev/null
+++ b/docs/Writerside/hi.tree
@@ -0,0 +1,13 @@
+
+
+
+
+
+
+
+
+
+
\ No newline at end of file
diff --git a/docs/Writerside/images/zilean-logo.jpg b/docs/Writerside/images/zilean-logo.jpg
new file mode 100644
index 0000000..fa0f6ee
Binary files /dev/null and b/docs/Writerside/images/zilean-logo.jpg differ
diff --git a/docs/Writerside/redirection-rules.xml b/docs/Writerside/redirection-rules.xml
new file mode 100644
index 0000000..c7e960a
--- /dev/null
+++ b/docs/Writerside/redirection-rules.xml
@@ -0,0 +1,9 @@
+
+
+
+
+
\ No newline at end of file
diff --git a/docs/Writerside/snippets/compose-file.yaml b/docs/Writerside/snippets/compose-file.yaml
new file mode 100644
index 0000000..d817a18
--- /dev/null
+++ b/docs/Writerside/snippets/compose-file.yaml
@@ -0,0 +1,41 @@
+volumes:
+ zilean_data:
+ zilean_tmp:
+ postgres_data:
+
+services:
+ zilean:
+ image: ipromknight/zilean:latest
+ restart: unless-stopped
+ container_name: zilean
+ tty: true
+ ports:
+ - "8181:8181"
+ volumes:
+ - zilean_data:/app/data
+ - zilean_tmp:/tmp
+ healthcheck:
+ test: curl --connect-timeout 10 --silent --show-error --fail http://localhost:8181/healthchecks/ping
+ timeout: 60s
+ interval: 30s
+ retries: 10
+ depends_on:
+ postgres:
+ condition: service_healthy
+
+ postgres:
+ image: postgres:17.1
+ container_name: postgres
+ restart: unless-stopped
+ environment:
+ PGDATA: /var/lib/postgresql/data/pgdata
+ POSTGRES_USER: postgres
+ POSTGRES_PASSWORD: postgres
+ POSTGRES_DB: zilean
+ volumes:
+ - postgres_data:/var/lib/postgresql/data/pgdata
+ healthcheck:
+ test: [ "CMD-SHELL", "pg_isready -U postgres" ]
+ interval: 10s
+ timeout: 5s
+ retries: 5
\ No newline at end of file
diff --git a/docs/Writerside/snippets/default-settings.json b/docs/Writerside/snippets/default-settings.json
new file mode 100644
index 0000000..acec89e
--- /dev/null
+++ b/docs/Writerside/snippets/default-settings.json
@@ -0,0 +1,48 @@
+{
+ "Zilean": {
+ "ApiKey": "da3a4ee25d3749ad87301d701a924eb9154c4a95c9b740c5a179469ab0f10578",
+ "FirstRun": true,
+ "Dmm": {
+ "EnableScraping": true,
+ "EnableEndpoint": true,
+ "ScrapeSchedule": "0 * * * *",
+ "MinimumReDownloadIntervalMinutes": 30,
+ "MaxFilteredResults": 200,
+ "MinimumScoreMatch": 0.85
+ },
+ "Torznab": {
+ "EnableEndpoint": true
+ },
+ "Database": {
+ "ConnectionString": "Host=postgres;Database=zilean;Username=postgres;Password=postgres;Include Error Detail=true;Timeout=30;CommandTimeout=3600;"
+ },
+ "Torrents": {
+ "EnableEndpoint": false
+ },
+ "Imdb": {
+ "EnableImportMatching": true,
+ "EnableEndpoint": true,
+ "MinimumScoreMatch": 0.85
+ },
+ "Ingestion": {
+ "ZurgInstances": [],
+ "ZileanInstances": [],
+ "EnableScraping": false,
+ "Kubernetes": {
+ "EnableServiceDiscovery": false,
+ "KubernetesSelectors": [],
+ "KubeConfigFile": "/$HOME/.kube/config",
+ "AuthenticationType": 0
+ },
+ "ScrapeSchedule": "0 0 * * *",
+ "ZurgEndpointSuffix": "/debug/torrents",
+ "ZileanEndpointSuffix": "/torrents/all",
+ "RequestTimeout": 10000
+ },
+ "Parsing": {
+ "IncludeAdult": false,
+ "IncludeTrash": true,
+ "BatchSize": 5000
+ }
+ }
+}
\ No newline at end of file
diff --git a/docs/Writerside/snippets/example-torznab.xml b/docs/Writerside/snippets/example-torznab.xml
new file mode 100644
index 0000000..036a14a
--- /dev/null
+++ b/docs/Writerside/snippets/example-torznab.xml
@@ -0,0 +1,12 @@
+
+
+
+ Example Movie 1080p
+ magnet:?xt=urn:btih:...
+ Some details about the torrent.
+ Movies
+ 2147483648
+
+ ...
+
+
\ No newline at end of file
diff --git a/docs/Writerside/snippets/settings-with-ingestion.json b/docs/Writerside/snippets/settings-with-ingestion.json
new file mode 100644
index 0000000..cc4931d
--- /dev/null
+++ b/docs/Writerside/snippets/settings-with-ingestion.json
@@ -0,0 +1,54 @@
+{
+ "Zilean": {
+ "ApiKey": "da3a4ee25d3749ad87301d701a924eb9154c4a95c9b740c5a179469ab0f10578",
+ "FirstRun": true,
+ "Dmm": {
+ "EnableScraping": true,
+ "EnableEndpoint": true,
+ "ScrapeSchedule": "0 * * * *",
+ "MinimumReDownloadIntervalMinutes": 30,
+ "MaxFilteredResults": 200,
+ "MinimumScoreMatch": 0.85
+ },
+ "Torznab": {
+ "EnableEndpoint": true
+ },
+ "Database": {
+ "ConnectionString": "Host=localhost;Database=zilean;Username=postgres;Password=postgres;Include Error Detail=true;Timeout=30;CommandTimeout=3600;"
+ },
+ "Torrents": {
+ "EnableEndpoint": true
+ },
+ "Imdb": {
+ "EnableImportMatching": true,
+ "EnableEndpoint": true,
+ "MinimumScoreMatch": 0.85
+ },
+ "Ingestion": {
+ "ZurgInstances": [{
+ "Url": "http://zurg:9999",
+ "EndpointType": 1
+ }],
+ "ZileanInstances": [{
+ "Url": "http://other-zilean:8181",
+ "EndpointType": 0
+ }],
+ "EnableScraping": true,
+ "Kubernetes": {
+ "EnableServiceDiscovery": false,
+ "KubernetesSelectors": [],
+ "KubeConfigFile": "/$HOME/.kube/config",
+ "AuthenticationType": 0
+ },
+ "ScrapeSchedule": "0 0 * * *",
+ "ZurgEndpointSuffix": "/debug/torrents",
+ "ZileanEndpointSuffix": "/torrents/all",
+ "RequestTimeout": 10000
+ },
+ "Parsing": {
+ "IncludeAdult": false,
+ "IncludeTrash": true,
+ "BatchSize": 5000
+ }
+ }
+}
\ No newline at end of file
diff --git a/docs/Writerside/topics/Api.md b/docs/Writerside/topics/Api.md
new file mode 100644
index 0000000..cf6b376
--- /dev/null
+++ b/docs/Writerside/topics/Api.md
@@ -0,0 +1,85 @@
+
+# API
+
+The API for %Product% can be accessed at `http://localhost:8181/scalar/v2` by default, where you can see more information on the endpoints outlined below.
+
+Some endpoints can be disabled, so they do not get mapped by the Api, and then ultimately do not exist while %Product% is running.
+Refer to [](Configuration.md) for more details on enabling/disabling endpoints.
+
+## Authentication
+
+Some Api Endpoints require authenticated requests. To perform these, include the `ApiKey` in the headers:
+- **Header Key**: `X-Api-Key`
+- **Header Value**: `<your-api-key>`
+
+This `ApiKey` is generated on first run of %product%, and stored in the settings file: `%settings-file%`
+
+---
+
+## Full List of Endpoints
+
+### Blacklist Endpoints
+
+> Blacklist endpoints allow you to add or remove torrents from the blacklist.
+> Once a torrent is blacklisted, it will not be processed by %Product%.
+> Adding a torrent to the blacklist will also remove it from the database.
+{style="note"}
+
+
+| Path | Description | Authenticated |
+|---------------------|--------------------------------------|---------------|
+| `/blacklist/add` | Add a torrent to the blacklist. | Yes |
+| `/blacklist/remove` | Remove a torrent from the blacklist. | Yes |
+
+
+### Dmm Endpoints
+
+> Dmm endpoints allow you to search for content, filter results, and ingest content.
+> The Dmm import reruns on missing pages at the configured time interval (see [](Configuration.md)), but can also be run on demand.
+{style="note"}
+
+| Path | Description | Authenticated |
+|-------------------------|---------------------------|---------------|
+| `/dmm/search` | Search for content. | No |
+| `/dmm/filtered` | Filter search results. | No |
+| `/dmm/on-demand-scrape` | Ingest content on demand. | Yes |
+
+### Imdb Endpoints
+
+> Imdb endpoints allow you to search for content via Imdb `tt` ids.
+> The Imdb import of metadata occurs once on first run, and then every `14` days.
+{style="note"}
+
+| Path | Description | Authenticated |
+|---------------------|--------------------------------------|---------------|
+| `/imdb/search` | Search for content. | No |
+
+### Torznab Endpoints
+
+> Torznab endpoints allow you to search for content via requests adhering to the [Torznab Specification](https://torznab.github.io/spec-1.3-draft/torznab/Specification-v1.3.html#:~:text=Torznab%20is%20an%20api%20specification%20based%20on%20the,goal%20to%20supply%20an%20consistent%20api%20for%20torrents)
+> Torznab is an API specification based on the Newznab WebAPI. The API is built around a simple xml/rss feed with filtering and paging capabilities.
+> The Torznab standard strives to be completely compliant with Newznab, insofar as it does not conflict with its primary goal to supply a consistent API for torrents.
+{style="note"}
+
+| Path | Description | Authenticated |
+|----------------|----------------------------------------|---------------|
+| `/torznab/api` | Search feeds using torznab parameters. | No |
+
+### Torrents Endpoints
+
+> The Torrents endpoint exists so that an external system can Stream all the results in your database to their database.
+> This endpoint is disabled by default, and can be enabled in the settings file see [](Configuration.md).
+{style="note"}
+
+| Path | Description | Authenticated |
+|---------------------|--------------------------------------|---------------|
+| `/torrents/all` | Stream all torrents in the database. | No |
+
+### Healthcheck Endpoints
+
+> Healthcheck endpoints allow you to check the health of the %Product% service.
+> The healthcheck endpoint is always available, and does not require authentication.
+
+| Path | Description | Authenticated |
+|----------------------|--------------------------------------|---------------|
+| `/healthchecks/ping` | Check the health of the service. | No |
\ No newline at end of file
diff --git a/docs/Writerside/topics/Configuration.md b/docs/Writerside/topics/Configuration.md
new file mode 100644
index 0000000..786ca22
--- /dev/null
+++ b/docs/Writerside/topics/Configuration.md
@@ -0,0 +1,166 @@
+
+# Configuration
+
+Configuration for the Zilean application is defined in a JSON file, which can be mounted as a volume into the container at `/app/data/settings.json`.
+
+> The LOAD ORDER of the configuration is as follows:
+> Settings are loaded first from `/app/data/settings.json`, then from environment variables, as these take precedence.
+{ style="note" }
+
+The format of environment variables is as follows:
+`Zilean__{Section}__{Key}`
+Where `{Section}` is the section of the configuration, and `{Key}` is the key within that section.
+
+For example, to set the `ApiKey` in the `Zilean` section, you would set the environment variable `Zilean__ApiKey` or
+`Zilean__Dmm__EnableScraping` for the `Dmm.EnableScraping` key.
+
+### Example Configuration
+
+```json
+```
+{ src="default-settings.json" }
+
+### Configuration Options in Detail
+
+**ApiKey**
+_The API key used to authenticate requests to the Zilean API._
+_Default: `Generated on first run`_
+
+**FirstRun**
+_Indicates whether this is the first run of the application._
+_Default: `true`_
+
+### DMM Configuration
+**Dmm.EnableScraping**
+_Indicates whether the DMM indexer should scrape from Dmm Hashlists._
+_Default: `true`_
+
+**Dmm.EnableEndpoint**
+_Indicates whether the DMM indexer should expose an API endpoint._
+_Default: `true`_
+
+**Dmm.ScrapeSchedule**
+_The cron schedule for the DMM indexer to scrape from Dmm Hashlists._
+_Default: `0 * * * *` [Hourly]_
+
+**Dmm.MinimumReDownloadIntervalMinutes**
+_The minimum interval in minutes before re-downloading Dmm Hashlists._
+_Default: `30`_
+
+**Dmm.MaxFilteredResults**
+_The maximum number of filtered results to return from the DMM Search Endpoints._
+_Default: `200`_
+
+**Dmm.MinimumScoreMatch**
+_The minimum score match for DMM search results. Closer to 1 means a more exact match has to occur. A value between 1 and 0._
+_Default: `0.85`_
+
+### Torznab Configuration
+**Torznab.EnableEndpoint**
+_Indicates whether the Torznab indexer should expose an API endpoint._
+_Default: `true`_
+
+### Database Configuration
+**Database.ConnectionString**
+_The connection string for the PostgreSQL database._
+_Default: `Host=localhost;Database=zilean;Username=postgres;Password=postgres;Include Error Detail=true;Timeout=30;CommandTimeout=3600;`_
+
+The database connection string comprises the following:
+- `Host`: The host of the database, this will usually be the `containername` if you are using docker compose of the postgres instance.
+- `Database`: The name of the database to connect to.
+- `Username`: The username to connect to the database with.
+- `Password`: The password to connect to the database with.
+- `Include Error Detail`: Whether to include error details in logging database results.
+- `Timeout`: The timeout in seconds for the database connection.
+- `CommandTimeout`: The timeout in seconds for database commands to occur, such as applying migrations.
+
+### Torrents Configuration
+**Torrents.EnableEndpoint**
+_Indicates whether the Torrents API allowing other apps to scrape the Zilean database is enabled._
+_Default: `false`_
+
+### IMDB Configuration
+**Imdb.EnableImportMatching**
+_Indicates whether the indexer should import match titles to IMDB Ids during importing._
+_Default: `true`_
+
+**Imdb.EnableEndpoint**
+_Indicates whether the IMDB indexer should expose an API endpoint._
+_Default: `true`_
+
+**Imdb.MinimumScoreMatch**
+_The minimum score match for IMDB search results. Closer to 1 means a more exact match has to occur. A value between 1 and 0._
+_Default: `0.85`_
+
+### Ingestion Configuration
+**Ingestion.ZurgInstances**
+_A list of Zurg instances to scrape from._
+_Default: `[]`_
+
+**Ingestion.ZileanInstances**
+_A list of Zilean instances to scrape from._
+_Default: `[]`_
+
+**Ingestion.EnableScraping**
+_Indicates whether the Ingestion indexer should scrape from Zurg and Zilean instances._
+_Default: `false`_
+
+#### Kubernetes Configuration for Ingestion
+**Ingestion.Kubernetes.EnableServiceDiscovery**
+_Indicates whether the Ingestion indexer should use Kubernetes service discovery. This can be used to automatically find Zurg instances running in Kubernetes._
+_Default: `false`_
+
+**Ingestion.Kubernetes.KubernetesSelectors**
+_A list of selectors to use for Kubernetes service discovery._
+_Default: `[]`_
+
+**Ingestion.Kubernetes.KubeConfigFile**
+_The path to the Kubernetes configuration file._
+_Default: `/$HOME/.kube/config`_
+
+**Ingestion.Kubernetes.AuthenticationType**
+_The type of authentication to use for Kubernetes service discovery. 0 = ConfigFile, 1 = RoleBased (Kubernetes RBAC)._
+_Default: `0`_
+
+**Ingestion.ScrapeSchedule**
+_The cron schedule for the Ingestion indexer to scrape from Zurg and Zilean instances._
+_Default: `0 0 * * *` [Daily]_
+
+**Ingestion.ZurgEndpointSuffix**
+_The endpoint suffix for the Zurg API._
+_Default: `/debug/torrents`_
+
+**Ingestion.ZileanEndpointSuffix**
+_The endpoint suffix for the Zilean API._
+_Default: `/torrents/all`_
+
+**Ingestion.RequestTimeout**
+_The timeout in milliseconds for requests to Zurg and Zilean instances._
+_Default: `10000`_
+
+### Parsing Configuration
+**Parsing.IncludeAdult**
+_Indicates whether adult content should be included in the indexer._
+_Default: `false`_
+
+**Parsing.IncludeTrash**
+_Indicates whether trash content should be included in the indexer._
+_Default: `true`_
+
+**Parsing.BatchSize**
+_The batch size for parsing content._
+_Default: `5000`_
+
+## Enabling Ingestion
+
+To enable ingestion, set the `Ingestion.EnableScraping` key to `true` in the configuration.
+Also ensure that the `Ingestion.ZurgInstances` and/or `Ingestion.ZileanInstances` keys are populated with the appropriate `Url` and `EndpointType` values.
+`EndpointType` can be either `0` (Zilean) or `1` (Zurg).
+You do not have to specify both, you can specify one or the other, or both, depending on your requirements.
+Also there is no limit to the number of instances you can scrape from.
+
+An example of this configuration is as follows:
+
+```json
+```
+{ src="settings-with-ingestion.json" }
\ No newline at end of file
diff --git a/docs/Writerside/topics/Database-Migrations.md b/docs/Writerside/topics/Database-Migrations.md
new file mode 100644
index 0000000..035510d
--- /dev/null
+++ b/docs/Writerside/topics/Database-Migrations.md
@@ -0,0 +1,52 @@
+# Database Migrations: A Detailed Overview
+
+## 1. Purpose of Database Migrations
+The primary goal of database migrations is to synchronize the database schema with the application's data model. This is essential when:
+- Adding new features to an application that require changes to the database.
+- Modifying existing database structures to improve performance or adapt to new requirements.
+- Fixing issues or bugs in the schema.
+- Keeping multiple environments (e.g., development, staging, production) consistent.
+
+---
+
+## 2. Components of a Database Migration
+A typical migration involves:
+- **Schema Changes**: Modifying tables, columns, indexes, or constraints. For example:
+ - Adding new tables or columns.
+ - Renaming or removing existing tables or columns.
+ - Changing data types of columns.
+ - Adding or modifying primary keys, foreign keys, or indexes.
+- **Data Transformations**: Moving or transforming existing data to fit the new schema. For example:
+ - Populating new columns with default or calculated values.
+ - Restructuring data to match new relationships.
+- **Rollback Mechanism**: Providing a way to undo changes in case of errors or unexpected issues.
+
+---
+
+## 3. How Database Migrations Work
+### a. **Migration Files**
+Migrations are typically written as scripts or classes that describe the changes to the database. These files:
+- Define the schema changes or data transformations (e.g., using SQL or migration frameworks).
+- Track the order of migrations to ensure they are applied sequentially.
+
+### b. **Version Control**
+Migration frameworks often use a versioning system (e.g., timestamps or sequential numbers) to track which migrations have been applied. This prevents duplicate executions and maintains consistency across environments.
+
+### c. **Execution**
+Migrations are executed using a migration tool or framework. The tool:
+- Reads the migration file.
+- Applies the changes to the database.
+- Updates a record (e.g., in a special `migrations` table) to mark the migration as applied.
+
+### d. **Rollback**
+If a migration introduces errors, the rollback script can revert the database to its previous state.
+
+---
+
+## How %Product% Handles Database Migrations
+
+%Product% migrations automatically run when the application starts. This is done by checking the database for the latest migration, and then running any migrations that have not been applied.
+
+Some migrations take longer to run than others, but the overall idea here is you will not have to worry about running migrations manually. This is all handled by %Product%.
+
+Database Index management is also performed by %Product% on startup. This is done by checking the database for any missing indexes, and then creating them if they do not exist, ensuring that the database is optimized for the application.
\ No newline at end of file
diff --git a/docs/Writerside/topics/Getting-Started.md b/docs/Writerside/topics/Getting-Started.md
new file mode 100644
index 0000000..0b97752
--- /dev/null
+++ b/docs/Writerside/topics/Getting-Started.md
@@ -0,0 +1,43 @@
+# Getting Started
+
+
+
+## What is %Product%?
+
+%Product% is a service that allows you to search for [DebridMediaManager](https://github.com/debridmediamanager/debrid-media-manager) sourced content shared by users.
+The DMM import reruns on missing pages in the configured time interval; see [](Configuration.md).
+
+This can then be configured as a Torznab indexer in your favorite content application.
+
+Newly added is the ability for %Product% to scrape from your running Zurg instance, and from other running %Product% instances.
+
+## Installation
+
+The easiest way to get up and running with %Product% is to use the provided docker-compose file.
+
+Ensure you have the following installed:
+- Docker, Docker Desktop or Podman
+
+The example compose file below can be copied, and used to get the system running locally.
+
+```yaml
+```
+{ src="compose-file.yaml" }
+
+This compose file will start the following services:
+- %Product%
+- Postgres (version 17)
+
+The configuration and persistent storage of both services will be stored in Docker volumes, but I recommend changing this to the `./data` directory (next to where the compose file resides) if you are not on Windows.
+
+## Pulling Latest Image
+
+If you would like to pull the latest image from the docker registry, you can use the following command:
+
+```bash
+docker compose pull %product%
+```
+
+> Please note: always make sure you check the [github release notes](https://github.com/iPromKnight/zilean/releases) for the latest release, to ensure there are no breaking changes.
+> The changelog can also be viewed [here](https://github.com/iPromKnight/zilean/blob/main/CHANGELOG.md).
+{ style="note" }
\ No newline at end of file
diff --git a/docs/Writerside/topics/Torznab-Indexer.md b/docs/Writerside/topics/Torznab-Indexer.md
new file mode 100644
index 0000000..47762a0
--- /dev/null
+++ b/docs/Writerside/topics/Torznab-Indexer.md
@@ -0,0 +1,81 @@
+# Torznab Indexer: A Detailed Explanation
+
+## What is a Torznab Indexer?
+
+A **Torznab indexer** is a service or API that provides a standardized way to interact with torrent or Usenet indexers. It is part of the **"nzb" and "torrent" search ecosystem**, inspired by the NZB (Usenet) model but adapted for torrents. Torznab simplifies how applications like **Sonarr**, **Radarr**, or **Prowlarr** communicate with indexers by using a consistent interface.
+
+The **Torznab protocol** is based on the RSS feed format and extends it with additional query parameters to enable searching, filtering, and retrieving torrent metadata efficiently.
+
+---
+
+## Purpose of Torznab Indexers
+
+Torznab indexers serve as a bridge between **media management applications** (like Sonarr or Radarr) and the actual torrent trackers or Usenet servers. Their key purposes include:
+
+1. **Centralized Management**:
+ - Allow users to aggregate multiple torrent or Usenet indexers into a single application.
+ - Enable media management tools to work seamlessly with various trackers.
+
+2. **Standardization**:
+ - Provide a uniform API for interacting with different indexers, which might otherwise have diverse and incompatible interfaces.
+
+3. **Search and Discovery**:
+ - Enable applications to perform automated searches for specific content (e.g., movies, TV shows, or software) using criteria such as keywords, categories, or file sizes.
+
+4. **Automation**:
+ - Facilitate hands-free searching and downloading of media based on predefined filters and schedules in applications like Radarr and Sonarr.
+
+---
+
+## How Does a Torznab Indexer Work?
+
+Torznab indexers work by exposing a RESTful API that supports queries and responses in a consistent format. Here's how the workflow typically looks:
+
+### 1. **Setup**
+- Users configure the Torznab indexer URL and API key in their media application.
+- Applications send requests to the indexer using the Torznab API.
+
+### 2. **Request**
+- The application makes a query to the Torznab indexer, specifying parameters such as:
+ - **Search term**: Keywords for content (e.g., a movie name).
+ - **Category**: Filters like movies, TV shows, or games.
+ - **Limits**: File size, age, etc.
+
+Example request: `GET /api?t=search&q=example_movie&cat=5000`
+
+### 3. **Response**
+- The Torznab indexer responds with an XML feed (based on RSS) that contains metadata about the search results.
+- The response includes fields like:
+ - Title
+ - Link (usually a magnet link or torrent file URL)
+ - Description
+ - Category
+ - Size
+
+Example response (simplified):
+```xml
+```
+{ src="example-torznab.xml" }
+
+---
+
+## Setting up as a Torznab Indexer for Prowlarr
+
+### Prowlarr
+
+* Open Prowlarr, and navigate to `Indexers -> Add`
+* Search for `generic`, with type `private`
+* Add `Generic Torznab`
+* Give it a name at the top (Zilean)
+* Ensure `Url` is `http://zilean:8181/torznab`
+* Ensure `API` box is `/api`
+* Sync to Apps
+
+Then move on to Radarr if using it
+
+### Radarr
+
+* Navigate to `/settings/indexers`
+* On `Zilean` click edit
+* Tick the Box `Remove year from search string`
+* Save
diff --git a/docs/Writerside/v.list b/docs/Writerside/v.list
new file mode 100644
index 0000000..269c6df
--- /dev/null
+++ b/docs/Writerside/v.list
@@ -0,0 +1,7 @@
+
+
+
+
+
+
+
diff --git a/docs/Writerside/writerside.cfg b/docs/Writerside/writerside.cfg
new file mode 100644
index 0000000..7c3a7de
--- /dev/null
+++ b/docs/Writerside/writerside.cfg
@@ -0,0 +1,11 @@
+
+
+
+
+
+
+
+
+
+
+
\ No newline at end of file
diff --git a/src/Zilean.Shared/Features/Configuration/DatabaseConfiguration.cs b/src/Zilean.Shared/Features/Configuration/DatabaseConfiguration.cs
index d7bf0a7..fab9ba7 100644
--- a/src/Zilean.Shared/Features/Configuration/DatabaseConfiguration.cs
+++ b/src/Zilean.Shared/Features/Configuration/DatabaseConfiguration.cs
@@ -2,5 +2,5 @@ namespace Zilean.Shared.Features.Configuration;
public class DatabaseConfiguration
{
- public string ConnectionString { get; set; } = "Host=localhost;Database=zilean;Username=postgres;Password=postgres;Include Error Detail=true;Timeout=30;CommandTimeout=3600;";
+ public string ConnectionString { get; set; } = "Host=postgres;Database=zilean;Username=postgres;Password=postgres;Include Error Detail=true;Timeout=30;CommandTimeout=3600;";
}