Skip to content

Commit

Permalink
Merge pull request #4187 from learningequality/hotfixes
Browse files Browse the repository at this point in the history
Release v2023.07.05 - Kolibri 0.16 support
  • Loading branch information
rtibbles authored Jul 5, 2023
2 parents 761a532 + b16dd11 commit 84bec71
Show file tree
Hide file tree
Showing 450 changed files with 25,524 additions and 10,186 deletions.
17 changes: 17 additions & 0 deletions .docker/README.md
Original file line number Diff line number Diff line change
@@ -0,0 +1,17 @@
## What is this directory?
This directory is a space for mounting directories to docker containers, allowing the mounts to be specified in committed code, but the contents of the mounts to remain ignored by git.

### postgres
The `postgres` directory is mounted to `/docker-entrypoint-initdb.d`. Any `.sh` or `.sql` files will be executed when the container is first started with a new data volume. You may read more regarding this functionality on the [Docker Hub page](https://hub.docker.com/_/postgres), under _Initialization scripts_.

When running docker services through the Makefile commands, the Makefile specifies a docker-compose project name that depends on the name of the current git branch. This causes the volumes to change when the branch changes, which is helpful when switching between many branches that might have incompatible database schema changes. The downside is that whenever you start a new branch, you'll have to re-initialize the database, like with `yarn run devsetup`. Creating a SQL dump from an existing, initialized database and placing it in this directory will allow you to skip this step.

To create a SQL dump of your preferred database data useful for local testing, run `make .docker/postgres/init.sql` while the docker postgres container is running.

> Note: you will likely need to run `make migrate` to ensure your database schema is up-to-date when using this technique.
#### pgpass
Stores the postgres credentials for the docker service, enabling scripted access without manually providing a password; created by `make .docker/pgpass`

### minio
The `minio` directory is mounted to `/data`, since it isn't necessarily useful to have this data isolated based on the current git branch.
3 changes: 3 additions & 0 deletions .editorconfig
Original file line number Diff line number Diff line change
@@ -1,5 +1,8 @@
root = true

[*]
max_line_length = 100

[*.js]
indent_size = 2

Expand Down
9 changes: 9 additions & 0 deletions .github/dependabot.yml
Original file line number Diff line number Diff line change
Expand Up @@ -8,9 +8,18 @@ updates:
directory: "/"
schedule:
interval: "daily"
time: "00:00"

# Maintain dependencies for JavaScript
- package-ecosystem: "npm"
directory: "/"
schedule:
interval: "daily"
time: "00:00"

# Maintain dependencies for GitHub Actions
- package-ecosystem: "github-actions"
directory: "/"
schedule:
interval: "daily"
time: "00:00"
16 changes: 8 additions & 8 deletions .github/workflows/deploytest.yml
Original file line number Diff line number Diff line change
Expand Up @@ -27,13 +27,13 @@ jobs:
if: ${{ needs.pre_job.outputs.should_skip != 'true' }}
runs-on: ubuntu-latest
steps:
- uses: actions/checkout@v2
- uses: actions/checkout@v3
- name: Use Node.js
uses: actions/setup-node@v1
uses: actions/setup-node@v3
with:
node-version: '16.x'
- name: Cache Node.js modules
uses: actions/cache@v2
uses: actions/cache@v3
with:
path: '**/node_modules'
key: ${{ runner.OS }}-node-${{ hashFiles('**/yarn.lock') }}
Expand All @@ -51,13 +51,13 @@ jobs:
if: ${{ needs.pre_job.outputs.should_skip != 'true' }}
runs-on: ubuntu-latest
steps:
- uses: actions/checkout@v2
- uses: actions/checkout@v3
- name: Set up Python 3.9
uses: actions/setup-python@v2
uses: actions/setup-python@v4
with:
python-version: 3.9
- name: pip cache
uses: actions/cache@v2
uses: actions/cache@v3
with:
path: ~/.cache/pip
key: ${{ runner.os }}-pyprod-${{ hashFiles('requirements.txt') }}
Expand All @@ -69,11 +69,11 @@ jobs:
pip install pip-tools
pip-sync requirements.txt
- name: Use Node.js
uses: actions/setup-node@v1
uses: actions/setup-node@v3
with:
node-version: '16.x'
- name: Cache Node.js modules
uses: actions/cache@v2
uses: actions/cache@v3
with:
path: '**/node_modules'
key: ${{ runner.OS }}-node-${{ hashFiles('**/yarn.lock') }}
Expand Down
6 changes: 3 additions & 3 deletions .github/workflows/frontendlint.yml
Original file line number Diff line number Diff line change
Expand Up @@ -27,13 +27,13 @@ jobs:
if: ${{ needs.pre_job.outputs.should_skip != 'true' }}
runs-on: ubuntu-latest
steps:
- uses: actions/checkout@v2
- uses: actions/checkout@v3
- name: Use Node.js
uses: actions/setup-node@v1
uses: actions/setup-node@v3
with:
node-version: '16.x'
- name: Cache Node.js modules
uses: actions/cache@v2
uses: actions/cache@v3
with:
path: '**/node_modules'
key: ${{ runner.OS }}-node-${{ hashFiles('**/yarn.lock') }}
Expand Down
6 changes: 3 additions & 3 deletions .github/workflows/frontendtest.yml
Original file line number Diff line number Diff line change
Expand Up @@ -27,13 +27,13 @@ jobs:
if: ${{ needs.pre_job.outputs.should_skip != 'true' }}
runs-on: ubuntu-latest
steps:
- uses: actions/checkout@v2
- uses: actions/checkout@v3
- name: Use Node.js
uses: actions/setup-node@v1
uses: actions/setup-node@v3
with:
node-version: '16.x'
- name: Cache Node.js modules
uses: actions/cache@v2
uses: actions/cache@v3
with:
path: '**/node_modules'
key: ${{ runner.OS }}-node-${{ hashFiles('**/yarn.lock') }}
Expand Down
6 changes: 3 additions & 3 deletions .github/workflows/pythontest.yml
Original file line number Diff line number Diff line change
Expand Up @@ -61,7 +61,7 @@ jobs:
# Maps port 6379 on service container to the host
- 6379:6379
steps:
- uses: actions/checkout@v2
- uses: actions/checkout@v3
- name: Set up minio
run: |
docker run -d -p 9000:9000 --name minio \
Expand All @@ -71,11 +71,11 @@ jobs:
-v /tmp/minio_config:/root/.minio \
minio/minio server /data
- name: Set up Python 3.9
uses: actions/setup-python@v2
uses: actions/setup-python@v4
with:
python-version: 3.9
- name: pip cache
uses: actions/cache@v2
uses: actions/cache@v3
with:
path: ~/.cache/pip
key: ${{ runner.os }}-pytest-${{ hashFiles('requirements.txt', 'requirements-dev.txt') }}
Expand Down
9 changes: 7 additions & 2 deletions .gitignore
Original file line number Diff line number Diff line change
Expand Up @@ -29,6 +29,8 @@ var/
# IntelliJ IDE, except project config
.idea/*
!.idea/studio.iml
# ignore future updates to run configuration
.run/devserver.run.xml

# PyInstaller
# Usually these files are written by a python script from a template
Expand Down Expand Up @@ -95,8 +97,11 @@ contentcuration/csvs/
# Ignore the TAGS file generated by some editors
TAGS

# Ignore Vagrant-created files
/.vagrant/
# Services
.vagrant/
.docker/minio/*
.docker/postgres/*
.docker/pgpass

# Ignore test files
/contentcuration/contentcuration/proxy_settings.py
Expand Down
24 changes: 24 additions & 0 deletions .run/devserver.run.xml
Original file line number Diff line number Diff line change
@@ -0,0 +1,24 @@
<component name="ProjectRunConfigurationManager">
<configuration default="false" name="devserver" type="PythonConfigurationType" factoryName="Python">
<module name="studio" />
<option name="INTERPRETER_OPTIONS" value="" />
<option name="PARENT_ENVS" value="true" />
<envs>
<env name="PYTHONUNBUFFERED" value="1" />
</envs>
<option name="SDK_HOME" value="$ModuleSdkPath$" />
<option name="WORKING_DIRECTORY" value="$PROJECT_DIR$" />
<option name="IS_MODULE_SDK" value="true" />
<option name="ADD_CONTENT_ROOTS" value="false" />
<option name="ADD_SOURCE_ROOTS" value="false" />
<EXTENSION ID="PythonCoverageRunConfigurationExtension" runner="coverage.py" />
<option name="SCRIPT_NAME" value="contentcuration/manage.py" />
<option name="PARAMETERS" value="runserver --settings=contentcuration.dev_settings 0.0.0.0:8080" />
<option name="SHOW_COMMAND_LINE" value="false" />
<option name="EMULATE_TERMINAL" value="false" />
<option name="MODULE_MODE" value="false" />
<option name="REDIRECT_INPUT" value="false" />
<option name="INPUT_FILE" value="" />
<method v="2" />
</configuration>
</component>
101 changes: 65 additions & 36 deletions Makefile
Original file line number Diff line number Diff line change
@@ -1,3 +1,11 @@
# standalone install method
DOCKER_COMPOSE = docker-compose

# support new plugin installation for docker-compose
ifeq (, $(shell which docker-compose))
DOCKER_COMPOSE = docker compose
endif

###############################################################
# PRODUCTION COMMANDS #########################################
###############################################################
Expand All @@ -20,6 +28,18 @@ migrate:
python contentcuration/manage.py migrate || true
python contentcuration/manage.py loadconstants

# This is a special command that we'll reuse to run data migrations outside of the normal
# django migration process. This is useful for long running migrations which we don't want to block
# the CD build. Do not delete!
# Procedure:
# 1) Add a new management command for the migration
# 2) Call it here
# 3) Perform the release
# 4) Remove the management command from this `deploy-migrate` recipe
# 5) Repeat!
deploy-migrate:
python contentcuration/manage.py export_channels_to_kolibri_public

contentnodegc:
python contentcuration/manage.py garbage_collect

Expand All @@ -31,7 +51,11 @@ learningactivities:

set-tsvectors:
python contentcuration/manage.py set_channel_tsvectors
python contentcuration/manage.py set_contentnode_tsvectors
python contentcuration/manage.py set_contentnode_tsvectors --published

reconcile:
python contentcuration/manage.py reconcile_publishing_status
python contentcuration/manage.py reconcile_change_tasks

###############################################################
# END PRODUCTION COMMANDS #####################################
Expand All @@ -53,10 +77,10 @@ i18n-extract: i18n-extract-frontend i18n-extract-backend
i18n-transfer-context:
yarn transfercontext

#i18n-django-compilemessages:
# Change working directory to kolibri/ such that compilemessages
i18n-django-compilemessages:
# Change working directory to contentcuration/ such that compilemessages
# finds only the .po files nested there.
#cd kolibri && PYTHONPATH="..:$$PYTHONPATH" python -m kolibri manage compilemessages
cd contentcuration && python manage.py compilemessages

i18n-upload: i18n-extract
python node_modules/kolibri-tools/lib/i18n/crowdin.py upload-sources ${branch}
Expand All @@ -67,27 +91,15 @@ i18n-pretranslate:
i18n-pretranslate-approve-all:
python node_modules/kolibri-tools/lib/i18n/crowdin.py pretranslate ${branch} --approve-all

i18n-convert:
python node_modules/kolibri-tools/lib/i18n/crowdin.py convert-files

i18n-download-translations:
python node_modules/kolibri-tools/lib/i18n/crowdin.py rebuild-translations ${branch}
python node_modules/kolibri-tools/lib/i18n/crowdin.py download-translations ${branch}
node node_modules/kolibri-tools/lib/i18n/intl_code_gen.js
python node_modules/kolibri-tools/lib/i18n/crowdin.py convert-files
# TODO: is this necessary? # Manual hack to add es language by copying es_ES to es
# cp -r contentcuration/locale/es_ES contentcuration/locale/es
yarn exec kolibri-tools i18n-code-gen -- --output-dir ./contentcuration/contentcuration/frontend/shared/i18n
$(MAKE) i18n-django-compilemessages
yarn exec kolibri-tools i18n-create-message-files -- --namespace contentcuration --searchPath ./contentcuration/contentcuration/frontend

i18n-download: i18n-download-translations

i18n-update:
echo "WARNING: i18n-update has been renamed to i18n-download"
$(MAKE) i18n-download
echo "WARNING: i18n-update has been renamed to i18n-download"

i18n-stats:
python node_modules/kolibri-tools/lib/i18n/crowdin.py translation-stats ${branch}

i18n-download-glossary:
python node_modules/kolibri-tools/lib/i18n/crowdin.py download-glossary

Expand Down Expand Up @@ -126,9 +138,9 @@ hascaptions:

export COMPOSE_PROJECT_NAME=studio_$(shell git rev-parse --abbrev-ref HEAD)

purge-postgres:
-PGPASSWORD=kolibri dropdb -U learningequality "kolibri-studio" --port 5432 -h localhost
PGPASSWORD=kolibri createdb -U learningequality "kolibri-studio" --port 5432 -h localhost
purge-postgres: .docker/pgpass
-PGPASSFILE=.docker/pgpass dropdb -U learningequality "kolibri-studio" --port 5432 -h localhost
PGPASSFILE=.docker/pgpass createdb -U learningequality "kolibri-studio" --port 5432 -h localhost

destroy-and-recreate-database: purge-postgres setup

Expand All @@ -138,39 +150,56 @@ devceleryworkers:
run-services:
$(MAKE) -j 2 dcservicesup devceleryworkers

.docker/minio:
mkdir -p $@

.docker/postgres:
mkdir -p $@

.docker/pgpass:
echo "localhost:5432:kolibri-studio:learningequality:kolibri" > $@
chmod 600 $@

.docker/postgres/init.sql: .docker/pgpass
# assumes postgres is running in a docker container
PGPASSFILE=.docker/pgpass pg_dump --host localhost --port 5432 --username learningequality --dbname "kolibri-studio" --exclude-table-data=contentcuration_change --file $@

dcbuild:
# build all studio docker image and all dependent services using docker-compose
docker-compose build
$(DOCKER_COMPOSE) build

dcup:
dcup: .docker/minio .docker/postgres
# run all services except for cloudprober
docker-compose up studio-app celery-worker
$(DOCKER_COMPOSE) up studio-app celery-worker

dcup-cloudprober:
dcup-cloudprober: .docker/minio .docker/postgres
# run all services including cloudprober
docker-compose up
$(DOCKER_COMPOSE) up

dcdown:
# stop all containers that were started using docker-compose
docker-compose down
# stop all containers that were started using $(DOCKER_COMPOSE)
$(DOCKER_COMPOSE) down

dcclean:
# stop all containers and delete volumes
docker-compose down -v
$(DOCKER_COMPOSE) down -v
docker image prune -f

dcshell:
# bash shell inside the (running!) studio-app container
docker-compose exec studio-app /usr/bin/fish
$(DOCKER_COMPOSE) exec studio-app /usr/bin/fish

dcpsql: .docker/pgpass
PGPASSFILE=.docker/pgpass psql --host localhost --port 5432 --username learningequality --dbname "kolibri-studio"

dctest:
dctest: .docker/minio .docker/postgres
# run backend tests inside docker, in new instances
docker-compose run studio-app make test
$(DOCKER_COMPOSE) run studio-app make test

dcservicesup:
dcservicesup: .docker/minio .docker/postgres
# launch all studio's dependent services using docker-compose
docker-compose -f docker-compose.yml -f docker-compose.alt.yml up minio postgres redis
$(DOCKER_COMPOSE) -f docker-compose.yml -f docker-compose.alt.yml up minio postgres redis

dcservicesdown:
# stop services that were started using dcservicesup
docker-compose -f docker-compose.yml -f docker-compose.alt.yml down
$(DOCKER_COMPOSE) -f docker-compose.yml -f docker-compose.alt.yml down
4 changes: 2 additions & 2 deletions README.md
Original file line number Diff line number Diff line change
Expand Up @@ -59,10 +59,10 @@ export LDFLAGS="-L/opt/homebrew/opt/openssl/lib"
```

### Install frontend dependencies
Install the version of node.js supported by Studio, and install `yarn`:
Install the version of node.js supported by Studio, and install `yarn` version 1.x:
```bash
volta install node@16
volta install yarn
volta install yarn@1
```
After installing `yarn`, you may now install frontend dependencies:
```bash
Expand Down
Loading

0 comments on commit 84bec71

Please sign in to comment.