Mirror of https://github.com/YunoHost-Apps/overleaf_ynh.git (synced 2024-09-03 19:56:27 +02:00)

Merge pull request #25 from YunoHost-Apps/testing

Testing upgrade to 5.0.1
Authored by Thomas on 2024-05-02 15:04:01 +02:00, committed via GitHub
commit 405228e9b8
GPG key ID: B5690EEEBB952194 (no known key found for this signature in database)
26 changed files with 468 additions and 386 deletions


@@ -4,3 +4,4 @@
 - [Irakurri README euskaraz](README_eu.md)
 - [Lire le README en français](README_fr.md)
 - [Le o README en galego](README_gl.md)
+- [阅读中文(简体)的 README](README_zh_Hans.md)


@@ -19,7 +19,7 @@ It shall NOT be edited by hand.
 Overleaf is an open-source online real-time collaborative LaTeX editor.
-**Shipped version:** 2024.01.26~ynh1
+**Shipped version:** 2024.04.09~ynh1
 ## Screenshots


@@ -19,7 +19,7 @@ EZ editatu eskuz.
 Overleaf is an open-source online real-time collaborative LaTeX editor.
-**Paketatutako bertsioa:** 2024.01.26~ynh1
+**Paketatutako bertsioa:** 2024.04.09~ynh1
 ## Pantaila-argazkiak


@@ -19,7 +19,7 @@ Il NE doit PAS être modifié à la main.
 Overleaf est un éditeur LaTeX collaboratif en ligne et en temps réel open source.
-**Version incluse:** 2024.01.26~ynh1
+**Version incluse:** 2024.04.09~ynh1
 ## Captures d'écran


@@ -19,7 +19,7 @@ NON debe editarse manualmente.
 Overleaf is an open-source online real-time collaborative LaTeX editor.
-**Versión proporcionada:** 2024.01.26~ynh1
+**Versión proporcionada:** 2024.04.09~ynh1
 ## Capturas de pantalla

README_zh_Hans.md (new file, 48 lines)

@@ -0,0 +1,48 @@
+<!--
+注意:此 README 由 <https://github.com/YunoHost/apps/tree/master/tools/readme_generator> 自动生成
+请勿手动编辑。
+-->
+# YunoHost 的 Overleaf
+[![集成程度](https://dash.yunohost.org/integration/overleaf.svg)](https://dash.yunohost.org/appci/app/overleaf) ![工作状态](https://ci-apps.yunohost.org/ci/badges/overleaf.status.svg) ![维护状态](https://ci-apps.yunohost.org/ci/badges/overleaf.maintain.svg)
+[![使用 YunoHost 安装 Overleaf](https://install-app.yunohost.org/install-with-yunohost.svg)](https://install-app.yunohost.org/?app=overleaf)
+*[阅读此 README 的其它语言版本。](./ALL_README.md)*
+> *通过此软件包,您可以在 YunoHost 服务器上快速、简单地安装 Overleaf。*
+> *如果您还没有 YunoHost,请参阅[指南](https://yunohost.org/install)了解如何安装它。*
+## 概况
+Overleaf is an open-source online real-time collaborative LaTeX editor.
+**分发版本:** 2024.04.09~ynh1
+## 截图
+![Overleaf 的截图](./doc/screenshots/screenshot.png)
+## 文档与资源
+- 官方应用网站: <https://www.overleaf.com>
+- 官方用户文档: <https://www.overleaf.com/learn>
+- 上游应用代码库: <https://github.com/overleaf/overleaf>
+- YunoHost 商店: <https://apps.yunohost.org/app/overleaf>
+- 报告 bug: <https://github.com/YunoHost-Apps/overleaf_ynh/issues>
+## 开发者信息
+请向 [`testing` 分支](https://github.com/YunoHost-Apps/overleaf_ynh/tree/testing) 发送拉取请求。
+如要尝试 `testing` 分支,请这样操作:
+```bash
+sudo yunohost app install https://github.com/YunoHost-Apps/overleaf_ynh/tree/testing --debug
+sudo yunohost app upgrade overleaf -u https://github.com/YunoHost-Apps/overleaf_ynh/tree/testing --debug
+```
+**有关应用打包的更多信息:** <https://yunohost.org/packaging_apps>


@@ -1,18 +1,23 @@
 #sub_path_only rewrite ^__PATH__$ __PATH__/ permanent;
+# block external access to prometheus /metrics
+location /metrics/ {
+internal;
+}
 location __PATH__/ {
 # Set max upload size
 client_max_body_size 50m;
-proxy_pass http://localhost:__PORT__; # The port must match the value of SHARELATEX_PORT.
+proxy_pass http://localhost:__PORT__;
 proxy_http_version 1.1;
 proxy_set_header Upgrade $http_upgrade;
 proxy_set_header Connection "upgrade";
 proxy_set_header Host $host;
 proxy_set_header X-Forwarded-Host $host;
 proxy_set_header X-Forwarded-For $proxy_add_x_forwarded_for;
-proxy_set_header X-Forwarded-Proto $scheme;
+proxy_read_timeout 10m;
 proxy_send_timeout 10m;
 # Include SSOWAT user panel.
 include conf.d/yunohost_panel.conf.inc;
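The `/metrics/` location added above relies on nginx's `internal` directive, so nginx answers any client request to that path with a 404; only internal redirects could ever reach it. A minimal way to sanity-check this after deploying the testing branch — the domain below is a placeholder, not something defined by the package:

```bash
# Expect HTTP 404: the new location blocks the Prometheus metrics path for
# external clients (replace example.org with the domain the app is installed on).
curl -s -o /dev/null -w '%{http_code}\n' https://example.org/metrics/
```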


@@ -8,6 +8,8 @@ User=__APP__
 Group=__APP__
 WorkingDirectory=__INSTALL_DIR__/
 EnvironmentFile=__INSTALL_DIR__/variables.env
+Environment="CHAT_HOST=127.0.0.1"
+Environment="MONGO_CONNECTION_STRING=mongodb://127.0.0.1:27017/__DB_NAME__"
 ExecStart=__YNH_NODE__ __INSTALL_DIR__/live/services/chat/app.js
 StandardOutput=append:/var/log/__APP__/chat.log
 StandardError=inherit


@@ -8,6 +8,8 @@ User=__APP__
 Group=__APP__
 WorkingDirectory=__INSTALL_DIR__/
 EnvironmentFile=__INSTALL_DIR__/variables.env
+Environment="CLSI_HOST=127.0.0.1"
+Environment="MONGO_CONNECTION_STRING=mongodb://127.0.0.1:27017/__DB_NAME__"
 ExecStart=__YNH_NODE__ __INSTALL_DIR__/live/services/clsi/app.js
 StandardOutput=append:/var/log/__APP__/clsi.log
 StandardError=inherit


@@ -8,6 +8,8 @@ User=__APP__
 Group=__APP__
 WorkingDirectory=__INSTALL_DIR__/
 EnvironmentFile=__INSTALL_DIR__/variables.env
+Environment="CONTACTS_HOST=127.0.0.1"
+Environment="MONGO_CONNECTION_STRING=mongodb://127.0.0.1:27017/__DB_NAME__"
 ExecStart=__YNH_NODE__ __INSTALL_DIR__/live/services/contacts/app.js
 StandardOutput=append:/var/log/__APP__/contacts.log
 StandardError=inherit


@@ -8,6 +8,8 @@ User=__APP__
 Group=__APP__
 WorkingDirectory=__INSTALL_DIR__/
 EnvironmentFile=__INSTALL_DIR__/variables.env
+Environment="DOCSTORE_HOST=127.0.0.1"
+Environment="MONGO_CONNECTION_STRING=mongodb://127.0.0.1:27017/__DB_NAME__"
 ExecStart=__YNH_NODE__ __INSTALL_DIR__/live/services/docstore/app.js
 StandardOutput=append:/var/log/__APP__/docstore.log
 StandardError=inherit


@@ -8,6 +8,9 @@ User=__APP__
 Group=__APP__
 WorkingDirectory=__INSTALL_DIR__/
 EnvironmentFile=__INSTALL_DIR__/variables.env
+Environment="DOCUMENT_UPDATER_HOST=127.0.0.1"
+Environment="DOCUPDATER_HOST=127.0.0.1"
+Environment="MONGO_CONNECTION_STRING=mongodb://127.0.0.1:27017/__DB_NAME__"
 ExecStart=__YNH_NODE__ __INSTALL_DIR__/live/services/document-updater/app.js
 StandardOutput=append:/var/log/__APP__/document-updater.log
 StandardError=inherit


@@ -8,6 +8,8 @@ User=__APP__
 Group=__APP__
 WorkingDirectory=__INSTALL_DIR__/
 EnvironmentFile=__INSTALL_DIR__/variables.env
+Environment="FILESTORE_HOST=127.0.0.1"
+Environment="MONGO_CONNECTION_STRING=mongodb://127.0.0.1:27017/__DB_NAME__"
 ExecStart=__YNH_NODE__ __INSTALL_DIR__/live/services/filestore/app.js
 StandardOutput=append:/var/log/__APP__/filestore.log
 StandardError=inherit


@@ -8,6 +8,7 @@ User=__APP__
 Group=__APP__
 WorkingDirectory=__INSTALL_DIR__/
 EnvironmentFile=__INSTALL_DIR__/variables.env
+Environment="HISTORY_V1_HOST=127.0.0.1"
 Environment="MONGO_CONNECTION_STRING=mongodb://127.0.0.1:27017/__DB_NAME__"
 Environment="NODE_CONFIG_DIR=__INSTALL_DIR__/live/services/history-v1/config"
 ExecStart=__YNH_NODE__ __INSTALL_DIR__/live/services/history-v1/app.js


@@ -8,6 +8,8 @@ User=__APP__
 Group=__APP__
 WorkingDirectory=__INSTALL_DIR__/
 EnvironmentFile=__INSTALL_DIR__/variables.env
+Environment="NOTIFICATIONS_HOST=127.0.0.1"
+Environment="MONGO_CONNECTION_STRING=mongodb://127.0.0.1:27017/__DB_NAME__"
 ExecStart=__YNH_NODE__ __INSTALL_DIR__/live/services/notifications/app.js
 StandardOutput=append:/var/log/__APP__/notifications.log
 StandardError=inherit


@@ -8,6 +8,8 @@ User=__APP__
 Group=__APP__
 WorkingDirectory=__INSTALL_DIR__/
 EnvironmentFile=__INSTALL_DIR__/variables.env
+Environment="PROJECT_HISTORY_HOST=127.0.0.1"
+Environment="MONGO_CONNECTION_STRING=mongodb://127.0.0.1:27017/__DB_NAME__"
 ExecStart=__YNH_NODE__ __INSTALL_DIR__/live/services/project-history/app.js
 StandardOutput=append:/var/log/__APP__/project-history.log
 StandardError=inherit


@@ -7,6 +7,8 @@ Type=simple
 User=__APP__
 Group=__APP__
 WorkingDirectory=__INSTALL_DIR__/
+Environment="REALTIME_HOST=127.0.0.1"
+Environment="MONGO_CONNECTION_STRING=mongodb://127.0.0.1:27017/__DB_NAME__"
 EnvironmentFile=__INSTALL_DIR__/variables.env
 ExecStart=__YNH_NODE__ __INSTALL_DIR__/live/services/real-time/app.js
 StandardOutput=append:/var/log/__APP__/real-time.log


@@ -7,6 +7,8 @@ Type=simple
 User=__APP__
 Group=__APP__
 WorkingDirectory=__INSTALL_DIR__/
+Environment="SPELLING_HOST=127.0.0.1"
+Environment="MONGO_CONNECTION_STRING=mongodb://127.0.0.1:27017/__DB_NAME__"
 EnvironmentFile=__INSTALL_DIR__/variables.env
 ExecStart=__YNH_NODE__ __INSTALL_DIR__/live/services/spelling/app.js
 StandardOutput=append:/var/log/__APP__/spelling.log


@@ -7,6 +7,10 @@ Type=simple
 User=__APP__
 Group=__APP__
 WorkingDirectory=__INSTALL_DIR__/
+Environment="WEB_HOST=127.0.0.1"
+Environment="WEB_API_HOST=127.0.0.1"
+Environment="WEB_PORT=__PORT__"
+Environment="MONGO_CONNECTION_STRING=mongodb://127.0.0.1:27017/__DB_NAME__"
 EnvironmentFile=__INSTALL_DIR__/variables.env
 ExecStart=__YNH_NODE__ __INSTALL_DIR__/live/services/web/app.js
 StandardOutput=append:/var/log/__APP__/web.log


@@ -18,10 +18,12 @@ const Path = require('path')
 // These credentials are used for authenticating api requests
 // between services that may need to go over public channels
-const httpAuthUser = 'sharelatex'
+const httpAuthUser = process.env.WEB_API_USER
 const httpAuthPass = process.env.WEB_API_PASSWORD
 const httpAuthUsers = {}
-httpAuthUsers[httpAuthUser] = httpAuthPass
+if (httpAuthUser && httpAuthPass) {
+httpAuthUsers[httpAuthUser] = httpAuthPass
+}
 const parse = function (option) {
 if (option != null) {
@@ -55,30 +57,30 @@ const settings = {
 port: __PORT__,
 allowAnonymousReadAndWriteSharing:
-process.env.SHARELATEX_ALLOW_ANONYMOUS_READ_AND_WRITE_SHARING === 'true',
+process.env.OVERLEAF_ALLOW_ANONYMOUS_READ_AND_WRITE_SHARING === 'true',
 // Databases
 // ---------
-// ShareLaTeX's main persistent data store is MongoDB (http://www.mongodb.org/)
+// Overleaf Community Edition's main persistent data store is MongoDB (http://www.mongodb.org/)
 // Documentation about the URL connection string format can be found at:
 //
 // http://docs.mongodb.org/manual/reference/connection-string/
 //
 // The following works out of the box with Mongo's default settings:
 mongo: {
-url: process.env.SHARELATEX_MONGO_URL || 'mongodb://dockerhost/sharelatex',
+url: process.env.OVERLEAF_MONGO_URL || 'mongodb://dockerhost/sharelatex',
 },
-// Redis is used in ShareLaTeX for high volume queries, like real-time
+// Redis is used in Overleaf Community Edition for high volume queries, like real-time
 // editing, and session management.
 //
 // The following config will work with Redis's default settings:
 redis: {
 web: (redisConfig = {
-host: process.env.SHARELATEX_REDIS_HOST || 'dockerhost',
+host: process.env.OVERLEAF_REDIS_HOST || 'dockerhost',
-port: process.env.SHARELATEX_REDIS_PORT || '6379',
+port: process.env.OVERLEAF_REDIS_PORT || '6379',
-password: process.env.SHARELATEX_REDIS_PASS || undefined,
+password: process.env.OVERLEAF_REDIS_PASS || undefined,
 key_schema: {
 // document-updater
 blockingKey({ doc_id }) {
@@ -177,35 +179,35 @@ const settings = {
 // Server Config
 // -------------
-// Where your instance of ShareLaTeX can be found publicly. This is used
+// Where your instance of Overleaf Community Edition can be found publicly. This is used
 // when emails are sent out and in generated links:
-siteUrl: (siteUrl = process.env.SHARELATEX_SITE_URL || 'http://localhost'),
+siteUrl: (siteUrl = process.env.OVERLEAF_SITE_URL || 'http://localhost'),
 // Status page URL as displayed on the maintenance/500 pages.
-statusPageUrl: process.env.SHARELATEX_STATUS_PAGE_URL,
+statusPageUrl: process.env.OVERLEAF_STATUS_PAGE_URL,
-// The name this is used to describe your ShareLaTeX Installation
+// The name this is used to describe your Overleaf Community Edition Installation
-appName: process.env.SHARELATEX_APP_NAME || 'ShareLaTeX (Community Edition)',
+appName: process.env.OVERLEAF_APP_NAME || 'Overleaf Community Edition',
 restrictInvitesToExistingAccounts:
-process.env.SHARELATEX_RESTRICT_INVITES_TO_EXISTING_ACCOUNTS === 'true',
+process.env.OVERLEAF_RESTRICT_INVITES_TO_EXISTING_ACCOUNTS === 'true',
 nav: {
 title:
-process.env.SHARELATEX_NAV_TITLE ||
+process.env.OVERLEAF_NAV_TITLE ||
-process.env.SHARELATEX_APP_NAME ||
+process.env.OVERLEAF_APP_NAME ||
-'ShareLaTeX Community Edition',
+'Overleaf Community Edition',
 },
 // The email address which users will be directed to as the main point of
-// contact for this installation of ShareLaTeX.
+// contact for this installation of Overleaf Community Edition.
-adminEmail: process.env.SHARELATEX_ADMIN_EMAIL || 'placeholder@example.com',
+adminEmail: process.env.OVERLEAF_ADMIN_EMAIL || 'placeholder@example.com',
 // If provided, a sessionSecret is used to sign cookies so that they cannot be
 // spoofed. This is recommended.
 security: {
 sessionSecret:
-process.env.SHARELATEX_SESSION_SECRET || process.env.CRYPTO_RANDOM,
+process.env.OVERLEAF_SESSION_SECRET || process.env.CRYPTO_RANDOM,
 },
 // These credentials are used for authenticating api requests
@@ -220,56 +222,56 @@ const settings = {
 // but should be set to true in production.
 cacheStaticAssets: true,
-// If you are running ShareLaTeX over https, set this to true to send the
+// If you are running Overleaf Community Edition over https, set this to true to send the
 // cookie with a secure flag (recommended).
-secureCookie: process.env.SHARELATEX_SECURE_COOKIE != null,
+secureCookie: process.env.OVERLEAF_SECURE_COOKIE != null,
-// If you are running ShareLaTeX behind a proxy (like Apache, Nginx, etc)
+// If you are running Overleaf Community Edition behind a proxy (like Apache, Nginx, etc)
 // then set this to true to allow it to correctly detect the forwarded IP
 // address and http/https protocol information.
-behindProxy: process.env.SHARELATEX_BEHIND_PROXY || false,
+behindProxy: process.env.OVERLEAF_BEHIND_PROXY || false,
-trustedProxyIps: process.env.SHARELATEX_TRUSTED_PROXY_IPS,
+trustedProxyIps: process.env.OVERLEAF_TRUSTED_PROXY_IPS,
 // The amount of time, in milliseconds, until the (rolling) cookie session expires
 cookieSessionLength: parseInt(
-process.env.SHARELATEX_COOKIE_SESSION_LENGTH || 5 * 24 * 60 * 60 * 1000, // default 5 days
+process.env.OVERLEAF_COOKIE_SESSION_LENGTH || 5 * 24 * 60 * 60 * 1000, // default 5 days
 10
 ),
 redisLockTTLSeconds: parseInt(
-process.env.SHARELATEX_REDIS_LOCK_TTL_SECONDS || '60',
+process.env.OVERLEAF_REDIS_LOCK_TTL_SECONDS || '60',
 10
 ),
 i18n: {
 subdomainLang: {
 www: {
-lngCode: process.env.SHARELATEX_SITE_LANGUAGE || 'en',
+lngCode: process.env.OVERLEAF_SITE_LANGUAGE || 'en',
 url: siteUrl,
 },
 },
-defaultLng: process.env.SHARELATEX_SITE_LANGUAGE || 'en',
+defaultLng: process.env.OVERLEAF_SITE_LANGUAGE || 'en',
 },
 currentImageName: process.env.TEX_LIVE_DOCKER_IMAGE,
 apis: {
 web: {
-url: 'http://localhost:__PORT__',
+url: 'http://127.0.0.1:__PORT__',
 user: httpAuthUser,
 pass: httpAuthPass,
 },
 project_history: {
 sendProjectStructureOps: true,
-url: 'http://localhost:3054',
+url: 'http://127.0.0.1:3054',
 },
 v1_history: {
-url: process.env.V1_HISTORY_URL || 'http://localhost:3100/api',
+url: process.env.V1_HISTORY_URL || 'http://127.0.0.1:3100/api',
 user: 'staging',
 pass: process.env.STAGING_PASSWORD,
 requestTimeout: parseInt(
-process.env.SHARELATEX_HISTORY_V1_HTTP_REQUEST_TIMEOUT || '300000', // default is 5min
+process.env.OVERLEAF_HISTORY_V1_HTTP_REQUEST_TIMEOUT || '300000', // default is 5min
 10
 ),
 },
@@ -291,48 +293,35 @@ const settings = {
 // # OPTIONAL CONFIGURABLE SETTINGS
-if (process.env.SHARELATEX_LEFT_FOOTER != null) {
+if (process.env.OVERLEAF_LEFT_FOOTER != null) {
 try {
-settings.nav.left_footer = JSON.parse(process.env.SHARELATEX_LEFT_FOOTER)
+settings.nav.left_footer = JSON.parse(process.env.OVERLEAF_LEFT_FOOTER)
 } catch (error) {
 e = error
-console.error('could not parse SHARELATEX_LEFT_FOOTER, not valid JSON')
+console.error('could not parse OVERLEAF_LEFT_FOOTER, not valid JSON')
 }
 }
-if (process.env.SHARELATEX_RIGHT_FOOTER != null) {
+if (process.env.OVERLEAF_RIGHT_FOOTER != null) {
-settings.nav.right_footer = process.env.SHARELATEX_RIGHT_FOOTER
+settings.nav.right_footer = process.env.OVERLEAF_RIGHT_FOOTER
 try {
-settings.nav.right_footer = JSON.parse(process.env.SHARELATEX_RIGHT_FOOTER)
+settings.nav.right_footer = JSON.parse(process.env.OVERLEAF_RIGHT_FOOTER)
 } catch (error1) {
 e = error1
-console.error('could not parse SHARELATEX_RIGHT_FOOTER, not valid JSON')
+console.error('could not parse OVERLEAF_RIGHT_FOOTER, not valid JSON')
 }
 }
-if (process.env.SHARELATEX_HEADER_IMAGE_URL != null) {
+if (process.env.OVERLEAF_HEADER_IMAGE_URL != null) {
-settings.nav.custom_logo = process.env.SHARELATEX_HEADER_IMAGE_URL
+settings.nav.custom_logo = process.env.OVERLEAF_HEADER_IMAGE_URL
 }
-if (process.env.SHARELATEX_HEADER_NAV_LINKS != null) {
-console.error(`\
-# # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # #
-#
-# WARNING: SHARELATEX_HEADER_NAV_LINKS is no longer supported
-# See https://github.com/sharelatex/sharelatex/wiki/Configuring-Headers,-Footers-&-Logo
-#
-# # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # #\
-`)
-}
-if (process.env.SHARELATEX_HEADER_EXTRAS != null) {
+if (process.env.OVERLEAF_HEADER_EXTRAS != null) {
 try {
-settings.nav.header_extras = JSON.parse(
-process.env.SHARELATEX_HEADER_EXTRAS
-)
+settings.nav.header_extras = JSON.parse(process.env.OVERLEAF_HEADER_EXTRAS)
 } catch (error2) {
 e = error2
-console.error('could not parse SHARELATEX_HEADER_EXTRAS, not valid JSON')
+console.error('could not parse OVERLEAF_HEADER_EXTRAS, not valid JSON')
 }
 }
@@ -340,65 +329,62 @@ if (process.env.SHARELATEX_HEADER_EXTRAS != null) {
 // -------------
 //
 // You must configure a mail server to be able to send invite emails from
-// ShareLaTeX. The config settings are passed to nodemailer. See the nodemailer
+// Overleaf Community Edition. The config settings are passed to nodemailer. See the nodemailer
 // documentation for available options:
 //
 // http://www.nodemailer.com/docs/transports
-if (process.env.SHARELATEX_EMAIL_FROM_ADDRESS != null) {
+if (process.env.OVERLEAF_EMAIL_FROM_ADDRESS != null) {
 settings.email = {
-fromAddress: process.env.SHARELATEX_EMAIL_FROM_ADDRESS,
+fromAddress: process.env.OVERLEAF_EMAIL_FROM_ADDRESS,
-replyTo: process.env.SHARELATEX_EMAIL_REPLY_TO || '',
+replyTo: process.env.OVERLEAF_EMAIL_REPLY_TO || '',
-driver: process.env.SHARELATEX_EMAIL_DRIVER,
+driver: process.env.OVERLEAF_EMAIL_DRIVER,
 parameters: {
 // AWS Creds
-AWSAccessKeyID: process.env.SHARELATEX_EMAIL_AWS_SES_ACCESS_KEY_ID,
+AWSAccessKeyID: process.env.OVERLEAF_EMAIL_AWS_SES_ACCESS_KEY_ID,
-AWSSecretKey: process.env.SHARELATEX_EMAIL_AWS_SES_SECRET_KEY,
+AWSSecretKey: process.env.OVERLEAF_EMAIL_AWS_SES_SECRET_KEY,
 // SMTP Creds
-host: process.env.SHARELATEX_EMAIL_SMTP_HOST,
+host: process.env.OVERLEAF_EMAIL_SMTP_HOST,
-port: process.env.SHARELATEX_EMAIL_SMTP_PORT,
+port: process.env.OVERLEAF_EMAIL_SMTP_PORT,
-secure: parse(process.env.SHARELATEX_EMAIL_SMTP_SECURE),
+secure: parse(process.env.OVERLEAF_EMAIL_SMTP_SECURE),
-ignoreTLS: parse(process.env.SHARELATEX_EMAIL_SMTP_IGNORE_TLS),
+ignoreTLS: parse(process.env.OVERLEAF_EMAIL_SMTP_IGNORE_TLS),
-name: process.env.SHARELATEX_EMAIL_SMTP_NAME,
+name: process.env.OVERLEAF_EMAIL_SMTP_NAME,
-logger: process.env.SHARELATEX_EMAIL_SMTP_LOGGER === 'true',
+logger: process.env.OVERLEAF_EMAIL_SMTP_LOGGER === 'true',
 },
-textEncoding: process.env.SHARELATEX_EMAIL_TEXT_ENCODING,
+textEncoding: process.env.OVERLEAF_EMAIL_TEXT_ENCODING,
 template: {
-customFooter: process.env.SHARELATEX_CUSTOM_EMAIL_FOOTER,
+customFooter: process.env.OVERLEAF_CUSTOM_EMAIL_FOOTER,
 },
 }
-if (process.env.SHARELATEX_EMAIL_AWS_SES_REGION != null) {
+if (process.env.OVERLEAF_EMAIL_AWS_SES_REGION != null) {
-settings.email.parameters.region =
-process.env.SHARELATEX_EMAIL_AWS_SES_REGION
+settings.email.parameters.region = process.env.OVERLEAF_EMAIL_AWS_SES_REGION
 }
 if (
-process.env.SHARELATEX_EMAIL_SMTP_USER != null ||
+process.env.OVERLEAF_EMAIL_SMTP_USER != null ||
-process.env.SHARELATEX_EMAIL_SMTP_PASS != null
+process.env.OVERLEAF_EMAIL_SMTP_PASS != null
 ) {
 settings.email.parameters.auth = {
-user: process.env.SHARELATEX_EMAIL_SMTP_USER,
+user: process.env.OVERLEAF_EMAIL_SMTP_USER,
-pass: process.env.SHARELATEX_EMAIL_SMTP_PASS,
+pass: process.env.OVERLEAF_EMAIL_SMTP_PASS,
 }
 }
-if (process.env.SHARELATEX_EMAIL_SMTP_TLS_REJECT_UNAUTH != null) {
+if (process.env.OVERLEAF_EMAIL_SMTP_TLS_REJECT_UNAUTH != null) {
 settings.email.parameters.tls = {
 rejectUnauthorized: parse(
-process.env.SHARELATEX_EMAIL_SMTP_TLS_REJECT_UNAUTH
+process.env.OVERLEAF_EMAIL_SMTP_TLS_REJECT_UNAUTH
 ),
 }
 }
 }
 // i18n
-if (process.env.SHARELATEX_LANG_DOMAIN_MAPPING != null) {
+if (process.env.OVERLEAF_LANG_DOMAIN_MAPPING != null) {
-settings.i18n.subdomainLang = parse(
-process.env.SHARELATEX_LANG_DOMAIN_MAPPING
-)
+settings.i18n.subdomainLang = parse(process.env.OVERLEAF_LANG_DOMAIN_MAPPING)
 }
 // Password Settings
@@ -406,26 +392,26 @@ if (process.env.SHARELATEX_LANG_DOMAIN_MAPPING != null) {
 // These restrict the passwords users can use when registering
 // opts are from http://antelle.github.io/passfield
 if (
-process.env.SHARELATEX_PASSWORD_VALIDATION_PATTERN ||
+process.env.OVERLEAF_PASSWORD_VALIDATION_PATTERN ||
-process.env.SHARELATEX_PASSWORD_VALIDATION_MIN_LENGTH ||
+process.env.OVERLEAF_PASSWORD_VALIDATION_MIN_LENGTH ||
-process.env.SHARELATEX_PASSWORD_VALIDATION_MAX_LENGTH
+process.env.OVERLEAF_PASSWORD_VALIDATION_MAX_LENGTH
 ) {
 settings.passwordStrengthOptions = {
-pattern: process.env.SHARELATEX_PASSWORD_VALIDATION_PATTERN || 'aA$3',
+pattern: process.env.OVERLEAF_PASSWORD_VALIDATION_PATTERN || 'aA$3',
 length: {
-min: process.env.SHARELATEX_PASSWORD_VALIDATION_MIN_LENGTH || 8,
+min: process.env.OVERLEAF_PASSWORD_VALIDATION_MIN_LENGTH || 8,
-max: process.env.SHARELATEX_PASSWORD_VALIDATION_MAX_LENGTH || 72,
+max: process.env.OVERLEAF_PASSWORD_VALIDATION_MAX_LENGTH || 72,
 },
 }
 }
 // ######################
-// ShareLaTeX Server Pro
+// Overleaf Server Pro
 // ######################
-if (parse(process.env.SHARELATEX_IS_SERVER_PRO) === true) {
+if (parse(process.env.OVERLEAF_IS_SERVER_PRO) === true) {
 settings.bypassPercentageRollouts = true
-settings.apis.references = { url: 'http://localhost:3040' }
+settings.apis.references = { url: 'http://127.0.0.1:3040' }
 }
 // Compiler
@@ -464,21 +450,21 @@ if (process.env.SANDBOXED_COMPILES === 'true') {
 // Templates
 // ---------
-if (process.env.SHARELATEX_TEMPLATES_USER_ID) {
+if (process.env.OVERLEAF_TEMPLATES_USER_ID) {
 settings.templates = {
 mountPointUrl: '/templates',
-user_id: process.env.SHARELATEX_TEMPLATES_USER_ID,
+user_id: process.env.OVERLEAF_TEMPLATES_USER_ID,
 }
 settings.templateLinks = parse(
-process.env.SHARELATEX_NEW_PROJECT_TEMPLATE_LINKS
+process.env.OVERLEAF_NEW_PROJECT_TEMPLATE_LINKS
 )
 }
 // /Learn
 // -------
-if (process.env.SHARELATEX_PROXY_LEARN != null) {
+if (process.env.OVERLEAF_PROXY_LEARN != null) {
-settings.proxyLearn = parse(process.env.SHARELATEX_PROXY_LEARN)
+settings.proxyLearn = parse(process.env.OVERLEAF_PROXY_LEARN)
 if (settings.proxyLearn) {
 settings.nav.header_extras = [
 {
@@ -491,33 +477,33 @@ if (process.env.SHARELATEX_PROXY_LEARN != null) {
 // /References
 // -----------
-if (process.env.SHARELATEX_ELASTICSEARCH_URL != null) {
+if (process.env.OVERLEAF_ELASTICSEARCH_URL != null) {
 settings.references.elasticsearch = {
-host: process.env.SHARELATEX_ELASTICSEARCH_URL,
+host: process.env.OVERLEAF_ELASTICSEARCH_URL,
 }
 }
 // filestore
-switch (process.env.SHARELATEX_FILESTORE_BACKEND) {
+switch (process.env.OVERLEAF_FILESTORE_BACKEND) {
 case 's3':
 settings.filestore = {
 backend: 's3',
 stores: {
-user_files: process.env.SHARELATEX_FILESTORE_USER_FILES_BUCKET_NAME,
+user_files: process.env.OVERLEAF_FILESTORE_USER_FILES_BUCKET_NAME,
 template_files:
-process.env.SHARELATEX_FILESTORE_TEMPLATE_FILES_BUCKET_NAME,
+process.env.OVERLEAF_FILESTORE_TEMPLATE_FILES_BUCKET_NAME,
 },
 s3: {
 key:
-process.env.SHARELATEX_FILESTORE_S3_ACCESS_KEY_ID ||
+process.env.OVERLEAF_FILESTORE_S3_ACCESS_KEY_ID ||
 process.env.AWS_ACCESS_KEY_ID,
 secret:
-process.env.SHARELATEX_FILESTORE_S3_SECRET_ACCESS_KEY ||
+process.env.OVERLEAF_FILESTORE_S3_SECRET_ACCESS_KEY ||
 process.env.AWS_SECRET_ACCESS_KEY,
-endpoint: process.env.SHARELATEX_FILESTORE_S3_ENDPOINT,
+endpoint: process.env.OVERLEAF_FILESTORE_S3_ENDPOINT,
-pathStyle: process.env.SHARELATEX_FILESTORE_S3_PATH_STYLE === 'true',
+pathStyle: process.env.OVERLEAF_FILESTORE_S3_PATH_STYLE === 'true',
 region:
-process.env.SHARELATEX_FILESTORE_S3_REGION ||
+process.env.OVERLEAF_FILESTORE_S3_REGION ||
 process.env.AWS_DEFAULT_REGION,
 },
 }


@@ -1,7 +1,7 @@
-SHARELATEX_APP_NAME="YunoHost Overleaf Community Edition"
+OVERLEAF_APP_NAME="YunoHost Overleaf Community Edition"
-SHARELATEX_PORT=__PORT__
+OVERLEAF_PORT=__PORT__
-SHARELATEX_CONFIG="__INSTALL_DIR__/settings.js"
+OVERLEAF_CONFIG="__INSTALL_DIR__/settings.js"
 ADMIN_PRIVILEGE_AVAILABLE=true
 OPTIMISE_PDF=true
 NODE_ENV=production
@@ -11,19 +11,23 @@ WEB_API_USER="__APP__"
 WEB_API_PASSWORD=__WEB_API_PASSWORD__
 CRYPTO_RANDOM=__CRYPTO_RANDOM__
-SHARELATEX_MONGO_URL="mongodb://127.0.0.1:27017/__DB_NAME__"
+OVERLEAF_MONGO_URL="mongodb://127.0.0.1:27017/__DB_NAME__"
-SHARELATEX_REDIS_HOST="localhost"
+OVERLEAF_REDIS_HOST="localhost"
+OVERLEAF_REDIS_PORT=6379
 REDIS_HOST="localhost"
 REDIS_PORT=6379
-SHARELATEX_SITE_LANGUAGE="__LANGUAGE__"
+OVERLEAF_SITE_LANGUAGE="__LANGUAGE__"
 ENABLED_LINKED_FILE_TYPES=project_file,project_output_file
-SHARELATEX_ALLOW_PUBLIC_ACCESS=true
-SHARELATEX_ALLOW_ANONYMOUS_READ_AND_WRITE_SHARING=true
+CLSI_COMPILES_PATH="__DATA_DIR__/compiles"
+CLSI_OUTPUT_PATH="__DATA_DIR__/output"
+CLSI_CACHE_PATH="__DATA_DIR__/cache"
+OVERLEAF_ALLOW_PUBLIC_ACCESS=true
+OVERLEAF_ALLOW_ANONYMOUS_READ_AND_WRITE_SHARING=true
 # Enables Thumbnail generation using ImageMagick
 ENABLE_CONVERSIONS=true
@@ -40,32 +44,32 @@ TEXMFVAR=__INSTALL_DIR__/tmp/texmf-var
 # NGINX_WORKER_CONNECTIONS=768
 ## Set for TLS via nginx-proxy
-SHARELATEX_BEHIND_PROXY=true
+# OVERLEAF_BEHIND_PROXY=true
-SHARELATEX_SECURE_COOKIE=false
+# OVERLEAF_SECURE_COOKIE=false
-SHARELATEX_SITE_URL=https://__DOMAIN__
+OVERLEAF_SITE_URL=https://__DOMAIN__
-# SHARELATEX_NAV_TITLE=Our Overleaf Instance
+# OVERLEAF_NAV_TITLE=Our Overleaf Instance
-# SHARELATEX_HEADER_IMAGE_URL=http://somewhere.com/mylogo.png
+# OVERLEAF_HEADER_IMAGE_URL=http://somewhere.com/mylogo.png
-SHARELATEX_ADMIN_EMAIL=__APP__@__DOMAIN__
+OVERLEAF_ADMIN_EMAIL=__APP__@__DOMAIN__
-# SHARELATEX_LEFT_FOOTER=[{"text":"Powered by Overleaf © 2021", "url": "https://www.overleaf.com"}, {"text": "Contact your support team", "url": "mailto:support@example.com"} ]
+# OVERLEAF_LEFT_FOOTER=[{"text":"Powered by Overleaf © 2021", "url": "https://www.overleaf.com"}, {"text": "Contact your support team", "url": "mailto:support@example.com"} ]
-# SHARELATEX_RIGHT_FOOTER=[{"text":"Hello I am on the Right"}]
+# OVERLEAF_RIGHT_FOOTER=[{"text":"Hello I am on the Right"}]
-SHARELATEX_EMAIL_FROM_ADDRESS=__APP__@__DOMAIN__
+OVERLEAF_EMAIL_FROM_ADDRESS=__APP__@__DOMAIN__
-# SHARELATEX_EMAIL_AWS_SES_ACCESS_KEY_ID=
+# OVERLEAF_EMAIL_AWS_SES_ACCESS_KEY_ID=
-# SHARELATEX_EMAIL_AWS_SES_SECRET_KEY=
+# OVERLEAF_EMAIL_AWS_SES_SECRET_KEY=
-SHARELATEX_EMAIL_SMTP_HOST=__MAIN_DOMAIN__
+OVERLEAF_EMAIL_SMTP_HOST=__MAIN_DOMAIN__
-SHARELATEX_EMAIL_SMTP_PORT=587
+OVERLEAF_EMAIL_SMTP_PORT=587
-# SHARELATEX_EMAIL_SMTP_SECURE=true
+# OVERLEAF_EMAIL_SMTP_SECURE=true
-SHARELATEX_EMAIL_SMTP_USER=__APP__
+OVERLEAF_EMAIL_SMTP_USER=__APP__
-SHARELATEX_EMAIL_SMTP_PASS=__MAIL_PWD__
+OVERLEAF_EMAIL_SMTP_PASS=__MAIL_PWD__
-SHARELATEX_EMAIL_SMTP_NAME=__APP__@__DOMAIN__
+OVERLEAF_EMAIL_SMTP_NAME=__APP__@__DOMAIN__
-# SHARELATEX_EMAIL_SMTP_LOGGER=false
+# OVERLEAF_EMAIL_SMTP_LOGGER=false
-# SHARELATEX_EMAIL_SMTP_TLS_REJECT_UNAUTH=true
+# OVERLEAF_EMAIL_SMTP_TLS_REJECT_UNAUTH=true
-# SHARELATEX_EMAIL_SMTP_IGNORE_TLS=false
+# OVERLEAF_EMAIL_SMTP_IGNORE_TLS=false
-# SHARELATEX_CUSTOM_EMAIL_FOOTER=This system is run by department x
+# OVERLEAF_CUSTOM_EMAIL_FOOTER=This system is run by department x
 OT_JWT_AUTH_KEY=__JWT_KEY__
@@ -83,20 +87,20 @@ OT_JWT_AUTH_KEY=__JWT_KEY__
 #LDAP_CONTACTS=false
 # EXTERNAL_AUTH=ldap
-# SHARELATEX_LDAP_URL=ldap://localhost:389
+# OVERLEAF_LDAP_URL=ldap://localhost:389
-# SHARELATEX_LDAP_SEARCH_BASE=ou=users,dc=yunohost,dc=org
+# OVERLEAF_LDAP_SEARCH_BASE=ou=users,dc=yunohost,dc=org
-# SHARELATEX_LDAP_SEARCH_FILTER=(uid={{username}})
+# OVERLEAF_LDAP_SEARCH_FILTER=(uid={{username}})
-# SHARELATEX_LDAP_BIND_DN=cn=admin,dc=planetexpress,dc=com
+# OVERLEAF_LDAP_BIND_DN=cn=admin,dc=planetexpress,dc=com
-# SHARELATEX_LDAP_BIND_CREDENTIALS=GoodNewsEveryone
+# OVERLEAF_LDAP_BIND_CREDENTIALS=GoodNewsEveryone
-# SHARELATEX_LDAP_EMAIL_ATT=mail
+# OVERLEAF_LDAP_EMAIL_ATT=mail
-# SHARELATEX_LDAP_NAME_ATT=cn
+# OVERLEAF_LDAP_NAME_ATT=cn
-# SHARELATEX_LDAP_LAST_NAME_ATT=sn
+# OVERLEAF_LDAP_LAST_NAME_ATT=sn
-# SHARELATEX_LDAP_UPDATE_USER_DETAILS_ON_LOGIN=true
+# OVERLEAF_LDAP_UPDATE_USER_DETAILS_ON_LOGIN=true
-# SHARELATEX_TEMPLATES_USER_ID=578773160210479700917ee5
+# OVERLEAF_TEMPLATES_USER_ID=578773160210479700917ee5
-# SHARELATEX_NEW_PROJECT_TEMPLATE_LINKS=[{"name":"All Templates","url":"/templates/all"}]
+# OVERLEAF_NEW_PROJECT_TEMPLATE_LINKS=[{"name":"All Templates","url":"/templates/all"}]
-# TEX_LIVE_DOCKER_IMAGE=quay.io/sharelatex/texlive-full:2021.1
+# TEX_LIVE_DOCKER_IMAGE=quay.io/OVERLEAF/texlive-full:2021.1
-# ALL_TEX_LIVE_DOCKER_IMAGES=quay.io/sharelatex/texlive-full:2021.1,quay.io/sharelatex/texlive-full:2020.1
+# ALL_TEX_LIVE_DOCKER_IMAGES=quay.io/OVERLEAF/texlive-full:2021.1,quay.io/OVERLEAF/texlive-full:2020.1
-# SHARELATEX_PROXY_LEARN=true
+# OVERLEAF_PROXY_LEARN=true
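Almost every key in this template is a straight rename from the old `SHARELATEX_` prefix to `OVERLEAF_`, following the upstream renaming in Overleaf 5.x. For an instance that carries manual edits in its deployed variables.env, a rough migration sketch — illustrative only; the paths are examples, and the shipped template above is already renamed:

```bash
# Back up the deployed environment file, then bulk-rename the prefix.
# The path is an example: adjust it to wherever variables.env actually lives
# (__INSTALL_DIR__ in the template resolves to the app's install directory).
cp /var/www/overleaf/variables.env /var/www/overleaf/variables.env.bak
sed -i 's/SHARELATEX_/OVERLEAF_/g' /var/www/overleaf/variables.env
```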


@@ -7,7 +7,7 @@ name = "Overleaf"
 description.en = "Online real-time collaborative LaTeX editor"
 description.fr = "Éditeur LaTeX collaboratif en ligne et en temps réel"
-version = "2024.01.26~ynh1"
+version = "2024.04.09~ynh1"
 maintainers = []
@@ -51,8 +51,9 @@ ram.runtime = "3G"
 [resources]
 [resources.sources]
 [resources.sources.main]
-url = "https://github.com/overleaf/overleaf/archive/fdf8ebe001ec91dc3ab5c23b47bbbb03dc03d1bb.tar.gz"
+url = "https://github.com/overleaf/overleaf/archive/568044ee48a20bdb930f9f140f17227d19cb1219.tar.gz"
-sha256 = "802e3c0add7690c211fc039f94e1fceffe83040e7e60b0c340f35703087f1704"
+sha256 = "61e2602c76f8c8a635656d99db6725c802261e383411d71095cf9710149567f6"
+autoupdate.strategy = "latest_github_commit"
 [resources.system_user]
 allow_email = true


@@ -4,7 +4,7 @@
 # COMMON VARIABLES
 #=================================================
-nodejs_version=18.18.2
+nodejs_version=18.20.2
 #=================================================
 # PERSONAL HELPERS
@@ -14,7 +14,7 @@ nodejs_version=18.18.2
 # EXPERIMENTAL HELPERS
 #=================================================
-readonly YNH_DEFAULT_MONGO_VERSION=4.4
+readonly YNH_DEFAULT_MONGO_VERSION=5.0
 # Declare the actual MongoDB version to use: 4.4 ; 5.0 ; 6.0
 # A packager willing to use another version of MongoDB can override the variable into its _common.sh.
 YNH_MONGO_VERSION=${YNH_MONGO_VERSION:-$YNH_DEFAULT_MONGO_VERSION}
@@ -25,100 +25,100 @@ YNH_MONGO_VERSION=${YNH_MONGO_VERSION:-$YNH_DEFAULT_MONGO_VERSION}
 # example: ynh_mongo_exec --command="db.getMongo().getDBNames().indexOf(\"wekan\")"
 #
 # usage: ynh_mongo_exec [--user=user] [--password=password] [--authenticationdatabase=authenticationdatabase] [--database=database] [--host=host] [--port=port] --command="command" [--eval]
 # | arg: -u, --user= - The user name to connect as
 # | arg: -p, --password= - The user password
 # | arg: -d, --authenticationdatabase= - The authenticationdatabase to connect to
 # | arg: -d, --database= - The database to connect to
 # | arg: -h, --host= - The host to connect to
 # | arg: -P, --port= - The port to connect to
 # | arg: -c, --command= - The command to evaluate
 # | arg: -e, --eval - Evaluate instead of execute the command.
 #
 #
 ynh_mongo_exec() {
 # Declare an array to define the options of this helper.
 local legacy_args=upadhPce
 local -A args_array=( [u]=user= [p]=password= [a]=authenticationdatabase= [d]=database= [h]=host= [P]=port= [c]=command= [e]=eval )
 local user
 local password
 local authenticationdatabase
 local database
 local host
 local port
 local command
 local eval
 # Manage arguments with getopts
 ynh_handle_getopts_args "$@"
 user="${user:-}"
 password="${password:-}"
 authenticationdatabase="${authenticationdatabase:-}"
 database="${database:-}"
 host="${host:-}"
 port="${port:-}"
 eval=${eval:-0}
 # If user is provided
 if [ -n "$user" ]
 then
 user="--username=$user"
 # If password is provided
 if [ -n "$password" ]
 then
 password="--password=$password"
 fi
 # If authenticationdatabase is provided
 if [ -n "$authenticationdatabase" ]
 then
 authenticationdatabase="--authenticationDatabase=$authenticationdatabase"
 else
 authenticationdatabase="--authenticationDatabase=admin"
 fi
 else
 password=""
 authenticationdatabase=""
 fi
 # If host is provided
 if [ -n "$host" ]
 then
 host="--host=$host"
 fi
 # If port is provided
 if [ -n "$port" ]
 then
 port="--port=$port"
 fi
 # If eval is not provided
 if [ $eval -eq 0 ]
 then
 # If database is provided
 if [ -n "$database" ]
 then
 database="use $database"
 else
 database=""
 fi
-mongosh --quiet --username $user --password $password --authenticationDatabase $authenticationdatabase --host $host --port $port <<EOF
+mongo --quiet $user $password $authenticationdatabase $host $port <<EOF
 $database
 ${command}
 quit()
 EOF
 else
 # If database is provided
 if [ -n "$database" ]
 then
 database="$database"
 else
 database=""
 fi
-mongosh --quiet $database --username $user --password $password --authenticationDatabase $authenticationdatabase --host $host --port $port --eval="$command"
+mongo --quiet $database $user $password $authenticationdatabase $host $port --eval="$command"
 fi
 }
 # Drop a database
@@ -129,18 +129,18 @@ EOF
 # consider using ynh_mongo_remove_db instead.
 #
 # usage: ynh_mongo_drop_db --database=database
 # | arg: -d, --database= - The database name to drop
 #
 #
 ynh_mongo_drop_db() {
 # Declare an array to define the options of this helper.
 local legacy_args=d
 local -A args_array=( [d]=database= )
 local database
 # Manage arguments with getopts
 ynh_handle_getopts_args "$@"
 ynh_mongo_exec --database="$database" --command='db.runCommand({dropDatabase: 1})'
 }
 # Dump a database
@@ -148,19 +148,19 @@ ynh_mongo_drop_db() {
 # example: ynh_mongo_dump_db --database=wekan > ./dump.bson
 #
 # usage: ynh_mongo_dump_db --database=database
 # | arg: -d, --database= - The database name to dump
 # | ret: the mongodump output
 #
 #
 ynh_mongo_dump_db() {
 # Declare an array to define the options of this helper.
 local legacy_args=d
 local -A args_array=( [d]=database= )
 local database
 # Manage arguments with getopts
 ynh_handle_getopts_args "$@"
 mongodump --quiet --db="$database" --archive
 }
 # Create a user
@@ -168,49 +168,49 @@ ynh_mongo_dump_db() {
 # [internal]
 #
 # usage: ynh_mongo_create_user --db_user=user --db_pwd=pwd --db_name=name
 # | arg: -u, --db_user= - The user name to create
 # | arg: -p, --db_pwd= - The password to identify user by
 # | arg: -n, --db_name= - Name of the database to grant privilegies
 #
 #
 ynh_mongo_create_user() {
 # Declare an array to define the options of this helper.
 local legacy_args=unp
 local -A args_array=( [u]=db_user= [n]=db_name= [p]=db_pwd= )
 local db_user
 local db_name
 local db_pwd
 # Manage arguments with getopts
 ynh_handle_getopts_args "$@"
 # Create the user and set the user as admin of the db
 ynh_mongo_exec --database="$db_name" --command='db.createUser( { user: "'${db_user}'", pwd: "'${db_pwd}'", roles: [ { role: "readWrite", db: "'${db_name}'" } ] } );'
 # Add clustermonitoring rights
 ynh_mongo_exec --database="$db_name" --command='db.grantRolesToUser("'${db_user}'",[{ role: "clusterMonitor", db: "admin" }]);'
 }
 # Check if a mongo database exists
 #
 # usage: ynh_mongo_database_exists --database=database
 # | arg: -d, --database= - The database for which to check existence
 # | exit: Return 1 if the database doesn't exist, 0 otherwise
 #
 #
 ynh_mongo_database_exists() {
 # Declare an array to define the options of this helper.
 local legacy_args=d
 local -A args_array=([d]=database=)
 local database
 # Manage arguments with getopts
 ynh_handle_getopts_args "$@"
 if [ $(ynh_mongo_exec --command='db.getMongo().getDBNames().indexOf("'${database}'")' --eval) -lt 0 ]
 then
 return 1
 else
 return 0
 fi
 }
 # Restore a database
@@ -218,18 +218,18 @@ ynh_mongo_database_exists() {
 # example: ynh_mongo_restore_db --database=wekan < ./dump.bson
 #
 # usage: ynh_mongo_restore_db --database=database
 # | arg: -d, --database= - The database name to restore
 #
 #
 ynh_mongo_restore_db() {
 # Declare an array to define the options of this helper.
 local legacy_args=d
 local -A args_array=( [d]=database= )
 local database
 # Manage arguments with getopts
 ynh_handle_getopts_args "$@"
 mongorestore --quiet --db="$database" --archive
 }
 # Drop a user
@@ -237,123 +237,121 @@ ynh_mongo_restore_db() {
 # [internal]
 #
 # usage: ynh_mongo_drop_user --db_user=user --db_name=name
 # | arg: -u, --db_user= - The user to drop
 # | arg: -n, --db_name= - Name of the database
 #
 #
 ynh_mongo_drop_user() {
 # Declare an array to define the options of this helper.
 local legacy_args=un
 local -A args_array=( [u]=db_user= [n]=db_name= )
 local db_user
 local db_name
 # Manage arguments with getopts
 ynh_handle_getopts_args "$@"
 ynh_mongo_exec --database="$db_name" --command='db.dropUser("'$db_user'", {w: "majority", wtimeout: 5000})'
 }
 # Create a database, an user and its password. Then store the password in the app's config
 #
 # usage: ynh_mongo_setup_db --db_user=user --db_name=name [--db_pwd=pwd]
 # | arg: -u, --db_user= - Owner of the database
 # | arg: -n, --db_name= - Name of the database
 # | arg: -p, --db_pwd= - Password of the database. If not provided, a password will be generated
 #
 # After executing this helper, the password of the created database will be available in $db_pwd
 # It will also be stored as "mongopwd" into the app settings.
 #
 #
 ynh_mongo_setup_db() {
 # Declare an array to define the options of this helper.
 local legacy_args=unp
 local -A args_array=( [u]=db_user= [n]=db_name= [p]=db_pwd= )
 local db_user
 local db_name
 db_pwd=""
 # Manage arguments with getopts
 ynh_handle_getopts_args "$@"
 local new_db_pwd=$(ynh_string_random) # Generate a random password
 # If $db_pwd is not provided, use new_db_pwd instead for db_pwd
 db_pwd="${db_pwd:-$new_db_pwd}"
 # Create the user and grant access to the database
 ynh_mongo_create_user --db_user="$db_user" --db_pwd="$db_pwd" --db_name="$db_name"
 # Store the password in the app's config
 ynh_app_setting_set --app=$app --key=db_pwd --value=$db_pwd
 }
 # Remove a database if it exists, and the associated user
 #
 # usage: ynh_mongo_remove_db --db_user=user --db_name=name
 # | arg: -u, --db_user= - Owner of the database
 # | arg: -n, --db_name= - Name of the database
 #
 #
 ynh_mongo_remove_db() {
 # Declare an array to define the options of this helper.
 local legacy_args=un
 local -A args_array=( [u]=db_user= [n]=db_name= )
 local db_user
 local db_name
 # Manage arguments with getopts
 ynh_handle_getopts_args "$@"
 if ynh_mongo_database_exists --database=$db_name; then # Check if the database exists
 ynh_mongo_drop_db --database=$db_name # Remove the database
 else
 ynh_print_warn --message="Database $db_name not found"
 fi
 # Remove mongo user if it exists
 ynh_mongo_drop_user --db_user=$db_user --db_name=$db_name
 }
 # Install MongoDB and integrate MongoDB service in YunoHost
 #
 # usage: ynh_install_mongo [--mongo_version=mongo_version]
 # | arg: -m, --mongo_version= - Version of MongoDB to install
 #
 #
 ynh_install_mongo() {
 # Declare an array to define the options of this helper.
 local legacy_args=m
 local -A args_array=([m]=mongo_version=)
 local mongo_version
 # Manage arguments with getopts
 ynh_handle_getopts_args "$@"
 mongo_version="${mongo_version:-$YNH_MONGO_VERSION}"
-ynh_print_info --message="Installing MongoDB Community Edition ..."
+ynh_print_info --message="Installing MongoDB Community Edition..."
-local mongo_debian_release=$(ynh_get_debian_release)
+local mongo_debian_release=bullseye #$(ynh_get_debian_release)
 if [[ $(cat /proc/cpuinfo) != *"avx"* && "$mongo_version" != "4.4" ]]; then
-ynh_print_warn --message="Installing Mongo 4.4 as $mongo_version is not compatible with your cpu (see https://docs.mongodb.com/manual/administration/production-notes/#x86_64)."
+ynh_print_warn --message="Installing Mongo 4.4 as $mongo_version is not compatible with your CPU (see https://docs.mongodb.com/manual/administration/production-notes/#x86_64)."
 mongo_version="4.4"
 fi
 if [[ "$mongo_version" == "4.4" && "$mongo_debian_release" != "buster" ]]; then
-ynh_print_warn --message="Switched to buster install as Mongo 4.4 is not compatible with $mongo_debian_release."
+ynh_print_warn --message="Switched to Buster install as Mongo 4.4 is not compatible with $mongo_debian_release."
 mongo_debian_release=buster
 fi
-ynh_install_extra_app_dependencies \
-    --repo="deb https://repo.mongodb.org/apt/debian $mongo_debian_release/mongodb-org/$mongo_version main" \
-    --package="mongodb-org mongodb-org-server mongodb-org-tools mongodb-mongosh" \
-    --key="https://www.mongodb.org/static/pgp/server-$mongo_version.asc"
+ynh_install_extra_app_dependencies --repo="deb http://repo.mongodb.org/apt/debian $mongo_debian_release/mongodb-org/$mongo_version main" --package="mongodb-org-server mongodb-org-shell mongodb-database-tools" --key="https://www.mongodb.org/static/pgp/server-$mongo_version.asc"
 mongodb_servicename=mongod
 # Make sure MongoDB is started and enabled
 systemctl enable $mongodb_servicename --quiet
 systemctl daemon-reload --quiet
 ynh_systemd_action --service_name=$mongodb_servicename --action=restart --line_match="aiting for connections" --log_path="/var/log/mongodb/$mongodb_servicename.log"
 # Integrate MongoDB service in YunoHost
 yunohost service add $mongodb_servicename --description="MongoDB daemon" --log="/var/log/mongodb/$mongodb_servicename.log"
 # Store mongo_version into the config of this app
 ynh_app_setting_set --app=$app --key=mongo_version --value=$mongo_version
 }
 # Remove MongoDB
@@ -364,16 +362,16 @@ ynh_install_mongo() {
 #
 #
 ynh_remove_mongo() {
 # Only remove the mongodb service if it is not installed.
 if ! ynh_package_is_installed --package="mongodb*"
 then
 ynh_print_info --message="Removing MongoDB service..."
 mongodb_servicename=mongod
 # Remove the mongodb service
 yunohost service remove $mongodb_servicename
 ynh_secure_remove --file="/var/lib/mongodb"
 ynh_secure_remove --file="/var/log/mongodb"
 fi
 }
 #=================================================
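Taken together, the helpers above are meant to be called from the package's install and remove scripts roughly as follows — a minimal sketch based on the usage comments in this file, with hypothetical variable values rather than an excerpt from the real scripts:

```bash
# Hypothetical excerpt: $app is provided by YunoHost; the database name and
# user are simply derived from it here for illustration.
db_name="$app"
db_user="$app"

# Install MongoDB (defaults to YNH_MONGO_VERSION, now 5.0) and register the
# mongod service with YunoHost.
ynh_install_mongo

# Create the database and its user; the generated password ends up in $db_pwd
# and is stored in the app settings.
ynh_mongo_setup_db --db_user="$db_user" --db_name="$db_name"

# Later, typically in the remove script:
# ynh_mongo_remove_db --db_user="$db_user" --db_name="$db_name"
# ynh_remove_mongo
```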


@@ -143,6 +143,9 @@ pushd "$install_dir/live/services/web"
 ynh_secure_remove --file="$install_dir/live/services/web/node_modules/.cache"
 popd
+# remove build dir
+ynh_secure_remove "$install_dir/build"
 chmod 750 "$install_dir/live"
 chmod -R o-rwx "$install_dir/live"
 chown -R $app:www-data "$install_dir/live"


@@ -84,9 +84,12 @@ then
 ynh_script_progression --message="Upgrading source files..."
 # Download, check integrity, uncompress and patch the source from app.src
-ynh_setup_source --dest_dir="$install_dir/build"
+ynh_setup_source --dest_dir="$install_dir/build" --full_replace=1
 fi
+# full replace on live dir
+ynh_secure_remove "$install_dir/live"
 chmod 750 "$install_dir"
 chmod -R o-rwx "$install_dir"
 chown -R $app:www-data "$install_dir"
@@ -141,6 +144,7 @@ cp -r "$install_dir/build/patches/" "$install_dir/live/patches/"
 cp -r "$install_dir/build/server-ce/config" "$install_dir/config/"
 ynh_secure_remove --file="$install_dir/config/settings.js"
 ynh_secure_remove --file="$install_dir/config/production.json"
+ynh_secure_remove --file="$install_dir/live/services/history-v1/config/production.json"
 ynh_add_config --template="../conf/production.json" --destination="$install_dir/config/production.json"
 ynh_add_config --template="../conf/production.json" --destination="$install_dir/live/services/history-v1/config/production.json"
@@ -149,6 +153,7 @@ cp "$install_dir/build/server-ce/config/custom-environment-variables.json" "$ins
 ynh_script_progression --message="Building app... This may take a LOT of time depending of your CPU" --weight=25
 pushd "$install_dir/live"
 ynh_use_nodejs
+ynh_exec_warn_less npm install
 ynh_exec_warn_less npm ci
 popd
@@ -157,6 +162,9 @@ pushd "$install_dir/live/services/web"
 ynh_secure_remove --file="$install_dir/live/services/web/node_modules/.cache"
 popd
+#remove build dir
+ynh_secure_remove "$install_dir/build"
 chmod 750 "$install_dir/live"
 chmod -R o-rwx "$install_dir/live"
 chown -R $app:www-data "$install_dir/live"


@@ -1,3 +1,5 @@
 test_format = 1.0
 [default]
+test_upgrade_from.c5e7ec1.name = "Overleaf 4.x"