Merge branch 'stretch-testing' into stretch-stable

Kay0u 2020-05-20 18:55:47 +00:00
commit 875eec7e82
90 changed files with 9061 additions and 1329 deletions


@ -1,11 +1,19 @@
language: python
addons:
apt:
packages:
- ldap-utils
- slapd
matrix:
include:
- python: 2.7
env: TOXENV=py27
- python: 2.7
env: TOXENV=lint
- python: 3.6
env: TOXENV=format-check
- python: 2.7
env: TOXENV=docs


@ -50,7 +50,6 @@ Requirements
* Python 2.7
* python-bottle (>= 0.10)
* python-gnupg (>= 0.3)
* python-ldap (>= 2.4)
* PyYAML


@ -1,72 +0,0 @@
#############################
# Global parameters #
#############################
_global:
configuration:
authenticate:
- api
authenticator:
default:
vendor: ldap
help: Admin Password
parameters:
uri: ldap://localhost:389
base_dn: dc=yunohost,dc=org
user_rdn: cn=admin,dc=yunohost,dc=org
ldap-anonymous:
vendor: ldap
parameters:
uri: ldap://localhost:389
base_dn: dc=yunohost,dc=org
test-profile:
vendor: ldap
help: Admin Password (profile)
parameters:
uri: ldap://localhost:389
base_dn: dc=yunohost,dc=org
user_rdn: cn=admin,dc=yunohost,dc=org
as-root:
vendor: ldap
parameters:
# We can get this uri by (urllib.quote_plus('/var/run/slapd/ldapi')
uri: ldapi://%2Fvar%2Frun%2Fslapd%2Fldapi
base_dn: dc=yunohost,dc=org
user_rdn: gidNumber=0+uidNumber=0,cn=peercred,cn=external,cn=auth
argument_auth: true
lock: false
#############################
# Test Actions #
#############################
test:
actions:
non-auth:
api: GET /test/non-auth
configuration:
authenticate: false
auth:
api: GET /test/auth
configuration:
authenticate: all
auth-profile:
api: GET /test/auth-profile
configuration:
authenticate: all
authenticator: test-profile
auth-cli:
api: GET /test/auth-cli
configuration:
authenticate:
- cli
root-auth:
api: GET /test/root-auth
configuration:
authenticate: all
authenticator: as-root
anonymous:
api: GET /test/anon
configuration:
authenticate: all
authenticator: ldap-anonymous
argument_auth: false
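
The "as-root" authenticator above builds its ldapi:// URI by percent-encoding the slapd UNIX socket path, as the inline comment notes. A minimal sketch of that encoding step, written against Python 3's urllib.parse (the comment in the file uses the Python 2 spelling urllib.quote_plus, which produces the same result here):

    from urllib.parse import quote_plus

    # Percent-encode the slapd UNIX socket path so it can be embedded in an ldapi:// URI;
    # quote_plus() escapes "/" as "%2F", unlike quote(), which leaves "/" untouched by default.
    socket_path = "/var/run/slapd/ldapi"
    encoded = quote_plus(socket_path)

    print(encoded)               # %2Fvar%2Frun%2Fslapd%2Fldapi
    print("ldapi://" + encoded)  # ldapi://%2Fvar%2Frun%2Fslapd%2Fldapi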


@ -1,33 +0,0 @@
# yunohost(1) completion
_yunohost_cli()
{
local argc cur prev opts
COMPREPLY=()
argc=${COMP_CWORD}
cur="${COMP_WORDS[argc]}"
prev="${COMP_WORDS[argc-1]}"
opts=$(yunohost -h | sed -n "/usage/,/}/p" | awk -F"{" '{print $2}' | awk -F"}" '{print $1}' | tr ',' ' ')
if [[ $argc = 1 ]];
then
COMPREPLY=( $(compgen -W "$opts --help" -- $cur ) )
fi
if [[ "$prev" != "--help" ]];
then
if [[ $argc = 2 ]];
then
opts2=$(yunohost $prev -h | sed -n "/usage/,/}/p" | awk -F"{" '{print $2}' | awk -F"}" '{print $1}' | tr ',' ' ')
COMPREPLY=( $(compgen -W "$opts2 --help" -- $cur ) )
elif [[ $argc = 3 ]];
then
COMPREPLY=( $(compgen -W "--help" $cur ) )
fi
else
COMPREPLY=()
fi
}
complete -F _yunohost_cli yunohost

debian/changelog

@ -1,3 +1,35 @@
moulinette (3.8.1) testing; urgency=low
- [fix] Misc technical ux/debugging fixes (#242, #243, #244, 840f27d2)
- [fix] try to autorestart ldap when the server is down (#247)
- [i18n] Translations updated for Dutch, Esperanto, French, German, Nepali, Polish
Thanks to all contributors <3 ! (amirale qt, Bram, É. Gaspar, Kay0u, M. Döring, Zeik0s)
-- Alexandre Aubin <alex.aubin@mailoo.org> Sat, 09 May 2020 21:09:35 +0200
moulinette (3.8.0) testing; urgency=low
# Major stuff
- Simplify auth mechanism (#216)
- Add more tests (#230)
- Use Black in Moulinette (#220, 6f5daa0, 54b8cab)
# Minor technical stuff
- [fix] Don't display comment if argument is already set (#226)
- Don't miserably crash if async running can't read incoming message (06d8c48)
- Report the actual error when ldap fails (628ffc9)
# i18n
- Improve translations for Swedish, Dutch, Italian, Russian, Polish, Portuguese, Catalan, Spanish, Occitan, Nepali, Esperanto, Basque, Chinese (Simplified), Arabic, German, Hungarian, Greek, Turkish, Bengali (Bangladesh)
Thanks to all contributors ! (Aleks, Bram, ButterflyOfFire, Filip B., Jeroen F., Josué T., Kay0u, Quentí, Yifei D., amirale qt, decentral1se, Elie G., frju365, Romain R., xaloc33)
-- Kay0u <pierre@kayou.io> Thu, 09 Apr 2020 20:29:48 +0000
moulinette (3.7.1.1) stable; urgency=low
- [fix] Report actual errors when some LDAP operation fails to ease
@ -29,21 +61,21 @@ moulinette (3.7.0) testing; urgency=low
# ~ Major stuff
- [enh] Add group and permission mechanism ([Moulinette#189](https://github.com/YunoHost/moulinette/pull/189)
- [mod] Be able to customize prompt colors ([Moulinette/808f620](https://github.com/YunoHost/Moulinette/commit/808f620))
- [enh] Support app manifests in toml ([Moulinette#204](https://github.com/YunoHost/moulinette/pull/204), [Moulinette/55515cb](https://github.com/YunoHost/Moulinette/commit/55515cb))
- [enh] Quite a lot of messages improvements, string cleaning, language rework... ([Moulinette/599bec3](https://github.com/YunoHost/Moulinette/commit/599bec3), [Moulinette#208](https://github.com/YunoHost/moulinette/pull/208), [Moulinette#213](https://github.com/YunoHost/moulinette/pull/213), [Moulinette/b7d415d](https://github.com/YunoHost/Moulinette/commit/b7d415d), [Moulinette/a8966b8](https://github.com/YunoHost/Moulinette/commit/a8966b8), [Moulinette/fdf9a71](https://github.com/YunoHost/Moulinette/commit/fdf9a71), [Moulinette/d895ae3](https://github.com/YunoHost/Moulinette/commit/d895ae3), [Moulinette/bdf0a1c](https://github.com/YunoHost/Moulinette/commit/bdf0a1c))
- [enh] Add group and permission mechanism (#189)
- [mod] Be able to customize prompt colors (808f620)
- [enh] Support app manifests in toml (#204, 55515cb)
- [enh] Quite a lot of messages improvements, string cleaning, language rework... (599bec3, #208, #213, b7d415d, a8966b8, fdf9a71, d895ae3, bdf0a1c)
- [i18n] Improved translations for Catalan, Occitan, French, Arabic, Spanish, German, Norwegian Bokmål
# Smaller or pretty technical fix/enh
- [enh] Preparations for moulinette Python3 migration (Tox, Pytest and unit tests) ([Moulinette#203](https://github.com/YunoHost/moulinette/pull/203), [Moulinette#206](https://github.com/YunoHost/moulinette/pull/206), [Moulinette#207](https://github.com/YunoHost/moulinette/pull/207), [Moulinette#210](https://github.com/YunoHost/moulinette/pull/210), [Moulinette#211](https://github.com/YunoHost/moulinette/pull/211) [Moulinette#212](https://github.com/YunoHost/moulinette/pull/212), [Moulinette/2403ee1](https://github.com/YunoHost/Moulinette/commit/2403ee1), [Moulinette/69b0d49](https://github.com/YunoHost/Moulinette/commit/69b0d49), [Moulinette/49c749c](https://github.com/YunoHost/Moulinette/commit/49c749c), [Moulinette/2c84ee1](https://github.com/YunoHost/Moulinette/commit/2c84ee1), [Moulinette/cef72f7](https://github.com/YunoHost/Moulinette/commit/cef72f7))
- [enh] Add a write_to_yaml utility similar to write_to_json ([Moulinette/2e2e627](https://github.com/YunoHost/Moulinette/commit/2e2e627))
- [enh] Warn the user about long locks ([Moulinette#205](https://github.com/YunoHost/moulinette/pull/205))
- [mod] Tweak stuff about setuptools and moulinette deps? ([Moulinette/b739f27](https://github.com/YunoHost/Moulinette/commit/b739f27), [Moulinette/da00fc9](https://github.com/YunoHost/Moulinette/commit/da00fc9), [Moulinette/d8cbbb0](https://github.com/YunoHost/Moulinette/commit/d8cbbb0))
- [fix] Misc micro bugfixes or improvements ([Moulinette/83d9e77](https://github.com/YunoHost/Moulinette/commit/83d9e77))
- [doc] Fix doc building + add doc build tests with Tox ([Moulinette/f1ac5b8](https://github.com/YunoHost/Moulinette/commit/f1ac5b8), [Moulinette/df7d478](https://github.com/YunoHost/Moulinette/commit/df7d478), [Moulinette/74c8f79](https://github.com/YunoHost/Moulinette/commit/74c8f79), [Moulinette/bcf92c7](https://github.com/YunoHost/Moulinette/commit/bcf92c7), [Moulinette/af2c80c](https://github.com/YunoHost/Moulinette/commit/af2c80c), [Moulinette/d52a574](https://github.com/YunoHost/Moulinette/commit/d52a574), [Moulinette/307f660](https://github.com/YunoHost/Moulinette/commit/307f660), [Moulinette/dced104](https://github.com/YunoHost/Moulinette/commit/dced104), [Moulinette/ed3823b](https://github.com/YunoHost/Moulinette/commit/ed3823b))
- [enh] READMEs improvements ([Moulinette/1541b74](https://github.com/YunoHost/Moulinette/commit/1541b74), [Moulinette/ad1eeef](https://github.com/YunoHost/Moulinette/commit/ad1eeef))
- [enh] Preparations for moulinette Python3 migration (Tox, Pytest and unit tests) (#203, #206, #207, #210, #211 #212, 2403ee1, 69b0d49, 49c749c, 2c84ee1, cef72f7)
- [enh] Add a write_to_yaml utility similar to write_to_json (2e2e627)
- [enh] Warn the user about long locks (#205)
- [mod] Tweak stuff about setuptools and moulinette deps? (b739f27, da00fc9, d8cbbb0)
- [fix] Misc micro bugfixes or improvements (83d9e77)
- [doc] Fix doc building + add doc build tests with Tox (f1ac5b8, df7d478, 74c8f79, bcf92c7, af2c80c, d52a574, 307f660, dced104, ed3823b)
- [enh] READMEs improvements (1541b74, ad1eeef)
Thanks to all contributors <3 ! (accross all repo: Yunohost, Moulinette, SSOwat, Yunohost-admin) : advocatux, Aksel K., Aleks, Allan N., amirale qt, Armin P., Bram, ButterflyOfFire, Carles S. A., chema o. r., decentral1se, Emmanuel V., Etienne M., Filip B., Geoff M., htsr, Jibec, Josué, Julien J., Kayou, liberodark, ljf, lucaskev, Lukas D., madtibo, Martin D., Mélanie C., nr 458 h, pitfd, ppr, Quentí, sidddy, troll, tufek yamero, xaloc33, yalh76
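
The 3.7.0 entry above notes a write_to_yaml utility "similar to write_to_json" (2e2e627). A hypothetical usage sketch of that pair; the import path and the (path, data) signature are assumptions, as the changelog only names the helpers:

    # Assumed import location and signatures; the changelog itself only names these helpers.
    from moulinette.utils.filesystem import write_to_json, write_to_yaml

    settings = {"main_domain": "example.org", "backup_enabled": True}

    # Serialize the same Python structure to disk in either format.
    write_to_json("/tmp/example-settings.json", settings)
    write_to_yaml("/tmp/example-settings.yml", settings)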

debian/control

@ -13,7 +13,6 @@ Depends: ${misc:Depends}, ${python:Depends},
python-ldap,
python-yaml,
python-bottle (>= 0.12),
python-gnupg,
python-gevent-websocket,
python-argcomplete,
python-toml,


@ -1,5 +1,4 @@
sphinx
gnupg
mock
pyyaml
toml


@ -13,12 +13,11 @@
"file_not_exist": "الملف غير موجود : '{path}'",
"folder_exists": "إنّ المجلد موجود من قبل : '{path}'",
"folder_not_exist": "المجلد غير موجود",
"instance_already_running": "هناك نسخة خادوم تشتغل مِن قبل",
"instance_already_running": "هناك بالفعل عملية YunoHost جارية. الرجاء الانتظار حتى ينتهي الأمر قبل تشغيل آخر.",
"invalid_argument": "المُعامِل غير صالح '{argument}': {error}",
"invalid_password": "كلمة السر خاطئة",
"invalid_usage": "إستعمال غير صالح، إستخدم --help لعرض المساعدة",
"ldap_attribute_already_exists": "الخاصية '{attribute}' موجودة مسبقا و تحمل القيمة '{value}'",
"ldap_operation_error": "طرأ هناك خطأ أثناء عملية في LDAP",
"ldap_server_down": "لا يمكن الإتصال بخادم LDAP",
"logged_in": "مُتّصل",
"logged_out": "تم تسجيل خروجك",
@ -52,5 +51,7 @@
"command_unknown": "الأمر '{command:s}' مجهول؟",
"corrupted_yaml": "قراءة مُشوّهة لنسق yaml مِن {ressource:s} (السبب : {error:s})",
"info": "معلومة:",
"warn_the_user_about_waiting_lock_again": "جارٍ الانتظار…"
"warn_the_user_about_waiting_lock_again": "جارٍ الانتظار…",
"warn_the_user_that_lock_is_acquired": "لقد انتهى تنفيذ ذاك الأمر ، جارٍ إطلاق الأمر",
"warn_the_user_about_waiting_lock": "هناك أمر لـ YunoHost قيد التشغيل حاليا. في انتظار انتهاء تنفيذه قبل تشغيل التالي"
}


@ -1 +1,4 @@
{}
{
"logged_out": "প্রস্থান",
"password": "পাসওয়ার্ড"
}


@ -13,12 +13,11 @@
"file_not_exist": "El fitxer no existeix: '{path}'",
"folder_exists": "La carpeta ja existeix: '{path}'",
"folder_not_exist": "La carpeta no existeix",
"instance_already_running": "Una instància ja s'està executant",
"instance_already_running": "Ja hi ha una operació de YunoHost en curs. Espereu a que s'acabi abans d'executar-ne una altra.",
"invalid_argument": "Argument invàlid '{argument}': {error}",
"invalid_password": "Contrasenya invàlida",
"invalid_usage": "Utilització invàlida, utilitzeu --help per veure l'ajuda",
"ldap_attribute_already_exists": "L'atribut '{attribute}' ja existeix amb el valor '{value}'",
"ldap_operation_error": "Hi ha hagut un error durant l'operació de LDAP",
"ldap_server_down": "No s'ha pogut connectar amb el servidor LDAP",
"logged_in": "Sessió iniciada",
"logged_out": "Sessió tancada",
@ -55,5 +54,6 @@
"corrupted_toml": "El fitxer TOML ha estat corromput en la lectura des de {ressource:s} (motiu: {error:s})",
"warn_the_user_about_waiting_lock": "Hi ha una altra ordre de YunoHost en execució, s'executarà aquesta ordre un cop l'anterior hagi acabat",
"warn_the_user_about_waiting_lock_again": "Encara en espera…",
"warn_the_user_that_lock_is_acquired": "l'altra ordre tot just ha acabat, ara s'executarà aquesta ordre"
"warn_the_user_that_lock_is_acquired": "l'altra ordre tot just ha acabat, ara s'executarà aquesta ordre",
"invalid_token": "Testimoni no vàlid - torneu-vos a autenticar"
}


@ -1,5 +1,5 @@
{
"argument_required": "{argument}是必须的",
"argument_required": "参数“{argument}是必须的",
"authentication_profile_required": "必须验证配置文件{profile}",
"authentication_required": "需要验证",
"authentication_required_long": "此操作需要验证",
@ -19,10 +19,9 @@
"invalid_password": "密码错误",
"invalid_usage": "用法错误,输入 --help 查看帮助信息",
"ldap_attribute_already_exists": "参数{attribute}已赋值{value}",
"ldap_operation_error": "LDAP操作时发生了错误",
"ldap_server_down": "无法连接LDAP服务器",
"logged_in": "登录成功",
"logged_out": "注销成功",
"logged_out": "登出",
"not_logged_in": "您未登录",
"operation_interrupted": "操作中断",
"password": "密码",


@ -16,10 +16,9 @@
"invalid_password": "Passwort falsch",
"invalid_usage": "Falscher Aufruf, verwende --help für den Hilfstext",
"ldap_attribute_already_exists": "Attribute existieren bereits: '{attribute}={value}'",
"ldap_operation_error": "Ein Fehler trat während der LDAP Abfrage auf",
"ldap_server_down": "LDAP-Server nicht erreichbar",
"logged_in": "Angemeldet",
"logged_out": "Ausgeloggt",
"logged_out": "Abgemeldet",
"not_logged_in": "Du bist nicht angemeldet",
"operation_interrupted": "Vorgang unterbrochen",
"password": "Passwort",
@ -37,5 +36,24 @@
"deprecated_command_alias": "'{prog} {old}' ist veraltet und wird bald entfernt werden, benutze '{prog} {new}' stattdessen",
"unknown_group": "Gruppe '{group}' ist unbekannt",
"unknown_user": "Benutzer '{user}' ist unbekannt",
"info": "Info:"
"info": "Info:",
"invalid_token": "Ungültiger Token - bitte authentifizieren",
"corrupted_json": "Beschädigtes JSON gelesen von {ressource:s} (reason: {error:s})",
"unknown_error_reading_file": "Unbekannter Fehler beim Lesen der Datei {file:s} (reason: {error:s})",
"cannot_write_file": "Kann Datei {file:s} nicht schreiben (reason: {error:s})",
"cannot_open_file": "Kann Datei {file:s} nicht öffnen (reason: {error:s})",
"corrupted_yaml": "Beschädigtes YAML gelesen von {ressource:s} (reason: {error:s})",
"warn_the_user_that_lock_is_acquired": "der andere Befehl wurde gerade abgeschlossen, starte jetzt diesen Befehl",
"warn_the_user_about_waiting_lock_again": "Immer noch wartend...",
"warn_the_user_about_waiting_lock": "Ein anderer YunoHost Befehl läuft gerade, wir warten bis er fertig ist, bevor dieser laufen kann",
"command_unknown": "Befehl '{command:s}' unbekannt?",
"download_bad_status_code": "{url:s} lieferte folgende(n) Status Code(s) {code:s}",
"download_unknown_error": "Fehler beim Herunterladen von Daten von {url:s}: {error:s}",
"download_timeout": "{url:s} brauchte zu lange zum Antworten, hab aufgegeben.",
"download_ssl_error": "SSL Fehler beim Verbinden zu {url:s}",
"invalid_url": "Ungültige URL {url:s} (existiert diese Seite?)",
"error_changing_file_permissions": "Fehler beim Ändern der Berechtigungen für {path:s}: {error:s}",
"error_removing": "Fehler beim Entfernen {path:s}: {error:s}",
"error_writing_file": "Fehler beim Schreiben von Datei {file:s}: {error:s}",
"corrupted_toml": "Beschädigtes TOML gelesen von {ressource:s} (reason: {error:s})"
}


@ -1 +1,4 @@
{}
{
"logged_out": "Αποσυνδέθηκα",
"password": "Κωδικός πρόσβασης"
}


@ -1,6 +1,5 @@
{
"argument_required": "Argument '{argument}' is required",
"authentication_profile_required": "Authentication to profile '{profile}' required",
"authentication_required": "Authentication required",
"authentication_required_long": "Authentication is required to perform this action",
"colon": "{}: ",
@ -17,6 +16,7 @@
"instance_already_running": "There is already a YunoHost operation running. Please wait for it to finish before running another one.",
"invalid_argument": "Invalid argument '{argument}': {error}",
"invalid_password": "Invalid password",
"invalid_token": "Invalid token - please authenticate",
"invalid_usage": "Invalid usage, pass --help to see help",
"ldap_attribute_already_exists": "Attribute '{attribute}' already exists with value '{value}'",
"ldap_server_down": "Unable to reach LDAP server",
@ -31,6 +31,7 @@
"success": "Success!",
"unable_authenticate": "Unable to authenticate",
"unable_retrieve_session": "Unable to retrieve the session because '{exception}'",
"session_expired": "The session expired. Please re-authenticate.",
"unknown_group": "Unknown '{group}' group",
"unknown_user": "Unknown '{user}' user",
"values_mismatch": "Values don't match",
@ -54,5 +55,6 @@
"command_unknown": "Command '{command:s}' unknown ?",
"warn_the_user_about_waiting_lock": "Another YunoHost command is running right now, we are waiting for it to finish before running this one",
"warn_the_user_about_waiting_lock_again": "Still waiting...",
"warn_the_user_that_lock_is_acquired": "the other command just complet, now starting this command"
"warn_the_user_that_lock_is_acquired": "the other command just completed, now starting this command",
"ldap_server_is_down_restart_it": "the ldap service is down, attempt to restart it..."
}


@ -1,3 +1,58 @@
{
"password": "Pasvorto"
"password": "Pasvorto",
"colon": "{}: ",
"warn_the_user_that_lock_is_acquired": "la alia komando ĵus kompletigis, nun komencante ĉi tiun komandon",
"warn_the_user_about_waiting_lock_again": "Ankoraŭ atendanta...",
"warn_the_user_about_waiting_lock": "Alia komando de YunoHost funkcias ĝuste nun, ni atendas, ke ĝi finiĝos antaŭ ol funkcii ĉi tiu",
"command_unknown": "Komando '{command:s}' nekonata?",
"download_bad_status_code": "{url:s} redonita statuskodo {code:s}",
"download_unknown_error": "Eraro dum elŝutado de datumoj de {url:s}: {error:s}",
"download_timeout": "{url:s} prenis tro da tempo por respondi, rezignis.",
"download_ssl_error": "SSL-eraro dum konekto al {url:s}",
"invalid_url": "Nevalida url {url:s} (ĉu ĉi tiu retejo ekzistas?)",
"error_changing_file_permissions": "Eraro dum ŝanĝo de permesoj por {path:s}: {error:s}",
"error_removing": "Eraro dum la forigo de {path:s}: {error:s}",
"error_writing_file": "Eraro skribinte dosieron {file:s}: {error:s}",
"corrupted_toml": "Korupta toml legita el {ressource:s} (kialo: {error:s})",
"corrupted_yaml": "Korupta yaml legita de {ressource:s} (kialo: {error:s})",
"corrupted_json": "Koruptita json legita de {ressource:s} (kialo: {error:s})",
"unknown_error_reading_file": "Nekonata eraro dum provi legi dosieron {file:s} (kialo: {error:s})",
"cannot_write_file": "Ne povis skribi dosieron {file:s} (kialo: {error:s})",
"cannot_open_file": "Ne povis malfermi dosieron {file: s} (kialo: {error: s})",
"websocket_request_expected": "Atendis ret-peto",
"warning": "Averto:",
"values_mismatch": "Valoroj ne kongruas",
"unknown_user": "Nekonata uzanto '{user}'",
"unknown_group": "Nekonata grupo \"{group}\"",
"unable_retrieve_session": "Ne eblas retrovi la sesion ĉar '{exception}'",
"unable_authenticate": "Ne eblas aŭtentiĝi",
"success": "Sukceson!",
"server_already_running": "Servilo jam funkcias sur tiu haveno",
"root_required": "Vi devas esti 'root' por plenumi ĉi tiun agon",
"pattern_not_match": "Ne kongruas kun ŝablono",
"operation_interrupted": "Operacio interrompita",
"not_logged_in": "Vi ne estas ensalutinta",
"logged_in": "Ensalutinta",
"ldap_server_down": "Ne eblas atingi la servilon LDAP",
"ldap_attribute_already_exists": "Atributo '{attribute}' jam ekzistas kun valoro '{value}'",
"invalid_usage": "Nevalida uzado, preterpase '--help' por vidi helpon",
"invalid_password": "Nevalida pasvorto",
"invalid_argument": "Nevalida argumento '{argument}': {error}",
"instance_already_running": "Jam funkcias YunoHost-operacio. Bonvolu atendi, ke ĝi finiĝos antaŭ ol funkcii alia.",
"info": "informoj:",
"folder_not_exist": "Dosierujo ne ekzistas",
"folder_exists": "Dosierujo jam ekzistas: '{path}'",
"file_not_exist": "Dosiero ne ekzistas: '{path}'",
"file_exists": "Dosiero jam ekzistas: '{path}'",
"error_see_log": "Eraro okazis. Bonvolu vidi la protokolojn por detaloj, ili troviĝas en /var/log/yunohost/.",
"error": "Eraro:",
"deprecated_command_alias": "'{prog} {old}' malakceptas kaj estos forigita estonte, uzu anstataŭe '{prog} {new}'",
"deprecated_command": "'{prog} {command}' malakceptas kaj estos forigita estonte",
"confirm": "Konfirmu {prompt}",
"authentication_required_long": "Aŭtentigo necesas por plenumi ĉi tiun agon",
"authentication_required": "Aŭtentigo bezonata",
"authentication_profile_required": "Aŭtentigo al la profilo '{profile}' bezonata",
"argument_required": "Argumento '{argument}' estas bezonata",
"logged_out": "Ensalutinta",
"invalid_token": "Nevalida tokeno - bonvolu autentiki"
}


@ -18,7 +18,6 @@
"invalid_password": "Contraseña no válida",
"invalid_usage": "Uso no válido, utilice --help para ver la ayuda",
"ldap_attribute_already_exists": "El atributo «{attribute}» ya existe con el valor «{value}»",
"ldap_operation_error": "Ha ocurrido un error durante la operación de LDAP",
"ldap_server_down": "No se pudo conectar con el servidor LDAP",
"logged_in": "Sesión iniciada",
"logged_out": "Sesión cerrada",
@ -53,7 +52,8 @@
"corrupted_yaml": "Lectura corrupta de yaml desde {ressource:s} (motivo: {error:s})",
"info": "Información:",
"corrupted_toml": "Lectura corrupta de TOML desde {ressource:s} (motivo: {error:s})",
"warn_the_user_that_lock_is_acquired": "la otra orden ha terminado, iniciando esta orden ahora",
"warn_the_user_that_lock_is_acquired": "la otra orden recién terminó, iniciando esta orden ahora",
"warn_the_user_about_waiting_lock_again": "Aún esperando...",
"warn_the_user_about_waiting_lock": "Otra orden de YunoHost se está ejecutando ahora, estamos esperando a que termine antes de ejecutar esta"
"warn_the_user_about_waiting_lock": "Otra orden de YunoHost se está ejecutando ahora, estamos esperando a que termine antes de ejecutar esta",
"invalid_token": "Token invalido - vuelva a autenticarte"
}


@ -1,3 +1,6 @@
{
"argument_required": "'{argument}' argumentua beharrezkoa da"
"argument_required": "'{argument}' argumentua beharrezkoa da",
"logged_out": "Saioa amaitu",
"password": "Pasahitza",
"colon": "{}: "
}


@ -18,7 +18,6 @@
"invalid_password": "Mot de passe incorrect",
"invalid_usage": "Utilisation erronée, utilisez --help pour accéder à laide",
"ldap_attribute_already_exists": "Lattribut '{attribute}' existe déjà avec la valeur suivante : '{value}'",
"ldap_operation_error": "Une erreur est survenue lors de lopération LDAP",
"ldap_server_down": "Impossible datteindre le serveur LDAP",
"logged_in": "Connecté",
"logged_out": "Déconnecté",
@ -55,5 +54,6 @@
"corrupted_toml": "Fichier TOML corrompu en lecture depuis {ressource:s} (cause : {error:s})",
"warn_the_user_about_waiting_lock": "Une autre commande YunoHost est actuellement en cours, nous attendons qu'elle se termine avant de démarrer celle là",
"warn_the_user_about_waiting_lock_again": "Toujours en attente...",
"warn_the_user_that_lock_is_acquired": "l'autre commande vient de se terminer, lancement de cette commande"
"warn_the_user_that_lock_is_acquired": "lautre commande vient de se terminer, lancement de cette commande",
"invalid_token": "Jeton non valide - veuillez vous authentifier"
}


@ -18,7 +18,6 @@
"invalid_password": "अवैध पासवर्ड",
"invalid_usage": "अवैध उपयोग, सहायता देखने के लिए --help साथ लिखे।",
"ldap_attribute_already_exists": "'{attribute}' तर्क पहले इस वैल्यू '{value}' से मौजूद है।",
"ldap_operation_error": "LDAP ऑपरेशन के दौरान त्रुटि हो गई है।",
"ldap_server_down": "LDAP सर्वर तक पहुंचने में असमर्थ।",
"logged_in": "लोग्ड इन",
"logged_out": "लॉग आउट",


@ -1 +1,4 @@
{}
{
"logged_out": "Kilépett",
"password": "Jelszó"
}


@ -15,12 +15,11 @@
"file_not_exist": "Il file non esiste: '{path}'",
"folder_exists": "La cartella esiste già: '{path}'",
"folder_not_exist": "La cartella non esiste",
"instance_already_running": "Un'istanza è già in esecuzione",
"instance_already_running": "Esiste già un'operazione YunoHost in esecuzione. Attendi il completamento prima di eseguirne un altro.",
"invalid_argument": "Argomento non valido '{argument}': {error}",
"invalid_password": "Password non valida",
"invalid_usage": "Utilizzo non valido, usa --help per vedere l'aiuto",
"ldap_attribute_already_exists": "L'attributo '{attribute}' esiste già con valore '{value}'",
"ldap_operation_error": "Si è verificato un errore durante l'operazione LDAP",
"ldap_server_down": "Impossibile raggiungere il server LDAP",
"logged_in": "Connesso",
"not_logged_in": "Non hai effettuato l'accesso",
@ -31,7 +30,7 @@
"server_already_running": "Un server è già in esecuzione su quella porta",
"success": "Riuscito!",
"unable_authenticate": "Autenticazione fallita",
"unable_retrieve_session": "Recupero della sessione non riuscito",
"unable_retrieve_session": "Impossibile recuperare la sessione perché \"{exception}\"",
"unknown_group": "Gruppo '{group}' sconosciuto",
"unknown_user": "Utente '{user}' sconosciuto",
"values_mismatch": "I valori non corrispondono",
@ -39,7 +38,7 @@
"websocket_request_expected": "Richiesta WebSocket attesa",
"cannot_open_file": "Impossibile aprire il file {file:s} (motivo: {error:s})",
"cannot_write_file": "Impossibile scrivere il file {file:s} (motivo: {error:s})",
"unknown_error_reading_file": "Errore sconosciuto nel tentativo di leggere il file {file:s}",
"unknown_error_reading_file": "Errore sconosciuto durante il tentativo di leggere il file {file:s} (motivo: {errore:s})",
"corrupted_json": "Lettura json corrotta da {ressource:s} (motivo: {error:s})",
"corrupted_yaml": "Lettura yaml corrotta da {ressource:s} (motivo: {error:s})",
"error_writing_file": "Errore durante la scrittura del file {file:s}: {error:s}",
@ -51,5 +50,10 @@
"download_unknown_error": "Errore durante il download di dati da {url:s} : {error:s}",
"download_bad_status_code": "{url:s} ha restituito il codice di stato {code:s}",
"command_unknown": "Comando '{command:s}' sconosciuto ?",
"info": "Info:"
"info": "Info:",
"warn_the_user_that_lock_is_acquired": "l'altro comando è appena completato, ora avvia questo comando",
"warn_the_user_about_waiting_lock_again": "Sto ancora aspettando ...",
"warn_the_user_about_waiting_lock": "Un altro comando YunoHost è in esecuzione in questo momento, stiamo aspettando che finisca prima di eseguire questo",
"corrupted_toml": "Toml corrotto da {ressource:s} (motivo: {errore:s})",
"invalid_token": "Token non valido: autenticare"
}

locales/ne.json

@ -0,0 +1,4 @@
{
"logged_out": "लग आउट",
"password": "पासवर्ड"
}


@ -11,12 +11,11 @@
"file_not_exist": "Bestand bestaat niet: '{path}'",
"folder_exists": "Deze map bestaat al: '{path}'",
"folder_not_exist": "Map bestaat niet",
"instance_already_running": "Er is al een instantie actief",
"instance_already_running": "Er is al een instantie actief, bedankt om te wachten tot deze afgesloten is alvorens een andere te starten.",
"invalid_argument": "Ongeldig argument '{argument}': {error}",
"invalid_password": "Ongeldig wachtwoord",
"invalid_usage": "Ongeldig gebruik, doe --help om de hulptekst te lezen",
"ldap_attribute_already_exists": "Attribuut '{attribute}' bestaat al met waarde '{value}'",
"ldap_operation_error": "Er is een fout opgetreden bij het uitvoeren van LDAP operatie",
"ldap_server_down": "Kan LDAP server niet bereiken",
"logged_in": "Ingelogd",
"logged_out": "Uitgelogd",
@ -29,7 +28,7 @@
"server_already_running": "Er is al een server actief op die poort",
"success": "Succes!",
"unable_authenticate": "Aanmelding niet mogelijk",
"unable_retrieve_session": "Kan de sessie niet ophalen",
"unable_retrieve_session": "Het is onmogelijk op de sessie op te halen omwille van '{exception}'",
"values_mismatch": "Waarden zijn niet gelijk",
"warning": "Waarschuwing:",
"websocket_request_expected": "Verwachtte een WebSocket request",
@ -39,7 +38,7 @@
"unknown_user": "Gebruiker '{user}' is onbekend",
"cannot_open_file": "Niet mogelijk om bestand {file:s} te openen (reden: {error:s})",
"cannot_write_file": "Niet gelukt om bestand {file:s} te schrijven (reden: {error:s})",
"unknown_error_reading_file": "Ongekende fout tijdens het lezen van bestand {file:s}",
"unknown_error_reading_file": "Ongekende fout tijdens het lezen van bestand {file:s} (cause:{error:s})",
"corrupted_json": "Corrupte json gelezen van {ressource:s} (reden: {error:s})",
"error_writing_file": "Fout tijdens het schrijven van bestand {file:s}: {error:s}",
"error_removing": "Fout tijdens het verwijderen van {path:s}: {error:s}",
@ -49,5 +48,12 @@
"download_timeout": "{url:s} neemt te veel tijd om te antwoorden, we geven het op.",
"download_unknown_error": "Fout tijdens het downloaden van data van {url:s}: {error:s}",
"download_bad_status_code": "{url:s} stuurt status code {code:s}",
"command_unknown": "Opdracht '{command:s}' ongekend ?"
"command_unknown": "Opdracht '{command:s}' ongekend ?",
"warn_the_user_that_lock_is_acquired": "de andere opdracht is zojuist voltooid en start nu deze opdracht",
"warn_the_user_about_waiting_lock_again": "Nog steeds aan het wachten...",
"warn_the_user_about_waiting_lock": "Een ander YunoHost commando wordt uitgevoerd, we wachten tot het gedaan is alovrens dit te starten",
"corrupted_toml": "Ongeldige TOML werd gelezen op {ressource:s} (reason: {error:s})",
"corrupted_yaml": "Ongeldig YAML bestand op {ressource:s} (reason: {error:s})",
"invalid_token": "Ongeldig token - gelieve in te loggen",
"info": "Ter info:"
}


@ -16,7 +16,7 @@
"file_not_exist": "Lo fichièr « {path} » existís pas",
"folder_exists": "Lo repertòri existís ja: « {path} »",
"folder_not_exist": "Lo repertòri existís pas",
"instance_already_running": "Una instància es ja en execucion",
"instance_already_running": "I a ja una operacion de YunoHost en cors. Mercés desperar que sacabe abans de ne lançar una mai.",
"invalid_argument": "Argument « {argument} » incorrècte: {error}",
"invalid_password": "Senhal incorrècte",
"ldap_server_down": "Impossible daténher lo servidor LDAP",
@ -31,7 +31,6 @@
"warning": "Atencion:",
"invalid_usage": "Usatge invalid, utilizatz --help per accedir a lajuda",
"ldap_attribute_already_exists": "Latribut « {attribute} » existís ja amb la valor: {value}",
"ldap_operation_error": "Una error ses producha pendent loperacion LDAP",
"operation_interrupted": "Operacion interrompuda",
"server_already_running": "Un servidor es ja en execucion sus aqueste pòrt",
"success": "Capitada!",
@ -55,5 +54,6 @@
"corrupted_toml": "Fichièr TOML corromput en lectura de {ressource:s} estant (rason: {error:s})",
"warn_the_user_about_waiting_lock": "Una autra comanda YunoHost es en execucion, sèm a esperar quacabe abans daviar aquesta daquí",
"warn_the_user_about_waiting_lock_again": "Encara en espèra…",
"warn_the_user_that_lock_is_acquired": "lautra comanda ven dacabar, lançament daquesta comanda"
"warn_the_user_that_lock_is_acquired": "lautra comanda ven dacabar, ara lançament daquesta comanda",
"invalid_token": "Geton invalid - volgatz vos autentificar"
}


@ -1 +1,57 @@
{}
{
"logged_out": "Wylogowano",
"password": "Hasło",
"warn_the_user_that_lock_is_acquired": "drugie polecenie właśnie się zakończyło, teraz uruchamiając to polecenie",
"warn_the_user_about_waiting_lock_again": "Wciąż czekam...",
"warn_the_user_about_waiting_lock": "Kolejne polecenie YunoHost jest teraz uruchomione, czekamy na jego zakończenie przed uruchomieniem tego",
"command_unknown": "Polecenie „{command:s}” jest nieznane?",
"download_bad_status_code": "{url:s} zwrócił kod stanu {code:s}",
"download_unknown_error": "Błąd podczas pobierania danych z {url:s}: {error:s}",
"download_timeout": "{url:s} odpowiedział zbyt długo, poddał się.",
"download_ssl_error": "Błąd SSL podczas łączenia z {url:s}",
"invalid_url": "Nieprawidłowy adres URL {url:s} (czy ta strona istnieje?)",
"error_changing_file_permissions": "Błąd podczas zmiany uprawnień dla {path:s}: {error:s}",
"error_removing": "Błąd podczas usuwania {path:s}: {error:s}",
"error_writing_file": "Błąd podczas zapisywania pliku {file:s}: {error:s}",
"corrupted_toml": "Uszkodzony toml z {ressource: s} (powód: {error:s})",
"corrupted_yaml": "Uszkodzony yaml odczytany z {ressource:s} (powód: {error:s})",
"corrupted_json": "Uszkodzony json odczytany z {ressource:s} (powód: {error:s})",
"unknown_error_reading_file": "Nieznany błąd podczas próby odczytania pliku {file:s} (przyczyna: {error:s})",
"cannot_write_file": "Nie można zapisać pliku {file:s} (przyczyna: {error:s})",
"cannot_open_file": "Nie można otworzyć pliku {file:s} (przyczyna: {error:s})",
"websocket_request_expected": "Oczekiwano żądania WebSocket",
"warning": "Ostrzeżenie:",
"values_mismatch": "Wartości nie pasują",
"unknown_user": "Nieznany użytkownik „{user}”",
"unknown_group": "Nieznana grupa „{group}”",
"unable_retrieve_session": "Nie można pobrać sesji, ponieważ „{exception}”",
"unable_authenticate": "Nie można uwierzytelnić",
"success": "Sukces!",
"server_already_running": "Serwer już działa na tym porcie",
"root_required": "Aby wykonać tę akcję, musisz być rootem",
"pattern_not_match": "Nie pasuje do wzoru",
"operation_interrupted": "Operacja przerwana",
"not_logged_in": "Nie jesteś zalogowany",
"logged_in": "Zalogowany",
"ldap_server_down": "Nie można połączyć się z serwerem LDAP",
"ldap_attribute_already_exists": "Atrybut „{attribute}” już istnieje z wartością „{value}”",
"invalid_usage": "Nieprawidłowe użycie. Przejdź --help, aby wyświetlić pomoc",
"invalid_token": "Nieprawidłowy token - proszę uwierzytelnić",
"invalid_password": "Nieprawidłowe hasło",
"invalid_argument": "Nieprawidłowy argument „{argument}”: {error}",
"instance_already_running": "Trwa już operacja YunoHost. Zaczekaj na zakończenie, zanim uruchomisz kolejny.",
"info": "Informacje:",
"folder_not_exist": "Folder nie istnieje",
"folder_exists": "Folder już istnieje: „{path}”",
"file_not_exist": "Plik nie istnieje: „{path}”",
"file_exists": "Plik już istnieje: „{path}”",
"error_see_log": "Wystąpił błąd. Szczegółowe informacje można znaleźć w dziennikach, znajdują się one w katalogu /var/log/yunohost/.",
"error": "Błąd:",
"deprecated_command_alias": "„{prog} {old}” jest przestarzałe i zostanie usunięte w przyszłości, zamiast tego użyj „{prog} {new}”",
"deprecated_command": "„{prog} {command}” jest przestarzałe i zostanie usunięte w przyszłości",
"confirm": "Potwierdź {prompt}",
"colon": "{}: ",
"authentication_required_long": "Do wykonania tej czynności wymagane jest uwierzytelnienie",
"authentication_required": "Wymagane uwierzytelnienie",
"argument_required": "Argument „{argument}” jest wymagany"
}


@ -10,12 +10,11 @@
"file_not_exist": "O ficheiro não existe: '{path}'",
"folder_exists": "A pasta já existe: '{path}'",
"folder_not_exist": "A pasta não existe",
"instance_already_running": "O serviço já está em execussão",
"instance_already_running": "Já existe uma operação YunoHost em execução. Aguarde o término antes de executar outro.",
"invalid_argument": "Argumento inválido '{argument}': {error}",
"invalid_password": "Senha incorreta",
"invalid_usage": "Uso invalido, utilizar --help para ver a ajuda",
"ldap_attribute_already_exists": "O atributo '{attribute}' já existe com valor '{value}'",
"ldap_operation_error": "Um erro ocorreu durante a operação LDAP",
"ldap_server_down": "Não foi possível comunicar com o servidor LDAP",
"logged_in": "Sessão iniciada",
"logged_out": "Sessão terminada",
@ -28,7 +27,7 @@
"server_already_running": "Existe um servidor ativo nessa porta",
"success": "Sucesso!",
"unable_authenticate": "Não foi possível autenticar",
"unable_retrieve_session": "Não foi possível recuperar a sessão",
"unable_retrieve_session": "Não foi possível recuperar a sessão porque '{exception}'",
"values_mismatch": "Os valores não coincidem",
"warning": "Aviso:",
"websocket_request_expected": "Esperado um pedido a WebSocket",
@ -39,7 +38,7 @@
"unknown_user": "Nome de utilizador '{user}' desconhecido",
"cannot_open_file": "Não foi possível abrir o arquivo {file:s} (reason: {error:s})",
"cannot_write_file": "Não foi possível abrir o arquivo {file:s} (reason: {error:s})",
"unknown_error_reading_file": "Erro desconhecido ao tentar ler o arquivo {file:s}",
"unknown_error_reading_file": "Erro desconhecido ao tentar ler o arquivo {file:s} (motivo: {error:s})",
"error_writing_file": "Erro ao gravar arquivo {file:s}: {error:s}",
"error_removing": "Erro ao remover {path:s}: {error:s}",
"error_changing_file_permissions": "Erro ao alterar as permissões para {path:s}: {error:s}",
@ -50,5 +49,11 @@
"download_bad_status_code": "{url:s} retornou o código de status {code:s}",
"command_unknown": "Comando '{command:s}' desconhecido ?",
"corrupted_json": "Json corrompido lido do {ressource:s} (motivo: {error:s})",
"corrupted_yaml": "Yaml corrompido lido do {ressource:s} (motivo: {error:s})"
"corrupted_yaml": "Yaml corrompido lido do {ressource:s} (motivo: {error:s})",
"warn_the_user_that_lock_is_acquired": "o outro comando acabou de concluir, agora iniciando este comando",
"warn_the_user_about_waiting_lock_again": "Ainda esperando...",
"warn_the_user_about_waiting_lock": "Outro comando YunoHost está sendo executado agora, estamos aguardando o término antes de executar este",
"corrupted_toml": "Toml corrompido lido em {ressource:s} (motivo: {error:s})",
"invalid_token": "Token inválido - autentique",
"info": "Informações:"
}


@ -32,7 +32,7 @@
"websocket_request_expected": "Ожидается запрос WebSocket",
"cannot_open_file": "Не могу открыть файл {file:s} (причина: {error:s})",
"cannot_write_file": "Не могу записать файл {file:s} (причина: {error:s})",
"unknown_error_reading_file": "Неизвестная ошибка при чтении файла {file:s}",
"unknown_error_reading_file": "Неизвестная ошибка при попытке прочитать файл {file:s} (причина: {error:s})",
"corrupted_yaml": "Повреждённой yaml получен от {ressource:s} (причина: {error:s})",
"error_writing_file": "Ошибка при записи файла {file:s}: {error:s}",
"error_removing": "Ошибка при удалении {path:s}: {error:s}",
@ -40,9 +40,19 @@
"download_ssl_error": "Ошибка SSL при соединении с {url:s}",
"download_timeout": "Превышено время ожидания ответа от {url:s}.",
"download_unknown_error": "Ошибка при загрузке данных с {url:s} : {error:s}",
"instance_already_running": "Процесс уже запущен",
"ldap_operation_error": "Ошибка в процессе работы LDAP",
"instance_already_running": "Операция YunoHost уже запущена. Пожалуйста, подождите, пока он закончится, прежде чем запускать другой.",
"root_required": "Чтобы выполнить это действие, вы должны иметь права root",
"corrupted_json": "Повреждённый json получен от {ressource:s} (причина: {error:s})",
"command_unknown": "Команда '{command:s}' неизвестна ?"
"command_unknown": "Команда '{command:s}' неизвестна ?",
"warn_the_user_that_lock_is_acquired": "другая команда только что завершилась, теперь запускает эту команду",
"warn_the_user_about_waiting_lock_again": "Все еще жду...",
"warn_the_user_about_waiting_lock": "Сейчас запускается еще одна команда YunoHost, мы ждем ее завершения, прежде чем запустить эту",
"download_bad_status_code": "{url:s} вернул код состояния {code:s}",
"error_changing_file_permissions": "Ошибка при изменении разрешений для {path:s}: {error:s}",
"corrupted_toml": "Поврежденный том, прочитанный из {ressource:s} (причина: {error:s})",
"unable_retrieve_session": "Невозможно получить сеанс, так как '{exception}'",
"ldap_server_down": "Невозможно связаться с сервером LDAP",
"invalid_usage": "Неправильное использование, передайте --help, чтобы увидеть помощь",
"invalid_token": "Неверный токен - пожалуйста, авторизуйтесь",
"info": "Информация:"
}


@ -1 +1,57 @@
{}
{
"warn_the_user_about_waiting_lock_again": "Väntar fortfarande …",
"download_bad_status_code": "{url:s} svarade med statuskod {code:s}",
"download_timeout": "Gav upp eftersom {url:s} tog för lång tid på sig att svara.",
"download_ssl_error": "Ett SSL-fel påträffades vid anslutning till {url:s}",
"cannot_write_file": "Kunde inte skriva till filen {file:s} (orsak: {error:s})",
"cannot_open_file": "Kunde inte öppna filen {file:s} (orsak: {error:s})",
"websocket_request_expected": "Förväntade en WebSocket-förfrågan",
"warning": "Varning:",
"values_mismatch": "Värdena stämmer inte överens",
"unknown_user": "Okänd användare '{user}'",
"unknown_group": "Okänd grupp '{group}'",
"success": "Lyckades!",
"server_already_running": "En server använder redan den porten",
"root_required": "Du måste vara inloggad som root för att utföra den här åtgärden",
"pattern_not_match": "Stämmer inte in på mönstret",
"operation_interrupted": "Behandling avbruten",
"not_logged_in": "Du är inte inloggad",
"logged_in": "Inloggad",
"ldap_attribute_already_exists": "Attributet '{attribute}' finns redan med värdet '{value}'",
"invalid_password": "Ogiltigt lösenord",
"invalid_argument": "Ogiltig parameter '{argument}': {error}",
"logged_out": "Utloggad",
"info": "Info:",
"folder_not_exist": "Katalogen finns inte",
"folder_exists": "Katalogen finns redan: '{path}'",
"file_not_exist": "Filen finns inte: '{path}'",
"file_exists": "Filen finns redan: '{path}'",
"error_see_log": "Ett fel har inträffat. Kolla gärna i loggfilerna för mer information, de finns i /var/log/yunohost/.",
"error": "Fel:",
"deprecated_command_alias": "'{prog} {old}' rekommenderas inte längre och kommer tas bort i framtiden, använd '{prog} {new}' istället",
"deprecated_command": "'{prog} {command}' rekommenderas inte längre och kommer tas bort i framtiden",
"confirm": "Bekräfta {prompt}",
"colon": "{}: ",
"argument_required": "Parametern '{argument}' krävs",
"password": "Lösenord",
"warn_the_user_that_lock_is_acquired": "det andra kommandot har bara slutförts, nu startar du det här kommandot",
"warn_the_user_about_waiting_lock": "Ett annat YunoHost-kommando körs just nu, vi väntar på att det ska slutföras innan det här körs",
"command_unknown": "Kommando '{command:s}' okänd?",
"download_unknown_error": "Fel vid nedladdning av data från {url:s}: {error:s}",
"invalid_url": "Ogiltig url {url:s} (finns den här webbplatsen?)",
"error_changing_file_permissions": "Fel vid ändring av behörigheter för {path:s}: {error:s}",
"error_removing": "Fel vid borttagning av {path:s}: {error:s}",
"error_writing_file": "Fel vid skrivning av fil {file:s}: {error:s}",
"corrupted_toml": "Korrupt toml läst från {ressource:s} (anledning: {error:s})",
"corrupted_yaml": "Skadad yaml läst från {ressource:s} (anledning: {error:s})",
"corrupted_json": "Skadad json läst från {ressource:s} (anledning: {error:s})",
"unknown_error_reading_file": "Okänt fel vid försök att läsa filen {file:s} (anledning: {error:s})",
"unable_retrieve_session": "Det gick inte att hämta sessionen eftersom '{exception}'",
"unable_authenticate": "Det går inte att verifiera",
"ldap_server_down": "Det går inte att nå LDAP-servern",
"invalid_usage": "Ogiltig användning, pass --help för att se hjälp",
"invalid_token": "Ogiltigt token - verifiera",
"instance_already_running": "Det finns redan en YunoHost-operation. Vänta tills den är klar innan du kör en annan.",
"authentication_required_long": "Autentisering krävs för att utföra denna åtgärd",
"authentication_required": "Autentisering krävs"
}


@ -6,12 +6,11 @@
"colon": "{}: ",
"confirm": "{prompt}'i doğrulayın",
"error": "Hata:",
"error_see_log": "Bir hata oluştu. Detaylar için lütfen loga bakınız",
"instance_already_running": "Uygulama zaten çalışıyor",
"error_see_log": "Bir hata oluştu. Ayrıntılar için lütfen günlüklere bakın, bunlar /var/log/yunohost/ dizinindedir.",
"instance_already_running": "Halihazırda bir YunoHost operasyonu var. Lütfen başka bir tane çalıştırmadan önce bitmesini bekleyin.",
"invalid_argument": "Geçersiz argüman '{argument}': {error}",
"invalid_password": "Geçersiz parola",
"ldap_attribute_already_exists": "'{attribute}={value}' özelliği zaten mevcut",
"ldap_operation_error": "LDAP işlemi sırasında hata oluştu",
"ldap_server_down": "LDAP sunucusuna erişilemiyor",
"logged_in": "Giriş yapıldı",
"logged_out": ıkış yapıldı",
@ -24,8 +23,37 @@
"server_already_running": "Bu portta zaten çalışan bir sunucu var",
"success": "İşlem Başarılı!",
"unable_authenticate": "Yetkilendirme başarısız",
"unable_retrieve_session": "Oturum bilgileri alınamadı",
"unable_retrieve_session": "'{exception}' nedeniyle oturum alınamadı",
"values_mismatch": "Değerler uyuşmuyor",
"warning": "Uyarı:",
"websocket_request_expected": "WebSocket isteği gerekli"
"websocket_request_expected": "WebSocket isteği gerekli",
"warn_the_user_that_lock_is_acquired": "diğer komut şimdi tamamlandı, şimdi bu komutu başlatıyor",
"warn_the_user_about_waiting_lock_again": "Hala bekliyor...",
"warn_the_user_about_waiting_lock": "Başka bir YunoHost komutu şu anda çalışıyor, bunu çalıştırmadan önce bitmesini bekliyoruz",
"command_unknown": "'{Command:s}' komutu bilinmiyor mu?",
"download_bad_status_code": "{url:s} döndürülen durum kodu {code:s}",
"download_unknown_error": "{url:s} adresinden veri indirilirken hata oluştu: {error:s}",
"download_timeout": "{url:s} yanıtlaması çok uzun sürdü, pes etti.",
"download_ssl_error": "{url:s} ağına bağlanırken SSL hatası",
"invalid_url": "Geçersiz url {url:s} (bu site var mı?)",
"error_changing_file_permissions": "{Path:s} için izinler değiştirilirken hata oluştu: {error:s}",
"error_removing": "{Path:s} kaldırılırken hata oluştu: {error:s}",
"error_writing_file": "{File:s} dosyası yazılırken hata oluştu: {error:s}",
"corrupted_toml": "{Ressource:s} kaynağından okunan bozuk toml (nedeni: {hata:s})",
"corrupted_yaml": "{Ressource:s} kaynağından bozuk yaml okunuyor (nedeni: {error:s})",
"corrupted_json": "{Ressource:s} adresinden okunan bozuk json (nedeni: {error:s})",
"unknown_error_reading_file": "{File:s} dosyasını okumaya çalışırken bilinmeyen hata (nedeni: {error:s})",
"cannot_write_file": "{File:s} dosyası yazılamadı (nedeni: {error:s})",
"cannot_open_file": "{File:s} dosyasıılamadı (nedeni: {error:s})",
"unknown_user": "Bilinmeyen '{user}' kullanıcı",
"unknown_group": "Bilinmeyen '{group}' grubu",
"invalid_usage": "Geçersiz kullanım, yardım görmek için --help iletin",
"invalid_token": "Geçersiz simge - lütfen kimlik doğrulaması yapın",
"info": "Bilgi:",
"folder_not_exist": "Klasör mevcut değil",
"folder_exists": "Klasör zaten var: '{path}'",
"file_not_exist": "Dosya mevcut değil: '{path}'",
"file_exists": "Dosya zaten var: '{path}'",
"deprecated_command_alias": "'{prog} {old}' kullanımdan kaldırıldı ve gelecekte kaldırılacak, bunun yerine '{prog} {new}' kullanın",
"deprecated_command": "'{prog} {command}' kullanımdan kaldırıldı ve gelecekte kaldırılacak"
}


@ -1,16 +1,17 @@
# -*- coding: utf-8 -*-
from moulinette.core import init_interface, MoulinetteError, MoulinetteSignals, Moulinette18n
from moulinette.core import (
init_interface,
MoulinetteError,
MoulinetteSignals,
Moulinette18n,
)
from moulinette.globals import init_moulinette_env
__title__ = 'moulinette'
__version__ = '0.1'
__author__ = ['Kload',
'jlebleu',
'titoko',
'beudbeud',
'npze']
__license__ = 'AGPL 3.0'
__title__ = "moulinette"
__version__ = "0.1"
__author__ = ["Kload", "jlebleu", "titoko", "beudbeud", "npze"]
__license__ = "AGPL 3.0"
__credits__ = """
Copyright (C) 2014 YUNOHOST.ORG
@ -28,8 +29,14 @@ __credits__ = """
along with this program; if not, see http://www.gnu.org/licenses
"""
__all__ = [
'init', 'api', 'cli', 'm18n', 'env',
'init_interface', 'MoulinetteError',
"init",
"api",
"cli",
"m18n",
"msignals",
"env",
"init_interface",
"MoulinetteError",
]
@ -40,6 +47,7 @@ m18n = Moulinette18n()
# Package functions
def init(logging_config=None, **kwargs):
"""Package initialization
@ -61,13 +69,15 @@ def init(logging_config=None, **kwargs):
configure_logging(logging_config)
# Add library directory to python path
sys.path.insert(0, init_moulinette_env()['LIB_DIR'])
sys.path.insert(0, init_moulinette_env()["LIB_DIR"])
# Easy access to interfaces
def api(namespaces, host='localhost', port=80, routes={},
use_websocket=True, use_cache=True):
def api(
namespaces, host="localhost", port=80, routes={}, use_websocket=True, use_cache=True
):
"""Web server (API) interface
Run a HTTP server with the moulinette for an API usage.
@ -84,29 +94,33 @@ def api(namespaces, host='localhost', port=80, routes={},
"""
try:
moulinette = init_interface('api',
kwargs={
'routes': routes,
'use_websocket': use_websocket
},
actionsmap={
'namespaces': namespaces,
'use_cache': use_cache
}
moulinette = init_interface(
"api",
kwargs={"routes": routes, "use_websocket": use_websocket},
actionsmap={"namespaces": namespaces, "use_cache": use_cache},
)
moulinette.run(host, port)
except MoulinetteError as e:
import logging
logging.getLogger(namespaces[0]).error(e.strerror)
return e.errno if hasattr(e, "errno") else 1
except KeyboardInterrupt:
import logging
logging.getLogger(namespaces[0]).info(m18n.g('operation_interrupted'))
logging.getLogger(namespaces[0]).info(m18n.g("operation_interrupted"))
return 0
def cli(namespaces, args, use_cache=True, output_as=None,
password=None, timeout=None, parser_kwargs={}):
def cli(
namespaces,
args,
use_cache=True,
output_as=None,
password=None,
timeout=None,
parser_kwargs={},
):
"""Command line interface
Execute an action with the moulinette from the CLI and print its
@ -125,16 +139,18 @@ def cli(namespaces, args, use_cache=True, output_as=None,
"""
try:
moulinette = init_interface('cli',
moulinette = init_interface(
"cli",
actionsmap={
'namespaces': namespaces,
'use_cache': use_cache,
'parser_kwargs': parser_kwargs,
"namespaces": namespaces,
"use_cache": use_cache,
"parser_kwargs": parser_kwargs,
},
)
moulinette.run(args, output_as=output_as, password=password, timeout=timeout)
except MoulinetteError as e:
import logging
logging.getLogger(namespaces[0]).error(e.strerror)
return 1
return 0
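
The reformatted api() and cli() entry points above keep their original signatures, so existing callers are unaffected. A rough sketch of how a launcher script might drive them, based only on the signatures visible in this diff (the "yunohost" namespace and the port number are illustrative, not taken from this commit):

    import sys

    import moulinette

    # Configure logging and the moulinette library path (init() is shown earlier in this file).
    moulinette.init()

    if "--api" in sys.argv:
        # Serve the namespace's actions map over HTTP via the bottle-based API interface.
        moulinette.api(["yunohost"], host="localhost", port=6787)
    else:
        # Run a single CLI action; cli() returns 0 on success and 1 on MoulinetteError.
        sys.exit(moulinette.cli(["yunohost"], sys.argv[1:]))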


@ -7,23 +7,23 @@ import yaml
import cPickle as pickle
from time import time
from collections import OrderedDict
from importlib import import_module
from moulinette import m18n, msignals
from moulinette.cache import open_cachefile
from moulinette.globals import init_moulinette_env
from moulinette.core import (MoulinetteError, MoulinetteLock)
from moulinette.interfaces import (
BaseActionsMapParser, GLOBAL_SECTION, TO_RETURN_PROP
)
from moulinette.core import MoulinetteError, MoulinetteLock
from moulinette.interfaces import BaseActionsMapParser, GLOBAL_SECTION, TO_RETURN_PROP
from moulinette.utils.log import start_action_logging
logger = logging.getLogger('moulinette.actionsmap')
logger = logging.getLogger("moulinette.actionsmap")
# Extra parameters ----------------------------------------------------
# Extra parameters definition
class _ExtraParameter(object):
"""
@ -86,21 +86,26 @@ class _ExtraParameter(object):
class CommentParameter(_ExtraParameter):
name = "comment"
skipped_iface = ['api']
skipped_iface = ["api"]
def __call__(self, message, arg_name, arg_value):
if arg_value:
return
return msignals.display(m18n.n(message))
@classmethod
def validate(klass, value, arg_name):
# Deprecated boolean or empty string
if isinstance(value, bool) or (isinstance(value, str) and not value):
logger.warning("expecting a non-empty string for extra parameter '%s' of "
"argument '%s'", klass.name, arg_name)
logger.warning(
"expecting a non-empty string for extra parameter '%s' of "
"argument '%s'",
klass.name,
arg_name,
)
value = arg_name
elif not isinstance(value, str):
raise TypeError("parameter value must be a string, got %r"
% value)
raise TypeError("parameter value must be a string, got %r" % value)
return value
@ -113,8 +118,9 @@ class AskParameter(_ExtraParameter):
when asking the argument value.
"""
name = 'ask'
skipped_iface = ['api']
name = "ask"
skipped_iface = ["api"]
def __call__(self, message, arg_name, arg_value):
if arg_value:
@ -130,12 +136,15 @@ class AskParameter(_ExtraParameter):
def validate(klass, value, arg_name):
# Deprecated boolean or empty string
if isinstance(value, bool) or (isinstance(value, str) and not value):
logger.warning("expecting a non-empty string for extra parameter '%s' of "
"argument '%s'", klass.name, arg_name)
logger.warning(
"expecting a non-empty string for extra parameter '%s' of "
"argument '%s'",
klass.name,
arg_name,
)
value = arg_name
elif not isinstance(value, str):
raise TypeError("parameter value must be a string, got %r"
% value)
raise TypeError("parameter value must be a string, got %r" % value)
return value
@ -148,7 +157,8 @@ class PasswordParameter(AskParameter):
when asking the password.
"""
name = 'password'
name = "password"
def __call__(self, message, arg_name, arg_value):
if arg_value:
@ -170,40 +180,45 @@ class PatternParameter(_ExtraParameter):
the message to display if it doesn't match.
"""
name = 'pattern'
name = "pattern"
def __call__(self, arguments, arg_name, arg_value):
pattern, message = (arguments[0], arguments[1])
# Use temporarly utf-8 encoded value
try:
v = unicode(arg_value, 'utf-8')
v = unicode(arg_value, "utf-8")
except:
v = arg_value
if v and not re.match(pattern, v or '', re.UNICODE):
logger.debug("argument value '%s' for '%s' doesn't match pattern '%s'",
v, arg_name, pattern)
if v and not re.match(pattern, v or "", re.UNICODE):
logger.warning(
"argument value '%s' for '%s' doesn't match pattern '%s'",
v,
arg_name,
pattern,
)
# Attempt to retrieve message translation
msg = m18n.n(message)
if msg == message:
msg = m18n.g(message)
raise MoulinetteError('invalid_argument',
argument=arg_name, error=msg)
raise MoulinetteError("invalid_argument", argument=arg_name, error=msg)
return arg_value
@staticmethod
def validate(value, arg_name):
# Deprecated string type
if isinstance(value, str):
logger.warning("expecting a list as extra parameter 'pattern' of "
"argument '%s'", arg_name)
value = [value, 'pattern_not_match']
logger.warning(
"expecting a list as extra parameter 'pattern' of " "argument '%s'",
arg_name,
)
value = [value, "pattern_not_match"]
elif not isinstance(value, list) or len(value) != 2:
raise TypeError("parameter value must be a list, got %r"
% value)
raise TypeError("parameter value must be a list, got %r" % value)
return value
@ -215,21 +230,19 @@ class RequiredParameter(_ExtraParameter):
The value of this parameter must be a boolean which is set to False by
default.
"""
name = 'required'
name = "required"
def __call__(self, required, arg_name, arg_value):
if required and (arg_value is None or arg_value == ''):
logger.debug("argument '%s' is required",
arg_name)
raise MoulinetteError('argument_required',
argument=arg_name)
if required and (arg_value is None or arg_value == ""):
logger.warning("argument '%s' is required", arg_name)
raise MoulinetteError("argument_required", argument=arg_name)
return arg_value
@staticmethod
def validate(value, arg_name):
if not isinstance(value, bool):
raise TypeError("parameter value must be a list, got %r"
% value)
raise TypeError("parameter value must be a boolean, got %r" % value)
return value
@ -238,8 +251,13 @@ The list of available extra parameters classes. It will keep to this list
order on argument parsing.
"""
extraparameters_list = [CommentParameter, AskParameter, PasswordParameter,
RequiredParameter, PatternParameter]
extraparameters_list = [
CommentParameter,
AskParameter,
PasswordParameter,
RequiredParameter,
PatternParameter,
]
# Extra parameters argument Parser
@ -264,7 +282,7 @@ class ExtraArgumentParser(object):
if iface in klass.skipped_iface:
continue
self.extra[klass.name] = klass
logger.debug('extra parameter classes loaded: %s', self.extra.keys())
logger.debug("extra parameter classes loaded: %s", self.extra.keys())
def validate(self, arg_name, parameters):
"""
@ -286,9 +304,14 @@ class ExtraArgumentParser(object):
# Validate parameter value
parameters[p] = klass.validate(v, arg_name)
except Exception as e:
logger.error("unable to validate extra parameter '%s' "
"for argument '%s': %s", p, arg_name, e)
raise MoulinetteError('error_see_log')
logger.error(
"unable to validate extra parameter '%s' "
"for argument '%s': %s",
p,
arg_name,
e,
)
raise MoulinetteError("error_see_log")
return parameters
@ -353,12 +376,15 @@ class ExtraArgumentParser(object):
# Main class ----------------------------------------------------------
def ordered_yaml_load(stream):
class OrderedLoader(yaml.Loader):
pass
OrderedLoader.add_constructor(
yaml.resolver.BaseResolver.DEFAULT_MAPPING_TAG,
lambda loader, node: OrderedDict(loader.construct_pairs(node)))
lambda loader, node: OrderedDict(loader.construct_pairs(node)),
)
return yaml.load(stream, OrderedLoader)
@ -386,16 +412,15 @@ class ActionsMap(object):
"""
def __init__(self, parser_class, namespaces=[], use_cache=True,
parser_kwargs={}):
def __init__(self, parser_class, namespaces=[], use_cache=True, parser_kwargs={}):
if not issubclass(parser_class, BaseActionsMapParser):
raise ValueError("Invalid parser class '%s'" % parser_class.__name__)
self.parser_class = parser_class
self.use_cache = use_cache
moulinette_env = init_moulinette_env()
DATA_DIR = moulinette_env['DATA_DIR']
CACHE_DIR = moulinette_env['CACHE_DIR']
DATA_DIR = moulinette_env["DATA_DIR"]
CACHE_DIR = moulinette_env["CACHE_DIR"]
if len(namespaces) == 0:
namespaces = self.get_namespaces()
@ -405,13 +430,13 @@ class ActionsMap(object):
for n in namespaces:
logger.debug("loading actions map namespace '%s'", n)
actionsmap_yml = '%s/actionsmap/%s.yml' % (DATA_DIR, n)
actionsmap_yml = "%s/actionsmap/%s.yml" % (DATA_DIR, n)
actionsmap_yml_stat = os.stat(actionsmap_yml)
actionsmap_pkl = '%s/actionsmap/%s-%d-%d.pkl' % (
actionsmap_pkl = "%s/actionsmap/%s-%d-%d.pkl" % (
CACHE_DIR,
n,
actionsmap_yml_stat.st_size,
actionsmap_yml_stat.st_mtime
actionsmap_yml_stat.st_mtime,
)
if use_cache and os.path.exists(actionsmap_pkl):
@ -442,25 +467,37 @@ class ActionsMap(object):
"""Return the instance of the interface's actions map parser"""
return self._parser
def get_authenticator(self, profile='default'):
"""Get an authenticator instance
def get_authenticator_for_profile(self, auth_profile):
Retrieve the authenticator for the given profile and return a
new instance.
Keyword arguments:
- profile -- An authenticator profile name
Returns:
A new _BaseAuthenticator derived instance
"""
# Fetch the configuration for the authenticator module as defined in the actionsmap
try:
auth = self.parser.get_global_conf('authenticator', profile)[1]
auth_conf = self.parser.global_conf["authenticator"][auth_profile]
except KeyError:
raise ValueError("Unknown authenticator profile '%s'" % profile)
raise ValueError("Unknown authenticator profile '%s'" % auth_profile)
# Load and initialize the authenticator module
try:
mod = import_module("moulinette.authenticators.%s" % auth_conf["vendor"])
except ImportError:
logger.exception(
"unable to load authenticator vendor '%s'", auth_conf["vendor"]
)
raise MoulinetteError("error_see_log")
else:
return auth()
return mod.Authenticator(**auth_conf)
def check_authentication_if_required(self, args, **kwargs):
auth_profile = self.parser.auth_required(args, **kwargs)
if not auth_profile:
return
authenticator = self.get_authenticator_for_profile(auth_profile)
auth = msignals.authenticate(authenticator)
if not auth.is_authenticated:
raise MoulinetteError("authentication_required_long")
def process(self, args, timeout=None, **kwargs):
"""
@ -473,11 +510,15 @@ class ActionsMap(object):
- **kwargs -- Additional interface arguments
"""
# Perform authentication if needed
self.check_authentication_if_required(args, **kwargs)
# Parse arguments
arguments = vars(self.parser.parse_args(args, **kwargs))
# Retrieve tid and parse arguments with extra parameters
tid = arguments.pop('_tid')
tid = arguments.pop("_tid")
arguments = self.extraparser.parse_args(tid, arguments)
# Return immediately if a value is defined
@ -487,38 +528,57 @@ class ActionsMap(object):
# Retrieve action information
if len(tid) == 4:
namespace, category, subcategory, action = tid
func_name = '%s_%s_%s' % (category, subcategory.replace('-', '_'), action.replace('-', '_'))
full_action_name = "%s.%s.%s.%s" % (namespace, category, subcategory, action)
func_name = "%s_%s_%s" % (
category,
subcategory.replace("-", "_"),
action.replace("-", "_"),
)
full_action_name = "%s.%s.%s.%s" % (
namespace,
category,
subcategory,
action,
)
else:
assert len(tid) == 3
namespace, category, action = tid
subcategory = None
func_name = '%s_%s' % (category, action.replace('-', '_'))
func_name = "%s_%s" % (category, action.replace("-", "_"))
full_action_name = "%s.%s.%s" % (namespace, category, action)
# Lock the moulinette for the namespace
with MoulinetteLock(namespace, timeout):
start = time()
try:
mod = __import__('%s.%s' % (namespace, category),
globals=globals(), level=0,
fromlist=[func_name])
logger.debug('loading python module %s took %.3fs',
'%s.%s' % (namespace, category), time() - start)
mod = __import__(
"%s.%s" % (namespace, category),
globals=globals(),
level=0,
fromlist=[func_name],
)
logger.debug(
"loading python module %s took %.3fs",
"%s.%s" % (namespace, category),
time() - start,
)
func = getattr(mod, func_name)
except (AttributeError, ImportError):
logger.exception("unable to load function %s.%s",
namespace, func_name)
raise MoulinetteError('error_see_log')
import traceback
traceback.print_exc()
logger.exception("unable to load function %s.%s", namespace, func_name)
raise MoulinetteError("error_see_log")
else:
log_id = start_action_logging()
if logger.isEnabledFor(logging.DEBUG):
# Log arguments in debug mode only for safety reasons
logger.info('processing action [%s]: %s with args=%s',
log_id, full_action_name, arguments)
logger.info(
"processing action [%s]: %s with args=%s",
log_id,
full_action_name,
arguments,
)
else:
logger.info('processing action [%s]: %s',
log_id, full_action_name)
logger.info("processing action [%s]: %s", log_id, full_action_name)
# Load translation and process the action
m18n.load_namespace(namespace)
@ -527,8 +587,7 @@ class ActionsMap(object):
return func(**arguments)
finally:
stop = time()
logger.debug('action [%s] executed in %.3fs',
log_id, stop - start)
logger.debug("action [%s] executed in %.3fs", log_id, stop - start)
@staticmethod
def get_namespaces():
@ -542,10 +601,10 @@ class ActionsMap(object):
namespaces = []
moulinette_env = init_moulinette_env()
DATA_DIR = moulinette_env['DATA_DIR']
DATA_DIR = moulinette_env["DATA_DIR"]
for f in os.listdir('%s/actionsmap' % DATA_DIR):
if f.endswith('.yml'):
for f in os.listdir("%s/actionsmap" % DATA_DIR):
if f.endswith(".yml"):
namespaces.append(f[:-4])
return namespaces
@ -562,8 +621,8 @@ class ActionsMap(object):
"""
moulinette_env = init_moulinette_env()
CACHE_DIR = moulinette_env['CACHE_DIR']
DATA_DIR = moulinette_env['DATA_DIR']
CACHE_DIR = moulinette_env["CACHE_DIR"]
DATA_DIR = moulinette_env["DATA_DIR"]
actionsmaps = {}
if not namespaces:
@ -574,23 +633,23 @@ class ActionsMap(object):
logger.debug("generating cache for actions map namespace '%s'", n)
# Read actions map from yaml file
am_file = '%s/actionsmap/%s.yml' % (DATA_DIR, n)
with open(am_file, 'r') as f:
am_file = "%s/actionsmap/%s.yml" % (DATA_DIR, n)
with open(am_file, "r") as f:
actionsmaps[n] = ordered_yaml_load(f)
# at installation, cachedir might not exist
if os.path.exists('%s/actionsmap/' % CACHE_DIR):
if os.path.exists("%s/actionsmap/" % CACHE_DIR):
# clean old cached files
for i in os.listdir('%s/actionsmap/' % CACHE_DIR):
for i in os.listdir("%s/actionsmap/" % CACHE_DIR):
if i.endswith(".pkl"):
os.remove('%s/actionsmap/%s' % (CACHE_DIR, i))
os.remove("%s/actionsmap/%s" % (CACHE_DIR, i))
# Cache actions map into pickle file
am_file_stat = os.stat(am_file)
pkl = '%s-%d-%d.pkl' % (n, am_file_stat.st_size, am_file_stat.st_mtime)
pkl = "%s-%d-%d.pkl" % (n, am_file_stat.st_size, am_file_stat.st_mtime)
with open_cachefile(pkl, 'w', subdir='actionsmap') as f:
with open_cachefile(pkl, "w", subdir="actionsmap") as f:
pickle.dump(actionsmaps[n], f)
return actionsmaps
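Because the pickle name embeds the size and mtime of the source .yml, any edit to an actions map invalidates its cache automatically; loading then only has to check whether the corresponding .pkl exists. A small sketch of the naming convention (paths are illustrative):

import os


def actionsmap_cache_name(namespace, yml_path):
    st = os.stat(yml_path)
    # e.g. "yunohost-54321-1589466000.pkl"
    return "%s-%d-%d.pkl" % (namespace, st.st_size, st.st_mtime)

If the .yml is touched, the computed name no longer matches any cached file, so the map is re-parsed and a fresh pickle is written in its place.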
@ -631,86 +690,97 @@ class ActionsMap(object):
# * actionsmap is the actual actionsmap that we care about
for namespace, actionsmap in actionsmaps.items():
# Retrieve global parameters
_global = actionsmap.pop('_global', {})
_global = actionsmap.pop("_global", {})
# Set the global configuration to use for the parser.
top_parser.set_global_conf(_global['configuration'])
top_parser.set_global_conf(_global["configuration"])
if top_parser.has_global_parser():
top_parser.add_global_arguments(_global['arguments'])
top_parser.add_global_arguments(_global["arguments"])
# category_name is stuff like "user", "domain", "hooks"...
# category_values is the values of this category (like actions)
for category_name, category_values in actionsmap.items():
if "actions" in category_values:
actions = category_values.pop('actions')
actions = category_values.pop("actions")
else:
actions = {}
if "subcategories" in category_values:
subcategories = category_values.pop('subcategories')
subcategories = category_values.pop("subcategories")
else:
subcategories = {}
# Get category parser
category_parser = top_parser.add_category_parser(category_name,
**category_values)
category_parser = top_parser.add_category_parser(
category_name, **category_values
)
# action_name is like "list" of "domain list"
# action_options are the values
for action_name, action_options in actions.items():
arguments = action_options.pop('arguments', {})
arguments = action_options.pop("arguments", {})
tid = (namespace, category_name, action_name)
# Get action parser
action_parser = category_parser.add_action_parser(action_name,
tid,
**action_options)
action_parser = category_parser.add_action_parser(
action_name, tid, **action_options
)
if action_parser is None: # No parser for the action
continue
# Store action identifier and add arguments
action_parser.set_defaults(_tid=tid)
action_parser.add_arguments(arguments,
action_parser.add_arguments(
arguments,
extraparser=self.extraparser,
format_arg_names=top_parser.format_arg_names,
validate_extra=validate_extra)
validate_extra=validate_extra,
)
if 'configuration' in action_options:
category_parser.set_conf(tid, action_options['configuration'])
if "configuration" in action_options:
category_parser.set_conf(tid, action_options["configuration"])
# subcategory_name is like "cert" in "domain cert status"
# subcategory_values is the values of this subcategory (like actions)
for subcategory_name, subcategory_values in subcategories.items():
actions = subcategory_values.pop('actions')
actions = subcategory_values.pop("actions")
# Get subcategory parser
subcategory_parser = category_parser.add_subcategory_parser(subcategory_name, **subcategory_values)
subcategory_parser = category_parser.add_subcategory_parser(
subcategory_name, **subcategory_values
)
# action_name is like "status" of "domain cert status"
# action_options are the values
for action_name, action_options in actions.items():
arguments = action_options.pop('arguments', {})
arguments = action_options.pop("arguments", {})
tid = (namespace, category_name, subcategory_name, action_name)
try:
# Get action parser
action_parser = subcategory_parser.add_action_parser(action_name, tid, **action_options)
action_parser = subcategory_parser.add_action_parser(
action_name, tid, **action_options
)
except AttributeError:
# No parser for the action
continue
# Store action identifier and add arguments
action_parser.set_defaults(_tid=tid)
action_parser.add_arguments(arguments,
action_parser.add_arguments(
arguments,
extraparser=self.extraparser,
format_arg_names=top_parser.format_arg_names,
validate_extra=validate_extra)
validate_extra=validate_extra,
)
if 'configuration' in action_options:
category_parser.set_conf(tid, action_options['configuration'])
if "configuration" in action_options:
category_parser.set_conf(
tid, action_options["configuration"]
)
return top_parser
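The double loop above follows the YAML layout: each namespace file has a _global section plus categories; a category holds actions and, optionally, subcategories, and every action may carry arguments and a configuration block. A minimal sketch of that shape as a Python dict (the real files are YAML and the names below are illustrative):

# Illustrative shape of one parsed namespace, as consumed by the loops above.
actionsmap = {
    "_global": {
        "configuration": {
            "authenticate": ["api"],
            "authenticator": {
                "default": {"vendor": "ldap", "parameters": {}, "help": None},
            },
        },
    },
    "user": {  # category
        "actions": {
            "list": {  # tid becomes ("mynamespace", "user", "list")
                "api": "GET /users",
                "arguments": {"--fields": {"nargs": "+"}},
                "configuration": {"authenticate": "all"},
            },
        },
        "subcategories": {},
    },
}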

View file

@ -1,16 +1,19 @@
# -*- coding: utf-8 -*-
import gnupg
import os
import logging
import hashlib
import hmac
from moulinette.cache import open_cachefile
from moulinette.cache import open_cachefile, get_cachedir, cachefile_exists
from moulinette.core import MoulinetteError
logger = logging.getLogger('moulinette.authenticator')
logger = logging.getLogger("moulinette.authenticator")
# Base Class -----------------------------------------------------------
class BaseAuthenticator(object):
"""Authenticator base representation
@ -29,8 +32,11 @@ class BaseAuthenticator(object):
"""
def __init__(self, name):
def __init__(self, name, vendor, parameters, extra):
self._name = name
self.vendor = vendor
self.is_authenticated = False
self.extra = extra
@property
def name(self):
@ -43,12 +49,6 @@ class BaseAuthenticator(object):
"""The vendor name of the authenticator"""
vendor = None
@property
def is_authenticated(self):
"""Either the instance is authenticated or not"""
raise NotImplementedError("derived class '%s' must override this property" %
self.__class__.__name__)
# Virtual methods
# Each authenticator classes must implement these methods.
@ -62,8 +62,9 @@ class BaseAuthenticator(object):
- password -- A clear text password
"""
raise NotImplementedError("derived class '%s' must override this method" %
self.__class__.__name__)
raise NotImplementedError(
"derived class '%s' must override this method" % self.__class__.__name__
)
# Authentication methods
@ -75,7 +76,7 @@ class BaseAuthenticator(object):
instance is returned and the session is registered for the token
if 'token' and 'password' are given.
The token is composed by the session identifier and a session
hash - to use for encryption - as a 2-tuple.
hash (the "true token") - to use for encryption - as a 2-tuple.
Keyword arguments:
- password -- A clear text password
@ -87,81 +88,139 @@ class BaseAuthenticator(object):
"""
if self.is_authenticated:
return self
store_session = True if password and token else False
if token:
try:
# Extract id and hash from token
s_id, s_hash = token
except TypeError as e:
logger.error("unable to extract token parts from '%s' because '%s'", token, e)
if password is None:
raise MoulinetteError('error_see_log')
logger.info("session will not be stored")
store_session = False
else:
if password is None:
# Retrieve session
password = self._retrieve_session(s_id, s_hash)
#
# Authenticate using the password
#
if password:
try:
# Attempt to authenticate
self.authenticate(password)
except MoulinetteError:
raise
except Exception as e:
logger.exception("authentication (name: '%s', vendor: '%s') fails because '%s'",
self.name, self.vendor, e)
raise MoulinetteError('unable_authenticate')
logger.exception(
"authentication (name: '%s', vendor: '%s') fails because '%s'",
self.name,
self.vendor,
e,
)
raise MoulinetteError("unable_authenticate")
# Store session
if store_session:
self.is_authenticated = True
# Store session for later using the provided (new) token if any
if token:
try:
self._store_session(s_id, s_hash, password)
s_id, s_token = token
self._store_session(s_id, s_token)
except Exception as e:
import traceback
traceback.print_exc()
logger.exception("unable to store session because %s", e)
else:
logger.debug("session has been stored")
#
# Authenticate using the token provided
#
elif token:
try:
s_id, s_token = token
# Attempt to authenticate
self._authenticate_session(s_id, s_token)
except MoulinetteError as e:
raise
except Exception as e:
logger.exception(
"authentication (name: '%s', vendor: '%s') fails because '%s'",
self.name,
self.vendor,
e,
)
raise MoulinetteError("unable_authenticate")
else:
self.is_authenticated = True
#
# No credentials given, can't authenticate
#
else:
raise MoulinetteError("unable_authenticate")
return self
# Private methods
def _open_sessionfile(self, session_id, mode='r'):
def _open_sessionfile(self, session_id, mode="r"):
"""Open a session file for this instance in given mode"""
return open_cachefile('%s.asc' % session_id, mode,
subdir='session/%s' % self.name)
return open_cachefile(
"%s.asc" % session_id, mode, subdir="session/%s" % self.name
)
def _store_session(self, session_id, session_hash, password):
"""Store a session and its associated password"""
gpg = gnupg.GPG()
gpg.encoding = 'utf-8'
def _session_exists(self, session_id):
"""Check a session exists"""
return cachefile_exists("%s.asc" % session_id, subdir="session/%s" % self.name)
# Encrypt the password using the session hash
s = str(gpg.encrypt(password, None, symmetric=True, passphrase=session_hash))
assert len(s), "For some reason GPG can't perform encryption, maybe check /root/.gnupg/gpg.conf or re-run with gpg = gnupg.GPG(verbose=True) ?"
def _store_session(self, session_id, session_token):
"""Store a session to be able to use it later to reauthenticate"""
with self._open_sessionfile(session_id, 'w') as f:
f.write(s)
# We store a hash of the session_id and the session_token (the token is assumed to be secret)
to_hash = "{id}:{token}".format(id=session_id, token=session_token)
hash_ = hashlib.sha256(to_hash).hexdigest()
with self._open_sessionfile(session_id, "w") as f:
f.write(hash_)
def _retrieve_session(self, session_id, session_hash):
"""Retrieve a session and return its associated password"""
def _authenticate_session(self, session_id, session_token):
"""Checks session and token against the stored session token"""
if not self._session_exists(session_id):
raise MoulinetteError("session_expired")
try:
with self._open_sessionfile(session_id, 'r') as f:
enc_pwd = f.read()
# FIXME : shouldn't we also add a check that this session file
# is not too old ? e.g. not older than 24 hours ? idk...
with self._open_sessionfile(session_id, "r") as f:
stored_hash = f.read()
except IOError as e:
logger.debug("unable to retrieve session", exc_info=1)
raise MoulinetteError('unable_retrieve_session', exception=e)
raise MoulinetteError("unable_retrieve_session", exception=e)
else:
gpg = gnupg.GPG()
gpg.encoding = 'utf-8'
#
# session_id (or just id) : This is a unique id for the current session from the user. Not too important
# if this info gets stolen somehow. It is stored in the client's side (browser) using regular cookies.
#
# session_token (or just token) : This is a secret info, like some sort of ephemeral password,
# used to authenticate the session without the user having to retype the password all the time...
# - It is generated on our side during the initial auth of the user (which happens with the actual admin password)
# - It is stored on the client's side (browser) using (signed) cookies.
# - We also store it on our side in the form of a hash of {id}:{token} (cf. _store_session).
# We could simply store the raw token, but hashing it is an additional low-cost security layer
# in case this info gets exposed for some reason (e.g. bad file perms for reasons...)
#
# When the user comes back, we fetch the session_id and session_token from its cookies. Then we
# re-hash the {id}:{token} and compare it to the previously stored hash for this session_id ...
# If it matches, then the user is authenticated. Otherwise, the token is invalid.
#
to_hash = "{id}:{token}".format(id=session_id, token=session_token)
hash_ = hashlib.sha256(to_hash).hexdigest()
decrypted = gpg.decrypt(enc_pwd, passphrase=session_hash)
if decrypted.ok is not True:
error_message = "unable to decrypt password for the session: %s" % decrypted.status
logger.error(error_message)
raise MoulinetteError('unable_retrieve_session', exception=error_message)
return decrypted.data
if not hmac.compare_digest(hash_, stored_hash):
raise MoulinetteError("invalid_token")
else:
return
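In short: the server never keeps the token itself, only sha256("{id}:{token}"), and re-authentication is a constant-time comparison of the recomputed hash against the stored one. A standalone sketch of the same scheme (Python 2 strings, like the rest of the codebase, so no explicit encoding step):

import hashlib
import hmac

sessions = {}  # stand-in for the per-authenticator cache files


def store_session(session_id, session_token):
    to_hash = "{id}:{token}".format(id=session_id, token=session_token)
    sessions[session_id] = hashlib.sha256(to_hash).hexdigest()


def authenticate_session(session_id, session_token):
    if session_id not in sessions:
        return False  # the real code raises "session_expired" here
    to_hash = "{id}:{token}".format(id=session_id, token=session_token)
    candidate = hashlib.sha256(to_hash).hexdigest()
    return hmac.compare_digest(candidate, sessions[session_id])


store_session("abc123", "s3cr3t-ephemeral-token")
assert authenticate_session("abc123", "s3cr3t-ephemeral-token")
assert not authenticate_session("abc123", "stolen-or-forged-token")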
def _clean_session(self, session_id):
"""Clean a session cache
Remove cache for the session 'session_id' and for this authenticator profile
Keyword arguments:
- session_id -- The session id to clean
"""
sessiondir = get_cachedir("session")
try:
os.remove(os.path.join(sessiondir, self.name, "%s.asc" % session_id))
except OSError:
pass

View file

@ -0,0 +1,29 @@
# -*- coding: utf-8 -*-
import logging
from moulinette.core import MoulinetteError
from moulinette.authenticators import BaseAuthenticator
logger = logging.getLogger("moulinette.authenticator.dummy")
# Dummy authenticator implementation
class Authenticator(BaseAuthenticator):
"""Dummy authenticator used for tests
"""
vendor = "dummy"
def __init__(self, name, vendor, parameters, extra):
logger.debug("initialize authenticator dummy")
super(Authenticator, self).__init__(name, vendor, parameters, extra)
def authenticate(self, password=None):
if not password == self.name:
raise MoulinetteError("invalid_password")
return self
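The dummy vendor exists purely for the test suite: the expected "password" is simply the profile name. A hedged usage sketch, assuming moulinette (and its locales) is installed so that MoulinetteError can render its message:

from moulinette.authenticators.dummy import Authenticator
from moulinette.core import MoulinetteError

auth = Authenticator(
    name="test-profile", vendor="dummy", parameters={}, extra={"help": None}
)

auth.authenticate(password="test-profile")  # succeeds: password == name

try:
    auth.authenticate(password="wrong")
except MoulinetteError:
    pass  # raised with the "invalid_password" key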

View file

@ -2,22 +2,26 @@
# TODO: Use Python3 to remove this fix!
from __future__ import absolute_import
import os
import logging
import random
import string
import crypt
import ldap
import ldap.sasl
import time
import ldap.modlist as modlist
from moulinette.core import MoulinetteError
from moulinette import m18n
from moulinette.core import MoulinetteError, MoulinetteLdapIsDownError
from moulinette.authenticators import BaseAuthenticator
logger = logging.getLogger('moulinette.authenticator.ldap')
logger = logging.getLogger("moulinette.authenticator.ldap")
# LDAP Class Implementation --------------------------------------------
class Authenticator(BaseAuthenticator):
"""LDAP Authenticator
@ -33,23 +37,28 @@ class Authenticator(BaseAuthenticator):
"""
def __init__(self, name, uri, base_dn, user_rdn=None):
logger.debug("initialize authenticator '%s' with: uri='%s', "
"base_dn='%s', user_rdn='%s'", name, uri, base_dn, user_rdn)
super(Authenticator, self).__init__(name)
def __init__(self, name, vendor, parameters, extra):
self.uri = parameters["uri"]
self.basedn = parameters["base_dn"]
self.userdn = parameters["user_rdn"]
self.extra = extra
self.sasldn = "cn=external,cn=auth"
self.adminuser = "admin"
self.admindn = "cn=%s,dc=yunohost,dc=org" % self.adminuser
logger.debug(
"initialize authenticator '%s' with: uri='%s', "
"base_dn='%s', user_rdn='%s'",
name,
self._get_uri(),
self.basedn,
self.userdn,
)
super(Authenticator, self).__init__(name, vendor, parameters, extra)
self.uri = uri
self.basedn = base_dn
if user_rdn:
self.userdn = user_rdn
if 'cn=external,cn=auth' in user_rdn:
if self.userdn and self.sasldn in self.userdn:
self.authenticate(None)
else:
self.con = None
else:
# Initialize anonymous usage
self.userdn = ''
self.authenticate(None)
def __del__(self):
"""Disconnect and free ressources"""
@ -58,40 +67,51 @@ class Authenticator(BaseAuthenticator):
# Implement virtual properties
vendor = 'ldap'
@property
def is_authenticated(self):
if self.con is None:
return False
try:
# Retrieve identity
who = self.con.whoami_s()
except Exception as e:
logger.warning("Error during ldap authentication process: %s", e)
return False
else:
if who[3:] == self.userdn:
return True
return False
vendor = "ldap"
# Implement virtual methods
def authenticate(self, password):
try:
con = ldap.ldapobject.ReconnectLDAPObject(self.uri, retry_max=10, retry_delay=0.5)
def authenticate(self, password=None):
def _reconnect():
con = ldap.ldapobject.ReconnectLDAPObject(
self._get_uri(), retry_max=10, retry_delay=0.5
)
if self.userdn:
if 'cn=external,cn=auth' in self.userdn:
con.sasl_non_interactive_bind_s('EXTERNAL')
if self.sasldn in self.userdn:
con.sasl_non_interactive_bind_s("EXTERNAL")
else:
con.simple_bind_s(self.userdn, password)
else:
con.simple_bind_s()
return con
try:
con = _reconnect()
except ldap.INVALID_CREDENTIALS:
raise MoulinetteError('invalid_password')
raise MoulinetteError("invalid_password")
except ldap.SERVER_DOWN:
logger.exception('unable to reach the server to authenticate')
raise MoulinetteError('ldap_server_down')
# ldap is down, attempt to restart it before really failing
logger.warning(m18n.g("ldap_server_is_down_restart_it"))
os.system("systemctl restart slapd")
time.sleep(10) # wait 10 seconds to make sure slapd has restarted
try:
con = _reconnect()
except ldap.SERVER_DOWN:
raise MoulinetteLdapIsDownError("ldap_server_down")
# Check that we are indeed logged in with the right identity
try:
# whoami_s returns "dn:...", so strip the leading 3 characters
who = con.whoami_s()[3:]
except Exception as e:
logger.warning("Error during ldap authentication process: %s", e)
raise
else:
# FIXME: During SASL bind, whoami from the test server returns the admindn, while the userdn is returned normally:
if not (who == self.admindn or who == self.userdn):
raise MoulinetteError("Not logged in with the expected userdn ?!")
else:
self.con = con
self._ensure_password_uses_strong_hash(password)
@ -99,13 +119,14 @@ class Authenticator(BaseAuthenticator):
def _ensure_password_uses_strong_hash(self, password):
# XXX this has been copy pasted from YunoHost, should we put that into moulinette?
def _hash_user_password(password):
char_set = string.ascii_uppercase + string.ascii_lowercase + string.digits + "./"
salt = ''.join([random.SystemRandom().choice(char_set) for x in range(16)])
salt = '$6$' + salt + '$'
return '{CRYPT}' + crypt.crypt(str(password), salt)
char_set = (
string.ascii_uppercase + string.ascii_lowercase + string.digits + "./"
)
salt = "".join([random.SystemRandom().choice(char_set) for x in range(16)])
salt = "$6$" + salt + "$"
return "{CRYPT}" + crypt.crypt(str(password), salt)
hashed_password = self.search("cn=admin,dc=yunohost,dc=org",
attrs=["userPassword"])[0]
hashed_password = self.search(self.admindn, attrs=["userPassword"])[0]
# post-install situation, password is not already set
if "userPassword" not in hashed_password or not hashed_password["userPassword"]:
@ -113,14 +134,15 @@ class Authenticator(BaseAuthenticator):
# we aren't using sha-512 but something else that is weaker, proceed to upgrade
if not hashed_password["userPassword"][0].startswith("{CRYPT}$6$"):
self.update("cn=admin", {
"userPassword": _hash_user_password(password),
})
self.update(
"cn=%s" % self.adminuser,
{"userPassword": [_hash_user_password(password)]},
)
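The check above relies on the "{CRYPT}$6$" prefix, which is what a SHA-512 crypt(3) hash looks like once wrapped for LDAP; anything else (older, weaker hash kinds) triggers a re-hash with the password that was just validated. A standalone sketch of the helper's output, same logic as _hash_user_password (the hash differs on every run because of the random salt):

import crypt
import random
import string


def hash_user_password(password):
    char_set = string.ascii_uppercase + string.ascii_lowercase + string.digits + "./"
    salt = "".join(random.SystemRandom().choice(char_set) for _ in range(16))
    return "{CRYPT}" + crypt.crypt(str(password), "$6$" + salt + "$")


hashed = hash_user_password("some admin password")
assert hashed.startswith("{CRYPT}$6$")  # exactly the prefix the upgrade check wants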
# Additional LDAP methods
# TODO: Review these methods
def search(self, base=None, filter='(objectClass=*)', attrs=['dn']):
def search(self, base=None, filter="(objectClass=*)", attrs=["dn"]):
"""Search in LDAP base
Perform an LDAP search operation with given arguments and return
@ -143,17 +165,16 @@ class Authenticator(BaseAuthenticator):
except Exception as e:
raise MoulinetteError(
"error during LDAP search operation with: base='%s', "
"filter='%s', attrs=%s and exception %s"
% (base, filter, attrs, e),
raw_msg=True
"filter='%s', attrs=%s and exception %s" % (base, filter, attrs, e),
raw_msg=True,
)
result_list = []
if not attrs or 'dn' not in attrs:
if not attrs or "dn" not in attrs:
result_list = [entry for dn, entry in result]
else:
for dn, entry in result:
entry['dn'] = [dn]
entry["dn"] = [dn]
result_list.append(entry)
return result_list
@ -169,7 +190,7 @@ class Authenticator(BaseAuthenticator):
Boolean | MoulinetteError
"""
dn = rdn + ',' + self.basedn
dn = rdn + "," + self.basedn
ldif = modlist.addModlist(attr_dict)
try:
@ -177,9 +198,8 @@ class Authenticator(BaseAuthenticator):
except Exception as e:
raise MoulinetteError(
"error during LDAP add operation with: rdn='%s', "
"attr_dict=%s and exception %s"
% (rdn, attr_dict, e),
raw_msg=True
"attr_dict=%s and exception %s" % (rdn, attr_dict, e),
raw_msg=True,
)
else:
return True
@ -195,14 +215,14 @@ class Authenticator(BaseAuthenticator):
Boolean | MoulinetteError
"""
dn = rdn + ',' + self.basedn
dn = rdn + "," + self.basedn
try:
self.con.delete_s(dn)
except Exception as e:
raise MoulinetteError(
"error during LDAP delete operation with: rdn='%s' and exception %s"
% (rdn, e),
raw_msg=True
raw_msg=True,
)
else:
return True
@ -220,7 +240,7 @@ class Authenticator(BaseAuthenticator):
Boolean | MoulinetteError
"""
dn = rdn + ',' + self.basedn
dn = rdn + "," + self.basedn
actual_entry = self.search(base=dn, attrs=None)
ldif = modlist.modifyModlist(actual_entry[0], attr_dict, ignore_oldexistent=1)
@ -231,7 +251,8 @@ class Authenticator(BaseAuthenticator):
try:
if new_rdn:
self.con.rename_s(dn, new_rdn)
dn = new_rdn + ',' + self.basedn
new_base = dn.split(",", 1)[1]
dn = new_rdn + "," + new_base
self.con.modify_ext_s(dn, ldif)
except Exception as e:
@ -239,7 +260,7 @@ class Authenticator(BaseAuthenticator):
"error during LDAP update operation with: rdn='%s', "
"attr_dict=%s, new_rdn=%s and exception: %s"
% (rdn, attr_dict, new_rdn, e),
raw_msg=True
raw_msg=True,
)
else:
return True
@ -257,11 +278,16 @@ class Authenticator(BaseAuthenticator):
"""
attr_found = self.get_conflict(value_dict)
if attr_found:
logger.info("attribute '%s' with value '%s' is not unique",
attr_found[0], attr_found[1])
raise MoulinetteError('ldap_attribute_already_exists',
logger.info(
"attribute '%s' with value '%s' is not unique",
attr_found[0],
attr_found[1],
)
raise MoulinetteError(
"ldap_attribute_already_exists",
attribute=attr_found[0],
value=attr_found[1])
value=attr_found[1],
)
return True
def get_conflict(self, value_dict, base_dn=None):
@ -272,12 +298,15 @@ class Authenticator(BaseAuthenticator):
value_dict -- Dictionary of attributes/values to check
Returns:
None | list with First conflict attribute name and value
None | tuple with First conflict attribute name and value
"""
for attr, value in value_dict.items():
if not self.search(base=base_dn, filter=attr + '=' + value):
if not self.search(base=base_dn, filter=attr + "=" + value):
continue
else:
return (attr, value)
return None
def _get_uri(self):
return self.uri

View file

@ -5,7 +5,7 @@ import os
from moulinette.globals import init_moulinette_env
def get_cachedir(subdir='', make_dir=True):
def get_cachedir(subdir="", make_dir=True):
"""Get the path to a cache directory
Return the path to the cache directory from an optional
@ -16,7 +16,7 @@ def get_cachedir(subdir='', make_dir=True):
- make_dir -- False to not make the directory if it does not exist
"""
CACHE_DIR = init_moulinette_env()['CACHE_DIR']
CACHE_DIR = init_moulinette_env()["CACHE_DIR"]
path = os.path.join(CACHE_DIR, subdir)
@ -25,7 +25,7 @@ def get_cachedir(subdir='', make_dir=True):
return path
def open_cachefile(filename, mode='r', **kwargs):
def open_cachefile(filename, mode="r", subdir=""):
"""Open a cache file and return a stream
Attempt to open in 'mode' the cache file 'filename' from the
@ -39,9 +39,13 @@ def open_cachefile(filename, mode='r', **kwargs):
- **kwargs -- Optional arguments for get_cachedir
"""
# Set make_dir if not given
kwargs['make_dir'] = kwargs.get('make_dir',
True if mode[0] == 'w' else False)
cache_dir = get_cachedir(**kwargs)
cache_dir = get_cachedir(subdir, make_dir=True if mode[0] == "w" else False)
file_path = os.path.join(cache_dir, filename)
return open(file_path, mode)
def cachefile_exists(filename, subdir=""):
cache_dir = get_cachedir(subdir, make_dir=False)
file_path = os.path.join(cache_dir, filename)
return os.path.exists(file_path)
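open_cachefile() now takes an explicit subdir and only creates the directory when opening for writing, while cachefile_exists() is the cheap read-only probe used by _session_exists() above. A usage sketch; the MOULINETTE_CACHE_DIR override is only there to keep the example self-contained and writable without root:

import os

os.environ["MOULINETTE_CACHE_DIR"] = "/tmp/moulinette-cache-demo"  # illustrative path

from moulinette.cache import cachefile_exists, open_cachefile

with open_cachefile("abc123.asc", "w", subdir="session/default") as f:
    f.write("sha256-hash-of-id-and-token")

assert cachefile_exists("abc123.asc", subdir="session/default")
assert not cachefile_exists("missing.asc", subdir="session/default")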

View file

@ -9,10 +9,9 @@ from importlib import import_module
import moulinette
from moulinette.globals import init_moulinette_env
from moulinette.cache import get_cachedir
logger = logging.getLogger('moulinette.core')
logger = logging.getLogger("moulinette.core")
def during_unittests_run():
@ -21,6 +20,7 @@ def during_unittests_run():
# Internationalization -------------------------------------------------
class Translator(object):
"""Internationalization class
@ -34,15 +34,16 @@ class Translator(object):
"""
def __init__(self, locale_dir, default_locale='en'):
def __init__(self, locale_dir, default_locale="en"):
self.locale_dir = locale_dir
self.locale = default_locale
self._translations = {}
# Attempt to load default translations
if not self._load_translations(default_locale):
logger.error("unable to load locale '%s' from '%s'",
default_locale, locale_dir)
logger.error(
"unable to load locale '%s' from '%s'", default_locale, locale_dir
)
self.default_locale = default_locale
def get_locales(self):
@ -50,7 +51,7 @@ class Translator(object):
locales = []
for f in os.listdir(self.locale_dir):
if f.endswith('.json'):
if f.endswith(".json"):
# TODO: Validate locale
locales.append(f[:-5])
return locales
@ -70,8 +71,11 @@ class Translator(object):
"""
if locale not in self._translations:
if not self._load_translations(locale):
logger.debug("unable to load locale '%s' from '%s'",
self.default_locale, self.locale_dir)
logger.debug(
"unable to load locale '%s' from '%s'",
self.default_locale,
self.locale_dir,
)
# Revert to default locale
self.locale = self.default_locale
@ -94,11 +98,18 @@ class Translator(object):
failed_to_format = False
if key in self._translations.get(self.locale, {}):
try:
return self._translations[self.locale][key].encode('utf-8').format(*args, **kwargs)
return (
self._translations[self.locale][key]
.encode("utf-8")
.format(*args, **kwargs)
)
except KeyError as e:
unformatted_string = self._translations[self.locale][key].encode('utf-8')
error_message = "Failed to format translated string '%s': '%s' with arguments '%s' and '%s, raising error: %s(%s) (don't panic this is just a warning)" % (
key, unformatted_string, args, kwargs, e.__class__.__name__, e
unformatted_string = self._translations[self.locale][key].encode(
"utf-8"
)
error_message = (
"Failed to format translated string '%s': '%s' with arguments '%s' and '%s, raising error: %s(%s) (don't panic this is just a warning)"
% (key, unformatted_string, args, kwargs, e.__class__.__name__, e)
)
if not during_unittests_run():
@ -108,25 +119,37 @@ class Translator(object):
failed_to_format = True
if failed_to_format or (self.default_locale != self.locale and key in self._translations.get(self.default_locale, {})):
logger.info("untranslated key '%s' for locale '%s'",
key, self.locale)
if failed_to_format or (
self.default_locale != self.locale
and key in self._translations.get(self.default_locale, {})
):
logger.info("untranslated key '%s' for locale '%s'", key, self.locale)
try:
return self._translations[self.default_locale][key].encode('utf-8').format(*args, **kwargs)
return (
self._translations[self.default_locale][key]
.encode("utf-8")
.format(*args, **kwargs)
)
except KeyError as e:
unformatted_string = self._translations[self.default_locale][key].encode('utf-8')
error_message = "Failed to format translatable string '%s': '%s' with arguments '%s' and '%s', raising error: %s(%s) (don't panic this is just a warning)" % (
key, unformatted_string, args, kwargs, e.__class__.__name__, e
unformatted_string = self._translations[self.default_locale][
key
].encode("utf-8")
error_message = (
"Failed to format translatable string '%s': '%s' with arguments '%s' and '%s', raising error: %s(%s) (don't panic this is just a warning)"
% (key, unformatted_string, args, kwargs, e.__class__.__name__, e)
)
if not during_unittests_run():
logger.exception(error_message)
else:
raise Exception(error_message)
return self._translations[self.default_locale][key].encode('utf-8')
return self._translations[self.default_locale][key].encode("utf-8")
error_message = "unable to retrieve string to translate with key '%s' for default locale 'locales/%s.json' file (don't panic this is just a warning)" % (key, self.default_locale)
error_message = (
"unable to retrieve string to translate with key '%s' for default locale 'locales/%s.json' file (don't panic this is just a warning)"
% (key, self.default_locale)
)
if not during_unittests_run():
logger.exception(error_message)
@ -153,8 +176,8 @@ class Translator(object):
return True
try:
with open('%s/%s.json' % (self.locale_dir, locale), 'r') as f:
j = json.load(f, 'utf-8')
with open("%s/%s.json" % (self.locale_dir, locale), "r") as f:
j = json.load(f, "utf-8")
except IOError:
return False
else:
@ -175,13 +198,12 @@ class Moulinette18n(object):
"""
def __init__(self, default_locale='en'):
def __init__(self, default_locale="en"):
self.default_locale = default_locale
self.locale = default_locale
moulinette_env = init_moulinette_env()
self.locales_dir = moulinette_env['LOCALES_DIR']
self.lib_dir = moulinette_env['LIB_DIR']
self.locales_dir = moulinette_env["LOCALES_DIR"]
# Init global translator
self._global = Translator(self.locales_dir, default_locale)
@ -202,8 +224,10 @@ class Moulinette18n(object):
"""
if namespace not in self._namespaces:
# Create new Translator object
translator = Translator('%s/%s/locales' % (self.lib_dir, namespace),
self.default_locale)
lib_dir = init_moulinette_env()["LIB_DIR"]
translator = Translator(
"%s/%s/locales" % (lib_dir, namespace), self.default_locale
)
translator.set_locale(self.locale)
self._namespaces[namespace] = translator
@ -273,21 +297,21 @@ class MoulinetteSignals(object):
if signal not in self.signals:
logger.error("unknown signal '%s'", signal)
return
setattr(self, '_%s' % signal, handler)
setattr(self, "_%s" % signal, handler)
def clear_handler(self, signal):
"""Clear the handler of a signal"""
if signal not in self.signals:
logger.error("unknown signal '%s'", signal)
return
setattr(self, '_%s' % signal, self._notimplemented)
setattr(self, "_%s" % signal, self._notimplemented)
# Signals definitions
"""The list of available signals"""
signals = {'authenticate', 'prompt', 'display'}
signals = {"authenticate", "prompt", "display"}
def authenticate(self, authenticator, help):
def authenticate(self, authenticator):
"""Process the authentication
Attempt to authenticate to the given authenticator and return
@ -297,7 +321,6 @@ class MoulinetteSignals(object):
Keyword arguments:
- authenticator -- The authenticator object to use
- help -- The translation key of the authenticator's help message
Returns:
The authenticator object
@ -305,9 +328,9 @@ class MoulinetteSignals(object):
"""
if authenticator.is_authenticated:
return authenticator
return self._authenticate(authenticator, help)
return self._authenticate(authenticator)
def prompt(self, message, is_password=False, confirm=False, color='blue'):
def prompt(self, message, is_password=False, confirm=False, color="blue"):
"""Prompt for a value
Prompt the interface for a parameter value which is a password
@ -328,7 +351,7 @@ class MoulinetteSignals(object):
"""
return self._prompt(message, is_password, confirm, color=color)
def display(self, message, style='info'):
def display(self, message, style="info"):
"""Display a message
Display a message with a given style to the user.
@ -354,6 +377,7 @@ class MoulinetteSignals(object):
# Interfaces & Authenticators management -------------------------------
def init_interface(name, kwargs={}, actionsmap={}):
"""Return a new interface instance
@ -373,10 +397,10 @@ def init_interface(name, kwargs={}, actionsmap={}):
from moulinette.actionsmap import ActionsMap
try:
mod = import_module('moulinette.interfaces.%s' % name)
except ImportError:
logger.exception("unable to load interface '%s'", name)
raise MoulinetteError('error_see_log')
mod = import_module("moulinette.interfaces.%s" % name)
except ImportError as e:
logger.exception("unable to load interface '%s' : %s", name, e)
raise MoulinetteError("error_see_log")
else:
try:
# Retrieve interface classes
@ -384,66 +408,23 @@ def init_interface(name, kwargs={}, actionsmap={}):
interface = mod.Interface
except AttributeError:
logger.exception("unable to retrieve classes of interface '%s'", name)
raise MoulinetteError('error_see_log')
raise MoulinetteError("error_see_log")
# Instantiate or retrieve ActionsMap
if isinstance(actionsmap, dict):
amap = ActionsMap(actionsmap.pop('parser', parser), **actionsmap)
amap = ActionsMap(actionsmap.pop("parser", parser), **actionsmap)
elif isinstance(actionsmap, ActionsMap):
amap = actionsmap
else:
logger.error("invalid actionsmap value %r", actionsmap)
raise MoulinetteError('error_see_log')
raise MoulinetteError("error_see_log")
return interface(amap, **kwargs)
def init_authenticator(vendor_and_name, kwargs={}):
"""Return a new authenticator instance
Retrieve the given authenticator vendor and return a new instance of
its Authenticator class for the given profile.
Keyword arguments:
- vendor -- The authenticator vendor name
- name -- The authenticator profile name
- kwargs -- A dict of arguments for the authenticator profile
"""
(vendor, name) = vendor_and_name
try:
mod = import_module('moulinette.authenticators.%s' % vendor)
except ImportError:
logger.exception("unable to load authenticator vendor '%s'", vendor)
raise MoulinetteError('error_see_log')
else:
return mod.Authenticator(name, **kwargs)
def clean_session(session_id, profiles=[]):
"""Clean a session cache
Remove cache for the session 'session_id' and for profiles in
'profiles' or for all of them if the list is empty.
Keyword arguments:
- session_id -- The session id to clean
- profiles -- A list of profiles to clean
"""
sessiondir = get_cachedir('session')
if not profiles:
profiles = os.listdir(sessiondir)
for p in profiles:
try:
os.unlink(os.path.join(sessiondir, p, '%s.asc' % session_id))
except OSError:
pass
# Moulinette core classes ----------------------------------------------
class MoulinetteError(Exception):
"""Moulinette base exception"""
@ -457,6 +438,10 @@ class MoulinetteError(Exception):
self.strerror = msg
class MoulinetteLdapIsDownError(MoulinetteError):
"""Used when ldap is down"""
class MoulinetteLock(object):
"""Locker for a moulinette instance
@ -473,12 +458,14 @@ class MoulinetteLock(object):
"""
def __init__(self, namespace, timeout=None, interval=.5):
base_lockfile = "/var/run/moulinette_%s.lock"
def __init__(self, namespace, timeout=None, interval=0.5):
self.namespace = namespace
self.timeout = timeout
self.interval = interval
self._lockfile = '/var/run/moulinette_%s.lock' % namespace
self._lockfile = self.base_lockfile % namespace
self._stale_checked = False
self._locked = False
@ -499,7 +486,7 @@ class MoulinetteLock(object):
# after 15*4 seconds, then 15*4*4 seconds...
warning_treshold = 15
logger.debug('acquiring lock...')
logger.debug("acquiring lock...")
while True:
@ -516,20 +503,24 @@ class MoulinetteLock(object):
# Check that the locked process still exists and take the lock if it doesn't
# FIXME : what to do in the context of multiple locks :|
first_lock = lock_pids[0]
if not os.path.exists(os.path.join('/proc', str(first_lock), 'exe')):
logger.debug('stale lock file found')
if not os.path.exists(os.path.join("/proc", str(first_lock), "exe")):
logger.debug("stale lock file found")
self._lock()
break
if self.timeout is not None and (time.time() - start_time) > self.timeout:
raise MoulinetteError('instance_already_running')
raise MoulinetteError("instance_already_running")
# warn the user if they have been waiting for too long
if (time.time() - start_time) > warning_treshold:
if warning_treshold == 15:
logger.warning(moulinette.m18n.g('warn_the_user_about_waiting_lock'))
logger.warning(
moulinette.m18n.g("warn_the_user_about_waiting_lock")
)
else:
logger.warning(moulinette.m18n.g('warn_the_user_about_waiting_lock_again'))
logger.warning(
moulinette.m18n.g("warn_the_user_about_waiting_lock_again")
)
warning_treshold *= 4
# Wait before checking again
@ -538,8 +529,8 @@ class MoulinetteLock(object):
# we have warned the user that we were waiting; for better UX, also tell them
# that we have stopped waiting and that the command is now being processed
if warning_treshold != 15:
logger.warning(moulinette.m18n.g('warn_the_user_that_lock_is_acquired'))
logger.debug('lock has been acquired')
logger.warning(moulinette.m18n.g("warn_the_user_that_lock_is_acquired"))
logger.debug("lock has been acquired")
self._locked = True
def release(self):
@ -552,16 +543,18 @@ class MoulinetteLock(object):
if os.path.exists(self._lockfile):
os.unlink(self._lockfile)
else:
logger.warning("Uhoh, somehow the lock %s did not exist ..." % self._lockfile)
logger.debug('lock has been released')
logger.warning(
"Uhoh, somehow the lock %s did not exist ..." % self._lockfile
)
logger.debug("lock has been released")
self._locked = False
def _lock(self):
try:
with open(self._lockfile, 'w') as f:
with open(self._lockfile, "w") as f:
f.write(str(os.getpid()))
except IOError:
raise MoulinetteError('root_required')
raise MoulinetteError("root_required")
def _lock_PIDs(self):
@ -569,10 +562,10 @@ class MoulinetteLock(object):
return []
with open(self._lockfile) as f:
lock_pids = f.read().strip().split('\n')
lock_pids = f.read().strip().split("\n")
# Make sure to convert those pids to integers
lock_pids = [int(pid) for pid in lock_pids if pid.strip() != '']
lock_pids = [int(pid) for pid in lock_pids if pid.strip() != ""]
return lock_pids
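This is the lock that process() wraps every action in: the PID goes into /var/run/moulinette_<namespace>.lock, another live holder makes us wait (with escalating warnings), and a stale lock whose PID no longer exists is silently taken over. A hedged usage sketch (needs root, since the lock file lives under /var/run):

from moulinette.core import MoulinetteError, MoulinetteLock

try:
    # Wait at most 30 seconds for any other command on the "yunohost"
    # namespace, then give up with 'instance_already_running'.
    with MoulinetteLock("yunohost", timeout=30):
        pass  # the action itself would run here
except MoulinetteError:
    print("lock could not be acquired (another instance running, or not root)")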

View file

@ -5,8 +5,10 @@ from os import environ
def init_moulinette_env():
return {
'DATA_DIR': environ.get('MOULINETTE_DATA_DIR', '/usr/share/moulinette'),
'LIB_DIR': environ.get('MOULINETTE_LIB_DIR', '/usr/lib/moulinette'),
'LOCALES_DIR': environ.get('MOULINETTE_LOCALES_DIR', '/usr/share/moulinette/locale'),
'CACHE_DIR': environ.get('MOULINETTE_CACHE_DIR', '/var/cache/moulinette'),
"DATA_DIR": environ.get("MOULINETTE_DATA_DIR", "/usr/share/moulinette"),
"LIB_DIR": environ.get("MOULINETTE_LIB_DIR", "/usr/lib/moulinette"),
"LOCALES_DIR": environ.get(
"MOULINETTE_LOCALES_DIR", "/usr/share/moulinette/locale"
),
"CACHE_DIR": environ.get("MOULINETTE_CACHE_DIR", "/var/cache/moulinette"),
}
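All four paths are read lazily from the environment, which is how the test suite redirects moulinette to a temporary tree instead of the system-wide directories. A minimal sketch (directory values are illustrative):

import os

os.environ["MOULINETTE_DATA_DIR"] = "/tmp/moulinette-test/data"
os.environ["MOULINETTE_CACHE_DIR"] = "/tmp/moulinette-test/cache"

from moulinette.globals import init_moulinette_env

env = init_moulinette_env()
assert env["DATA_DIR"] == "/tmp/moulinette-test/data"
assert env["CACHE_DIR"] == "/tmp/moulinette-test/cache"
assert env["LIB_DIR"] == "/usr/lib/moulinette"  # untouched keys keep their defaults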

View file

@ -6,18 +6,19 @@ import argparse
import copy
from collections import deque, OrderedDict
from moulinette import msignals, msettings, m18n
from moulinette.core import (init_authenticator, MoulinetteError)
from moulinette import msettings, m18n
from moulinette.core import MoulinetteError
logger = logging.getLogger('moulinette.interface')
logger = logging.getLogger("moulinette.interface")
GLOBAL_SECTION = '_global'
TO_RETURN_PROP = '_to_return'
CALLBACKS_PROP = '_callbacks'
GLOBAL_SECTION = "_global"
TO_RETURN_PROP = "_to_return"
CALLBACKS_PROP = "_callbacks"
# Base Class -----------------------------------------------------------
class BaseActionsMapParser(object):
"""Actions map's base Parser
@ -37,9 +38,8 @@ class BaseActionsMapParser(object):
if parent:
self._o = parent
else:
logger.debug('initializing base actions map parser for %s',
self.interface)
msettings['interface'] = self.interface
logger.debug("initializing base actions map parser for %s", self.interface)
msettings["interface"] = self.interface
self._o = self
self._global_conf = {}
@ -55,7 +55,7 @@ class BaseActionsMapParser(object):
# Each parser classes must implement these methods.
@staticmethod
def format_arg_names(self, name, full):
def format_arg_names(name, full):
"""Format argument name
Format argument name depending on its 'full' parameter and return
@ -70,8 +70,7 @@ class BaseActionsMapParser(object):
A list of option strings
"""
raise NotImplementedError("derived class '%s' must override this method" %
self.__class__.__name__)
raise NotImplementedError("derived class must override this method")
def has_global_parser(self):
return False
@ -85,8 +84,9 @@ class BaseActionsMapParser(object):
An ArgumentParser based object
"""
raise NotImplementedError("derived class '%s' must override this method" %
self.__class__.__name__)
raise NotImplementedError(
"derived class '%s' must override this method" % self.__class__.__name__
)
def add_category_parser(self, name, **kwargs):
"""Add a parser for a category
@ -100,8 +100,9 @@ class BaseActionsMapParser(object):
A BaseParser based object
"""
raise NotImplementedError("derived class '%s' must override this method" %
self.__class__.__name__)
raise NotImplementedError(
"derived class '%s' must override this method" % self.__class__.__name__
)
def add_action_parser(self, name, tid, **kwargs):
"""Add a parser for an action
@ -116,8 +117,23 @@ class BaseActionsMapParser(object):
An ArgumentParser based object
"""
raise NotImplementedError("derived class '%s' must override this method" %
self.__class__.__name__)
raise NotImplementedError(
"derived class '%s' must override this method" % self.__class__.__name__
)
def auth_required(self, args, **kwargs):
"""Check if authentication is required to run the requested action
Keyword arguments:
- args -- Arguments string or dict (TODO)
Returns:
False, or the authentication profile required
"""
raise NotImplementedError(
"derived class '%s' must override this method" % self.__class__.__name__
)
def parse_args(self, args, **kwargs):
"""Parse arguments
@ -132,17 +148,20 @@ class BaseActionsMapParser(object):
The populated namespace
"""
raise NotImplementedError("derived class '%s' must override this method" %
self.__class__.__name__)
raise NotImplementedError(
"derived class '%s' must override this method" % self.__class__.__name__
)
# Arguments helpers
def prepare_action_namespace(self, tid, namespace=None):
@staticmethod
def prepare_action_namespace(tid, namespace=None):
"""Prepare the namespace for a given action"""
# Validate tid and namespace
if not isinstance(tid, tuple) and \
(namespace is None or not hasattr(namespace, TO_RETURN_PROP)):
raise MoulinetteError('invalid_usage')
if not isinstance(tid, tuple) and (
namespace is None or not hasattr(namespace, TO_RETURN_PROP)
):
raise MoulinetteError("invalid_usage")
elif not tid:
tid = GLOBAL_SECTION
@ -151,18 +170,6 @@ class BaseActionsMapParser(object):
namespace = argparse.Namespace()
namespace._tid = tid
# Perform authentication if needed
if self.get_conf(tid, 'authenticate'):
auth_conf, cls = self.get_conf(tid, 'authenticator')
# TODO: Catch errors
auth = msignals.authenticate(cls(), **auth_conf)
if not auth.is_authenticated:
raise MoulinetteError('authentication_required_long')
if self.get_conf(tid, 'argument_auth') and \
self.get_conf(tid, 'authenticate') == 'all':
namespace.auth = auth
return namespace
# Configuration access
@ -172,24 +179,6 @@ class BaseActionsMapParser(object):
"""Return the global configuration of the parser"""
return self._o._global_conf
def get_global_conf(self, name, profile='default'):
"""Get the global value of a configuration
Return the formated global value of the configuration 'name' for
the given profile. If the configuration doesn't provide profile,
the formated default value is returned.
Keyword arguments:
- name -- The configuration name
- profile -- The profile of the configuration
"""
if name == 'authenticator':
value = self.global_conf[name][profile]
else:
value = self.global_conf[name]
return self._format_conf(name, value)
def set_global_conf(self, configuration):
"""Set global configuration
@ -214,11 +203,9 @@ class BaseActionsMapParser(object):
"""
try:
value = self._o._conf[action][name]
return self._o._conf[action][name]
except KeyError:
return self.get_global_conf(name)
else:
return self._format_conf(name, value)
return self.global_conf[name]
def set_conf(self, action, configuration):
"""Set configuration for an action
@ -248,109 +235,66 @@ class BaseActionsMapParser(object):
# -- 'authenticate'
try:
ifaces = configuration['authenticate']
ifaces = configuration["authenticate"]
except KeyError:
pass
else:
if ifaces == 'all':
conf['authenticate'] = ifaces
if ifaces == "all":
conf["authenticate"] = ifaces
elif ifaces is False:
conf['authenticate'] = False
conf["authenticate"] = False
elif isinstance(ifaces, list):
# Store only if authentication is needed
conf['authenticate'] = True if self.interface in ifaces else False
if "all" in ifaces:
conf["authenticate"] = "all"
else:
logger.error("expecting 'all', 'False' or a list for "
"configuration 'authenticate', got %r", ifaces)
raise MoulinetteError('error_see_log')
# Store only if authentication is needed
conf["authenticate"] = True if self.interface in ifaces else False
else:
logger.error(
"expecting 'all', 'False' or a list for "
"configuration 'authenticate', got %r",
ifaces,
)
raise MoulinetteError("error_see_log")
# -- 'authenticator'
try:
auth = configuration['authenticator']
except KeyError:
pass
else:
auth = configuration.get("authenticator", "default")
if not is_global and isinstance(auth, str):
try:
# Store needed authenticator profile
conf['authenticator'] = self.global_conf['authenticator'][auth]
except KeyError:
logger.error("requesting profile '%s' which is undefined in "
"global configuration of 'authenticator'", auth)
raise MoulinetteError('error_see_log')
if auth not in self.global_conf["authenticator"]:
logger.error(
"requesting profile '%s' which is undefined in "
"global configuration of 'authenticator'",
auth,
)
raise MoulinetteError("error_see_log")
else:
conf["authenticator"] = auth
elif is_global and isinstance(auth, dict):
if len(auth) == 0:
logger.warning('no profile defined in global configuration '
"for 'authenticator'")
logger.warning(
"no profile defined in global configuration " "for 'authenticator'"
)
else:
auths = {}
for auth_name, auth_conf in auth.items():
# Add authenticator profile as a 3-tuple
# (identifier, configuration, parameters) with
# - identifier: the authenticator vendor and its
# profile name as a 2-tuple
# - configuration: a dict of additional global
# configuration (i.e. 'help')
# - parameters: a dict of arguments for the
# authenticator profile
auths[auth_name] = ((auth_conf.get('vendor'), auth_name),
{'help': auth_conf.get('help', None)},
auth_conf.get('parameters', {}))
conf['authenticator'] = auths
auths[auth_name] = {
"name": auth_name,
"vendor": auth_conf.get("vendor"),
"parameters": auth_conf.get("parameters", {}),
"extra": {"help": auth_conf.get("help", None)},
}
conf["authenticator"] = auths
else:
logger.error("expecting a dict of profile(s) or a profile name "
"for configuration 'authenticator', got %r", auth)
raise MoulinetteError('error_see_log')
# -- 'argument_auth'
try:
arg_auth = configuration['argument_auth']
except KeyError:
pass
else:
if isinstance(arg_auth, bool):
conf['argument_auth'] = arg_auth
else:
logger.error("expecting a boolean for configuration "
"'argument_auth', got %r", arg_auth)
raise MoulinetteError('error_see_log')
# -- 'lock'
try:
lock = configuration['lock']
except KeyError:
pass
else:
if isinstance(lock, bool):
conf['lock'] = lock
else:
logger.error("expecting a boolean for configuration 'lock', "
"got %r", lock)
raise MoulinetteError('error_see_log')
logger.error(
"expecting a dict of profile(s) or a profile name "
"for configuration 'authenticator', got %r",
auth,
)
raise MoulinetteError("error_see_log")
return conf
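For a given action, the parsed configuration therefore boils down to two small entries: what 'authenticate' means for the current interface, and the name of the authenticator profile to use (turning that name into an actual Authenticator happens later in the actions map). An illustrative example of the result for an action declaring 'authenticate: all' and 'authenticator: default' on the api interface:

# Illustrative result of parsing an action configuration such as
#   {"authenticate": "all", "authenticator": "default"}
conf = {
    "authenticate": "all",       # or True/False when a list of interfaces was given
    "authenticator": "default",  # profile name only; instantiation happens later
}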
def _format_conf(self, name, value):
"""Format a configuration value
Return the formated value of the configuration 'name' from its
given value.
Keyword arguments:
- name -- The name of the configuration
- value -- The value to format
"""
if name == 'authenticator' and value:
(identifier, configuration, parameters) = value
# Return global configuration and an authenticator
# instanciator as a 2-tuple
return (configuration,
lambda: init_authenticator(identifier, parameters))
return value
class BaseInterface(object):
@ -364,55 +308,62 @@ class BaseInterface(object):
- actionsmap -- The ActionsMap instance to connect to
"""
# TODO: Add common interface methods and try to standardize default ones
def __init__(self, actionsmap):
raise NotImplementedError("derived class '%s' must override this method" %
self.__class__.__name__)
raise NotImplementedError(
"derived class '%s' must override this method" % self.__class__.__name__
)
# Argument parser ------------------------------------------------------
class _CallbackAction(argparse.Action):
def __init__(self,
class _CallbackAction(argparse.Action):
def __init__(
self,
option_strings,
dest,
nargs=0,
callback={},
default=argparse.SUPPRESS,
help=None):
if not callback or 'method' not in callback:
raise ValueError('callback must be provided with at least '
'a method key')
help=None,
):
if not callback or "method" not in callback:
raise ValueError("callback must be provided with at least " "a method key")
super(_CallbackAction, self).__init__(
option_strings=option_strings,
dest=dest,
nargs=nargs,
default=default,
help=help)
self.callback_method = callback.get('method')
self.callback_kwargs = callback.get('kwargs', {})
self.callback_return = callback.get('return', False)
logger.debug("registering new callback action '{0}' to {1}".format(
self.callback_method, option_strings))
help=help,
)
self.callback_method = callback.get("method")
self.callback_kwargs = callback.get("kwargs", {})
self.callback_return = callback.get("return", False)
logger.debug(
"registering new callback action '{0}' to {1}".format(
self.callback_method, option_strings
)
)
@property
def callback(self):
if not hasattr(self, '_callback'):
if not hasattr(self, "_callback"):
self._retrieve_callback()
return self._callback
def _retrieve_callback(self):
# Attempt to retrieve callback method
mod_name, func_name = (self.callback_method).rsplit('.', 1)
mod_name, func_name = (self.callback_method).rsplit(".", 1)
try:
mod = __import__(mod_name, globals=globals(), level=0,
fromlist=[func_name])
mod = __import__(mod_name, globals=globals(), level=0, fromlist=[func_name])
func = getattr(mod, func_name)
except (AttributeError, ImportError):
raise ValueError('unable to import method {0}'.format(
self.callback_method))
import traceback
traceback.print_exc()
raise ValueError("unable to import method {0}".format(self.callback_method))
self._callback = func
def __call__(self, parser, namespace, values, option_string=None):
@ -425,9 +376,11 @@ class _CallbackAction(argparse.Action):
# Execute callback and get returned value
value = self.callback(namespace, values, **self.callback_kwargs)
except:
logger.exception("cannot get value from callback method "
"'{0}'".format(self.callback_method))
raise MoulinetteError('error_see_log')
logger.exception(
"cannot get value from callback method "
"'{0}'".format(self.callback_method)
)
raise MoulinetteError("error_see_log")
else:
if value:
if self.callback_return:
@ -452,23 +405,22 @@ class _ExtendedSubParsersAction(argparse._SubParsersAction):
"""
def __init__(self, *args, **kwargs):
required = kwargs.pop('required', False)
required = kwargs.pop("required", False)
super(_ExtendedSubParsersAction, self).__init__(*args, **kwargs)
self.required = required
self._deprecated_command_map = {}
def add_parser(self, name, type_=None, **kwargs):
deprecated = kwargs.pop('deprecated', False)
deprecated_alias = kwargs.pop('deprecated_alias', [])
deprecated = kwargs.pop("deprecated", False)
deprecated_alias = kwargs.pop("deprecated_alias", [])
if deprecated:
self._deprecated_command_map[name] = None
if 'help' in kwargs:
del kwargs['help']
if "help" in kwargs:
del kwargs["help"]
parser = super(_ExtendedSubParsersAction, self).add_parser(
name, **kwargs)
parser = super(_ExtendedSubParsersAction, self).add_parser(name, **kwargs)
# Append each deprecated command alias name
for command in deprecated_alias:
@ -490,27 +442,34 @@ class _ExtendedSubParsersAction(argparse._SubParsersAction):
else:
# Warn the user about deprecated command
if correct_name is None:
logger.warning(m18n.g('deprecated_command', prog=parser.prog,
command=parser_name))
logger.warning(
m18n.g("deprecated_command", prog=parser.prog, command=parser_name)
)
else:
logger.warning(m18n.g('deprecated_command_alias',
old=parser_name, new=correct_name,
prog=parser.prog))
logger.warning(
m18n.g(
"deprecated_command_alias",
old=parser_name,
new=correct_name,
prog=parser.prog,
)
)
values[0] = correct_name
return super(_ExtendedSubParsersAction, self).__call__(
parser, namespace, values, option_string)
parser, namespace, values, option_string
)
class ExtendedArgumentParser(argparse.ArgumentParser):
def __init__(self, *args, **kwargs):
super(ExtendedArgumentParser, self).__init__(formatter_class=PositionalsFirstHelpFormatter,
*args, **kwargs)
super(ExtendedArgumentParser, self).__init__(
formatter_class=PositionalsFirstHelpFormatter, *args, **kwargs
)
# Register additional actions
self.register('action', 'callback', _CallbackAction)
self.register('action', 'parsers', _ExtendedSubParsersAction)
self.register("action", "callback", _CallbackAction)
self.register("action", "parsers", _ExtendedSubParsersAction)
def enqueue_callback(self, namespace, callback, values):
queue = self._get_callbacks_queue(namespace)
@ -538,30 +497,33 @@ class ExtendedArgumentParser(argparse.ArgumentParser):
queue = list()
return queue
def add_arguments(self, arguments, extraparser, format_arg_names=None, validate_extra=True):
def add_arguments(
self, arguments, extraparser, format_arg_names=None, validate_extra=True
):
for argument_name, argument_options in arguments.items():
# will adapt arguments name for cli or api context
names = format_arg_names(str(argument_name),
argument_options.pop('full', None))
names = format_arg_names(
str(argument_name), argument_options.pop("full", None)
)
if "type" in argument_options:
argument_options['type'] = eval(argument_options['type'])
argument_options["type"] = eval(argument_options["type"])
if "extra" in argument_options:
extra = argument_options.pop('extra')
extra = argument_options.pop("extra")
argument_dest = self.add_argument(*names, **argument_options).dest
extraparser.add_argument(self.get_default("_tid"),
argument_dest, extra, validate_extra)
extraparser.add_argument(
self.get_default("_tid"), argument_dest, extra, validate_extra
)
continue
self.add_argument(*names, **argument_options)
def _get_nargs_pattern(self, action):
if action.nargs == argparse.PARSER and not action.required:
return '([-AO]*)'
return "([-AO]*)"
else:
return super(ExtendedArgumentParser, self)._get_nargs_pattern(
action)
return super(ExtendedArgumentParser, self)._get_nargs_pattern(action)
def _get_values(self, action, arg_strings):
if action.nargs == argparse.PARSER and not action.required:
@ -571,8 +533,7 @@ class ExtendedArgumentParser(argparse.ArgumentParser):
else:
value = argparse.SUPPRESS
else:
value = super(ExtendedArgumentParser, self)._get_values(
action, arg_strings)
value = super(ExtendedArgumentParser, self)._get_values(action, arg_strings)
return value
# Adapted from :
@ -581,8 +542,7 @@ class ExtendedArgumentParser(argparse.ArgumentParser):
formatter = self._get_formatter()
# usage
formatter.add_usage(self.usage, self._actions,
self._mutually_exclusive_groups)
formatter.add_usage(self.usage, self._actions, self._mutually_exclusive_groups)
# description
formatter.add_text(self.description)
@ -600,14 +560,30 @@ class ExtendedArgumentParser(argparse.ArgumentParser):
subcategories_subparser = copy.copy(action_group._group_actions[0])
# Filter "action"-type and "subcategory"-type commands
actions_subparser.choices = OrderedDict([(k, v) for k, v in actions_subparser.choices.items() if v.type == "action"])
subcategories_subparser.choices = OrderedDict([(k, v) for k, v in subcategories_subparser.choices.items() if v.type == "subcategory"])
actions_subparser.choices = OrderedDict(
[
(k, v)
for k, v in actions_subparser.choices.items()
if v.type == "action"
]
)
subcategories_subparser.choices = OrderedDict(
[
(k, v)
for k, v in subcategories_subparser.choices.items()
if v.type == "subcategory"
]
)
actions_choices = actions_subparser.choices.keys()
subcategories_choices = subcategories_subparser.choices.keys()
actions_subparser._choices_actions = [c for c in choice_actions if c.dest in actions_choices]
subcategories_subparser._choices_actions = [c for c in choice_actions if c.dest in subcategories_choices]
actions_subparser._choices_actions = [
c for c in choice_actions if c.dest in actions_choices
]
subcategories_subparser._choices_actions = [
c for c in choice_actions if c.dest in subcategories_choices
]
# Display each section (actions and subcategories)
if actions_choices != []:
@ -642,11 +618,10 @@ class ExtendedArgumentParser(argparse.ArgumentParser):
# and fix is inspired from here :
# https://stackoverflow.com/questions/26985650/argparse-do-not-catch-positional-arguments-with-nargs/26986546#26986546
class PositionalsFirstHelpFormatter(argparse.HelpFormatter):
def _format_usage(self, usage, actions, groups, prefix):
if prefix is None:
# TWEAK : not using gettext here...
prefix = 'usage: '
prefix = "usage: "
# if usage is specified, use that
if usage is not None:
@ -654,11 +629,11 @@ class PositionalsFirstHelpFormatter(argparse.HelpFormatter):
# if no optionals or positionals are available, usage is just prog
elif usage is None and not actions:
usage = '%(prog)s' % dict(prog=self._prog)
usage = "%(prog)s" % dict(prog=self._prog)
# if optionals and positionals are available, calculate usage
elif usage is None:
prog = '%(prog)s' % dict(prog=self._prog)
prog = "%(prog)s" % dict(prog=self._prog)
# split optionals from positionals
optionals = []
@ -673,20 +648,20 @@ class PositionalsFirstHelpFormatter(argparse.HelpFormatter):
format = self._format_actions_usage
# TWEAK here : positionals first
action_usage = format(positionals + optionals, groups)
usage = ' '.join([s for s in [prog, action_usage] if s])
usage = " ".join([s for s in [prog, action_usage] if s])
# wrap the usage parts if it's too long
text_width = self._width - self._current_indent
if len(prefix) + len(usage) > text_width:
# break usage into wrappable parts
part_regexp = r'\(.*?\)+|\[.*?\]+|\S+'
part_regexp = r"\(.*?\)+|\[.*?\]+|\S+"
opt_usage = format(optionals, groups)
pos_usage = format(positionals, groups)
opt_parts = re.findall(part_regexp, opt_usage)
pos_parts = re.findall(part_regexp, pos_usage)
assert ' '.join(opt_parts) == opt_usage
assert ' '.join(pos_parts) == pos_usage
assert " ".join(opt_parts) == opt_usage
assert " ".join(pos_parts) == pos_usage
# helper for wrapping lines
def get_lines(parts, indent, prefix=None):
@ -698,20 +673,20 @@ class PositionalsFirstHelpFormatter(argparse.HelpFormatter):
line_len = len(indent) - 1
for part in parts:
if line_len + 1 + len(part) > text_width:
lines.append(indent + ' '.join(line))
lines.append(indent + " ".join(line))
line = []
line_len = len(indent) - 1
line.append(part)
line_len += len(part) + 1
if line:
lines.append(indent + ' '.join(line))
lines.append(indent + " ".join(line))
if prefix is not None:
lines[0] = lines[0][len(indent):]
lines[0] = lines[0][len(indent) :]
return lines
# if prog is short, follow it with optionals or positionals
if len(prefix) + len(prog) <= 0.75 * text_width:
indent = ' ' * (len(prefix) + len(prog) + 1)
indent = " " * (len(prefix) + len(prog) + 1)
# START TWEAK : pos_parts first, then opt_parts
if pos_parts:
lines = get_lines([prog] + pos_parts, indent, prefix)
@ -724,7 +699,7 @@ class PositionalsFirstHelpFormatter(argparse.HelpFormatter):
# if prog is long, put it on its own line
else:
indent = ' ' * len(prefix)
indent = " " * len(prefix)
parts = pos_parts + opt_parts
lines = get_lines(parts, indent)
if len(lines) > 1:
@ -735,7 +710,7 @@ class PositionalsFirstHelpFormatter(argparse.HelpFormatter):
lines = [prog] + lines
# join lines into usage
usage = '\n'.join(lines)
usage = "\n".join(lines)
# prefix with 'usage:'
return '%s%s\n\n' % (prefix, usage)
return "%s%s\n\n" % (prefix, usage)
View file
@ -14,22 +14,24 @@ from bottle import run, request, response, Bottle, HTTPResponse
from bottle import abort
from moulinette import msignals, m18n, env
from moulinette.core import MoulinetteError, clean_session
from moulinette.core import MoulinetteError
from moulinette.interfaces import (
BaseActionsMapParser, BaseInterface, ExtendedArgumentParser,
BaseActionsMapParser,
BaseInterface,
ExtendedArgumentParser,
)
from moulinette.utils import log
from moulinette.utils.serialize import JSONExtendedEncoder
from moulinette.utils.text import random_ascii
logger = log.getLogger('moulinette.interface.api')
logger = log.getLogger("moulinette.interface.api")
# API helpers ----------------------------------------------------------
CSRF_TYPES = set(["text/plain",
"application/x-www-form-urlencoded",
"multipart/form-data"])
CSRF_TYPES = set(
["text/plain", "application/x-www-form-urlencoded", "multipart/form-data"]
)
def is_csrf():
@ -39,7 +41,7 @@ def is_csrf():
return False
if request.content_type is None:
return True
content_type = request.content_type.lower().split(';')[0]
content_type = request.content_type.lower().split(";")[0]
if content_type not in CSRF_TYPES:
return False
@ -53,12 +55,14 @@ def filter_csrf(callback):
abort(403, "CSRF protection")
else:
return callback(*args, **kwargs)
return wrapper
class LogQueues(dict):
"""Map of session id to queue."""
pass
@ -74,7 +78,7 @@ class APIQueueHandler(logging.Handler):
self.queues = LogQueues()
def emit(self, record):
sid = request.get_cookie('session.id')
sid = request.get_cookie("session.id")
try:
queue = self.queues[sid]
except KeyError:
@ -99,9 +103,9 @@ class _HTTPArgumentParser(object):
def __init__(self):
# Initialize the ArgumentParser object
self._parser = ExtendedArgumentParser(usage='',
prefix_chars='@',
add_help=False)
self._parser = ExtendedArgumentParser(
usage="", prefix_chars="@", add_help=False
)
self._parser.error = self._error
self._positional = [] # list(arg_name)
@ -113,20 +117,24 @@ class _HTTPArgumentParser(object):
def get_default(self, dest):
return self._parser.get_default(dest)
def add_arguments(self, arguments, extraparser, format_arg_names=None, validate_extra=True):
def add_arguments(
self, arguments, extraparser, format_arg_names=None, validate_extra=True
):
for argument_name, argument_options in arguments.items():
# will adapt arguments name for cli or api context
names = format_arg_names(str(argument_name),
argument_options.pop('full', None))
names = format_arg_names(
str(argument_name), argument_options.pop("full", None)
)
if "type" in argument_options:
argument_options['type'] = eval(argument_options['type'])
argument_options["type"] = eval(argument_options["type"])
if "extra" in argument_options:
extra = argument_options.pop('extra')
extra = argument_options.pop("extra")
argument_dest = self.add_argument(*names, **argument_options).dest
extraparser.add_argument(self.get_default("_tid"),
argument_dest, extra, validate_extra)
extraparser.add_argument(
self.get_default("_tid"), argument_dest, extra, validate_extra
)
continue
self.add_argument(*names, **argument_options)
@ -166,12 +174,19 @@ class _HTTPArgumentParser(object):
if isinstance(v, str):
arg_strings.append(v)
else:
logger.warning("unsupported argument value type %r "
"in %s for option string %s", v, value,
option_string)
logger.warning(
"unsupported argument value type %r "
"in %s for option string %s",
v,
value,
option_string,
)
else:
logger.warning("unsupported argument type %r for option "
"string %s", value, option_string)
logger.warning(
"unsupported argument type %r for option " "string %s",
value,
option_string,
)
return arg_strings
@ -208,14 +223,15 @@ class _ActionsMapPlugin(object):
to serve messages coming from the 'display' signal
"""
name = 'actionsmap'
name = "actionsmap"
api = 2
def __init__(self, actionsmap, use_websocket, log_queues={}):
# Connect signals to handlers
msignals.set_handler('authenticate', self._do_authenticate)
msignals.set_handler("authenticate", self._do_authenticate)
if use_websocket:
msignals.set_handler('display', self._do_display)
msignals.set_handler("display", self._do_display)
self.actionsmap = actionsmap
self.use_websocket = use_websocket
@ -237,37 +253,50 @@ class _ActionsMapPlugin(object):
def wrapper():
kwargs = {}
try:
kwargs['password'] = request.POST['password']
kwargs["password"] = request.POST["password"]
except KeyError:
raise HTTPBadRequestResponse("Missing password parameter")
try:
kwargs['profile'] = request.POST['profile']
except KeyError:
pass
kwargs["profile"] = request.POST.get("profile", "default")
return callback(**kwargs)
return wrapper
# Logout wrapper
def _logout(callback):
def wrapper():
kwargs = {}
try:
kwargs['profile'] = request.POST.get('profile')
except KeyError:
pass
kwargs["profile"] = request.POST.get("profile", "default")
return callback(**kwargs)
return wrapper
# Append authentication routes
app.route('/login', name='login', method='POST',
callback=self.login, skip=['actionsmap'], apply=_login)
app.route('/logout', name='logout', method='GET',
callback=self.logout, skip=['actionsmap'], apply=_logout)
app.route(
"/login",
name="login",
method="POST",
callback=self.login,
skip=["actionsmap"],
apply=_login,
)
app.route(
"/logout",
name="logout",
method="GET",
callback=self.logout,
skip=["actionsmap"],
apply=_logout,
)
# Append messages route
if self.use_websocket:
app.route('/messages', name='messages',
callback=self.messages, skip=['actionsmap'])
app.route(
"/messages",
name="messages",
callback=self.messages,
skip=["actionsmap"],
)
# Append routes from the actions map
for (m, p) in self.actionsmap.parser.routes:
@ -284,6 +313,7 @@ class _ActionsMapPlugin(object):
context -- An instance of Route
"""
def _format(value):
if isinstance(value, list) and len(value) == 1:
return value[0]
@ -297,11 +327,10 @@ class _ActionsMapPlugin(object):
# Append other request params
for k, v in request.params.dict.items():
v = _format(v)
try:
curr_v = params[k]
except KeyError:
if k not in params.keys():
params[k] = v
else:
curr_v = params[k]
# Append param value to the list
if not isinstance(curr_v, list):
curr_v = [curr_v]
@ -314,11 +343,12 @@ class _ActionsMapPlugin(object):
# Process the action
return callback((request.method, context.rule), params)
return wrapper
# Routes callbacks
def login(self, password, profile='default'):
def login(self, password, profile):
"""Log in to an authenticator profile
Attempt to authenticate to a given authenticator profile and
@ -331,22 +361,32 @@ class _ActionsMapPlugin(object):
"""
# Retrieve session values
s_id = request.get_cookie('session.id') or random_ascii()
try:
s_id = request.get_cookie("session.id") or random_ascii()
except:
# Super rare case of weird cookie / cache issues:
# the previous line throws a CookieError that would turn into a 500 error,
# so catch it and just use a fresh ID instead...
s_id = random_ascii()
try:
s_secret = self.secrets[s_id]
except KeyError:
s_hashes = {}
s_tokens = {}
else:
s_hashes = request.get_cookie('session.hashes',
secret=s_secret) or {}
s_hash = random_ascii()
try:
s_tokens = request.get_cookie("session.tokens", secret=s_secret) or {}
except:
# Same as for session.id a few lines before
s_tokens = {}
s_new_token = random_ascii()
try:
# Attempt to authenticate
auth = self.actionsmap.get_authenticator(profile)
auth(password, token=(s_id, s_hash))
authenticator = self.actionsmap.get_authenticator_for_profile(profile)
authenticator(password, token=(s_id, s_new_token))
except MoulinetteError as e:
if len(s_hashes) > 0:
if len(s_tokens) > 0:
try:
self.logout(profile)
except:
@ -354,15 +394,16 @@ class _ActionsMapPlugin(object):
raise HTTPUnauthorizedResponse(e.strerror)
else:
# Update dicts with new values
s_hashes[profile] = s_hash
s_tokens[profile] = s_new_token
self.secrets[s_id] = s_secret = random_ascii()
response.set_cookie('session.id', s_id, secure=True)
response.set_cookie('session.hashes', s_hashes, secure=True,
secret=s_secret)
return m18n.g('logged_in')
response.set_cookie("session.id", s_id, secure=True)
response.set_cookie(
"session.tokens", s_tokens, secure=True, secret=s_secret
)
return m18n.g("logged_in")
def logout(self, profile=None):
def logout(self, profile):
"""Log out from an authenticator profile
Attempt to unregister a given profile - or all by default - from
@ -372,17 +413,26 @@ class _ActionsMapPlugin(object):
- profile -- The authenticator profile name to log out
"""
s_id = request.get_cookie('session.id')
s_id = request.get_cookie("session.id")
# We check that a (signed) session token is available for additional
# security: an attacker should not be able to craft such a signed token
# (FIXME: need to make sure of this)
try:
del self.secrets[s_id]
s_secret = self.secrets[s_id]
except KeyError:
raise HTTPUnauthorizedResponse(m18n.g('not_logged_in'))
s_secret = {}
if profile not in request.get_cookie(
"session.tokens", secret=s_secret, default={}
):
raise HTTPUnauthorizedResponse(m18n.g("not_logged_in"))
else:
del self.secrets[s_id]
authenticator = self.actionsmap.get_authenticator_for_profile(profile)
authenticator._clean_session(s_id)
# TODO: Clean the session for profile only
# Delete cookie and clean the session
response.set_cookie('session.hashes', '', max_age=-1)
clean_session(s_id)
return m18n.g('logged_out')
response.set_cookie("session.tokens", "", max_age=-1)
return m18n.g("logged_out")
def messages(self):
"""Listen to the messages WebSocket stream
@ -392,7 +442,7 @@ class _ActionsMapPlugin(object):
dict { style: message }.
"""
s_id = request.get_cookie('session.id')
s_id = request.get_cookie("session.id")
try:
queue = self.log_queues[s_id]
except KeyError:
@ -400,9 +450,9 @@ class _ActionsMapPlugin(object):
queue = Queue()
self.log_queues[s_id] = queue
wsock = request.environ.get('wsgi.websocket')
wsock = request.environ.get("wsgi.websocket")
if not wsock:
raise HTTPErrorResponse(m18n.g('websocket_request_expected'))
raise HTTPErrorResponse(m18n.g("websocket_request_expected"))
while True:
item = queue.get()
@ -443,17 +493,16 @@ class _ActionsMapPlugin(object):
if isinstance(e, HTTPResponse):
raise e
import traceback
tb = traceback.format_exc()
logs = {"route": _route,
"arguments": arguments,
"traceback": tb}
logs = {"route": _route, "arguments": arguments, "traceback": tb}
return HTTPErrorResponse(json_encode(logs))
else:
return format_for_response(ret)
finally:
# Close opened WebSocket by putting StopIteration in the queue
try:
queue = self.log_queues[request.get_cookie('session.id')]
queue = self.log_queues[request.get_cookie("session.id")]
except KeyError:
pass
else:
@ -461,26 +510,23 @@ class _ActionsMapPlugin(object):
# Signals handlers
def _do_authenticate(self, authenticator, help):
def _do_authenticate(self, authenticator):
"""Process the authentication
Handle the core.MoulinetteSignals.authenticate signal.
"""
s_id = request.get_cookie('session.id')
s_id = request.get_cookie("session.id")
try:
s_secret = self.secrets[s_id]
s_hash = request.get_cookie('session.hashes',
secret=s_secret, default={})[authenticator.name]
s_token = request.get_cookie("session.tokens", secret=s_secret, default={})[
authenticator.name
]
except KeyError:
if authenticator.name == 'default':
msg = m18n.g('authentication_required')
else:
msg = m18n.g('authentication_profile_required',
profile=authenticator.name)
msg = m18n.g("authentication_required")
raise HTTPUnauthorizedResponse(msg)
else:
return authenticator(token=(s_id, s_hash))
return authenticator(token=(s_id, s_token))
def _do_display(self, message, style):
"""Display a message
@ -488,7 +534,7 @@ class _ActionsMapPlugin(object):
Handle the core.MoulinetteSignals.display signal.
"""
s_id = request.get_cookie('session.id')
s_id = request.get_cookie("session.id")
try:
queue = self.log_queues[s_id]
except KeyError:
@ -504,50 +550,48 @@ class _ActionsMapPlugin(object):
# HTTP Responses -------------------------------------------------------
class HTTPOKResponse(HTTPResponse):
def __init__(self, output=''):
class HTTPOKResponse(HTTPResponse):
def __init__(self, output=""):
super(HTTPOKResponse, self).__init__(output, 200)
class HTTPBadRequestResponse(HTTPResponse):
def __init__(self, output=''):
def __init__(self, output=""):
super(HTTPBadRequestResponse, self).__init__(output, 400)
class HTTPUnauthorizedResponse(HTTPResponse):
def __init__(self, output=''):
def __init__(self, output=""):
super(HTTPUnauthorizedResponse, self).__init__(output, 401)
class HTTPErrorResponse(HTTPResponse):
def __init__(self, output=''):
def __init__(self, output=""):
super(HTTPErrorResponse, self).__init__(output, 500)
def format_for_response(content):
"""Format the resulted content of a request for the HTTP response."""
if request.method == 'POST':
if request.method == "POST":
response.status = 201 # Created
elif request.method == 'GET':
elif request.method == "GET":
response.status = 200 # Ok
else:
# Return empty string if no content
if content is None or len(content) == 0:
response.status = 204 # No Content
return ''
return ""
response.status = 200
# Return JSON-style response
response.content_type = 'application/json'
response.content_type = "application/json"
return json_encode(content, cls=JSONExtendedEncoder)
# API Classes Implementation -------------------------------------------
class ActionsMapParser(BaseActionsMapParser):
"""Actions map's Parser for the API
@ -561,7 +605,7 @@ class ActionsMapParser(BaseActionsMapParser):
super(ActionsMapParser, self).__init__(parent)
self._parsers = {} # dict({(method, path): _HTTPArgumentParser})
self._route_re = re.compile(r'(GET|POST|PUT|DELETE) (/\S+)')
self._route_re = re.compile(r"(GET|POST|PUT|DELETE) (/\S+)")
@property
def routes(self):
@ -570,19 +614,19 @@ class ActionsMapParser(BaseActionsMapParser):
# Implement virtual properties
interface = 'api'
interface = "api"
# Implement virtual methods
@staticmethod
def format_arg_names(name, full):
if name[0] != '-':
if name[0] != "-":
return [name]
if full:
return [full.replace('--', '@', 1)]
if name.startswith('--'):
return [name.replace('--', '@', 1)]
return [name.replace('-', '@', 1)]
return [full.replace("--", "@", 1)]
if name.startswith("--"):
return [name.replace("--", "@", 1)]
return [name.replace("-", "@", 1)]
def add_category_parser(self, name, **kwargs):
return self
@ -611,8 +655,9 @@ class ActionsMapParser(BaseActionsMapParser):
try:
keys.append(self._extract_route(r))
except ValueError as e:
logger.warning("cannot add api route '%s' for "
"action %s: %s", r, tid, e)
logger.warning(
"cannot add api route '%s' for " "action %s: %s", r, tid, e
)
continue
if len(keys) == 0:
raise ValueError("no valid api route found")
@ -627,6 +672,28 @@ class ActionsMapParser(BaseActionsMapParser):
# Return the created parser
return parser
def auth_required(self, args, **kwargs):
try:
# Retrieve the tid for the route
tid, _ = self._parsers[kwargs.get("route")]
except KeyError:
logger.error("no argument parser found for route '%s'", kwargs.get("route"))
raise MoulinetteError("error_see_log")
if self.get_conf(tid, "authenticate"):
authenticator = self.get_conf(tid, "authenticator")
# If several authenticators are configured, use the default one
if isinstance(authenticator, dict):
if "default" in authenticator:
authenticator = "default"
else:
# TODO which one should we use?
pass
return authenticator
else:
return False
def parse_args(self, args, route, **kwargs):
"""Parse arguments
@ -635,28 +702,13 @@ class ActionsMapParser(BaseActionsMapParser):
"""
try:
# Retrieve the tid and the parser for the route
tid, parser = self._parsers[route]
# Retrieve the parser for the route
_, parser = self._parsers[route]
except KeyError:
logger.error("no argument parser found for route '%s'", route)
raise MoulinetteError('error_see_log')
raise MoulinetteError("error_see_log")
ret = argparse.Namespace()
# Perform authentication if needed
if self.get_conf(tid, 'authenticate'):
# TODO: Clean this hard fix and find a way to set an authenticator
# to use for the api only
# auth_conf, klass = self.get_conf(tid, 'authenticator')
auth_conf, klass = self.get_global_conf('authenticator', 'default')
# TODO: Catch errors
auth = msignals.authenticate(klass(), **auth_conf)
if not auth.is_authenticated:
raise MoulinetteError('authentication_required_long')
if self.get_conf(tid, 'argument_auth') and \
self.get_conf(tid, 'authenticate') == 'all':
ret.auth = auth
# TODO: Catch errors?
ret = parser.parse_args(args, ret)
parser.dequeue_callbacks(ret)
@ -702,8 +754,7 @@ class Interface(BaseInterface):
"""
def __init__(self, actionsmap, routes={}, use_websocket=True,
log_queues=None):
def __init__(self, actionsmap, routes={}, use_websocket=True, log_queues=None):
self.use_websocket = use_websocket
# Attempt to retrieve log queues from an APIQueueHandler
@ -718,14 +769,15 @@ class Interface(BaseInterface):
# Wrapper which sets proper header
def apiheader(callback):
def wrapper(*args, **kwargs):
response.set_header('Access-Control-Allow-Origin', '*')
response.set_header("Access-Control-Allow-Origin", "*")
return callback(*args, **kwargs)
return wrapper
# Attempt to retrieve and set locale
def api18n(callback):
try:
locale = request.params.pop('locale')
locale = request.params.pop("locale")
except KeyError:
locale = m18n.default_locale
m18n.set_locale(locale)
@ -738,17 +790,17 @@ class Interface(BaseInterface):
app.install(_ActionsMapPlugin(actionsmap, use_websocket, log_queues))
# Append default routes
# app.route(['/api', '/api/<category:re:[a-z]+>'], method='GET',
# callback=self.doc, skip=['actionsmap'])
# app.route(['/api', '/api/<category:re:[a-z]+>'], method='GET',
# callback=self.doc, skip=['actionsmap'])
# Append additional routes
# TODO: Add optional authentication to those routes?
for (m, p), c in routes.items():
app.route(p, method=m, callback=c, skip=['actionsmap'])
app.route(p, method=m, callback=c, skip=["actionsmap"])
self._app = app
def run(self, host='localhost', port=80):
def run(self, host="localhost", port=80):
"""Run the moulinette
Start a server instance on the given port to serve moulinette
@ -759,25 +811,29 @@ class Interface(BaseInterface):
- port -- Server port to bind to
"""
logger.debug("starting the server instance in %s:%d with websocket=%s",
host, port, self.use_websocket)
logger.debug(
"starting the server instance in %s:%d with websocket=%s",
host,
port,
self.use_websocket,
)
try:
if self.use_websocket:
from gevent.pywsgi import WSGIServer
from geventwebsocket.handler import WebSocketHandler
server = WSGIServer((host, port), self._app,
handler_class=WebSocketHandler)
server = WSGIServer(
(host, port), self._app, handler_class=WebSocketHandler
)
server.serve_forever()
else:
run(self._app, host=host, port=port)
except IOError as e:
logger.exception("unable to start the server instance on %s:%d",
host, port)
logger.exception("unable to start the server instance on %s:%d", host, port)
if e.args[0] == errno.EADDRINUSE:
raise MoulinetteError('server_already_running')
raise MoulinetteError('error_see_log')
raise MoulinetteError("server_already_running")
raise MoulinetteError("error_see_log")
# Routes handlers
@ -789,14 +845,14 @@ class Interface(BaseInterface):
category -- Name of the category
"""
DATA_DIR = env()['DATA_DIR']
DATA_DIR = env()["DATA_DIR"]
if category is None:
with open('%s/../doc/resources.json' % DATA_DIR) as f:
with open("%s/../doc/resources.json" % DATA_DIR) as f:
return f.read()
try:
with open('%s/../doc/%s.json' % (DATA_DIR, category)) as f:
with open("%s/../doc/%s.json" % (DATA_DIR, category)) as f:
return f.read()
except IOError:
return None
View file
@ -14,27 +14,29 @@ import argcomplete
from moulinette import msignals, m18n
from moulinette.core import MoulinetteError
from moulinette.interfaces import (
BaseActionsMapParser, BaseInterface, ExtendedArgumentParser,
BaseActionsMapParser,
BaseInterface,
ExtendedArgumentParser,
)
from moulinette.utils import log
logger = log.getLogger('moulinette.cli')
logger = log.getLogger("moulinette.cli")
# CLI helpers ----------------------------------------------------------
CLI_COLOR_TEMPLATE = '\033[{:d}m\033[1m'
END_CLI_COLOR = '\033[m'
CLI_COLOR_TEMPLATE = "\033[{:d}m\033[1m"
END_CLI_COLOR = "\033[m"
colors_codes = {
'red': CLI_COLOR_TEMPLATE.format(31),
'green': CLI_COLOR_TEMPLATE.format(32),
'yellow': CLI_COLOR_TEMPLATE.format(33),
'blue': CLI_COLOR_TEMPLATE.format(34),
'purple': CLI_COLOR_TEMPLATE.format(35),
'cyan': CLI_COLOR_TEMPLATE.format(36),
'white': CLI_COLOR_TEMPLATE.format(37),
"red": CLI_COLOR_TEMPLATE.format(31),
"green": CLI_COLOR_TEMPLATE.format(32),
"yellow": CLI_COLOR_TEMPLATE.format(33),
"blue": CLI_COLOR_TEMPLATE.format(34),
"purple": CLI_COLOR_TEMPLATE.format(35),
"cyan": CLI_COLOR_TEMPLATE.format(36),
"white": CLI_COLOR_TEMPLATE.format(37),
}
@ -49,7 +51,7 @@ def colorize(astr, color):
"""
if os.isatty(1):
return '{:s}{:s}{:s}'.format(colors_codes[color], astr, END_CLI_COLOR)
return "{:s}{:s}{:s}".format(colors_codes[color], astr, END_CLI_COLOR)
else:
return astr
@ -90,7 +92,7 @@ def plain_print_dict(d, depth=0):
plain_print_dict(v, depth + 1)
else:
if isinstance(d, unicode):
d = d.encode('utf-8')
d = d.encode("utf-8")
print(d)
@ -108,7 +110,7 @@ def pretty_date(_date):
nowtz = nowtz.replace(tzinfo=pytz.utc)
offsetHour = nowutc - nowtz
offsetHour = int(round(offsetHour.total_seconds() / 3600))
localtz = 'Etc/GMT%+d' % offsetHour
localtz = "Etc/GMT%+d" % offsetHour
# Transform naive date into UTC date
if _date.tzinfo is None:
@ -137,7 +139,7 @@ def pretty_print_dict(d, depth=0):
keys = sorted(keys)
for k in keys:
v = d[k]
k = colorize(str(k), 'purple')
k = colorize(str(k), "purple")
if isinstance(v, (tuple, set)):
v = list(v)
if isinstance(v, list) and len(v) == 1:
@ -154,13 +156,13 @@ def pretty_print_dict(d, depth=0):
pretty_print_dict({key: value}, depth + 1)
else:
if isinstance(value, unicode):
value = value.encode('utf-8')
value = value.encode("utf-8")
elif isinstance(v, date):
v = pretty_date(v)
print("{:s}- {}".format(" " * (depth + 1), value))
else:
if isinstance(v, unicode):
v = v.encode('utf-8')
v = v.encode("utf-8")
elif isinstance(v, date):
v = pretty_date(v)
print("{:s}{}: {}".format(" " * depth, k, v))
@ -170,12 +172,13 @@ def get_locale():
"""Return current user locale"""
lang = locale.getdefaultlocale()[0]
if not lang:
return ''
return ""
return lang[:2]
# CLI Classes Implementation -------------------------------------------
class TTYHandler(logging.StreamHandler):
"""TTY log handler
@ -193,17 +196,18 @@ class TTYHandler(logging.StreamHandler):
stderr. Otherwise, they are sent to stdout.
"""
LEVELS_COLOR = {
log.NOTSET: 'white',
log.DEBUG: 'white',
log.INFO: 'cyan',
log.SUCCESS: 'green',
log.WARNING: 'yellow',
log.ERROR: 'red',
log.CRITICAL: 'red',
log.NOTSET: "white",
log.DEBUG: "white",
log.INFO: "cyan",
log.SUCCESS: "green",
log.WARNING: "yellow",
log.ERROR: "red",
log.CRITICAL: "red",
}
def __init__(self, message_key='fmessage'):
def __init__(self, message_key="fmessage"):
logging.StreamHandler.__init__(self)
self.message_key = message_key
@ -211,16 +215,15 @@ class TTYHandler(logging.StreamHandler):
"""Enhance message with level and colors if supported."""
msg = record.getMessage()
if self.supports_color():
level = ''
level = ""
if self.level <= log.DEBUG:
# add level name before message
level = '%s ' % record.levelname
elif record.levelname in ['SUCCESS', 'WARNING', 'ERROR', 'INFO']:
level = "%s " % record.levelname
elif record.levelname in ["SUCCESS", "WARNING", "ERROR", "INFO"]:
# add translated level name before message
level = '%s ' % m18n.g(record.levelname.lower())
color = self.LEVELS_COLOR.get(record.levelno, 'white')
msg = '{0}{1}{2}{3}'.format(
colors_codes[color], level, END_CLI_COLOR, msg)
level = "%s " % m18n.g(record.levelname.lower())
color = self.LEVELS_COLOR.get(record.levelno, "white")
msg = "{0}{1}{2}{3}".format(colors_codes[color], level, END_CLI_COLOR, msg)
if self.formatter:
# use user-defined formatter
record.__dict__[self.message_key] = msg
@ -237,7 +240,7 @@ class TTYHandler(logging.StreamHandler):
def supports_color(self):
"""Check whether current stream supports color."""
if hasattr(self.stream, 'isatty') and self.stream.isatty():
if hasattr(self.stream, "isatty") and self.stream.isatty():
return True
return False
@ -257,12 +260,13 @@ class ActionsMapParser(BaseActionsMapParser):
"""
def __init__(self, parent=None, parser=None, subparser_kwargs=None,
top_parser=None, **kwargs):
def __init__(
self, parent=None, parser=None, subparser_kwargs=None, top_parser=None, **kwargs
):
super(ActionsMapParser, self).__init__(parent)
if subparser_kwargs is None:
subparser_kwargs = {'title': "categories", 'required': False}
subparser_kwargs = {"title": "categories", "required": False}
self._parser = parser or ExtendedArgumentParser()
self._subparsers = self._parser.add_subparsers(**subparser_kwargs)
@ -278,13 +282,13 @@ class ActionsMapParser(BaseActionsMapParser):
# Implement virtual properties
interface = 'cli'
interface = "cli"
# Implement virtual methods
@staticmethod
def format_arg_names(name, full):
if name[0] == '-' and full:
if name.startswith("-") and full:
return [name, full]
return [name]
@ -301,13 +305,10 @@ class ActionsMapParser(BaseActionsMapParser):
A new ActionsMapParser object for the category
"""
parser = self._subparsers.add_parser(name,
description=category_help,
help=category_help,
**kwargs)
return self.__class__(self, parser, {
'title': "subcommands", 'required': True
})
parser = self._subparsers.add_parser(
name, description=category_help, help=category_help, **kwargs
)
return self.__class__(self, parser, {"title": "subcommands", "required": True})
def add_subcategory_parser(self, name, subcategory_help=None, **kwargs):
"""Add a parser for a subcategory
@ -319,17 +320,24 @@ class ActionsMapParser(BaseActionsMapParser):
A new ActionsMapParser object for the category
"""
parser = self._subparsers.add_parser(name,
parser = self._subparsers.add_parser(
name,
type_="subcategory",
description=subcategory_help,
help=subcategory_help,
**kwargs)
return self.__class__(self, parser, {
'title': "actions", 'required': True
})
**kwargs
)
return self.__class__(self, parser, {"title": "actions", "required": True})
def add_action_parser(self, name, tid, action_help=None, deprecated=False,
deprecated_alias=[], **kwargs):
def add_action_parser(
self,
name,
tid,
action_help=None,
deprecated=False,
deprecated_alias=[],
**kwargs
):
"""Add a parser for an action
Keyword arguments:
@ -341,31 +349,60 @@ class ActionsMapParser(BaseActionsMapParser):
A new ExtendedArgumentParser object for the action
"""
return self._subparsers.add_parser(name,
return self._subparsers.add_parser(
name,
type_="action",
help=action_help,
description=action_help,
deprecated=deprecated,
deprecated_alias=deprecated_alias)
deprecated_alias=deprecated_alias,
)
def add_global_arguments(self, arguments):
for argument_name, argument_options in arguments.items():
# will adapt arguments name for cli or api context
names = self.format_arg_names(str(argument_name),
argument_options.pop('full', None))
names = self.format_arg_names(
str(argument_name), argument_options.pop("full", None)
)
self.global_parser.add_argument(*names, **argument_options)
def auth_required(self, args, **kwargs):
# FIXME? idk .. this try/except is duplicated from parse_args below
# Just to be able to obtain the tid
try:
ret = self._parser.parse_args(args)
except SystemExit:
raise
except:
logger.exception("unable to parse arguments '%s'", " ".join(args))
raise MoulinetteError("error_see_log")
tid = getattr(ret, "_tid", None)
if self.get_conf(tid, "authenticate"):
authenticator = self.get_conf(tid, "authenticator")
# If several authenticators are configured, use the default one
if isinstance(authenticator, dict):
if "default" in authenticator:
authenticator = "default"
else:
# TODO which one should we use?
pass
return authenticator
else:
return False
def parse_args(self, args, **kwargs):
try:
ret = self._parser.parse_args(args)
except SystemExit:
raise
except:
logger.exception("unable to parse arguments '%s'", ' '.join(args))
raise MoulinetteError('error_see_log')
logger.exception("unable to parse arguments '%s'", " ".join(args))
raise MoulinetteError("error_see_log")
else:
self.prepare_action_namespace(getattr(ret, '_tid', None), ret)
self.prepare_action_namespace(getattr(ret, "_tid", None), ret)
self._parser.dequeue_callbacks(ret)
return ret
@ -387,10 +424,10 @@ class Interface(BaseInterface):
m18n.set_locale(get_locale())
# Connect signals to handlers
msignals.set_handler('display', self._do_display)
msignals.set_handler("display", self._do_display)
if os.isatty(1):
msignals.set_handler('authenticate', self._do_authenticate)
msignals.set_handler('prompt', self._do_prompt)
msignals.set_handler("authenticate", self._do_authenticate)
msignals.set_handler("prompt", self._do_prompt)
self.actionsmap = actionsmap
@ -410,30 +447,33 @@ class Interface(BaseInterface):
- timeout -- Number of seconds before the command times out because it cannot acquire the lock (meaning another command is currently running); by default there is no timeout and the command waits until it can get the lock
"""
if output_as and output_as not in ['json', 'plain', 'none']:
raise MoulinetteError('invalid_usage')
if output_as and output_as not in ["json", "plain", "none"]:
raise MoulinetteError("invalid_usage")
# auto-complete
argcomplete.autocomplete(self.actionsmap.parser._parser)
# Set handler for authentication
if password:
msignals.set_handler('authenticate',
lambda a, h: a(password=password))
msignals.set_handler("authenticate", lambda a: a(password=password))
else:
if os.isatty(1):
msignals.set_handler("authenticate", self._do_authenticate)
try:
ret = self.actionsmap.process(args, timeout=timeout)
except (KeyboardInterrupt, EOFError):
raise MoulinetteError('operation_interrupted')
raise MoulinetteError("operation_interrupted")
if ret is None or output_as == 'none':
if ret is None or output_as == "none":
return
# Format and print result
if output_as:
if output_as == 'json':
if output_as == "json":
import json
from moulinette.utils.serialize import JSONExtendedEncoder
print(json.dumps(ret, cls=JSONExtendedEncoder))
else:
plain_print_dict(ret)
@ -444,18 +484,18 @@ class Interface(BaseInterface):
# Signals handlers
def _do_authenticate(self, authenticator, help):
def _do_authenticate(self, authenticator):
"""Process the authentication
Handle the core.MoulinetteSignals.authenticate signal.
"""
# TODO: Allow token authentication?
msg = m18n.n(help) if help else m18n.g('password')
return authenticator(password=self._do_prompt(msg, True, False,
color='yellow'))
help = authenticator.extra.get("help")
msg = m18n.n(help) if help else m18n.g("password")
return authenticator(password=self._do_prompt(msg, True, False, color="yellow"))
def _do_prompt(self, message, is_password, confirm, color='blue'):
def _do_prompt(self, message, is_password, confirm, color="blue"):
"""Prompt for a value
Handle the core.MoulinetteSignals.prompt signal.
@ -465,16 +505,15 @@ class Interface(BaseInterface):
"""
if is_password:
prompt = lambda m: getpass.getpass(colorize(m18n.g('colon', m),
color))
prompt = lambda m: getpass.getpass(colorize(m18n.g("colon", m), color))
else:
prompt = lambda m: raw_input(colorize(m18n.g('colon', m), color))
prompt = lambda m: raw_input(colorize(m18n.g("colon", m), color))
value = prompt(message)
if confirm:
m = message[0].lower() + message[1:]
if prompt(m18n.g('confirm', prompt=m)) != value:
raise MoulinetteError('values_mismatch')
if prompt(m18n.g("confirm", prompt=m)) != value:
raise MoulinetteError("values_mismatch")
return value
@ -485,12 +524,12 @@ class Interface(BaseInterface):
"""
if isinstance(message, unicode):
message = message.encode('utf-8')
if style == 'success':
print('{} {}'.format(colorize(m18n.g('success'), 'green'), message))
elif style == 'warning':
print('{} {}'.format(colorize(m18n.g('warning'), 'yellow'), message))
elif style == 'error':
print('{} {}'.format(colorize(m18n.g('error'), 'red'), message))
message = message.encode("utf-8")
if style == "success":
print("{} {}".format(colorize(m18n.g("success"), "green"), message))
elif style == "warning":
print("{} {}".format(colorize(m18n.g("warning"), "yellow"), message))
elif style == "error":
print("{} {}".format(colorize(m18n.g("error"), "red"), message))
else:
print(message)
View file
@ -22,21 +22,25 @@ def read_file(file_path):
Keyword argument:
file_path -- Path to the text file
"""
assert isinstance(file_path, basestring), "Error: file_path '%s' should be a string but is of type '%s' instead" % (file_path, type(file_path))
assert isinstance(file_path, basestring), (
"Error: file_path '%s' should be a string but is of type '%s' instead"
% (file_path, type(file_path))
)
# Check file exists
if not os.path.isfile(file_path):
raise MoulinetteError('file_not_exist', path=file_path)
raise MoulinetteError("file_not_exist", path=file_path)
# Open file and read content
try:
with open(file_path, "r") as f:
file_content = f.read()
except IOError as e:
raise MoulinetteError('cannot_open_file', file=file_path, error=str(e))
except Exception:
raise MoulinetteError('unknown_error_reading_file',
file=file_path, error=str(e))
raise MoulinetteError("cannot_open_file", file=file_path, error=str(e))
except Exception as e:
raise MoulinetteError(
"unknown_error_reading_file", file=file_path, error=str(e)
)
return file_content
@ -56,7 +60,7 @@ def read_json(file_path):
try:
loaded_json = json.loads(file_content)
except ValueError as e:
raise MoulinetteError('corrupted_json', ressource=file_path, error=str(e))
raise MoulinetteError("corrupted_json", ressource=file_path, error=str(e))
return loaded_json
@ -76,7 +80,7 @@ def read_yaml(file_path):
try:
loaded_yaml = yaml.safe_load(file_content)
except Exception as e:
raise MoulinetteError('corrupted_yaml', ressource=file_path, error=str(e))
raise MoulinetteError("corrupted_yaml", ressource=file_path, error=str(e))
return loaded_yaml
@ -96,9 +100,7 @@ def read_toml(file_path):
try:
loaded_toml = toml.loads(file_content, _dict=OrderedDict)
except Exception as e:
raise MoulinetteError(errno.EINVAL,
m18n.g('corrupted_toml',
ressource=file_path, error=str(e)))
raise MoulinetteError("corrupted_toml", ressource=file_path, error=str(e))
return loaded_toml
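A short usage sketch of the readers above (file paths are made up, and the module path is assumed to be `moulinette.utils.filesystem`); all of them raise `MoulinetteError` with a translatable key when the file is missing or cannot be parsed.

```python
from moulinette.utils.filesystem import read_file, read_json

with open("/tmp/example.json", "w") as f:
    f.write('{"enabled": true}')

assert read_json("/tmp/example.json") == {"enabled": True}
print(read_file("/tmp/example.json"))  # raw text content as a string
```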
@ -129,10 +131,11 @@ def read_ldif(file_path, filtred_entries=[]):
parser = LDIFPar(f)
parser.parse()
except IOError as e:
raise MoulinetteError('cannot_open_file', file=file_path, error=str(e))
raise MoulinetteError("cannot_open_file", file=file_path, error=str(e))
except Exception as e:
raise MoulinetteError('unknown_error_reading_file',
file=file_path, error=str(e))
raise MoulinetteError(
"unknown_error_reading_file", file=file_path, error=str(e)
)
return parser.all_records
@ -148,23 +151,34 @@ def write_to_file(file_path, data, file_mode="w"):
file_mode -- Mode used when writing the file. Option meant to be used
by append_to_file to avoid duplicating the code of this function.
"""
assert isinstance(data, basestring) or isinstance(data, list), "Error: data '%s' should be either a string or a list but is of type '%s'" % (data, type(data))
assert not os.path.isdir(file_path), "Error: file_path '%s' point to a dir, it should be a file" % file_path
assert os.path.isdir(os.path.dirname(file_path)), "Error: the path ('%s') base dir ('%s') is not a dir" % (file_path, os.path.dirname(file_path))
assert isinstance(data, basestring) or isinstance(data, list), (
"Error: data '%s' should be either a string or a list but is of type '%s'"
% (data, type(data))
)
assert not os.path.isdir(file_path), (
"Error: file_path '%s' point to a dir, it should be a file" % file_path
)
assert os.path.isdir(os.path.dirname(file_path)), (
"Error: the path ('%s') base dir ('%s') is not a dir"
% (file_path, os.path.dirname(file_path))
)
# If data is a list, check elements are strings and build a single string
if not isinstance(data, basestring):
for element in data:
assert isinstance(element, basestring), "Error: element '%s' should be a string but is of type '%s' instead" % (element, type(element))
data = '\n'.join(data)
assert isinstance(element, basestring), (
"Error: element '%s' should be a string but is of type '%s' instead"
% (element, type(element))
)
data = "\n".join(data)
try:
with open(file_path, file_mode) as f:
f.write(data)
except IOError as e:
raise MoulinetteError('cannot_write_file', file=file_path, error=str(e))
raise MoulinetteError("cannot_write_file", file=file_path, error=str(e))
except Exception as e:
raise MoulinetteError('error_writing_file', file=file_path, error=str(e))
raise MoulinetteError("error_writing_file", file=file_path, error=str(e))
def append_to_file(file_path, data):
@ -189,19 +203,30 @@ def write_to_json(file_path, data):
"""
# Assumptions
assert isinstance(file_path, basestring), "Error: file_path '%s' should be a string but is of type '%s' instead" % (file_path, type(file_path))
assert isinstance(data, dict) or isinstance(data, list), "Error: data '%s' should be a dict or a list but is of type '%s' instead" % (data, type(data))
assert not os.path.isdir(file_path), "Error: file_path '%s' point to a dir, it should be a file" % file_path
assert os.path.isdir(os.path.dirname(file_path)), "Error: the path ('%s') base dir ('%s') is not a dir" % (file_path, os.path.dirname(file_path))
assert isinstance(file_path, basestring), (
"Error: file_path '%s' should be a string but is of type '%s' instead"
% (file_path, type(file_path))
)
assert isinstance(data, dict) or isinstance(data, list), (
"Error: data '%s' should be a dict or a list but is of type '%s' instead"
% (data, type(data))
)
assert not os.path.isdir(file_path), (
"Error: file_path '%s' point to a dir, it should be a file" % file_path
)
assert os.path.isdir(os.path.dirname(file_path)), (
"Error: the path ('%s') base dir ('%s') is not a dir"
% (file_path, os.path.dirname(file_path))
)
# Write dict to file
try:
with open(file_path, "w") as f:
json.dump(data, f)
except IOError as e:
raise MoulinetteError('cannot_write_file', file=file_path, error=str(e))
raise MoulinetteError("cannot_write_file", file=file_path, error=str(e))
except Exception as e:
raise MoulinetteError('error_writing_file', file=file_path, error=str(e))
raise MoulinetteError("error_writing_file", file=file_path, error=str(e))
def write_to_yaml(file_path, data):
@ -223,12 +248,12 @@ def write_to_yaml(file_path, data):
with open(file_path, "w") as f:
yaml.safe_dump(data, f, default_flow_style=False)
except IOError as e:
raise MoulinetteError('cannot_write_file', file=file_path, error=str(e))
raise MoulinetteError("cannot_write_file", file=file_path, error=str(e))
except Exception as e:
raise MoulinetteError('error_writing_file', file=file_path, error=str(e))
raise MoulinetteError("error_writing_file", file=file_path, error=str(e))
def mkdir(path, mode=0o777, parents=False, uid=None, gid=None, force=False):
def mkdir(path, mode=0o0777, parents=False, uid=None, gid=None, force=False):
"""Create a directory with optional features
Create a directory and optionally set its permissions to mode and its
@ -245,7 +270,7 @@ def mkdir(path, mode=0o777, parents=False, uid=None, gid=None, force=False):
"""
if os.path.exists(path) and not force:
raise OSError(errno.EEXIST, m18n.g('folder_exists', path=path))
raise OSError(errno.EEXIST, m18n.g("folder_exists", path=path))
if parents:
# Create parents directories as needed
@ -263,7 +288,9 @@ def mkdir(path, mode=0o777, parents=False, uid=None, gid=None, force=False):
# Create directory and set permissions
try:
oldmask = os.umask(000)
os.mkdir(path, mode)
os.umask(oldmask)
except OSError:
# mimic Python3.2+ os.makedirs exist_ok behaviour
if not force or not os.path.isdir(path):
@ -290,14 +317,14 @@ def chown(path, uid=None, gid=None, recursive=False):
try:
uid = getpwnam(uid).pw_uid
except KeyError:
raise MoulinetteError('unknown_user', user=uid)
raise MoulinetteError("unknown_user", user=uid)
elif uid is None:
uid = -1
if isinstance(gid, basestring):
try:
gid = grp.getgrnam(gid).gr_gid
except KeyError:
raise MoulinetteError('unknown_group', group=gid)
raise MoulinetteError("unknown_group", group=gid)
elif gid is None:
gid = -1
@ -310,7 +337,9 @@ def chown(path, uid=None, gid=None, recursive=False):
for f in files:
os.chown(os.path.join(root, f), uid, gid)
except Exception as e:
raise MoulinetteError('error_changing_file_permissions', path=path, error=str(e))
raise MoulinetteError(
"error_changing_file_permissions", path=path, error=str(e)
)
def chmod(path, mode, fmode=None, recursive=False):
@ -334,7 +363,9 @@ def chmod(path, mode, fmode=None, recursive=False):
for f in files:
os.chmod(os.path.join(root, f), fmode)
except Exception as e:
raise MoulinetteError('error_changing_file_permissions', path=path, error=str(e))
raise MoulinetteError(
"error_changing_file_permissions", path=path, error=str(e)
)
def rm(path, recursive=False, force=False):
@ -353,4 +384,4 @@ def rm(path, recursive=False, force=False):
os.remove(path)
except OSError as e:
if not force:
raise MoulinetteError('error_removing', path=path, error=str(e))
raise MoulinetteError("error_removing", path=path, error=str(e))
View file
@ -3,8 +3,28 @@ import logging
# import all constants because other modules try to import them from this
# module because SUCCESS is defined in this module
from logging import (addLevelName, setLoggerClass, Logger, getLogger, NOTSET, # noqa
DEBUG, INFO, WARNING, ERROR, CRITICAL)
from logging import (
addLevelName,
setLoggerClass,
Logger,
getLogger,
NOTSET, # noqa
DEBUG,
INFO,
WARNING,
ERROR,
CRITICAL,
)
__all__ = [
"NOTSET", # noqa
"DEBUG",
"INFO",
"WARNING",
"ERROR",
"CRITICAL",
"SUCCESS",
]
# Global configuration and functions -----------------------------------
@ -12,27 +32,20 @@ from logging import (addLevelName, setLoggerClass, Logger, getLogger, NOTSET, #
SUCCESS = 25
DEFAULT_LOGGING = {
'version': 1,
'disable_existing_loggers': False,
'formatters': {
'simple': {
'format': '%(asctime)-15s %(levelname)-8s %(name)s - %(message)s'
},
},
'handlers': {
'console': {
'level': 'DEBUG',
'formatter': 'simple',
'class': 'logging.StreamHandler',
'stream': 'ext://sys.stdout',
},
},
'loggers': {
'moulinette': {
'level': 'DEBUG',
'handlers': ['console'],
"version": 1,
"disable_existing_loggers": False,
"formatters": {
"simple": {"format": "%(asctime)-15s %(levelname)-8s %(name)s - %(message)s"},
},
"handlers": {
"console": {
"level": "DEBUG",
"formatter": "simple",
"class": "logging.StreamHandler",
"stream": "ext://sys.stdout",
},
},
"loggers": {"moulinette": {"level": "DEBUG", "handlers": ["console"]}},
}
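A usage sketch of this module (assuming `configure_logging` feeds the given dict to `logging.config.dictConfig`): load the default configuration, then log through the custom SUCCESS level (25) registered by `configure_logging`.

```python
from moulinette.utils import log

log.configure_logging(log.DEFAULT_LOGGING)
logger = log.getActionLogger("moulinette.example")
logger.success("done")  # emitted at the custom SUCCESS level (25)
```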
@ -46,7 +59,7 @@ def configure_logging(logging_config=None):
from logging.config import dictConfig
# add custom logging level and class
addLevelName(SUCCESS, 'SUCCESS')
addLevelName(SUCCESS, "SUCCESS")
setLoggerClass(MoulinetteLogger)
# load configuration from dict
@ -65,7 +78,7 @@ def getHandlersByClass(classinfo, limit=0):
return o
handlers.append(o)
if limit != 0 and len(handlers) > limit:
return handlers[:limit - 1]
return handlers[: limit - 1]
return handlers
@ -79,6 +92,7 @@ class MoulinetteLogger(Logger):
LogRecord extra and can be used with the ActionFilter.
"""
action_id = None
def success(self, msg, *args, **kwargs):
@ -105,11 +119,11 @@ class MoulinetteLogger(Logger):
def _log(self, *args, **kwargs):
"""Append action_id if available to the extra."""
if self.action_id is not None:
extra = kwargs.get('extra', {})
if 'action_id' not in extra:
extra = kwargs.get("extra", {})
if "action_id" not in extra:
# FIXME: Get real action_id instead of logger/current one
extra['action_id'] = _get_action_id()
kwargs['extra'] = extra
extra["action_id"] = _get_action_id()
kwargs["extra"] = extra
return Logger._log(self, *args, **kwargs)
@ -120,7 +134,7 @@ action_id = 0
def _get_action_id():
return '%d.%d' % (pid, action_id)
return "%d.%d" % (pid, action_id)
def start_action_logging():
@ -146,7 +160,7 @@ def getActionLogger(name=None, logger=None, action_id=None):
"""
if not name and not logger:
raise ValueError('Either a name or a logger must be specified')
raise ValueError("Either a name or a logger must be specified")
logger = logger or getLogger(name)
logger.action_id = action_id if action_id else _get_action_id()
@ -164,15 +178,15 @@ class ActionFilter(object):
"""
def __init__(self, message_key='fmessage', strict=False):
def __init__(self, message_key="fmessage", strict=False):
self.message_key = message_key
self.strict = strict
def filter(self, record):
msg = record.getMessage()
action_id = record.__dict__.get('action_id', None)
action_id = record.__dict__.get("action_id", None)
if action_id is not None:
msg = '[{:s}] {:s}'.format(action_id, msg)
msg = "[{:s}] {:s}".format(action_id, msg)
elif self.strict:
return False
record.__dict__[self.message_key] = msg
View file
@ -15,30 +15,30 @@ def download_text(url, timeout=30, expected_status_code=200):
None to ignore the status code.
"""
import requests # lazy loading this module for performance reasons
# Assumptions
assert isinstance(url, str)
# Download file
try:
r = requests.get(url, timeout=timeout)
# Invalid URL
except requests.exceptions.ConnectionError:
raise MoulinetteError('invalid_url', url=url)
# SSL exceptions
except requests.exceptions.SSLError:
raise MoulinetteError('download_ssl_error', url=url)
raise MoulinetteError("download_ssl_error", url=url)
# Invalid URL
except requests.exceptions.ConnectionError:
raise MoulinetteError("invalid_url", url=url)
# Timeout exceptions
except requests.exceptions.Timeout:
raise MoulinetteError('download_timeout', url=url)
raise MoulinetteError("download_timeout", url=url)
# Unknown stuff
except Exception as e:
raise MoulinetteError('download_unknown_error',
url=url, error=str(e))
raise MoulinetteError("download_unknown_error", url=url, error=str(e))
# Assume error if status code is not 200 (OK)
if expected_status_code is not None \
and r.status_code != expected_status_code:
raise MoulinetteError('download_bad_status_code',
url=url, code=str(r.status_code))
if expected_status_code is not None and r.status_code != expected_status_code:
raise MoulinetteError(
"download_bad_status_code", url=url, code=str(r.status_code)
)
return r.text
@ -59,6 +59,6 @@ def download_json(url, timeout=30, expected_status_code=200):
try:
loaded_json = json.loads(text)
except ValueError as e:
raise MoulinetteError('corrupted_json', ressource=url, error=e)
raise MoulinetteError("corrupted_json", ressource=url, error=e)
return loaded_json
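A hypothetical usage sketch (the URL is made up and the module path is assumed to be `moulinette.utils.network`); both helpers raise `MoulinetteError` on connection, SSL, timeout or unexpected-status problems.

```python
from moulinette.utils.network import download_json

release = download_json("https://example.org/release.json", timeout=10)
print(release.get("version"))
```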
View file
@ -11,6 +11,7 @@ except ImportError:
from shlex import quote # Python3 >= 3.3
from .stream import async_file_reading
quote # This line is here to avoid W0611 PEP8 error (see comments above)
# Prevent to import subprocess only for common classes
@ -19,6 +20,7 @@ CalledProcessError = subprocess.CalledProcessError
# Alternative subprocess methods ---------------------------------------
def check_output(args, stderr=subprocess.STDOUT, shell=True, **kwargs):
"""Run command with arguments and return its output as a byte string
@ -31,6 +33,7 @@ def check_output(args, stderr=subprocess.STDOUT, shell=True, **kwargs):
# Call with stream access ----------------------------------------------
def call_async_output(args, callback, **kwargs):
"""Run command and provide its output asynchronously
@ -52,10 +55,9 @@ def call_async_output(args, callback, **kwargs):
Exit status of the command
"""
for a in ['stdout', 'stderr']:
for a in ["stdout", "stderr"]:
if a in kwargs:
raise ValueError('%s argument not allowed, '
'it will be overridden.' % a)
raise ValueError("%s argument not allowed, " "it will be overridden." % a)
if "stdinfo" in kwargs and kwargs["stdinfo"] is not None:
assert len(callback) == 3
@ -72,16 +74,16 @@ def call_async_output(args, callback, **kwargs):
# Validate callback argument
if isinstance(callback, tuple):
if len(callback) < 2:
raise ValueError('callback argument should be a 2-tuple')
kwargs['stdout'] = kwargs['stderr'] = subprocess.PIPE
raise ValueError("callback argument should be a 2-tuple")
kwargs["stdout"] = kwargs["stderr"] = subprocess.PIPE
separate_stderr = True
elif callable(callback):
kwargs['stdout'] = subprocess.PIPE
kwargs['stderr'] = subprocess.STDOUT
kwargs["stdout"] = subprocess.PIPE
kwargs["stderr"] = subprocess.STDOUT
separate_stderr = False
callback = (callback,)
else:
raise ValueError('callback argument must be callable or a 2-tuple')
raise ValueError("callback argument must be callable or a 2-tuple")
# Run the command
p = subprocess.Popen(args, **kwargs)
@@ -101,7 +103,7 @@ def call_async_output(args, callback, **kwargs):
stderr_consum.process_next_line()
if stdinfo:
stdinfo_consum.process_next_line()
time.sleep(.1)
time.sleep(0.1)
stderr_reader.join()
# clear the queues
stdout_consum.process_current_queue()
@@ -111,7 +113,7 @@ def call_async_output(args, callback, **kwargs):
else:
while not stdout_reader.eof():
stdout_consum.process_current_queue()
time.sleep(.1)
time.sleep(0.1)
stdout_reader.join()
# clear the queue
stdout_consum.process_current_queue()
@@ -131,15 +133,15 @@ def call_async_output(args, callback, **kwargs):
while time.time() - start < 10:
if p.poll() is not None:
return p.poll()
time.sleep(.1)
time.sleep(0.1)
return p.poll()
# Call multiple commands -----------------------------------------------
def run_commands(cmds, callback=None, separate_stderr=False, shell=True,
**kwargs):
def run_commands(cmds, callback=None, separate_stderr=False, shell=True, **kwargs):
"""Run multiple commands with error management
Run a list of commands and allow to manage how to treat errors either
@@ -176,18 +178,18 @@ def run_commands(cmds, callback=None, separate_stderr=False, shell=True,
# stdout and stderr are specified by this code later, so they cannot be
# overridden by user input
for a in ['stdout', 'stderr']:
for a in ["stdout", "stderr"]:
if a in kwargs:
raise ValueError('%s argument not allowed, '
'it will be overridden.' % a)
raise ValueError("%s argument not allowed, " "it will be overridden." % a)
# If no callback specified...
if callback is None:
# Raise CalledProcessError on command failure
def callback(r, c, o):
raise CalledProcessError(r, c, o)
elif not callable(callback):
raise ValueError('callback argument must be callable')
raise ValueError("callback argument must be callable")
# Manage stderr
if separate_stderr:
@@ -201,8 +203,9 @@ def run_commands(cmds, callback=None, separate_stderr=False, shell=True,
error = 0
for cmd in cmds:
process = subprocess.Popen(cmd, stdout=subprocess.PIPE,
stderr=_stderr, shell=shell, **kwargs)
process = subprocess.Popen(
cmd, stdout=subprocess.PIPE, stderr=_stderr, shell=shell, **kwargs
)
output = _get_output(*process.communicate())
retcode = process.poll()

View file

@@ -2,11 +2,12 @@ import logging
from json.encoder import JSONEncoder
import datetime
logger = logging.getLogger('moulinette.utils.serialize')
logger = logging.getLogger("moulinette.utils.serialize")
# JSON utilities -------------------------------------------------------
class JSONExtendedEncoder(JSONEncoder):
"""Extended JSON encoder
@@ -26,8 +27,7 @@ class JSONExtendedEncoder(JSONEncoder):
"""Return a serializable object"""
# Convert compatible containers into list
if isinstance(o, set) or (
hasattr(o, '__iter__') and hasattr(o, 'next')):
if isinstance(o, set) or (hasattr(o, "__iter__") and hasattr(o, "next")):
return list(o)
# Display the date in its iso format ISO-8601 Internet Profile (RFC 3339)
@@ -37,6 +37,9 @@ class JSONExtendedEncoder(JSONEncoder):
return o.isoformat()
# Return the repr for object that json can't encode
logger.warning('cannot properly encode in JSON the object %s, '
'returned repr is: %r', type(o), o)
logger.warning(
"cannot properly encode in JSON the object %s, " "returned repr is: %r",
type(o),
o,
)
return repr(o)
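(Illustration only, not part of this diff: how the extended encoder is meant to be plugged into json.dumps; the data below is made up.)

import datetime
import json

from moulinette.utils.serialize import JSONExtendedEncoder

data = {
    "tags": set(["web", "mail"]),            # sets are turned into lists
    "date": datetime.datetime(2020, 5, 9),   # datetimes become ISO-8601 strings
}
# Without cls=JSONExtendedEncoder this would raise a TypeError on the set.
print(json.dumps(data, cls=JSONExtendedEncoder, sort_keys=True))
# e.g. {"date": "2020-05-09T00:00:00", "tags": ["mail", "web"]}  (set order may vary)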

View file

@@ -7,6 +7,7 @@ from multiprocessing.queues import SimpleQueue
# Read from a stream ---------------------------------------------------
class AsynchronousFileReader(Process):
"""
@@ -20,8 +21,8 @@ class AsynchronousFileReader(Process):
"""
def __init__(self, fd, queue):
assert hasattr(queue, 'put')
assert hasattr(queue, 'empty')
assert hasattr(queue, "put")
assert hasattr(queue, "empty")
assert isinstance(fd, int) or callable(fd.readline)
Process.__init__(self)
self._fd = fd
@@ -34,7 +35,7 @@ class AsynchronousFileReader(Process):
# Typically that's for stdout/stderr pipes
# We can read the stuff easily with 'readline'
if not isinstance(self._fd, int):
for line in iter(self._fd.readline, ''):
for line in iter(self._fd.readline, ""):
self._queue.put(line)
# Else, it got opened with os.open() and we have to read it
@@ -42,9 +43,16 @@ class AsynchronousFileReader(Process):
else:
data = ""
while True:
try:
# Try to read (non-blockingly) a few bytes, append them to
# the buffer
data += os.read(self._fd, 50)
except Exception as e:
print(
"from moulinette.utils.stream: could not read file descriptor : %s"
% str(e)
)
continue
# If nobody's writing in there anymore, get out
if not data and os.fstat(self._fd).st_nlink == 0:
@@ -52,10 +60,10 @@ class AsynchronousFileReader(Process):
# If we have data, extract a line (ending with \n) and feed
# it to the consumer
if data and '\n' in data:
lines = data.split('\n')
if data and "\n" in data:
lines = data.split("\n")
self._queue.put(lines[0])
data = '\n'.join(lines[1:])
data = "\n".join(lines[1:])
else:
time.sleep(0.05)
@@ -75,7 +83,6 @@ class AsynchronousFileReader(Process):
class Consummer(object):
def __init__(self, queue, callback):
self.queue = queue
self.callback = callback

View file

@@ -6,6 +6,7 @@ import binascii
# Pattern searching ----------------------------------------------------
def search(pattern, text, count=0, flags=0):
"""Search for pattern in a text
@@ -46,7 +47,7 @@ def searchf(pattern, path, count=0, flags=re.MULTILINE):
content by using the search function.
"""
with open(path, 'r+') as f:
with open(path, "r+") as f:
data = mmap.mmap(f.fileno(), 0)
match = search(pattern, data, count, flags)
data.close()
@@ -55,6 +56,7 @@ def searchf(pattern, path, count=0, flags=re.MULTILINE):
# Text formatting ------------------------------------------------------
def prependlines(text, prepend):
"""Prepend a string to each line of a text"""
lines = text.splitlines(True)
@@ -63,6 +65,7 @@ def prependlines(text, prepend):
# Randomize ------------------------------------------------------------
def random_ascii(length=20):
"""Return a random ascii string"""
return binascii.hexlify(os.urandom(length)).decode('ascii')
return binascii.hexlify(os.urandom(length)).decode("ascii")

View file

@@ -1,2 +1,8 @@
[flake8]
ignore = E501,E128,E731,E722
ignore =
E501,
E128,
E731,
E722,
W503 # Black formatter conflict
E203 # Black formatter conflict

View file

@@ -34,6 +34,9 @@ setup(name='Moulinette',
'pytz',
'pyyaml',
'toml',
'python-ldap',
'gevent-websocket',
'bottle',
],
tests_require=[
'pytest',
@@ -42,5 +45,6 @@ setup(name='Moulinette',
'pytest-mock',
'requests',
'requests-mock',
'webtest'
],
)

View file

@@ -0,0 +1,123 @@
#############################
# Global parameters #
#############################
_global:
configuration:
authenticate:
- all
authenticator:
default:
vendor: dummy
help: Dummy Password
yoloswag:
vendor: dummy
help: Dummy Yoloswag Password
ldap:
vendor: ldap
help: admin_password
parameters:
uri: ldap://localhost:8080
base_dn: dc=yunohost,dc=org
user_rdn: cn=admin,dc=yunohost,dc=org
arguments:
-v:
full: --version
help: Display Yoloswag versions
action: callback
callback:
method: test.src.testauth.yoloswag_version
return: true
-w:
full: --wersion
help: Not existing function
action: callback
callback:
method: test.src.testauth.not_existing_function
return: true
#############################
# Test Actions #
#############################
testauth:
actions:
none:
api: GET /test-auth/none
configuration:
authenticate: false
default:
api: GET /test-auth/default
only-api:
api: GET /test-auth/only-api
configuration:
authenticate:
- api
only-cli:
api: GET /test-auth/only-cli
configuration:
authenticate:
- cli
other-profile:
api: GET /test-auth/other-profile
configuration:
authenticate:
- all
authenticator: yoloswag
ldap:
api: GET /test-auth/ldap
configuration:
authenticate:
- all
authenticator: ldap
with_arg:
api: GET /test-auth/with_arg/<super_arg>
arguments:
super_arg:
help: Super Arg
with_extra_str_only:
api: GET /test-auth/with_extra_str_only/<only_a_str>
arguments:
only_a_str:
help: Only a String
extra:
pattern:
- !!str ^[a-zA-Z]
- "pattern_only_a_str"
with_type_int:
api: GET /test-auth/with_type_int/<only_an_int>
arguments:
only_an_int:
help: Only an Int
type: int
subcategories:
subcat:
actions:
none:
api: GET /test-auth/subcat/none
configuration:
authenticate: false
default:
api: GET /test-auth/subcat/default
post:
api: POST /test-auth/subcat/post
configuration:
authenticate:
- all
authenticator: default
other-profile:
api: GET /test-auth/subcat/other-profile
configuration:
authenticator: yoloswag
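(Not part of this diff: the actions declared above are ultimately dispatched to plain Python functions in the test source tree; test/src/testauth.py is implied by the callback paths, but its contents are not shown here. A hypothetical sketch, assuming moulinette's usual <category>_<action> naming convention:)

# Hypothetical action implementations (names and return values are
# illustrative; only the callback target yoloswag_version is referenced
# explicitly by the actionsmap above).
def testauth_none():
    return "ok, no auth required"

def testauth_with_arg(super_arg):
    # super_arg is bound from the <super_arg> URL segment / CLI argument
    return super_arg

def yoloswag_version(*args, **kwargs):
    # target of the --version callback declared in _global.arguments
    return "yoloswag 0.0.1"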

View file

@@ -1,18 +1,22 @@
"""Pytest fixtures for testing."""
import toml
import yaml
import json
import os
import shutil
import pytest
from src.ldap_server import LDAPServer
def patch_init(moulinette):
"""Configure moulinette to use the YunoHost namespace."""
old_init = moulinette.core.Moulinette18n.__init__
def monkey_path_i18n_init(self, package, default_locale='en'):
def monkey_path_i18n_init(self, package, default_locale="en"):
old_init(self, package, default_locale)
self.load_namespace('moulinette')
self.load_namespace("moulinette")
moulinette.core.Moulinette18n.__init__ = monkey_path_i18n_init
@@ -23,7 +27,7 @@ def patch_translate(moulinette):
def new_translate(self, key, *args, **kwargs):
if key not in self._translations[self.default_locale].keys():
message = 'Unable to retrieve key %s for default locale!' % key
message = "Unable to retrieve key %s for default locale!" % key
raise KeyError(message)
return old_translate(self, key, *args, **kwargs)
@@ -38,86 +42,176 @@ def patch_translate(moulinette):
def patch_logging(moulinette):
"""Configure logging to use the custom logger."""
handlers = set(['tty'])
handlers = set(["tty", "api"])
root_handlers = set(handlers)
level = 'INFO'
tty_level = 'SUCCESS'
level = "INFO"
tty_level = "INFO"
logging = {
'version': 1,
'disable_existing_loggers': True,
'formatters': {
'tty-debug': {
'format': '%(relativeCreated)-4d %(fmessage)s'
},
'precise': {
'format': '%(asctime)-15s %(levelname)-8s %(name)s %(funcName)s - %(fmessage)s' # noqa
return {
"version": 1,
"disable_existing_loggers": True,
"formatters": {
"tty-debug": {"format": "%(relativeCreated)-4d %(fmessage)s"},
"precise": {
"format": "%(asctime)-15s %(levelname)-8s %(name)s %(funcName)s - %(fmessage)s" # noqa
},
},
'filters': {
'action': {
'()': 'moulinette.utils.log.ActionFilter',
"filters": {"action": {"()": "moulinette.utils.log.ActionFilter"}},
"handlers": {
"api": {
"level": level,
"class": "moulinette.interfaces.api.APIQueueHandler",
},
"tty": {
"level": tty_level,
"class": "moulinette.interfaces.cli.TTYHandler",
"formatter": "",
},
},
'handlers': {
'tty': {
'level': tty_level,
'class': 'moulinette.interfaces.cli.TTYHandler',
'formatter': '',
"loggers": {
"moulinette": {"level": level, "handlers": [], "propagate": True},
"moulinette.interface": {
"level": level,
"handlers": handlers,
"propagate": False,
},
},
'loggers': {
'moulinette': {
'level': level,
'handlers': [],
'propagate': True,
},
'moulinette.interface': {
'level': level,
'handlers': handlers,
'propagate': False,
},
},
'root': {
'level': level,
'handlers': root_handlers,
},
"root": {"level": level, "handlers": root_handlers},
}
moulinette.init(
logging_config=logging,
_from_source=False
)
def patch_lock(moulinette):
moulinette.core.MoulinetteLock.base_lockfile = "moulinette_%s.lock"
@pytest.fixture(scope='session', autouse=True)
def moulinette():
@pytest.fixture(scope="session", autouse=True)
def moulinette(tmp_path_factory):
import moulinette
# Can't call the namespace just 'test' because
# that would lead to some "import test" not importing the right stuff
namespace = "moulitest"
tmp_cache = str(tmp_path_factory.mktemp("cache"))
tmp_data = str(tmp_path_factory.mktemp("data"))
tmp_lib = str(tmp_path_factory.mktemp("lib"))
os.environ["MOULINETTE_CACHE_DIR"] = tmp_cache
os.environ["MOULINETTE_DATA_DIR"] = tmp_data
os.environ["MOULINETTE_LIB_DIR"] = tmp_lib
shutil.copytree("./test/actionsmap", "%s/actionsmap" % tmp_data)
shutil.copytree("./test/src", "%s/%s" % (tmp_lib, namespace))
shutil.copytree("./test/locales", "%s/%s/locales" % (tmp_lib, namespace))
patch_init(moulinette)
patch_translate(moulinette)
patch_logging(moulinette)
patch_lock(moulinette)
logging = patch_logging(moulinette)
moulinette.init(logging_config=logging, _from_source=False)
return moulinette
@pytest.fixture
def moulinette_webapi(moulinette):
from webtest import TestApp
from webtest.app import CookiePolicy
# Dirty hack needed, otherwise cookies ain't reused between request .. not
# sure why :|
def return_true(self, cookie, request):
return True
CookiePolicy.return_ok_secure = return_true
moulinette_webapi = moulinette.core.init_interface(
"api",
kwargs={"routes": {}, "use_websocket": False},
actionsmap={"namespaces": ["moulitest"], "use_cache": True},
)
return TestApp(moulinette_webapi._app)
@pytest.fixture
def moulinette_cli(moulinette, mocker):
# Dirty hack needed, otherwise cookies ain't reused between request .. not
# sure why :|
import argparse
parser = argparse.ArgumentParser(add_help=False)
parser.add_argument(
"--debug",
action="store_true",
default=False,
help="Log and print debug messages",
)
mocker.patch("os.isatty", return_value=True)
moulinette_cli = moulinette.core.init_interface(
"cli",
actionsmap={
"namespaces": ["moulitest"],
"use_cache": False,
"parser_kwargs": {"top_parser": parser},
},
)
mocker.stopall()
return moulinette_cli
@pytest.fixture
def test_file(tmp_path):
test_text = 'foo\nbar\n'
test_file = tmp_path / 'test.txt'
test_text = "foo\nbar\n"
test_file = tmp_path / "test.txt"
test_file.write_bytes(test_text)
return test_file
@pytest.fixture
def test_json(tmp_path):
test_json = json.dumps({'foo': 'bar'})
test_file = tmp_path / 'test.json'
test_json = json.dumps({"foo": "bar"})
test_file = tmp_path / "test.json"
test_file.write_bytes(test_json)
return test_file
@pytest.fixture
def test_yaml(tmp_path):
test_yaml = yaml.dump({"foo": "bar"})
test_file = tmp_path / "test.txt"
test_file.write_bytes(test_yaml)
return test_file
@pytest.fixture
def test_toml(tmp_path):
test_toml = toml.dumps({"foo": "bar"})
test_file = tmp_path / "test.txt"
test_file.write_bytes(str(test_toml))
return test_file
@pytest.fixture
def test_ldif(tmp_path):
test_file = tmp_path / "test.txt"
from ldif import LDIFWriter
writer = LDIFWriter(open(str(test_file), "wb"))
writer.unparse(
"mail=alice@example.com",
{
"cn": ["Alice Alison"],
"mail": ["alice@example.com"],
"objectclass": ["top", "person"],
},
)
return test_file
@pytest.fixture
def user():
return os.getlogin()
@@ -125,4 +219,12 @@ def user():
@pytest.fixture
def test_url():
return 'https://some.test.url/yolo.txt'
return "https://some.test.url/yolo.txt"
@pytest.fixture
def ldap_server():
server = LDAPServer()
server.start()
yield server
server.stop()
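(Illustration, not part of this diff: a test module can then request these fixtures by name. The test names below and the expected 401 status are assumptions about how authenticated routes behave, not something stated in this changeset.)

# Hypothetical tests built on the fixtures above.
def test_request_needs_auth(moulinette_webapi):
    # /test-auth/default is declared with the default (authenticated)
    # configuration in the moulitest actionsmap, so an anonymous call
    # is expected to be rejected.
    moulinette_webapi.get("/test-auth/default", status=401)

def test_ldap_server_lifecycle(ldap_server):
    # ldap_server is the started LDAPServer instance; it is stopped
    # automatically after the test by the fixture's teardown.
    assert ldap_server is not None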

View file

View file

@@ -0,0 +1,84 @@
parents:
ou=users:
ou: users
objectClass:
- organizationalUnit
- top
ou=domains:
ou: domains
objectClass:
- organizationalUnit
- top
ou=apps:
ou: apps
objectClass:
- organizationalUnit
- top
ou=permission:
ou: permission
objectClass:
- organizationalUnit
- top
ou=groups:
ou: groups
objectClass:
- organizationalUnit
- top
ou=sudo:
ou: sudo
objectClass:
- organizationalUnit
- top
children:
cn=admin,ou=sudo:
cn: admin
sudoUser: admin
sudoHost: ALL
sudoCommand: ALL
sudoOption: "!authenticate"
objectClass:
- sudoRole
- top
cn=admins,ou=groups:
cn: admins
gidNumber: "4001"
memberUid: admin
objectClass:
- posixGroup
- top
cn=all_users,ou=groups:
cn: all_users
gidNumber: "4002"
objectClass:
- posixGroup
- groupOfNamesYnh
cn=visitors,ou=groups:
cn: visitors
gidNumber: "4003"
objectClass:
- posixGroup
- groupOfNamesYnh
depends_children:
cn=mail.main,ou=permission:
cn: mail.main
gidNumber: "5001"
objectClass:
- posixGroup
- permissionYnh
groupPermission:
- "cn=all_users,ou=groups,dc=yunohost,dc=org"
cn=xmpp.main,ou=permission:
cn: xmpp.main
gidNumber: "5002"
objectClass:
- posixGroup
- permissionYnh
groupPermission:
- "cn=all_users,ou=groups,dc=yunohost,dc=org"

View file

@@ -0,0 +1,610 @@
# OpenLDAP Core schema
# $OpenLDAP$
## This work is part of OpenLDAP Software <http://www.openldap.org/>.
##
## Copyright 1998-2019 The OpenLDAP Foundation.
## All rights reserved.
##
## Redistribution and use in source and binary forms, with or without
## modification, are permitted only as authorized by the OpenLDAP
## Public License.
##
## A copy of this license is available in the file LICENSE in the
## top-level directory of the distribution or, alternatively, at
## <http://www.OpenLDAP.org/license.html>.
#
## Portions Copyright (C) The Internet Society (1997-2006).
## All Rights Reserved.
##
## This document and translations of it may be copied and furnished to
## others, and derivative works that comment on or otherwise explain it
## or assist in its implementation may be prepared, copied, published
## and distributed, in whole or in part, without restriction of any
## kind, provided that the above copyright notice and this paragraph are
## included on all such copies and derivative works. However, this
## document itself may not be modified in any way, such as by removing
## the copyright notice or references to the Internet Society or other
## Internet organizations, except as needed for the purpose of
## developing Internet standards in which case the procedures for
## copyrights defined in the Internet Standards process must be
## followed, or as required to translate it into languages other than
## English.
##
## The limited permissions granted above are perpetual and will not be
## revoked by the Internet Society or its successors or assigns.
##
## This document and the information contained herein is provided on an
## "AS IS" basis and THE INTERNET SOCIETY AND THE INTERNET ENGINEERING
## TASK FORCE DISCLAIMS ALL WARRANTIES, EXPRESS OR IMPLIED, INCLUDING
## BUT NOT LIMITED TO ANY WARRANTY THAT THE USE OF THE INFORMATION
## HEREIN WILL NOT INFRINGE ANY RIGHTS OR ANY IMPLIED WARRANTIES OF
## MERCHANTABILITY OR FITNESS FOR A PARTICULAR PURPOSE.
#
#
# Includes LDAPv3 schema items from:
# RFC 2252/2256 (LDAPv3)
#
# Select standard track schema items:
# RFC 1274 (uid/dc)
# RFC 2079 (URI)
# RFC 2247 (dc/dcObject)
# RFC 2587 (PKI)
# RFC 2589 (Dynamic Directory Services)
# RFC 4524 (associatedDomain)
#
# Select informational schema items:
# RFC 2377 (uidObject)
#
# Standard attribute types from RFC 2256
#
# system schema
#attributetype ( 2.5.4.0 NAME 'objectClass'
# DESC 'RFC2256: object classes of the entity'
# EQUALITY objectIdentifierMatch
# SYNTAX 1.3.6.1.4.1.1466.115.121.1.38 )
# system schema
#attributetype ( 2.5.4.1 NAME ( 'aliasedObjectName' 'aliasedEntryName' )
# DESC 'RFC2256: name of aliased object'
# EQUALITY distinguishedNameMatch
# SYNTAX 1.3.6.1.4.1.1466.115.121.1.12 SINGLE-VALUE )
attributetype ( 2.5.4.2 NAME 'knowledgeInformation'
DESC 'RFC2256: knowledge information'
EQUALITY caseIgnoreMatch
SYNTAX 1.3.6.1.4.1.1466.115.121.1.15{32768} )
# system schema
#attributetype ( 2.5.4.3 NAME ( 'cn' 'commonName' )
# DESC 'RFC2256: common name(s) for which the entity is known by'
# SUP name )
attributetype ( 2.5.4.4 NAME ( 'sn' 'surname' )
DESC 'RFC2256: last (family) name(s) for which the entity is known by'
SUP name )
attributetype ( 2.5.4.5 NAME 'serialNumber'
DESC 'RFC2256: serial number of the entity'
EQUALITY caseIgnoreMatch
SUBSTR caseIgnoreSubstringsMatch
SYNTAX 1.3.6.1.4.1.1466.115.121.1.44{64} )
# RFC 4519 definition ('countryName' in X.500 and RFC2256)
attributetype ( 2.5.4.6 NAME ( 'c' 'countryName' )
DESC 'RFC4519: two-letter ISO-3166 country code'
SUP name
SYNTAX 1.3.6.1.4.1.1466.115.121.1.11
SINGLE-VALUE )
#attributetype ( 2.5.4.6 NAME ( 'c' 'countryName' )
# DESC 'RFC2256: ISO-3166 country 2-letter code'
# SUP name SINGLE-VALUE )
attributetype ( 2.5.4.7 NAME ( 'l' 'localityName' )
DESC 'RFC2256: locality which this object resides in'
SUP name )
attributetype ( 2.5.4.8 NAME ( 'st' 'stateOrProvinceName' )
DESC 'RFC2256: state or province which this object resides in'
SUP name )
attributetype ( 2.5.4.9 NAME ( 'street' 'streetAddress' )
DESC 'RFC2256: street address of this object'
EQUALITY caseIgnoreMatch
SUBSTR caseIgnoreSubstringsMatch
SYNTAX 1.3.6.1.4.1.1466.115.121.1.15{128} )
attributetype ( 2.5.4.10 NAME ( 'o' 'organizationName' )
DESC 'RFC2256: organization this object belongs to'
SUP name )
attributetype ( 2.5.4.11 NAME ( 'ou' 'organizationalUnitName' )
DESC 'RFC2256: organizational unit this object belongs to'
SUP name )
attributetype ( 2.5.4.12 NAME 'title'
DESC 'RFC2256: title associated with the entity'
SUP name )
# system schema
#attributetype ( 2.5.4.13 NAME 'description'
# DESC 'RFC2256: descriptive information'
# EQUALITY caseIgnoreMatch
# SUBSTR caseIgnoreSubstringsMatch
# SYNTAX 1.3.6.1.4.1.1466.115.121.1.15{1024} )
# Deprecated by enhancedSearchGuide
attributetype ( 2.5.4.14 NAME 'searchGuide'
DESC 'RFC2256: search guide, deprecated by enhancedSearchGuide'
SYNTAX 1.3.6.1.4.1.1466.115.121.1.25 )
attributetype ( 2.5.4.15 NAME 'businessCategory'
DESC 'RFC2256: business category'
EQUALITY caseIgnoreMatch
SUBSTR caseIgnoreSubstringsMatch
SYNTAX 1.3.6.1.4.1.1466.115.121.1.15{128} )
attributetype ( 2.5.4.16 NAME 'postalAddress'
DESC 'RFC2256: postal address'
EQUALITY caseIgnoreListMatch
SUBSTR caseIgnoreListSubstringsMatch
SYNTAX 1.3.6.1.4.1.1466.115.121.1.41 )
attributetype ( 2.5.4.17 NAME 'postalCode'
DESC 'RFC2256: postal code'
EQUALITY caseIgnoreMatch
SUBSTR caseIgnoreSubstringsMatch
SYNTAX 1.3.6.1.4.1.1466.115.121.1.15{40} )
attributetype ( 2.5.4.18 NAME 'postOfficeBox'
DESC 'RFC2256: Post Office Box'
EQUALITY caseIgnoreMatch
SUBSTR caseIgnoreSubstringsMatch
SYNTAX 1.3.6.1.4.1.1466.115.121.1.15{40} )
attributetype ( 2.5.4.19 NAME 'physicalDeliveryOfficeName'
DESC 'RFC2256: Physical Delivery Office Name'
EQUALITY caseIgnoreMatch
SUBSTR caseIgnoreSubstringsMatch
SYNTAX 1.3.6.1.4.1.1466.115.121.1.15{128} )
attributetype ( 2.5.4.20 NAME 'telephoneNumber'
DESC 'RFC2256: Telephone Number'
EQUALITY telephoneNumberMatch
SUBSTR telephoneNumberSubstringsMatch
SYNTAX 1.3.6.1.4.1.1466.115.121.1.50{32} )
attributetype ( 2.5.4.21 NAME 'telexNumber'
DESC 'RFC2256: Telex Number'
SYNTAX 1.3.6.1.4.1.1466.115.121.1.52 )
attributetype ( 2.5.4.22 NAME 'teletexTerminalIdentifier'
DESC 'RFC2256: Teletex Terminal Identifier'
SYNTAX 1.3.6.1.4.1.1466.115.121.1.51 )
attributetype ( 2.5.4.23 NAME ( 'facsimileTelephoneNumber' 'fax' )
DESC 'RFC2256: Facsimile (Fax) Telephone Number'
SYNTAX 1.3.6.1.4.1.1466.115.121.1.22 )
attributetype ( 2.5.4.24 NAME 'x121Address'
DESC 'RFC2256: X.121 Address'
EQUALITY numericStringMatch
SUBSTR numericStringSubstringsMatch
SYNTAX 1.3.6.1.4.1.1466.115.121.1.36{15} )
attributetype ( 2.5.4.25 NAME 'internationaliSDNNumber'
DESC 'RFC2256: international ISDN number'
EQUALITY numericStringMatch
SUBSTR numericStringSubstringsMatch
SYNTAX 1.3.6.1.4.1.1466.115.121.1.36{16} )
attributetype ( 2.5.4.26 NAME 'registeredAddress'
DESC 'RFC2256: registered postal address'
SUP postalAddress
SYNTAX 1.3.6.1.4.1.1466.115.121.1.41 )
attributetype ( 2.5.4.27 NAME 'destinationIndicator'
DESC 'RFC2256: destination indicator'
EQUALITY caseIgnoreMatch
SUBSTR caseIgnoreSubstringsMatch
SYNTAX 1.3.6.1.4.1.1466.115.121.1.44{128} )
attributetype ( 2.5.4.28 NAME 'preferredDeliveryMethod'
DESC 'RFC2256: preferred delivery method'
SYNTAX 1.3.6.1.4.1.1466.115.121.1.14
SINGLE-VALUE )
attributetype ( 2.5.4.29 NAME 'presentationAddress'
DESC 'RFC2256: presentation address'
EQUALITY presentationAddressMatch
SYNTAX 1.3.6.1.4.1.1466.115.121.1.43
SINGLE-VALUE )
attributetype ( 2.5.4.30 NAME 'supportedApplicationContext'
DESC 'RFC2256: supported application context'
EQUALITY objectIdentifierMatch
SYNTAX 1.3.6.1.4.1.1466.115.121.1.38 )
attributetype ( 2.5.4.31 NAME 'member'
DESC 'RFC2256: member of a group'
SUP distinguishedName )
attributetype ( 2.5.4.32 NAME 'owner'
DESC 'RFC2256: owner (of the object)'
SUP distinguishedName )
attributetype ( 2.5.4.33 NAME 'roleOccupant'
DESC 'RFC2256: occupant of role'
SUP distinguishedName )
# system schema
#attributetype ( 2.5.4.34 NAME 'seeAlso'
# DESC 'RFC2256: DN of related object'
# SUP distinguishedName )
# system schema
#attributetype ( 2.5.4.35 NAME 'userPassword'
# DESC 'RFC2256/2307: password of user'
# EQUALITY octetStringMatch
# SYNTAX 1.3.6.1.4.1.1466.115.121.1.40{128} )
# Must be transferred using ;binary
# with certificateExactMatch rule (per X.509)
attributetype ( 2.5.4.36 NAME 'userCertificate'
DESC 'RFC2256: X.509 user certificate, use ;binary'
EQUALITY certificateExactMatch
SYNTAX 1.3.6.1.4.1.1466.115.121.1.8 )
# Must be transferred using ;binary
# with certificateExactMatch rule (per X.509)
attributetype ( 2.5.4.37 NAME 'cACertificate'
DESC 'RFC2256: X.509 CA certificate, use ;binary'
EQUALITY certificateExactMatch
SYNTAX 1.3.6.1.4.1.1466.115.121.1.8 )
# Must be transferred using ;binary
attributetype ( 2.5.4.38 NAME 'authorityRevocationList'
DESC 'RFC2256: X.509 authority revocation list, use ;binary'
SYNTAX 1.3.6.1.4.1.1466.115.121.1.9 )
# Must be transferred using ;binary
attributetype ( 2.5.4.39 NAME 'certificateRevocationList'
DESC 'RFC2256: X.509 certificate revocation list, use ;binary'
SYNTAX 1.3.6.1.4.1.1466.115.121.1.9 )
# Must be stored and requested in the binary form
attributetype ( 2.5.4.40 NAME 'crossCertificatePair'
DESC 'RFC2256: X.509 cross certificate pair, use ;binary'
SYNTAX 1.3.6.1.4.1.1466.115.121.1.10 )
# system schema
#attributetype ( 2.5.4.41 NAME 'name'
# EQUALITY caseIgnoreMatch
# SUBSTR caseIgnoreSubstringsMatch
# SYNTAX 1.3.6.1.4.1.1466.115.121.1.15{32768} )
attributetype ( 2.5.4.42 NAME ( 'givenName' 'gn' )
DESC 'RFC2256: first name(s) for which the entity is known by'
SUP name )
attributetype ( 2.5.4.43 NAME 'initials'
DESC 'RFC2256: initials of some or all of names, but not the surname(s).'
SUP name )
attributetype ( 2.5.4.44 NAME 'generationQualifier'
DESC 'RFC2256: name qualifier indicating a generation'
SUP name )
attributetype ( 2.5.4.45 NAME 'x500UniqueIdentifier'
DESC 'RFC2256: X.500 unique identifier'
EQUALITY bitStringMatch
SYNTAX 1.3.6.1.4.1.1466.115.121.1.6 )
attributetype ( 2.5.4.46 NAME 'dnQualifier'
DESC 'RFC2256: DN qualifier'
EQUALITY caseIgnoreMatch
ORDERING caseIgnoreOrderingMatch
SUBSTR caseIgnoreSubstringsMatch
SYNTAX 1.3.6.1.4.1.1466.115.121.1.44 )
attributetype ( 2.5.4.47 NAME 'enhancedSearchGuide'
DESC 'RFC2256: enhanced search guide'
SYNTAX 1.3.6.1.4.1.1466.115.121.1.21 )
attributetype ( 2.5.4.48 NAME 'protocolInformation'
DESC 'RFC2256: protocol information'
EQUALITY protocolInformationMatch
SYNTAX 1.3.6.1.4.1.1466.115.121.1.42 )
# system schema
#attributetype ( 2.5.4.49 NAME 'distinguishedName'
# EQUALITY distinguishedNameMatch
# SYNTAX 1.3.6.1.4.1.1466.115.121.1.12 )
attributetype ( 2.5.4.50 NAME 'uniqueMember'
DESC 'RFC2256: unique member of a group'
EQUALITY uniqueMemberMatch
SYNTAX 1.3.6.1.4.1.1466.115.121.1.34 )
attributetype ( 2.5.4.51 NAME 'houseIdentifier'
DESC 'RFC2256: house identifier'
EQUALITY caseIgnoreMatch
SUBSTR caseIgnoreSubstringsMatch
SYNTAX 1.3.6.1.4.1.1466.115.121.1.15{32768} )
# Must be transferred using ;binary
attributetype ( 2.5.4.52 NAME 'supportedAlgorithms'
DESC 'RFC2256: supported algorithms'
SYNTAX 1.3.6.1.4.1.1466.115.121.1.49 )
# Must be transferred using ;binary
attributetype ( 2.5.4.53 NAME 'deltaRevocationList'
DESC 'RFC2256: delta revocation list; use ;binary'
SYNTAX 1.3.6.1.4.1.1466.115.121.1.9 )
attributetype ( 2.5.4.54 NAME 'dmdName'
DESC 'RFC2256: name of DMD'
SUP name )
attributetype ( 2.5.4.65 NAME 'pseudonym'
DESC 'X.520(4th): pseudonym for the object'
SUP name )
# Standard object classes from RFC2256
# system schema
#objectclass ( 2.5.6.0 NAME 'top'
# DESC 'RFC2256: top of the superclass chain'
# ABSTRACT
# MUST objectClass )
# system schema
#objectclass ( 2.5.6.1 NAME 'alias'
# DESC 'RFC2256: an alias'
# SUP top STRUCTURAL
# MUST aliasedObjectName )
objectclass ( 2.5.6.2 NAME 'country'
DESC 'RFC2256: a country'
SUP top STRUCTURAL
MUST c
MAY ( searchGuide $ description ) )
objectclass ( 2.5.6.3 NAME 'locality'
DESC 'RFC2256: a locality'
SUP top STRUCTURAL
MAY ( street $ seeAlso $ searchGuide $ st $ l $ description ) )
objectclass ( 2.5.6.4 NAME 'organization'
DESC 'RFC2256: an organization'
SUP top STRUCTURAL
MUST o
MAY ( userPassword $ searchGuide $ seeAlso $ businessCategory $
x121Address $ registeredAddress $ destinationIndicator $
preferredDeliveryMethod $ telexNumber $ teletexTerminalIdentifier $
telephoneNumber $ internationaliSDNNumber $
facsimileTelephoneNumber $ street $ postOfficeBox $ postalCode $
postalAddress $ physicalDeliveryOfficeName $ st $ l $ description ) )
objectclass ( 2.5.6.5 NAME 'organizationalUnit'
DESC 'RFC2256: an organizational unit'
SUP top STRUCTURAL
MUST ou
MAY ( userPassword $ searchGuide $ seeAlso $ businessCategory $
x121Address $ registeredAddress $ destinationIndicator $
preferredDeliveryMethod $ telexNumber $ teletexTerminalIdentifier $
telephoneNumber $ internationaliSDNNumber $
facsimileTelephoneNumber $ street $ postOfficeBox $ postalCode $
postalAddress $ physicalDeliveryOfficeName $ st $ l $ description ) )
objectclass ( 2.5.6.6 NAME 'person'
DESC 'RFC2256: a person'
SUP top STRUCTURAL
MUST ( sn $ cn )
MAY ( userPassword $ telephoneNumber $ seeAlso $ description ) )
objectclass ( 2.5.6.7 NAME 'organizationalPerson'
DESC 'RFC2256: an organizational person'
SUP person STRUCTURAL
MAY ( title $ x121Address $ registeredAddress $ destinationIndicator $
preferredDeliveryMethod $ telexNumber $ teletexTerminalIdentifier $
telephoneNumber $ internationaliSDNNumber $
facsimileTelephoneNumber $ street $ postOfficeBox $ postalCode $
postalAddress $ physicalDeliveryOfficeName $ ou $ st $ l ) )
objectclass ( 2.5.6.8 NAME 'organizationalRole'
DESC 'RFC2256: an organizational role'
SUP top STRUCTURAL
MUST cn
MAY ( x121Address $ registeredAddress $ destinationIndicator $
preferredDeliveryMethod $ telexNumber $ teletexTerminalIdentifier $
telephoneNumber $ internationaliSDNNumber $ facsimileTelephoneNumber $
seeAlso $ roleOccupant $ preferredDeliveryMethod $ street $
postOfficeBox $ postalCode $ postalAddress $
physicalDeliveryOfficeName $ ou $ st $ l $ description ) )
objectclass ( 2.5.6.9 NAME 'groupOfNames'
DESC 'RFC2256: a group of names (DNs)'
SUP top STRUCTURAL
MUST ( member $ cn )
MAY ( businessCategory $ seeAlso $ owner $ ou $ o $ description ) )
objectclass ( 2.5.6.10 NAME 'residentialPerson'
DESC 'RFC2256: an residential person'
SUP person STRUCTURAL
MUST l
MAY ( businessCategory $ x121Address $ registeredAddress $
destinationIndicator $ preferredDeliveryMethod $ telexNumber $
teletexTerminalIdentifier $ telephoneNumber $ internationaliSDNNumber $
facsimileTelephoneNumber $ preferredDeliveryMethod $ street $
postOfficeBox $ postalCode $ postalAddress $
physicalDeliveryOfficeName $ st $ l ) )
objectclass ( 2.5.6.11 NAME 'applicationProcess'
DESC 'RFC2256: an application process'
SUP top STRUCTURAL
MUST cn
MAY ( seeAlso $ ou $ l $ description ) )
objectclass ( 2.5.6.12 NAME 'applicationEntity'
DESC 'RFC2256: an application entity'
SUP top STRUCTURAL
MUST ( presentationAddress $ cn )
MAY ( supportedApplicationContext $ seeAlso $ ou $ o $ l $
description ) )
objectclass ( 2.5.6.13 NAME 'dSA'
DESC 'RFC2256: a directory system agent (a server)'
SUP applicationEntity STRUCTURAL
MAY knowledgeInformation )
objectclass ( 2.5.6.14 NAME 'device'
DESC 'RFC2256: a device'
SUP top STRUCTURAL
MUST cn
MAY ( serialNumber $ seeAlso $ owner $ ou $ o $ l $ description ) )
objectclass ( 2.5.6.15 NAME 'strongAuthenticationUser'
DESC 'RFC2256: a strong authentication user'
SUP top AUXILIARY
MUST userCertificate )
objectclass ( 2.5.6.16 NAME 'certificationAuthority'
DESC 'RFC2256: a certificate authority'
SUP top AUXILIARY
MUST ( authorityRevocationList $ certificateRevocationList $
cACertificate ) MAY crossCertificatePair )
objectclass ( 2.5.6.17 NAME 'groupOfUniqueNames'
DESC 'RFC2256: a group of unique names (DN and Unique Identifier)'
SUP top STRUCTURAL
MUST ( uniqueMember $ cn )
MAY ( businessCategory $ seeAlso $ owner $ ou $ o $ description ) )
objectclass ( 2.5.6.18 NAME 'userSecurityInformation'
DESC 'RFC2256: a user security information'
SUP top AUXILIARY
MAY ( supportedAlgorithms ) )
objectclass ( 2.5.6.16.2 NAME 'certificationAuthority-V2'
SUP certificationAuthority
AUXILIARY MAY ( deltaRevocationList ) )
objectclass ( 2.5.6.19 NAME 'cRLDistributionPoint'
SUP top STRUCTURAL
MUST ( cn )
MAY ( certificateRevocationList $ authorityRevocationList $
deltaRevocationList ) )
objectclass ( 2.5.6.20 NAME 'dmd'
SUP top STRUCTURAL
MUST ( dmdName )
MAY ( userPassword $ searchGuide $ seeAlso $ businessCategory $
x121Address $ registeredAddress $ destinationIndicator $
preferredDeliveryMethod $ telexNumber $ teletexTerminalIdentifier $
telephoneNumber $ internationaliSDNNumber $ facsimileTelephoneNumber $
street $ postOfficeBox $ postalCode $ postalAddress $
physicalDeliveryOfficeName $ st $ l $ description ) )
#
# Object Classes from RFC 2587
#
objectclass ( 2.5.6.21 NAME 'pkiUser'
DESC 'RFC2587: a PKI user'
SUP top AUXILIARY
MAY userCertificate )
objectclass ( 2.5.6.22 NAME 'pkiCA'
DESC 'RFC2587: PKI certificate authority'
SUP top AUXILIARY
MAY ( authorityRevocationList $ certificateRevocationList $
cACertificate $ crossCertificatePair ) )
objectclass ( 2.5.6.23 NAME 'deltaCRL'
DESC 'RFC2587: PKI user'
SUP top AUXILIARY
MAY deltaRevocationList )
#
# Standard Track URI label schema from RFC 2079
# system schema
#attributetype ( 1.3.6.1.4.1.250.1.57 NAME 'labeledURI'
# DESC 'RFC2079: Uniform Resource Identifier with optional label'
# EQUALITY caseExactMatch
# SYNTAX 1.3.6.1.4.1.1466.115.121.1.15 )
objectclass ( 1.3.6.1.4.1.250.3.15 NAME 'labeledURIObject'
DESC 'RFC2079: object that contains the URI attribute type'
SUP top AUXILIARY
MAY ( labeledURI ) )
#
# Derived from RFC 1274, but with new "short names"
#
#attributetype ( 0.9.2342.19200300.100.1.1
# NAME ( 'uid' 'userid' )
# DESC 'RFC1274: user identifier'
# EQUALITY caseIgnoreMatch
# SUBSTR caseIgnoreSubstringsMatch
# SYNTAX 1.3.6.1.4.1.1466.115.121.1.15{256} )
attributetype ( 0.9.2342.19200300.100.1.3
NAME ( 'mail' 'rfc822Mailbox' )
DESC 'RFC1274: RFC822 Mailbox'
EQUALITY caseIgnoreIA5Match
SUBSTR caseIgnoreIA5SubstringsMatch
SYNTAX 1.3.6.1.4.1.1466.115.121.1.26{256} )
objectclass ( 0.9.2342.19200300.100.4.19 NAME 'simpleSecurityObject'
DESC 'RFC1274: simple security object'
SUP top AUXILIARY
MUST userPassword )
# RFC 1274 + RFC 2247
attributetype ( 0.9.2342.19200300.100.1.25
NAME ( 'dc' 'domainComponent' )
DESC 'RFC1274/2247: domain component'
EQUALITY caseIgnoreIA5Match
SUBSTR caseIgnoreIA5SubstringsMatch
SYNTAX 1.3.6.1.4.1.1466.115.121.1.26 SINGLE-VALUE )
# RFC 2247
objectclass ( 1.3.6.1.4.1.1466.344 NAME 'dcObject'
DESC 'RFC2247: domain component object'
SUP top AUXILIARY MUST dc )
# RFC 2377
objectclass ( 1.3.6.1.1.3.1 NAME 'uidObject'
DESC 'RFC2377: uid object'
SUP top AUXILIARY MUST uid )
# RFC 4524
# The 'associatedDomain' attribute specifies DNS [RFC1034][RFC2181]
# host names [RFC1123] that are associated with an object. That is,
# values of this attribute should conform to the following ABNF:
#
# domain = root / label *( DOT label )
# root = SPACE
# label = LETDIG [ *61( LETDIG / HYPHEN ) LETDIG ]
# LETDIG = %x30-39 / %x41-5A / %x61-7A ; "0" - "9" / "A"-"Z" / "a"-"z"
# SPACE = %x20 ; space (" ")
# HYPHEN = %x2D ; hyphen ("-")
# DOT = %x2E ; period (".")
attributetype ( 0.9.2342.19200300.100.1.37
NAME 'associatedDomain'
DESC 'RFC1274: domain associated with object'
EQUALITY caseIgnoreIA5Match
SUBSTR caseIgnoreIA5SubstringsMatch
SYNTAX 1.3.6.1.4.1.1466.115.121.1.26 )
# RFC 2459 -- deprecated in favor of 'mail' (in cosine.schema)
attributetype ( 1.2.840.113549.1.9.1
NAME ( 'email' 'emailAddress' 'pkcs9email' )
DESC 'RFC3280: legacy attribute for email addresses in DNs'
EQUALITY caseIgnoreIA5Match
SUBSTR caseIgnoreIA5SubstringsMatch
SYNTAX 1.3.6.1.4.1.1466.115.121.1.26{128} )

File diff suppressed because it is too large.

View file

@@ -0,0 +1,155 @@
# inetorgperson.schema -- InetOrgPerson (RFC2798)
# $OpenLDAP$
## This work is part of OpenLDAP Software <http://www.openldap.org/>.
##
## Copyright 1998-2019 The OpenLDAP Foundation.
## All rights reserved.
##
## Redistribution and use in source and binary forms, with or without
## modification, are permitted only as authorized by the OpenLDAP
## Public License.
##
## A copy of this license is available in the file LICENSE in the
## top-level directory of the distribution or, alternatively, at
## <http://www.OpenLDAP.org/license.html>.
#
# InetOrgPerson (RFC2798)
#
# Depends upon
# Definition of an X.500 Attribute Type and an Object Class to Hold
# Uniform Resource Identifiers (URIs) [RFC2079]
# (core.schema)
#
# A Summary of the X.500(96) User Schema for use with LDAPv3 [RFC2256]
# (core.schema)
#
# The COSINE and Internet X.500 Schema [RFC1274] (cosine.schema)
# carLicense
# This multivalued field is used to record the values of the license or
# registration plate associated with an individual.
attributetype ( 2.16.840.1.113730.3.1.1
NAME 'carLicense'
DESC 'RFC2798: vehicle license or registration plate'
EQUALITY caseIgnoreMatch
SUBSTR caseIgnoreSubstringsMatch
SYNTAX 1.3.6.1.4.1.1466.115.121.1.15 )
# departmentNumber
# Code for department to which a person belongs. This can also be
# strictly numeric (e.g., 1234) or alphanumeric (e.g., ABC/123).
attributetype ( 2.16.840.1.113730.3.1.2
NAME 'departmentNumber'
DESC 'RFC2798: identifies a department within an organization'
EQUALITY caseIgnoreMatch
SUBSTR caseIgnoreSubstringsMatch
SYNTAX 1.3.6.1.4.1.1466.115.121.1.15 )
# displayName
# When displaying an entry, especially within a one-line summary list, it
# is useful to be able to identify a name to be used. Since other attri-
# bute types such as 'cn' are multivalued, an additional attribute type is
# needed. Display name is defined for this purpose.
attributetype ( 2.16.840.1.113730.3.1.241
NAME 'displayName'
DESC 'RFC2798: preferred name to be used when displaying entries'
EQUALITY caseIgnoreMatch
SUBSTR caseIgnoreSubstringsMatch
SYNTAX 1.3.6.1.4.1.1466.115.121.1.15
SINGLE-VALUE )
# employeeNumber
# Numeric or alphanumeric identifier assigned to a person, typically based
# on order of hire or association with an organization. Single valued.
attributetype ( 2.16.840.1.113730.3.1.3
NAME 'employeeNumber'
DESC 'RFC2798: numerically identifies an employee within an organization'
EQUALITY caseIgnoreMatch
SUBSTR caseIgnoreSubstringsMatch
SYNTAX 1.3.6.1.4.1.1466.115.121.1.15
SINGLE-VALUE )
# employeeType
# Used to identify the employer to employee relationship. Typical values
# used will be "Contractor", "Employee", "Intern", "Temp", "External", and
# "Unknown" but any value may be used.
attributetype ( 2.16.840.1.113730.3.1.4
NAME 'employeeType'
DESC 'RFC2798: type of employment for a person'
EQUALITY caseIgnoreMatch
SUBSTR caseIgnoreSubstringsMatch
SYNTAX 1.3.6.1.4.1.1466.115.121.1.15 )
# jpegPhoto
# Used to store one or more images of a person using the JPEG File
# Interchange Format [JFIF].
# Note that the jpegPhoto attribute type was defined for use in the
# Internet X.500 pilots but no referencable definition for it could be
# located.
attributetype ( 0.9.2342.19200300.100.1.60
NAME 'jpegPhoto'
DESC 'RFC2798: a JPEG image'
SYNTAX 1.3.6.1.4.1.1466.115.121.1.28 )
# preferredLanguage
# Used to indicate an individual's preferred written or spoken
# language. This is useful for international correspondence or human-
# computer interaction. Values for this attribute type MUST conform to
# the definition of the Accept-Language header field defined in
# [RFC2068] with one exception: the sequence "Accept-Language" ":"
# should be omitted. This is a single valued attribute type.
attributetype ( 2.16.840.1.113730.3.1.39
NAME 'preferredLanguage'
DESC 'RFC2798: preferred written or spoken language for a person'
EQUALITY caseIgnoreMatch
SUBSTR caseIgnoreSubstringsMatch
SYNTAX 1.3.6.1.4.1.1466.115.121.1.15
SINGLE-VALUE )
# userSMIMECertificate
# A PKCS#7 [RFC2315] SignedData, where the content that is signed is
# ignored by consumers of userSMIMECertificate values. It is
# recommended that values have a `contentType' of data with an absent
# `content' field. Values of this attribute contain a person's entire
# certificate chain and an smimeCapabilities field [RFC2633] that at a
# minimum describes their SMIME algorithm capabilities. Values for
# this attribute are to be stored and requested in binary form, as
# 'userSMIMECertificate;binary'. If available, this attribute is
# preferred over the userCertificate attribute for S/MIME applications.
## OpenLDAP note: ";binary" transfer should NOT be used as syntax is binary
attributetype ( 2.16.840.1.113730.3.1.40
NAME 'userSMIMECertificate'
DESC 'RFC2798: PKCS#7 SignedData used to support S/MIME'
SYNTAX 1.3.6.1.4.1.1466.115.121.1.5 )
# userPKCS12
# PKCS #12 [PKCS12] provides a format for exchange of personal identity
# information. When such information is stored in a directory service,
# the userPKCS12 attribute should be used. This attribute is to be stored
# and requested in binary form, as 'userPKCS12;binary'. The attribute
# values are PFX PDUs stored as binary data.
## OpenLDAP note: ";binary" transfer should NOT be used as syntax is binary
attributetype ( 2.16.840.1.113730.3.1.216
NAME 'userPKCS12'
DESC 'RFC2798: personal identity information, a PKCS #12 PFX'
SYNTAX 1.3.6.1.4.1.1466.115.121.1.5 )
# inetOrgPerson
# The inetOrgPerson represents people who are associated with an
# organization in some way. It is a structural class and is derived
# from the organizationalPerson which is defined in X.521 [X521].
objectclass ( 2.16.840.1.113730.3.2.2
NAME 'inetOrgPerson'
DESC 'RFC2798: Internet Organizational Person'
SUP organizationalPerson
STRUCTURAL
MAY (
audio $ businessCategory $ carLicense $ departmentNumber $
displayName $ employeeNumber $ employeeType $ givenName $
homePhone $ homePostalAddress $ initials $ jpegPhoto $
labeledURI $ mail $ manager $ mobile $ o $ pager $
photo $ roomNumber $ secretary $ uid $ userCertificate $
x500uniqueIdentifier $ preferredLanguage $
userSMIMECertificate $ userPKCS12 )
)

View file

@@ -0,0 +1,88 @@
## LDAP Schema Yunohost EMAIL
## Version 0.1
## Adrien Beudin
# Attributes
attributetype ( 1.3.6.1.4.1.40328.1.20.2.1
NAME 'maildrop'
DESC 'Mail addresses where mails are forwarded -- ie forwards'
EQUALITY caseIgnoreMatch
SUBSTR caseIgnoreSubstringsMatch
SYNTAX 1.3.6.1.4.1.1466.115.121.1.15{512})
attributetype ( 1.3.6.1.4.1.40328.1.20.2.2
NAME 'mailalias'
DESC 'Mail addresses accepted by this account -- ie aliases'
EQUALITY caseIgnoreMatch
SUBSTR caseIgnoreSubstringsMatch
SYNTAX 1.3.6.1.4.1.1466.115.121.1.15{512})
attributetype ( 1.3.6.1.4.1.40328.1.20.2.3
NAME 'mailenable'
DESC 'Mail Account validity'
EQUALITY caseIgnoreMatch
SUBSTR caseIgnoreSubstringsMatch
SYNTAX 1.3.6.1.4.1.1466.115.121.1.15{8})
attributetype ( 1.3.6.1.4.1.40328.1.20.2.4
NAME 'mailbox'
DESC 'Mailbox path where mails are delivered'
EQUALITY caseIgnoreMatch
SUBSTR caseIgnoreSubstringsMatch
SYNTAX 1.3.6.1.4.1.1466.115.121.1.15{512})
attributetype ( 1.3.6.1.4.1.40328.1.20.2.5
NAME 'virtualdomain'
DESC 'A mail domain name'
EQUALITY caseIgnoreMatch
SUBSTR caseIgnoreSubstringsMatch
SYNTAX 1.3.6.1.4.1.1466.115.121.1.15{512})
attributetype ( 1.3.6.1.4.1.40328.1.20.2.6
NAME 'virtualdomaindescription'
DESC 'Virtual domain description'
EQUALITY caseIgnoreMatch
SUBSTR caseIgnoreSubstringsMatch
SYNTAX 1.3.6.1.4.1.1466.115.121.1.15{512})
attributetype ( 1.3.6.1.4.1.40328.1.20.2.7
NAME 'mailuserquota'
DESC 'Mailbox quota for a user'
EQUALITY caseIgnoreMatch
SUBSTR caseIgnoreSubstringsMatch
SYNTAX 1.3.6.1.4.1.1466.115.121.1.15{16} SINGLE-VALUE )
# Mail Account Objectclass
objectclass ( 1.3.6.1.4.1.40328.1.1.2.1
NAME 'mailAccount'
DESC 'Mail Account'
SUP top
AUXILIARY
MUST (
mail
)
MAY (
mailalias $ maildrop $ mailenable $ mailbox $ mailuserquota
)
)
# Mail Domain Objectclass
objectclass ( 1.3.6.1.4.1.40328.1.1.2.2
NAME 'mailDomain'
DESC 'Domain mail entry'
SUP top
STRUCTURAL
MUST (
virtualdomain
)
MAY (
virtualdomaindescription $ mailuserquota
)
)
# Mail Group Objectclass
objectclass ( 1.3.6.1.4.1.40328.1.1.2.3
NAME 'mailGroup' SUP top AUXILIARY
DESC 'Mail Group'
MUST ( mail )
)

View file

@@ -0,0 +1,237 @@
# $OpenLDAP$
## This work is part of OpenLDAP Software <http://www.openldap.org/>.
##
## Copyright 1998-2019 The OpenLDAP Foundation.
## All rights reserved.
##
## Redistribution and use in source and binary forms, with or without
## modification, are permitted only as authorized by the OpenLDAP
## Public License.
##
## A copy of this license is available in the file LICENSE in the
## top-level directory of the distribution or, alternatively, at
## <http://www.OpenLDAP.org/license.html>.
# Definitions from RFC2307 (Experimental)
# An Approach for Using LDAP as a Network Information Service
# Depends upon core.schema and cosine.schema
# Note: The definitions in RFC2307 are given in syntaxes closely related
# to those in RFC2252, however, some liberties are taken that are not
# supported by RFC2252. This file has been written following RFC2252
# strictly.
# OID Base is iso(1) org(3) dod(6) internet(1) directory(1) nisSchema(1).
# i.e. nisSchema in RFC2307 is 1.3.6.1.1.1
#
# Syntaxes are under 1.3.6.1.1.1.0 (two new syntaxes are defined)
# validaters for these syntaxes are incomplete, they only
# implement printable string validation (which is good as the
# common use of these syntaxes violates the specification).
# Attribute types are under 1.3.6.1.1.1.1
# Object classes are under 1.3.6.1.1.1.2
# Attribute Type Definitions
# builtin
#attributetype ( 1.3.6.1.1.1.1.0 NAME 'uidNumber'
# DESC 'An integer uniquely identifying a user in an administrative domain'
# EQUALITY integerMatch
# SYNTAX 1.3.6.1.4.1.1466.115.121.1.27 SINGLE-VALUE )
# builtin
#attributetype ( 1.3.6.1.1.1.1.1 NAME 'gidNumber'
# DESC 'An integer uniquely identifying a group in an administrative domain'
# EQUALITY integerMatch
# SYNTAX 1.3.6.1.4.1.1466.115.121.1.27 SINGLE-VALUE )
attributetype ( 1.3.6.1.1.1.1.2 NAME 'gecos'
DESC 'The GECOS field; the common name'
EQUALITY caseIgnoreIA5Match
SUBSTR caseIgnoreIA5SubstringsMatch
SYNTAX 1.3.6.1.4.1.1466.115.121.1.26 SINGLE-VALUE )
attributetype ( 1.3.6.1.1.1.1.3 NAME 'homeDirectory'
DESC 'The absolute path to the home directory'
EQUALITY caseExactIA5Match
SYNTAX 1.3.6.1.4.1.1466.115.121.1.26 SINGLE-VALUE )
attributetype ( 1.3.6.1.1.1.1.4 NAME 'loginShell'
DESC 'The path to the login shell'
EQUALITY caseExactIA5Match
SYNTAX 1.3.6.1.4.1.1466.115.121.1.26 SINGLE-VALUE )
attributetype ( 1.3.6.1.1.1.1.5 NAME 'shadowLastChange'
EQUALITY integerMatch
SYNTAX 1.3.6.1.4.1.1466.115.121.1.27 SINGLE-VALUE )
attributetype ( 1.3.6.1.1.1.1.6 NAME 'shadowMin'
EQUALITY integerMatch
SYNTAX 1.3.6.1.4.1.1466.115.121.1.27 SINGLE-VALUE )
attributetype ( 1.3.6.1.1.1.1.7 NAME 'shadowMax'
EQUALITY integerMatch
SYNTAX 1.3.6.1.4.1.1466.115.121.1.27 SINGLE-VALUE )
attributetype ( 1.3.6.1.1.1.1.8 NAME 'shadowWarning'
EQUALITY integerMatch
SYNTAX 1.3.6.1.4.1.1466.115.121.1.27 SINGLE-VALUE )
attributetype ( 1.3.6.1.1.1.1.9 NAME 'shadowInactive'
EQUALITY integerMatch
SYNTAX 1.3.6.1.4.1.1466.115.121.1.27 SINGLE-VALUE )
attributetype ( 1.3.6.1.1.1.1.10 NAME 'shadowExpire'
EQUALITY integerMatch
SYNTAX 1.3.6.1.4.1.1466.115.121.1.27 SINGLE-VALUE )
attributetype ( 1.3.6.1.1.1.1.11 NAME 'shadowFlag'
EQUALITY integerMatch
SYNTAX 1.3.6.1.4.1.1466.115.121.1.27 SINGLE-VALUE )
attributetype ( 1.3.6.1.1.1.1.12 NAME 'memberUid'
EQUALITY caseExactIA5Match
SUBSTR caseExactIA5SubstringsMatch
SYNTAX 1.3.6.1.4.1.1466.115.121.1.26 )
attributetype ( 1.3.6.1.1.1.1.13 NAME 'memberNisNetgroup'
EQUALITY caseExactIA5Match
SUBSTR caseExactIA5SubstringsMatch
SYNTAX 1.3.6.1.4.1.1466.115.121.1.26 )
attributetype ( 1.3.6.1.1.1.1.14 NAME 'nisNetgroupTriple'
DESC 'Netgroup triple'
SYNTAX 1.3.6.1.1.1.0.0 )
attributetype ( 1.3.6.1.1.1.1.15 NAME 'ipServicePort'
EQUALITY integerMatch
SYNTAX 1.3.6.1.4.1.1466.115.121.1.27 SINGLE-VALUE )
attributetype ( 1.3.6.1.1.1.1.16 NAME 'ipServiceProtocol'
SUP name )
attributetype ( 1.3.6.1.1.1.1.17 NAME 'ipProtocolNumber'
EQUALITY integerMatch
SYNTAX 1.3.6.1.4.1.1466.115.121.1.27 SINGLE-VALUE )
attributetype ( 1.3.6.1.1.1.1.18 NAME 'oncRpcNumber'
EQUALITY integerMatch
SYNTAX 1.3.6.1.4.1.1466.115.121.1.27 SINGLE-VALUE )
attributetype ( 1.3.6.1.1.1.1.19 NAME 'ipHostNumber'
DESC 'IP address'
EQUALITY caseIgnoreIA5Match
SYNTAX 1.3.6.1.4.1.1466.115.121.1.26{128} )
attributetype ( 1.3.6.1.1.1.1.20 NAME 'ipNetworkNumber'
DESC 'IP network'
EQUALITY caseIgnoreIA5Match
SYNTAX 1.3.6.1.4.1.1466.115.121.1.26{128} SINGLE-VALUE )
attributetype ( 1.3.6.1.1.1.1.21 NAME 'ipNetmaskNumber'
DESC 'IP netmask'
EQUALITY caseIgnoreIA5Match
SYNTAX 1.3.6.1.4.1.1466.115.121.1.26{128} SINGLE-VALUE )
attributetype ( 1.3.6.1.1.1.1.22 NAME 'macAddress'
DESC 'MAC address'
EQUALITY caseIgnoreIA5Match
SYNTAX 1.3.6.1.4.1.1466.115.121.1.26{128} )
attributetype ( 1.3.6.1.1.1.1.23 NAME 'bootParameter'
DESC 'rpc.bootparamd parameter'
SYNTAX 1.3.6.1.1.1.0.1 )
attributetype ( 1.3.6.1.1.1.1.24 NAME 'bootFile'
DESC 'Boot image name'
EQUALITY caseExactIA5Match
SYNTAX 1.3.6.1.4.1.1466.115.121.1.26 )
attributetype ( 1.3.6.1.1.1.1.26 NAME 'nisMapName'
SUP name )
attributetype ( 1.3.6.1.1.1.1.27 NAME 'nisMapEntry'
EQUALITY caseExactIA5Match
SUBSTR caseExactIA5SubstringsMatch
SYNTAX 1.3.6.1.4.1.1466.115.121.1.26{1024} SINGLE-VALUE )
# Object Class Definitions
objectclass ( 1.3.6.1.1.1.2.0 NAME 'posixAccount'
DESC 'Abstraction of an account with POSIX attributes'
SUP top AUXILIARY
MUST ( cn $ uid $ uidNumber $ gidNumber $ homeDirectory )
MAY ( userPassword $ loginShell $ gecos $ description ) )
objectclass ( 1.3.6.1.1.1.2.1 NAME 'shadowAccount'
DESC 'Additional attributes for shadow passwords'
SUP top AUXILIARY
MUST uid
MAY ( userPassword $ shadowLastChange $ shadowMin $
shadowMax $ shadowWarning $ shadowInactive $
shadowExpire $ shadowFlag $ description ) )
objectclass ( 1.3.6.1.1.1.2.2 NAME 'posixGroup'
DESC 'Abstraction of a group of accounts'
SUP top STRUCTURAL
MUST ( cn $ gidNumber )
MAY ( userPassword $ memberUid $ description ) )
objectclass ( 1.3.6.1.1.1.2.3 NAME 'ipService'
DESC 'Abstraction an Internet Protocol service'
SUP top STRUCTURAL
MUST ( cn $ ipServicePort $ ipServiceProtocol )
MAY ( description ) )
objectclass ( 1.3.6.1.1.1.2.4 NAME 'ipProtocol'
DESC 'Abstraction of an IP protocol'
SUP top STRUCTURAL
MUST ( cn $ ipProtocolNumber $ description )
MAY description )
objectclass ( 1.3.6.1.1.1.2.5 NAME 'oncRpc'
DESC 'Abstraction of an ONC/RPC binding'
SUP top STRUCTURAL
MUST ( cn $ oncRpcNumber $ description )
MAY description )
objectclass ( 1.3.6.1.1.1.2.6 NAME 'ipHost'
DESC 'Abstraction of a host, an IP device'
SUP top AUXILIARY
MUST ( cn $ ipHostNumber )
MAY ( l $ description $ manager ) )
objectclass ( 1.3.6.1.1.1.2.7 NAME 'ipNetwork'
DESC 'Abstraction of an IP network'
SUP top STRUCTURAL
MUST ( cn $ ipNetworkNumber )
MAY ( ipNetmaskNumber $ l $ description $ manager ) )
objectclass ( 1.3.6.1.1.1.2.8 NAME 'nisNetgroup'
DESC 'Abstraction of a netgroup'
SUP top STRUCTURAL
MUST cn
MAY ( nisNetgroupTriple $ memberNisNetgroup $ description ) )
objectclass ( 1.3.6.1.1.1.2.9 NAME 'nisMap'
DESC 'A generic abstraction of a NIS map'
SUP top STRUCTURAL
MUST nisMapName
MAY description )
objectclass ( 1.3.6.1.1.1.2.10 NAME 'nisObject'
DESC 'An entry in a NIS map'
SUP top STRUCTURAL
MUST ( cn $ nisMapEntry $ nisMapName )
MAY description )
objectclass ( 1.3.6.1.1.1.2.11 NAME 'ieee802Device'
DESC 'A device with a MAC address'
SUP top AUXILIARY
MAY macAddress )
objectclass ( 1.3.6.1.1.1.2.12 NAME 'bootableDevice'
DESC 'A device with boot parameters'
SUP top AUXILIARY
MAY ( bootFile $ bootParameter ) )

View file

@@ -0,0 +1,76 @@
#
# OpenLDAP schema file for Sudo
# Save as /etc/openldap/schema/sudo.schema
#
attributetype ( 1.3.6.1.4.1.15953.9.1.1
NAME 'sudoUser'
DESC 'User(s) who may run sudo'
EQUALITY caseExactIA5Match
SUBSTR caseExactIA5SubstringsMatch
SYNTAX 1.3.6.1.4.1.1466.115.121.1.26 )
attributetype ( 1.3.6.1.4.1.15953.9.1.2
NAME 'sudoHost'
DESC 'Host(s) who may run sudo'
EQUALITY caseExactIA5Match
SUBSTR caseExactIA5SubstringsMatch
SYNTAX 1.3.6.1.4.1.1466.115.121.1.26 )
attributetype ( 1.3.6.1.4.1.15953.9.1.3
NAME 'sudoCommand'
DESC 'Command(s) to be executed by sudo'
EQUALITY caseExactIA5Match
SYNTAX 1.3.6.1.4.1.1466.115.121.1.26 )
attributetype ( 1.3.6.1.4.1.15953.9.1.4
NAME 'sudoRunAs'
DESC 'User(s) impersonated by sudo (deprecated)'
EQUALITY caseExactIA5Match
SYNTAX 1.3.6.1.4.1.1466.115.121.1.26 )
attributetype ( 1.3.6.1.4.1.15953.9.1.5
NAME 'sudoOption'
DESC 'Options(s) followed by sudo'
EQUALITY caseExactIA5Match
SYNTAX 1.3.6.1.4.1.1466.115.121.1.26 )
attributetype ( 1.3.6.1.4.1.15953.9.1.6
NAME 'sudoRunAsUser'
DESC 'User(s) impersonated by sudo'
EQUALITY caseExactIA5Match
SYNTAX 1.3.6.1.4.1.1466.115.121.1.26 )
attributetype ( 1.3.6.1.4.1.15953.9.1.7
NAME 'sudoRunAsGroup'
DESC 'Group(s) impersonated by sudo'
EQUALITY caseExactIA5Match
SYNTAX 1.3.6.1.4.1.1466.115.121.1.26 )
attributetype ( 1.3.6.1.4.1.15953.9.1.8
NAME 'sudoNotBefore'
DESC 'Start of time interval for which the entry is valid'
EQUALITY generalizedTimeMatch
ORDERING generalizedTimeOrderingMatch
SYNTAX 1.3.6.1.4.1.1466.115.121.1.24 )
attributetype ( 1.3.6.1.4.1.15953.9.1.9
NAME 'sudoNotAfter'
DESC 'End of time interval for which the entry is valid'
EQUALITY generalizedTimeMatch
ORDERING generalizedTimeOrderingMatch
SYNTAX 1.3.6.1.4.1.1466.115.121.1.24 )
attributeTypes ( 1.3.6.1.4.1.15953.9.1.10
NAME 'sudoOrder'
DESC 'an integer to order the sudoRole entries'
EQUALITY integerMatch
ORDERING integerOrderingMatch
SYNTAX 1.3.6.1.4.1.1466.115.121.1.27 )
objectclass ( 1.3.6.1.4.1.15953.9.2.1 NAME 'sudoRole' SUP top STRUCTURAL
DESC 'Sudoer Entries'
MUST ( cn )
MAY ( sudoUser $ sudoHost $ sudoCommand $ sudoRunAs $ sudoRunAsUser $ sudoRunAsGroup $ sudoOption $ sudoOrder $ sudoNotBefore $ sudoNotAfter $
description )
)

View file

@@ -0,0 +1,33 @@
#dn: cn=yunohost,cn=schema,cn=config
#objectClass: olcSchemaConfig
#cn: yunohost
# ATTRIBUTES
# For Permission
attributetype ( 1.3.6.1.4.1.17953.9.1.1 NAME 'permission'
DESC 'Yunohost permission on user and group side'
SUP distinguishedName )
attributetype ( 1.3.6.1.4.1.17953.9.1.2 NAME 'groupPermission'
DESC 'Yunohost permission for a group on permission side'
SUP distinguishedName )
attributetype ( 1.3.6.1.4.1.17953.9.1.3 NAME 'inheritPermission'
DESC 'Yunohost permission for user on permission side'
SUP distinguishedName )
attributetype ( 1.3.6.1.4.1.17953.9.1.4 NAME 'URL'
DESC 'Yunohost application URL'
SYNTAX 1.3.6.1.4.1.1466.115.121.1.15{128} )
# OBJECTCLASS
# For Applications
objectclass ( 1.3.6.1.4.1.17953.9.2.1 NAME 'groupOfNamesYnh'
DESC 'Yunohost user group'
SUP top AUXILIARY
MAY ( member $ businessCategory $ seeAlso $ owner $ ou $ o $ permission ) )
objectclass ( 1.3.6.1.4.1.17953.9.2.2 NAME 'permissionYnh'
DESC 'a Yunohost application'
SUP top AUXILIARY
MUST cn
MAY ( groupPermission $ inheritPermission $ URL ) )
# For User
objectclass ( 1.3.6.1.4.1.17953.9.2.3 NAME 'userPermissionYnh'
DESC 'a Yunohost application'
SUP top AUXILIARY
MAY ( permission ) )

View file

@@ -0,0 +1,94 @@
serverID %(serverid)s
moduleload back_%(database)s
moduleload memberof
%(include_directives)s
loglevel %(loglevel)s
#allow bind_v2
database %(database)s
directory "%(directory)s"
suffix "%(suffix)s"
rootdn "%(rootdn)s"
rootpw "%(rootpw)s"
TLSCACertificateFile "%(cafile)s"
TLSCertificateFile "%(servercert)s"
TLSCertificateKeyFile "%(serverkey)s"
authz-regexp
"gidnumber=%(root_gid)s\\+uidnumber=%(root_uid)s,cn=peercred,cn=external,cn=auth"
"%(rootdn)s"
index objectClass eq
index uid,sudoUser eq,sub
index entryCSN,entryUUID eq
index cn,mail eq
index gidNumber,uidNumber eq
index member,memberUid,uniqueMember eq
index virtualdomain eq
# The userPassword by default can be changed
# by the entry owning it if they are authenticated.
# Others should not be able to see it, except the
# admin entry below
# These access lines apply to database #1 only
access to attrs=userPassword,shadowLastChange
by dn="cn=admin,dc=yunohost,dc=org" write
by dn.exact="gidNumber=%(root_gid)s+uidnumber=%(root_uid)s,cn=peercred,cn=external,cn=auth" write
by anonymous auth
by self write
by * none
# Personnal information can be changed by the entry
# owning it if they are authenticated.
# Others should be able to see it.
access to attrs=cn,gecos,givenName,mail,maildrop,displayName,sn
by dn="cn=admin,dc=yunohost,dc=org" write
by dn.exact="gidNumber=%(root_gid)s+uidnumber=%(root_uid)s,cn=peercred,cn=external,cn=auth" write
by self write
by * read
# Ensure read access to the base for things like
# supportedSASLMechanisms. Without this you may
# have problems with SASL not knowing what
# mechanisms are available and the like.
# Note that this is covered by the 'access to *'
# ACL below too but if you change that as people
# are wont to do you'll still need this if you
# want SASL (and possible ldap_files things) to work
# happily.
access to dn.base="" by * read
# The admin dn has full write access, everyone else
# can read everything.
access to *
by dn="cn=admin,dc=yunohost,dc=org" write
by dn.exact="gidNumber=%(root_gid)s+uidnumber=%(root_uid)s,cn=peercred,cn=external,cn=auth" write
by group/groupOfNames/Member="cn=admin,ou=groups,dc=yunohost,dc=org" write
by * read
# Configure Memberof Overlay (used for Yunohost permission)
# Link user <-> group
#dn: olcOverlay={0}memberof,olcDatabase={1}mdb,cn=config
overlay memberof
memberof-group-oc groupOfNamesYnh
memberof-member-ad member
memberof-memberof-ad memberOf
memberof-dangling error
memberof-refint TRUE
# Link permission <-> groupes
#dn: olcOverlay={1}memberof,olcDatabase={1}mdb,cn=config
overlay memberof
memberof-group-oc permissionYnh
memberof-member-ad groupPermission
memberof-memberof-ad permission
memberof-dangling error
memberof-refint TRUE
# Link permission <-> user
#dn: olcOverlay={2}memberof,olcDatabase={1}mdb,cn=config
overlay memberof
memberof-group-oc permissionYnh
memberof-member-ad inheritPermission
memberof-memberof-ad permission
memberof-dangling error
memberof-refint TRUE
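Note (illustration, not part of this commit): the three memberof overlays above maintain the reverse links automatically, so a client never writes memberOf or permission itself. A rough python-ldap sketch of reading the computed attributes, with placeholder URI, bind credentials and user DN:
import ldap
conn = ldap.initialize("ldapi://%2Fvar%2Frun%2Fslapd%2Fldapi")   # placeholder URI
conn.simple_bind_s("cn=admin,dc=yunohost,dc=org", "yunohost")    # placeholder credentials
results = conn.search_s(
    "uid=someuser,ou=users,dc=yunohost,dc=org",                  # placeholder user DN
    ldap.SCOPE_BASE,
    "(objectClass=*)",
    ["memberOf", "permission"],                                  # filled in by the overlays
)
print(results)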

205
test/ldap_files/tests.ldif Normal file
View file

@ -0,0 +1,205 @@
dn: dc=yunohost,dc=org
dc: yunohost
o: yunohost.org
objectclass: top
objectclass: dcObject
objectclass: organization
dn: cn=admin,dc=yunohost,dc=org
objectClass: simpleSecurityObject
objectClass: organizationalRole
cn: admin
userPassword: yunohost
#dn: ou=people,dc=yunohost,dc=org
#objectClass: organizationalUnit
#ou: people
#
#dn: ou=moregroups,dc=yunohost,dc=org
#objectClass: organizationalUnit
#ou: moregroups
#
#dn: ou=mirror_groups,dc=yunohost,dc=org
#objectClass: organizationalUnit
#ou: mirror_groups
#
#
#dn: uid=alice,ou=people,dc=yunohost,dc=org
#objectClass: person
#objectClass: organizationalPerson
#objectClass: inetOrgPerson
#objectClass: posixAccount
#cn: alice
#uid: alice
#userPassword: password
#uidNumber: 1000
#gidNumber: 1000
#givenName: Alice
#sn: Adams
#homeDirectory: /home/alice
#
#dn: uid=bob,ou=people,dc=yunohost,dc=org
#objectClass: person
#objectClass: organizationalPerson
#objectClass: inetOrgPerson
#objectClass: posixAccount
#cn: bob
#uid: bob
#userPassword: password
#uidNumber: 1001
#gidNumber: 50
#givenName: Robert
#sn: Barker
#homeDirectory: /home/bob
#
#dn: uid=dreßler,ou=people,dc=yunohost,dc=org
#objectClass: person
#objectClass: organizationalPerson
#objectClass: inetOrgPerson
#objectClass: posixAccount
#cn: dreßler
#uid: dreßler
#userPassword: password
#uidNumber: 1002
#gidNumber: 50
#givenName: Wolfgang
#sn: Dreßler
#homeDirectory: /home/dressler
#
#dn: uid=nobody,ou=people,dc=yunohost,dc=org
#objectClass: person
#objectClass: organizationalPerson
#objectClass: inetOrgPerson
#objectClass: posixAccount
#cn: nobody
#uid: nobody
#userPassword: password
#uidNumber: 1003
#gidNumber: 50
#sn: nobody
#homeDirectory: /home/nobody
#
#dn: uid=nonposix,ou=people,dc=yunohost,dc=org
#objectClass: person
#objectClass: organizationalPerson
#objectClass: inetOrgPerson
#cn: nonposix
#uid: nonposix
#userPassword: password
#sn: nonposix
#
#
## posixGroup objects
#dn: cn=active_px,ou=moregroups,dc=yunohost,dc=org
#objectClass: posixGroup
#cn: active_px
#gidNumber: 1000
#memberUid: nonposix
#
#dn: cn=staff_px,ou=moregroups,dc=yunohost,dc=org
#objectClass: posixGroup
#cn: staff_px
#gidNumber: 1001
#memberUid: alice
#memberUid: nonposix
#
#dn: cn=superuser_px,ou=moregroups,dc=yunohost,dc=org
#objectClass: posixGroup
#cn: superuser_px
#gidNumber: 1002
#memberUid: alice
#memberUid: nonposix
#
#
## groupOfNames groups
#dn: cn=empty_gon,ou=moregroups,dc=yunohost,dc=org
#cn: empty_gon
#objectClass: groupOfNames
#member:
#
#dn: cn=active_gon,ou=moregroups,dc=yunohost,dc=org
#cn: active_gon
#objectClass: groupOfNames
#member: uid=alice,ou=people,dc=yunohost,dc=org
#
#dn: cn=staff_gon,ou=moregroups,dc=yunohost,dc=org
#cn: staff_gon
#objectClass: groupOfNames
#member: uid=alice,ou=people,dc=yunohost,dc=org
#
#dn: cn=superuser_gon,ou=moregroups,dc=yunohost,dc=org
#cn: superuser_gon
#objectClass: groupOfNames
#member: uid=alice,ou=people,dc=yunohost,dc=org
#
#dn: cn=other_gon,ou=moregroups,dc=yunohost,dc=org
#cn: other_gon
#objectClass: groupOfNames
#member: uid=bob,ou=people,dc=yunohost,dc=org
#
#
## groupOfNames objects for LDAPGroupQuery testing
#dn: ou=query_groups,dc=yunohost,dc=org
#objectClass: organizationalUnit
#ou: query_groups
#
#dn: cn=alice_gon,ou=query_groups,dc=yunohost,dc=org
#cn: alice_gon
#objectClass: groupOfNames
#member: uid=alice,ou=people,dc=yunohost,dc=org
#
#dn: cn=mutual_gon,ou=query_groups,dc=yunohost,dc=org
#cn: mutual_gon
#objectClass: groupOfNames
#member: uid=alice,ou=people,dc=yunohost,dc=org
#member: uid=bob,ou=people,dc=yunohost,dc=org
#
#dn: cn=bob_gon,ou=query_groups,dc=yunohost,dc=org
#cn: bob_gon
#objectClass: groupOfNames
#member: uid=bob,ou=people,dc=yunohost,dc=org
#
#dn: cn=dreßler_gon,ou=query_groups,dc=yunohost,dc=org
#cn: dreßler_gon
#objectClass: groupOfNames
#member: uid=dreßler,ou=people,dc=yunohost,dc=org
#
#
## groupOfNames objects for selective group mirroring.
#dn: cn=mirror1,ou=mirror_groups,dc=yunohost,dc=org
#cn: mirror1
#objectClass: groupOfNames
#member: uid=alice,ou=people,dc=yunohost,dc=org
#
#dn: cn=mirror2,ou=mirror_groups,dc=yunohost,dc=org
#cn: mirror2
#objectClass: groupOfNames
#member:
#
#dn: cn=mirror3,ou=mirror_groups,dc=yunohost,dc=org
#cn: mirror3
#objectClass: groupOfNames
#member: uid=alice,ou=people,dc=yunohost,dc=org
#
#dn: cn=mirror4,ou=mirror_groups,dc=yunohost,dc=org
#cn: mirror4
#objectClass: groupOfNames
#member:
#
#
## Nested groups with a circular reference
#dn: cn=parent_gon,ou=moregroups,dc=yunohost,dc=org
#cn: parent_gon
#objectClass: groupOfNames
#member: cn=nested_gon,ou=moregroups,dc=yunohost,dc=org
#
#dn: CN=nested_gon,ou=moregroups,dc=yunohost,dc=org
#cn: nested_gon
#objectClass: groupOfNames
#member: uid=alice,ou=people,dc=yunohost,dc=org
#member: cn=circular_gon,ou=moregroups,dc=yunohost,dc=org
#
#dn: cn=circular_gon,ou=moregroups,dc=yunohost,dc=org
#cn: circular_gon
#objectClass: groupOfNames
#member: cn=parent_gon,ou=moregroups,dc=yunohost,dc=org

5
test/locales/en.json Normal file
View file

@ -0,0 +1,5 @@
{
"foo": "bar",
"Dummy Password": "Dummy Password",
"Dummy Yoloswag Password": "Dummy Yoloswag Password"
}

0
test/src/__init__.py Normal file
View file

104
test/src/ldap_server.py Normal file
View file

@ -0,0 +1,104 @@
try:
import slapdtest
except ImportError:
import old_slapdtest as slapdtest
import os
from moulinette.authenticators import ldap as m_ldap
HERE = os.path.abspath(os.path.dirname(__file__))
class LDAPServer:
def __init__(self):
self.server_default = slapdtest.SlapdObject()
with open(os.path.join(HERE, "..", "ldap_files", "slapd.conf.template")) as f:
SLAPD_CONF_TEMPLATE = f.read()
self.server_default.slapd_conf_template = SLAPD_CONF_TEMPLATE
self.server_default.suffix = "dc=yunohost,dc=org"
self.server_default.root_cn = "admin"
self.server_default.SCHEMADIR = os.path.join(HERE, "..", "ldap_files", "schema")
self.server_default.openldap_schema_files = [
"core.schema",
"cosine.schema",
"nis.schema",
"inetorgperson.schema",
"sudo.schema",
"yunohost.schema",
"mailserver.schema",
]
self.server = None
self.uri = ""
def start(self):
self.server = self.server_default
self.server.start()
self.uri = self.server.ldapi_uri
with open(os.path.join(HERE, "..", "ldap_files", "tests.ldif")) as fp:
ldif = fp.read().decode("utf-8")
self.server.ldapadd(ldif)
self.tools_ldapinit()
def stop(self):
if self.server:
self.server.stop()
def __del__(self):
if self.server:
self.server.stop()
def tools_ldapinit(self):
"""
YunoHost LDAP initialization
"""
import yaml
with open(os.path.join(HERE, "..", "ldap_files", "ldap_scheme.yml")) as f:
ldap_map = yaml.load(f)
def _get_ldap_interface():
conf = {
"vendor": "ldap",
"name": "as-root",
"parameters": {
"uri": self.server.ldapi_uri,
"base_dn": "dc=yunohost,dc=org",
"user_rdn": "gidNumber=%s+uidNumber=%s,cn=peercred,cn=external,cn=auth"
% (os.getgid(), os.getuid()),
},
"extra": {},
}
_ldap_interface = m_ldap.Authenticator(**conf)
return _ldap_interface
ldap_interface = _get_ldap_interface()
for rdn, attr_dict in ldap_map["parents"].items():
ldap_interface.add(rdn, attr_dict)
for rdn, attr_dict in ldap_map["children"].items():
ldap_interface.add(rdn, attr_dict)
for rdn, attr_dict in ldap_map["depends_children"].items():
ldap_interface.add(rdn, attr_dict)
admin_dict = {
"cn": ["admin"],
"uid": ["admin"],
"description": ["LDAP Administrator"],
"gidNumber": ["1007"],
"uidNumber": ["1007"],
"homeDirectory": ["/home/admin"],
"loginShell": ["/bin/bash"],
"objectClass": [
"organizationalRole",
"posixAccount",
"simpleSecurityObject",
],
"userPassword": ["yunohost"],
}
ldap_interface.update("cn=admin", admin_dict)

View file

@ -0,0 +1,2 @@
I have adapted the code from https://github.com/python-ldap/python-ldap/tree/master/Lib/slapdtest since the version of python-ldap we use does not ship the slapdtest module.
This part must be removed once we switch to Python 3.
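For reference, the fallback is consumed through a guarded import, the same pattern used by test/src/ldap_server.py in this commit:
try:
    import slapdtest                   # shipped with recent python-ldap releases
except ImportError:
    import old_slapdtest as slapdtest  # this local copy, to be removed with Python 3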

View file

@ -0,0 +1,2 @@
# flake8: noqa
from ._slapdtest import SlapdObject

View file

@ -0,0 +1,653 @@
# -*- coding: utf-8 -*-
# flake8: noqa
"""
slapdtest - module for spawning test instances of OpenLDAP's slapd server
See https://www.python-ldap.org/ for details.
"""
from __future__ import unicode_literals
import os
import socket
import sys
import time
import subprocess
import logging
import atexit
from logging.handlers import SysLogHandler
import unittest
# Switch off processing .ldaprc or ldap.conf before importing _ldap
os.environ["LDAPNOINIT"] = "1"
import ldap
from urllib import quote_plus
try:
from shutil import which
except ImportError:
# shutil.which() from Python 3.6
# "Copyright (c) 2001, 2002, 2003, 2004, 2005, 2006, 2007, 2008, 2009, 2010,
# 2011, 2012, 2013, 2014, 2015, 2016, 2017 Python Software Foundation;
# All Rights Reserved"
def which(cmd, mode=os.F_OK | os.X_OK, path=None):
"""Given a command, mode, and a PATH string, return the path which
conforms to the given mode on the PATH, or None if there is no such
file.
`mode` defaults to os.F_OK | os.X_OK. `path` defaults to the result
of os.environ.get("PATH"), or can be overridden with a custom search
path.
"""
# Check that a given file can be accessed with the correct mode.
# Additionally check that `file` is not a directory, as on Windows
# directories pass the os.access check.
def _access_check(fn, mode):
return os.path.exists(fn) and os.access(fn, mode) and not os.path.isdir(fn)
# If we're given a path with a directory part, look it up directly rather
# than referring to PATH directories. This includes checking relative to the
# current directory, e.g. ./script
if os.path.dirname(cmd):
if _access_check(cmd, mode):
return cmd
return None
if path is None:
path = os.environ.get("PATH", os.defpath)
if not path:
return None
path = path.split(os.pathsep)
if sys.platform == "win32":
# The current directory takes precedence on Windows.
if not os.curdir in path:
path.insert(0, os.curdir)
# PATHEXT is necessary to check on Windows.
pathext = os.environ.get("PATHEXT", "").split(os.pathsep)
# See if the given file matches any of the expected path extensions.
# This will allow us to short circuit when given "python.exe".
# If it does match, only test that one, otherwise we have to try
# others.
if any(cmd.lower().endswith(ext.lower()) for ext in pathext):
files = [cmd]
else:
files = [cmd + ext for ext in pathext]
else:
# On other platforms you don't have things like PATHEXT to tell you
# what file suffixes are executable, so just pass on cmd as-is.
files = [cmd]
seen = set()
for dir in path:
normdir = os.path.normcase(dir)
if not normdir in seen:
seen.add(normdir)
for thefile in files:
name = os.path.join(dir, thefile)
if _access_check(name, mode):
return name
return None
HERE = os.path.abspath(os.path.dirname(__file__))
# a template string for generating simple slapd.conf file
SLAPD_CONF_TEMPLATE = r"""
serverID %(serverid)s
moduleload back_%(database)s
%(include_directives)s
loglevel %(loglevel)s
allow bind_v2
authz-regexp
"gidnumber=%(root_gid)s\\+uidnumber=%(root_uid)s,cn=peercred,cn=external,cn=auth"
"%(rootdn)s"
database %(database)s
directory "%(directory)s"
suffix "%(suffix)s"
rootdn "%(rootdn)s"
rootpw "%(rootpw)s"
TLSCACertificateFile "%(cafile)s"
TLSCertificateFile "%(servercert)s"
TLSCertificateKeyFile "%(serverkey)s"
# ignore missing client cert but fail with invalid client cert
TLSVerifyClient try
authz-regexp
"C=DE, O=python-ldap, OU=slapd-test, CN=([A-Za-z]+)"
"ldap://ou=people,dc=local???($1)"
"""
LOCALHOST = "127.0.0.1"
CI_DISABLED = set(os.environ.get("CI_DISABLED", "").split(":"))
if "LDAPI" in CI_DISABLED:
HAVE_LDAPI = False
else:
HAVE_LDAPI = hasattr(socket, "AF_UNIX")
def identity(test_item):
"""Identity decorator
"""
return test_item
def skip_unless_ci(reason, feature=None):
"""Skip test unless test case is executed on CI like Travis CI
"""
if not os.environ.get("CI", False):
return unittest.skip(reason)
elif feature in CI_DISABLED:
return unittest.skip(reason)
else:
# Don't skip on Travis
return identity
def requires_tls():
"""Decorator for TLS tests
Tests are not skipped on CI (e.g. Travis CI)
"""
if not ldap.TLS_AVAIL:
return skip_unless_ci("test needs ldap.TLS_AVAIL", feature="TLS")
else:
return identity
def requires_sasl():
if not ldap.SASL_AVAIL:
return skip_unless_ci("test needs ldap.SASL_AVAIL", feature="SASL")
else:
return identity
def requires_ldapi():
if not HAVE_LDAPI:
return skip_unless_ci("test needs ldapi support (AF_UNIX)", feature="LDAPI")
else:
return identity
def _add_sbin(path):
"""Add /sbin and related directories to a command search path"""
directories = path.split(os.pathsep)
if sys.platform != "win32":
for sbin in "/usr/local/sbin", "/sbin", "/usr/sbin":
if sbin not in directories:
directories.append(sbin)
return os.pathsep.join(directories)
def combined_logger(
log_name,
log_level=logging.WARN,
sys_log_format="%(levelname)s %(message)s",
console_log_format="%(asctime)s %(levelname)s %(message)s",
):
"""
Returns a combined SysLogHandler/StreamHandler logging instance
with formatters
"""
if "LOGLEVEL" in os.environ:
log_level = os.environ["LOGLEVEL"]
try:
log_level = int(log_level)
except ValueError:
pass
# for writing to syslog
new_logger = logging.getLogger(log_name)
if sys_log_format and os.path.exists("/dev/log"):
my_syslog_formatter = logging.Formatter(
fmt=" ".join((log_name, sys_log_format))
)
my_syslog_handler = logging.handlers.SysLogHandler(
address="/dev/log", facility=SysLogHandler.LOG_DAEMON,
)
my_syslog_handler.setFormatter(my_syslog_formatter)
new_logger.addHandler(my_syslog_handler)
if console_log_format:
my_stream_formatter = logging.Formatter(fmt=console_log_format)
my_stream_handler = logging.StreamHandler()
my_stream_handler.setFormatter(my_stream_formatter)
new_logger.addHandler(my_stream_handler)
new_logger.setLevel(log_level)
return new_logger # end of combined_logger()
class SlapdObject(object):
"""
Controller class for a slapd instance, OpenLDAP's server.
This class creates a temporary data store for slapd, runs it
listening on a private Unix domain socket and TCP port,
and initializes it with a top-level entry and the root user.
When a reference to an instance of this class is lost, the slapd
server is shut down.
An instance can be used as a context manager. When exiting the context
manager, the slapd server is shut down and the temporary data store is
removed.
.. versionchanged:: 3.1
Added context manager functionality
"""
slapd_conf_template = SLAPD_CONF_TEMPLATE
database = "mdb"
suffix = "dc=slapd-test,dc=python-ldap,dc=org"
root_cn = "Manager"
root_pw = "password"
slapd_loglevel = "stats stats2"
local_host = "127.0.0.1"
testrunsubdirs = ("schema",)
openldap_schema_files = ("core.schema",)
TMPDIR = os.environ.get("TMP", os.getcwd())
if "SCHEMA" in os.environ:
SCHEMADIR = os.environ["SCHEMA"]
elif os.path.isdir("/etc/openldap/schema"):
SCHEMADIR = "/etc/openldap/schema"
elif os.path.isdir("/etc/ldap/schema"):
SCHEMADIR = "/etc/ldap/schema"
else:
SCHEMADIR = None
BIN_PATH = os.environ.get("BIN", os.environ.get("PATH", os.defpath))
SBIN_PATH = os.environ.get("SBIN", _add_sbin(BIN_PATH))
# time in secs to wait before trying to access slapd via LDAP (again)
_start_sleep = 1.5
# create loggers once, multiple calls mess up refleak tests
_log = combined_logger("python-ldap-test")
def __init__(self):
self._proc = None
self._port = self._avail_tcp_port()
self.server_id = self._port % 4096
self.testrundir = os.path.join(self.TMPDIR, "python-ldap-test-%d" % self._port)
self._schema_prefix = os.path.join(self.testrundir, "schema")
self._slapd_conf = os.path.join(self.testrundir, "slapd.conf")
self._db_directory = os.path.join(self.testrundir, "openldap-data")
self.ldap_uri = "ldap://%s:%d/" % (LOCALHOST, self._port)
if HAVE_LDAPI:
ldapi_path = os.path.join(self.testrundir, "ldapi")
self.ldapi_uri = "ldapi://%s" % quote_plus(ldapi_path)
self.default_ldap_uri = self.ldapi_uri
# use SASL/EXTERNAL via LDAPI when invoking OpenLDAP CLI tools
self.cli_sasl_external = ldap.SASL_AVAIL
else:
self.ldapi_uri = None
self.default_ldap_uri = self.ldap_uri
# Use simple bind via LDAP uri
self.cli_sasl_external = False
self._find_commands()
if self.SCHEMADIR is None:
raise ValueError("SCHEMADIR is None, ldap schemas are missing.")
# TLS certs
self.cafile = os.path.join(HERE, "certs/ca.pem")
self.servercert = os.path.join(HERE, "certs/server.pem")
self.serverkey = os.path.join(HERE, "certs/server.key")
self.clientcert = os.path.join(HERE, "certs/client.pem")
self.clientkey = os.path.join(HERE, "certs/client.key")
@property
def root_dn(self):
return "cn={self.root_cn},{self.suffix}".format(self=self)
def _find_commands(self):
self.PATH_LDAPADD = self._find_command("ldapadd")
self.PATH_LDAPDELETE = self._find_command("ldapdelete")
self.PATH_LDAPMODIFY = self._find_command("ldapmodify")
self.PATH_LDAPWHOAMI = self._find_command("ldapwhoami")
self.PATH_SLAPD = os.environ.get("SLAPD", None)
if not self.PATH_SLAPD:
self.PATH_SLAPD = self._find_command("slapd", in_sbin=True)
def _find_command(self, cmd, in_sbin=False):
if in_sbin:
path = self.SBIN_PATH
var_name = "SBIN"
else:
path = self.BIN_PATH
var_name = "BIN"
command = which(cmd, path=path)
if command is None:
raise ValueError(
"Command '{}' not found. Set the {} environment variable to "
"override slapdtest's search path.".format(cmd, var_name)
)
return command
def setup_rundir(self):
"""
creates rundir structure
for setting up a custom directory structure you have to override
this method
"""
os.mkdir(self.testrundir)
os.mkdir(self._db_directory)
self._create_sub_dirs(self.testrunsubdirs)
self._ln_schema_files(self.openldap_schema_files, self.SCHEMADIR)
def _cleanup_rundir(self):
"""
Recursively delete whole directory specified by `path'
"""
# cleanup_rundir() is called in atexit handler. Until Python 3.4,
# the rest of the world is already destroyed.
import os, os.path
if not os.path.exists(self.testrundir):
return
self._log.debug("clean-up %s", self.testrundir)
for dirpath, dirnames, filenames in os.walk(self.testrundir, topdown=False):
for filename in filenames:
self._log.debug("remove %s", os.path.join(dirpath, filename))
os.remove(os.path.join(dirpath, filename))
for dirname in dirnames:
self._log.debug("rmdir %s", os.path.join(dirpath, dirname))
os.rmdir(os.path.join(dirpath, dirname))
os.rmdir(self.testrundir)
self._log.info("cleaned-up %s", self.testrundir)
def _avail_tcp_port(self):
"""
find an available port for TCP connection
"""
sock = socket.socket()
try:
sock.bind((self.local_host, 0))
port = sock.getsockname()[1]
finally:
sock.close()
self._log.info("Found available port %d", port)
return port
def gen_config(self):
"""
generates a slapd.conf and returns it as one string
for generating specific static configuration files you have to
override this method
"""
include_directives = "\n".join(
'include "{schema_prefix}/{schema_file}"'.format(
schema_prefix=self._schema_prefix, schema_file=schema_file,
)
for schema_file in self.openldap_schema_files
)
config_dict = {
"serverid": hex(self.server_id),
"schema_prefix": self._schema_prefix,
"include_directives": include_directives,
"loglevel": self.slapd_loglevel,
"database": self.database,
"directory": self._db_directory,
"suffix": self.suffix,
"rootdn": self.root_dn,
"rootpw": self.root_pw,
"root_uid": os.getuid(),
"root_gid": os.getgid(),
"cafile": self.cafile,
"servercert": self.servercert,
"serverkey": self.serverkey,
}
return self.slapd_conf_template % config_dict
def _create_sub_dirs(self, dir_names):
"""
create sub-directories beneath self.testrundir
"""
for dname in dir_names:
dir_name = os.path.join(self.testrundir, dname)
self._log.debug("Create directory %s", dir_name)
os.mkdir(dir_name)
def _ln_schema_files(self, file_names, source_dir):
"""
write symbolic links to original schema files
"""
for fname in file_names:
ln_source = os.path.join(source_dir, fname)
ln_target = os.path.join(self._schema_prefix, fname)
self._log.debug("Create symlink %s -> %s", ln_source, ln_target)
os.symlink(ln_source, ln_target)
def _write_config(self):
"""Writes the slapd.conf file out, and returns the path to it."""
self._log.debug("Writing config to %s", self._slapd_conf)
with open(self._slapd_conf, "w") as config_file:
config_file.write(self.gen_config())
self._log.info("Wrote config to %s", self._slapd_conf)
def _test_config(self):
self._log.debug("testing config %s", self._slapd_conf)
popen_list = [
self.PATH_SLAPD,
"-Ttest",
"-f",
self._slapd_conf,
"-u",
]
if self._log.isEnabledFor(logging.DEBUG):
popen_list.append("-v")
popen_list.extend(["-d", "config"])
else:
popen_list.append("-Q")
proc = subprocess.Popen(popen_list)
if proc.wait() != 0:
raise RuntimeError("configuration test failed")
self._log.info("config ok: %s", self._slapd_conf)
def _start_slapd(self):
"""
Spawns/forks the slapd process
"""
urls = [self.ldap_uri]
if self.ldapi_uri:
urls.append(self.ldapi_uri)
slapd_args = [
self.PATH_SLAPD,
"-f",
self._slapd_conf,
"-F",
self.testrundir,
"-h",
" ".join(urls),
]
if self._log.isEnabledFor(logging.DEBUG):
slapd_args.extend(["-d", "-1"])
else:
slapd_args.extend(["-d", "0"])
self._log.info("starting slapd: %r", " ".join(slapd_args))
self._proc = subprocess.Popen(slapd_args)
# Waits until the LDAP server socket is open, or slapd crashed
# no cover to avoid spurious coverage changes, see
# https://github.com/python-ldap/python-ldap/issues/127
for _ in range(10): # pragma: no cover
if self._proc.poll() is not None:
self._stopped()
raise RuntimeError("slapd exited before opening port")
time.sleep(self._start_sleep)
try:
self._log.debug("slapd connection check to %s", self.default_ldap_uri)
self.ldapwhoami()
except RuntimeError:
pass
else:
return
raise RuntimeError("slapd did not start properly")
def start(self):
"""
Starts the slapd server process running, and waits for it to come up.
"""
if self._proc is None:
# prepare directory structure
atexit.register(self.stop)
self._cleanup_rundir()
self.setup_rundir()
self._write_config()
self._test_config()
self._start_slapd()
self._log.debug(
"slapd with pid=%d listening on %s and %s",
self._proc.pid,
self.ldap_uri,
self.ldapi_uri,
)
def stop(self):
"""
Stops the slapd server, and waits for it to terminate and cleans up
"""
if self._proc is not None:
self._log.debug("stopping slapd with pid %d", self._proc.pid)
self._proc.terminate()
self.wait()
self._cleanup_rundir()
if hasattr(atexit, "unregister"):
# Python 3
atexit.unregister(self.stop)
elif hasattr(atexit, "_exithandlers"):
# Python 2, can be None during process shutdown
try:
atexit._exithandlers.remove(self.stop)
except ValueError:
pass
def restart(self):
"""
Restarts the slapd server with same data
"""
self._proc.terminate()
self.wait()
self._start_slapd()
def wait(self):
"""Waits for the slapd process to terminate by itself."""
if self._proc:
self._proc.wait()
self._stopped()
def _stopped(self):
"""Called when the slapd server is known to have terminated"""
if self._proc is not None:
self._log.info("slapd[%d] terminated", self._proc.pid)
self._proc = None
def _cli_auth_args(self):
if self.cli_sasl_external:
authc_args = [
"-Y",
"EXTERNAL",
]
if not self._log.isEnabledFor(logging.DEBUG):
authc_args.append("-Q")
else:
authc_args = [
"-x",
"-D",
self.root_dn,
"-w",
self.root_pw,
]
return authc_args
# no cover to avoid spurious coverage changes
def _cli_popen(
self, ldapcommand, extra_args=None, ldap_uri=None, stdin_data=None
): # pragma: no cover
if ldap_uri is None:
ldap_uri = self.default_ldap_uri
args = (
[ldapcommand, "-H", ldap_uri,] + self._cli_auth_args() + (extra_args or [])
)
self._log.debug("Run command: %r", " ".join(args))
proc = subprocess.Popen(
args, stdin=subprocess.PIPE, stdout=subprocess.PIPE, stderr=subprocess.PIPE
)
self._log.debug("stdin_data=%r", stdin_data)
stdout_data, stderr_data = proc.communicate(stdin_data)
if stdout_data is not None:
self._log.debug("stdout_data=%r", stdout_data)
if stderr_data is not None:
self._log.debug("stderr_data=%r", stderr_data)
if proc.wait() != 0:
raise RuntimeError(
"{!r} process failed:\n{!r}\n{!r}".format(
args, stdout_data, stderr_data
)
)
return stdout_data, stderr_data
def ldapwhoami(self, extra_args=None):
"""
Runs ldapwhoami on this slapd instance
"""
self._cli_popen(self.PATH_LDAPWHOAMI, extra_args=extra_args)
def ldapadd(self, ldif, extra_args=None):
"""
Runs ldapadd on this slapd instance, passing it the ldif content
"""
self._cli_popen(
self.PATH_LDAPADD, extra_args=extra_args, stdin_data=ldif.encode("utf-8")
)
def ldapmodify(self, ldif, extra_args=None):
"""
Runs ldapmodify on this slapd instance, passing it the ldif content
"""
self._cli_popen(
self.PATH_LDAPMODIFY, extra_args=extra_args, stdin_data=ldif.encode("utf-8")
)
def ldapdelete(self, dn, recursive=False, extra_args=None):
"""
Runs ldapdelete on this slapd instance, deleting 'dn'
"""
if extra_args is None:
extra_args = []
if recursive:
extra_args.append("-r")
extra_args.append(dn)
self._cli_popen(self.PATH_LDAPDELETE, extra_args=extra_args)
def __enter__(self):
self.start()
return self
def __exit__(self, exc_type, exc_value, traceback):
self.stop()
class SlapdTestCase(unittest.TestCase):
"""
test class which also clones or initializes a running slapd
"""
server_class = SlapdObject
server = None
ldap_object_class = None
def _open_ldap_conn(self, who=None, cred=None, **kwargs):
"""
return a LDAPObject instance after simple bind
"""
ldap_conn = self.ldap_object_class(self.server.ldap_uri, **kwargs)
ldap_conn.protocol_version = 3
# ldap_conn.set_option(ldap.OPT_REFERRALS, 0)
ldap_conn.simple_bind_s(who or self.server.root_dn, cred or self.server.root_pw)
return ldap_conn
@classmethod
def setUpClass(cls):
cls.server = cls.server_class()
cls.server.start()
@classmethod
def tearDownClass(cls):
cls.server.stop()
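Note (illustration, not part of this commit): as the SlapdObject docstring says, an instance can be used as a context manager; a minimal sketch with a made-up LDIF entry matching the default suffix:
from old_slapdtest import SlapdObject
ldif = """dn: dc=slapd-test,dc=python-ldap,dc=org
dc: slapd-test
o: slapd-test
objectClass: dcObject
objectClass: organization
"""
with SlapdObject() as server:    # starts slapd on a private port / LDAPI socket
    server.ldapadd(ldif)         # pipe the entry through the ldapadd CLI tool
    server.ldapwhoami()          # simple connectivity check
# leaving the block stops slapd and removes the temporary data store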

View file

@ -0,0 +1,24 @@
python-ldap test certificates
=============================
Certificates and keys
---------------------
* ``ca.pem``: internal root CA certificate
* ``server.pem``: TLS server certificate for slapd, signed by root CA. The
server cert is valid for DNS Name ``localhost`` and IPs ``127.0.0.1`` and
``::1``.
* ``server.key``: private key for ``server.pem``, no password protection
* ``client.pem``: certificate for TLS client cert authentication, signed by
root CA.
* ``client.key``: private key for ``client.pem``, no password protection
Configuration and scripts
-------------------------
* ``ca.conf`` contains the CA definition as well as extensions for the
client and server certificates.
* ``client.conf`` and ``server.conf`` hold the subject and base configuration
for server and client certs.
* ``gencerts.sh`` creates new CA, client and server certificates.
* ``gennssdb.sh`` can be used to create an NSS DB for all certs and keys.
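Note (illustration, not part of this commit): a python-ldap client can be pointed at the generated material roughly like this; the URI is a placeholder and the file paths assume the current directory:
import ldap
conn = ldap.initialize("ldap://localhost:389")           # placeholder URI
conn.set_option(ldap.OPT_X_TLS_CACERTFILE, "ca.pem")     # trust the test root CA
conn.set_option(ldap.OPT_X_TLS_CERTFILE, "client.pem")   # client-certificate auth
conn.set_option(ldap.OPT_X_TLS_KEYFILE, "client.key")
conn.set_option(ldap.OPT_X_TLS_NEWCTX, 0)                # apply the TLS options
conn.start_tls_s()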

View file

@ -0,0 +1,77 @@
# Written by Christian Heimes
[default]
ca = "ca"
tmpdir = $ENV::CATMPDIR
outdir = $ENV::CAOUTDIR
name_opt = multiline,-esc_msb,utf8
[req]
default_bits = 2048
encrypt_key = no
default_md = sha256
utf8 = yes
string_mask = utf8only
prompt = no
distinguished_name = ca_dn
[ca_dn]
countryName = "DE"
organizationName = "python-ldap"
organizationalUnitName = "slapd-test"
commonName = "Python LDAP Test CA"
[ca]
default_ca = python_ldap_ca
[python_ldap_ca]
certificate = $outdir/$ca.pem
private_key = $outdir/$ca.key
new_certs_dir = $tmpdir
serial = $tmpdir/$ca.crt.srl
crlnumber = $tmpdir/$ca.crl.srl
database = $tmpdir/$ca.db
unique_subject = no
default_days = 365200
default_md = sha256
policy = match_pol
email_in_dn = no
preserve = no
name_opt = $name_opt
cert_opt = ca_default
copy_extensions = none
default_crl_days = 365100
[match_pol]
countryName = match
stateOrProvinceName = optional
localityName = optional
organizationName = match
organizationalUnitName = match
commonName = supplied
[ca_ext]
basicConstraints = critical,CA:true
keyUsage = critical,keyCertSign,cRLSign
subjectKeyIdentifier = hash
authorityKeyIdentifier = keyid:always
[server_san]
DNS.1 = localhost
IP.1 = 127.0.0.1
IP.2 = ::1
[server_ext]
basicConstraints = critical,CA:false
keyUsage = critical,digitalSignature,keyEncipherment
extendedKeyUsage = critical,serverAuth
subjectKeyIdentifier = hash
authorityKeyIdentifier = keyid:always
subjectAltName = @server_san
[client_ext]
basicConstraints = critical,CA:false
keyUsage = critical,digitalSignature
extendedKeyUsage = critical,clientAuth
subjectKeyIdentifier = hash
authorityKeyIdentifier = keyid:always

View file

@ -0,0 +1,80 @@
Certificate:
Data:
Version: 3 (0x2)
Serial Number: 1 (0x1)
Signature Algorithm: sha256WithRSAEncryption
Issuer: C=DE, O=python-ldap, OU=slapd-test, CN=Python LDAP Test CA
Validity
Not Before: Apr 12 18:52:38 2019 GMT
Not After : Oct 17 18:52:38 2994 GMT
Subject: C=DE, O=python-ldap, OU=slapd-test, CN=Python LDAP Test CA
Subject Public Key Info:
Public Key Algorithm: rsaEncryption
Public-Key: (2048 bit)
Modulus:
00:d7:30:73:20:44:7d:83:d4:c7:01:b8:ab:1e:7c:
91:f4:38:ac:9c:41:43:64:0c:31:99:48:70:22:7d:
ae:1b:47:e7:2a:28:4d:f7:46:4e:b4:ba:ae:c0:9d:
d5:1f:4b:7a:79:2f:b9:dc:68:7f:79:84:88:50:51:
3b:7d:dc:d5:57:17:66:45:c0:2c:20:13:f7:99:d6:
9d:e2:12:7c:41:76:82:51:19:2c:b6:ff:46:cb:04:
56:38:22:2a:c3:7a:b5:71:51:49:4e:62:68:a0:99:
6f:de:f3:a2:0f:a2:aa:1b:72:a5:87:bc:42:5a:a7:
22:8d:33:b4:88:a8:dc:5d:72:ca:dd:a0:9a:4e:db:
7d:8b:10:de:c5:41:e9:e9:8d:fa:6c:dd:94:6e:b1:
31:c2:6d:a1:69:6c:7a:3a:b2:76:65:c9:e5:95:38:
62:40:81:c6:29:26:26:d1:d1:c1:f4:5e:fa:24:ef:
13:da:24:13:6f:f5:5c:ba:b1:31:8f:30:94:71:7b:
c6:e5:da:b9:b5:64:39:39:09:c2:4a:80:64:58:1d:
99:f5:65:3c:a7:26:08:95:26:35:7b:fa:e7:20:08:
ff:72:df:9b:8f:9f:da:8b:c3:a7:8b:fc:8c:c0:a5:
31:87:1d:4c:14:f6:cf:90:5e:2e:6e:a6:db:27:08:
eb:df
Exponent: 65537 (0x10001)
X509v3 extensions:
X509v3 Basic Constraints: critical
CA:TRUE
X509v3 Key Usage: critical
Certificate Sign, CRL Sign
X509v3 Subject Key Identifier:
BD:78:D5:4A:F1:90:96:C5:E8:EC:66:49:23:47:03:5F:26:73:86:B2
X509v3 Authority Key Identifier:
keyid:BD:78:D5:4A:F1:90:96:C5:E8:EC:66:49:23:47:03:5F:26:73:86:B2
Signature Algorithm: sha256WithRSAEncryption
06:20:1f:eb:42:6a:42:62:b1:ee:69:c8:cd:47:a6:2e:69:95:
59:dc:49:09:69:40:93:25:a1:ec:6d:3a:dd:dc:e5:74:ab:33:
9d:8f:cc:e3:bb:7a:3f:5b:51:58:74:f7:bd:6c:7c:3c:b6:5a:
05:50:a8:8c:c3:fb:5b:75:2a:c2:6c:06:93:4c:a9:93:71:1c:
51:e5:be:a1:24:93:e2:79:ca:ea:08:86:90:b9:70:e7:7a:40:
bf:f4:d6:71:f4:4d:c0:0f:e0:31:a0:23:46:77:30:72:a9:62:
8a:2a:12:c4:dd:3d:86:ae:f7:6b:33:80:26:58:49:53:ff:cd:
8a:c6:f6:11:2c:b3:ff:a5:8e:1c:f8:22:e2:1b:8e:04:33:fb:
0d:da:31:86:12:9f:d1:03:86:9c:6a:78:5e:3c:5e:8a:52:aa:
68:1f:ff:f9:17:75:b0:da:f2:99:3c:80:3c:96:2a:33:07:54:
59:84:e7:92:34:0f:99:76:e3:d6:4d:4d:9c:fb:21:35:f9:cb:
a5:30:80:8b:9d:61:90:d3:d4:59:3a:2f:f2:f6:20:13:7e:26:
dc:50:b0:49:3e:19:fe:eb:7d:cf:b9:1a:5d:5c:3a:76:30:d9:
0e:d7:df:de:ce:a9:c4:21:df:63:b9:d0:64:86:0b:28:9a:2e:
ab:51:73:e4
-----BEGIN CERTIFICATE-----
MIIDjDCCAnSgAwIBAgIBATANBgkqhkiG9w0BAQsFADBWMQswCQYDVQQGEwJERTEU
MBIGA1UECgwLcHl0aG9uLWxkYXAxEzARBgNVBAsMCnNsYXBkLXRlc3QxHDAaBgNV
BAMME1B5dGhvbiBMREFQIFRlc3QgQ0EwIBcNMTkwNDEyMTg1MjM4WhgPMjk5NDEw
MTcxODUyMzhaMFYxCzAJBgNVBAYTAkRFMRQwEgYDVQQKDAtweXRob24tbGRhcDET
MBEGA1UECwwKc2xhcGQtdGVzdDEcMBoGA1UEAwwTUHl0aG9uIExEQVAgVGVzdCBD
QTCCASIwDQYJKoZIhvcNAQEBBQADggEPADCCAQoCggEBANcwcyBEfYPUxwG4qx58
kfQ4rJxBQ2QMMZlIcCJ9rhtH5yooTfdGTrS6rsCd1R9Lenkvudxof3mEiFBRO33c
1VcXZkXALCAT95nWneISfEF2glEZLLb/RssEVjgiKsN6tXFRSU5iaKCZb97zog+i
qhtypYe8QlqnIo0ztIio3F1yyt2gmk7bfYsQ3sVB6emN+mzdlG6xMcJtoWlsejqy
dmXJ5ZU4YkCBxikmJtHRwfRe+iTvE9okE2/1XLqxMY8wlHF7xuXaubVkOTkJwkqA
ZFgdmfVlPKcmCJUmNXv65yAI/3Lfm4+f2ovDp4v8jMClMYcdTBT2z5BeLm6m2ycI
698CAwEAAaNjMGEwDwYDVR0TAQH/BAUwAwEB/zAOBgNVHQ8BAf8EBAMCAQYwHQYD
VR0OBBYEFL141UrxkJbF6OxmSSNHA18mc4ayMB8GA1UdIwQYMBaAFL141UrxkJbF
6OxmSSNHA18mc4ayMA0GCSqGSIb3DQEBCwUAA4IBAQAGIB/rQmpCYrHuacjNR6Yu
aZVZ3EkJaUCTJaHsbTrd3OV0qzOdj8zju3o/W1FYdPe9bHw8tloFUKiMw/tbdSrC
bAaTTKmTcRxR5b6hJJPiecrqCIaQuXDnekC/9NZx9E3AD+AxoCNGdzByqWKKKhLE
3T2GrvdrM4AmWElT/82KxvYRLLP/pY4c+CLiG44EM/sN2jGGEp/RA4acanhePF6K
UqpoH//5F3Ww2vKZPIA8liozB1RZhOeSNA+ZduPWTU2c+yE1+culMICLnWGQ09RZ
Oi/y9iATfibcULBJPhn+633PuRpdXDp2MNkO19/ezqnEId9judBkhgsomi6rUXPk
-----END CERTIFICATE-----

View file

@ -0,0 +1,16 @@
# Written by Christian Heimes
[req]
default_bits = 2048
encrypt_key = no
default_md = sha256
utf8 = yes
string_mask = utf8only
prompt = no
distinguished_name = client_dn
[client_dn]
countryName = "DE"
organizationName = "python-ldap"
organizationalUnitName = "slapd-test"
commonName = "client"

View file

@ -0,0 +1,28 @@
-----BEGIN PRIVATE KEY-----
MIIEvQIBADANBgkqhkiG9w0BAQEFAASCBKcwggSjAgEAAoIBAQDjt5O6nRrnAWPm
T0JvRLBHMclll92IWF/O4GEdcJ5fbBxP3BxK0Dv+6aRcR7b2o0f6fk/bgNepXfv/
MXDQcFlESbfmUNGshFmZr0sjPrYPD1R06TZs+/7RsMXnx1c79mFGEQ4wqzDOBHKQ
xeDhNJk+BcE0QABsqF8AA2XC2/dK14QCljKLC84k1zTFTnh8duN2eAalaPQFFOoj
4AnonUnswJ45zIx5V2BdG+oqO5dwo/cEukKgAEL8T2IJ9Cqlmh2sPbMqYC8cODq6
YcugMznxrfHV5LNThfkvwMe26+vv68r65zalPDy0M+cUMTMyBVY4TL3fejrloY2t
YMhPJIclAgMBAAECggEAPXdd/u9NRbGQX6hhTFuEIZOEw1F80MLaCaNzU1kExskN
01icom0W5LX4UZhiAK0OTsUtlRhwHh1qWfXkd777uX0UkKycDC8laGByra7Nwb7n
ky8oK77Rh5RptyiNmXflxd3wsJ5k7BczPXTMQL3L53vyLMJh2vKPwhcorrJlS+Pi
JjINMaR4IrDlpMYlrn9NTjsGr+mj/pdmKfU/KVXeKzFcwKTjUnDJNSbGDIC0AxaJ
dGU0yIX9MPW+p5szcA9o22UWW4LsEFY4YABeCqbm9/UQt3jWVMjCy4AOgr/9HWSR
DvXI/Xtdl3CTCr8+qDnhBaUI27z+UelZfTBFKUb8AQKBgQD6SmtrTBgEfb6tuxJw
AAHRuUcWGjatZ7X+meHRC9B7UPxUrKl9tU5NC7Gz6YMt+vr4bNMwykI6Ndj+4tSJ
KqsAC86v19CH4usMBLZ68MeTRvtQGiPah71syYrxf0uvYOx/KzUUBX240Ls+lEbE
W33psMoNAezUPpJwKx7CMjcBgQKBgQDo6VaT59bKRc3DXJvqFjd7TPIex+ny6JK+
8oOwyyFFBwkzfymoOxN4lxSrE6yf7uTemRRn+RIH3UGDottIDqzhjvtcV5uODeIN
8WzxTbl759qIxt+z7aF7SkwJLJAAZS3qqCXKtMBo7ln4xKaoRLT2RohqD1YXGrg8
wmYcUZoPpQKBgQCm2QVSuZ8pH0oFNjfMQbT0wbYJnd/lKMXBu4M1f9Ky4gHT0GYM
Ttirs6f6byfrduvmv2TpmWscsti80SktZywnE7fssMlqTHKzyFB9FBV2sFLHyyUr
gGFeK9xbsKgbeVkuTPdNKXvtv/eSd/XU38jIB/opQadGtY+ZBqWyfxb8AQKBgBLc
SlmBzZ/llSr7xdhn4ihG69hYQfacpL13r/hSCqinUDRuWLY5ynLacR8FYdY1pyzr
Yn6k6bPfU93QA0fLgG5ngK1SntMbBrIwWa0UqS+Cb+zhhd3xIUF1m8CmbibKCrTU
1vKaPnaAzqJZclFv9uN2hLdp9IO8cyzgZRpn9TzNAoGAUfZF1983qknfBgD8Lgm3
zzKYtc8q2Ukatfo4VCp66CEprbLcBq5mKx6JiBoMGqU8SI5XVG0F0aHH2n8gImcu
bO0vtEldDc1ylZ/H7xhHFWlMzmTlsbHdHVtetFfKLTpjq6duvgLA12lJNHNVu3OU
Z1bRWDeZIP70+jdYrmSoVi8=
-----END PRIVATE KEY-----

View file

@ -0,0 +1,83 @@
Certificate:
Data:
Version: 3 (0x2)
Serial Number: 3 (0x3)
Signature Algorithm: sha256WithRSAEncryption
Issuer: C=DE, O=python-ldap, OU=slapd-test, CN=Python LDAP Test CA
Validity
Not Before: Apr 12 18:52:38 2019 GMT
Not After : Mar 1 18:52:38 3019 GMT
Subject: C=DE, O=python-ldap, OU=slapd-test, CN=client
Subject Public Key Info:
Public Key Algorithm: rsaEncryption
Public-Key: (2048 bit)
Modulus:
00:e3:b7:93:ba:9d:1a:e7:01:63:e6:4f:42:6f:44:
b0:47:31:c9:65:97:dd:88:58:5f:ce:e0:61:1d:70:
9e:5f:6c:1c:4f:dc:1c:4a:d0:3b:fe:e9:a4:5c:47:
b6:f6:a3:47:fa:7e:4f:db:80:d7:a9:5d:fb:ff:31:
70:d0:70:59:44:49:b7:e6:50:d1:ac:84:59:99:af:
4b:23:3e:b6:0f:0f:54:74:e9:36:6c:fb:fe:d1:b0:
c5:e7:c7:57:3b:f6:61:46:11:0e:30:ab:30:ce:04:
72:90:c5:e0:e1:34:99:3e:05:c1:34:40:00:6c:a8:
5f:00:03:65:c2:db:f7:4a:d7:84:02:96:32:8b:0b:
ce:24:d7:34:c5:4e:78:7c:76:e3:76:78:06:a5:68:
f4:05:14:ea:23:e0:09:e8:9d:49:ec:c0:9e:39:cc:
8c:79:57:60:5d:1b:ea:2a:3b:97:70:a3:f7:04:ba:
42:a0:00:42:fc:4f:62:09:f4:2a:a5:9a:1d:ac:3d:
b3:2a:60:2f:1c:38:3a:ba:61:cb:a0:33:39:f1:ad:
f1:d5:e4:b3:53:85:f9:2f:c0:c7:b6:eb:eb:ef:eb:
ca:fa:e7:36:a5:3c:3c:b4:33:e7:14:31:33:32:05:
56:38:4c:bd:df:7a:3a:e5:a1:8d:ad:60:c8:4f:24:
87:25
Exponent: 65537 (0x10001)
X509v3 extensions:
X509v3 Basic Constraints: critical
CA:FALSE
X509v3 Key Usage: critical
Digital Signature
X509v3 Extended Key Usage: critical
TLS Web Client Authentication
X509v3 Subject Key Identifier:
4F:E7:35:C7:C8:C1:01:C3:7C:53:86:B9:BF:AE:8B:D6:45:A2:78:20
X509v3 Authority Key Identifier:
keyid:BD:78:D5:4A:F1:90:96:C5:E8:EC:66:49:23:47:03:5F:26:73:86:B2
Signature Algorithm: sha256WithRSAEncryption
1c:90:5f:cf:18:48:95:4d:9d:d3:8e:6d:d1:69:19:1e:7b:3f:
1f:48:7c:c8:0d:2f:c4:53:0f:89:23:f4:be:ea:b4:7a:c6:dd:
cc:18:0f:e7:34:ea:2c:d4:07:0d:65:78:e8:20:40:3f:36:ef:
2c:00:31:69:e6:20:48:65:be:57:03:0e:69:ff:b9:83:59:99:
7d:4d:86:98:14:5b:8e:39:25:3a:a8:6d:51:dc:45:a5:0f:cd:
f3:7a:fd:55:af:5f:55:75:20:03:f5:4a:75:6a:79:2f:76:84:
f6:4e:3d:1d:59:45:9a:b1:6a:57:6f:16:76:76:f8:df:6e:96:
d5:25:27:34:4b:21:d8:c9:9a:36:55:45:a0:43:16:43:68:93:
37:af:81:89:06:d1:56:1b:9e:0f:62:40:ad:3c:4c:f5:ef:6c:
a2:a4:7f:f2:fa:78:9c:0d:c0:19:f1:10:e8:d8:cf:03:67:3c:
2d:4d:f3:5d:67:5c:41:a7:4f:d6:c5:0e:ff:2c:04:dd:23:bb:
85:44:8e:25:ac:15:a3:82:fa:a4:4f:fa:1d:87:f0:58:dc:ae:
53:05:b9:81:e8:cb:e5:0c:ac:a5:74:68:03:f9:22:a0:45:b6:
62:58:e0:98:d9:8c:54:a4:22:03:7a:37:12:eb:7d:b1:ad:45:
60:8e:7a:df
-----BEGIN CERTIFICATE-----
MIIDlDCCAnygAwIBAgIBAzANBgkqhkiG9w0BAQsFADBWMQswCQYDVQQGEwJERTEU
MBIGA1UECgwLcHl0aG9uLWxkYXAxEzARBgNVBAsMCnNsYXBkLXRlc3QxHDAaBgNV
BAMME1B5dGhvbiBMREFQIFRlc3QgQ0EwIBcNMTkwNDEyMTg1MjM4WhgPMzAxOTAz
MDExODUyMzhaMEkxCzAJBgNVBAYTAkRFMRQwEgYDVQQKDAtweXRob24tbGRhcDET
MBEGA1UECwwKc2xhcGQtdGVzdDEPMA0GA1UEAwwGY2xpZW50MIIBIjANBgkqhkiG
9w0BAQEFAAOCAQ8AMIIBCgKCAQEA47eTup0a5wFj5k9Cb0SwRzHJZZfdiFhfzuBh
HXCeX2wcT9wcStA7/umkXEe29qNH+n5P24DXqV37/zFw0HBZREm35lDRrIRZma9L
Iz62Dw9UdOk2bPv+0bDF58dXO/ZhRhEOMKswzgRykMXg4TSZPgXBNEAAbKhfAANl
wtv3SteEApYyiwvOJNc0xU54fHbjdngGpWj0BRTqI+AJ6J1J7MCeOcyMeVdgXRvq
KjuXcKP3BLpCoABC/E9iCfQqpZodrD2zKmAvHDg6umHLoDM58a3x1eSzU4X5L8DH
tuvr7+vK+uc2pTw8tDPnFDEzMgVWOEy933o65aGNrWDITySHJQIDAQABo3gwdjAM
BgNVHRMBAf8EAjAAMA4GA1UdDwEB/wQEAwIHgDAWBgNVHSUBAf8EDDAKBggrBgEF
BQcDAjAdBgNVHQ4EFgQUT+c1x8jBAcN8U4a5v66L1kWieCAwHwYDVR0jBBgwFoAU
vXjVSvGQlsXo7GZJI0cDXyZzhrIwDQYJKoZIhvcNAQELBQADggEBAByQX88YSJVN
ndOObdFpGR57Px9IfMgNL8RTD4kj9L7qtHrG3cwYD+c06izUBw1leOggQD827ywA
MWnmIEhlvlcDDmn/uYNZmX1NhpgUW445JTqobVHcRaUPzfN6/VWvX1V1IAP1SnVq
eS92hPZOPR1ZRZqxaldvFnZ2+N9ultUlJzRLIdjJmjZVRaBDFkNokzevgYkG0VYb
ng9iQK08TPXvbKKkf/L6eJwNwBnxEOjYzwNnPC1N811nXEGnT9bFDv8sBN0ju4VE
jiWsFaOC+qRP+h2H8FjcrlMFuYHoy+UMrKV0aAP5IqBFtmJY4JjZjFSkIgN6NxLr
fbGtRWCOet8=
-----END CERTIFICATE-----

View file

@ -0,0 +1,68 @@
#!/bin/sh
# Written by Christian Heimes
set -e
export CAOUTDIR=.
export CATMPDIR=tmp
rm -rf $CATMPDIR
rm -rf ca.pem ca.key server.pem server.key client.pem client.key
rm -rf cert9.db key4.db pkcs11.txt
mkdir -p $CAOUTDIR
mkdir -p $CATMPDIR
touch $CATMPDIR/ca.db
touch $CATMPDIR/ca.db.attr
echo '01' > $CATMPDIR/ca.crt.srl
echo '01' > $CATMPDIR/ca.crl.srl
# root CA
openssl req -new \
-config ca.conf \
-out $CATMPDIR/ca.csr \
-keyout $CAOUTDIR/ca.key \
-batch
openssl ca -selfsign \
-config ca.conf \
-in $CATMPDIR/ca.csr \
-out $CAOUTDIR/ca.pem \
-extensions ca_ext \
-days 356300 \
-batch
# server cert
openssl req -new \
-config server.conf \
-out $CATMPDIR/server.csr \
-keyout $CAOUTDIR/server.key \
-batch
openssl ca \
-config ca.conf \
-in $CATMPDIR/server.csr \
-out $CAOUTDIR/server.pem \
-policy match_pol \
-extensions server_ext \
-batch
# client cert
openssl req -new \
-config client.conf \
-out $CATMPDIR/client.csr \
-keyout $CAOUTDIR/client.key \
-batch
openssl ca \
-config ca.conf \
-in $CATMPDIR/client.csr \
-out $CAOUTDIR/client.pem \
-policy match_pol \
-extensions client_ext \
-batch
# cleanup
rm -rf $CATMPDIR ca.key
echo DONE

View file

@ -0,0 +1,28 @@
#!/bin/sh
# Written by Christian Heimes
set -e
CATMPDIR=tmp
PASSFILE=${CATMPDIR}/passwd.txt
NSSDB=sql:${CAOUTDIR}
mkdir -p $CATMPDIR
# Create PKCS#12 files for NSSDB import
echo "dummy" > $PASSFILE
openssl pkcs12 -name "servercert" -in server.pem -inkey server.key \
-caname "testca" -CAfile ca.pem \
-password "file:${PASSFILE}" -export -out server.p12
openssl pkcs12 -name "clientcert" -in client.pem -inkey client.key \
-caname "testca" -CAfile ca.pem \
-password "file:${PASSFILE}" -export -out client.p12
# Create NSS DB
certutil -d $NSSDB -N --empty-password
certutil -d $NSSDB -A -n "testca" -t CT,, -a -i ca.pem
pk12util -d $NSSDB -i server.p12 -w ${PASSFILE}
pk12util -d $NSSDB -i client.p12 -w ${PASSFILE}
certutil -d $NSSDB -L
# cleanup
rm -rf $CATMPDIR server.p12 client.p12

View file

@ -0,0 +1,16 @@
# Written by Christian Heimes
[req]
default_bits = 2048
encrypt_key = no
default_md = sha256
utf8 = yes
string_mask = utf8only
prompt = no
distinguished_name = server_dn
[server_dn]
countryName = "DE"
organizationName = "python-ldap"
organizationalUnitName = "slapd-test"
commonName = "server cert for localhost"

View file

@ -0,0 +1,28 @@
-----BEGIN PRIVATE KEY-----
MIIEvgIBADANBgkqhkiG9w0BAQEFAASCBKgwggSkAgEAAoIBAQCsBk0ml3ERFJyg
I6ujIJYERVU4doTZZd4r4z/LOef0hyiYiIQAc9wetaoZpM+bl4Eherxy9SBaCBwR
zefbaYQz2f2hdEDb+sISOiTke1eiF2ugYNlS55Wk1KnCnORE9bjcSNLPsscoUSzE
2bnBSoUwdiVK18YOCZR6GTeC8eA3ekvlR+9g+FBOgQ9+StXPDdq+iIAGXZREJIua
munErtTOw85De4YFCnzGw3UeCITDD4wFmI2IWphRFwWPsSDwUJfATA8S+7Rm4vwr
Qj726gUDlicTzPXKhJjXjj6XL7xXHfpQwMPkBCrxesKceHMJ+mrRsuuqHciuixRi
g94mILElAgMBAAECggEADG5oJOHMye8zYl8xiBhSvvxDrFDkSNGTvJgvhAArQwCB
boRvBZlZzt5R7Ih8eEH6kvDLrYMJU3hCjwbSOojlhNm7+m7sQPleDPMmt1wyeQQ4
Qt681cDmj4LOwcGUvWcEdObOVTQWMFOtaIxTYCSCe34OM9pj9Z+7mxc3a78O9PND
Ib/CwcTA1OyoupzkKirqkdLXwK3x2aT/1TMaPX94taHB51cxXc7AglL9QnuCkuaG
krqrexy3rGimzsP3OwQGEUjWKcZVSSPT8/k1pPE9hRgOqBy05BfkAzlebdvc3GO5
AbZk0NX2sfVHl4dTEXs/hTBCTQ3XmaltumQ9MdL+AQKBgQDg2I5QxBA2UHb8vCtK
f31kfG6YQc4MkoslrrMrtJjZqDYaLZPS1ARPSfYRqcc+7GDreuLmw39f8ZECd+2W
BYUqzZv9g13R9DY99g0/sINnZGsESwfIdLNNlHvVx2UrD5ybCj4vLhuPsVV7XlWs
cpl+rcuBVpqy8UIXifQ/Z3xLvwKBgQDD3CLjuC0mcTO2sIWqEHqVkc8CY2NJA2Qh
C78fwpaCqJUUdWnS69QbRGWgkFJL+oO8lQVQ1bXhZLHyQmy7Z5d5olCH6AW4GRnf
hBAnKJ+QTm9B6QVWzjUuHuOeCukfiTQbha14pOS9ar3X2QFWjDnzCRrnAxJmoY3H
BJATLHhMGwKBgQDSxAy7xt4Pm+O9y8Gk5tcq771X+i9k96V54EZRzMuPFDAK3/h2
o4marZD9Q7Hi2P+NHTc+67klvbKZpsPOYkRPOEdmH9M9cPe7oz8OGa9DpwzuDEsy
a7p8GZjvbyb1c3/wkWxzG3x4eNnReD9FFHOwHMfr6LvAy4iRuh57pM0NzwKBgDY3
1DixnV4M7EHgb7/6O9T3vhRtKujlVWyIcen61etpe4tkTV0kB11c+70M9pstyBYG
MqiD4It6coAbvznJnXcAZcaZhivGVxE237nXVwR9kfLu7JlxD+uqhVwUrSAbvR75
TGIfU2rUB6We3u30d349wQK+KPPcOQEk1DValBqNAoGBAKfXOXgFBkIVW79fOkup
aIZXdEmU3Up61Oo0KDbxsg4l73NnnvuEnNMBTx3nT3KCVIAcQL9MNpLX/Z0HjOn1
aiWVtTNq2OFL0V0HueBhbkFiWp551jTS7LjndCYHpUB/B8/wXP0kxHUm8HrQrRvK
DhV3zcxsXts1INidXjzzOkPi
-----END PRIVATE KEY-----

View file

@ -0,0 +1,86 @@
Certificate:
Data:
Version: 3 (0x2)
Serial Number: 2 (0x2)
Signature Algorithm: sha256WithRSAEncryption
Issuer: C=DE, O=python-ldap, OU=slapd-test, CN=Python LDAP Test CA
Validity
Not Before: Apr 12 18:52:38 2019 GMT
Not After : Mar 1 18:52:38 3019 GMT
Subject: C=DE, O=python-ldap, OU=slapd-test, CN=server cert for localhost
Subject Public Key Info:
Public Key Algorithm: rsaEncryption
Public-Key: (2048 bit)
Modulus:
00:ac:06:4d:26:97:71:11:14:9c:a0:23:ab:a3:20:
96:04:45:55:38:76:84:d9:65:de:2b:e3:3f:cb:39:
e7:f4:87:28:98:88:84:00:73:dc:1e:b5:aa:19:a4:
cf:9b:97:81:21:7a:bc:72:f5:20:5a:08:1c:11:cd:
e7:db:69:84:33:d9:fd:a1:74:40:db:fa:c2:12:3a:
24:e4:7b:57:a2:17:6b:a0:60:d9:52:e7:95:a4:d4:
a9:c2:9c:e4:44:f5:b8:dc:48:d2:cf:b2:c7:28:51:
2c:c4:d9:b9:c1:4a:85:30:76:25:4a:d7:c6:0e:09:
94:7a:19:37:82:f1:e0:37:7a:4b:e5:47:ef:60:f8:
50:4e:81:0f:7e:4a:d5:cf:0d:da:be:88:80:06:5d:
94:44:24:8b:9a:9a:e9:c4:ae:d4:ce:c3:ce:43:7b:
86:05:0a:7c:c6:c3:75:1e:08:84:c3:0f:8c:05:98:
8d:88:5a:98:51:17:05:8f:b1:20:f0:50:97:c0:4c:
0f:12:fb:b4:66:e2:fc:2b:42:3e:f6:ea:05:03:96:
27:13:cc:f5:ca:84:98:d7:8e:3e:97:2f:bc:57:1d:
fa:50:c0:c3:e4:04:2a:f1:7a:c2:9c:78:73:09:fa:
6a:d1:b2:eb:aa:1d:c8:ae:8b:14:62:83:de:26:20:
b1:25
Exponent: 65537 (0x10001)
X509v3 extensions:
X509v3 Basic Constraints: critical
CA:FALSE
X509v3 Key Usage: critical
Digital Signature, Key Encipherment
X509v3 Extended Key Usage: critical
TLS Web Server Authentication
X509v3 Subject Key Identifier:
08:D1:86:1B:82:0A:4F:71:31:E4:F5:31:23:CC:67:3B:FA:84:3B:A0
X509v3 Authority Key Identifier:
keyid:BD:78:D5:4A:F1:90:96:C5:E8:EC:66:49:23:47:03:5F:26:73:86:B2
X509v3 Subject Alternative Name:
DNS:localhost, IP Address:127.0.0.1, IP Address:0:0:0:0:0:0:0:1
Signature Algorithm: sha256WithRSAEncryption
88:60:af:be:11:c4:aa:dc:9b:f1:e7:14:da:20:aa:6f:2f:06:
ae:38:b2:7c:ac:90:81:22:51:7e:cb:26:15:6e:fe:67:98:c1:
0d:dc:aa:39:98:2b:d2:cc:3c:ff:1a:92:2f:56:0a:a9:6e:d8:
9a:3d:c5:4d:6f:cc:91:2e:e3:4e:bf:22:ab:cb:92:1a:a0:8f:
43:cd:82:bc:48:55:c4:95:cf:10:6b:6a:31:19:92:7d:e0:06:
05:6f:0b:33:e7:2a:37:42:f9:ec:1b:29:99:e1:58:0c:01:a7:
c3:8b:58:71:21:9f:61:8c:a7:fb:b6:7e:32:8b:a9:4e:c7:1f:
f6:46:e8:dd:ac:a6:4c:53:f8:4d:93:e4:ec:73:ab:0b:be:98:
c5:78:c4:92:c0:4c:78:47:52:2f:93:07:67:20:a4:5a:7f:59:
7e:4f:48:53:20:0d:37:bb:06:f8:44:42:64:b4:94:15:43:d1:
4c:51:f3:97:1d:2d:cd:db:b9:bb:1a:69:10:89:7d:ae:1d:0d:
94:78:45:29:cd:c4:42:67:67:96:05:bf:da:aa:23:65:7b:04:
ff:b7:ac:9d:ee:0b:e7:0f:c1:c5:0b:48:fe:0f:d6:3f:d8:b4:
77:12:bb:f5:91:4f:43:e6:01:3f:a4:c0:ea:8c:c6:68:99:8e:
49:e8:c4:8b
-----BEGIN CERTIFICATE-----
MIID1zCCAr+gAwIBAgIBAjANBgkqhkiG9w0BAQsFADBWMQswCQYDVQQGEwJERTEU
MBIGA1UECgwLcHl0aG9uLWxkYXAxEzARBgNVBAsMCnNsYXBkLXRlc3QxHDAaBgNV
BAMME1B5dGhvbiBMREFQIFRlc3QgQ0EwIBcNMTkwNDEyMTg1MjM4WhgPMzAxOTAz
MDExODUyMzhaMFwxCzAJBgNVBAYTAkRFMRQwEgYDVQQKDAtweXRob24tbGRhcDET
MBEGA1UECwwKc2xhcGQtdGVzdDEiMCAGA1UEAwwZc2VydmVyIGNlcnQgZm9yIGxv
Y2FsaG9zdDCCASIwDQYJKoZIhvcNAQEBBQADggEPADCCAQoCggEBAKwGTSaXcREU
nKAjq6MglgRFVTh2hNll3ivjP8s55/SHKJiIhABz3B61qhmkz5uXgSF6vHL1IFoI
HBHN59tphDPZ/aF0QNv6whI6JOR7V6IXa6Bg2VLnlaTUqcKc5ET1uNxI0s+yxyhR
LMTZucFKhTB2JUrXxg4JlHoZN4Lx4Dd6S+VH72D4UE6BD35K1c8N2r6IgAZdlEQk
i5qa6cSu1M7DzkN7hgUKfMbDdR4IhMMPjAWYjYhamFEXBY+xIPBQl8BMDxL7tGbi
/CtCPvbqBQOWJxPM9cqEmNeOPpcvvFcd+lDAw+QEKvF6wpx4cwn6atGy66odyK6L
FGKD3iYgsSUCAwEAAaOBpzCBpDAMBgNVHRMBAf8EAjAAMA4GA1UdDwEB/wQEAwIF
oDAWBgNVHSUBAf8EDDAKBggrBgEFBQcDATAdBgNVHQ4EFgQUCNGGG4IKT3Ex5PUx
I8xnO/qEO6AwHwYDVR0jBBgwFoAUvXjVSvGQlsXo7GZJI0cDXyZzhrIwLAYDVR0R
BCUwI4IJbG9jYWxob3N0hwR/AAABhxAAAAAAAAAAAAAAAAAAAAABMA0GCSqGSIb3
DQEBCwUAA4IBAQCIYK++EcSq3Jvx5xTaIKpvLwauOLJ8rJCBIlF+yyYVbv5nmMEN
3Ko5mCvSzDz/GpIvVgqpbtiaPcVNb8yRLuNOvyKry5IaoI9DzYK8SFXElc8Qa2ox
GZJ94AYFbwsz5yo3QvnsGymZ4VgMAafDi1hxIZ9hjKf7tn4yi6lOxx/2RujdrKZM
U/hNk+Tsc6sLvpjFeMSSwEx4R1IvkwdnIKRaf1l+T0hTIA03uwb4REJktJQVQ9FM
UfOXHS3N27m7GmkQiX2uHQ2UeEUpzcRCZ2eWBb/aqiNlewT/t6yd7gvnD8HFC0j+
D9Y/2LR3Erv1kU9D5gE/pMDqjMZomY5J6MSL
-----END CERTIFICATE-----

54
test/src/testauth.py Normal file
View file

@ -0,0 +1,54 @@
def testauth_none():
return "some_data_from_none"
def testauth_subcat_none():
return "some_data_from_subcat_none"
def testauth_default():
return "some_data_from_default"
def testauth_subcat_default():
return "some_data_from_subcat_default"
def testauth_subcat_post():
return "some_data_from_subcat_post"
def testauth_other_profile():
return "some_data_from_other_profile"
def testauth_subcat_other_profile():
return "some_data_from_subcat_other_profile"
def testauth_only_api():
return "some_data_from_only_api"
def testauth_only_cli():
return "some_data_from_only_cli"
def testauth_ldap():
return "some_data_from_ldap"
def testauth_with_arg(super_arg):
return super_arg
def testauth_with_extra_str_only(only_a_str):
return only_a_str
def testauth_with_type_int(only_an_int):
return only_an_int
def yoloswag_version(*args, **kwargs):
return "666"

View file

@ -3,85 +3,313 @@ import pytest
from moulinette.actionsmap import (
CommentParameter,
AskParameter,
PasswordParameter,
PatternParameter,
RequiredParameter,
ActionsMap
ExtraArgumentParser,
ActionsMap,
)
from moulinette.interfaces import GLOBAL_SECTION
from moulinette.interfaces import BaseActionsMapParser
from moulinette.core import MoulinetteError
from moulinette import m18n
@pytest.fixture
def iface():
return 'iface'
return "iface"
def test_comment_parameter_bad_bool_value(iface, caplog):
comment = CommentParameter(iface)
assert comment.validate(True, 'a') == 'a'
assert any('expecting a non-empty string' in message for message in caplog.messages)
assert comment.validate(True, "a") == "a"
assert any("expecting a non-empty string" in message for message in caplog.messages)
def test_comment_parameter_bad_empty_string(iface, caplog):
comment = CommentParameter(iface)
assert comment.validate('', 'a') == 'a'
assert any('expecting a non-empty string' in message for message in caplog.messages)
assert comment.validate("", "a") == "a"
assert any("expecting a non-empty string" in message for message in caplog.messages)
def test_comment_parameter_bad_type(iface):
comment = CommentParameter(iface)
with pytest.raises(TypeError):
comment.validate({}, 'b')
comment.validate({}, "b")
def test_ask_parameter_str_value(iface, caplog):
ask = AskParameter(iface)
assert ask.validate("a", "a") == "a"
assert not len(caplog.messages)
def test_ask_parameter_bad_bool_value(iface, caplog):
ask = AskParameter(iface)
assert ask.validate(True, 'a') == 'a'
assert any('expecting a non-empty string' in message for message in caplog.messages)
assert ask.validate(True, "a") == "a"
assert any("expecting a non-empty string" in message for message in caplog.messages)
def test_ask_parameter_bad_empty_string(iface, caplog):
ask = AskParameter(iface)
assert ask.validate('', 'a') == 'a'
assert any('expecting a non-empty string' in message for message in caplog.messages)
assert ask.validate("", "a") == "a"
assert any("expecting a non-empty string" in message for message in caplog.messages)
def test_ask_parameter_bad_type(iface):
ask = AskParameter(iface)
with pytest.raises(TypeError):
ask.validate({}, 'b')
ask.validate({}, "b")
def test_ask_parameter(iface, mocker):
ask = AskParameter(iface)
arg = ask("foobar", "a", "a")
assert arg == "a"
from moulinette.core import Moulinette18n, MoulinetteSignals
mocker.patch.object(Moulinette18n, "n", return_value="awesome_test")
mocker.patch.object(MoulinetteSignals, "prompt", return_value="awesome_test")
arg = ask("foobar", "a", None)
assert arg == "awesome_test"
def test_password_parameter(iface, mocker):
ask = PasswordParameter(iface)
arg = ask("foobar", "a", "a")
assert arg == "a"
from moulinette.core import Moulinette18n, MoulinetteSignals
mocker.patch.object(Moulinette18n, "n", return_value="awesome_test")
mocker.patch.object(MoulinetteSignals, "prompt", return_value="awesome_test")
arg = ask("foobar", "a", None)
assert arg == "awesome_test"
def test_pattern_parameter_bad_str_value(iface, caplog):
pattern = PatternParameter(iface)
assert pattern.validate('', 'a') == ['', 'pattern_not_match']
assert any('expecting a list' in message for message in caplog.messages)
assert pattern.validate("", "a") == ["", "pattern_not_match"]
assert any("expecting a list" in message for message in caplog.messages)
@pytest.mark.parametrize('iface', [
[],
['pattern_alone'],
['pattern', 'message', 'extra stuff']
])
@pytest.mark.parametrize(
"iface", [[], ["pattern_alone"], ["pattern", "message", "extra stuff"]]
)
def test_pattern_parameter_bad_list_len(iface):
pattern = PatternParameter(iface)
with pytest.raises(TypeError):
pattern.validate(iface, 'a')
pattern.validate(iface, "a")
def test_required_paremeter_missing_value(iface):
def test_pattern_parameter(iface, caplog, mocker):
pattern = PatternParameter(iface)
arg = pattern(["foo", "foobar"], "foo_name", "foo_value")
assert arg == "foo_value"
error = "error_message"
mocker.patch("moulinette.Moulinette18n.n", return_value=error)
with pytest.raises(MoulinetteError) as exception:
pattern(["foo", "message"], "foo_name", "not_match")
translation = m18n.g("invalid_argument", argument="foo_name", error=error)
expected_msg = translation.format(argument="foo_name", error=error)
assert expected_msg in str(exception)
assert any("doesn't match pattern" in message for message in caplog.messages)
def test_required_paremeter(iface):
required = RequiredParameter(iface)
arg = required(True, "a", "a")
assert arg == "a"
assert required.validate(True, "a")
assert not required.validate(False, "a")
def test_required_paremeter_bad_type(iface):
required = RequiredParameter(iface)
with pytest.raises(TypeError):
required.validate("a", "a")
with pytest.raises(TypeError):
required.validate(1, "a")
with pytest.raises(TypeError):
required.validate([], "a")
with pytest.raises(TypeError):
required.validate({}, "a")
def test_required_paremeter_missing_value(iface, caplog):
required = RequiredParameter(iface)
with pytest.raises(MoulinetteError) as exception:
required(True, 'a', '')
assert 'is required' in str(exception)
required(True, "a", "")
translation = m18n.g("argument_required", argument="a")
expected_msg = translation.format(argument="a")
assert expected_msg in str(exception)
assert any("is required" in message for message in caplog.messages)
def test_actions_map_unknown_authenticator(monkeypatch, tmp_path):
monkeypatch.setenv('MOULINETTE_DATA_DIR', str(tmp_path))
actionsmap_dir = actionsmap_dir = tmp_path / 'actionsmap'
monkeypatch.setenv("MOULINETTE_DATA_DIR", str(tmp_path))
actionsmap_dir = actionsmap_dir = tmp_path / "actionsmap"
actionsmap_dir.mkdir()
amap = ActionsMap(BaseActionsMapParser)
with pytest.raises(ValueError) as exception:
amap.get_authenticator(profile='unknown')
assert 'Unknown authenticator' in str(exception)
amap.get_authenticator_for_profile("unknown")
assert "Unknown authenticator" in str(exception)
def test_extra_argument_parser_add_argument(iface):
extra_argument_parse = ExtraArgumentParser(iface)
extra_argument_parse.add_argument("Test", "foo", {"ask": "lol"})
assert "Test" in extra_argument_parse._extra_params
assert "foo" in extra_argument_parse._extra_params["Test"]
assert "ask" in extra_argument_parse._extra_params["Test"]["foo"]
assert extra_argument_parse._extra_params["Test"]["foo"]["ask"] == "lol"
extra_argument_parse = ExtraArgumentParser(iface)
extra_argument_parse.add_argument(GLOBAL_SECTION, "foo", {"ask": "lol"})
assert GLOBAL_SECTION in extra_argument_parse._extra_params
assert "foo" in extra_argument_parse._extra_params[GLOBAL_SECTION]
assert "ask" in extra_argument_parse._extra_params[GLOBAL_SECTION]["foo"]
assert extra_argument_parse._extra_params[GLOBAL_SECTION]["foo"]["ask"] == "lol"
def test_extra_argument_parser_add_argument_bad_arg(iface):
extra_argument_parse = ExtraArgumentParser(iface)
with pytest.raises(MoulinetteError) as exception:
extra_argument_parse.add_argument(GLOBAL_SECTION, "foo", {"ask": 1})
translation = m18n.g("error_see_log")
expected_msg = translation.format()
assert expected_msg in str(exception)
extra_argument_parse = ExtraArgumentParser(iface)
extra_argument_parse.add_argument(GLOBAL_SECTION, "foo", {"error": 1})
assert GLOBAL_SECTION in extra_argument_parse._extra_params
assert "foo" in extra_argument_parse._extra_params[GLOBAL_SECTION]
assert not len(extra_argument_parse._extra_params[GLOBAL_SECTION]["foo"])
def test_extra_argument_parser_parse_args(iface, mocker):
extra_argument_parse = ExtraArgumentParser(iface)
extra_argument_parse.add_argument(GLOBAL_SECTION, "foo", {"ask": "lol"})
extra_argument_parse.add_argument(GLOBAL_SECTION, "foo2", {"ask": "lol2"})
extra_argument_parse.add_argument(
GLOBAL_SECTION, "bar", {"password": "lul", "ask": "lul"}
)
args = extra_argument_parse.parse_args(
GLOBAL_SECTION, {"foo": 1, "foo2": ["a", "b", {"foobar": True}], "bar": "rab"}
)
assert "foo" in args
assert args["foo"] == 1
assert "foo2" in args
assert args["foo2"] == ["a", "b", {"foobar": True}]
assert "bar" in args
assert args["bar"] == "rab"
def test_actions_map_api():
from moulinette.interfaces.api import ActionsMapParser
amap = ActionsMap(ActionsMapParser, use_cache=False)
assert amap.parser.global_conf["authenticate"] == "all"
assert "default" in amap.parser.global_conf["authenticator"]
assert "yoloswag" in amap.parser.global_conf["authenticator"]
assert ("GET", "/test-auth/default") in amap.parser.routes
assert ("POST", "/test-auth/subcat/post") in amap.parser.routes
amap.generate_cache()
amap = ActionsMap(ActionsMapParser, use_cache=True)
assert amap.parser.global_conf["authenticate"] == "all"
assert "default" in amap.parser.global_conf["authenticator"]
assert "yoloswag" in amap.parser.global_conf["authenticator"]
assert ("GET", "/test-auth/default") in amap.parser.routes
assert ("POST", "/test-auth/subcat/post") in amap.parser.routes
def test_actions_map_import_error(mocker):
from moulinette.interfaces.api import ActionsMapParser
amap = ActionsMap(ActionsMapParser)
from moulinette.core import MoulinetteLock
mocker.patch.object(MoulinetteLock, "_is_son_of", return_value=False)
mocker.patch("__builtin__.__import__", side_effect=ImportError)
with pytest.raises(MoulinetteError) as exception:
amap.process({}, timeout=30, route=("GET", "/test-auth/none"))
mocker.stopall()
translation = m18n.g("error_see_log")
expected_msg = translation.format()
assert expected_msg in str(exception)
def test_actions_map_cli():
from moulinette.interfaces.cli import ActionsMapParser
import argparse
parser = argparse.ArgumentParser(add_help=False)
parser.add_argument(
"--debug",
action="store_true",
default=False,
help="Log and print debug messages",
)
amap = ActionsMap(
ActionsMapParser, use_cache=False, parser_kwargs={"top_parser": parser}
)
assert amap.parser.global_conf["authenticate"] == "all"
assert "default" in amap.parser.global_conf["authenticator"]
assert "yoloswag" in amap.parser.global_conf["authenticator"]
assert "testauth" in amap.parser._subparsers.choices
assert "none" in amap.parser._subparsers.choices["testauth"]._actions[1].choices
assert "subcat" in amap.parser._subparsers.choices["testauth"]._actions[1].choices
assert (
"default"
in amap.parser._subparsers.choices["testauth"]
._actions[1]
.choices["subcat"]
._actions[1]
.choices
)
amap.generate_cache()
amap = ActionsMap(
ActionsMapParser, use_cache=True, parser_kwargs={"top_parser": parser}
)
assert amap.parser.global_conf["authenticate"] == "all"
assert "default" in amap.parser.global_conf["authenticator"]
assert "yoloswag" in amap.parser.global_conf["authenticator"]
assert "testauth" in amap.parser._subparsers.choices
assert "none" in amap.parser._subparsers.choices["testauth"]._actions[1].choices
assert "subcat" in amap.parser._subparsers.choices["testauth"]._actions[1].choices
assert (
"default"
in amap.parser._subparsers.choices["testauth"]
._actions[1]
.choices["subcat"]
._actions[1]
.choices
)
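The nested choices asserted above form a three-level command tree (testauth → subcat → default). A minimal sketch of the matching invocation through the moulinette_cli fixture used in test_auth.py further down (the fixture itself lives in conftest.py and is not part of this diff; the password is assumed to be the same "default" one the top-level action accepts):
moulinette_cli.run(["testauth", "subcat", "default"], output_as="plain", password="default")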

343
test/test_auth.py Normal file
View file

@@ -0,0 +1,343 @@
import os
import pytest
from moulinette import MoulinetteError
from moulinette import m18n
class TestAuthAPI:
def login(self, webapi, csrf=False, profile=None, status=200, password="default"):
data = {"password": password}
if profile:
data["profile"] = profile
return webapi.post(
"/login",
data,
status=status,
headers=None if csrf else {"X-Requested-With": ""},
)
def test_request_no_auth_needed(self, moulinette_webapi):
assert (
moulinette_webapi.get("/test-auth/none", status=200).text
== '"some_data_from_none"'
)
def test_request_no_auth_needed_subcategories(self, moulinette_webapi):
assert (
moulinette_webapi.get("/test-auth/subcat/none", status=200).text
== '"some_data_from_subcat_none"'
)
def test_request_with_auth_but_not_logged(self, moulinette_webapi):
assert (
moulinette_webapi.get("/test-auth/default", status=401).text
== "Authentication required"
)
def test_request_with_auth_subcategories_but_not_logged(self, moulinette_webapi):
assert (
moulinette_webapi.get("/test-auth/subcat/default", status=401).text
== "Authentication required"
)
def test_request_not_logged_only_api(self, moulinette_webapi):
assert (
moulinette_webapi.get("/test-auth/only-api", status=401).text
== "Authentication required"
)
def test_request_only_api(self, moulinette_webapi):
self.login(moulinette_webapi)
assert (
moulinette_webapi.get("/test-auth/only-api", status=200).text
== '"some_data_from_only_api"'
)
def test_request_not_logged_only_cli(self, moulinette_webapi):
assert (
moulinette_webapi.get("/test-auth/only-cli", status=200).text
== '"some_data_from_only_cli"'
)
def test_login(self, moulinette_webapi):
assert self.login(moulinette_webapi).text == "Logged in"
assert "session.id" in moulinette_webapi.cookies
assert "session.tokens" in moulinette_webapi.cookies
cache_session_default = os.environ["MOULINETTE_CACHE_DIR"] + "/session/default/"
assert moulinette_webapi.cookies["session.id"] + ".asc" in os.listdir(
cache_session_default
)
def test_login_bad_password(self, moulinette_webapi):
assert (
self.login(moulinette_webapi, password="Bad Password", status=401).text
== "Invalid password"
)
assert "session.id" not in moulinette_webapi.cookies
assert "session.tokens" not in moulinette_webapi.cookies
def test_login_csrf_attempt(self, moulinette_webapi):
# C.f.
# https://security.stackexchange.com/a/58308
# https://stackoverflow.com/a/22533680
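# The login() helper above sends an empty X-Requested-With header unless
# csrf=True, in which case the header is dropped entirely; the API then treats
# the request as cross-site and answers 403 with a "CSRF protection" message.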
assert (
"CSRF protection"
in self.login(moulinette_webapi, csrf=True, status=403).text
)
assert not any(c.name == "session.id" for c in moulinette_webapi.cookiejar)
assert not any(c.name == "session.tokens" for c in moulinette_webapi.cookiejar)
def test_login_then_legit_request_without_cookies(self, moulinette_webapi):
self.login(moulinette_webapi)
moulinette_webapi.cookiejar.clear()
moulinette_webapi.get("/test-auth/default", status=401)
def test_login_then_legit_request(self, moulinette_webapi):
self.login(moulinette_webapi)
assert (
moulinette_webapi.get("/test-auth/default", status=200).text
== '"some_data_from_default"'
)
assert (
moulinette_webapi.get("/test-auth/subcat/default", status=200).text
== '"some_data_from_subcat_default"'
)
def test_login_then_logout(self, moulinette_webapi):
self.login(moulinette_webapi)
moulinette_webapi.get("/logout", status=200)
cache_session_default = os.environ["MOULINETTE_CACHE_DIR"] + "/session/default/"
assert not moulinette_webapi.cookies["session.id"] + ".asc" in os.listdir(
cache_session_default
)
assert (
moulinette_webapi.get("/test-auth/default", status=401).text
== "Authentication required"
)
def test_login_other_profile(self, moulinette_webapi):
self.login(moulinette_webapi, profile="yoloswag", password="yoloswag")
assert "session.id" in moulinette_webapi.cookies
assert "session.tokens" in moulinette_webapi.cookies
cache_session_default = (
os.environ["MOULINETTE_CACHE_DIR"] + "/session/yoloswag/"
)
assert moulinette_webapi.cookies["session.id"] + ".asc" in os.listdir(
cache_session_default
)
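# Sessions are stored per profile: logging in against "yoloswag" writes
# $MOULINETTE_CACHE_DIR/session/yoloswag/<session.id>.asc, in a store separate
# from the default profile's one checked in test_login above.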
def test_login_wrong_profile(self, moulinette_webapi):
self.login(moulinette_webapi)
assert (
moulinette_webapi.get("/test-auth/other-profile", status=401).text
== "Authentication required"
)
moulinette_webapi.get("/logout", status=200)
self.login(moulinette_webapi, profile="yoloswag", password="yoloswag")
assert (
moulinette_webapi.get("/test-auth/default", status=401).text
== "Authentication required"
)
def test_login_ldap(self, moulinette_webapi, ldap_server, mocker):
mocker.patch(
"moulinette.authenticators.ldap.Authenticator._get_uri",
return_value=ldap_server.uri,
)
self.login(moulinette_webapi, profile="ldap", password="yunohost")
assert (
moulinette_webapi.get("/test-auth/ldap", status=200).text
== '"some_data_from_ldap"'
)
def test_request_with_arg(self, moulinette_webapi, capsys):
self.login(moulinette_webapi)
assert (
moulinette_webapi.get("/test-auth/with_arg/yoloswag", status=200).text
== '"yoloswag"'
)
def test_request_arg_with_extra(self, moulinette_webapi, caplog, mocker):
self.login(moulinette_webapi)
assert (
moulinette_webapi.get(
"/test-auth/with_extra_str_only/YoLoSwAg", status=200
).text
== '"YoLoSwAg"'
)
error = "error_message"
mocker.patch("moulinette.Moulinette18n.n", return_value=error)
moulinette_webapi.get("/test-auth/with_extra_str_only/12345", status=400)
assert any("doesn't match pattern" in message for message in caplog.messages)
def test_request_arg_with_type(self, moulinette_webapi, caplog, mocker):
self.login(moulinette_webapi)
assert (
moulinette_webapi.get("/test-auth/with_type_int/12345", status=200).text
== "12345"
)
error = "error_message"
mocker.patch("moulinette.Moulinette18n.g", return_value=error)
moulinette_webapi.get("/test-auth/with_type_int/yoloswag", status=400)
class TestAuthCLI:
def test_login(self, moulinette_cli, capsys, mocker):
mocker.patch("getpass.getpass", return_value="default")
moulinette_cli.run(["testauth", "default"], output_as="plain")
message = capsys.readouterr()
assert "some_data_from_default" in message.out
moulinette_cli.run(
["testauth", "default"], output_as="plain", password="default"
)
message = capsys.readouterr()
assert "some_data_from_default" in message.out
def test_login_bad_password(self, moulinette_cli, capsys, mocker):
with pytest.raises(MoulinetteError):
moulinette_cli.run(
["testauth", "default"], output_as="plain", password="Bad Password"
)
mocker.patch("getpass.getpass", return_value="Bad Password")
with pytest.raises(MoulinetteError):
moulinette_cli.run(["testauth", "default"], output_as="plain")
def test_login_wrong_profile(self, moulinette_cli, mocker):
mocker.patch("getpass.getpass", return_value="default")
with pytest.raises(MoulinetteError) as exception:
moulinette_cli.run(["testauth", "other-profile"], output_as="none")
translation = m18n.g("invalid_password")
expected_msg = translation.format()
assert expected_msg in str(exception)
with pytest.raises(MoulinetteError) as exception:
moulinette_cli.run(
["testauth", "default"], output_as="none", password="yoloswag"
)
expected_msg = translation.format()
assert expected_msg in str(exception)
def test_request_no_auth_needed(self, capsys, moulinette_cli):
moulinette_cli.run(["testauth", "none"], output_as="plain")
message = capsys.readouterr()
assert "some_data_from_none" in message.out
def test_request_not_logged_only_api(self, capsys, moulinette_cli):
moulinette_cli.run(["testauth", "only-api"], output_as="plain")
message = capsys.readouterr()
assert "some_data_from_only_api" in message.out
def test_request_only_cli(self, capsys, moulinette_cli, mocker):
mocker.patch("getpass.getpass", return_value="default")
moulinette_cli.run(["testauth", "only-cli"], output_as="plain")
message = capsys.readouterr()
assert "some_data_from_only_cli" in message.out
def test_request_not_logged_only_cli(self, capsys, moulinette_cli, mocker):
mocker.patch("getpass.getpass")
with pytest.raises(MoulinetteError) as exception:
moulinette_cli.run(["testauth", "only-cli"], output_as="plain")
message = capsys.readouterr()
assert "some_data_from_only_cli" not in message.out
translation = m18n.g("invalid_password")
expected_msg = translation.format()
assert expected_msg in str(exception)
def test_request_with_callback(self, moulinette_cli, capsys, mocker):
mocker.patch("getpass.getpass", return_value="default")
moulinette_cli.run(["--version"], output_as="plain")
message = capsys.readouterr()
assert "666" in message.out
moulinette_cli.run(["-v"], output_as="plain")
message = capsys.readouterr()
assert "666" in message.out
with pytest.raises(MoulinetteError):
moulinette_cli.run(["--wersion"], output_as="plain")
message = capsys.readouterr()
assert "cannot get value from callback method" in message.err
def test_request_with_arg(self, moulinette_cli, capsys, mocker):
mocker.patch("getpass.getpass", return_value="default")
moulinette_cli.run(["testauth", "with_arg", "yoloswag"], output_as="plain")
message = capsys.readouterr()
assert "yoloswag" in message.out
def test_request_arg_with_extra(self, moulinette_cli, capsys, mocker):
mocker.patch("getpass.getpass", return_value="default")
moulinette_cli.run(
["testauth", "with_extra_str_only", "YoLoSwAg"], output_as="plain"
)
message = capsys.readouterr()
assert "YoLoSwAg" in message.out
error = "error_message"
mocker.patch("moulinette.Moulinette18n.n", return_value=error)
with pytest.raises(MoulinetteError):
moulinette_cli.run(
["testauth", "with_extra_str_only", "12345"], output_as="plain"
)
message = capsys.readouterr()
assert "doesn't match pattern" in message.err
def test_request_arg_with_type(self, moulinette_cli, capsys, mocker):
mocker.patch("getpass.getpass", return_value="default")
moulinette_cli.run(["testauth", "with_type_int", "12345"], output_as="plain")
message = capsys.readouterr()
assert "12345" in message.out
mocker.patch("sys.exit")
with pytest.raises(MoulinetteError):
moulinette_cli.run(
["testauth", "with_type_int", "yoloswag"], output_as="plain"
)
message = capsys.readouterr()
assert "invalid int value" in message.err

View file

@@ -2,11 +2,11 @@ import os.path
def test_open_cachefile_creates(monkeypatch, tmp_path):
monkeypatch.setenv('MOULINETTE_CACHE_DIR', str(tmp_path))
monkeypatch.setenv("MOULINETTE_CACHE_DIR", str(tmp_path))
from moulinette.cache import open_cachefile
handle = open_cachefile('foo.cache', mode='w')
handle = open_cachefile("foo.cache", mode="w")
assert handle.mode == 'w'
assert handle.name == os.path.join(str(tmp_path), 'foo.cache')
assert handle.mode == "w"
assert handle.name == os.path.join(str(tmp_path), "foo.cache")

View file

@@ -1,153 +1,513 @@
import os
import pytest
import pwd
import grp
from moulinette import m18n
from moulinette.core import MoulinetteError
from moulinette.utils.filesystem import (append_to_file, read_file, read_json,
rm, write_to_file, write_to_json)
from moulinette.utils.filesystem import (
append_to_file,
read_file,
read_json,
read_yaml,
read_toml,
read_ldif,
rm,
write_to_file,
write_to_json,
write_to_yaml,
mkdir,
chown,
chmod,
)
def test_read_file(test_file):
content = read_file(str(test_file))
assert content == 'foo\nbar\n'
assert content == "foo\nbar\n"
def test_read_file_missing_file():
bad_file = 'doesnt-exist'
bad_file = "doesnt-exist"
with pytest.raises(MoulinetteError) as exception:
read_file(bad_file)
translation = m18n.g('file_not_exist', path=bad_file)
translation = m18n.g("file_not_exist", path=bad_file)
expected_msg = translation.format(path=bad_file)
assert expected_msg in str(exception)
def test_read_file_cannot_read_ioerror(test_file, mocker):
error = 'foobar'
error = "foobar"
with mocker.patch('__builtin__.open', side_effect=IOError(error)):
mocker.patch("__builtin__.open", side_effect=IOError(error))
with pytest.raises(MoulinetteError) as exception:
read_file(str(test_file))
translation = m18n.g('cannot_open_file', file=str(test_file), error=error)
translation = m18n.g("cannot_open_file", file=str(test_file), error=error)
expected_msg = translation.format(file=str(test_file), error=error)
assert expected_msg in str(exception)
def test_read_file_cannot_read_exception(test_file, mocker):
error = "foobar"
mocker.patch("__builtin__.open", side_effect=Exception(error))
with pytest.raises(MoulinetteError) as exception:
read_file(str(test_file))
translation = m18n.g("unknown_error_reading_file", file=str(test_file), error=error)
expected_msg = translation.format(file=str(test_file), error=error)
assert expected_msg in str(exception)
def test_read_json(test_json):
content = read_json(str(test_json))
assert 'foo' in content.keys()
assert content['foo'] == 'bar'
assert "foo" in content.keys()
assert content["foo"] == "bar"
def test_read_json_cannot_read(test_json, mocker):
error = 'foobar'
error = "foobar"
with mocker.patch('json.loads', side_effect=ValueError(error)):
mocker.patch("json.loads", side_effect=ValueError(error))
with pytest.raises(MoulinetteError) as exception:
read_json(str(test_json))
translation = m18n.g('corrupted_json', ressource=str(test_json), error=error)
translation = m18n.g("corrupted_json", ressource=str(test_json), error=error)
expected_msg = translation.format(ressource=str(test_json), error=error)
assert expected_msg in str(exception)
def test_read_yaml(test_yaml):
content = read_yaml(str(test_yaml))
assert "foo" in content.keys()
assert content["foo"] == "bar"
def test_read_yaml_cannot_read(test_yaml, mocker):
error = "foobar"
mocker.patch("yaml.safe_load", side_effect=Exception(error))
with pytest.raises(MoulinetteError) as exception:
read_yaml(str(test_yaml))
translation = m18n.g("corrupted_yaml", ressource=str(test_yaml), error=error)
expected_msg = translation.format(ressource=str(test_yaml), error=error)
assert expected_msg in str(exception)
def test_read_toml(test_toml):
content = read_toml(str(test_toml))
assert "foo" in content.keys()
assert content["foo"] == "bar"
def test_read_toml_cannot_read(test_toml, mocker):
error = "foobar"
mocker.patch("toml.loads", side_effect=Exception(error))
with pytest.raises(MoulinetteError) as exception:
read_toml(str(test_toml))
translation = m18n.g("corrupted_toml", ressource=str(test_toml), error=error)
expected_msg = translation.format(ressource=str(test_toml), error=error)
assert expected_msg in str(exception)
def test_read_ldif(test_ldif):
dn, entry = read_ldif(str(test_ldif))[0]
assert dn == "mail=alice@example.com"
assert entry["mail"] == ["alice@example.com"]
assert entry["objectclass"] == ["top", "person"]
assert entry["cn"] == ["Alice Alison"]
dn, entry = read_ldif(str(test_ldif), ["objectclass"])[0]
assert dn == "mail=alice@example.com"
assert entry["mail"] == ["alice@example.com"]
assert "objectclass" not in entry
assert entry["cn"] == ["Alice Alison"]
def test_read_ldif_cannot_ioerror(test_ldif, mocker):
error = "foobar"
mocker.patch("__builtin__.open", side_effect=IOError(error))
with pytest.raises(MoulinetteError) as exception:
read_ldif(str(test_ldif))
translation = m18n.g("cannot_open_file", file=str(test_ldif), error=error)
expected_msg = translation.format(file=str(test_ldif), error=error)
assert expected_msg in str(exception)
def test_read_ldif_cannot_exception(test_ldif, mocker):
error = "foobar"
mocker.patch("__builtin__.open", side_effect=Exception(error))
with pytest.raises(MoulinetteError) as exception:
read_ldif(str(test_ldif))
translation = m18n.g("unknown_error_reading_file", file=str(test_ldif), error=error)
expected_msg = translation.format(file=str(test_ldif), error=error)
assert expected_msg in str(exception)
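The test_ldif fixture itself is defined in conftest.py and not shown in this diff. A minimal sketch of the kind of file that would produce the entries asserted above, written with the helpers from the same module (the temporary path is illustrative and the attribute values are taken from the assertions):
from moulinette.utils.filesystem import write_to_file, read_ldif
write_to_file("/tmp/alice.ldif", [
    "dn: mail=alice@example.com",
    "objectclass: top",
    "objectclass: person",
    "mail: alice@example.com",
    "cn: Alice Alison",
])
dn, entry = read_ldif("/tmp/alice.ldif")[0]
# dn == "mail=alice@example.com", entry["cn"] == ["Alice Alison"]
# a list passed as second argument (e.g. ["objectclass"]) filters those attributes out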
def test_write_to_existing_file(test_file):
write_to_file(str(test_file), 'yolo\nswag')
assert read_file(str(test_file)) == 'yolo\nswag'
write_to_file(str(test_file), "yolo\nswag")
assert read_file(str(test_file)) == "yolo\nswag"
def test_write_to_new_file(tmp_path):
new_file = tmp_path / 'newfile.txt'
new_file = tmp_path / "newfile.txt"
write_to_file(str(new_file), 'yolo\nswag')
write_to_file(str(new_file), "yolo\nswag")
assert os.path.exists(str(new_file))
assert read_file(str(new_file)) == 'yolo\nswag'
assert read_file(str(new_file)) == "yolo\nswag"
def test_write_to_existing_file_bad_perms(test_file, mocker):
error = 'foobar'
error = "foobar"
with mocker.patch('__builtin__.open', side_effect=IOError(error)):
mocker.patch("__builtin__.open", side_effect=IOError(error))
with pytest.raises(MoulinetteError) as exception:
write_to_file(str(test_file), 'yolo\nswag')
write_to_file(str(test_file), "yolo\nswag")
translation = m18n.g('cannot_write_file', file=str(test_file), error=error)
translation = m18n.g("cannot_write_file", file=str(test_file), error=error)
expected_msg = translation.format(file=str(test_file), error=error)
assert expected_msg in str(exception)
def test_write_to_file_exception(test_file, mocker):
error = "foobar"
mocker.patch("__builtin__.open", side_effect=Exception(error))
with pytest.raises(MoulinetteError) as exception:
write_to_file(str(test_file), "yolo\nswag")
translation = m18n.g("error_writing_file", file=str(test_file), error=error)
expected_msg = translation.format(file=str(test_file), error=error)
assert expected_msg in str(exception)
def test_write_cannot_write_folder(tmp_path):
with pytest.raises(AssertionError):
write_to_file(str(tmp_path), 'yolo\nswag')
write_to_file(str(tmp_path), "yolo\nswag")
def test_write_cannot_write_to_non_existant_folder():
with pytest.raises(AssertionError):
write_to_file('/toto/test', 'yolo\nswag')
write_to_file("/toto/test", "yolo\nswag")
def test_write_to_file_with_a_list(test_file):
write_to_file(str(test_file), ['yolo', 'swag'])
assert read_file(str(test_file)) == 'yolo\nswag'
write_to_file(str(test_file), ["yolo", "swag"])
assert read_file(str(test_file)) == "yolo\nswag"
def test_append_to_existing_file(test_file):
append_to_file(str(test_file), 'yolo\nswag')
assert read_file(str(test_file)) == 'foo\nbar\nyolo\nswag'
append_to_file(str(test_file), "yolo\nswag")
assert read_file(str(test_file)) == "foo\nbar\nyolo\nswag"
def test_append_to_new_file(tmp_path):
new_file = tmp_path / 'newfile.txt'
new_file = tmp_path / "newfile.txt"
append_to_file(str(new_file), 'yolo\nswag')
append_to_file(str(new_file), "yolo\nswag")
assert os.path.exists(str(new_file))
assert read_file(str(new_file)) == 'yolo\nswag'
assert read_file(str(new_file)) == "yolo\nswag"
def text_write_dict_to_json(tmp_path):
new_file = tmp_path / 'newfile.json'
def test_write_dict_to_json(tmp_path):
new_file = tmp_path / "newfile.json"
dummy_dict = {'foo': 42, 'bar': ['a', 'b', 'c']}
dummy_dict = {"foo": 42, "bar": ["a", "b", "c"]}
write_to_json(str(new_file), dummy_dict)
_json = read_json(str(new_file))
assert 'foo' in _json.keys()
assert 'bar' in _json.keys()
assert "foo" in _json.keys()
assert "bar" in _json.keys()
assert _json['foo'] == 42
assert _json['bar'] == ['a', 'b', 'c']
assert _json["foo"] == 42
assert _json["bar"] == ["a", "b", "c"]
def test_write_json_to_existing_file_bad_perms(test_file, mocker):
error = "foobar"
dummy_dict = {"foo": 42, "bar": ["a", "b", "c"]}
mocker.patch("__builtin__.open", side_effect=IOError(error))
with pytest.raises(MoulinetteError) as exception:
write_to_json(str(test_file), dummy_dict)
translation = m18n.g("cannot_write_file", file=str(test_file), error=error)
expected_msg = translation.format(file=str(test_file), error=error)
assert expected_msg in str(exception)
def test_write_json_to_file_exception(test_file, mocker):
error = "foobar"
dummy_dict = {"foo": 42, "bar": ["a", "b", "c"]}
mocker.patch("__builtin__.open", side_effect=Exception(error))
with pytest.raises(MoulinetteError) as exception:
write_to_json(str(test_file), dummy_dict)
translation = m18n.g("error_writing_file", file=str(test_file), error=error)
expected_msg = translation.format(file=str(test_file), error=error)
assert expected_msg in str(exception)
def test_write_list_to_json(tmp_path):
new_file = tmp_path / 'newfile.json'
new_file = tmp_path / "newfile.json"
dummy_list = ['foo', 'bar', 'baz']
dummy_list = ["foo", "bar", "baz"]
write_to_json(str(new_file), dummy_list)
_json = read_json(str(new_file))
assert _json == ['foo', 'bar', 'baz']
assert _json == ["foo", "bar", "baz"]
def test_write_to_json_bad_perms(test_json, mocker):
error = 'foobar'
error = "foobar"
with mocker.patch('__builtin__.open', side_effect=IOError(error)):
mocker.patch("__builtin__.open", side_effect=IOError(error))
with pytest.raises(MoulinetteError) as exception:
write_to_json(str(test_json), {'a': 1})
write_to_json(str(test_json), {"a": 1})
translation = m18n.g('cannot_write_file', file=str(test_json), error=error)
translation = m18n.g("cannot_write_file", file=str(test_json), error=error)
expected_msg = translation.format(file=str(test_json), error=error)
assert expected_msg in str(exception)
def test_write_json_cannot_write_to_non_existant_folder():
with pytest.raises(AssertionError):
write_to_json('/toto/test.json', ['a', 'b'])
write_to_json("/toto/test.json", ["a", "b"])
def test_write_dict_to_yaml(tmp_path):
new_file = tmp_path / "newfile.yaml"
dummy_dict = {"foo": 42, "bar": ["a", "b", "c"]}
write_to_yaml(str(new_file), dummy_dict)
_yaml = read_yaml(str(new_file))
assert "foo" in _yaml.keys()
assert "bar" in _yaml.keys()
assert _yaml["foo"] == 42
assert _yaml["bar"] == ["a", "b", "c"]
def test_write_yaml_to_existing_file_bad_perms(test_file, mocker):
error = "foobar"
dummy_dict = {"foo": 42, "bar": ["a", "b", "c"]}
mocker.patch("__builtin__.open", side_effect=IOError(error))
with pytest.raises(MoulinetteError) as exception:
write_to_yaml(str(test_file), dummy_dict)
translation = m18n.g("cannot_write_file", file=str(test_file), error=error)
expected_msg = translation.format(file=str(test_file), error=error)
assert expected_msg in str(exception)
def test_write_yaml_to_file_exception(test_file, mocker):
error = "foobar"
dummy_dict = {"foo": 42, "bar": ["a", "b", "c"]}
mocker.patch("__builtin__.open", side_effect=Exception(error))
with pytest.raises(MoulinetteError) as exception:
write_to_yaml(str(test_file), dummy_dict)
translation = m18n.g("error_writing_file", file=str(test_file), error=error)
expected_msg = translation.format(file=str(test_file), error=error)
assert expected_msg in str(exception)
def test_write_list_to_yaml(tmp_path):
new_file = tmp_path / "newfile.yaml"
dummy_list = ["foo", "bar", "baz"]
write_to_yaml(str(new_file), dummy_list)
_yaml = read_yaml(str(new_file))
assert _yaml == ["foo", "bar", "baz"]
def test_write_to_yaml_bad_perms(test_yaml, mocker):
error = "foobar"
mocker.patch("__builtin__.open", side_effect=IOError(error))
with pytest.raises(MoulinetteError) as exception:
write_to_yaml(str(test_yaml), {"a": 1})
translation = m18n.g("cannot_write_file", file=str(test_yaml), error=error)
expected_msg = translation.format(file=str(test_yaml), error=error)
assert expected_msg in str(exception)
def test_write_yaml_cannot_write_to_non_existant_folder():
with pytest.raises(AssertionError):
write_to_yaml("/toto/test.yaml", ["a", "b"])
def test_mkdir(tmp_path):
new_path = tmp_path / "new_folder"
mkdir(str(new_path))
assert os.path.isdir(str(new_path))
assert oct(os.stat(str(new_path)).st_mode & 0o777) == oct(0o777)
def test_mkdir_with_permission(tmp_path, mocker):
# This test only makes sense when not running as root
if os.getuid() == 0:
return
new_path = tmp_path / "new_folder"
permission = 0o700
mkdir(str(new_path), mode=permission)
assert os.path.isdir(str(new_path))
assert oct(os.stat(str(new_path)).st_mode & 0o777) == oct(permission)
new_path = tmp_path / "new_parent2" / "new_folder"
with pytest.raises(OSError):
mkdir(str(new_path), parents=True, mode=0o000)
def test_mkdir_with_parent(tmp_path):
new_path = tmp_path / "new_folder"
mkdir(str(new_path) + "/", parents=True)
assert os.path.isdir(str(new_path))
new_path = tmp_path / "new_parent" / "new_folder"
mkdir(str(new_path), parents=True)
assert os.path.isdir(str(new_path))
def test_mkdir_existing_folder(tmp_path):
new_path = tmp_path / "new_folder"
os.makedirs(str(new_path))
with pytest.raises(OSError):
mkdir(str(new_path))
def test_chown(test_file):
with pytest.raises(ValueError):
chown(str(test_file))
current_uid = os.getuid()
current_gid = os.getgid()
chown(str(test_file), current_uid, current_gid)
assert os.stat(str(test_file)).st_uid == current_uid
assert os.stat(str(test_file)).st_gid == current_gid
current_gid = os.getgid()
chown(str(test_file), uid=None, gid=current_gid)
assert os.stat(str(test_file)).st_gid == current_gid
current_uid = pwd.getpwuid(os.getuid())[0]
current_gid = grp.getgrgid(os.getgid())[0]
chown(str(test_file), current_uid, current_gid)
assert os.stat(str(test_file)).st_uid == os.getuid()
assert os.stat(str(test_file)).st_gid == os.getgid()
fake_user = "nousrlol"
with pytest.raises(MoulinetteError) as exception:
chown(str(test_file), fake_user)
translation = m18n.g("unknown_user", user=fake_user)
expected_msg = translation.format(user=fake_user)
assert expected_msg in str(exception)
fake_grp = "nogrplol"
with pytest.raises(MoulinetteError) as exception:
chown(str(test_file), gid=fake_grp)
translation = m18n.g("unknown_group", group=fake_grp)
expected_msg = translation.format(group=fake_grp)
assert expected_msg in str(exception)
def test_chown_recursive(test_file):
current_uid = os.getuid()
dirname = os.path.dirname(str(test_file))
mkdir(os.path.join(dirname, "new_dir"))
chown(str(dirname), current_uid, recursive=True)
assert os.stat(str(dirname)).st_uid == current_uid
def test_chown_exception(test_file, mocker):
error = "foobar"
mocker.patch("os.chown", side_effect=Exception(error))
with pytest.raises(MoulinetteError) as exception:
chown(str(test_file), 1)
translation = m18n.g(
"error_changing_file_permissions", path=test_file, error=str(error)
)
expected_msg = translation.format(path=test_file, error=str(error))
assert expected_msg in str(exception)
def test_chmod(test_file):
permission = 0o723
chmod(str(test_file), permission)
assert oct(os.stat(str(test_file)).st_mode & 0o777) == oct(permission)
dirname = os.path.dirname(str(test_file))
permission = 0o722
chmod(str(dirname), permission, recursive=True)
assert oct(os.stat(str(test_file)).st_mode & 0o777) == oct(permission)
assert oct(os.stat(dirname).st_mode & 0o777) == oct(permission)
def test_chmod_recursive(test_file):
dirname = os.path.dirname(str(test_file))
mkdir(os.path.join(dirname, "new_dir"))
permission = 0o721
fpermission = 0o720
chmod(str(dirname), permission, fmode=fpermission, recursive=True)
assert oct(os.stat(str(test_file)).st_mode & 0o777) == oct(fpermission)
assert oct(os.stat(dirname).st_mode & 0o777) == oct(permission)
def test_chmod_exception(test_file, mocker):
error = "foobar"
mocker.patch("os.chmod", side_effect=Exception(error))
with pytest.raises(MoulinetteError) as exception:
chmod(str(test_file), 0o000)
translation = m18n.g(
"error_changing_file_permissions", path=test_file, error=str(error)
)
expected_msg = translation.format(path=test_file, error=str(error))
assert expected_msg in str(exception)
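Taken together, the directory and permission helpers exercised above accept the following shapes; a short sketch with illustrative paths (not part of the test suite):
import os
from moulinette.utils.filesystem import mkdir, chown, chmod
mkdir("/tmp/demo/dir", mode=0o750, parents=True)           # creates missing parents
chown("/tmp/demo/dir", uid=os.getuid(), gid=os.getgid(), recursive=True)
chmod("/tmp/demo/dir", 0o750, fmode=0o640, recursive=True) # mode for dirs, fmode for files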
def test_remove_file(test_file):
@@ -157,13 +517,13 @@ def test_remove_file(test_file):
def test_remove_file_bad_perms(test_file, mocker):
error = 'foobar'
error = "foobar"
with mocker.patch('os.remove', side_effect=OSError(error)):
mocker.patch("os.remove", side_effect=OSError(error))
with pytest.raises(MoulinetteError) as exception:
rm(str(test_file))
translation = m18n.g('error_removing', path=str(test_file), error=error)
translation = m18n.g("error_removing", path=str(test_file), error=error)
expected_msg = translation.format(path=str(test_file), error=error)
assert expected_msg in str(exception)

408
test/test_ldap.py Normal file
View file

@@ -0,0 +1,408 @@
import pytest
import os
from moulinette.authenticators import ldap as m_ldap
from moulinette import m18n
from moulinette.core import MoulinetteError
class TestLDAP:
def setup_method(self):
self.ldap_conf = {
"vendor": "ldap",
"name": "as-root",
"parameters": {"base_dn": "dc=yunohost,dc=org"},
"extra": {},
}
def test_authenticate_simple_bind_with_admin(self, ldap_server):
self.ldap_conf["parameters"]["uri"] = ldap_server.uri
self.ldap_conf["parameters"]["user_rdn"] = "cn=admin,dc=yunohost,dc=org"
ldap_interface = m_ldap.Authenticator(**self.ldap_conf)
ldap_interface.authenticate(password="yunohost")
assert ldap_interface.con
def test_authenticate_simple_bind_with_wrong_user(self, ldap_server):
self.ldap_conf["parameters"]["uri"] = ldap_server.uri
self.ldap_conf["parameters"]["user_rdn"] = "cn=yoloswag,dc=yunohost,dc=org"
ldap_interface = m_ldap.Authenticator(**self.ldap_conf)
with pytest.raises(MoulinetteError) as exception:
ldap_interface.authenticate(password="yunohost")
translation = m18n.g("invalid_password")
expected_msg = translation.format()
assert expected_msg in str(exception)
assert ldap_interface.con is None
def test_authenticate_simple_bind_with_rdn_wrong_password(self, ldap_server):
self.ldap_conf["parameters"]["uri"] = ldap_server.uri
self.ldap_conf["parameters"]["user_rdn"] = "cn=admin,dc=yunohost,dc=org"
ldap_interface = m_ldap.Authenticator(**self.ldap_conf)
with pytest.raises(MoulinetteError) as exception:
ldap_interface.authenticate(password="bad_password_lul")
translation = m18n.g("invalid_password")
expected_msg = translation.format()
assert expected_msg in str(exception)
assert ldap_interface.con is None
def test_authenticate_simple_bind_anonymous(self, ldap_server):
self.ldap_conf["parameters"]["uri"] = ldap_server.uri
self.ldap_conf["parameters"]["user_rdn"] = ""
ldap_interface = m_ldap.Authenticator(**self.ldap_conf)
ldap_interface.authenticate()
assert ldap_interface.con
def test_authenticate_sasl_non_interactive_bind(self, ldap_server):
self.ldap_conf["parameters"]["uri"] = ldap_server.uri
self.ldap_conf["parameters"]["user_rdn"] = (
"gidNumber=%s+uidNumber=%s,cn=peercred,cn=external,cn=auth"
% (os.getgid(), os.getuid())
)
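# An RDN under cn=peercred,cn=external,cn=auth requests a SASL EXTERNAL bind
# over the LDAPI socket: the server authenticates the connecting process by its
# uid/gid, so no password is involved and the connection is already open once
# the Authenticator is constructed (hence no authenticate() call below).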
ldap_interface = m_ldap.Authenticator(**self.ldap_conf)
assert ldap_interface.con
def test_authenticate_server_down(self, ldap_server):
self.ldap_conf["parameters"]["uri"] = ldap_server.uri
self.ldap_conf["parameters"]["user_rdn"] = "cn=admin,dc=yunohost,dc=org"
ldap_server.stop()
ldap_interface = m_ldap.Authenticator(**self.ldap_conf)
with pytest.raises(MoulinetteError) as exception:
ldap_interface.authenticate(password="yunohost")
translation = m18n.g("ldap_server_down")
expected_msg = translation.format()
assert expected_msg in str(exception)
assert ldap_interface.con is None
def create_ldap_interface(self, user_rdn, password=None):
self.ldap_conf["parameters"]["user_rdn"] = user_rdn
ldap_interface = m_ldap.Authenticator(**self.ldap_conf)
if not ldap_interface.con:
ldap_interface.authenticate(password=password)
return ldap_interface
def test_admin_read(self, ldap_server):
self.ldap_conf["parameters"]["uri"] = ldap_server.uri
ldap_interface = self.create_ldap_interface(
"cn=admin,dc=yunohost,dc=org", "yunohost"
)
admin_info = ldap_interface.search("cn=admin,dc=yunohost,dc=org", attrs=None)[0]
assert "cn" in admin_info
assert admin_info["cn"] == ["admin"]
assert "description" in admin_info
assert admin_info["description"] == ["LDAP Administrator"]
assert "userPassword" in admin_info
assert admin_info["userPassword"][0].startswith("{CRYPT}$6$")
admin_info = ldap_interface.search(
"cn=admin,dc=yunohost,dc=org", attrs=["userPassword"]
)[0]
assert admin_info.keys() == ["userPassword"]
assert admin_info["userPassword"][0].startswith("{CRYPT}$6$")
def test_sasl_read(self, ldap_server):
self.ldap_conf["parameters"]["uri"] = ldap_server.uri
ldap_interface = self.create_ldap_interface(
"gidNumber=%s+uidNumber=%s,cn=peercred,cn=external,cn=auth"
% (os.getgid(), os.getuid())
)
admin_info = ldap_interface.search("cn=admin,dc=yunohost,dc=org", attrs=None)[0]
assert "cn" in admin_info
assert admin_info["cn"] == ["admin"]
assert "description" in admin_info
assert admin_info["description"] == ["LDAP Administrator"]
assert "userPassword" in admin_info
assert admin_info["userPassword"][0].startswith("{CRYPT}$6$")
admin_info = ldap_interface.search(
"cn=admin,dc=yunohost,dc=org", attrs=["userPassword"]
)[0]
assert admin_info.keys() == ["userPassword"]
assert admin_info["userPassword"][0].startswith("{CRYPT}$6$")
def test_anonymous_read(self, ldap_server):
self.ldap_conf["parameters"]["uri"] = ldap_server.uri
ldap_interface = self.create_ldap_interface("")
admin_info = ldap_interface.search("cn=admin,dc=yunohost,dc=org", attrs=None)[0]
assert "cn" in admin_info
assert admin_info["cn"] == ["admin"]
assert "description" in admin_info
assert admin_info["description"] == ["LDAP Administrator"]
assert "userPassword" not in admin_info
admin_info = ldap_interface.search(
"cn=admin,dc=yunohost,dc=org", attrs=["userPassword"]
)[0]
assert not admin_info
def add_new_user(self, ldap_interface):
new_user = "new_user"
attr_dict = {
"objectClass": ["inetOrgPerson", "posixAccount"],
"sn": new_user,
"cn": new_user,
"userPassword": new_user,
"gidNumber": "666",
"uidNumber": "666",
"homeDirectory": "/home/" + new_user,
}
ldap_interface.add("uid=%s,ou=users" % new_user, attr_dict)
# Check if we can login as the new user
assert self.create_ldap_interface(
"uid=%s,ou=users,dc=yunohost,dc=org" % new_user, new_user
).con
return ldap_interface.search(
"uid=%s,ou=users,dc=yunohost,dc=org" % new_user, attrs=None
)[0]
def test_admin_add(self, ldap_server):
self.ldap_conf["parameters"]["uri"] = ldap_server.uri
ldap_interface = self.create_ldap_interface(
"cn=admin,dc=yunohost,dc=org", "yunohost"
)
new_user_info = self.add_new_user(ldap_interface)
assert "cn" in new_user_info
assert new_user_info["cn"] == ["new_user"]
assert "sn" in new_user_info
assert new_user_info["sn"] == ["new_user"]
assert "uid" in new_user_info
assert new_user_info["uid"] == ["new_user"]
assert "objectClass" in new_user_info
assert "inetOrgPerson" in new_user_info["objectClass"]
assert "posixAccount" in new_user_info["objectClass"]
def test_sasl_add(self, ldap_server):
self.ldap_conf["parameters"]["uri"] = ldap_server.uri
ldap_interface = self.create_ldap_interface(
"gidNumber=%s+uidNumber=%s,cn=peercred,cn=external,cn=auth"
% (os.getgid(), os.getuid())
)
new_user_info = self.add_new_user(ldap_interface)
assert "cn" in new_user_info
assert new_user_info["cn"] == ["new_user"]
assert "sn" in new_user_info
assert new_user_info["sn"] == ["new_user"]
assert "uid" in new_user_info
assert new_user_info["uid"] == ["new_user"]
assert "objectClass" in new_user_info
assert "inetOrgPerson" in new_user_info["objectClass"]
assert "posixAccount" in new_user_info["objectClass"]
def test_anonymous_add(self, ldap_server):
self.ldap_conf["parameters"]["uri"] = ldap_server.uri
ldap_interface = self.create_ldap_interface("")
with pytest.raises(MoulinetteError) as exception:
self.add_new_user(ldap_interface)
expected_message = "error during LDAP add operation with: rdn="
expected_error = "modifications require authentication"
assert expected_error in str(exception)
assert expected_message in str(exception)
def remove_new_user(self, ldap_interface):
new_user_info = self.add_new_user(
self.create_ldap_interface(
"gidNumber=%s+uidNumber=%s,cn=peercred,cn=external,cn=auth"
% (os.getgid(), os.getuid()),
"yunohost",
)
)
uid = new_user_info["uid"][0]
ldap_interface.remove("uid=%s,ou=users" % uid)
with pytest.raises(MoulinetteError) as exception:
ldap_interface.search(
"uid=%s,ou=users,dc=yunohost,dc=org" % uid, attrs=None
)
expected_message = "error during LDAP search operation with: base="
expected_error = "No such object"
assert expected_error in str(exception)
assert expected_message in str(exception)
def test_admin_remove(self, ldap_server):
self.ldap_conf["parameters"]["uri"] = ldap_server.uri
ldap_interface = self.create_ldap_interface(
"cn=admin,dc=yunohost,dc=org", "yunohost"
)
self.remove_new_user(ldap_interface)
def test_sasl_remove(self, ldap_server):
self.ldap_conf["parameters"]["uri"] = ldap_server.uri
ldap_interface = self.create_ldap_interface(
"gidNumber=%s+uidNumber=%s,cn=peercred,cn=external,cn=auth"
% (os.getgid(), os.getuid())
)
self.remove_new_user(ldap_interface)
def test_anonymous_remove(self, ldap_server):
self.ldap_conf["parameters"]["uri"] = ldap_server.uri
ldap_interface = self.create_ldap_interface("")
with pytest.raises(MoulinetteError) as exception:
self.remove_new_user(ldap_interface)
expected_message = "error during LDAP delete operation with: rdn="
expected_error = "modifications require authentication"
assert expected_error in str(exception)
assert expected_message in str(exception)
def update_new_user(self, ldap_interface, new_rdn=False):
new_user_info = self.add_new_user(
self.create_ldap_interface(
"gidNumber=%s+uidNumber=%s,cn=peercred,cn=external,cn=auth"
% (os.getgid(), os.getuid()),
"yunohost",
)
)
uid = new_user_info["uid"][0]
new_user_info["uidNumber"] = ["555"]
new_user_info["gidNumber"] = ["555"]
new_another_user_uid = "new_another_user"
if new_rdn:
new_rdn = "uid=%s" % new_another_user_uid
ldap_interface.update("uid=%s,ou=users" % uid, new_user_info, new_rdn)
if new_rdn:
uid = new_another_user_uid
return ldap_interface.search(
"uid=%s,ou=users,dc=yunohost,dc=org" % uid, attrs=None
)[0]
def test_admin_update(self, ldap_server):
self.ldap_conf["parameters"]["uri"] = ldap_server.uri
ldap_interface = self.create_ldap_interface(
"cn=admin,dc=yunohost,dc=org", "yunohost"
)
new_user_info = self.update_new_user(ldap_interface)
assert new_user_info["uid"] == ["new_user"]
assert new_user_info["uidNumber"] == ["555"]
assert new_user_info["gidNumber"] == ["555"]
def test_admin_update_new_rdn(self, ldap_server):
self.ldap_conf["parameters"]["uri"] = ldap_server.uri
ldap_interface = self.create_ldap_interface(
"cn=admin,dc=yunohost,dc=org", "yunohost"
)
new_user_info = self.update_new_user(ldap_interface, True)
assert new_user_info["uid"] == ["new_another_user"]
assert new_user_info["uidNumber"] == ["555"]
assert new_user_info["gidNumber"] == ["555"]
def test_sasl_update(self, ldap_server):
self.ldap_conf["parameters"]["uri"] = ldap_server.uri
ldap_interface = self.create_ldap_interface(
"gidNumber=%s+uidNumber=%s,cn=peercred,cn=external,cn=auth"
% (os.getgid(), os.getuid())
)
new_user_info = self.update_new_user(ldap_interface)
assert new_user_info["uid"] == ["new_user"]
assert new_user_info["uidNumber"] == ["555"]
assert new_user_info["gidNumber"] == ["555"]
def test_sasl_update_new_rdn(self, ldap_server):
self.ldap_conf["parameters"]["uri"] = ldap_server.uri
ldap_interface = self.create_ldap_interface(
"cn=admin,dc=yunohost,dc=org", "yunohost"
)
new_user_info = self.update_new_user(ldap_interface, True)
assert new_user_info["uid"] == ["new_another_user"]
assert new_user_info["uidNumber"] == ["555"]
assert new_user_info["gidNumber"] == ["555"]
def test_anonymous_update(self, ldap_server):
self.ldap_conf["parameters"]["uri"] = ldap_server.uri
ldap_interface = self.create_ldap_interface("")
with pytest.raises(MoulinetteError) as exception:
self.update_new_user(ldap_interface)
expected_message = "error during LDAP update operation with: rdn="
expected_error = "modifications require authentication"
assert expected_error in str(exception)
assert expected_message in str(exception)
def test_anonymous_update_new_rdn(self, ldap_server):
self.ldap_conf["parameters"]["uri"] = ldap_server.uri
ldap_interface = self.create_ldap_interface("")
with pytest.raises(MoulinetteError) as exception:
self.update_new_user(ldap_interface, True)
expected_message = "error during LDAP update operation with: rdn="
expected_error = "modifications require authentication"
assert expected_error in str(exception)
assert expected_message in str(exception)
def test_empty_update(self, ldap_server):
self.ldap_conf["parameters"]["uri"] = ldap_server.uri
ldap_interface = self.create_ldap_interface(
"cn=admin,dc=yunohost,dc=org", "yunohost"
)
new_user_info = self.update_new_user(ldap_interface)
assert new_user_info["uid"] == ["new_user"]
assert new_user_info["uidNumber"] == ["555"]
assert new_user_info["gidNumber"] == ["555"]
uid = new_user_info["uid"][0]
assert ldap_interface.update("uid=%s,ou=users" % uid, new_user_info)
def test_get_conflict(self, ldap_server):
self.ldap_conf["parameters"]["uri"] = ldap_server.uri
ldap_interface = self.create_ldap_interface(
"cn=admin,dc=yunohost,dc=org", "yunohost"
)
self.add_new_user(ldap_interface)
conflict = ldap_interface.get_conflict({"uid": "new_user"})
assert conflict == ("uid", "new_user")
conflict = ldap_interface.get_conflict(
{"uid": "new_user"}, base_dn="ou=users,dc=yunohost,dc=org"
)
assert conflict == ("uid", "new_user")
conflict = ldap_interface.get_conflict({"uid": "not_a_user"})
assert not conflict
def test_validate_uniqueness(self, ldap_server):
self.ldap_conf["parameters"]["uri"] = ldap_server.uri
ldap_interface = self.create_ldap_interface(
"cn=admin,dc=yunohost,dc=org", "yunohost"
)
self.add_new_user(ldap_interface)
with pytest.raises(MoulinetteError) as exception:
ldap_interface.validate_uniqueness({"uid": "new_user"})
translation = m18n.g(
"ldap_attribute_already_exists", attribute="uid", value="new_user"
)
expected_msg = translation.format(attribute="uid", value="new_user")
assert expected_msg in str(exception)
assert ldap_interface.validate_uniqueness({"uid": "not_a_user"})

View file

@@ -8,19 +8,19 @@ from moulinette.utils.network import download_json, download_text
def test_download(test_url):
with requests_mock.Mocker() as mock:
mock.register_uri('GET', test_url, text='some text')
mock.register_uri("GET", test_url, text="some text")
fetched_text = download_text(test_url)
assert fetched_text == 'some text'
assert fetched_text == "some text"
def test_download_bad_url():
with pytest.raises(MoulinetteError):
download_text('Nowhere')
download_text("Nowhere")
def test_download_404(test_url):
with requests_mock.Mocker() as mock:
mock.register_uri('GET', test_url, status_code=404)
mock.register_uri("GET", test_url, status_code=404)
with pytest.raises(MoulinetteError):
download_text(test_url)
@@ -28,29 +28,37 @@ def test_download_404(test_url):
def test_download_ssl_error(test_url):
with requests_mock.Mocker() as mock:
exception = requests.exceptions.SSLError
mock.register_uri('GET', test_url, exc=exception)
mock.register_uri("GET", test_url, exc=exception)
with pytest.raises(MoulinetteError):
download_text(test_url)
def test_download_connection_error(test_url):
with requests_mock.Mocker() as mock:
exception = requests.exceptions.ConnectionError
mock.register_uri("GET", test_url, exc=exception)
with pytest.raises(MoulinetteError):
download_text(test_url)
def test_download_timeout(test_url):
with requests_mock.Mocker() as mock:
exception = requests.exceptions.ConnectTimeout
mock.register_uri('GET', test_url, exc=exception)
exception = requests.exceptions.Timeout
mock.register_uri("GET", test_url, exc=exception)
with pytest.raises(MoulinetteError):
download_text(test_url)
def test_download_json(test_url):
with requests_mock.Mocker() as mock:
mock.register_uri('GET', test_url, text='{"foo":"bar"}')
mock.register_uri("GET", test_url, text='{"foo":"bar"}')
fetched_json = download_json(test_url)
assert 'foo' in fetched_json.keys()
assert fetched_json['foo'] == 'bar'
assert "foo" in fetched_json.keys()
assert fetched_json["foo"] == "bar"
def test_download_json_bad_json(test_url):
with requests_mock.Mocker() as mock:
mock.register_uri('GET', test_url, text='notjsonlol')
mock.register_uri("GET", test_url, text="notjsonlol")
with pytest.raises(MoulinetteError):
download_json(test_url)

View file

@@ -4,14 +4,117 @@ from subprocess import CalledProcessError
import pytest
from moulinette.utils.process import run_commands
from moulinette.utils.process import call_async_output
from moulinette.utils.process import check_output
def test_run_shell_command_list(test_file):
assert os.path.exists(str(test_file))
run_commands(['rm -f %s' % str(test_file)])
run_commands(["rm -f %s" % str(test_file)])
assert not os.path.exists(str(test_file))
def test_run_shell_bad_cmd():
with pytest.raises(CalledProcessError):
run_commands(['yolo swag'])
run_commands(["yolo swag"])
def test_run_shell_bad_cmd_with_callback():
def callback(a, b, c):
assert isinstance(a, int)
assert isinstance(b, str)
assert isinstance(c, str)
return True
assert run_commands(["yolo swag", "yolo swag", "yolo swag"], callback=callback) == 3
def callback(a, b, c):
assert isinstance(a, int)
assert isinstance(b, str)
assert isinstance(c, str)
return False
assert run_commands(["yolo swag", "yolo swag"], callback=callback) == 1
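# The callback's return value drives the loop: True moves on to the next
# command, False stops early, and run_commands() returns how many commands were
# attempted (3 above, 1 here).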
def callback(a, b, c):
assert isinstance(a, int)
assert isinstance(b, str)
assert isinstance(c, tuple)
return True
run_commands(["yolo swag"], separate_stderr=True, callback=callback)
def test_run_shell_bad_callback():
callback = 1
with pytest.raises(ValueError):
run_commands(["ls"], callback=callback)
def test_run_shell_kwargs():
with pytest.raises(ValueError):
run_commands([""], stdout="None")
with pytest.raises(ValueError):
run_commands([""], stderr="None")
run_commands(["ls"], cwd="/tmp")
with pytest.raises(OSError):
run_commands(["ls"], cwd="/yoloswag")
def test_call_async_output(test_file):
def callback(a):
assert a == "foo\n" or a == "bar\n"
call_async_output(["cat", str(test_file)], callback)
with pytest.raises(ValueError):
call_async_output(["cat", str(test_file)], 1)
def callbackA(a):
assert a == "foo\n" or a == "bar\n"
def callbackB(a):
pass
callback = (callbackA, callbackB)
call_async_output(["cat", str(test_file)], callback)
def test_call_async_output_kwargs(test_file, mocker):
def callback(a):
assert a == "foo\n" or a == "bar\n"
with pytest.raises(ValueError):
call_async_output(["cat", str(test_file)], callback, stdout=None)
with pytest.raises(ValueError):
call_async_output(["cat", str(test_file)], callback, stderr=None)
call_async_output(["cat", str(test_file)], callback, stdinfo=None)
def callbackA(a):
assert a == "foo\n" or a == "bar\n"
def callbackB(a):
pass
def callbackC(a):
pass
callback = (callbackA, callbackB, callbackC)
dirname = os.path.dirname(str(test_file))
os.mkdir(os.path.join(dirname, "teststdinfo"))
call_async_output(
["cat", str(test_file)],
callback,
stdinfo=os.path.join(dirname, "teststdinfo", "teststdinfo"),
)
def test_check_output(test_file):
assert check_output(["cat", str(test_file)], shell=False) == "foo\nbar\n"
assert check_output("cat %s" % str(test_file)) == "foo\nbar\n"
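# As exercised here, check_output() accepts either a plain command string,
# interpreted through a shell, or an argument list combined with shell=False.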

View file

@@ -7,8 +7,8 @@ def test_json_extended_encoder(caplog):
assert encoder.default(set([1, 2, 3])) == [1, 2, 3]
assert encoder.default(dt(1917, 3, 8)) == '1917-03-08T00:00:00+00:00'
assert encoder.default(dt(1917, 3, 8)) == "1917-03-08T00:00:00+00:00"
assert encoder.default(None) == 'None'
assert encoder.default(None) == "None"
for message in caplog.messages:
assert 'cannot properly encode in JSON' in message
assert "cannot properly encode in JSON" in message

View file

@@ -2,19 +2,20 @@ from moulinette.utils.text import search, searchf, prependlines, random_ascii
def test_search():
assert search('a', 'a a a') == ['a', 'a', 'a']
assert search('a', 'a a a', count=2) == ['a', 'a']
assert not search('a', 'c c d')
assert search("a", "a a a") == ["a", "a", "a"]
assert search("a", "a a a", count=2) == ["a", "a"]
assert search("a", "a a a", count=-1) == "a"
assert not search("a", "c c d")
def test_searchf(test_file):
assert searchf('bar', str(test_file)) == ['bar']
assert not searchf('baz', str(test_file))
assert searchf("bar", str(test_file)) == ["bar"]
assert not searchf("baz", str(test_file))
def test_prependlines():
assert prependlines('abc\nedf\nghi', 'XXX') == 'XXXabc\nXXXedf\nXXXghi'
assert prependlines('', 'XXX') == 'XXX'
assert prependlines("abc\nedf\nghi", "XXX") == "XXXabc\nXXXedf\nXXXghi"
assert prependlines("", "XXX") == "XXX"
def test_random_ascii():

19
tox.ini
View file

@@ -15,6 +15,10 @@ deps =
pytest-env >= 0.6.2, < 1.0
requests >= 2.22.0, < 3.0
requests-mock >= 1.6.0, < 2.0
toml >= 0.10, < 0.11
gevent-websocket
bottle >= 0.12
WebTest >= 2.0, < 2.1
commands =
pytest {posargs}
@@ -24,6 +28,21 @@ deps = flake8
skip_install = True
usedevelop = False
[testenv:format]
basepython = python3
commands = black {posargs} moulinette test
deps = black
skip_install = True
usedevelop = False
[testenv:format-check]
basepython = {[testenv:format]basepython}
commands = black {posargs:--check --diff} moulinette test
deps = {[testenv:format]deps}
skip_install = {[testenv:format]skip_install}
usedevelop = {[testenv:format]usedevelop}
[testenv:docs]
usedevelop = True
commands = python -m sphinx -W doc/ doc/_build
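With these two environments in place, the Black formatting used across moulinette and test can be applied or verified locally before pushing:
tox -e format        # rewrites moulinette/ and test/ with Black
tox -e format-check  # --check --diff variant, fails if anything would be reformatted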