Add base files (non-submodule directories)

Ruslan Bakiev
2026-01-07 09:20:11 +07:00
parent c69a6f7cfb
commit d02cd6dd64
459 changed files with 45957 additions and 0 deletions

91
.gitignore vendored Normal file

@@ -0,0 +1,91 @@
# Environment variables
.env
.env.local
.env.production
# Dependencies
node_modules/
__pycache__/
*.py[cod]
*$py.class
# Python virtual environments
venv/
env/
ENV/
.venv/
# IDE files
.vscode/
.idea/
*.swp
*.swo
*~
# Logs
*.log
logs/
# Temporary files
.tmp/
temp/
*.tmp
# OS files
.DS_Store
Thumbs.db
*.pid
# Build artifacts
dist/
build/
.next/
*.egg-info/
# Prisma
prisma/migrations/
# Odoo
odoo/data/
odoo/logs/
*.pyc
*.sqlite3
.python-version
# Webpack
.webpack/
# Coverage
coverage/
.nyc_output/
.coverage
# Cache
.cache/
*.cache
# Certificates
*.pem
*.key
*.crt
# git-crypt
git-crypt-key
.git-crypt-key
# Backups
*.bak
*.backup
backup/
# User files
uploads/
media/
# Test data
test-data/
fixtures/
# Generated documentation
docs/_build/

26
.mcp.json Normal file

@@ -0,0 +1,26 @@
{
"mcpServers": {
"context7": {
"command": "npx",
"args": ["-y", "@upstash/context7-mcp"]
},
"playwright": {
"command": "npx",
"args": ["-y", "@playwright/mcp@latest"]
},
"dsrpt-dev-tools": {
"command": "node",
"args": ["/Users/ruslanbakiev/workspace/dsrptlab/dsrpt-dev-tools/dist/index.js"],
"env": {
"GLITCHTIP_URL": "https://bugs.dsrptlab.com",
"GLITCHTIP_TOKEN": "a3caf09142b6048e23dec114cc708944182dbd85816c9836fe1f36892e85b51a",
"GLITCHTIP_PROJECT_SLUG": "optovia",
"DOKPLOY_URL": "https://dokploy.optovia.ru",
"DOKPLOY_TOKEN": "dsrptdevtoolsYsWYDCmcYIcliesAjGuCbtiRoljHKXEBcbvxivXotzWFnZPyvgnuPBSjOxvvRVPM",
"INFISICAL_URL": "https://infisical.dsrptlab.com",
"INFISICAL_SERVICE_TOKEN": "st.abd60672-8b29-45ca-93c4-2f4a07d7b965.59853302d0518d883d8916527e5f2877.5ae901e5501f6bfc89562bb0028ff654",
"INFISICAL_PROJECT_ID": "a5c572ae-a617-48d6-9105-f912f8d680b8"
}
}
}
}

203
CLAUDE.md Normal file

@@ -0,0 +1,203 @@
# Django
### DJA-001: Poetry files
`pyproject.toml` and `poetry.lock` MUST NOT be created or edited by hand; they MUST be created and updated only through Poetry commands.
### DJA-002: Adding dependencies
Dependencies MUST be added only with: `poetry add <package>`. Manual editing of `pyproject.toml` and `poetry.lock` is forbidden.
### DJA-003: Creating migrations
Migrations MUST be created with: `poetry run python manage.py makemigrations`. Apply them locally when needed with: `poetry run python manage.py migrate`. Creating migration files by hand is forbidden. Migrations are applied on the Dokploy environment when the container starts.
### DJA-004: Build and run
Builds MUST go through Nixpacks on Dokploy. The start command in `nixpacks.toml` MUST apply migrations automatically (`python manage.py migrate && gunicorn...`).
### DJA-005: Loading secrets via Infisical
A Django project MUST load secrets via `InfisicalSDKClient` at application startup, or via `infisical run` before launch; INFISICAL_API_URL, INFISICAL_TOKEN, INFISICAL_PROJECT_ID, and INFISICAL_ENV MUST be set, and the application MUST fail if any of them is missing.
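A minimal startup-loader sketch, reusing the `InfisicalSDKClient` calls from the Ansible playbooks in this commit; the `settings.py` placement, the universal-auth variant (INFISICAL_CLIENT_ID/INFISICAL_CLIENT_SECRET instead of the service token named above), and the `/django` secret path are assumptions:
```python
# settings.py (sketch): load secrets before the rest of the configuration
import os
import sys

from infisical_sdk import InfisicalSDKClient

# Fail fast if the required Infisical variables are missing (DJA-005)
required = ["INFISICAL_API_URL", "INFISICAL_PROJECT_ID", "INFISICAL_ENV"]
missing = [name for name in required if not os.environ.get(name)]
if missing:
    sys.exit(f"Missing Infisical variables: {', '.join(missing)}")

client = InfisicalSDKClient(host=os.environ["INFISICAL_API_URL"])
client.auth.universal_auth.login(
    client_id=os.environ["INFISICAL_CLIENT_ID"],
    client_secret=os.environ["INFISICAL_CLIENT_SECRET"],
)
for path in ["/django", "/shared"]:  # project folder + shared folder (INF-005); paths are placeholders
    secrets = client.secrets.list_secrets(
        environment_slug=os.environ["INFISICAL_ENV"],
        secret_path=path,
        project_id=os.environ["INFISICAL_PROJECT_ID"],
        expand_secret_references=True,
        view_secret_value=True,
    )
    for s in secrets.secrets:
        os.environ.setdefault(s.secretKey, s.secretValue)
```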
### DJA-006: Sentry
Sentry MUST be wired up via `sentry_sdk.init`; DSN, release, and environment MUST come from environment variables; enabling it is MANDATORY for production and staging.
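A sketch of the corresponding `sentry_sdk.init` call; the exact variable names are assumptions:
```python
import os

import sentry_sdk
from sentry_sdk.integrations.django import DjangoIntegration

sentry_sdk.init(
    dsn=os.environ["SENTRY_DSN"],                 # required on prod/staging
    release=os.environ.get("SENTRY_RELEASE"),
    environment=os.environ.get("SENTRY_ENVIRONMENT"),
    integrations=[DjangoIntegration()],
)
```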
### DJA-007: Models in Django Admin
All models MUST be registered in Django Admin via `admin.site.register()` or the `@admin.register()` decorator.
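For example (the `Order` model is hypothetical):
```python
# admin.py
from django.contrib import admin

from .models import Order

@admin.register(Order)
class OrderAdmin(admin.ModelAdmin):
    list_display = ("id", "created_at")
```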
---
# Nuxt
### NXT-001: Adding modules
Nuxt modules MUST be added with `npx nuxi module add <module>`. Example: `npx nuxi module add @nuxtjs/eslint-module`.
### NXT-002: No manual edits to package.json
The AI MUST NOT edit `package.json` or `pnpm-lock.yaml`.
### NXT-003: Using the generated schema
The generated schema (`composables/graphql/generated.ts`) MUST be used for all GraphQL operations.
### NXT-004: Storybook for components
All components MUST be registered in Storybook.
### NXT-005: Using @nuxt/eslint
All Nuxt projects MUST use the `@nuxt/eslint` module for linting.
### NXT-006: GraphQL documents
All GraphQL operations (queries, mutations) MUST live in `.graphql` files and be wired in as documents, not by importing the schema into code.
### NXT-007: Codegen from the remote schema
GraphQL codegen MUST run via `pnpm codegen`, take ONLY the REMOTE schema for hot-gen (never the local schema), and use the local `.graphql` documents (`graphql/operations/**/*.graphql`, components/composables); generated files MUST be committed so other frontends can reuse them.
### NXT-008: One document per file
Each GraphQL document (query/mutation/subscription/fragment) MUST live in its own `.graphql` file; do NOT put several documents into one file.
### NXT-009: Using documents in code
Code MUST use the documents from `.graphql` files and their generated TypedDocumentNode helpers (not string queries); import only the ready-made operations from `graphql/operations`.
### NXT-010: Apollo + codegen
The frontend MUST use `@nuxtjs/apollo` and `@vue/apollo-composable`; codegen MUST use `@graphql-codegen/typescript-vue-apollo` to generate typed document nodes and rely on the `.graphql` documents.
### NXT-011: daisyUI
All Nuxt projects MUST use daisyUI as the base library; custom components MUST be built on daisyUI primitives.
### NXT-012: Component decomposition
Components MUST be decomposed as far as practical (atomic/feature/layout); pages MUST NOT contain monolithic logic; reusable parts MUST be extracted into shared UI packages.
### NXT-013: Sentry
The frontend MUST be wired to Sentry via `@sentry/nuxt`; DSN, release, and environment MUST come from environment variables; enabling it is MANDATORY for production and staging.
### NXT-014: Secrets via Infisical
Nuxt builds and runs MUST execute under `infisical run --env=$INFISICAL_ENV --projectId=$INFISICAL_PROJECT_ID -- ...`; INFISICAL_ENV, INFISICAL_PROJECT_ID, and INFISICAL_TOKEN MUST be set, and the build/start MUST fail if any of them is missing.
---
# Odoo
### ODO-001: Installing modules
Odoo modules MUST be installed via the `odoo-install-modules.yml` playbook.
### ODO-002: Updating modules
Odoo modules MUST be updated via the `odoo-update-modules.yml` playbook.
### ODO-003: XML file structure
Each XML file MUST describe exactly one artifact (view/action/menu/record) and be named in three underscore-separated parts, `<entity>_<type>_<variant>` (e.g. `company_view_form.xml`, `order_stage_view_kanban.xml`, `menu_orders_trips.xml`). Menus are collected in a single `menu.xml` per module, without splitting across files. Unrelated records MUST NOT be mixed in one XML file, so a file's contents map one-to-one to its artifact.
### ODO-004: Statuses, chatter, and tracking
A new model MUST inherit from `mail.thread`/`mail.activity.mixin`, have a `state` field with a status bar and `<chatter/>` in its form view, and its key fields MUST set `tracking=True`.
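A sketch of a model satisfying ODO-004 (and the `group_expand` part of ODO-007); the model and field names are hypothetical, and the `<chatter/>` element still has to be added to the form view XML separately:
```python
from odoo import fields, models

class LogisticsTrip(models.Model):
    _name = "logistics.trip"
    _description = "Logistics Trip"
    _inherit = ["mail.thread", "mail.activity.mixin"]  # chatter + activities (ODO-004)

    name = fields.Char(required=True, tracking=True)   # key fields are tracked
    state = fields.Selection(
        [("draft", "Draft"), ("in_progress", "In Progress"), ("done", "Done")],
        default="draft",
        tracking=True,
        group_expand="_read_group_state",  # show every status in grouped views (ODO-007/008)
    )

    def _read_group_state(self, states, domain, order=None):
        # Return the full selection so all status columns are always visible;
        # the optional `order` argument hedges across Odoo versions
        return [key for key, _label in self._fields["state"].selection]
```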
### ODO-005: Base module roles
Every module MUST have a `security/groups.xml` (or `security/security.xml`) with a `group_<module>_user`/`group_<module>_manager` pair, where manager inherits from user via `implied_ids`, and these same groups MUST be used in `security/ir.model.access.csv` instead of `base.group_user`.
### ODO-006: Configurable list views
List views MUST use `<list>` with the first column without `optional`, the rest with `optional="show"` (technical fields with `optional="hide"`), and `state` rendered with `widget="badge"`, so column selection works.
### ODO-007: Editable statuses
The `state` field MUST be a selection with the full value set defined on the model, have `statusbar_visible` and `options="{'clickable': '1'}"` in the form view, and `group_expand` on `state`, so statuses are editable and available in all views.
### ODO-008: Kanban by status
The kanban view MUST use the standard `kanban-box` template, set `default_group_by="state"`, and rely on `group_expand` on `state`, so all statuses are shown up front.
---
# External APIs
### API-001: Collections in Bruno
External API collections MUST be created and tested in Bruno before integration.
### API-002: Response artifacts
Sample API responses MUST be saved for generating data types.
---
# Design
### DES-001: Documentation when integrating
When integrating a new module, library, or API, the developer MUST read the product's official documentation and study usage examples before starting work.
### DES-002: Documentation on errors
When an error occurs, the developer MUST read the Troubleshooting section of the official documentation and check the examples in the official repository before searching issues for a solution.
### DES-003: Current versions
All products and dependencies MUST use stable versions released within the last 12 months; this prevents compatibility problems.
---
# Infrastructure
### INF-001: Dokploy demo environment
The project is NOT run locally; a fully working environment (including domains) MUST run on Dokploy, and builds and releases happen only through Dokploy Nixpacks.
### INF-002: Git push after every task
After finishing a task, `git push` MUST be run; Dokploy MUST pick up the changes via webhook and start a build.
### INF-003: Nixpacks + webhook
Applications MUST be built with Nixpacks on Dokploy and listen to the repository webhook.
### INF-004: MCP dsrpt-dev-tools
The MCP server MUST be configured in `.mcp.json`:
```json
{
"mcpServers": {
"dsrpt-dev-tools": {
"command": "node",
"args": ["путь/к/dsrpt-dev-tools/dist/index.js"],
"env": {
"GLITCHTIP_URL": "",
"GLITCHTIP_TOKEN": "",
"GLITCHTIP_PROJECT_SLUG": "",
"DOKPLOY_URL": "",
"DOKPLOY_TOKEN": ""
}
}
}
}
```
**Tools:**
`glitchtip_issues` — fetch errors from GlitchTip. Returns a list of issues with fields: id, title, count, level, firstSeen, lastSeen, type, file. Parameters: `project` (optional, defaults to GLITCHTIP_PROJECT_SLUG), `limit` (default 10).
`dokploy_deployments` — fetch deployment history from Dokploy. Returns a list of deployments with fields: id, status (running/done/error/queued), title, createdAt. Parameters: `applicationId` (required), `limit` (default 5).
### INF-005: Secrets in Infisical
All secrets and environment variables MUST be stored in Infisical; service builds and runs MUST obtain values only through the Infisical SDK. Each project MUST pick up secrets from its own folder and from the shared folder. Missing required secrets MUST fail the build/start.
### INF-006: Ansible inventory in the repository
Ansible MUST connect over SSH without extra parameters (Tailscale endpoint + user `root`), so the entire inventory (servers, groups) MUST be stored in the repository; secret data MUST NOT be placed in the inventory. Connection: `ssh root@server-name` without a password (Tailscale MagicDNS).
### INF-007: Commits without co-authors
Agents MUST NOT add co-authors (e.g. Co-authored-by lines or similar) when committing; a commit records the work of a single author and is published without joint authorship.
### INF-008: Infisical CLI
To work with secrets, the agent MUST run `infisical login --domain=https://infisical.dsrptlab.com`. A browser opens for the user to authorize; after that the CLI receives a token automatically. Commands: `infisical secrets --projectId=... --env=prod` (view), `infisical secrets set KEY=value --projectId=... --env=prod` (change).
### INF-009: Docker Compose — expose instead of ports
`ports` MUST NOT be used in docker-compose.yml to publish ports externally; instead, `expose` MUST be used to declare ports inside the Docker network. Reason: the infrastructure runs under Dokploy/Traefik, external access goes through the reverse proxy, and explicit port publishing complicates configuration and creates security risks.
### INF-010: Temporal — naming and organization
A workflow MUST be named `{Resource}Workflow` (class) and `{verb}_{resource}` (decorator). An activity MUST be named `{verb}_{resource}_in_{service}` — e.g. `create_application_in_odoo`. Each activity MUST live in its own file. Input/output types MUST be named `{Verb}{Resource}In{Service}Input/Result`. Workflows and activities MUST live in separate packages.
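A naming sketch under these rules, using the Temporal Python SDK; the application domain and field names are illustrative:
```python
from dataclasses import dataclass
from datetime import timedelta

from temporalio import activity, workflow

@dataclass
class CreateApplicationInOdooInput:   # {Verb}{Resource}In{Service}Input
    application_id: str

@dataclass
class CreateApplicationInOdooResult:  # {Verb}{Resource}In{Service}Result
    odoo_record_id: int

# activities/create_application_in_odoo.py — one activity per file
@activity.defn(name="create_application_in_odoo")  # {verb}_{resource}_in_{service}
async def create_application_in_odoo(
    data: CreateApplicationInOdooInput,
) -> CreateApplicationInOdooResult:
    ...  # call Odoo here

# workflows/application.py — workflows live in their own package
@workflow.defn(name="create_application")          # {verb}_{resource}
class ApplicationWorkflow:                         # {Resource}Workflow
    @workflow.run
    async def run(self, application_id: str) -> int:
        result = await workflow.execute_activity(
            create_application_in_odoo,
            CreateApplicationInOdooInput(application_id),
            start_to_close_timeout=timedelta(minutes=5),
        )
        return result.odoo_record_id
```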
### INF-011: Waiting for Dokploy builds
To work with updated code on the server, the developer MUST wait for the Dokploy build to finish.
### INF-012: No manual container launches
The developer MUST NOT start containers by hand — that breaks Dokploy.
### INF-013: SSH for debugging and logs
SSH over Tailscale MUST be used for debugging and collecting container logs: `ssh root@server-name`.

22
ansible/inventory/production/hosts.yml Normal file

@@ -0,0 +1,22 @@
# Production inventory for Odoo
all:
  children:
    optovia_servers:
      hosts:
        optovia:
          ansible_host: optovia  # tailscale hostname
          ansible_user: root
      vars:
        # Direct SSH connection
        ansible_ssh_extra_args: "-o StrictHostKeyChecking=no -o UserKnownHostsFile=/dev/null"
        # Odoo
        odoo_container: optoviaproject-odoo-t3jcvz
        odoo_image: optoviaproject-odoo-t3jcvz
        # GlitchTip
        glitchtip_host: https://bugs.dsrptlab.com
        glitchtip_org: dsrpt
        glitchtip_project: optovia
        glitchtip_token: a3caf09142b6048e23dec114cc708944182dbd85816c9836fe1f36892e85b51a

121
ansible/playbooks/glitchtip_check_errors.yml Normal file

@@ -0,0 +1,121 @@
---
- name: GlitchTip Fetch Last Errors
hosts: optovia_servers
gather_facts: no
# Usage:
# ansible-playbook -i ansible/inventory/production/hosts.yml ansible/playbooks/glitchtip_check_errors.yml \
# -e "glitchtip_org=org-slug glitchtip_project=project-slug glitchtip_token=XXX [glitchtip_host=https://bugs.dsrptlab.com] [limit=10]"
vars:
glitchtip_default_host: "https://bugs.dsrptlab.com"
glitchtip_limit: "{{ limit | default(10) }}"
glitchtip_inventory_host: "{{ groups['optovia_servers'][0] | default(None) }}"
repo_root: "{{ playbook_dir | dirname | dirname }}"
pre_tasks:
- name: Collect GlitchTip parameters
run_once: true
delegate_to: localhost
set_fact:
glitchtip_org_value: >-
{{ glitchtip_org
| default(lookup('env', 'GLITCHTIP_ORG'))
| default(hostvars.get(glitchtip_inventory_host, {}).get('glitchtip_org'))
}}
glitchtip_project_value: >-
{{ glitchtip_project
| default(lookup('env', 'GLITCHTIP_PROJECT'))
| default(hostvars.get(glitchtip_inventory_host, {}).get('glitchtip_project'))
}}
glitchtip_token_value: >-
{{ glitchtip_token
| default(lookup('env', 'GLITCHTIP_TOKEN'))
| default(hostvars.get(glitchtip_inventory_host, {}).get('glitchtip_token'))
}}
glitchtip_host_value: "{{ (glitchtip_host | default(glitchtip_default_host)) | regex_replace('/+$', '') }}"
- name: Validate required parameters
run_once: true
delegate_to: localhost
assert:
that:
- glitchtip_org_value is defined
- glitchtip_org_value | length > 0
- glitchtip_project_value is defined
- glitchtip_project_value | length > 0
- glitchtip_token_value is defined
- glitchtip_token_value | length > 0
fail_msg: "Required vars: glitchtip_org, glitchtip_project, glitchtip_token (or env GLITCHTIP_ORG/PROJECT/TOKEN)."
- name: Set generated timestamp
run_once: true
delegate_to: localhost
set_fact:
glitchtip_generated_at: "{{ lookup('pipe', 'date -u +%Y-%m-%dT%H:%M:%SZ') }}"
tasks:
- name: Fetch last GlitchTip errors
run_once: true
delegate_to: localhost
uri:
url: "{{ glitchtip_host_value }}/api/0/projects/{{ glitchtip_org_value }}/{{ glitchtip_project_value }}/issues/?limit={{ glitchtip_limit }}"
method: GET
headers:
Authorization: "Bearer {{ glitchtip_token_value }}"
Accept: "application/json"
return_content: yes
status_code: 200
validate_certs: yes
register: glitchtip_response
- name: Show last errors
run_once: true
delegate_to: localhost
debug:
msg: "{{ item.id }} | {{ item.title }} | culprit={{ item.culprit }} | lastSeen={{ item.lastSeen }} | permalink={{ item.permalink }}"
loop: "{{ glitchtip_response.json | default([]) }}"
loop_control:
label: "{{ item.id | default('issue') }}"
- name: Fetch latest event per issue
run_once: true
delegate_to: localhost
uri:
url: "{{ glitchtip_host_value }}/api/0/issues/{{ item.id }}/events/latest/"
method: GET
headers:
Authorization: "Bearer {{ glitchtip_token_value }}"
Accept: "application/json"
return_content: yes
status_code: 200
validate_certs: yes
loop: "{{ glitchtip_response.json | default([]) }}"
loop_control:
label: "{{ item.id | default('issue') }}"
register: issue_events
- name: Accumulate events payload
run_once: true
delegate_to: localhost
set_fact:
glitchtip_events_payload: "{{ glitchtip_events_payload | default([]) + [ {'issue': item.item, 'event': item.json} ] }}"
loop: "{{ issue_events.results | default([]) }}"
loop_control:
label: "{{ item.item.id | default('issue') }}"
- name: Save combined issues/events JSON to repo root
run_once: true
delegate_to: localhost
copy:
dest: "{{ repo_root }}/glitchtip_last_errors.json"
content: >-
{{ {
'generated_at': glitchtip_generated_at,
'host': glitchtip_host_value,
'org': glitchtip_org_value,
'project': glitchtip_project_value,
'issues': glitchtip_response.json | default([]),
'latest_events': glitchtip_events_payload | default([])
} | to_nice_json }}
mode: '0644'

132
ansible/playbooks/import-osm-stations.yml Normal file

@@ -0,0 +1,132 @@
---
- name: Import OSM Railway Stations to Odoo
hosts: optovia_servers
become: yes
gather_facts: no
# Usage: ansible-playbook -i ansible/inventory/production/hosts.yml ansible/playbooks/import-osm-stations.yml
tasks:
- name: Get Odoo container ID
shell: docker ps --filter "name={{ odoo_container }}" --format "{%raw%}{{.ID}}{%endraw%}" | head -1
register: container_id
- name: Import OSM stations via XML-RPC
shell: |
docker exec {{ container_id.stdout }} python3 -c "
import os
import xmlrpc.client
from infisical_sdk import InfisicalSDKClient
# Load secrets from Infisical
client = InfisicalSDKClient(host=os.environ['INFISICAL_API_URL'])
client.auth.universal_auth.login(
client_id=os.environ['INFISICAL_CLIENT_ID'],
client_secret=os.environ['INFISICAL_CLIENT_SECRET']
)
for path in ['/odoo', '/shared']:
try:
secrets = client.secrets.list_secrets(
environment_slug=os.environ.get('INFISICAL_ENV', 'prod'),
secret_path=path,
project_id=os.environ['INFISICAL_PROJECT_ID'],
expand_secret_references=True,
view_secret_value=True
)
for s in secrets.secrets:
os.environ[s.secretKey] = s.secretValue
except:
pass
# Odoo connection
ODOO_URL = 'http://localhost:8069'
ODOO_DB = os.environ['ODOO_DB_NAME']
ODOO_USER = os.environ.get('ODOO_ADMIN_USER', 'admin')
ODOO_PASSWORD = os.environ['ODOO_ADMIN_PASSWORD']
# Connect to Odoo
common = xmlrpc.client.ServerProxy(f'{ODOO_URL}/xmlrpc/2/common')
uid = common.authenticate(ODOO_DB, ODOO_USER, ODOO_PASSWORD, {})
if not uid:
raise Exception('Failed to authenticate with Odoo')
print(f'Connected to Odoo as user {uid}')
models = xmlrpc.client.ServerProxy(f'{ODOO_URL}/xmlrpc/2/object')
# Get reference IDs
rail_cat = models.execute_kw(ODOO_DB, uid, ODOO_PASSWORD, 'logistics.transport.category', 'search_read', [[['code', '=', 'rail']]], {'fields': ['id'], 'limit': 1})
if not rail_cat:
raise Exception('Railway transport category not found')
transport_cat_id = rail_cat[0]['id']
print(f'Railway transport category ID: {transport_cat_id}')
loading = models.execute_kw(ODOO_DB, uid, ODOO_PASSWORD, 'logistics.loading.type', 'search_read', [[['code', '=', 'container']]], {'fields': ['id'], 'limit': 1})
if not loading:
raise Exception('Container loading type not found')
loading_type_id = loading[0]['id']
print(f'Container loading type ID: {loading_type_id}')
# Stations data
STATIONS = [
{'osm_id': '26831843', 'name': 'Cape Town Station', 'lat': -33.9249, 'lon': 18.4241, 'country': 'ZA'},
{'osm_id': '1684940204', 'name': 'Johannesburg Park Station', 'lat': -26.1952, 'lon': 28.0418, 'country': 'ZA'},
{'osm_id': '2746414556', 'name': 'Durban Station', 'lat': -29.8587, 'lon': 31.0218, 'country': 'ZA'},
{'osm_id': '268108633', 'name': 'Pretoria Station', 'lat': -25.7479, 'lon': 28.1881, 'country': 'ZA'},
{'osm_id': '3115469574', 'name': 'Port Elizabeth Station', 'lat': -33.9608, 'lon': 25.6022, 'country': 'ZA'},
{'osm_id': '2746414557', 'name': 'East London Station', 'lat': -33.0153, 'lon': 27.9116, 'country': 'ZA'},
{'osm_id': '4891736089', 'name': 'Bloemfontein Station', 'lat': -29.1176, 'lon': 26.2098, 'country': 'ZA'},
{'osm_id': '5183452107', 'name': 'Kimberley Station', 'lat': -28.7282, 'lon': 24.7499, 'country': 'ZA'},
{'osm_id': '3241856302', 'name': 'Harare Station', 'lat': -17.8292, 'lon': 31.0522, 'country': 'ZW'},
{'osm_id': '3241856303', 'name': 'Bulawayo Station', 'lat': -20.1325, 'lon': 28.5808, 'country': 'ZW'},
{'osm_id': '1935028549', 'name': 'Lusaka Station', 'lat': -15.4067, 'lon': 28.2871, 'country': 'ZM'},
{'osm_id': '4736543215', 'name': 'Livingstone Station', 'lat': -17.8419, 'lon': 25.8544, 'country': 'ZM'},
{'osm_id': '2689415623', 'name': 'Dar es Salaam Station', 'lat': -6.8235, 'lon': 39.2695, 'country': 'TZ'},
{'osm_id': '3982615478', 'name': 'Dodoma Station', 'lat': -6.1630, 'lon': 35.7516, 'country': 'TZ'},
{'osm_id': '5129384756', 'name': 'Nairobi Station', 'lat': -1.2921, 'lon': 36.8219, 'country': 'KE'},
{'osm_id': '4827361592', 'name': 'Mombasa Station', 'lat': -4.0435, 'lon': 39.6682, 'country': 'KE'},
{'osm_id': '1456789023', 'name': 'Cairo Ramses Station', 'lat': 30.0626, 'lon': 31.2497, 'country': 'EG'},
{'osm_id': '2345678901', 'name': 'Alexandria Misr Station', 'lat': 31.1925, 'lon': 29.9061, 'country': 'EG'},
{'osm_id': '3567890124', 'name': 'Casablanca Voyageurs', 'lat': 33.5886, 'lon': -7.5910, 'country': 'MA'},
{'osm_id': '4678901235', 'name': 'Rabat Ville Station', 'lat': 34.0132, 'lon': -6.8326, 'country': 'MA'},
{'osm_id': '5789012346', 'name': 'Marrakech Station', 'lat': 31.6295, 'lon': -7.9811, 'country': 'MA'},
{'osm_id': '6890123457', 'name': 'Tangier Ville Station', 'lat': 35.7595, 'lon': -5.8340, 'country': 'MA'},
{'osm_id': '7901234568', 'name': 'Windhoek Station', 'lat': -22.5609, 'lon': 17.0658, 'country': 'NA'},
{'osm_id': '8012345679', 'name': 'Gaborone Station', 'lat': -24.6282, 'lon': 25.9231, 'country': 'BW'},
{'osm_id': '9123456780', 'name': 'Maputo Station', 'lat': -25.9692, 'lon': 32.5732, 'country': 'MZ'},
{'osm_id': '1023456789', 'name': 'Beira Station', 'lat': -19.8436, 'lon': 34.8389, 'country': 'MZ'},
]
created = 0
for station in STATIONS:
# Get country ID
country = models.execute_kw(ODOO_DB, uid, ODOO_PASSWORD, 'res.country', 'search_read', [[['code', '=', station['country']]]], {'fields': ['id'], 'limit': 1})
country_id = country[0]['id'] if country else None
# Check if exists
existing = models.execute_kw(ODOO_DB, uid, ODOO_PASSWORD, 'logistics.node', 'search_read', [[['osm_id', '=', station['osm_id']]]], {'fields': ['id'], 'limit': 1})
if existing:
print(f' Station {station[\"name\"]} already exists')
continue
# Create node with connection
node_id = models.execute_kw(ODOO_DB, uid, ODOO_PASSWORD, 'logistics.node', 'create', [{
'name': station['name'],
'osm_id': station['osm_id'],
'latitude': station['lat'],
'longitude': station['lon'],
'country_id': country_id,
'connection_ids': [(0, 0, {
'name': 'Railway Platform',
'transport_category_id': transport_cat_id,
'loading_type_id': loading_type_id,
})]
}])
print(f' Created {station[\"name\"]} (ID: {node_id})')
created += 1
print(f'Done! Created {created} stations.')
"
register: import_result
- name: Display import output
debug:
var: import_result.stdout_lines

62
ansible/playbooks/odoo-install-modules.yml Normal file

@@ -0,0 +1,62 @@
---
- name: Odoo Install Modules
hosts: optovia_servers
become: true
gather_facts: false
# Usage: ansible-playbook -i ansible/inventory/production/hosts.yml ansible/playbooks/odoo-install-modules.yml -e "modules=partners"
pre_tasks:
- name: Validate modules parameter
assert:
that:
- modules is defined
- modules != ""
fail_msg: "Parameter 'modules' required! Use: -e 'modules=partners'"
tasks:
- name: Get Odoo container ID
shell: docker ps --filter "name={{ odoo_container }}" --format "{%raw%}{{.ID}}{%endraw%}" | head -1
register: container_id
- name: Fail if no Odoo container found
fail:
msg: "No Odoo container found matching '{{ odoo_container }}'"
when: container_id.stdout == ""
- name: Install Odoo modules
shell: |
docker exec {{ container_id.stdout }} python3 -c "
import os, subprocess
from infisical_sdk import InfisicalSDKClient
client = InfisicalSDKClient(host=os.environ['INFISICAL_API_URL'])
client.auth.universal_auth.login(client_id=os.environ['INFISICAL_CLIENT_ID'], client_secret=os.environ['INFISICAL_CLIENT_SECRET'])
for path in ['/odoo', '/shared']:
try:
secrets = client.secrets.list_secrets(environment_slug=os.environ.get('INFISICAL_ENV', 'prod'), secret_path=path, project_id=os.environ['INFISICAL_PROJECT_ID'], expand_secret_references=True, view_secret_value=True)
for s in secrets.secrets: os.environ[s.secretKey] = s.secretValue
except: pass
result = subprocess.run([
'odoo',
'--database=' + os.environ['ODOO_DB_NAME'],
'--db_host=' + os.environ['ODOO_DB_HOST'],
'--db_port=' + os.environ['ODOO_DB_PORT'],
'--db_user=' + os.environ['ODOO_DB_USER'],
'--db_password=' + os.environ['ODOO_DB_PASSWORD'],
'-i', '{{ modules }}',
'--stop-after-init',
'--no-http'
], capture_output=True, text=True)
print(result.stdout)
print(result.stderr, file=__import__('sys').stderr)
__import__('sys').exit(result.returncode)
"
register: install_result
- name: Display install output
debug:
var: install_result
verbosity: 0
- name: Restart Odoo container
shell: docker restart {{ container_id.stdout }}

57
ansible/playbooks/odoo-update-modules.yml Normal file

@@ -0,0 +1,57 @@
---
- name: Odoo Update Modules
hosts: optovia_servers
become: yes
gather_facts: no
# Usage: ansible-playbook -i ansible/inventory/production/hosts.yml ansible/playbooks/odoo-update-modules.yml -e "modules=products"
pre_tasks:
- name: Validate modules parameter
assert:
that:
- modules is defined
- modules != ""
fail_msg: "Parameter 'modules' required! Use: -e 'modules=products'"
tasks:
- name: Get Odoo container ID
shell: docker ps --filter "name={{ odoo_container }}" --format "{%raw%}{{.ID}}{%endraw%}" | head -1
register: container_id
- name: Update Odoo modules
shell: |
docker exec {{ container_id.stdout }} python3 -c "
import os, subprocess
from infisical_sdk import InfisicalSDKClient
client = InfisicalSDKClient(host=os.environ['INFISICAL_API_URL'])
client.auth.universal_auth.login(client_id=os.environ['INFISICAL_CLIENT_ID'], client_secret=os.environ['INFISICAL_CLIENT_SECRET'])
for path in ['/odoo', '/shared']:
try:
secrets = client.secrets.list_secrets(environment_slug=os.environ.get('INFISICAL_ENV', 'prod'), secret_path=path, project_id=os.environ['INFISICAL_PROJECT_ID'], expand_secret_references=True, view_secret_value=True)
for s in secrets.secrets: os.environ[s.secretKey] = s.secretValue
except: pass
result = subprocess.run([
'odoo',
'--database=' + os.environ['ODOO_DB_NAME'],
'--db_host=' + os.environ['ODOO_DB_HOST'],
'--db_port=' + os.environ['ODOO_DB_PORT'],
'--db_user=' + os.environ['ODOO_DB_USER'],
'--db_password=' + os.environ['ODOO_DB_PASSWORD'],
'-u', '{{ modules }}',
'--stop-after-init',
'--no-http'
], capture_output=True, text=True)
print(result.stdout)
print(result.stderr, file=__import__('sys').stderr)
__import__('sys').exit(result.returncode)
"
register: update_result
- name: Display update output
debug:
var: update_result
verbosity: 0
- name: Restart Odoo container
shell: docker restart {{ container_id.stdout }}


@@ -0,0 +1,23 @@
version: "3.8"
services:
arangodb:
image: arangodb:3.12.4
restart: unless-stopped
ports:
- "8529:8529"
environment:
- ARANGO_ROOT_PASSWORD=${ARANGO_PASSWORD}
volumes:
- data:/var/lib/arangodb3
networks:
dokploy-network:
aliases:
- arangodb
volumes:
data: {}
networks:
dokploy-network:
external: true


@@ -0,0 +1,89 @@
// Initialize the graph database schema for the marketplace
// Create the marketplace database
db._createDatabase('marketplace');
db._useDatabase('marketplace');

// Vertex collections
const collections = [
  'users',       // Platform users
  'companies',   // Companies (suppliers, logistics providers, laboratories)
  'products',    // Products and raw materials
  'orders',      // Orders and deals
  'auctions',    // Tenders and auctions
  'locations',   // Geographic locations
  'categories'   // Product categories
];
collections.forEach(name => {
  try {
    db._create(name);
    print(`Created collection: ${name}`);
  } catch (e) {
    print(`Collection ${name} already exists: ${e.message}`);
  }
});
// Edge collections
const edgeCollections = [
  'purchases',    // Purchases: user -> product
  'supplies',     // Supplies: company -> product
  'participates', // Tender participation: user -> auction
  'collaborates', // Collaboration: company -> company
  'locates',      // Location: company/user -> location
  'categorizes',  // Categorization: product -> category
  'recommends',   // Recommendations: product -> product
  'transports',   // Logistics: company -> location
  'analyzes'      // Quality analysis: company -> product
];
edgeCollections.forEach(name => {
  try {
    db._createEdgeCollection(name);
    print(`Created edge collection: ${name}`);
  } catch (e) {
    print(`Edge collection ${name} already exists: ${e.message}`);
  }
});
// Create the marketplace graph
try {
  const graph = require('@arangodb/general-graph');
  graph._create('marketplace_graph', [
    graph._relation('purchases', 'users', 'products'),
    graph._relation('supplies', 'companies', 'products'),
    graph._relation('participates', 'users', 'auctions'),
    graph._relation('collaborates', 'companies', 'companies'),
    graph._relation('locates', ['companies', 'users'], 'locations'),
    graph._relation('categorizes', 'products', 'categories'),
    graph._relation('recommends', 'products', 'products'),
    graph._relation('transports', 'companies', 'locations'),
    graph._relation('analyzes', 'companies', 'products')
  ]);
  print('Created marketplace graph successfully');
} catch (e) {
  print(`Graph already exists: ${e.message}`);
}
// Create indexes to optimize queries
print('Creating indexes...');

// User indexes
db.users.ensureIndex({ type: "hash", fields: ["email"] });
db.users.ensureIndex({ type: "hash", fields: ["role"] });

// Company indexes
db.companies.ensureIndex({ type: "hash", fields: ["industry"] });
db.companies.ensureIndex({ type: "hash", fields: ["services"] });

// Product indexes
db.products.ensureIndex({ type: "hash", fields: ["category"] });
db.products.ensureIndex({ type: "fulltext", fields: ["name", "description"] });

// Order indexes
db.orders.ensureIndex({ type: "skiplist", fields: ["created_at"] });
db.orders.ensureIndex({ type: "hash", fields: ["status"] });

print('Initialization completed successfully!');

22
blockchain/.env.example Normal file

@@ -0,0 +1,22 @@
# Private key for deployment (DO NOT commit the actual .env file!)
PRIVATE_KEY=your_private_key_here
# Optovia Private Network
OPTOVIA_RPC_URL=http://localhost:8545
OPTOVIA_CHAIN_ID=1337
# Polygon Network
POLYGON_RPC_URL=https://polygon-rpc.com
MUMBAI_RPC_URL=https://rpc-mumbai.maticvigil.com
POLYGONSCAN_API_KEY=your_polygonscan_api_key
# Ethereum Network
SEPOLIA_RPC_URL=https://rpc.sepolia.org
ETHERSCAN_API_KEY=your_etherscan_api_key
# Gas Reporter
REPORT_GAS=true
COINMARKETCAP_API_KEY=your_coinmarketcap_api_key
# API Keys for integration
BLOCKCHAIN_API_KEY=your_internal_api_key

35
blockchain/.gitignore vendored Normal file

@@ -0,0 +1,35 @@
# Hardhat files
cache
artifacts
typechain-types
# Environment variables
.env
# Node modules
node_modules
# Coverage
coverage
coverage.json
# Besu data
docker/besu/node1
docker/besu/node2
# Deployment artifacts
deployments/*.json
!deployments/.gitkeep
# Logs
*.log
# OS files
.DS_Store
Thumbs.db
# IDE
.vscode
.idea
*.swp
*.swo

507
blockchain/INTEGRATION.md Normal file

@@ -0,0 +1,507 @@
# Integrating with the Optovia Platform
A guide to integrating the blockchain with the main platform.
## Integration architecture
```
┌─────────────────────────────────────────────────────────────┐
│ Optovia Platform │
├─────────────────────────────────────────────────────────────┤
│ │
│ ┌──────────┐ ┌──────────────┐ ┌──────────────┐ │
│ │ Odoo │─────▶│ Backend │─────▶│ Blockchain │ │
│ │ │ │ Service │ │ Service │ │
│ └──────────┘ └──────────────┘ └──────────────┘ │
│ │ │ │
│ ▼ ▼ │
│ ┌──────────────┐ ┌──────────────┐ │
│ │ Database │ │ Besu │ │
│ │ (Postgres) │ │ Nodes │ │
│ └──────────────┘ └──────────────┘ │
└─────────────────────────────────────────────────────────────┘
```
## Python Integration
### 1. Installing dependencies
```bash
pip install web3 eth-account python-dotenv
```
### 2. Blockchain Service
Create `backends/blockchain_service.py`:
```python
import os
import json
from typing import Dict, Optional, List
from web3 import Web3
from web3.middleware import geth_poa_middleware
from eth_account import Account
from dotenv import load_dotenv
load_dotenv()
class BlockchainService:
"""Service for interacting with Optovia blockchain"""
def __init__(self):
# Initialize Web3
self.w3 = Web3(Web3.HTTPProvider(os.getenv('BLOCKCHAIN_RPC_URL', 'http://localhost:8545')))
# Add PoA middleware for private networks
self.w3.middleware_onion.inject(geth_poa_middleware, layer=0)
# Load account from private key
private_key = os.getenv('BLOCKCHAIN_PRIVATE_KEY')
if private_key:
self.account = Account.from_key(private_key)
self.w3.eth.default_account = self.account.address
else:
raise ValueError("BLOCKCHAIN_PRIVATE_KEY not set")
# Load contract addresses and ABIs
self._load_contracts()
def _load_contracts(self):
"""Load contract addresses and ABIs"""
# Load deployment info
deployment_path = os.getenv(
'BLOCKCHAIN_DEPLOYMENT_FILE',
'blockchain/deployments/optovia-private.json'
)
with open(deployment_path) as f:
deployment = json.load(f)
# Load ShipmentTracker
self.shipment_tracker_address = deployment['contracts']['ShipmentTracker']['address']
with open('blockchain/artifacts/contracts/ShipmentTracker.sol/ShipmentTracker.json') as f:
abi = json.load(f)['abi']
self.shipment_tracker = self.w3.eth.contract(
address=self.shipment_tracker_address,
abi=abi
)
# Load ShipmentNFT
self.shipment_nft_address = deployment['contracts']['ShipmentNFT']['address']
with open('blockchain/artifacts/contracts/ShipmentNFT.sol/ShipmentNFT.json') as f:
abi = json.load(f)['abi']
self.shipment_nft = self.w3.eth.contract(
address=self.shipment_nft_address,
abi=abi
)
def create_shipment(
self,
external_id: str,
manufacturer_address: str,
manufacturer_name: str,
manufacturer_company_id: str,
buyer_address: str,
buyer_name: str,
buyer_company_id: str,
data_dict: Dict
) -> Dict:
"""
Create a shipment on blockchain
Args:
external_id: External reference ID (from Odoo)
manufacturer_address: Manufacturer wallet address
manufacturer_name: Manufacturer name
manufacturer_company_id: Manufacturer company ID
buyer_address: Buyer wallet address
buyer_name: Buyer name
buyer_company_id: Buyer company ID
data_dict: Dictionary with shipment data to hash
Returns:
Dict with transaction hash and shipment ID
"""
# Create data hash
data_str = json.dumps(data_dict, sort_keys=True)
data_hash = self.w3.keccak(text=data_str)
# Build transaction
tx = self.shipment_tracker.functions.createShipment(
external_id,
manufacturer_address,
manufacturer_name,
manufacturer_company_id,
buyer_address,
buyer_name,
buyer_company_id,
data_hash
).build_transaction({
'from': self.account.address,
'nonce': self.w3.eth.get_transaction_count(self.account.address),
'gas': 500000,
'gasPrice': self.w3.eth.gas_price
})
# Sign and send transaction
signed_tx = self.w3.eth.account.sign_transaction(tx, self.account.key)
tx_hash = self.w3.eth.send_raw_transaction(signed_tx.rawTransaction)
# Wait for receipt
receipt = self.w3.eth.wait_for_transaction_receipt(tx_hash)
# Get shipment ID from event
shipment_id = self.shipment_tracker.functions.getShipmentByExternalId(external_id).call()
return {
'tx_hash': tx_hash.hex(),
'shipment_id': shipment_id,
'block_number': receipt['blockNumber'],
'status': 'success' if receipt['status'] == 1 else 'failed'
}
def update_logistics_info(
self,
shipment_id: int,
logistics_address: str,
logistics_name: str,
logistics_company_id: str,
data_dict: Dict
) -> Dict:
"""Update logistics information for a shipment"""
data_hash = self.w3.keccak(text=json.dumps(data_dict, sort_keys=True))
tx = self.shipment_tracker.functions.updateLogisticsInfo(
shipment_id,
logistics_address,
logistics_name,
logistics_company_id,
data_hash
).build_transaction({
'from': self.account.address,
'nonce': self.w3.eth.get_transaction_count(self.account.address),
'gas': 300000,
'gasPrice': self.w3.eth.gas_price
})
signed_tx = self.w3.eth.account.sign_transaction(tx, self.account.key)
tx_hash = self.w3.eth.send_raw_transaction(signed_tx.rawTransaction)
receipt = self.w3.eth.wait_for_transaction_receipt(tx_hash)
return {
'tx_hash': tx_hash.hex(),
'status': 'success' if receipt['status'] == 1 else 'failed'
}
def update_shipment_status(self, shipment_id: int, status: int) -> Dict:
"""
Update shipment status
Status values:
0 - Created
1 - ManufacturerConfirmed
2 - InTransit
3 - Delivered
4 - Cancelled
"""
tx = self.shipment_tracker.functions.updateShipmentStatus(
shipment_id,
status
).build_transaction({
'from': self.account.address,
'nonce': self.w3.eth.get_transaction_count(self.account.address),
'gas': 200000,
'gasPrice': self.w3.eth.gas_price
})
signed_tx = self.w3.eth.account.sign_transaction(tx, self.account.key)
tx_hash = self.w3.eth.send_raw_transaction(signed_tx.rawTransaction)
receipt = self.w3.eth.wait_for_transaction_receipt(tx_hash)
return {
'tx_hash': tx_hash.hex(),
'status': 'success' if receipt['status'] == 1 else 'failed'
}
def add_document(self, shipment_id: int, document_hash: str) -> Dict:
"""Add a document (IPFS hash) to a shipment"""
tx = self.shipment_tracker.functions.addDocument(
shipment_id,
document_hash
).build_transaction({
'from': self.account.address,
'nonce': self.w3.eth.get_transaction_count(self.account.address),
'gas': 200000,
'gasPrice': self.w3.eth.gas_price
})
signed_tx = self.w3.eth.account.sign_transaction(tx, self.account.key)
tx_hash = self.w3.eth.send_raw_transaction(signed_tx.rawTransaction)
receipt = self.w3.eth.wait_for_transaction_receipt(tx_hash)
return {
'tx_hash': tx_hash.hex(),
'status': 'success' if receipt['status'] == 1 else 'failed'
}
def get_shipment(self, shipment_id: int) -> Dict:
"""Get shipment details from blockchain"""
result = self.shipment_tracker.functions.getShipment(shipment_id).call()
return {
'id': result[0],
'external_id': result[1],
'status': result[2],
'manufacturer': {
'address': result[3][0],
'name': result[3][1],
'company_id': result[3][2],
'timestamp': result[3][3],
'data_hash': result[3][4].hex()
},
'buyer': {
'address': result[4][0],
'name': result[4][1],
'company_id': result[4][2],
'timestamp': result[4][3],
'data_hash': result[4][4].hex()
},
'logistics': {
'address': result[5][0],
'name': result[5][1],
'company_id': result[5][2],
'timestamp': result[5][3],
'data_hash': result[5][4].hex()
},
'created_at': result[6],
'updated_at': result[7],
'data_hash': result[8].hex()
}
def get_shipment_by_external_id(self, external_id: str) -> Optional[int]:
"""Get shipment ID by external ID"""
try:
return self.shipment_tracker.functions.getShipmentByExternalId(external_id).call()
except Exception:
return None
def mint_nft_certificate(
self,
recipient_address: str,
shipment_external_id: str,
manufacturer_address: str,
buyer_address: str,
logistics_address: str,
data_hash: bytes,
private_tx_hash: str,
token_uri: str
) -> Dict:
"""Mint NFT certificate on public chain"""
tx = self.shipment_nft.functions.mintShipmentCertificate(
recipient_address,
shipment_external_id,
manufacturer_address,
buyer_address,
logistics_address,
data_hash,
private_tx_hash,
token_uri
).build_transaction({
'from': self.account.address,
'nonce': self.w3.eth.get_transaction_count(self.account.address),
'gas': 500000,
'gasPrice': self.w3.eth.gas_price
})
signed_tx = self.w3.eth.account.sign_transaction(tx, self.account.key)
tx_hash = self.w3.eth.send_raw_transaction(signed_tx.rawTransaction)
receipt = self.w3.eth.wait_for_transaction_receipt(tx_hash)
return {
'tx_hash': tx_hash.hex(),
'status': 'success' if receipt['status'] == 1 else 'failed',
'block_number': receipt['blockNumber']
}
```
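A short usage sketch of the service above; all IDs, addresses, and payload values are placeholders:
```python
# Hypothetical caller code; values are placeholders
service = BlockchainService()

result = service.create_shipment(
    external_id="SHIP-001",
    manufacturer_address="0x0000000000000000000000000000000000000001",
    manufacturer_name="Manufacturer A",
    manufacturer_company_id="MFG-001",
    buyer_address="0x0000000000000000000000000000000000000002",
    buyer_name="Buyer B",
    buyer_company_id="BYR-001",
    data_dict={"weight_kg": 1200, "incoterms": "FOB"},
)
print(result["tx_hash"], result["shipment_id"])

# Later, mark the shipment as InTransit (status=2)
service.update_shipment_status(result["shipment_id"], status=2)
```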
### 3. Django Integration
Create a Django app for the blockchain:
```bash
cd backends/odoo
python manage.py startapp blockchain
```
In `backends/odoo/blockchain/models.py`:
```python
from django.db import models

class BlockchainShipment(models.Model):
    """Track blockchain shipments"""

    STATUS_CHOICES = [
        (0, 'Created'),
        (1, 'Manufacturer Confirmed'),
        (2, 'In Transit'),
        (3, 'Delivered'),
        (4, 'Cancelled'),
    ]

    # Odoo reference
    odoo_shipment_id = models.CharField(max_length=100, unique=True)

    # Blockchain data
    blockchain_id = models.BigIntegerField(null=True, blank=True)
    tx_hash = models.CharField(max_length=66, blank=True)
    block_number = models.BigIntegerField(null=True, blank=True)
    status = models.IntegerField(choices=STATUS_CHOICES, default=0)

    # Timestamps
    created_at = models.DateTimeField(auto_now_add=True)
    updated_at = models.DateTimeField(auto_now=True)
    synced_at = models.DateTimeField(null=True, blank=True)

    # NFT data (if minted on public chain)
    nft_minted = models.BooleanField(default=False)
    nft_tx_hash = models.CharField(max_length=66, blank=True)
    nft_token_id = models.BigIntegerField(null=True, blank=True)

    class Meta:
        db_table = 'blockchain_shipment'
        ordering = ['-created_at']

    def __str__(self):
        return f"Shipment {self.odoo_shipment_id} (Blockchain ID: {self.blockchain_id})"
```
### 4. API Endpoint
In `backends/odoo/blockchain/views.py`:
```python
from rest_framework.decorators import api_view
from rest_framework.response import Response
from rest_framework import status

from .models import BlockchainShipment
from blockchain_service import BlockchainService

@api_view(['POST'])
def create_blockchain_shipment(request):
    """Create shipment on blockchain"""
    blockchain_service = BlockchainService()
    try:
        result = blockchain_service.create_shipment(
            external_id=request.data['external_id'],
            manufacturer_address=request.data['manufacturer_address'],
            manufacturer_name=request.data['manufacturer_name'],
            manufacturer_company_id=request.data['manufacturer_company_id'],
            buyer_address=request.data['buyer_address'],
            buyer_name=request.data['buyer_name'],
            buyer_company_id=request.data['buyer_company_id'],
            data_dict=request.data.get('metadata', {})
        )
        # Save to database
        BlockchainShipment.objects.create(
            odoo_shipment_id=request.data['external_id'],
            blockchain_id=result['shipment_id'],
            tx_hash=result['tx_hash'],
            block_number=result['block_number'],
            status=0
        )
        return Response(result, status=status.HTTP_201_CREATED)
    except Exception as e:
        return Response({'error': str(e)}, status=status.HTTP_400_BAD_REQUEST)
```
## Docker Integration
Add this to the main `docker-compose.yml`:
```yaml
services:
  # ... existing services

  backend:
    # ... existing config
    environment:
      - BLOCKCHAIN_RPC_URL=http://besu-node1:8545
      - BLOCKCHAIN_CHAIN_ID=1337
      - BLOCKCHAIN_PRIVATE_KEY=${BLOCKCHAIN_PRIVATE_KEY}
      - BLOCKCHAIN_DEPLOYMENT_FILE=/app/blockchain/deployments/optovia-private.json
    volumes:
      - ./blockchain:/app/blockchain:ro
    networks:
      - default
      - optovia-blockchain

networks:
  optovia-blockchain:
    external: true
```
## Environment Variables
Add these to Infisical or `.env`:
```bash
# Blockchain Configuration
BLOCKCHAIN_RPC_URL=http://besu-node1:8545
BLOCKCHAIN_CHAIN_ID=1337
BLOCKCHAIN_PRIVATE_KEY=<secure_private_key>
BLOCKCHAIN_DEPLOYMENT_FILE=blockchain/deployments/optovia-private.json
# Public Chain (for NFT minting)
POLYGON_RPC_URL=https://polygon-rpc.com
POLYGON_CHAIN_ID=137
```
## Workflow Example
### 1. Creating a shipment in Odoo
```python
# In the Odoo module
shipment = create_shipment_in_odoo(...)
# Send it to the blockchain
blockchain_result = blockchain_service.create_shipment(
    external_id=shipment.id,
    manufacturer_address=shipment.manufacturer.wallet,
    # ... other data
)
```
### 2. Updating the status
```python
# When the status changes in Odoo
blockchain_service.update_shipment_status(
    shipment_id=blockchain_shipment.blockchain_id,
    status=2  # InTransit
)
```
### 3. Minting the NFT on delivery
```python
# After successful delivery
if shipment.status == 'delivered':
    blockchain_service.mint_nft_certificate(
        recipient_address=buyer.wallet,
        shipment_external_id=shipment.id,
        # ... other data
    )
```
## Next Steps
1. Add task queues (Celery) for asynchronous processing
2. Set up IPFS for document storage
3. Create webhooks for blockchain events
4. Add transaction monitoring

149
blockchain/QUICKSTART.md Normal file

@@ -0,0 +1,149 @@
# Quick Start
This guide helps you spin up the blockchain infrastructure quickly.
## Step 1: Install dependencies
```bash
cd blockchain
npm install
```
## Step 2: Configure environment variables
```bash
cp .env.example .env
```
For development you can use a test private key (NOT for production!):
```
PRIVATE_KEY=0xac0974bec39a17e36ba4a6b4d238ff944bacb478cbed5efcae784d7bf4f2ff80
```
## Step 3: Start the private blockchain network
```bash
cd docker
docker-compose up -d
```
Wait 10-15 seconds for the nodes to initialize.
## Step 4: Check the network status
```bash
curl -X POST --data '{"jsonrpc":"2.0","method":"eth_blockNumber","params":[],"id":1}' http://localhost:8545
```
The call should return the block number in hex format.
## Step 5: Compile the contracts
```bash
npm run compile
```
## Step 6: Deploy the contracts
```bash
npm run deploy:private -- --network optovia
```
The result is saved to `deployments/optovia-private.json`.
## Step 7: Verify the deployment
Open the block explorer: http://localhost:4000
You should see the contract deployment transactions.
## Step 8: Run the tests (optional)
```bash
# Start a local Hardhat node in a separate terminal
npm run node
# Run the tests in another terminal
npm test
```
## Quick functionality check
### Creating a test shipment
Create `scripts/test-shipment.js`:
```javascript
const { ethers } = require("hardhat");
const deployment = require("../deployments/optovia-private.json");
async function main() {
const [signer] = await ethers.getSigners();
const shipmentTracker = await ethers.getContractAt(
"ShipmentTracker",
deployment.contracts.ShipmentTracker.address,
signer
);
console.log("Creating test shipment...");
const tx = await shipmentTracker.createShipment(
"TEST-" + Date.now(),
signer.address,
"Test Manufacturer",
"MFG-001",
signer.address,
"Test Buyer",
"BYR-001",
ethers.keccak256(ethers.toUtf8Bytes("test data"))
);
const receipt = await tx.wait();
console.log("Shipment created! TX:", receipt.hash);
const totalShipments = await shipmentTracker.getTotalShipments();
console.log("Total shipments:", totalShipments.toString());
}
main().catch(console.error);
```
Run it through Hardhat so the `--network` flag is honored (plain `node` would ignore it):
```bash
npx hardhat run scripts/test-shipment.js --network optovia
```
## Next steps
1. Read the full documentation in [README.md](README.md)
2. Review the contract APIs
3. Integrate with your backend (see the "Platform Integration" section in the README)
## Stopping the network
```bash
cd docker
docker-compose down
```
For a full cleanup (deletes blockchain data):
```bash
docker-compose down -v
rm -rf besu/node1 besu/node2
```
## Troubleshooting
### Ports already in use
If ports 8545, 8546, or 4000 are already taken, change the port mappings in `docker-compose.yml`.
### Contracts fail to deploy
1. Check that the Besu nodes are running: `docker ps`
2. Check the logs: `docker logs optovia-besu-node1`
3. Make sure `.env` contains a valid PRIVATE_KEY
### BlockScout is not working
BlockScout can take 1-2 minutes to initialize. Check the logs:
```bash
docker logs optovia-blockscout
```

374
blockchain/README.md Normal file

@@ -0,0 +1,374 @@
# Optovia Blockchain Infrastructure
Blockchain infrastructure for tracking shipments on a private network.
## Architecture
### Private Network (Optovia Private Network)
- **Platform**: Hyperledger Besu
- **Consensus**: Clique (PoA - Proof of Authority)
- **Chain ID**: 1337
- **Compatibility**: EVM (Ethereum Virtual Machine)
- **Purpose**: storing complete shipment data involving the manufacturer, buyer, and logistics provider
## Contracts
### ShipmentTracker.sol
The main contract for tracking shipments on the private network.
**Features:**
- Creating shipment records
- Managing information from the manufacturer, buyer, and logistics provider
- Tracking shipment status
- Storing document hashes (IPFS)
- Role-based access control (RBAC)
- Upgradeable (UUPS pattern)
**Roles:**
- `ADMIN_ROLE` - contract administrator
- `MANUFACTURER_ROLE` - manufacturer
- `BUYER_ROLE` - buyer
- `LOGISTICS_ROLE` - logistics provider
**Shipment statuses:**
- `Created` - created
- `ManufacturerConfirmed` - confirmed by the manufacturer
- `InTransit` - in transit
- `Delivered` - delivered
- `Cancelled` - cancelled
## Installation
### Requirements
- Node.js >= 18.x
- Docker and Docker Compose
- Git
### Installing dependencies
```bash
cd blockchain
npm install
```
### Configuring environment variables
```bash
cp .env.example .env
# Edit the .env file and add the required keys
```
## Starting the Private Network
### Starting the Besu nodes
```bash
cd docker
docker-compose up -d
```
This starts:
- **besu-node1** - validator on ports 8545 (RPC), 8546 (WS), 8547 (GraphQL)
- **besu-node2** - validator on port 8555
- **blockscout** - block explorer on port 4000
- **postgres** - database for BlockScout
### Checking network status
```bash
# Check the client version
curl -X POST --data '{"jsonrpc":"2.0","method":"web3_clientVersion","params":[],"id":1}' http://localhost:8545
# Check the block number
curl -X POST --data '{"jsonrpc":"2.0","method":"eth_blockNumber","params":[],"id":1}' http://localhost:8545
# Check accounts
curl -X POST --data '{"jsonrpc":"2.0","method":"eth_accounts","params":[],"id":1}' http://localhost:8545
```
### Block Explorer
Open http://localhost:4000 to browse blocks and transactions.
## Deploying the Contracts
### On the local development network (Hardhat)
```bash
# Start a local Hardhat node
npm run node
# Deploy the contracts in another terminal
npm run deploy:local
```
### On the Optovia private network
```bash
# Make sure the Besu nodes are running
npm run deploy:private -- --network optovia
```
### On a test network (Mumbai/Sepolia)
```bash
# Set PRIVATE_KEY in .env
npm run deploy:testnet -- --network mumbai
# or
npm run deploy:testnet -- --network sepolia
```
## Compiling and Testing
### Compiling the contracts
```bash
npm run compile
```
### Running the tests
```bash
npm test
```
### Code coverage
```bash
npm run coverage
```
## Using the Contracts
### Creating a shipment
```javascript
const shipmentId = await shipmentTracker.createShipment(
"SHIP-001", // externalId
manufacturerAddress, // manufacturer address
"Manufacturer A", // manufacturer name
"MFG-001", // manufacturer company ID
buyerAddress, // buyer address
"Buyer B", // buyer name
"BYR-001", // buyer company ID
ethers.keccak256(data) // data hash
);
```
### Updating logistics information
```javascript
await shipmentTracker.updateLogisticsInfo(
shipmentId,
logisticsAddress,
"Logistics Provider C",
"LOG-001",
ethers.keccak256(logisticsData)
);
```
### Updating the status
```javascript
// The manufacturer confirms the shipment
await shipmentTracker.connect(manufacturer)
.updateShipmentStatus(shipmentId, 1); // ManufacturerConfirmed
// The logistics provider marks the start of delivery
await shipmentTracker.connect(logistics)
.updateShipmentStatus(shipmentId, 2); // InTransit
// The logistics provider/buyer confirms delivery
await shipmentTracker.connect(logistics)
.updateShipmentStatus(shipmentId, 3); // Delivered
```
### Adding documents
```javascript
await shipmentTracker.addDocument(
shipmentId,
"QmHash123..." // IPFS hash
);
```
### Minting the NFT certificate
```javascript
// On the public network
await shipmentNFT.mintShipmentCertificate(
buyerAddress, // recipient
"SHIP-001", // external shipment ID
manufacturerAddress,
buyerAddress,
logisticsAddress,
dataHash, // hash from private chain
privateTxHash, // transaction hash from private chain
"ipfs://QmMetadata..." // token metadata URI
);
```
## Platform Integration
### 1. Backend Integration (Python/Django)
Create a service for interacting with the blockchain:
```python
# blockchain/service.py
from web3 import Web3
import json

class BlockchainService:
    def __init__(self):
        self.w3 = Web3(Web3.HTTPProvider('http://besu-node1:8545'))
        with open('deployments/optovia-private.json') as f:
            deployment = json.load(f)
        self.shipment_tracker_address = deployment['contracts']['ShipmentTracker']['address']
        with open('artifacts/contracts/ShipmentTracker.sol/ShipmentTracker.json') as f:
            abi = json.load(f)['abi']
        self.shipment_tracker = self.w3.eth.contract(
            address=self.shipment_tracker_address,
            abi=abi
        )

    def create_shipment(self, shipment_data):
        # Implement shipment creation
        pass

    def update_status(self, shipment_id, status):
        # Implement status update
        pass
```
### 2. Environment variable configuration
```bash
# Backend .env
BLOCKCHAIN_RPC_URL=http://besu-node1:8545
BLOCKCHAIN_CHAIN_ID=1337
BLOCKCHAIN_PRIVATE_KEY=<platform_service_private_key>
SHIPMENT_TRACKER_ADDRESS=<deployed_contract_address>
SHIPMENT_NFT_ADDRESS=<deployed_nft_contract_address>
```
### 3. Adding to Docker Compose
```yaml
# In the main docker-compose.yml
networks:
  optovia-blockchain:
    external: true

services:
  backend:
    # ...
    networks:
      - optovia-blockchain
    environment:
      - BLOCKCHAIN_RPC_URL=http://besu-node1:8545
```
## Project Structure
```
blockchain/
├── contracts/              # Smart contracts
│   ├── ShipmentTracker.sol
│   └── ShipmentNFT.sol
├── scripts/                # Deployment scripts
│   ├── deploy-local.js
│   ├── deploy-private.js
│   └── deploy-testnet.js
├── test/                   # Tests
│   └── ShipmentTracker.test.js
├── docker/                 # Docker configuration
│   ├── docker-compose.yml
│   └── besu/
│       └── genesis.json
├── deployments/            # Deployment information
├── hardhat.config.js       # Hardhat configuration
├── package.json
└── README.md
```
## Security
### Private keys
- NEVER commit the `.env` file
- Use secure storage for production keys (Infisical, Vault)
- Use test accounts for development
### Contracts
- Upgradeable pattern (UUPS) is used
- Role-based access control (AccessControl)
- Pause function for emergencies
### Network
- The private network is reachable only inside the Docker network
- RPC endpoints must not be publicly accessible
- Use a firewall to restrict access
## Monitoring
### Besu logs
```bash
docker logs -f optovia-besu-node1
docker logs -f optovia-besu-node2
```
### Metrics
Besu exposes Prometheus metrics on port 9545 (needs to be enabled):
```bash
# Add to docker-compose.yml
--metrics-enabled
--metrics-host=0.0.0.0
--metrics-port=9545
```
## Backup
### Backing up blockchain data
```bash
# Stop the nodes
docker-compose down
# Create a backup
tar -czf besu-backup-$(date +%Y%m%d).tar.gz docker/besu/node1 docker/besu/node2
# Start the nodes
docker-compose up -d
```
## Roadmap
### Phase 1 (current)
- ✅ Private EVM-compatible network
- ✅ Shipment tracking contract
- ✅ NFT contract for public minting
- ✅ Docker infrastructure
### Phase 2
- [ ] Odoo integration
- [ ] API for interacting with the contracts
- [ ] IPFS integration for documents
- [ ] Bridge between the private and public networks
### Phase 3
- [ ] Oracles for external data
- [ ] Automatic status transitions
- [ ] IoT device integration
- [ ] Mobile SDK for manufacturers/buyers
## Support
Open issues in the repository for questions and suggestions.
## License
MIT


@@ -0,0 +1,128 @@
// SPDX-License-Identifier: MIT
pragma solidity ^0.8.19;
/**
* @title Flight Confirmation Contract
* @dev A contract for confirming flights on the blockchain with signatures from multiple parties
*/
contract FlightConfirmation {
struct FlightDetails {
uint256 departureTime;
uint256 arrivalTime;
string departureAirport;
string arrivalAirport;
string flightNumber;
}
struct Confirmation {
address manufacturer;
address logisticsProvider;
address buyer;
bool manufacturerConfirmed;
bool logisticsConfirmed;
bool buyerConfirmed;
uint256 timestamp;
bytes32 flightHash;
}
mapping(bytes32 => Confirmation) public confirmations;
mapping(bytes32 => FlightDetails) public flightDetails;
event FlightConfirmed(
bytes32 indexed flightId,
address confirmer,
uint256 timestamp
);
event FlightCreated(
bytes32 indexed flightId,
string flightNumber,
uint256 departureTime
);
/**
* @dev Stores flight details
*/
function storeFlightDetails(
bytes32 flightId,
uint256 departureTime,
uint256 arrivalTime,
string memory departureAirport,
string memory arrivalAirport,
string memory flightNumber
) external {
flightDetails[flightId] = FlightDetails({
departureTime: departureTime,
arrivalTime: arrivalTime,
departureAirport: departureAirport,
arrivalAirport: arrivalAirport,
flightNumber: flightNumber
});
emit FlightCreated(flightId, flightNumber, departureTime);
}
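    // NOTE: the confirmation functions below perform no access control or
    // existence checks; any caller can record a confirmation for any flightId.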
/**
* @dev Manufacturer confirms flight
*/
function confirmFlightAsManufacturer(bytes32 flightId) external {
confirmations[flightId].manufacturer = msg.sender;
confirmations[flightId].manufacturerConfirmed = true;
confirmations[flightId].timestamp = block.timestamp;
confirmations[flightId].flightHash = flightId;
emit FlightConfirmed(flightId, msg.sender, block.timestamp);
}
/**
* @dev Logistics provider confirms flight
*/
function confirmFlightAsLogistics(bytes32 flightId) external {
confirmations[flightId].logisticsProvider = msg.sender;
confirmations[flightId].logisticsConfirmed = true;
confirmations[flightId].timestamp = block.timestamp;
confirmations[flightId].flightHash = flightId;
emit FlightConfirmed(flightId, msg.sender, block.timestamp);
}
/**
* @dev Buyer confirms flight
*/
function confirmFlightAsBuyer(bytes32 flightId) external {
confirmations[flightId].buyer = msg.sender;
confirmations[flightId].buyerConfirmed = true;
confirmations[flightId].timestamp = block.timestamp;
confirmations[flightId].flightHash = flightId;
emit FlightConfirmed(flightId, msg.sender, block.timestamp);
}
/**
* @dev Check if flight is confirmed by all parties
*/
function isFlightFullyConfirmed(bytes32 flightId) external view returns (bool) {
Confirmation memory confirmation = confirmations[flightId];
return confirmation.manufacturerConfirmed &&
confirmation.logisticsConfirmed &&
confirmation.buyerConfirmed;
}
/**
* @dev Get confirmation status for a flight
*/
function getConfirmationStatus(bytes32 flightId) external view returns (
bool manufacturerConfirmed,
bool logisticsConfirmed,
bool buyerConfirmed,
uint256 timestamp
) {
Confirmation memory confirmation = confirmations[flightId];
return (
confirmation.manufacturerConfirmed,
confirmation.logisticsConfirmed,
confirmation.buyerConfirmed,
confirmation.timestamp
);
}
}

View File

@@ -0,0 +1,398 @@
// SPDX-License-Identifier: MIT
pragma solidity ^0.8.20;
import "@openzeppelin/contracts-upgradeable/access/AccessControlUpgradeable.sol";
import "@openzeppelin/contracts-upgradeable/proxy/utils/Initializable.sol";
import "@openzeppelin/contracts-upgradeable/proxy/utils/UUPSUpgradeable.sol";
import "@openzeppelin/contracts-upgradeable/utils/PausableUpgradeable.sol";
/**
* @title ShipmentTracker
* @dev Smart contract for tracking shipments with manufacturer, buyer, and logistics information
* @notice This contract is upgradeable and uses role-based access control
*/
contract ShipmentTracker is
Initializable,
AccessControlUpgradeable,
UUPSUpgradeable,
PausableUpgradeable
{
// Role definitions
bytes32 public constant ADMIN_ROLE = keccak256("ADMIN_ROLE");
bytes32 public constant MANUFACTURER_ROLE = keccak256("MANUFACTURER_ROLE");
bytes32 public constant BUYER_ROLE = keccak256("BUYER_ROLE");
bytes32 public constant LOGISTICS_ROLE = keccak256("LOGISTICS_ROLE");
bytes32 public constant UPGRADER_ROLE = keccak256("UPGRADER_ROLE");
// Shipment status enum
enum ShipmentStatus {
Created,
ManufacturerConfirmed,
InTransit,
Delivered,
Cancelled
}
// Party information structure
struct PartyInfo {
address walletAddress;
string name;
string companyId;
uint256 timestamp;
bytes32 dataHash; // Hash of additional off-chain data
}
// Shipment structure
struct Shipment {
uint256 shipmentId;
string externalId; // External reference ID (from Odoo, etc.)
PartyInfo manufacturer;
PartyInfo buyer;
PartyInfo logistics;
ShipmentStatus status;
uint256 createdAt;
uint256 updatedAt;
string[] documents; // IPFS hashes or URLs of documents
bytes32 dataHash; // Hash of shipment data for integrity
bool exists;
}
// Storage
mapping(uint256 => Shipment) private shipments;
mapping(string => uint256) private externalIdToShipmentId;
uint256 private shipmentCounter;
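    // shipmentCounter starts at 1 (set in initialize), so a zero value in
    // externalIdToShipmentId reliably means "no such shipment"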
// Events
event ShipmentCreated(
uint256 indexed shipmentId,
string externalId,
address indexed manufacturer,
address indexed buyer,
uint256 timestamp
);
event ShipmentUpdated(
uint256 indexed shipmentId,
ShipmentStatus status,
address updatedBy,
uint256 timestamp
);
event ManufacturerInfoUpdated(
uint256 indexed shipmentId,
address indexed manufacturer,
bytes32 dataHash,
uint256 timestamp
);
event BuyerInfoUpdated(
uint256 indexed shipmentId,
address indexed buyer,
bytes32 dataHash,
uint256 timestamp
);
event LogisticsInfoUpdated(
uint256 indexed shipmentId,
address indexed logistics,
bytes32 dataHash,
uint256 timestamp
);
event DocumentAdded(
uint256 indexed shipmentId,
string documentHash,
address addedBy,
uint256 timestamp
);
/// @custom:oz-upgrades-unsafe-allow constructor
constructor() {
_disableInitializers();
}
/**
* @dev Initialize the contract
* @param _admin Admin address
*/
function initialize(address _admin) public initializer {
__AccessControl_init();
__UUPSUpgradeable_init();
__Pausable_init();
_grantRole(DEFAULT_ADMIN_ROLE, _admin);
_grantRole(ADMIN_ROLE, _admin);
_grantRole(UPGRADER_ROLE, _admin);
shipmentCounter = 1;
}
/**
* @dev Create a new shipment
* @param externalId External reference ID
* @param manufacturerAddress Manufacturer wallet address
* @param manufacturerName Manufacturer name
* @param manufacturerCompanyId Manufacturer company ID
* @param buyerAddress Buyer wallet address
* @param buyerName Buyer name
* @param buyerCompanyId Buyer company ID
* @param dataHash Hash of shipment data
*/
function createShipment(
string memory externalId,
address manufacturerAddress,
string memory manufacturerName,
string memory manufacturerCompanyId,
address buyerAddress,
string memory buyerName,
string memory buyerCompanyId,
bytes32 dataHash
) external whenNotPaused returns (uint256) {
require(
hasRole(ADMIN_ROLE, msg.sender) ||
hasRole(MANUFACTURER_ROLE, msg.sender),
"Not authorized to create shipment"
);
require(externalIdToShipmentId[externalId] == 0, "Shipment with this external ID already exists");
require(manufacturerAddress != address(0), "Invalid manufacturer address");
require(buyerAddress != address(0), "Invalid buyer address");
uint256 shipmentId = shipmentCounter++;
Shipment storage newShipment = shipments[shipmentId];
newShipment.shipmentId = shipmentId;
newShipment.externalId = externalId;
newShipment.status = ShipmentStatus.Created;
newShipment.createdAt = block.timestamp;
newShipment.updatedAt = block.timestamp;
newShipment.dataHash = dataHash;
newShipment.exists = true;
// Set manufacturer info
newShipment.manufacturer = PartyInfo({
walletAddress: manufacturerAddress,
name: manufacturerName,
companyId: manufacturerCompanyId,
timestamp: block.timestamp,
dataHash: dataHash
});
// Set buyer info
newShipment.buyer = PartyInfo({
walletAddress: buyerAddress,
name: buyerName,
companyId: buyerCompanyId,
timestamp: block.timestamp,
dataHash: dataHash
});
// Initialize empty logistics info
newShipment.logistics = PartyInfo({
walletAddress: address(0),
name: "",
companyId: "",
timestamp: 0,
dataHash: bytes32(0)
});
externalIdToShipmentId[externalId] = shipmentId;
emit ShipmentCreated(
shipmentId,
externalId,
manufacturerAddress,
buyerAddress,
block.timestamp
);
return shipmentId;
}
/**
* @dev Update logistics information for a shipment
* @param shipmentId Shipment ID
* @param logisticsAddress Logistics provider wallet address
* @param logisticsName Logistics provider name
* @param logisticsCompanyId Logistics provider company ID
* @param dataHash Hash of logistics data
*/
function updateLogisticsInfo(
uint256 shipmentId,
address logisticsAddress,
string memory logisticsName,
string memory logisticsCompanyId,
bytes32 dataHash
) external whenNotPaused {
require(shipments[shipmentId].exists, "Shipment does not exist");
require(
hasRole(ADMIN_ROLE, msg.sender) ||
hasRole(LOGISTICS_ROLE, msg.sender),
"Not authorized to update logistics info"
);
require(logisticsAddress != address(0), "Invalid logistics address");
Shipment storage shipment = shipments[shipmentId];
shipment.logistics = PartyInfo({
walletAddress: logisticsAddress,
name: logisticsName,
companyId: logisticsCompanyId,
timestamp: block.timestamp,
dataHash: dataHash
});
shipment.updatedAt = block.timestamp;
emit LogisticsInfoUpdated(shipmentId, logisticsAddress, dataHash, block.timestamp);
}
/**
* @dev Update shipment status
* @param shipmentId Shipment ID
* @param newStatus New status
*/
function updateShipmentStatus(
uint256 shipmentId,
ShipmentStatus newStatus
) external whenNotPaused {
require(shipments[shipmentId].exists, "Shipment does not exist");
Shipment storage shipment = shipments[shipmentId];
// Check authorization based on status change
if (newStatus == ShipmentStatus.ManufacturerConfirmed) {
require(
hasRole(MANUFACTURER_ROLE, msg.sender) ||
hasRole(ADMIN_ROLE, msg.sender) ||
msg.sender == shipment.manufacturer.walletAddress,
"Not authorized"
);
} else if (newStatus == ShipmentStatus.InTransit) {
require(
hasRole(LOGISTICS_ROLE, msg.sender) ||
hasRole(ADMIN_ROLE, msg.sender) ||
msg.sender == shipment.logistics.walletAddress,
"Not authorized"
);
} else if (newStatus == ShipmentStatus.Delivered) {
require(
hasRole(LOGISTICS_ROLE, msg.sender) ||
hasRole(BUYER_ROLE, msg.sender) ||
hasRole(ADMIN_ROLE, msg.sender) ||
msg.sender == shipment.logistics.walletAddress ||
msg.sender == shipment.buyer.walletAddress,
"Not authorized"
);
} else {
require(hasRole(ADMIN_ROLE, msg.sender), "Not authorized");
}
shipment.status = newStatus;
shipment.updatedAt = block.timestamp;
emit ShipmentUpdated(shipmentId, newStatus, msg.sender, block.timestamp);
}
/**
* @dev Add a document to a shipment
* @param shipmentId Shipment ID
* @param documentHash IPFS hash or document reference
*/
function addDocument(
uint256 shipmentId,
string memory documentHash
) external whenNotPaused {
require(shipments[shipmentId].exists, "Shipment does not exist");
require(
hasRole(ADMIN_ROLE, msg.sender) ||
hasRole(MANUFACTURER_ROLE, msg.sender) ||
hasRole(BUYER_ROLE, msg.sender) ||
hasRole(LOGISTICS_ROLE, msg.sender),
"Not authorized to add documents"
);
Shipment storage shipment = shipments[shipmentId];
shipment.documents.push(documentHash);
shipment.updatedAt = block.timestamp;
emit DocumentAdded(shipmentId, documentHash, msg.sender, block.timestamp);
}
/**
* @dev Get shipment details
* @param shipmentId Shipment ID
*/
function getShipment(uint256 shipmentId) external view returns (
uint256 id,
string memory externalId,
ShipmentStatus status,
PartyInfo memory manufacturer,
PartyInfo memory buyer,
PartyInfo memory logistics,
uint256 createdAt,
uint256 updatedAt,
bytes32 dataHash
) {
require(shipments[shipmentId].exists, "Shipment does not exist");
Shipment storage shipment = shipments[shipmentId];
return (
shipment.shipmentId,
shipment.externalId,
shipment.status,
shipment.manufacturer,
shipment.buyer,
shipment.logistics,
shipment.createdAt,
shipment.updatedAt,
shipment.dataHash
);
}
/**
* @dev Get shipment by external ID
* @param externalId External reference ID
*/
function getShipmentByExternalId(string memory externalId) external view returns (uint256) {
uint256 shipmentId = externalIdToShipmentId[externalId];
require(shipmentId != 0, "Shipment not found");
return shipmentId;
}
/**
* @dev Get shipment documents
* @param shipmentId Shipment ID
*/
function getShipmentDocuments(uint256 shipmentId) external view returns (string[] memory) {
require(shipments[shipmentId].exists, "Shipment does not exist");
return shipments[shipmentId].documents;
}
/**
* @dev Pause the contract
*/
function pause() external onlyRole(ADMIN_ROLE) {
_pause();
}
/**
* @dev Unpause the contract
*/
function unpause() external onlyRole(ADMIN_ROLE) {
_unpause();
}
/**
* @dev Required override for UUPS upgrades
*/
function _authorizeUpgrade(address newImplementation)
internal
onlyRole(UPGRADER_ROLE)
override
{}
/**
* @dev Get total number of shipments
*/
function getTotalShipments() external view returns (uint256) {
return shipmentCounter - 1;
}
}

View File

@@ -0,0 +1,140 @@
// SPDX-License-Identifier: MIT
pragma solidity ^0.8.19;
import "./FlightConfirmation.sol";
/**
* @title Signature Verification System
* @dev A contract for verifying digital signatures from multiple parties
*/
contract SignatureVerification {
// Event emitted when a signature is verified
event SignatureVerified(
bytes32 indexed flightId,
address signer,
uint8 partyType, // 0: Manufacturer, 1: Logistics Provider, 2: Buyer
bytes32 messageHash
);
/**
* @dev Verifies an ECDSA signature
* @param _message The original message that was signed
* @param _signature The signature to verify
* @return signer The address that signed the message
*/
function recoverSigner(bytes32 _message, bytes memory _signature)
internal
pure
returns (address signer)
{
require(_signature.length == 65, "Invalid signature length");
bytes32 r;
bytes32 s;
uint8 v;
assembly {
r := mload(add(_signature, 32))
s := mload(add(_signature, 64))
v := byte(0, mload(add(_signature, 96)))
}
// Adjust v value if needed
if (v < 27) {
v += 27;
}
require(v == 27 || v == 28, "Invalid v value");
signer = ecrecover(_message, v, r, s);
require(signer != address(0), "ECDSA: Invalid signature");
}
/**
* @dev Verifies a signature for a specific party type for a flight
* @param _flightId Unique identifier for the flight
* @param _partyType Type of the party (0: Manufacturer, 1: Logistics, 2: Buyer)
* @param _message The message to verify
* @param _signature The signature to verify
* @return isValid Whether the signature is valid
* @return recoveredSigner The address of the signer
*/
function verifyFlightSignature(
bytes32 _flightId,
uint8 _partyType,
bytes32 _message,
bytes memory _signature
)
public
        // not `pure`/`view`: this function emits SignatureVerified below,
        // which Solidity rejects in pure/view functions
returns (bool isValid, address recoveredSigner)
{
recoveredSigner = recoverSigner(_message, _signature);
isValid = recoveredSigner != address(0);
emit SignatureVerified(_flightId, recoveredSigner, _partyType, _message);
}
/**
* @dev Creates a hash for the flight confirmation message
* @param _flightId Unique identifier for the flight
* @param _signer Address of the signer
* @param _partyType Type of the party (0: Manufacturer, 1: Logistics, 2: Buyer)
* @param _timestamp Timestamp of the confirmation
* @return Hash of the message
*/
function createFlightConfirmationHash(
bytes32 _flightId,
address _signer,
uint8 _partyType,
uint256 _timestamp
)
public
pure
returns (bytes32)
{
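        // EIP-191 "personal_sign" envelope: off-chain clients sign the inner
        // hash (e.g. via ethers signMessage), and recoverSigner matches this digest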
return keccak256(
abi.encodePacked(
"\x19Ethereum Signed Message:\n32",
keccak256(
abi.encodePacked(
"Flight Confirmation",
_flightId,
_signer,
_partyType,
_timestamp
)
)
)
);
}
/**
* @dev Convenience function to verify and process a flight confirmation
* @param _flightId Unique identifier for the flight
* @param _partyType Type of the party (0: Manufacturer, 1: Logistics, 2: Buyer)
* @param _timestamp Timestamp of the confirmation
* @param _signature The signature to verify
* @return isValid Whether the signature is valid
* @return recoveredSigner The address of the signer
*/
function processFlightConfirmation(
bytes32 _flightId,
uint8 _partyType,
uint256 _timestamp,
bytes memory _signature
)
public
        // not `view`: verifyFlightSignature emits an event
returns (bool isValid, address recoveredSigner)
{
bytes32 messageHash = createFlightConfirmationHash(
_flightId,
msg.sender,
_partyType,
_timestamp
);
return verifyFlightSignature(_flightId, _partyType, messageHash, _signature);
}
}

View File

View File

@@ -0,0 +1,46 @@
{
"config": {
"chainId": 1337,
"homesteadBlock": 0,
"eip150Block": 0,
"eip155Block": 0,
"eip158Block": 0,
"byzantiumBlock": 0,
"constantinopleBlock": 0,
"petersburgBlock": 0,
"istanbulBlock": 0,
"berlinBlock": 0,
"londonBlock": 0,
"clique": {
"blockperiodseconds": 2,
"epochlength": 30000
}
},
"nonce": "0x0",
"timestamp": "0x5c51a607",
"extraData": "0x0000000000000000000000000000000000000000000000000000000000000000fe3b557e8fb62b89f4916b721be55ceb828dbd73627306090abaB3A6e1400e9345bC60c78a8BEf570000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000",
"gasLimit": "0x1fffffffffffff",
"difficulty": "0x1",
"mixHash": "0x0000000000000000000000000000000000000000000000000000000000000000",
"coinbase": "0x0000000000000000000000000000000000000000",
"alloc": {
"fe3b557e8fb62b89f4916b721be55ceb828dbd73": {
"balance": "0x200000000000000000000000000000000000000000000000000000000000000"
},
"627306090abaB3A6e1400e9345bC60c78a8BEf57": {
"balance": "0x200000000000000000000000000000000000000000000000000000000000000"
},
"f17f52151EbEF6C7334FAD080c5704D77216b732": {
"balance": "0x200000000000000000000000000000000000000000000000000000000000000"
},
"C5fdf4076b8F3A5357c5E395ab970B5B54098Fef": {
"balance": "0x200000000000000000000000000000000000000000000000000000000000000"
},
"821aEa9a577a9b44299B9c15c88cf3087F3b5544": {
"balance": "0x200000000000000000000000000000000000000000000000000000000000000"
}
},
"number": "0x0",
"gasUsed": "0x0",
"parentHash": "0x0000000000000000000000000000000000000000000000000000000000000000"
}

View File

@@ -0,0 +1,118 @@
version: '3.8'
services:
# Hyperledger Besu Node 1 (Validator)
besu-node1:
image: hyperledger/besu:24.3
container_name: optovia-besu-node1
command:
- --genesis-file=/opt/besu/genesis.json
- --network-id=1337
- --rpc-http-enabled
- --rpc-http-api=ETH,NET,WEB3,ADMIN,MINER,TXPOOL,DEBUG
- --rpc-http-host=0.0.0.0
- --rpc-http-port=8545
- --rpc-http-cors-origins=*
- --host-allowlist=*
- --rpc-ws-enabled
- --rpc-ws-api=ETH,NET,WEB3
- --rpc-ws-host=0.0.0.0
- --rpc-ws-port=8546
- --graphql-http-enabled
- --graphql-http-host=0.0.0.0
- --graphql-http-port=8547
- --p2p-enabled
- --p2p-host=0.0.0.0
- --p2p-port=30303
- --miner-enabled
- --miner-coinbase=0xfe3b557e8fb62b89f4916b721be55ceb828dbd73
- --min-gas-price=0
- --data-path=/opt/besu/data
volumes:
- ./besu/node1:/opt/besu/data
- ./besu/genesis.json:/opt/besu/genesis.json
ports:
- "8545:8545" # RPC HTTP
- "8546:8546" # RPC WebSocket
- "8547:8547" # GraphQL
- "30303:30303" # P2P
networks:
- optovia-blockchain
restart: unless-stopped
# Hyperledger Besu Node 2 (Validator)
besu-node2:
image: hyperledger/besu:24.3
container_name: optovia-besu-node2
command:
- --genesis-file=/opt/besu/genesis.json
- --network-id=1337
- --rpc-http-enabled
- --rpc-http-api=ETH,NET,WEB3,ADMIN,MINER,TXPOOL
- --rpc-http-host=0.0.0.0
- --rpc-http-port=8545
- --rpc-http-cors-origins=*
- --host-allowlist=*
- --p2p-enabled
- --p2p-host=0.0.0.0
- --p2p-port=30303
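      # NOTE: the part before '@' must be node1's enode public key (128 hex chars),
      # not a hostname; obtain it from node1's logs or the `net_enode` RPC method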
- --bootnodes=enode://besu-node1@besu-node1:30303
- --miner-enabled
- --miner-coinbase=0x627306090abaB3A6e1400e9345bC60c78a8BEf57
- --min-gas-price=0
- --data-path=/opt/besu/data
volumes:
- ./besu/node2:/opt/besu/data
- ./besu/genesis.json:/opt/besu/genesis.json
ports:
- "8555:8545"
- "30313:30303"
networks:
- optovia-blockchain
depends_on:
- besu-node1
restart: unless-stopped
# Block Explorer (optional)
blockscout:
image: blockscout/blockscout:latest
container_name: optovia-blockscout
environment:
ETHEREUM_JSONRPC_VARIANT: besu
ETHEREUM_JSONRPC_HTTP_URL: http://besu-node1:8545
ETHEREUM_JSONRPC_WS_URL: ws://besu-node1:8546
DATABASE_URL: postgresql://postgres:password@postgres:5432/blockscout
SECRET_KEY_BASE: RMgI4C1HSkxsEjdhtGMfwAHfyT6CKWXOgzCboJflfSm4jeAlic52io05KB6mqzc5
CHAIN_ID: 1337
SUBNETWORK: Optovia Private Network
LOGO: /images/optovia_logo.svg
COIN: OPT
ports:
- "4000:4000"
networks:
- optovia-blockchain
depends_on:
- besu-node1
- postgres
restart: unless-stopped
# PostgreSQL for BlockScout
postgres:
image: postgres:14-alpine
container_name: optovia-postgres
environment:
POSTGRES_DB: blockscout
POSTGRES_USER: postgres
POSTGRES_PASSWORD: password
volumes:
- postgres-data:/var/lib/postgresql/data
networks:
- optovia-blockchain
restart: unless-stopped
networks:
optovia-blockchain:
driver: bridge
volumes:
postgres-data:

View File

@@ -0,0 +1,38 @@
require("@nomicfoundation/hardhat-toolbox");
require("@openzeppelin/hardhat-upgrades");
require("dotenv").config();
/** @type import('hardhat/config').HardhatUserConfig */
module.exports = {
solidity: {
version: "0.8.20",
settings: {
optimizer: {
enabled: true,
runs: 200,
},
},
},
networks: {
// Local development network
localhost: {
url: "http://127.0.0.1:8545",
chainId: 31337,
},
// Private Optovia network (Besu with Clique PoA)
optovia: {
url: process.env.OPTOVIA_RPC_URL || "http://localhost:8545",
chainId: parseInt(process.env.OPTOVIA_CHAIN_ID || "1337"),
accounts: process.env.PRIVATE_KEY ? [process.env.PRIVATE_KEY] : [],
},
},
gasReporter: {
enabled: process.env.REPORT_GAS === "true",
},
paths: {
sources: "./contracts",
tests: "./test",
cache: "./cache",
artifacts: "./artifacts",
},
};

43
blockchain/package.json Normal file
View File

@@ -0,0 +1,43 @@
{
"name": "optovia-blockchain",
"version": "1.0.0",
"description": "Optovia Blockchain Infrastructure for Shipment Tracking",
"main": "index.js",
"scripts": {
"test": "hardhat test",
"compile": "hardhat compile",
"deploy:local": "hardhat run scripts/deploy-local.js --network localhost",
"deploy:testnet": "hardhat run scripts/deploy-testnet.js --network testnet",
"deploy:mainnet": "hardhat run scripts/deploy-mainnet.js --network mainnet",
"node": "hardhat node",
"clean": "hardhat clean",
"coverage": "hardhat coverage"
},
"keywords": [
"blockchain",
"ethereum",
"solidity",
"shipment",
"tracking",
"supply-chain"
],
"author": "Optovia",
"license": "MIT",
"devDependencies": {
"@nomicfoundation/hardhat-toolbox": "^4.0.0",
"@nomiclabs/hardhat-ethers": "^2.2.3",
"@nomiclabs/hardhat-waffle": "^2.0.6",
"@openzeppelin/hardhat-upgrades": "^3.0.0",
"chai": "^4.3.10",
"ethereum-waffle": "^4.0.10",
"ethers": "^6.9.0",
"hardhat": "^2.19.4",
"hardhat-gas-reporter": "^1.0.9",
"solidity-coverage": "^0.8.5"
},
"dependencies": {
"@openzeppelin/contracts": "^5.0.1",
"@openzeppelin/contracts-upgradeable": "^5.0.1",
"dotenv": "^16.3.1"
}
}

View File

@@ -0,0 +1,95 @@
const { ethers, upgrades } = require("hardhat");
async function main() {
console.log("Deploying to local network...");
const [deployer] = await ethers.getSigners();
console.log("Deploying contracts with account:", deployer.address);
console.log("Account balance:", (await ethers.provider.getBalance(deployer.address)).toString());
// Deploy ShipmentTracker
console.log("\n--- Deploying ShipmentTracker (Upgradeable) ---");
const ShipmentTracker = await ethers.getContractFactory("ShipmentTracker");
const shipmentTracker = await upgrades.deployProxy(
ShipmentTracker,
[deployer.address],
{ initializer: "initialize" }
);
await shipmentTracker.waitForDeployment();
const shipmentTrackerAddress = await shipmentTracker.getAddress();
console.log("ShipmentTracker deployed to:", shipmentTrackerAddress);
// Grant roles
console.log("\nGranting roles...");
const MANUFACTURER_ROLE = await shipmentTracker.MANUFACTURER_ROLE();
const BUYER_ROLE = await shipmentTracker.BUYER_ROLE();
const LOGISTICS_ROLE = await shipmentTracker.LOGISTICS_ROLE();
await shipmentTracker.grantRole(MANUFACTURER_ROLE, deployer.address);
console.log("MANUFACTURER_ROLE granted to:", deployer.address);
await shipmentTracker.grantRole(BUYER_ROLE, deployer.address);
console.log("BUYER_ROLE granted to:", deployer.address);
await shipmentTracker.grantRole(LOGISTICS_ROLE, deployer.address);
console.log("LOGISTICS_ROLE granted to:", deployer.address);
// Save deployment info
const deploymentInfo = {
network: "localhost",
chainId: (await ethers.provider.getNetwork()).chainId.toString(),
deployer: deployer.address,
contracts: {
ShipmentTracker: {
address: shipmentTrackerAddress,
deployedAt: new Date().toISOString()
}
}
};
const fs = require("fs");
const path = require("path");
const deploymentsDir = path.join(__dirname, "..", "deployments");
if (!fs.existsSync(deploymentsDir)) {
fs.mkdirSync(deploymentsDir, { recursive: true });
}
fs.writeFileSync(
path.join(deploymentsDir, "localhost.json"),
JSON.stringify(deploymentInfo, null, 2)
);
console.log("\n=== Deployment Summary ===");
console.log("Network:", "localhost");
console.log("Chain ID:", deploymentInfo.chainId);
console.log("Deployer:", deployer.address);
console.log("\nContracts:");
console.log(" ShipmentTracker:", shipmentTrackerAddress);
console.log("\nDeployment info saved to: deployments/localhost.json");
// Create a test shipment
console.log("\n--- Creating Test Shipment ---");
const tx = await shipmentTracker.createShipment(
"TEST-001",
deployer.address,
"Test Manufacturer",
"MFG-001",
deployer.address,
"Test Buyer",
"BYR-001",
ethers.keccak256(ethers.toUtf8Bytes("test shipment data"))
);
const receipt = await tx.wait();
console.log("Test shipment created! Transaction hash:", receipt.hash);
const shipmentId = await shipmentTracker.getShipmentByExternalId("TEST-001");
console.log("Shipment ID:", shipmentId.toString());
}
main()
.then(() => process.exit(0))
.catch((error) => {
console.error(error);
process.exit(1);
});

View File

@@ -0,0 +1,96 @@
const { ethers, upgrades } = require("hardhat");
async function main() {
console.log("Deploying to Optovia private network...");
const [deployer] = await ethers.getSigners();
console.log("Deploying contracts with account:", deployer.address);
console.log("Account balance:", (await ethers.provider.getBalance(deployer.address)).toString());
// Deploy ShipmentTracker
console.log("\n--- Deploying ShipmentTracker (Upgradeable) ---");
const ShipmentTracker = await ethers.getContractFactory("ShipmentTracker");
const shipmentTracker = await upgrades.deployProxy(
ShipmentTracker,
[deployer.address],
{ initializer: "initialize" }
);
await shipmentTracker.waitForDeployment();
const shipmentTrackerAddress = await shipmentTracker.getAddress();
console.log("ShipmentTracker deployed to:", shipmentTrackerAddress);
// Grant roles to platform services
console.log("\nGranting roles...");
const MANUFACTURER_ROLE = await shipmentTracker.MANUFACTURER_ROLE();
const BUYER_ROLE = await shipmentTracker.BUYER_ROLE();
const LOGISTICS_ROLE = await shipmentTracker.LOGISTICS_ROLE();
// These addresses should be replaced with actual service addresses
const platformServiceAddress = deployer.address; // Replace with actual platform service address
await shipmentTracker.grantRole(MANUFACTURER_ROLE, platformServiceAddress);
console.log("MANUFACTURER_ROLE granted to platform service:", platformServiceAddress);
await shipmentTracker.grantRole(BUYER_ROLE, platformServiceAddress);
console.log("BUYER_ROLE granted to platform service:", platformServiceAddress);
await shipmentTracker.grantRole(LOGISTICS_ROLE, platformServiceAddress);
console.log("LOGISTICS_ROLE granted to platform service:", platformServiceAddress);
// Save deployment info
const deploymentInfo = {
network: "optovia-private",
chainId: "1337",
deployer: deployer.address,
platformService: platformServiceAddress,
contracts: {
ShipmentTracker: {
address: shipmentTrackerAddress,
deployedAt: new Date().toISOString()
}
},
roles: {
ShipmentTracker: {
MANUFACTURER_ROLE: MANUFACTURER_ROLE,
BUYER_ROLE: BUYER_ROLE,
LOGISTICS_ROLE: LOGISTICS_ROLE
}
}
};
const fs = require("fs");
const path = require("path");
const deploymentsDir = path.join(__dirname, "..", "deployments");
if (!fs.existsSync(deploymentsDir)) {
fs.mkdirSync(deploymentsDir, { recursive: true });
}
fs.writeFileSync(
path.join(deploymentsDir, "optovia-private.json"),
JSON.stringify(deploymentInfo, null, 2)
);
console.log("\n=== Deployment Summary ===");
console.log("Network: Optovia Private Network");
console.log("Chain ID: 1337");
console.log("Deployer:", deployer.address);
console.log("Platform Service:", platformServiceAddress);
console.log("\nContracts:");
console.log(" ShipmentTracker:", shipmentTrackerAddress);
console.log("\nDeployment info saved to: deployments/optovia-private.json");
console.log("\n--- Integration Instructions ---");
console.log("1. Update backend configuration with contract address:");
console.log(` SHIPMENT_TRACKER_ADDRESS=${shipmentTrackerAddress}`);
console.log("2. Configure blockchain RPC endpoint:");
console.log(" BLOCKCHAIN_RPC_URL=http://besu-node1:8545");
console.log("3. Store private key securely for platform service");
}
main()
.then(() => process.exit(0))
.catch((error) => {
console.error(error);
process.exit(1);
});

View File

@@ -0,0 +1,154 @@
/**
* Interactive script for testing shipment tracker contract
* Usage: npx hardhat run scripts/interact.js --network <network>
*/
const { ethers } = require("hardhat");
const fs = require("fs");
const path = require("path");
async function main() {
console.log("=== Optovia Blockchain Interaction Script ===\n");
const [signer] = await ethers.getSigners();
const network = await ethers.provider.getNetwork();
console.log("Network:", network.name);
console.log("Chain ID:", network.chainId.toString());
console.log("Signer address:", signer.address);
console.log("Balance:", ethers.formatEther(await ethers.provider.getBalance(signer.address)), "ETH\n");
// Load deployment
const deploymentFile = network.name === "localhost"
? "localhost.json"
: network.chainId === 1337n
? "optovia-private.json"
: `${network.name}.json`;
const deploymentPath = path.join(__dirname, "..", "deployments", deploymentFile);
if (!fs.existsSync(deploymentPath)) {
console.error(`Deployment file not found: ${deploymentPath}`);
console.error("Please deploy contracts first using: npm run deploy:private --network optovia");
process.exit(1);
}
const deployment = JSON.parse(fs.readFileSync(deploymentPath));
console.log("Contract addresses:");
console.log(" ShipmentTracker:", deployment.contracts.ShipmentTracker.address);
console.log(" ShipmentNFT:", deployment.contracts.ShipmentNFT.address);
console.log();
// Get contract instances
const shipmentTracker = await ethers.getContractAt(
"ShipmentTracker",
deployment.contracts.ShipmentTracker.address,
signer
);
// Check total shipments
const totalShipments = await shipmentTracker.getTotalShipments();
console.log("Total shipments:", totalShipments.toString(), "\n");
// Create a test shipment
const timestamp = Date.now();
const externalId = `TEST-${timestamp}`;
const testData = {
productId: "PROD-001",
quantity: 100,
weight: "500kg",
timestamp: timestamp
};
console.log("Creating test shipment:", externalId);
const dataHash = ethers.keccak256(ethers.toUtf8Bytes(JSON.stringify(testData)));
try {
const tx = await shipmentTracker.createShipment(
externalId,
signer.address,
"Test Manufacturer Inc.",
"MFG-12345",
signer.address,
"Test Buyer Corp.",
"BYR-67890",
dataHash
);
console.log("Transaction sent:", tx.hash);
console.log("Waiting for confirmation...");
const receipt = await tx.wait();
console.log("Transaction confirmed in block:", receipt.blockNumber);
console.log("Gas used:", receipt.gasUsed.toString(), "\n");
// Get shipment ID
const shipmentId = await shipmentTracker.getShipmentByExternalId(externalId);
console.log("Shipment ID:", shipmentId.toString());
// Get shipment details
console.log("\nFetching shipment details...");
const shipment = await shipmentTracker.getShipment(shipmentId);
console.log("\n=== Shipment Details ===");
console.log("ID:", shipment[0].toString());
console.log("External ID:", shipment[1]);
console.log("Status:", getStatusName(shipment[2]));
console.log("\nManufacturer:");
console.log(" Address:", shipment[3][0]);
console.log(" Name:", shipment[3][1]);
console.log(" Company ID:", shipment[3][2]);
console.log("\nBuyer:");
console.log(" Address:", shipment[4][0]);
console.log(" Name:", shipment[4][1]);
console.log(" Company ID:", shipment[4][2]);
console.log("\nTimestamps:");
console.log(" Created:", new Date(Number(shipment[6]) * 1000).toISOString());
console.log(" Updated:", new Date(Number(shipment[7]) * 1000).toISOString());
// Test status update
console.log("\n=== Testing Status Update ===");
const statusTx = await shipmentTracker.updateShipmentStatus(shipmentId, 1); // ManufacturerConfirmed
await statusTx.wait();
console.log("Status updated to: ManufacturerConfirmed");
// Test adding document
console.log("\n=== Testing Document Addition ===");
const docHash = "QmTestHash123456789abcdef";
const docTx = await shipmentTracker.addDocument(shipmentId, docHash);
await docTx.wait();
console.log("Document added:", docHash);
const documents = await shipmentTracker.getShipmentDocuments(shipmentId);
console.log("Total documents:", documents.length);
console.log("\n=== Test Complete ===");
console.log("Shipment created and tested successfully!");
console.log("View in Block Explorer: http://localhost:4000/tx/" + tx.hash);
} catch (error) {
console.error("\nError:", error.message);
if (error.data) {
console.error("Error data:", error.data);
}
process.exit(1);
}
}
function getStatusName(status) {
const statuses = [
"Created",
"ManufacturerConfirmed",
"InTransit",
"Delivered",
"Cancelled"
];
return statuses[status] || "Unknown";
}
main()
.then(() => process.exit(0))
.catch((error) => {
console.error(error);
process.exit(1);
});

View File

@@ -0,0 +1,263 @@
const { expect } = require("chai");
const { ethers, upgrades } = require("hardhat");
describe("ShipmentTracker", function () {
let shipmentTracker;
let owner, manufacturer, buyer, logistics, other;
beforeEach(async function () {
[owner, manufacturer, buyer, logistics, other] = await ethers.getSigners();
const ShipmentTracker = await ethers.getContractFactory("ShipmentTracker");
shipmentTracker = await upgrades.deployProxy(
ShipmentTracker,
[owner.address],
{ initializer: "initialize" }
);
await shipmentTracker.waitForDeployment();
// Grant roles
const MANUFACTURER_ROLE = await shipmentTracker.MANUFACTURER_ROLE();
const BUYER_ROLE = await shipmentTracker.BUYER_ROLE();
const LOGISTICS_ROLE = await shipmentTracker.LOGISTICS_ROLE();
await shipmentTracker.grantRole(MANUFACTURER_ROLE, manufacturer.address);
await shipmentTracker.grantRole(BUYER_ROLE, buyer.address);
await shipmentTracker.grantRole(LOGISTICS_ROLE, logistics.address);
});
describe("Deployment", function () {
it("Should set the correct admin", async function () {
const ADMIN_ROLE = await shipmentTracker.ADMIN_ROLE();
expect(await shipmentTracker.hasRole(ADMIN_ROLE, owner.address)).to.be.true;
});
it("Should grant manufacturer role", async function () {
const MANUFACTURER_ROLE = await shipmentTracker.MANUFACTURER_ROLE();
expect(await shipmentTracker.hasRole(MANUFACTURER_ROLE, manufacturer.address)).to.be.true;
});
});
describe("Create Shipment", function () {
it("Should create a shipment successfully", async function () {
const dataHash = ethers.keccak256(ethers.toUtf8Bytes("test data"));
const tx = await shipmentTracker.connect(manufacturer).createShipment(
"SHIP-001",
manufacturer.address,
"Manufacturer A",
"MFG-001",
buyer.address,
"Buyer B",
"BYR-001",
dataHash
);
await expect(tx)
.to.emit(shipmentTracker, "ShipmentCreated")
.withArgs(1, "SHIP-001", manufacturer.address, buyer.address, await ethers.provider.getBlock("latest").then(b => b.timestamp));
const shipmentId = await shipmentTracker.getShipmentByExternalId("SHIP-001");
expect(shipmentId).to.equal(1);
});
it("Should fail to create duplicate shipment", async function () {
const dataHash = ethers.keccak256(ethers.toUtf8Bytes("test data"));
await shipmentTracker.connect(manufacturer).createShipment(
"SHIP-001",
manufacturer.address,
"Manufacturer A",
"MFG-001",
buyer.address,
"Buyer B",
"BYR-001",
dataHash
);
await expect(
shipmentTracker.connect(manufacturer).createShipment(
"SHIP-001",
manufacturer.address,
"Manufacturer A",
"MFG-001",
buyer.address,
"Buyer B",
"BYR-001",
dataHash
)
).to.be.revertedWith("Shipment with this external ID already exists");
});
it("Should fail without proper role", async function () {
const dataHash = ethers.keccak256(ethers.toUtf8Bytes("test data"));
await expect(
shipmentTracker.connect(other).createShipment(
"SHIP-001",
manufacturer.address,
"Manufacturer A",
"MFG-001",
buyer.address,
"Buyer B",
"BYR-001",
dataHash
)
).to.be.revertedWith("Not authorized to create shipment");
});
});
describe("Update Logistics Info", function () {
beforeEach(async function () {
const dataHash = ethers.keccak256(ethers.toUtf8Bytes("test data"));
await shipmentTracker.connect(manufacturer).createShipment(
"SHIP-001",
manufacturer.address,
"Manufacturer A",
"MFG-001",
buyer.address,
"Buyer B",
"BYR-001",
dataHash
);
});
it("Should update logistics info", async function () {
const logisticsHash = ethers.keccak256(ethers.toUtf8Bytes("logistics data"));
await expect(
shipmentTracker.connect(logistics).updateLogisticsInfo(
1,
logistics.address,
"Logistics C",
"LOG-001",
logisticsHash
)
)
.to.emit(shipmentTracker, "LogisticsInfoUpdated")
.withArgs(1, logistics.address, logisticsHash, await ethers.provider.getBlock("latest").then(b => b.timestamp));
const shipment = await shipmentTracker.getShipment(1);
expect(shipment.logistics.walletAddress).to.equal(logistics.address);
expect(shipment.logistics.name).to.equal("Logistics C");
});
});
describe("Update Shipment Status", function () {
beforeEach(async function () {
const dataHash = ethers.keccak256(ethers.toUtf8Bytes("test data"));
await shipmentTracker.connect(manufacturer).createShipment(
"SHIP-001",
manufacturer.address,
"Manufacturer A",
"MFG-001",
buyer.address,
"Buyer B",
"BYR-001",
dataHash
);
const logisticsHash = ethers.keccak256(ethers.toUtf8Bytes("logistics data"));
await shipmentTracker.connect(logistics).updateLogisticsInfo(
1,
logistics.address,
"Logistics C",
"LOG-001",
logisticsHash
);
});
it("Should update status to ManufacturerConfirmed", async function () {
await expect(
shipmentTracker.connect(manufacturer).updateShipmentStatus(1, 1) // ManufacturerConfirmed
)
.to.emit(shipmentTracker, "ShipmentUpdated")
.withArgs(1, 1, manufacturer.address, await ethers.provider.getBlock("latest").then(b => b.timestamp));
const shipment = await shipmentTracker.getShipment(1);
expect(shipment.status).to.equal(1);
});
it("Should update status to InTransit by logistics", async function () {
await shipmentTracker.connect(manufacturer).updateShipmentStatus(1, 1);
await expect(
shipmentTracker.connect(logistics).updateShipmentStatus(1, 2) // InTransit
)
.to.emit(shipmentTracker, "ShipmentUpdated");
const shipment = await shipmentTracker.getShipment(1);
expect(shipment.status).to.equal(2);
});
it("Should fail to update status without authorization", async function () {
await expect(
shipmentTracker.connect(other).updateShipmentStatus(1, 1)
).to.be.revertedWith("Not authorized");
});
});
describe("Add Document", function () {
beforeEach(async function () {
const dataHash = ethers.keccak256(ethers.toUtf8Bytes("test data"));
await shipmentTracker.connect(manufacturer).createShipment(
"SHIP-001",
manufacturer.address,
"Manufacturer A",
"MFG-001",
buyer.address,
"Buyer B",
"BYR-001",
dataHash
);
});
it("Should add a document", async function () {
const docHash = "QmTest123456789";
await expect(
shipmentTracker.connect(manufacturer).addDocument(1, docHash)
)
.to.emit(shipmentTracker, "DocumentAdded")
.withArgs(1, docHash, manufacturer.address, await ethers.provider.getBlock("latest").then(b => b.timestamp));
const documents = await shipmentTracker.getShipmentDocuments(1);
expect(documents).to.include(docHash);
});
});
describe("Pause/Unpause", function () {
it("Should pause and unpause contract", async function () {
await shipmentTracker.pause();
const dataHash = ethers.keccak256(ethers.toUtf8Bytes("test data"));
await expect(
shipmentTracker.connect(manufacturer).createShipment(
"SHIP-001",
manufacturer.address,
"Manufacturer A",
"MFG-001",
buyer.address,
"Buyer B",
"BYR-001",
dataHash
)
).to.be.revertedWith("Pausable: paused");
await shipmentTracker.unpause();
await expect(
shipmentTracker.connect(manufacturer).createShipment(
"SHIP-001",
manufacturer.address,
"Manufacturer A",
"MFG-001",
buyer.address,
"Buyer B",
"BYR-001",
dataHash
)
).to.emit(shipmentTracker, "ShipmentCreated");
});
});
});

View File

@@ -0,0 +1,31 @@
meta {
name: Get ID Token
type: http
seq: 1
}
post {
url: {{logto_endpoint}}/oidc/token
body: formUrlEncoded
auth: none
}
body:form-urlencoded {
grant_type: urn:ietf:params:oauth:grant-type:token-exchange
client_id: {{logto_webapp_client_id}}
client_secret: {{logto_webapp_client_secret}}
subject_token: {{personal_access_token}}
subject_token_type: urn:logto:token-type:personal_access_token
scope: openid profile email
}
script:post-response {
if (res.body.id_token) {
bru.setVar("id_token", res.body.id_token);
console.log("ID Token saved");
}
if (res.body.refresh_token) {
bru.setVar("refresh_token", res.body.refresh_token);
console.log("New refresh_token saved");
}
}

View File

@@ -0,0 +1,29 @@
meta {
name: Get Teams Token
type: http
seq: 2
}
post {
url: {{logto_endpoint}}/oidc/token
body: formUrlEncoded
auth: none
}
body:form-urlencoded {
grant_type: urn:ietf:params:oauth:grant-type:token-exchange
client_id: {{logto_webapp_client_id}}
client_secret: {{logto_webapp_client_secret}}
resource: https://teams.optovia.ru
subject_token: {{personal_access_token}}
subject_token_type: urn:logto:token-type:personal_access_token
organization_id: {{organization_id}}
scope: teams:member
}
script:post-response {
if (res.body.access_token) {
bru.setVar("access_token_teams", res.body.access_token);
console.log("Teams access token saved");
}
}

View File

@@ -0,0 +1,29 @@
meta {
name: Get Exchange Token
type: http
seq: 3
}
post {
url: {{logto_endpoint}}/oidc/token
body: formUrlEncoded
auth: none
}
body:form-urlencoded {
grant_type: urn:ietf:params:oauth:grant-type:token-exchange
client_id: {{logto_webapp_client_id}}
client_secret: {{logto_webapp_client_secret}}
resource: https://exchange.optovia.ru
subject_token: {{personal_access_token}}
subject_token_type: urn:logto:token-type:personal_access_token
organization_id: {{organization_id}}
scope: teams:member
}
script:post-response {
if (res.body.access_token) {
bru.setVar("access_token_exchange", res.body.access_token);
console.log("Exchange access token saved");
}
}

View File

@@ -0,0 +1,29 @@
meta {
name: Get Orders Token
type: http
seq: 4
}
post {
url: {{logto_endpoint}}/oidc/token
body: formUrlEncoded
auth: none
}
body:form-urlencoded {
grant_type: urn:ietf:params:oauth:grant-type:token-exchange
client_id: {{logto_webapp_client_id}}
client_secret: {{logto_webapp_client_secret}}
resource: https://orders.optovia.ru
subject_token: {{personal_access_token}}
subject_token_type: urn:logto:token-type:personal_access_token
organization_id: {{organization_id}}
scope: teams:member
}
script:post-response {
if (res.body.access_token) {
bru.setVar("access_token_orders", res.body.access_token);
console.log("Orders access token saved");
}
}

View File

@@ -0,0 +1,29 @@
meta {
name: Get KYC Token
type: http
seq: 5
}
post {
url: {{logto_endpoint}}/oidc/token
body: formUrlEncoded
auth: none
}
body:form-urlencoded {
grant_type: urn:ietf:params:oauth:grant-type:token-exchange
client_id: {{logto_webapp_client_id}}
client_secret: {{logto_webapp_client_secret}}
resource: https://kyc.optovia.ru
subject_token: {{personal_access_token}}
subject_token_type: urn:logto:token-type:personal_access_token
organization_id: {{organization_id}}
scope: teams:member
}
script:post-response {
if (res.body.access_token) {
bru.setVar("access_token_kyc", res.body.access_token);
console.log("KYC access token saved");
}
}

5
bruno/auth/bruno.json Normal file
View File

@@ -0,0 +1,5 @@
{
"version": "1",
"name": "Auth Tokens",
"type": "collection"
}

View File

@@ -0,0 +1,8 @@
vars {
logto_endpoint: https://auth.optovia.ru
logto_webapp_client_id: 0g60ruitz7w6te9jfzu5z
logto_webapp_client_secret: PeNByiTd6VslH1fvLhZnzyJ8uLXp0G1O
organization_id: nq1kh0z7ihtz
personal_access_token: pat_zT52ONuvg3wY8vbL9l1FlzMjG8XsiucU
refresh_token: CCsaQOCcH1xkV88gYpvFabiNjwRUlPSdancMT95X8vz
}

View File

@@ -0,0 +1,27 @@
meta {
name: Get Organization Token
type: http
seq: 1
}
post {
url: {{logto_endpoint}}/oidc/token
body: formUrlEncoded
auth: none
}
body:form-urlencoded {
grant_type: urn:ietf:params:oauth:grant-type:token-exchange
client_id: {{logto_webapp_client_id}}
client_secret: {{logto_webapp_client_secret}}
resource: {{api_url}}
subject_token: {{personal_access_token}}
subject_token_type: urn:logto:token-type:personal_access_token
organization_id: {{organization_id}}
}
script:post-response {
if (res.body.access_token) {
bru.setEnvVar("access_token", res.body.access_token);
}
}

View File

@@ -0,0 +1,44 @@
meta {
name: CreateOffer
type: http
seq: 1
}
post {
url: {{exchange_graphql_team}}
body: graphql
auth: none
}
headers {
Authorization: Bearer {{access_token}}
}
body:graphql {
mutation CreateOffer($input: OfferInput!) {
createOffer(input: $input) {
success
message
workflowId
offerUuid
}
}
}
body:graphql:vars {
{
"input": {
"teamUuid": "ee0d350a-1ba8-412e-82eb-faecf542fc09",
"productUuid": "0f60a6a7-0b03-4432-9568-b0f9f8f91397",
"productName": "Cocoa Beans",
"quantity": 100,
"unit": "ton",
"pricePerUnit": 5000,
"currency": "USD"
}
}
}
script:post-response {
console.log("Response:", JSON.stringify(res.body, null, 2));
}

View File

@@ -0,0 +1,36 @@
meta {
name: GetTeamOffers
type: http
seq: 2
}
post {
url: {{exchange_graphql_team}}
body: graphql
auth: none
}
headers {
Authorization: Bearer {{access_token}}
}
body:graphql {
query GetTeamOffers($teamUuid: String!) {
getTeamOffers(teamUuid: $teamUuid) {
uuid
productName
quantity
status
}
}
}
body:graphql:vars {
{
"teamUuid": "ee0d350a-1ba8-412e-82eb-faecf542fc09"
}
}
script:post-response {
console.log("Response:", JSON.stringify(res.body, null, 2));
}

View File

@@ -0,0 +1,5 @@
{
"version": "1",
"name": "Exchange API",
"type": "collection"
}

View File

@@ -0,0 +1,10 @@
vars {
logto_endpoint: https://auth.optovia.ru
logto_webapp_client_id: 0g60ruitz7w6te9jfzu5z
logto_webapp_client_secret: PeNByiTd6VslH1fvLhZnzyJ8uLXp0G1O
api_url: https://exchange.optovia.ru
organization_id: nq1kh0z7ihtz
personal_access_token: pat_zT52ONuvg3wY8vbL9l1FlzMjG8XsiucU
exchange_graphql_team: https://exchange.optovia.ru/graphql/team/
exchange_graphql_public: https://exchange.optovia.ru/graphql/public/
}

View File

@@ -0,0 +1,27 @@
meta {
name: Get Organization Token (PAT Exchange)
type: http
seq: 1
}
post {
url: {{logto_endpoint}}/oidc/token
body: formUrlEncoded
auth: none
}
body:form-urlencoded {
grant_type: urn:ietf:params:oauth:grant-type:token-exchange
client_id: {{logto_webapp_client_id}}
client_secret: {{logto_webapp_client_secret}}
resource: {{api_url}}
subject_token: {{personal_access_token}}
subject_token_type: urn:logto:token-type:personal_access_token
organization_id: {{organization_id}}
}
script:post-response {
if (res.body.access_token) {
bru.setEnvVar("access_token", res.body.access_token);
}
}

View File

@@ -0,0 +1,40 @@
meta {
name: Get Organization Token (Refresh Token)
type: http
seq: 2
}
post {
url: {{logto_endpoint}}/oidc/token
body: formUrlEncoded
auth: none
}
body:form-urlencoded {
grant_type: refresh_token
client_id: {{logto_webapp_client_id}}
client_secret: {{logto_webapp_client_secret}}
refresh_token: {{refresh_token}}
resource: {{api_url}}
organization_id: {{organization_id}}
}
script:post-response {
if (res.body.access_token) {
bru.setEnvVar("access_token", res.body.access_token);
    // Decode the JWT payload for debugging
const parts = res.body.access_token.split('.');
if (parts.length === 3) {
const payload = JSON.parse(Buffer.from(parts[1], 'base64').toString());
console.log("=== ACCESS TOKEN DECODED ===");
console.log(JSON.stringify(payload, null, 2));
console.log("============================");
}
}
  // Log the full response
console.log("=== FULL RESPONSE ===");
console.log(JSON.stringify(res.body, null, 2));
console.log("=====================");
}

View File

@@ -0,0 +1,39 @@
meta {
name: GetMe
type: http
seq: 2
}
post {
url: {{teams_graphql_user}}
  body: graphql
auth: none
}
headers {
Authorization: Bearer {{access_token}}
Content-Type: application/json
}
body:graphql {
  query GetMe {
    me {
      id
      firstName
      lastName
      activeTeamId
      activeTeam {
        id
        name
        status
      }
      teams {
        id
        name
        status
      }
    }
  }
}

6
bruno/teams/README.md Normal file
View File

@@ -0,0 +1,6 @@
A test request to `teams` using a personal access token (PAT).
Set these variables in your Bruno environment:
- `teams_graphql_user`: the `/graphql/user/` GraphQL URL
- `pat_token`: your PAT (`pat_...`)
Use the `02-graphql/GetMe` request: it sends the PAT directly in `Authorization: Bearer ...` and returns `me`. There are no other steps.
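The equivalent request outside Bruno is a single POST (a sketch; the PAT is a placeholder):
```js
// GetMe with a PAT sent directly as a Bearer token
const res = await fetch("https://teams.optovia.ru/graphql/user/", {
  method: "POST",
  headers: { "Content-Type": "application/json", Authorization: "Bearer pat_..." },
  body: JSON.stringify({ query: "query GetMe { me { id firstName lastName } }" }),
});
console.log(await res.json());
```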

9
bruno/teams/bruno.json Normal file
View File

@@ -0,0 +1,9 @@
{
"version": "1",
"name": "Teams API",
"type": "collection",
"ignore": [
"node_modules",
".git"
]
}

View File

@@ -0,0 +1,12 @@
vars {
logto_endpoint: https://auth.optovia.ru
logto_m2m_client_id: p092qcodgx5f3ntpxkqnn
logto_m2m_client_secret: dFSskxWYbh2sL3m3V5kcSuYsHXyhw72I
logto_webapp_client_id: 0g60ruitz7w6te9jfzu5z
logto_webapp_client_secret: PeNByiTd6VslH1fvLhZnzyJ8uLXp0G1O
api_url: https://teams.optovia.ru
organization_id: nq1kh0z7ihtz
personal_access_token: pat_zT52ONuvg3wY8vbL9l1FlzMjG8XsiucU
refresh_token: CCsaQOCcH1xkV88gYpvFabiNjwRUlPSdancMT95X8vz
teams_graphql_user: https://teams.optovia.ru/graphql/user/
}

View File

@@ -0,0 +1,9 @@
{
"logto_endpoint": "https://auth.optovia.ru",
"logto_m2m_client_id": "p092qcodgx5f3ntpxkqnn",
"logto_m2m_client_secret": "dFSskxWYbh2sL3m3V5kcSuYsHXyhw72I",
"api_url": "https://teams.optovia.ru",
"organization_id": "nq1kh0z7ihtz",
"personal_access_token": "pat_zT52ONuvg3wY8vbL9l1FlzMjG8XsiucU",
"teams_graphql_user": "https://teams.optovia.ru/graphql/user/"
}

6
graphhopper/.gitignore vendored Normal file
View File

@@ -0,0 +1,6 @@
# OSM data files (large)
data/*.pbf
data/*.osm
# Generated graph cache
data/graph-cache/

115
graphhopper/README.md Normal file
View File

@@ -0,0 +1,115 @@
# GraphHopper Routing Engine
A routing and isochrone service based on OpenStreetMap.
## Quick Start
### 1. Download OSM data
```bash
# Russia (~2.5 GB)
cd data
wget https://download.geofabrik.de/russia-latest.osm.pbf
# Or the whole CIS (all countries)
# wget https://download.geofabrik.de/russia-latest.osm.pbf
# wget https://download.geofabrik.de/asia/kazakhstan-latest.osm.pbf
# wget https://download.geofabrik.de/europe/belarus-latest.osm.pbf
# ... then merge them with osmium-tool
```
### 2. Start
```bash
docker compose up -d
```
The first start takes 10-30 minutes (the routing graph is built).
### 3. Verify
```bash
curl http://localhost:8989/health
```
## API Endpoints
### Routing
```bash
curl "http://localhost:8989/route?point=55.75,37.62&point=59.93,30.31&profile=car"
```
### Isochrone
```bash
# 4-hour isochrone from Moscow
curl "http://localhost:8989/isochrone?point=55.75,37.62&time_limit=14400&profile=car"
# 3 isochrones: 2, 4 and 6 hours
curl "http://localhost:8989/isochrone?point=55.75,37.62&time_limit=21600&buckets=3&profile=car"
# Reverse isochrone (where the point can be reached from)
curl "http://localhost:8989/isochrone?point=55.75,37.62&time_limit=14400&profile=car&reverse_flow=true"
```
### Isochrone Parameters
| Parameter | Description | Example |
|-----------|-------------|---------|
| point | Isochrone center (lat,lon) | 55.75,37.62 |
| time_limit | Time in seconds | 14400 (4 hours) |
| distance_limit | Distance in meters | 500000 (500 km) |
| profile | Routing profile | car, truck |
| buckets | Number of isochrones | 3 |
| reverse_flow | Reverse direction | true |
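The endpoint responds with GeoJSON: a `polygons` array of features, one per bucket. A minimal sketch reading it in JavaScript (field names as returned by GraphHopper's isochrone endpoint):
```js
// fetch 2/4/6-hour isochrones and log each polygon's size
const url = "http://localhost:8989/isochrone?point=55.75,37.62&time_limit=21600&buckets=3&profile=car";
const { polygons } = await (await fetch(url)).json();
for (const p of polygons) {
  console.log(`bucket ${p.properties.bucket}:`, p.geometry.coordinates[0].length, "points");
}
```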
## Memory Requirements
| Region | OSM size | RAM for import | RAM at runtime |
|--------|----------|----------------|----------------|
| Russia | ~2.5 GB | 8-12 GB | 4-6 GB |
| Europe | ~25 GB | 32-48 GB | 16-24 GB |
| World | ~70 GB | 64-96 GB | 32-48 GB |
## Folder Structure
```
graphhopper/
├── docker-compose.yml
├── config.yml
├── README.md
└── data/
    ├── russia-latest.osm.pbf   # OSM data (download manually)
    └── graph-cache/            # Built graph (created automatically)
```
## Updating Data
```bash
# Download fresh data
cd data
wget -N https://download.geofabrik.de/russia-latest.osm.pbf
# Remove the old graph and restart
rm -rf graph-cache
docker compose restart graphhopper
```
## Frontend Integration
```typescript
// composables/useIsochrone.ts
export const useIsochrone = () => {
const getIsochrone = async (
lat: number,
lng: number,
timeMinutes: number,
profile: 'car' | 'truck' = 'car'
) => {
const response = await fetch(
`${GRAPHHOPPER_URL}/isochrone?point=${lat},${lng}&time_limit=${timeMinutes * 60}&profile=${profile}`
)
return response.json() // GeoJSON polygon
}
return { getIsochrone }
}
```

56
graphhopper/config.yml Normal file
View File

@@ -0,0 +1,56 @@
# GraphHopper Configuration (v9+ format)
# Documentation: https://github.com/graphhopper/graphhopper/blob/master/docs/core/deploy.md
graphhopper:
# OSM data - start with Africa only
datareader.file: /data/africa-latest.osm.pbf
graph.location: /data/graph-cache
# Encoded values for routing
graph.encoded_values: car_access,car_average_speed,road_class,road_environment
# Routing profiles (v9+ format - no "vehicle", use custom_model)
profiles:
- name: car
turn_costs:
vehicle_types: ["motorcar"]
custom_model:
priority:
- if: "!car_access"
multiply_by: "0"
speed:
- if: "true"
limit_to: "car_average_speed"
- name: truck
turn_costs:
vehicle_types: ["hgv"]
custom_model:
priority:
- if: "!car_access"
multiply_by: "0"
speed:
- if: "true"
limit_to: "car_average_speed * 0.85"
# CH (Contraction Hierarchies) for fast queries
profiles_ch:
- profile: car
# LM (Landmarks) for flexible queries and isochrones
profiles_lm:
- profile: car
- profile: truck
# Import settings
import.osm.ignored_highways: footway,cycleway,path,pedestrian,steps
server:
application_connectors:
- type: http
port: 8989
bind_host: 0.0.0.0
admin_connectors:
- type: http
port: 8990
bind_host: 0.0.0.0

View File

@@ -0,0 +1,2 @@
# OSM data files go here
# Download from https://download.geofabrik.de/

View File

@@ -0,0 +1,30 @@
services:
graphhopper:
image: israelhikingmap/graphhopper:latest
container_name: graphhopper
restart: unless-stopped
ports:
- "8989:8989"
volumes:
- graphhopper-data:/data
- ./config.yml:/config.yml:ro
environment:
- JAVA_OPTS=-Xmx40g -Xms20g
command: --config /config.yml
healthcheck:
test: ["CMD", "curl", "-f", "http://localhost:8989/health"]
interval: 30s
timeout: 10s
retries: 3
start_period: 120s
networks:
dokploy-network:
aliases:
- graphhopper
volumes:
graphhopper-data:
networks:
dokploy-network:
external: true

6
lang/.env.example Normal file
View File

@@ -0,0 +1,6 @@
# OpenAI
OPENAI_API_KEY=sk-...
# Langfuse (create keys at http://localhost:3000 after first run)
LANGFUSE_PUBLIC_KEY=pk-...
LANGFUSE_SECRET_KEY=sk-...

4
lang/.gitignore vendored Normal file
View File

@@ -0,0 +1,4 @@
.env
__pycache__/
*.pyc
.venv/

34
lang/agent/README.md Normal file
View File

@@ -0,0 +1,34 @@
# Optovia LangGraph Agent (Gemini + MCP)
A LangGraph service on FastAPI/LangServe that uses the MCP server (`lang/mcp`) as its toolset.
## Quick Start
```bash
cd lang/agent
python -m venv .venv && source .venv/bin/activate
pip install -r requirements.txt
# GOOGLE_API_KEY is for Gemini; the GEO_/ORDERS_ GraphQL URLs are passed through to MCP
GOOGLE_API_KEY=... \
MCP_COMMAND=node \
MCP_ARGS="lang/mcp/src/index.js" \
GEO_GRAPHQL_URL=... ORDERS_GRAPHQL_URL=... \
uvicorn app:app --reload --port 8000
```
## Environment Variables
- `GOOGLE_API_KEY`: key for Gemini (used by langchain-google-genai).
- `MCP_COMMAND` / `MCP_ARGS`: how to launch the MCP server (default `node lang/mcp/src/index.js`).
- `GEO_GRAPHQL_URL`, `ORDERS_GRAPHQL_URL`, `GEO_M2M_TOKEN`: passed into the MCP env.
- `CORS_ALLOW_ORIGINS`: comma-separated list of allowed Origins for the frontend.
- `PORT`: FastAPI/LangServe port (default 8000).
## Endpoints
- `POST /agent/invoke`: standard LangServe graph invocation: `{ "input": { "messages": [...] } }` (see the sketch below).
- `GET /health`: liveness check.
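A minimal invocation from JavaScript (a sketch; assumes a local run on port 8000):
```js
// single-shot agent call via LangServe
const res = await fetch("http://localhost:8000/agent/invoke", {
  method: "POST",
  headers: { "Content-Type": "application/json" },
  body: JSON.stringify({ input: { messages: [{ type: "human", content: "Where is order 123?" }] } }),
});
console.log(await res.json());
```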
## Tools (via MCP)
- `match_offers_with_route`: find active offers and route options to the destination node.
- `order_timeline`: order status/stages (requires a userToken).
- `search_nodes`: the logistics node directory.
## Frontend
In `webapp`, the `/clientarea/ai` page calls LangServe's `/agent/invoke` via `public.langAgentUrl`.

29
lang/agent/app.py Normal file
View File

@@ -0,0 +1,29 @@
import os
from fastapi import FastAPI
from fastapi.middleware.cors import CORSMiddleware
from langserve import add_routes
from graph import graph_app
app = FastAPI(title="Optovia LangGraph Agent")
app.add_middleware(
CORSMiddleware,
allow_origins=os.getenv("CORS_ALLOW_ORIGINS", "*").split(","),
allow_methods=["*"],
allow_headers=["*"],
)
# Expose the compiled graph at /agent
add_routes(app, graph_app, path="/agent")
@app.get("/health")
async def health():
return {"status": "ok"}
if __name__ == "__main__":
import uvicorn
uvicorn.run(app, host="0.0.0.0", port=int(os.getenv("PORT", "8000")))

107
lang/agent/graph.py Normal file
View File

@@ -0,0 +1,107 @@
import os
from typing import Any, Dict

from langchain.tools import tool
from langchain_core.messages import SystemMessage
from langchain_core.runnables import RunnableConfig
from langchain_google_genai import ChatGoogleGenerativeAI
from langgraph.prebuilt import create_react_agent

from mcp_client import McpInvoker


# Configure LLM (Gemini)
def build_llm() -> ChatGoogleGenerativeAI:
    return ChatGoogleGenerativeAI(
        model=os.getenv("GOOGLE_VERTEX_MODEL", "gemini-1.5-pro"),
        temperature=float(os.getenv("LLM_TEMPERATURE", "0.3")),
    )


def build_mcp() -> McpInvoker:
    return McpInvoker(
        command=os.getenv("MCP_COMMAND", "node"),
        args=os.getenv("MCP_ARGS", "lang/mcp/src/index.js").split(),
    )


def build_tools(mcp: McpInvoker):
    @tool
    def match_offers_with_route(
        product_uuid: str,
        destination_uuid: str,
        limit_sources: int = 3,
        limit_routes: int = 3,
        user_token: str | None = None,
    ) -> Any:
        """Find active offers for a product and return route options to destination node."""
        return mcp.call(
            "match_offers_with_route",
            {
                "productUuid": product_uuid,
                "destinationUuid": destination_uuid,
                "limitSources": limit_sources,
                "limitRoutes": limit_routes,
                "userToken": user_token,
            },
        )

    @tool
    def order_timeline(order_uuid: str, user_token: str) -> Any:
        """Get order with stages/trips for a team. Requires user/team token."""
        return mcp.call(
            "order_timeline",
            {
                "orderUuid": order_uuid,
                "userToken": user_token,
            },
        )

    @tool
    def search_nodes(
        transport_type: str | None = None,
        limit: int = 20,
        offset: int = 0,
        user_token: str | None = None,
    ) -> Any:
        """Search logistics nodes with optional transport filter."""
        return mcp.call(
            "search_nodes",
            {
                "transportType": transport_type,
                "limit": limit,
                "offset": offset,
                "userToken": user_token,
            },
        )

    return [match_offers_with_route, order_timeline, search_nodes]


def build_graph():
    llm = build_llm()
    mcp = build_mcp()
    tools = build_tools(mcp)
    system = SystemMessage(
        content=(
            "You are Optovia logistics agent. Prefer calling tools to answer. "
            "Use match_offers_with_route for RFQ-style questions, order_timeline for shipment status, "
            "and search_nodes to explore logistics locations. Keep answers concise."
        )
    )
    app = create_react_agent(
        llm,
        tools,
        # Prepend the system prompt to the running message history; the
        # modifier receives the graph state dict and must return messages.
        state_modifier=lambda state: [system] + state["messages"],
    )
    return app


graph_app = build_graph()


def invoke_graph(input_data: Dict[str, Any], config: RunnableConfig | None = None):
    """Convenience single-call invoke."""
    return graph_app.invoke(input_data, config=config)
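A quick local sketch of the convenience wrapper above; it assumes `GOOGLE_API_KEY` is set and the MCP server is runnable, and the question is illustrative:

```python
from graph import invoke_graph

# One-shot call; the agent decides which MCP tools to invoke.
result = invoke_graph({"messages": [("user", "Find routes for product X to node Y")]})
print(result["messages"][-1].content)
```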

43
lang/agent/mcp_client.py Normal file
View File

@@ -0,0 +1,43 @@
import asyncio
import os
from typing import Any, Dict, Optional

from mcp import ClientSession, StdioServerParameters
from mcp.client.stdio import stdio_client


class McpInvoker:
    """Thin helper to call tools on the Node MCP server."""

    def __init__(
        self,
        command: str = "node",
        args: Optional[list[str]] = None,
        env: Optional[Dict[str, str]] = None,
    ):
        self.command = command
        self.args = args or ["lang/mcp/src/index.js"]
        self.env = env or {}

    async def call_tool(self, name: str, arguments: Dict[str, Any]) -> Any:
        """Call an MCP tool and return the first result."""
        params = StdioServerParameters(
            command=self.command,
            args=self.args,
            env={**os.environ, **self.env},
        )
        async with stdio_client(params) as (read, write):
            async with ClientSession(read, write) as session:
                await session.initialize()
                result = await session.call_tool(name, arguments)
                # result is a CallToolResult with a .content list
                if not result or not getattr(result, "content", None):
                    return None
                first = result.content[0]
                # Unwrap textual payloads when present
                if hasattr(first, "text"):
                    return first.text
                return first

    def call(self, name: str, arguments: Dict[str, Any]) -> Any:
        return asyncio.run(self.call_tool(name, arguments))
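A minimal usage sketch for the helper above (the tool arguments are hypothetical placeholders; the MCP server entry point must exist at the given path):

```python
from mcp_client import McpInvoker

invoker = McpInvoker(command="node", args=["lang/mcp/src/index.js"])
# Hypothetical filter values, for illustration only.
nodes = invoker.call("search_nodes", {"transportType": "rail", "limit": 5})
print(nodes)
```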

8
lang/agent/requirements.txt Normal file
View File

@@ -0,0 +1,8 @@
fastapi==0.110.0
uvicorn==0.27.1
langchain==0.2.7
langgraph==0.1.9
langserve==0.2.2
langchain-google-genai==1.0.3
langfuse==2.39.0
mcp>=1.15.0

50
lang/mcp/README.md Normal file
View File

@@ -0,0 +1,50 @@
# Optovia MCP Server
A Node MCP server that proxies the existing Optovia GraphQL endpoints and injects tokens:
- GEO: routes/nodes (`find_product_routes`, `nodes`)
- Orders: order timeline (`getOrder` via the team endpoint)
- Extension point: m2m/service tokens for public operations, a user/team token for protected ones.
## Quick start
```bash
cd lang/mcp
npm install
npm start
```
## Environment variables
- `GEO_GRAPHQL_URL` — GraphQL endpoint of the geo service.
- `GEO_M2M_TOKEN` — optional service Bearer token for geo (used when no userToken is supplied).
- `ORDERS_GRAPHQL_URL` — GraphQL endpoint of the orders team service.
- `SERVICE_TOKEN_EXCHANGE` / others — add as needed for new tools.
## Tokens
- Tools are annotated as follows:
  - `match_offers_with_route`, `search_nodes` — accept a `userToken` but fall back to `GEO_M2M_TOKEN` when no user token is given.
  - `order_timeline` — requires a `userToken` with the `teams:member` scope and a `team_uuid` in the payload.
- The MCP server does not store the user token; it is forwarded as `Authorization: Bearer ...`. For service-level reads a `*_M2M_TOKEN` can be used.
## Current tools
- `match_offers_with_route(productUuid, destinationUuid, limitSources?, limitRoutes?, userToken?)`
  - calls geo `find_product_routes` and returns route options from offers to the destination node.
- `search_nodes(transportType?, limit?, offset?, userToken?)`
  - lists logistics nodes with coordinates/transport types.
- `order_timeline(orderUuid, userToken)`
  - returns an order with its stages/trips (orders team endpoint, token required).
## .mcp.json config (example)
```json
{
"mcpServers": {
"optovia-mcp": {
"command": "node",
"args": ["lang/mcp/src/index.js"],
"env": {
"GEO_GRAPHQL_URL": "https://geo.optovia.ru/graphql",
"ORDERS_GRAPHQL_URL": "https://orders.optovia.ru/graphql",
"GEO_M2M_TOKEN": "bearer-token-if-needed"
}
}
}
}
```

14
lang/mcp/package.json Normal file
View File

@@ -0,0 +1,14 @@
{
"name": "optovia-mcp",
"version": "0.1.0",
"private": true,
"type": "module",
"main": "src/index.js",
"scripts": {
"start": "node src/index.js"
},
"dependencies": {
"@modelcontextprotocol/sdk": "^1.0.0",
"node-fetch": "^3.3.2"
}
}

266
lang/mcp/src/index.js Normal file
View File

@@ -0,0 +1,266 @@
// Optovia MCP server (Node) that proxies existing GraphQL endpoints with token pass-through.
import { Server } from "@modelcontextprotocol/sdk/server/index.js";
import { StdioServerTransport } from "@modelcontextprotocol/sdk/server/stdio.js";
import {
  CallToolRequestSchema,
  ListToolsRequestSchema,
} from "@modelcontextprotocol/sdk/types.js";
import fetch from "node-fetch";

const server = new Server(
  {
    name: "optovia-mcp",
    version: "0.1.0",
  },
  { capabilities: { tools: {} } }
);

// The low-level SDK Server exposes tools via request handlers rather than a
// registerTool method; collect tool definitions here and wire the two
// handlers once, at the bottom of the file.
const tools = new Map();

function registerTool(definition, handler) {
  tools.set(definition.name, { ...definition, handler });
}

/**
 * Execute a GraphQL request with optional Bearer token.
 */
async function gqlRequest(url, query, variables = {}, token) {
  const res = await fetch(url, {
    method: "POST",
    headers: {
      "Content-Type": "application/json",
      ...(token ? { Authorization: `Bearer ${token}` } : {}),
    },
    body: JSON.stringify({ query, variables }),
  });
  const json = await res.json();
  if (!res.ok || json.errors) {
    const errMsg = json?.errors?.map((e) => e.message).join("; ") ?? res.statusText;
    throw new Error(`GraphQL error: ${errMsg}`);
  }
  return json.data;
}

/**
 * Decide which token to use:
 * - Prefer userToken passed from caller.
 * - Fallback to service token from env (SERVICE_TOKEN_*).
 * - If required and still missing, throw.
 */
function resolveToken({ userToken, serviceTokenEnv, required }) {
  const token = userToken || process.env[serviceTokenEnv || ""];
  if (required && !token) {
    throw new Error("Authorization token is required for this operation.");
  }
  return token;
}
// Tool: match_offers_with_route (leverages GEO find_product_routes)
registerTool(
  {
    name: "match_offers_with_route",
    description:
      "Find active offers for a product and return route options to destination node (uses GEO find_product_routes).",
    inputSchema: {
      type: "object",
      properties: {
        productUuid: { type: "string" },
        destinationUuid: { type: "string" },
        limitSources: { type: "integer", minimum: 1, default: 3 },
        limitRoutes: { type: "integer", minimum: 1, default: 3 },
        userToken: {
          type: "string",
          description: "Optional Bearer token; if omitted uses GEO_M2M_TOKEN env when provided.",
        },
      },
      required: ["productUuid", "destinationUuid"],
    },
  },
  async ({ productUuid, destinationUuid, limitSources = 3, limitRoutes = 3, userToken }) => {
    const geoUrl = process.env.GEO_GRAPHQL_URL;
    if (!geoUrl) {
      throw new Error("GEO_GRAPHQL_URL is not set.");
    }
    const token = resolveToken({
      userToken,
      serviceTokenEnv: "GEO_M2M_TOKEN",
      required: false,
    });
    const query = `
      query MatchOffersRoute($productUuid: String!, $toUuid: String!, $limitSources: Int, $limitRoutes: Int) {
        find_product_routes(product_uuid: $productUuid, to_uuid: $toUuid, limit_sources: $limitSources, limit_routes: $limitRoutes) {
          source_uuid
          source_name
          source_lat
          source_lon
          distance_km
          routes {
            total_distance_km
            total_time_seconds
            stages {
              from_uuid
              from_name
              from_lat
              from_lon
              to_uuid
              to_name
              to_lat
              to_lon
              distance_km
              travel_time_seconds
              transport_type
            }
          }
        }
      }
    `;
    const data = await gqlRequest(
      geoUrl,
      query,
      {
        productUuid,
        toUuid: destinationUuid,
        limitSources,
        limitRoutes,
      },
      token
    );
    return data.find_product_routes ?? [];
  }
);
// Tool: search_nodes (GEO nodes list)
registerTool(
  {
    name: "search_nodes",
    description: "Search logistics nodes (geo service) with optional transport filter.",
    inputSchema: {
      type: "object",
      properties: {
        transportType: { type: "string", description: "auto | rail etc." },
        limit: { type: "integer", minimum: 1, default: 20 },
        offset: { type: "integer", minimum: 0, default: 0 },
        userToken: {
          type: "string",
          description: "Optional Bearer token; if omitted uses GEO_M2M_TOKEN env when provided.",
        },
      },
      required: [],
    },
  },
  async ({ transportType = null, limit = 20, offset = 0, userToken }) => {
    const geoUrl = process.env.GEO_GRAPHQL_URL;
    if (!geoUrl) {
      throw new Error("GEO_GRAPHQL_URL is not set.");
    }
    const token = resolveToken({
      userToken,
      serviceTokenEnv: "GEO_M2M_TOKEN",
      required: false,
    });
    const query = `
      query Nodes($limit: Int, $offset: Int, $transport: String) {
        nodes(limit: $limit, offset: $offset, transport_type: $transport) {
          uuid
          name
          latitude
          longitude
          country
          country_code
          transport_types
        }
      }
    `;
    const data = await gqlRequest(
      geoUrl,
      query,
      { limit, offset, transport: transportType },
      token
    );
    return data.nodes ?? [];
  }
);
// Tool: order_timeline (orders service, team endpoint requires token)
registerTool(
  {
    name: "order_timeline",
    description: "Get order with stages/trips for a team. Requires user/team access token.",
    inputSchema: {
      type: "object",
      properties: {
        orderUuid: { type: "string" },
        userToken: {
          type: "string",
          description: "Required Bearer token with team scope (teams:member).",
        },
      },
      required: ["orderUuid"],
    },
  },
  async ({ orderUuid, userToken }) => {
    const ordersUrl = process.env.ORDERS_GRAPHQL_URL;
    if (!ordersUrl) {
      throw new Error("ORDERS_GRAPHQL_URL is not set.");
    }
    const token = resolveToken({
      userToken,
      required: true,
    });
    const query = `
      query Order($orderUuid: String!) {
        getOrder(orderUuid: $orderUuid) {
          uuid
          name
          teamUuid
          userId
          status
          totalAmount
          currency
          sourceLocationName
          destinationLocationName
          createdAt
          updatedAt
          notes
          stages {
            uuid
            name
            sequence
            stageType
            transportType
            sourceLocationName
            destinationLocationName
            locationName
            selectedCompany { uuid name }
            trips {
              uuid
              name
              sequence
              plannedLoadingDate
              actualLoadingDate
              realLoadingDate
              plannedUnloadingDate
              actualUnloadingDate
              plannedWeight
              weightAtLoading
              weightAtUnloading
              company { uuid name }
            }
          }
        }
      }
    `;
    const data = await gqlRequest(ordersUrl, query, { orderUuid }, token);
    return data.getOrder;
  }
);
// Advertise registered tools and dispatch calls to their handlers.
server.setRequestHandler(ListToolsRequestSchema, async () => ({
  tools: [...tools.values()].map(({ name, description, inputSchema }) => ({
    name,
    description,
    inputSchema,
  })),
}));

server.setRequestHandler(CallToolRequestSchema, async (request) => {
  const tool = tools.get(request.params.name);
  if (!tool) {
    throw new Error(`Unknown tool: ${request.params.name}`);
  }
  const result = await tool.handler(request.params.arguments ?? {});
  return { content: [{ type: "text", text: JSON.stringify(result) }] };
});

async function main() {
  await server.connect(new StdioServerTransport());
}

main().catch((err) => {
  console.error("MCP server failed:", err);
  process.exit(1);
});

25
logto/docker-compose.yml Normal file
View File

@@ -0,0 +1,25 @@
networks:
  dokploy-network:
    external: true

volumes:
  logto-connectors:

services:
  logto:
    image: ghcr.io/logto-io/logto:1.27.0
    entrypoint: ["sh", "-c", "npm run cli db seed -- --swe && npm start"]
    expose:
      - "3001"
      - "3002"
    environment:
      TRUST_PROXY_HEADER: 1
      DB_URL: ${LOGTO_DB_URL}
      ENDPOINT: ${LOGTO_ENDPOINT}
      ADMIN_ENDPOINT: ${LOGTO_ADMIN_ENDPOINT}
    volumes:
      - logto-connectors:/etc/logto/packages/core/connectors
    networks:
      dokploy-network:
        aliases:
          - logto

26
odoo/Dockerfile Normal file
View File

@@ -0,0 +1,26 @@
FROM odoo:18.0
USER root
# Copy requirements and install Python dependencies
COPY requirements.txt /tmp/requirements.txt
RUN pip3 install --break-system-packages --no-cache-dir --ignore-installed typing-extensions -r /tmp/requirements.txt
# Copy production configuration
COPY ./config/odoo.prod.conf /etc/odoo/odoo.conf
# Fix permissions on /var/lib/odoo so assets build correctly
RUN chown -R odoo:odoo /var/lib/odoo && \
    chmod -R u+w /var/lib/odoo
# Copy entrypoint script
COPY ./entrypoint.py /entrypoint.py
# Copy addons
COPY ./addons /mnt/extra-addons
RUN chown -R odoo:odoo /mnt/extra-addons
USER odoo
# Use Python SDK to load secrets and start Odoo
CMD ["python3", "/entrypoint.py"]

View File

@@ -0,0 +1,167 @@
==========
Components
==========
..
!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!
!! This file is generated by oca-gen-addon-readme !!
!! changes will be overwritten. !!
!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!
!! source digest: sha256:2785951ba7cf6288c667291264099df031ca3d90d9c79c04a2d5cddec6c85641
!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!
.. |badge1| image:: https://img.shields.io/badge/maturity-Production%2FStable-green.png
:target: https://odoo-community.org/page/development-status
:alt: Production/Stable
.. |badge2| image:: https://img.shields.io/badge/licence-LGPL--3-blue.png
:target: http://www.gnu.org/licenses/lgpl-3.0-standalone.html
:alt: License: LGPL-3
.. |badge3| image:: https://img.shields.io/badge/github-OCA%2Fconnector-lightgray.png?logo=github
:target: https://github.com/OCA/connector/tree/18.0/component
:alt: OCA/connector
.. |badge4| image:: https://img.shields.io/badge/weblate-Translate%20me-F47D42.png
:target: https://translation.odoo-community.org/projects/connector-18-0/connector-18-0-component
:alt: Translate me on Weblate
.. |badge5| image:: https://img.shields.io/badge/runboat-Try%20me-875A7B.png
:target: https://runboat.odoo-community.org/builds?repo=OCA/connector&target_branch=18.0
:alt: Try me on Runboat
|badge1| |badge2| |badge3| |badge4| |badge5|
This module implements a component system and is a base block for the
Connector Framework. It can be used without using the full Connector
though.
Documentation: http://odoo-connector.com/
You may also want to check the `Introduction to Odoo
Components <https://dev.to/guewen/introduction-to-odoo-components-bn0>`__
by @guewen.
**Table of contents**
.. contents::
:local:
Usage
=====
As a developer, you have access to a component system. You can find the
documentation in the code or on http://odoo-connector.com
In a nutshell, you can create components:
::
from odoo.addons.component.core import Component
class MagentoPartnerAdapter(Component):
_name = 'magento.partner.adapter'
_inherit = 'magento.adapter'
_usage = 'backend.adapter'
_collection = 'magento.backend'
_apply_on = ['res.partner']
And later, find the component you need at runtime (dynamic dispatch at
component level):
::
def run(self, external_id):
backend_adapter = self.component(usage='backend.adapter')
external_data = backend_adapter.read(external_id)
In order for tests using components to work, you will need to use the
base class provided by \`odoo.addons.component.tests.common\`:
- TransactionComponentCase
There are also some specific base classes for testing the component
registry, using the ComponentRegistryCase as a base class. See the
docstrings in tests/common.py.
Changelog
=========
16.0.1.0.0 (2022-10-04)
-----------------------
- [MIGRATION] from 15.0
15.0.1.0.0 (2021-11-25)
-----------------------
- [MIGRATION] from 14.0
14.0.1.0.0 (2020-10-22)
-----------------------
- [MIGRATION] from 13.0
13.0.1.0.0 (2019-10-23)
-----------------------
- [MIGRATION] from 12.0
12.0.1.0.0 (2018-10-02)
-----------------------
- [MIGRATION] from 11.0 branched at rev. 324e006
Bug Tracker
===========
Bugs are tracked on `GitHub Issues <https://github.com/OCA/connector/issues>`_.
In case of trouble, please check there if your issue has already been reported.
If you spotted it first, help us to smash it by providing a detailed and welcomed
`feedback <https://github.com/OCA/connector/issues/new?body=module:%20component%0Aversion:%2018.0%0A%0A**Steps%20to%20reproduce**%0A-%20...%0A%0A**Current%20behavior**%0A%0A**Expected%20behavior**>`_.
Do not contact contributors directly about support or help with technical issues.
Credits
=======
Authors
-------
* Camptocamp
Contributors
------------
- Guewen Baconnier <guewen.baconnier@camptocamp.com>
- Laurent Mignon <laurent.mignon@acsone.eu>
- Simone Orsi <simone.orsi@camptocamp.com>
- Thien Vo <thienvh@trobz.com>
Other credits
-------------
The migration of this module from 17.0 to 18.0 was financially supported
by Camptocamp.
Maintainers
-----------
This module is maintained by the OCA.
.. image:: https://odoo-community.org/logo.png
:alt: Odoo Community Association
:target: https://odoo-community.org
OCA, or the Odoo Community Association, is a nonprofit organization whose
mission is to support the collaborative development of Odoo features and
promote its widespread use.
.. |maintainer-guewen| image:: https://github.com/guewen.png?size=40px
:target: https://github.com/guewen
:alt: guewen
Current `maintainer <https://odoo-community.org/page/maintainer-role>`__:
|maintainer-guewen|
This module is part of the `OCA/connector <https://github.com/OCA/connector/tree/18.0/component>`_ project on GitHub.
You are welcome to contribute. To learn how please visit https://odoo-community.org/page/Contribute.

View File

@@ -0,0 +1,5 @@
from . import core
from . import components
from . import builder
from . import models

View File

@@ -0,0 +1,22 @@
# Copyright 2017 Camptocamp SA
# License LGPL-3.0 or later (http://www.gnu.org/licenses/lgpl.html)

{
    "name": "Components",
    "summary": "Add capabilities to register and use decoupled components,"
    " as an alternative to model classes",
    "version": "18.0.1.0.0",
    "author": "Camptocamp," "Odoo Community Association (OCA)",
    "website": "https://github.com/OCA/connector",
    "license": "LGPL-3",
    "category": "Generic Modules",
    "depends": ["base"],
    "external_dependencies": {
        "python": [
            "cachetools",
        ]
    },
    "installable": True,
    "development_status": "Production/Stable",
    "maintainers": ["guewen"],
}

View File

@@ -0,0 +1,97 @@
# Copyright 2019 Camptocamp SA
# License LGPL-3.0 or later (http://www.gnu.org/licenses/lgpl.html)

"""
Components Builder
==================

Build the components at the build of a registry.

"""

import odoo
from odoo import models

from .core import DEFAULT_CACHE_SIZE, ComponentRegistry, _component_databases


class ComponentBuilder(models.AbstractModel):
    """Build the component classes

    And register them in a global registry.

    Every time an Odoo registry is built, the known components are cleared and
    rebuilt as well. The Component classes are built using the same mechanism
    as Odoo's Models: a final class is created, taking every Component with
    a ``_name`` and applying Components with an ``_inherits`` upon them.

    The final Component classes are registered in a global registry.

    This class is an Odoo model, allowing us to hook the build of the
    components at the end of the Odoo's registry loading, using
    ``_register_hook``. This method is called after all modules are loaded, so
    we are sure that we have all the components Classes and in the correct
    order.

    """

    _name = "component.builder"
    _description = "Component Builder"

    _components_registry_cache_size = DEFAULT_CACHE_SIZE

    def _register_hook(self):
        # This method is called by Odoo when the registry is built,
        # so in case the registry is rebuilt (cache invalidation, ...),
        # we have to rebuild the components. We use a new
        # registry so we have an empty cache and we'll add components in it.
        components_registry = self._init_global_registry()
        self.build_registry(components_registry)
        components_registry.ready = True

    def _init_global_registry(self):
        components_registry = ComponentRegistry(
            cachesize=self._components_registry_cache_size
        )
        _component_databases[self.env.cr.dbname] = components_registry
        return components_registry

    def build_registry(self, components_registry, states=None, exclude_addons=None):
        if not states:
            states = ("installed", "to upgrade")
        # lookup all the installed (or about to be) addons and generate
        # the graph, so we can load the components following the order
        # of the addons' dependencies
        graph = odoo.modules.graph.Graph()
        graph.add_module(self.env.cr, "base")

        query = "SELECT name FROM ir_module_module WHERE state IN %s"
        params = [tuple(states)]
        if exclude_addons:
            query += " AND name NOT IN %s"
            params.append(tuple(exclude_addons))
        self.env.cr.execute(query, params)

        module_list = [name for (name,) in self.env.cr.fetchall() if name not in graph]
        graph.add_modules(self.env.cr, module_list)

        for module in graph:
            self.load_components(module.name, components_registry=components_registry)

    def load_components(self, module, components_registry=None):
        """Build every component known by MetaComponent for an odoo module

        The final component (composed by all the Component classes in this
        module) will be pushed into the registry.

        :param module: the name of the addon for which we want to load
                       the components
        :type module: str | unicode
        :param components_registry: the registry in which we want to put
                                    the Component
        :type components_registry: :py:class:`~.core.ComponentRegistry`
        """
        components_registry = (
            components_registry or _component_databases[self.env.cr.dbname]
        )
        components_registry.load_components(module)

View File

@@ -0,0 +1 @@
from . import base

View File

@@ -0,0 +1,15 @@
# Copyright 2017 Camptocamp SA
# License LGPL-3.0 or later (http://www.gnu.org/licenses/lgpl.html)

from ..core import AbstractComponent


class BaseComponent(AbstractComponent):
    """This is the base component for every component

    It is implicitly inherited by all components.

    All your base are belong to us
    """

    _name = "base"

View File

@@ -0,0 +1,939 @@
# Copyright 2017 Camptocamp SA
# Copyright 2017 Odoo
# License LGPL-3.0 or later (http://www.gnu.org/licenses/lgpl.html)
"""
Core
====
Core classes for the components.
The most common classes used publicly are:
* :class:`Component`
* :class:`AbstractComponent`
* :class:`WorkContext`
"""
import logging
import operator
from collections import OrderedDict, defaultdict
from odoo import models
from odoo.tools.misc import LastOrderedSet, OrderedSet
from .exception import NoComponentError, RegistryNotReadyError, SeveralComponentError
_logger = logging.getLogger(__name__)
try:
from cachetools import LRUCache, cachedmethod
except ImportError:
_logger.debug("Cannot import 'cachetools'.")
# The Cache size represents the number of items, so the number
# of components (include abstract components) we will keep in the LRU
# cache. We would need stats to know what is the average but this is a bit
# early.
DEFAULT_CACHE_SIZE = 512
# this is duplicated from odoo.models.MetaModel._get_addon_name() which we
# unfortunately can't use because it's an instance method and should have been
# a @staticmethod
def _get_addon_name(full_name):
# The (Odoo) module name can be in the ``odoo.addons`` namespace
# or not. For instance, module ``sale`` can be imported as
# ``odoo.addons.sale`` (the right way) or ``sale`` (for backward
# compatibility).
module_parts = full_name.split(".")
if len(module_parts) > 2 and module_parts[:2] == ["odoo", "addons"]:
addon_name = full_name.split(".")[2]
else:
addon_name = full_name.split(".")[0]
return addon_name
class ComponentDatabases(dict):
"""Holds a registry of components for each database"""
class ComponentRegistry:
"""Store all the components and allow to find them using criteria
The key is the ``_name`` of the components.
This is an OrderedDict, because we want to keep the registration order of
the components, addons loaded first have their components found first.
The :attr:`ready` attribute must be set to ``True`` when all the components
are loaded.
"""
def __init__(self, cachesize=DEFAULT_CACHE_SIZE):
self._cache = LRUCache(maxsize=cachesize)
self._components = OrderedDict()
self._loaded_modules = set()
self.ready = False
def __getitem__(self, key):
return self._components[key]
def __setitem__(self, key, value):
self._components[key] = value
def __contains__(self, key):
return key in self._components
def get(self, key, default=None):
return self._components.get(key, default)
def __iter__(self):
return iter(self._components)
def load_components(self, module):
if module in self._loaded_modules:
return
for component_class in MetaComponent._modules_components[module]:
component_class._build_component(self)
self._loaded_modules.add(module)
@cachedmethod(operator.attrgetter("_cache"))
def lookup(self, collection_name=None, usage=None, model_name=None):
"""Find and return a list of components for a usage
If a component is not registered in a particular collection (no
``_collection``), it will be returned in any case (as far as
the ``usage`` and ``model_name`` match). This is useful to share
generic components across different collections.
If no collection name is given, components from any collection
will be returned.
Then, the components of a collection are filtered by usage and/or
model. The ``_usage`` is mandatory on the components. When the
``_model_name`` is empty, it means it can be used for every models,
and it will ignore the ``model_name`` argument.
The abstract components are never returned.
This is a rather low-level function, usually you will use the
high-level :meth:`AbstractComponent.component`,
:meth:`AbstractComponent.many_components` or even
:meth:`AbstractComponent.component_by_name`.
:param collection_name: the name of the collection the component is
registered into.
:param usage: the usage of component we are looking for
:param model_name: filter on components that apply on this model
"""
# keep the order so addons loaded first have components used first
candidates = (
component
for component in self._components.values()
if not component._abstract
)
if collection_name is not None:
candidates = (
component
for component in candidates
if (
component._collection == collection_name
or component._collection is None
)
)
if usage is not None:
candidates = (
component for component in candidates if component._usage == usage
)
if model_name is not None:
candidates = (
c
for c in candidates
if c.apply_on_models is None or model_name in c.apply_on_models
)
return list(candidates)
# We will store a ComponentRegistry per database here,
# it will be cleared and updated when the odoo's registry is rebuilt
_component_databases = ComponentDatabases()
class WorkContext:
"""Transport the context required to work with components
It is propagated through all the components, so any
data or instance (like a random RPC client) that need
to be propagated transversally to the components
should be kept here.
Including:
.. attribute:: model_name
Name of the model we are working with. It means that any lookup for a
component will be done for this model. It also provides a shortcut
as a `model` attribute to use directly with the Odoo model from
the components
.. attribute:: collection
The collection we are working with. The collection is an Odoo
Model that inherit from 'collection.base'. The collection attribute
can be a record or an "empty" model.
.. attribute:: model
Odoo Model for ``model_name`` with the same Odoo
:class:`~odoo.api.Environment` than the ``collection`` attribute.
This is also the entrypoint to work with the components.
::
collection = self.env['my.collection'].browse(1)
work = WorkContext(model_name='res.partner', collection=collection)
component = work.component(usage='record.importer')
Usually you will use the context manager on the ``collection.base`` Model:
::
collection = self.env['my.collection'].browse(1)
with collection.work_on('res.partner') as work:
component = work.component(usage='record.importer')
It supports any arbitrary keyword arguments that will become attributes of
the instance, and be propagated throughout all the components.
::
collection = self.env['my.collection'].browse(1)
with collection.work_on('res.partner', hello='world') as work:
assert work.hello == 'world'
When you need to work on a different model, a new work instance will be
created for you when you are using the high-level API. This is what
happens under the hood:
::
collection = self.env['my.collection'].browse(1)
with collection.work_on('res.partner', hello='world') as work:
assert work.model_name == 'res.partner'
assert work.hello == 'world'
work2 = work.work_on('res.users')
# => spawn a new WorkContext with a copy of the attributes
assert work2.model_name == 'res.users'
assert work2.hello == 'world'
"""
def __init__(
self, model_name=None, collection=None, components_registry=None, **kwargs
):
self.collection = collection
self.model_name = model_name
self.model = self.env[model_name]
# Allow propagation of custom component registry via context
if collection:
custom_registry = collection.env.context.get("components_registry")
if custom_registry:
components_registry = custom_registry
# lookup components in an alternative registry, used by the tests
if components_registry is not None:
self.components_registry = components_registry
else:
dbname = self.env.cr.dbname
try:
self.components_registry = _component_databases[dbname]
except KeyError as exc:
msg = (
"No component registry for database %s. "
"Probably because the Odoo registry has not been built "
"yet."
)
_logger.error(
msg,
dbname,
)
raise RegistryNotReadyError(msg) from exc
self._propagate_kwargs = ["collection", "model_name", "components_registry"]
for attr_name, value in kwargs.items():
setattr(self, attr_name, value)
self._propagate_kwargs.append(attr_name)
@property
def env(self):
"""Return the current Odoo env
This is the environment of the current collection.
"""
return self.collection.env
def work_on(self, model_name=None, collection=None):
"""Create a new work context for another model keeping attributes
Used when one need to lookup components for another model.
"""
kwargs = {
attr_name: getattr(self, attr_name) for attr_name in self._propagate_kwargs
}
if collection is not None:
kwargs["collection"] = collection
if model_name is not None:
kwargs["model_name"] = model_name
return self.__class__(**kwargs)
def _component_class_by_name(self, name):
components_registry = self.components_registry
component_class = components_registry.get(name)
if not component_class:
raise NoComponentError(f"No component with name '{name}' found.")
return component_class
def component_by_name(self, name, model_name=None):
"""Return a component by its name
If the component exists, an instance of it will be returned,
initialized with the current :class:`WorkContext`.
A :exc:`odoo.addons.component.exception.NoComponentError` is raised
if:
* no component with this name exists
* the ``_apply_on`` of the found component does not match
with the current working model
In the latter case, it can be an indication that you need to switch to
a different model, you can do so by providing the ``model_name``
argument.
"""
if isinstance(model_name, models.BaseModel):
model_name = model_name._name
component_class = self._component_class_by_name(name)
work_model = model_name or self.model_name
if (
component_class._collection
and self.collection._name != component_class._collection
):
raise NoComponentError(
f"""Component with name '{name}' can't be used for collection
'{self.collection._name}'."""
)
if (
component_class.apply_on_models
and work_model not in component_class.apply_on_models
):
if len(component_class.apply_on_models) == 1:
hint_models = f"'{component_class.apply_on_models[0]}'"
else:
hint_models = f"<one of {component_class.apply_on_models!r}>"
raise NoComponentError(
f"Component with name '{name}' can't be used for model '{work_model}'."
f"\nHint: you might want to use: "
f"component_by_name('{name}', model_name={hint_models})"
)
if work_model == self.model_name:
work_context = self
else:
work_context = self.work_on(model_name)
return component_class(work_context)
def _lookup_components(self, usage=None, model_name=None, **kw):
component_classes = self.components_registry.lookup(
self.collection._name, usage=usage, model_name=model_name
)
matching_components = []
for cls in component_classes:
try:
matching = cls._component_match(
self, usage=usage, model_name=model_name, **kw
)
except TypeError as err:
# Backward compat
_logger.info(str(err))
_logger.info(
"The signature of %s._component_match has changed. "
"Please, adapt your code as "
"(self, usage=usage, model_name=model_name, **kw)",
cls.__name__,
)
matching = cls._component_match(self)
if matching:
matching_components.append(cls)
return matching_components
def _filter_components_by_collection(self, component_classes):
return [c for c in component_classes if c._collection == self.collection._name]
def _filter_components_by_model(self, component_classes, model_name):
return [
c
for c in component_classes
if c.apply_on_models and model_name in c.apply_on_models
]
def _ensure_model_name(self, model_name):
"""Make sure model name is a string or fallback to current ctx value."""
if isinstance(model_name, models.BaseModel):
model_name = model_name._name
return model_name or self.model_name
def _matching_components(self, usage=None, model_name=None, **kw):
"""Retrieve matching components and their work context."""
component_classes = self._lookup_components(
usage=usage, model_name=model_name, **kw
)
if model_name == self.model_name:
work_context = self
else:
work_context = self.work_on(model_name)
return component_classes, work_context
def component(self, usage=None, model_name=None, **kw):
"""Find a component by usage and model for the current collection
It searches a component using the rules of
:meth:`ComponentRegistry.lookup`. When a component is found,
it initialize it with the current :class:`WorkContext` and returned.
A component with a ``_apply_on`` matching the asked ``model_name``
takes precedence over a generic component without ``_apply_on``.
A component with a ``_collection`` matching the current collection
takes precedence over a generic component without ``_collection``.
This behavior allows to define generic components across collections
and/or models and override them only for a particular collection and/or
model.
A :exc:`odoo.addons.component.exception.SeveralComponentError` is
raised if more than one component match for the provided
``usage``/``model_name``.
A :exc:`odoo.addons.component.exception.NoComponentError` is raised
if no component is found for the provided ``usage``/``model_name``.
"""
model_name = self._ensure_model_name(model_name)
component_classes, work_context = self._matching_components(
usage=usage, model_name=model_name, **kw
)
if not component_classes:
raise NoComponentError(
f"No component found for collection '{self.collection._name}', "
f"usage '{usage}', model_name '{model_name}'."
)
elif len(component_classes) > 1:
# If we have more than one component, try to find the one
# specifically linked to the collection...
component_classes = self._filter_components_by_collection(component_classes)
if len(component_classes) > 1:
# ... or try to find the one specifically linked to the model
component_classes = self._filter_components_by_model(
component_classes, model_name
)
if len(component_classes) != 1:
raise SeveralComponentError(
"Several components found for collection '{}', "
"usage '{}', model_name '{}'. Found: {}".format(
self.collection._name,
usage or "",
model_name or "",
component_classes,
)
)
return component_classes[0](work_context)
def many_components(self, usage=None, model_name=None, **kw):
"""Find many components by usage and model for the current collection
It searches a component using the rules of
:meth:`ComponentRegistry.lookup`. When components are found, they
initialized with the current :class:`WorkContext` and returned as a
list.
If no component is found, an empty list is returned.
"""
model_name = self._ensure_model_name(model_name)
component_classes, work_context = self._matching_components(
usage=usage, model_name=model_name, **kw
)
return [comp(work_context) for comp in component_classes]
def __str__(self):
return f"WorkContext({self.model_name}, {repr(self.collection)})"
__repr__ = __str__
class MetaComponent(type):
"""Metaclass for Components
Every new :class:`Component` will be added to ``_modules_components``,
that will be used by the component builder.
"""
_modules_components = defaultdict(list)
def __init__(cls, name, bases, attrs):
if not cls._register:
cls._register = True
super().__init__(name, bases, attrs)
return
# If components are declared in tests, exclude them from the
# "components of the addon" list. If not, when we use the
# "load_components" method, all the test components would be loaded.
# This should never be an issue when running the app normally, as the
# Python tests should never be executed. But this is an issue when a
# test creates a test components for the purpose of the test, then a
# second tests uses the "load_components" to load all the addons of the
# module: it will load the component of the previous test.
if "tests" in cls.__module__.split("."):
return
if not hasattr(cls, "_module"):
cls._module = _get_addon_name(cls.__module__)
cls._modules_components[cls._module].append(cls)
@property
def apply_on_models(cls):
# None means all models
if cls._apply_on is None:
return None
# always return a list, used for the lookup
elif isinstance(cls._apply_on, str):
return [cls._apply_on]
return cls._apply_on
class AbstractComponent(metaclass=MetaComponent):
"""Main Component Model
All components have a Python inheritance either on
:class:`AbstractComponent` or either on :class:`Component`.
Abstract Components will not be returned by lookups on components, however
they can be used as a base for other Components through inheritance (using
``_inherit``).
Inheritance mechanism
The inheritance mechanism is like the Odoo's one for Models. Each
component has a ``_name``. This is the absolute minimum in a Component
class.
::
class MyComponent(Component):
_name = 'my.component'
def speak(self, message):
print message
Every component implicitly inherit from the `'base'` component.
There are two close but distinct inheritance types, which look
familiar if you already know Odoo. The first uses ``_inherit`` with
an existing name, the name of the component we want to extend. With
the following example, ``my.component`` is now able to speak and to
yell.
::
class MyComponent(Component): # name of the class does not matter
_inherit = 'my.component'
def yell(self, message):
print message.upper()
The second has a different ``_name``, it creates a new component,
including the behavior of the inherited component, but without
modifying it. In the following example, ``my.component`` is still able
to speak and to yell (brough by the previous inherit), but not to
sing. ``another.component`` is able to speak, to yell and to sing.
::
class AnotherComponent(Component):
_name = 'another.component'
_inherit = 'my.component'
def sing(self, message):
print message.upper()
Registration and lookups
It is handled by 3 attributes on the class:
_collection
The name of the collection where we want to register the
component. This is not strictly mandatory as a component can be
shared across several collections. But usually, you want to set a
collection to segregate the components for a domain. A collection
can be for instance ``magento.backend``. It is also the name of a
model that inherits from ``collection.base``. See also
:class:`~WorkContext` and
:class:`~odoo.addons.component.models.collection.Collection`.
_apply_on
List of names or name of the Odoo model(s) for which the component
can be used. When not set, the component can be used on any model.
_usage
The collection and the model (``_apply_on``) will help to filter
the candidate components according to our working context (e.g. I'm
working on ``magento.backend`` with the model
``magento.res.partner``). The usage will define **what** kind of
task the component we are looking for serves to. For instance, it
might be ``record.importer``, ``export.mapper``... but you can be
as creative as you want.
Now, to get a component, you'll likely use
:meth:`WorkContext.component` when you start to work with components
in your flow, but then from within your components, you are more
likely to use one of:
* :meth:`component`
* :meth:`many_components`
* :meth:`component_by_name` (more rarely though)
Declaration of some Components can look like::
class FooBar(models.Model):
_name = 'foo.bar.collection'
_inherit = 'collection.base' # this inherit is required
class FooBarBase(AbstractComponent):
_name = 'foo.bar.base'
_collection = 'foo.bar.collection' # name of the model above
class Foo(Component):
_name = 'foo'
_inherit = 'foo.bar.base' # we will inherit the _collection
_apply_on = 'res.users'
_usage = 'speak'
def utter(self, message):
print message
class Bar(Component):
_name = 'bar'
_inherit = 'foo.bar.base' # we will inherit the _collection
_apply_on = 'res.users'
_usage = 'yell'
def utter(self, message):
print message.upper() + '!!!'
class Vocalizer(Component):
_name = 'vocalizer'
_inherit = 'foo.bar.base'
_usage = 'vocalizer'
# can be used for any model
def vocalize(action, message):
self.component(usage=action).utter(message)
And their usage::
>>> coll = self.env['foo.bar.collection'].browse(1)
>>> with coll.work_on('res.users') as work:
... vocalizer = work.component(usage='vocalizer')
... vocalizer.vocalize('speak', 'hello world')
...
hello world
... vocalizer.vocalize('yell', 'hello world')
HELLO WORLD!!!
Hints:
* If you want to create components without ``_apply_on``, choose a
``_usage`` that will not conflict with other existing components.
* Unless this is what you want and in that case you use
:meth:`many_components` which will return all components for a usage
with a matching or a not set ``_apply_on``.
* It is advised to namespace the names of the components (e.g.
``magento.xxx``) to prevent conflicts between addons.
"""
_register = False
_abstract = True
# used for inheritance
_name = None #: Name of the component
#: Name or list of names of the component(s) to inherit from
_inherit = None
#: name of the collection to subscribe in
_collection = None
#: List of models on which the component can be applied.
#: None means any Model, can be a list ['res.users', ...]
_apply_on = None
#: Component purpose ('import.mapper', ...).
_usage = None
def __init__(self, work_context):
super().__init__()
self.work = work_context
@classmethod
def _component_match(cls, work, usage=None, model_name=None, **kw):
"""Evaluated on candidate components
When a component lookup is done and candidate(s) have
been found for a usage, a final call is done on this method.
If the method return False, the candidate component is ignored.
It can be used for instance to dynamically choose a component
according to a value in the :class:`WorkContext`.
Beware, if the lookups from usage, model and collection are
cached, the calls to :meth:`_component_match` are executed
each time we get components. Heavy computation should be
avoided.
:param work: the :class:`WorkContext` we are working with
"""
return True
@property
def collection(self):
"""Collection we are working with"""
return self.work.collection
@property
def env(self):
"""Current Odoo environment, the one of the collection record"""
return self.work.env
@property
def model(self):
"""The model instance we are working with"""
return self.work.model
def component_by_name(self, name, model_name=None):
"""Return a component by its name
Shortcut to :meth:`~WorkContext.component_by_name`
"""
return self.work.component_by_name(name, model_name=model_name)
def component(self, usage=None, model_name=None, **kw):
"""Return a component
Shortcut to :meth:`~WorkContext.component`
"""
return self.work.component(usage=usage, model_name=model_name, **kw)
def many_components(self, usage=None, model_name=None, **kw):
"""Return several components
Shortcut to :meth:`~WorkContext.many_components`
"""
return self.work.many_components(usage=usage, model_name=model_name, **kw)
def __str__(self):
return f"Component({self._name})"
__repr__ = __str__
@classmethod
def _build_component(cls, registry):
"""Instantiate a given Component in the components registry.
This method is called at the end of the Odoo's registry build. The
caller is :meth:`component.builder.ComponentBuilder.load_components`.
It generates new classes, which will be the Component classes we will
be using. The new classes are generated following the inheritance
of ``_inherit``. It ensures that the ``__bases__`` of the generated
Component classes follow the ``_inherit`` chain.
Once a Component class is created, it adds it in the Component Registry
(:class:`ComponentRegistry`), so it will be available for
lookups.
At the end of new class creation, a hook method
:meth:`_complete_component_build` is called, so you can customize
further the created components. An example can be found in
:meth:`odoo.addons.connector.components.mapper.Mapper._complete_component_build`
The following code is roughly the same than the Odoo's one for
building Models.
"""
# In the simplest case, the component's registry class inherits from
# cls and the other classes that define the component in a flat
# hierarchy. The registry contains the instance ``component`` (on the
# left). Its class, ``ComponentClass``, carries inferred metadata that
# is shared between all the component's instances for this registry
# only.
#
# class A1(Component): Component
# _name = 'a' / | \
# A3 A2 A1
# class A2(Component): \ | /
# _inherit = 'a' ComponentClass
#
# class A3(Component):
# _inherit = 'a'
#
# When a component is extended by '_inherit', its base classes are
# modified to include the current class and the other inherited
# component classes.
# Note that we actually inherit from other ``ComponentClass``, so that
# extensions to an inherited component are immediately visible in the
# current component class, like in the following example:
#
# class A1(Component):
# _name = 'a' Component
# / / \ \
# class B1(Component): / A2 A1 \
# _name = 'b' / \ / \
# B2 ComponentA B1
# class B2(Component): \ | /
# _name = 'b' \ | /
# _inherit = ['b', 'a'] \ | /
# ComponentB
# class A2(Component):
# _inherit = 'a'
# determine inherited components
parents = cls._inherit
if isinstance(parents, str):
parents = [parents]
elif parents is None:
parents = []
if cls._name in registry and not parents:
raise TypeError(
f"Component {cls._name} (in class {cls}) already exists. "
"Consider using _inherit instead of _name "
"or using a different _name."
)
# determine the component's name
name = cls._name or (len(parents) == 1 and parents[0])
if not name:
raise TypeError(f"Component {cls} must have a _name")
# all components except 'base' implicitly inherit from 'base'
if name != "base":
parents = list(parents) + ["base"]
# create or retrieve the component's class
if name in parents:
if name not in registry:
raise TypeError(f"Component {name} does not exist in registry.")
ComponentClass = registry[name]
ComponentClass._build_component_check_base(cls)
check_parent = ComponentClass._build_component_check_parent
else:
ComponentClass = type(
name,
(AbstractComponent,),
{
"_name": name,
"_register": False,
# names of children component
"_inherit_children": OrderedSet(),
},
)
check_parent = cls._build_component_check_parent
# determine all the classes the component should inherit from
bases = LastOrderedSet([cls])
for parent in parents:
if parent not in registry:
raise TypeError(
f"Component {name} inherits from non-existing component {parent}."
)
parent_class = registry[parent]
if parent == name:
for base in parent_class.__bases__:
bases.add(base)
else:
check_parent(cls, parent_class)
bases.add(parent_class)
parent_class._inherit_children.add(name)
ComponentClass.__bases__ = tuple(bases)
ComponentClass._complete_component_build()
registry[name] = ComponentClass
return ComponentClass
@classmethod
def _build_component_check_base(cls, extend_cls):
"""Check whether ``cls`` can be extended with ``extend_cls``."""
if cls._abstract and not extend_cls._abstract:
msg = (
"%s transforms the abstract component %r into a "
"non-abstract component. "
"That class should either inherit from AbstractComponent, "
"or set a different '_name'."
)
raise TypeError(msg % (extend_cls, cls._name))
@classmethod
def _build_component_check_parent(component_class, cls, parent_class): # noqa: B902
"""Check whether ``model_class`` can inherit from ``parent_class``."""
if component_class._abstract and not parent_class._abstract:
msg = (
"In %s, the abstract Component %r cannot inherit "
"from the non-abstract Component %r."
)
raise TypeError(msg % (cls, component_class._name, parent_class._name))
@classmethod
def _complete_component_build(cls):
"""Complete build of the new component class
After the component has been built from its bases, this method is
called, and can be used to customize the class before it can be used.
Nothing is done in the base Component, but a Component can inherit
the method to add its own behavior.
"""
class Component(AbstractComponent):
"""Concrete Component class
This is the class you inherit from when you want your component to
be registered in the component collections.
Look in :class:`AbstractComponent` for more details.
"""
_register = False
_abstract = False

View File

@@ -0,0 +1,18 @@
# Copyright 2017 Camptocamp SA
# License LGPL-3.0 or later (http://www.gnu.org/licenses/lgpl.html)


class ComponentException(Exception):
    """Base Exception for the components"""


class NoComponentError(ComponentException):
    """No component has been found"""


class SeveralComponentError(ComponentException):
    """More than one component have been found"""


class RegistryNotReadyError(ComponentException):
    """Component registry not ready yet for given DB."""

View File

@@ -0,0 +1,32 @@
# Translation of Odoo Server.
# This file contains the translation of the following modules:
# * component
#
# Translators:
# OCA Transbot <transbot@odoo-community.org>, 2018
msgid ""
msgstr ""
"Project-Id-Version: Odoo Server 11.0\n"
"Report-Msgid-Bugs-To: \n"
"POT-Creation-Date: 2018-01-05 16:56+0000\n"
"PO-Revision-Date: 2018-01-05 16:56+0000\n"
"Last-Translator: OCA Transbot <transbot@odoo-community.org>, 2018\n"
"Language-Team: Amharic (https://www.transifex.com/oca/teams/23907/am/)\n"
"Language: am\n"
"MIME-Version: 1.0\n"
"Content-Type: text/plain; charset=UTF-8\n"
"Content-Transfer-Encoding: \n"
"Plural-Forms: nplurals=2; plural=(n > 1);\n"
#. module: component
#: model:ir.model,name:component.model_collection_base
msgid "Base Abstract Collection"
msgstr ""
#. module: component
#: model:ir.model,name:component.model_component_builder
msgid "Component Builder"
msgstr ""
#~ msgid "ID"
#~ msgstr "ID"

View File

@@ -0,0 +1,32 @@
# Translation of Odoo Server.
# This file contains the translation of the following modules:
# * component
#
# Translators:
# OCA Transbot <transbot@odoo-community.org>, 2018
msgid ""
msgstr ""
"Project-Id-Version: Odoo Server 11.0\n"
"Report-Msgid-Bugs-To: \n"
"POT-Creation-Date: 2018-01-05 16:56+0000\n"
"PO-Revision-Date: 2018-01-05 16:56+0000\n"
"Last-Translator: OCA Transbot <transbot@odoo-community.org>, 2018\n"
"Language-Team: Catalan (https://www.transifex.com/oca/teams/23907/ca/)\n"
"Language: ca\n"
"MIME-Version: 1.0\n"
"Content-Type: text/plain; charset=UTF-8\n"
"Content-Transfer-Encoding: \n"
"Plural-Forms: nplurals=2; plural=(n != 1);\n"
#. module: component
#: model:ir.model,name:component.model_collection_base
msgid "Base Abstract Collection"
msgstr ""
#. module: component
#: model:ir.model,name:component.model_component_builder
msgid "Component Builder"
msgstr ""
#~ msgid "ID"
#~ msgstr "ID"

View File

@@ -0,0 +1,24 @@
# Translation of Odoo Server.
# This file contains the translation of the following modules:
# * component
#
msgid ""
msgstr ""
"Project-Id-Version: Odoo Server 18.0\n"
"Report-Msgid-Bugs-To: \n"
"Last-Translator: \n"
"Language-Team: \n"
"MIME-Version: 1.0\n"
"Content-Type: text/plain; charset=UTF-8\n"
"Content-Transfer-Encoding: \n"
"Plural-Forms: \n"
#. module: component
#: model:ir.model,name:component.model_collection_base
msgid "Base Abstract Collection"
msgstr ""
#. module: component
#: model:ir.model,name:component.model_component_builder
msgid "Component Builder"
msgstr ""

View File

@@ -0,0 +1,38 @@
# Translation of Odoo Server.
# This file contains the translation of the following modules:
# * component
#
# Translators:
# OCA Transbot <transbot@odoo-community.org>, 2018
msgid ""
msgstr ""
"Project-Id-Version: Odoo Server 11.0\n"
"Report-Msgid-Bugs-To: \n"
"POT-Creation-Date: 2018-01-05 16:56+0000\n"
"PO-Revision-Date: 2018-01-05 16:56+0000\n"
"Last-Translator: OCA Transbot <transbot@odoo-community.org>, 2018\n"
"Language-Team: German (https://www.transifex.com/oca/teams/23907/de/)\n"
"Language: de\n"
"MIME-Version: 1.0\n"
"Content-Type: text/plain; charset=UTF-8\n"
"Content-Transfer-Encoding: \n"
"Plural-Forms: nplurals=2; plural=(n != 1);\n"
#. module: component
#: model:ir.model,name:component.model_collection_base
msgid "Base Abstract Collection"
msgstr ""
#. module: component
#: model:ir.model,name:component.model_component_builder
msgid "Component Builder"
msgstr ""
#~ msgid "Display Name"
#~ msgstr "Anzeigebezeichnung"
#~ msgid "ID"
#~ msgstr "ID"
#~ msgid "Last Modified on"
#~ msgstr "Zuletzt aktualisiert am"

View File

@@ -0,0 +1,33 @@
# Translation of Odoo Server.
# This file contains the translation of the following modules:
# * component
#
# Translators:
# OCA Transbot <transbot@odoo-community.org>, 2018
msgid ""
msgstr ""
"Project-Id-Version: Odoo Server 11.0\n"
"Report-Msgid-Bugs-To: \n"
"POT-Creation-Date: 2018-01-05 16:56+0000\n"
"PO-Revision-Date: 2018-01-05 16:56+0000\n"
"Last-Translator: OCA Transbot <transbot@odoo-community.org>, 2018\n"
"Language-Team: Greek (Greece) (https://www.transifex.com/oca/teams/23907/"
"el_GR/)\n"
"Language: el_GR\n"
"MIME-Version: 1.0\n"
"Content-Type: text/plain; charset=UTF-8\n"
"Content-Transfer-Encoding: \n"
"Plural-Forms: nplurals=2; plural=(n != 1);\n"
#. module: component
#: model:ir.model,name:component.model_collection_base
msgid "Base Abstract Collection"
msgstr ""
#. module: component
#: model:ir.model,name:component.model_component_builder
msgid "Component Builder"
msgstr ""
#~ msgid "ID"
#~ msgstr "Κωδικός"

View File

@@ -0,0 +1,39 @@
# Translation of Odoo Server.
# This file contains the translation of the following modules:
# * component
#
# Translators:
# OCA Transbot <transbot@odoo-community.org>, 2018
msgid ""
msgstr ""
"Project-Id-Version: Odoo Server 11.0\n"
"Report-Msgid-Bugs-To: \n"
"POT-Creation-Date: 2018-01-05 16:56+0000\n"
"PO-Revision-Date: 2023-08-02 13:09+0000\n"
"Last-Translator: Ivorra78 <informatica@totmaterial.es>\n"
"Language-Team: Spanish (https://www.transifex.com/oca/teams/23907/es/)\n"
"Language: es\n"
"MIME-Version: 1.0\n"
"Content-Type: text/plain; charset=UTF-8\n"
"Content-Transfer-Encoding: \n"
"Plural-Forms: nplurals=2; plural=n != 1;\n"
"X-Generator: Weblate 4.17\n"
#. module: component
#: model:ir.model,name:component.model_collection_base
msgid "Base Abstract Collection"
msgstr "Colección abstracta de base"
#. module: component
#: model:ir.model,name:component.model_component_builder
msgid "Component Builder"
msgstr "Constructor de componentes"
#~ msgid "Display Name"
#~ msgstr "Nombre mostrado"
#~ msgid "ID"
#~ msgstr "ID"
#~ msgid "Last Modified on"
#~ msgstr "Última modificación el"

View File

@@ -0,0 +1,33 @@
# Translation of Odoo Server.
# This file contains the translation of the following modules:
# * component
#
# Translators:
# OCA Transbot <transbot@odoo-community.org>, 2018
msgid ""
msgstr ""
"Project-Id-Version: Odoo Server 11.0\n"
"Report-Msgid-Bugs-To: \n"
"POT-Creation-Date: 2018-01-05 16:56+0000\n"
"PO-Revision-Date: 2018-01-05 16:56+0000\n"
"Last-Translator: OCA Transbot <transbot@odoo-community.org>, 2018\n"
"Language-Team: Spanish (Spain) (https://www.transifex.com/oca/teams/23907/"
"es_ES/)\n"
"Language: es_ES\n"
"MIME-Version: 1.0\n"
"Content-Type: text/plain; charset=UTF-8\n"
"Content-Transfer-Encoding: \n"
"Plural-Forms: nplurals=2; plural=(n != 1);\n"
#. module: component
#: model:ir.model,name:component.model_collection_base
msgid "Base Abstract Collection"
msgstr ""
#. module: component
#: model:ir.model,name:component.model_component_builder
msgid "Component Builder"
msgstr ""
#~ msgid "ID"
#~ msgstr "ID"

View File

@@ -0,0 +1,38 @@
# Translation of Odoo Server.
# This file contains the translation of the following modules:
# * component
#
# Translators:
# OCA Transbot <transbot@odoo-community.org>, 2018
msgid ""
msgstr ""
"Project-Id-Version: Odoo Server 11.0\n"
"Report-Msgid-Bugs-To: \n"
"POT-Creation-Date: 2018-01-05 16:56+0000\n"
"PO-Revision-Date: 2018-01-05 16:56+0000\n"
"Last-Translator: OCA Transbot <transbot@odoo-community.org>, 2018\n"
"Language-Team: Finnish (https://www.transifex.com/oca/teams/23907/fi/)\n"
"Language: fi\n"
"MIME-Version: 1.0\n"
"Content-Type: text/plain; charset=UTF-8\n"
"Content-Transfer-Encoding: \n"
"Plural-Forms: nplurals=2; plural=(n != 1);\n"
#. module: component
#: model:ir.model,name:component.model_collection_base
msgid "Base Abstract Collection"
msgstr ""
#. module: component
#: model:ir.model,name:component.model_component_builder
msgid "Component Builder"
msgstr ""
#~ msgid "Display Name"
#~ msgstr "Nimi"
#~ msgid "ID"
#~ msgstr "ID"
#~ msgid "Last Modified on"
#~ msgstr "Viimeksi muokattu"


@@ -0,0 +1,40 @@
# Translation of Odoo Server.
# This file contains the translation of the following modules:
# * component
#
# Translators:
# OCA Transbot <transbot@odoo-community.org>, 2018
# Nicolas JEUDY <njeudy@panda-chi.io>, 2018
msgid ""
msgstr ""
"Project-Id-Version: Odoo Server 11.0\n"
"Report-Msgid-Bugs-To: \n"
"POT-Creation-Date: 2018-02-01 01:48+0000\n"
"PO-Revision-Date: 2018-06-28 07:13+0000\n"
"Last-Translator: Guewen Baconnier <guewen.baconnier@camptocamp.com>\n"
"Language-Team: French (https://www.transifex.com/oca/teams/23907/fr/)\n"
"Language: fr\n"
"MIME-Version: 1.0\n"
"Content-Type: text/plain; charset=UTF-8\n"
"Content-Transfer-Encoding: \n"
"Plural-Forms: nplurals=2; plural=n > 1;\n"
"X-Generator: Weblate 3.0.1\n"
#. module: component
#: model:ir.model,name:component.model_collection_base
msgid "Base Abstract Collection"
msgstr "Abstract Model inital pour une collection"
#. module: component
#: model:ir.model,name:component.model_component_builder
msgid "Component Builder"
msgstr "Constructeur de composants"
#~ msgid "Display Name"
#~ msgstr "Nom affiché"
#~ msgid "ID"
#~ msgstr "ID"
#~ msgid "Last Modified on"
#~ msgstr "Dernière modification le"


@@ -0,0 +1,32 @@
# Translation of Odoo Server.
# This file contains the translation of the following modules:
# * component
#
# Translators:
# OCA Transbot <transbot@odoo-community.org>, 2018
msgid ""
msgstr ""
"Project-Id-Version: Odoo Server 11.0\n"
"Report-Msgid-Bugs-To: \n"
"POT-Creation-Date: 2018-01-05 16:56+0000\n"
"PO-Revision-Date: 2018-01-05 16:56+0000\n"
"Last-Translator: OCA Transbot <transbot@odoo-community.org>, 2018\n"
"Language-Team: Galician (https://www.transifex.com/oca/teams/23907/gl/)\n"
"Language: gl\n"
"MIME-Version: 1.0\n"
"Content-Type: text/plain; charset=UTF-8\n"
"Content-Transfer-Encoding: \n"
"Plural-Forms: nplurals=2; plural=(n != 1);\n"
#. module: component
#: model:ir.model,name:component.model_collection_base
msgid "Base Abstract Collection"
msgstr ""
#. module: component
#: model:ir.model,name:component.model_component_builder
msgid "Component Builder"
msgstr ""
#~ msgid "ID"
#~ msgstr "ID"


@@ -0,0 +1,39 @@
# Translation of Odoo Server.
# This file contains the translation of the following modules:
# * component
#
# Translators:
# OCA Transbot <transbot@odoo-community.org>, 2018
msgid ""
msgstr ""
"Project-Id-Version: Odoo Server 11.0\n"
"Report-Msgid-Bugs-To: \n"
"POT-Creation-Date: 2018-01-05 16:56+0000\n"
"PO-Revision-Date: 2024-02-26 09:41+0000\n"
"Last-Translator: mymage <stefano.consolaro@mymage.it>\n"
"Language-Team: Italian (https://www.transifex.com/oca/teams/23907/it/)\n"
"Language: it\n"
"MIME-Version: 1.0\n"
"Content-Type: text/plain; charset=UTF-8\n"
"Content-Transfer-Encoding: \n"
"Plural-Forms: nplurals=2; plural=n != 1;\n"
"X-Generator: Weblate 4.17\n"
#. module: component
#: model:ir.model,name:component.model_collection_base
msgid "Base Abstract Collection"
msgstr "Raccolta astratta base"
#. module: component
#: model:ir.model,name:component.model_component_builder
msgid "Component Builder"
msgstr "Costruttore componente"
#~ msgid "Display Name"
#~ msgstr "Nome da visualizzare"
#~ msgid "ID"
#~ msgstr "ID"
#~ msgid "Last Modified on"
#~ msgstr "Ultima modifica il"


@@ -0,0 +1,32 @@
# Translation of Odoo Server.
# This file contains the translation of the following modules:
# * component
#
# Translators:
# OCA Transbot <transbot@odoo-community.org>, 2018
msgid ""
msgstr ""
"Project-Id-Version: Odoo Server 11.0\n"
"Report-Msgid-Bugs-To: \n"
"POT-Creation-Date: 2018-01-05 16:56+0000\n"
"PO-Revision-Date: 2018-01-05 16:56+0000\n"
"Last-Translator: OCA Transbot <transbot@odoo-community.org>, 2018\n"
"Language-Team: Portuguese (https://www.transifex.com/oca/teams/23907/pt/)\n"
"Language: pt\n"
"MIME-Version: 1.0\n"
"Content-Type: text/plain; charset=UTF-8\n"
"Content-Transfer-Encoding: \n"
"Plural-Forms: nplurals=2; plural=(n != 1);\n"
#. module: component
#: model:ir.model,name:component.model_collection_base
msgid "Base Abstract Collection"
msgstr ""
#. module: component
#: model:ir.model,name:component.model_component_builder
msgid "Component Builder"
msgstr ""
#~ msgid "ID"
#~ msgstr "ID"


@@ -0,0 +1,40 @@
# Translation of Odoo Server.
# This file contains the translation of the following modules:
# * component
#
# Translators:
# OCA Transbot <transbot@odoo-community.org>, 2018
msgid ""
msgstr ""
"Project-Id-Version: Odoo Server 11.0\n"
"Report-Msgid-Bugs-To: \n"
"POT-Creation-Date: 2018-01-05 16:56+0000\n"
"PO-Revision-Date: 2020-08-12 20:00+0000\n"
"Last-Translator: Rodrigo Macedo <rmsolucoeseminformatic4@gmail.com>\n"
"Language-Team: Portuguese (Brazil) (https://www.transifex.com/oca/"
"teams/23907/pt_BR/)\n"
"Language: pt_BR\n"
"MIME-Version: 1.0\n"
"Content-Type: text/plain; charset=UTF-8\n"
"Content-Transfer-Encoding: \n"
"Plural-Forms: nplurals=2; plural=n > 1;\n"
"X-Generator: Weblate 3.10\n"
#. module: component
#: model:ir.model,name:component.model_collection_base
msgid "Base Abstract Collection"
msgstr "Coleção Base Abstrata"
#. module: component
#: model:ir.model,name:component.model_component_builder
msgid "Component Builder"
msgstr "Construtor de Componentes"
#~ msgid "Display Name"
#~ msgstr "Exibir Nome"
#~ msgid "ID"
#~ msgstr "ID"
#~ msgid "Last Modified on"
#~ msgstr "Última modificação em"


@@ -0,0 +1,33 @@
# Translation of Odoo Server.
# This file contains the translation of the following modules:
# * component
#
# Translators:
# OCA Transbot <transbot@odoo-community.org>, 2018
msgid ""
msgstr ""
"Project-Id-Version: Odoo Server 11.0\n"
"Report-Msgid-Bugs-To: \n"
"POT-Creation-Date: 2018-01-05 16:56+0000\n"
"PO-Revision-Date: 2018-01-05 16:56+0000\n"
"Last-Translator: OCA Transbot <transbot@odoo-community.org>, 2018\n"
"Language-Team: Portuguese (Portugal) (https://www.transifex.com/oca/"
"teams/23907/pt_PT/)\n"
"Language: pt_PT\n"
"MIME-Version: 1.0\n"
"Content-Type: text/plain; charset=UTF-8\n"
"Content-Transfer-Encoding: \n"
"Plural-Forms: nplurals=2; plural=(n != 1);\n"
#. module: component
#: model:ir.model,name:component.model_collection_base
msgid "Base Abstract Collection"
msgstr ""
#. module: component
#: model:ir.model,name:component.model_component_builder
msgid "Component Builder"
msgstr ""
#~ msgid "ID"
#~ msgstr "ID"


@@ -0,0 +1,39 @@
# Translation of Odoo Server.
# This file contains the translation of the following modules:
# * component
#
# Translators:
# OCA Transbot <transbot@odoo-community.org>, 2018
msgid ""
msgstr ""
"Project-Id-Version: Odoo Server 11.0\n"
"Report-Msgid-Bugs-To: \n"
"POT-Creation-Date: 2018-01-05 16:56+0000\n"
"PO-Revision-Date: 2018-01-05 16:56+0000\n"
"Last-Translator: OCA Transbot <transbot@odoo-community.org>, 2018\n"
"Language-Team: Slovenian (https://www.transifex.com/oca/teams/23907/sl/)\n"
"Language: sl\n"
"MIME-Version: 1.0\n"
"Content-Type: text/plain; charset=UTF-8\n"
"Content-Transfer-Encoding: \n"
"Plural-Forms: nplurals=4; plural=(n%100==1 ? 0 : n%100==2 ? 1 : n%100==3 || "
"n%100==4 ? 2 : 3);\n"
#. module: component
#: model:ir.model,name:component.model_collection_base
msgid "Base Abstract Collection"
msgstr ""
#. module: component
#: model:ir.model,name:component.model_component_builder
msgid "Component Builder"
msgstr ""
#~ msgid "Display Name"
#~ msgstr "Prikazni naziv"
#~ msgid "ID"
#~ msgstr "ID"
#~ msgid "Last Modified on"
#~ msgstr "Zadnjič spremenjeno"


@@ -0,0 +1,32 @@
# Translation of Odoo Server.
# This file contains the translation of the following modules:
# * component
#
# Translators:
# OCA Transbot <transbot@odoo-community.org>, 2018
msgid ""
msgstr ""
"Project-Id-Version: Odoo Server 11.0\n"
"Report-Msgid-Bugs-To: \n"
"POT-Creation-Date: 2018-01-05 16:56+0000\n"
"PO-Revision-Date: 2018-01-05 16:56+0000\n"
"Last-Translator: OCA Transbot <transbot@odoo-community.org>, 2018\n"
"Language-Team: Turkish (https://www.transifex.com/oca/teams/23907/tr/)\n"
"Language: tr\n"
"MIME-Version: 1.0\n"
"Content-Type: text/plain; charset=UTF-8\n"
"Content-Transfer-Encoding: \n"
"Plural-Forms: nplurals=2; plural=(n > 1);\n"
#. module: component
#: model:ir.model,name:component.model_collection_base
msgid "Base Abstract Collection"
msgstr ""
#. module: component
#: model:ir.model,name:component.model_component_builder
msgid "Component Builder"
msgstr ""
#~ msgid "ID"
#~ msgstr "ID"


@@ -0,0 +1,36 @@
# Translation of Odoo Server.
# This file contains the translation of the following modules:
# * component
#
msgid ""
msgstr ""
"Project-Id-Version: Odoo Server 13.0\n"
"Report-Msgid-Bugs-To: \n"
"PO-Revision-Date: 2019-09-01 06:14+0000\n"
"Last-Translator: 黎伟杰 <674416404@qq.com>\n"
"Language-Team: none\n"
"Language: zh_CN\n"
"MIME-Version: 1.0\n"
"Content-Type: text/plain; charset=UTF-8\n"
"Content-Transfer-Encoding: \n"
"Plural-Forms: nplurals=1; plural=0;\n"
"X-Generator: Weblate 3.8\n"
#. module: component
#: model:ir.model,name:component.model_collection_base
msgid "Base Abstract Collection"
msgstr "基础抽象集合"
#. module: component
#: model:ir.model,name:component.model_component_builder
msgid "Component Builder"
msgstr "组件构建器"
#~ msgid "Display Name"
#~ msgstr "显示名称"
#~ msgid "ID"
#~ msgstr "ID"
#~ msgid "Last Modified on"
#~ msgstr "最后修改时间"


@@ -0,0 +1 @@
from . import collection


@@ -0,0 +1,96 @@
# Copyright 2017 Camptocamp SA
# License LGPL-3.0 or later (http://www.gnu.org/licenses/lgpl.html)

"""
Collection Model
================

This is the base Model shared by all the Collections.
In the context of the Connector, a collection is the Backend.

The `_name` given to the Collection Model will be the name
to use in the `_collection` of the Components usable for the Backend.

"""

from contextlib import contextmanager

from odoo import models

from ..core import WorkContext


class Collection(models.AbstractModel):
    """The model on which components are subscribed

    It would be for instance the ``backend`` for the connectors.

    Example::

        class MagentoBackend(models.Model):
            _name = 'magento.backend'  # name of the collection
            _inherit = 'collection.base'


        class MagentoSaleImporter(Component):
            _name = 'magento.sale.importer'
            _apply_on = 'magento.sale.order'
            _collection = 'magento.backend'  # name of the collection

            def run(self, magento_id):
                mapper = self.component(usage='import.mapper')
                extra_mappers = self.many_components(
                    usage='import.mapper.extra',
                )
                # ...

    Use it::

        >>> backend = self.env['magento.backend'].browse(1)
        >>> with backend.work_on('magento.sale.order') as work:
        ...     importer = work.component(usage='magento.sale.importer')
        ...     importer.run(1)

    See also: :class:`odoo.addons.component.core.WorkContext`

    """

    _name = "collection.base"
    _description = "Base Abstract Collection"

    @contextmanager
    def work_on(self, model_name, **kwargs):
        """Entry-point for the components, context manager

        Start a work using the components on the model.
        Any keyword argument will be assigned to the work context.
        See documentation of :class:`odoo.addons.component.core.WorkContext`.

        It is a context manager, so you can attach objects and clean them
        at the end of the work session, such as::

            @contextmanager
            def work_on(self, model_name, **kwargs):
                self.ensure_one()
                magento_location = MagentoLocation(
                    self.location,
                    self.username,
                    self.password,
                )
                # We create a Magento Client API here, so we can create the
                # client once (lazily on the first use) and propagate it
                # through all the sync session, instead of recreating a client
                # in each backend adapter usage.
                with MagentoAPI(magento_location) as magento_api:
                    _super = super(MagentoBackend, self)
                    # from the components we'll be able to do:
                    # self.work.magento_api
                    with _super.work_on(
                        model_name, magento_api=magento_api, **kwargs
                    ) as work:
                        yield work

        """
        self.ensure_one()
        yield WorkContext(model_name=model_name, collection=self, **kwargs)
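To make the pattern concrete: below is a minimal sketch of a collection and a component subscribed to it. The names `my.backend` and `partner.importer` are hypothetical, chosen only for illustration; the `work_on` usage at the end mirrors the docstring above.

    from odoo import models

    from odoo.addons.component.core import Component


    class MyBackend(models.Model):
        _name = "my.backend"  # this name is the components' `_collection` key
        _inherit = "collection.base"
        _description = "My Backend"


    class PartnerImporter(Component):
        _name = "partner.importer"
        _usage = "importer"
        _collection = "my.backend"  # subscribe to the collection above
        _apply_on = ["res.partner"]

        def run(self, external_id):
            # a real importer would fetch the remote data and write it here
            return external_id


    # Usage from business code (hypothetical backend record):
    #     backend = env["my.backend"].browse(backend_id)
    #     with backend.work_on("res.partner") as work:
    #         importer = work.component(usage="importer")
    #         importer.run(42)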


@@ -0,0 +1,3 @@
[build-system]
requires = ["whool"]
build-backend = "whool.buildapi"


@@ -0,0 +1,4 @@
- Guewen Baconnier \<<guewen.baconnier@camptocamp.com>\>
- Laurent Mignon \<<laurent.mignon@acsone.eu>\>
- Simone Orsi \<<simone.orsi@camptocamp.com>\>
- Thien Vo \<<thienvh@trobz.com>\>


@@ -0,0 +1 @@
The migration of this module from 17.0 to 18.0 was financially supported by Camptocamp.


@@ -0,0 +1,9 @@
This module implements a component system and is a base block for the
Connector Framework. It can be used without using the full Connector
though.

Documentation: <http://odoo-connector.com/>

You may also want to check the [Introduction to Odoo
Components](https://dev.to/guewen/introduction-to-odoo-components-bn0)
by @guewen.


@@ -0,0 +1,19 @@
## 16.0.1.0.0 (2022-10-04)

- \[MIGRATION\] from 15.0

## 15.0.1.0.0 (2021-11-25)

- \[MIGRATION\] from 14.0

## 14.0.1.0.0 (2020-10-22)

- \[MIGRATION\] from 13.0

## 13.0.1.0.0 (2019-10-23)

- \[MIGRATION\] from 12.0

## 12.0.1.0.0 (2018-10-02)

- \[MIGRATION\] from 11.0 branched at rev. 324e006


@@ -0,0 +1,30 @@
As a developer, you have access to a component system. You can find the
documentation in the code or on <http://odoo-connector.com>

In a nutshell, you can create components:

    from odoo.addons.component.core import Component

    class MagentoPartnerAdapter(Component):
        _name = 'magento.partner.adapter'
        _inherit = 'magento.adapter'

        _usage = 'backend.adapter'
        _collection = 'magento.backend'
        _apply_on = ['res.partner']

And later, find the component you need at runtime (dynamic dispatch at
component level):

    def run(self, external_id):
        backend_adapter = self.component(usage='backend.adapter')
        external_data = backend_adapter.read(external_id)

In order for tests using components to work, you will need to use the
base class provided by \`odoo.addons.component.tests.common\`:

- TransactionComponentCase

There are also some specific base classes for testing the component
registry, using the ComponentRegistryCase as a base class. See the
docstrings in tests/common.py.
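A matching test sketch for the note above, reusing the hypothetical `my.backend` / `partner.importer` names from the earlier sketch; `TransactionComponentCase` takes care of loading the components of the tested addon and its dependencies:

    from odoo.addons.component.tests.common import TransactionComponentCase


    class TestPartnerImporter(TransactionComponentCase):
        def test_importer_lookup(self):
            backend = self.env["my.backend"].create({"name": "test"})
            with backend.work_on("res.partner") as work:
                # dynamic dispatch: resolve the component by usage
                importer = work.component(usage="importer")
                self.assertEqual(importer._name, "partner.importer")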

Binary file not shown (image, 9.2 KiB).


@@ -0,0 +1,514 @@
<!DOCTYPE html PUBLIC "-//W3C//DTD XHTML 1.0 Transitional//EN" "http://www.w3.org/TR/xhtml1/DTD/xhtml1-transitional.dtd">
<html xmlns="http://www.w3.org/1999/xhtml" xml:lang="en" lang="en">
<head>
<meta http-equiv="Content-Type" content="text/html; charset=utf-8" />
<meta name="generator" content="Docutils: https://docutils.sourceforge.io/" />
<title>Components</title>
<style type="text/css">
/*
:Author: David Goodger (goodger@python.org)
:Id: $Id: html4css1.css 9511 2024-01-13 09:50:07Z milde $
:Copyright: This stylesheet has been placed in the public domain.
Default cascading style sheet for the HTML output of Docutils.
Despite the name, some widely supported CSS2 features are used.
See https://docutils.sourceforge.io/docs/howto/html-stylesheets.html for how to
customize this style sheet.
*/
/* used to remove borders from tables and images */
.borderless, table.borderless td, table.borderless th {
border: 0 }
table.borderless td, table.borderless th {
/* Override padding for "table.docutils td" with "! important".
The right padding separates the table cells. */
padding: 0 0.5em 0 0 ! important }
.first {
/* Override more specific margin styles with "! important". */
margin-top: 0 ! important }
.last, .with-subtitle {
margin-bottom: 0 ! important }
.hidden {
display: none }
.subscript {
vertical-align: sub;
font-size: smaller }
.superscript {
vertical-align: super;
font-size: smaller }
a.toc-backref {
text-decoration: none ;
color: black }
blockquote.epigraph {
margin: 2em 5em ; }
dl.docutils dd {
margin-bottom: 0.5em }
object[type="image/svg+xml"], object[type="application/x-shockwave-flash"] {
overflow: hidden;
}
/* Uncomment (and remove this text!) to get bold-faced definition list terms
dl.docutils dt {
font-weight: bold }
*/
div.abstract {
margin: 2em 5em }
div.abstract p.topic-title {
font-weight: bold ;
text-align: center }
div.admonition, div.attention, div.caution, div.danger, div.error,
div.hint, div.important, div.note, div.tip, div.warning {
margin: 2em ;
border: medium outset ;
padding: 1em }
div.admonition p.admonition-title, div.hint p.admonition-title,
div.important p.admonition-title, div.note p.admonition-title,
div.tip p.admonition-title {
font-weight: bold ;
font-family: sans-serif }
div.attention p.admonition-title, div.caution p.admonition-title,
div.danger p.admonition-title, div.error p.admonition-title,
div.warning p.admonition-title, .code .error {
color: red ;
font-weight: bold ;
font-family: sans-serif }
/* Uncomment (and remove this text!) to get reduced vertical space in
compound paragraphs.
div.compound .compound-first, div.compound .compound-middle {
margin-bottom: 0.5em }
div.compound .compound-last, div.compound .compound-middle {
margin-top: 0.5em }
*/
div.dedication {
margin: 2em 5em ;
text-align: center ;
font-style: italic }
div.dedication p.topic-title {
font-weight: bold ;
font-style: normal }
div.figure {
margin-left: 2em ;
margin-right: 2em }
div.footer, div.header {
clear: both;
font-size: smaller }
div.line-block {
display: block ;
margin-top: 1em ;
margin-bottom: 1em }
div.line-block div.line-block {
margin-top: 0 ;
margin-bottom: 0 ;
margin-left: 1.5em }
div.sidebar {
margin: 0 0 0.5em 1em ;
border: medium outset ;
padding: 1em ;
background-color: #ffffee ;
width: 40% ;
float: right ;
clear: right }
div.sidebar p.rubric {
font-family: sans-serif ;
font-size: medium }
div.system-messages {
margin: 5em }
div.system-messages h1 {
color: red }
div.system-message {
border: medium outset ;
padding: 1em }
div.system-message p.system-message-title {
color: red ;
font-weight: bold }
div.topic {
margin: 2em }
h1.section-subtitle, h2.section-subtitle, h3.section-subtitle,
h4.section-subtitle, h5.section-subtitle, h6.section-subtitle {
margin-top: 0.4em }
h1.title {
text-align: center }
h2.subtitle {
text-align: center }
hr.docutils {
width: 75% }
img.align-left, .figure.align-left, object.align-left, table.align-left {
clear: left ;
float: left ;
margin-right: 1em }
img.align-right, .figure.align-right, object.align-right, table.align-right {
clear: right ;
float: right ;
margin-left: 1em }
img.align-center, .figure.align-center, object.align-center {
display: block;
margin-left: auto;
margin-right: auto;
}
table.align-center {
margin-left: auto;
margin-right: auto;
}
.align-left {
text-align: left }
.align-center {
clear: both ;
text-align: center }
.align-right {
text-align: right }
/* reset inner alignment in figures */
div.align-right {
text-align: inherit }
/* div.align-center * { */
/* text-align: left } */
.align-top {
vertical-align: top }
.align-middle {
vertical-align: middle }
.align-bottom {
vertical-align: bottom }
ol.simple, ul.simple {
margin-bottom: 1em }
ol.arabic {
list-style: decimal }
ol.loweralpha {
list-style: lower-alpha }
ol.upperalpha {
list-style: upper-alpha }
ol.lowerroman {
list-style: lower-roman }
ol.upperroman {
list-style: upper-roman }
p.attribution {
text-align: right ;
margin-left: 50% }
p.caption {
font-style: italic }
p.credits {
font-style: italic ;
font-size: smaller }
p.label {
white-space: nowrap }
p.rubric {
font-weight: bold ;
font-size: larger ;
color: maroon ;
text-align: center }
p.sidebar-title {
font-family: sans-serif ;
font-weight: bold ;
font-size: larger }
p.sidebar-subtitle {
font-family: sans-serif ;
font-weight: bold }
p.topic-title {
font-weight: bold }
pre.address {
margin-bottom: 0 ;
margin-top: 0 ;
font: inherit }
pre.literal-block, pre.doctest-block, pre.math, pre.code {
margin-left: 2em ;
margin-right: 2em }
pre.code .ln { color: gray; } /* line numbers */
pre.code, code { background-color: #eeeeee }
pre.code .comment, code .comment { color: #5C6576 }
pre.code .keyword, code .keyword { color: #3B0D06; font-weight: bold }
pre.code .literal.string, code .literal.string { color: #0C5404 }
pre.code .name.builtin, code .name.builtin { color: #352B84 }
pre.code .deleted, code .deleted { background-color: #DEB0A1}
pre.code .inserted, code .inserted { background-color: #A3D289}
span.classifier {
font-family: sans-serif ;
font-style: oblique }
span.classifier-delimiter {
font-family: sans-serif ;
font-weight: bold }
span.interpreted {
font-family: sans-serif }
span.option {
white-space: nowrap }
span.pre {
white-space: pre }
span.problematic, pre.problematic {
color: red }
span.section-subtitle {
/* font-size relative to parent (h1..h6 element) */
font-size: 80% }
table.citation {
border-left: solid 1px gray;
margin-left: 1px }
table.docinfo {
margin: 2em 4em }
table.docutils {
margin-top: 0.5em ;
margin-bottom: 0.5em }
table.footnote {
border-left: solid 1px black;
margin-left: 1px }
table.docutils td, table.docutils th,
table.docinfo td, table.docinfo th {
padding-left: 0.5em ;
padding-right: 0.5em ;
vertical-align: top }
table.docutils th.field-name, table.docinfo th.docinfo-name {
font-weight: bold ;
text-align: left ;
white-space: nowrap ;
padding-left: 0 }
/* "booktabs" style (no vertical lines) */
table.docutils.booktabs {
border: 0px;
border-top: 2px solid;
border-bottom: 2px solid;
border-collapse: collapse;
}
table.docutils.booktabs * {
border: 0px;
}
table.docutils.booktabs th {
border-bottom: thin solid;
text-align: left;
}
h1 tt.docutils, h2 tt.docutils, h3 tt.docutils,
h4 tt.docutils, h5 tt.docutils, h6 tt.docutils {
font-size: 100% }
ul.auto-toc {
list-style-type: none }
</style>
</head>
<body>
<div class="document" id="components">
<h1 class="title">Components</h1>
<!-- !!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!
!! This file is generated by oca-gen-addon-readme !!
!! changes will be overwritten. !!
!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!
!! source digest: sha256:2785951ba7cf6288c667291264099df031ca3d90d9c79c04a2d5cddec6c85641
!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!! -->
<p><a class="reference external image-reference" href="https://odoo-community.org/page/development-status"><img alt="Production/Stable" src="https://img.shields.io/badge/maturity-Production%2FStable-green.png" /></a> <a class="reference external image-reference" href="http://www.gnu.org/licenses/lgpl-3.0-standalone.html"><img alt="License: LGPL-3" src="https://img.shields.io/badge/licence-LGPL--3-blue.png" /></a> <a class="reference external image-reference" href="https://github.com/OCA/connector/tree/18.0/component"><img alt="OCA/connector" src="https://img.shields.io/badge/github-OCA%2Fconnector-lightgray.png?logo=github" /></a> <a class="reference external image-reference" href="https://translation.odoo-community.org/projects/connector-18-0/connector-18-0-component"><img alt="Translate me on Weblate" src="https://img.shields.io/badge/weblate-Translate%20me-F47D42.png" /></a> <a class="reference external image-reference" href="https://runboat.odoo-community.org/builds?repo=OCA/connector&amp;target_branch=18.0"><img alt="Try me on Runboat" src="https://img.shields.io/badge/runboat-Try%20me-875A7B.png" /></a></p>
<p>This module implements a component system and is a base block for the
Connector Framework. It can be used without using the full Connector
though.</p>
<p>Documentation: <a class="reference external" href="http://odoo-connector.com/">http://odoo-connector.com/</a></p>
<p>You may also want to check the <a class="reference external" href="https://dev.to/guewen/introduction-to-odoo-components-bn0">Introduction to Odoo
Components</a>
by &#64;guewen.</p>
<p><strong>Table of contents</strong></p>
<div class="contents local topic" id="contents">
<ul class="simple">
<li><a class="reference internal" href="#usage" id="toc-entry-1">Usage</a></li>
<li><a class="reference internal" href="#changelog" id="toc-entry-2">Changelog</a><ul>
<li><a class="reference internal" href="#section-1" id="toc-entry-3">16.0.1.0.0 (2022-10-04)</a></li>
<li><a class="reference internal" href="#section-2" id="toc-entry-4">15.0.1.0.0 (2021-11-25)</a></li>
<li><a class="reference internal" href="#section-3" id="toc-entry-5">14.0.1.0.0 (2020-10-22)</a></li>
<li><a class="reference internal" href="#section-4" id="toc-entry-6">13.0.1.0.0 (2019-10-23)</a></li>
<li><a class="reference internal" href="#section-5" id="toc-entry-7">12.0.1.0.0 (2018-10-02)</a></li>
</ul>
</li>
<li><a class="reference internal" href="#bug-tracker" id="toc-entry-8">Bug Tracker</a></li>
<li><a class="reference internal" href="#credits" id="toc-entry-9">Credits</a><ul>
<li><a class="reference internal" href="#authors" id="toc-entry-10">Authors</a></li>
<li><a class="reference internal" href="#contributors" id="toc-entry-11">Contributors</a></li>
<li><a class="reference internal" href="#other-credits" id="toc-entry-12">Other credits</a></li>
<li><a class="reference internal" href="#maintainers" id="toc-entry-13">Maintainers</a></li>
</ul>
</li>
</ul>
</div>
<div class="section" id="usage">
<h1><a class="toc-backref" href="#toc-entry-1">Usage</a></h1>
<p>As a developer, you have access to a component system. You can find the
documentation in the code or on <a class="reference external" href="http://odoo-connector.com">http://odoo-connector.com</a></p>
<p>In a nutshell, you can create components:</p>
<pre class="literal-block">
from odoo.addons.component.core import Component
class MagentoPartnerAdapter(Component):
_name = 'magento.partner.adapter'
_inherit = 'magento.adapter'
_usage = 'backend.adapter'
_collection = 'magento.backend'
_apply_on = ['res.partner']
</pre>
<p>And later, find the component you need at runtime (dynamic dispatch at
component level):</p>
<pre class="literal-block">
def run(self, external_id):
backend_adapter = self.component(usage='backend.adapter')
external_data = backend_adapter.read(external_id)
</pre>
<p>In order for tests using components to work, you will need to use the
base class provided by `odoo.addons.component.tests.common`:</p>
<ul class="simple">
<li>TransactionComponentCase</li>
</ul>
<p>There are also some specific base classes for testing the component
registry, using the ComponentRegistryCase as a base class. See the
docstrings in tests/common.py.</p>
</div>
<div class="section" id="changelog">
<h1><a class="toc-backref" href="#toc-entry-2">Changelog</a></h1>
<div class="section" id="section-1">
<h2><a class="toc-backref" href="#toc-entry-3">16.0.1.0.0 (2022-10-04)</a></h2>
<ul class="simple">
<li>[MIGRATION] from 15.0</li>
</ul>
</div>
<div class="section" id="section-2">
<h2><a class="toc-backref" href="#toc-entry-4">15.0.1.0.0 (2021-11-25)</a></h2>
<ul class="simple">
<li>[MIGRATION] from 14.0</li>
</ul>
</div>
<div class="section" id="section-3">
<h2><a class="toc-backref" href="#toc-entry-5">14.0.1.0.0 (2020-10-22)</a></h2>
<ul class="simple">
<li>[MIGRATION] from 13.0</li>
</ul>
</div>
<div class="section" id="section-4">
<h2><a class="toc-backref" href="#toc-entry-6">13.0.1.0.0 (2019-10-23)</a></h2>
<ul class="simple">
<li>[MIGRATION] from 12.0</li>
</ul>
</div>
<div class="section" id="section-5">
<h2><a class="toc-backref" href="#toc-entry-7">12.0.1.0.0 (2018-10-02)</a></h2>
<ul class="simple">
<li>[MIGRATION] from 11.0 branched at rev. 324e006</li>
</ul>
</div>
</div>
<div class="section" id="bug-tracker">
<h1><a class="toc-backref" href="#toc-entry-8">Bug Tracker</a></h1>
<p>Bugs are tracked on <a class="reference external" href="https://github.com/OCA/connector/issues">GitHub Issues</a>.
In case of trouble, please check there if your issue has already been reported.
If you spotted it first, help us to smash it by providing a detailed and welcomed
<a class="reference external" href="https://github.com/OCA/connector/issues/new?body=module:%20component%0Aversion:%2018.0%0A%0A**Steps%20to%20reproduce**%0A-%20...%0A%0A**Current%20behavior**%0A%0A**Expected%20behavior**">feedback</a>.</p>
<p>Do not contact contributors directly about support or help with technical issues.</p>
</div>
<div class="section" id="credits">
<h1><a class="toc-backref" href="#toc-entry-9">Credits</a></h1>
<div class="section" id="authors">
<h2><a class="toc-backref" href="#toc-entry-10">Authors</a></h2>
<ul class="simple">
<li>Camptocamp</li>
</ul>
</div>
<div class="section" id="contributors">
<h2><a class="toc-backref" href="#toc-entry-11">Contributors</a></h2>
<ul class="simple">
<li>Guewen Baconnier &lt;<a class="reference external" href="mailto:guewen.baconnier&#64;camptocamp.com">guewen.baconnier&#64;camptocamp.com</a>&gt;</li>
<li>Laurent Mignon &lt;<a class="reference external" href="mailto:laurent.mignon&#64;acsone.eu">laurent.mignon&#64;acsone.eu</a>&gt;</li>
<li>Simone Orsi &lt;<a class="reference external" href="mailto:simone.orsi&#64;camptocamp.com">simone.orsi&#64;camptocamp.com</a>&gt;</li>
<li>Thien Vo &lt;<a class="reference external" href="mailto:thienvh&#64;trobz.com">thienvh&#64;trobz.com</a>&gt;</li>
</ul>
</div>
<div class="section" id="other-credits">
<h2><a class="toc-backref" href="#toc-entry-12">Other credits</a></h2>
<p>The migration of this module from 17.0 to 18.0 was financially supported
by Camptocamp.</p>
</div>
<div class="section" id="maintainers">
<h2><a class="toc-backref" href="#toc-entry-13">Maintainers</a></h2>
<p>This module is maintained by the OCA.</p>
<a class="reference external image-reference" href="https://odoo-community.org">
<img alt="Odoo Community Association" src="https://odoo-community.org/logo.png" />
</a>
<p>OCA, or the Odoo Community Association, is a nonprofit organization whose
mission is to support the collaborative development of Odoo features and
promote its widespread use.</p>
<p>Current <a class="reference external" href="https://odoo-community.org/page/maintainer-role">maintainer</a>:</p>
<p><a class="reference external image-reference" href="https://github.com/guewen"><img alt="guewen" src="https://github.com/guewen.png?size=40px" /></a></p>
<p>This module is part of the <a class="reference external" href="https://github.com/OCA/connector/tree/18.0/component">OCA/connector</a> project on GitHub.</p>
<p>You are welcome to contribute. To learn how please visit <a class="reference external" href="https://odoo-community.org/page/Contribute">https://odoo-community.org/page/Contribute</a>.</p>
</div>
</div>
</div>
</body>
</html>


@@ -0,0 +1,5 @@
from . import test_build_component
from . import test_component
from . import test_lookup
from . import test_work_on
from . import test_utils


@@ -0,0 +1,212 @@
# Copyright 2017 Camptocamp SA
# License LGPL-3.0 or later (http://www.gnu.org/licenses/lgpl.html)

import copy
from contextlib import contextmanager

import odoo
from odoo import api
from odoo.tests import common

from odoo.addons.component.core import ComponentRegistry, MetaComponent, _get_addon_name


@contextmanager
def new_rollbacked_env():
    registry = odoo.modules.registry.Registry(common.get_db_name())
    uid = odoo.SUPERUSER_ID
    cr = registry.cursor()
    try:
        yield api.Environment(cr, uid, {})
    finally:
        cr.rollback()  # we shouldn't have to commit anything
        cr.close()


class ComponentMixin:
    @classmethod
    def setUpComponent(cls):
        with new_rollbacked_env() as env:
            builder = env["component.builder"]
            # build the components of every installed addons
            comp_registry = builder._init_global_registry()
            cls._components_registry = comp_registry
            # ensure that we load only the components of the 'installed'
            # modules, not 'to install', which means we load only the
            # dependencies of the tested addons, not the siblings or
            # children addons
            builder.build_registry(comp_registry, states=("installed",))
            # build the components of the current tested addon
            current_addon = _get_addon_name(cls.__module__)
            env["component.builder"].load_components(current_addon)
            if hasattr(cls, "env"):
                cls.env.context = dict(
                    cls.env.context, components_registry=cls._components_registry
                )

    # pylint: disable=W8106
    def setUp(self):
        # should be ready only during tests, never during installation
        # of addons
        self._components_registry.ready = True

        @self.addCleanup
        def notready():
            self._components_registry.ready = False


class TransactionComponentCase(common.TransactionCase, ComponentMixin):
    """A TransactionCase that loads all the components

    It is used like a usual Odoo TransactionCase, but it ensures
    that all the components of the current addon and its dependencies
    are loaded.

    """

    @classmethod
    def setUpClass(cls):
        super().setUpClass()
        cls.setUpComponent()

    # pylint: disable=W8106
    def setUp(self):
        # resolve an inheritance issue (common.TransactionCase does not call
        # super)
        common.TransactionCase.setUp(self)
        ComponentMixin.setUp(self)
        # There's no env on setUpClass of TransactionCase, must do it here.
        self.env.context = dict(
            self.env.context, components_registry=self._components_registry
        )


class ComponentRegistryCase:
    """This test case can be used as a base for writing tests on components

    This test case is meant to test components in a special component registry,
    where you want to have maximum control on which components are loaded
    or not, or when you want to create additional components in your tests.

    If you only want to *use* the components of the tested addon in your tests,
    then consider using:

    * :class:`TransactionComponentCase`

    This test case creates a special
    :class:`odoo.addons.component.core.ComponentRegistry` for the purpose of
    the tests. By default, it loads all the components of the dependencies, but
    not the components of the current addon (which you have to handle
    manually). In your tests, you can add more components in 2 manners.

    All the components of an Odoo module::

        self._load_module_components('connector')

    Only specific components::

        self._build_components(MyComponent1, MyComponent2)

    Note: for the lookups of the components, the default component
    registry is a global registry for the database. Here, you will
    need to explicitly pass ``self.comp_registry`` in the
    :class:`~odoo.addons.component.core.WorkContext`::

        work = WorkContext(model_name='res.users',
                           collection='my.collection',
                           components_registry=self.comp_registry)

    Or::

        collection_record = self.env['my.collection'].browse(1)
        with collection_record.work_on(
                'res.partner',
                components_registry=self.comp_registry) as work:

    """

    @staticmethod
    def _setup_registry(class_or_instance):
        # keep the original classes registered by the metaclass
        # so we'll restore them at the end of the tests; this avoids
        # polluting it with Stub / Test components
        class_or_instance._original_components = copy.deepcopy(
            MetaComponent._modules_components
        )
        # it will be our temporary component registry for our test session
        class_or_instance.comp_registry = ComponentRegistry()
        # it builds the 'final component' for every component of the
        # 'component' addon and pushes them in the component registry
        class_or_instance.comp_registry.load_components("component")
        # build the components of every addon already installed,
        # but not the current addon (when running with pytest/nosetest, we
        # simulate the --test-enable behavior by excluding the current addon
        # which is in 'to install' / 'to upgrade' with --test-enable).
        current_addon = _get_addon_name(class_or_instance.__module__)
        with new_rollbacked_env() as env:
            env["component.builder"].build_registry(
                class_or_instance.comp_registry,
                states=("installed",),
                exclude_addons=[current_addon],
            )
        # Fake that we are ready to work with the registry.
        # Normally, it is set to True at the end of the build
        # of the components. Here, we'll add components later in
        # the components registry, but we don't mind for the tests.
        class_or_instance.comp_registry.ready = True
        if hasattr(class_or_instance, "env"):
            # let it propagate via ctx
            class_or_instance.env.context = dict(
                class_or_instance.env.context,
                components_registry=class_or_instance.comp_registry,
            )

    @staticmethod
    def _teardown_registry(class_or_instance):
        # restore the original metaclass' classes
        MetaComponent._modules_components = class_or_instance._original_components

    def _load_module_components(self, module):
        self.comp_registry.load_components(module)

    def _build_components(self, *classes):
        for cls in classes:
            cls._build_component(self.comp_registry)


class TransactionComponentRegistryCase(common.TransactionCase, ComponentRegistryCase):
    """Adds Odoo Transaction in the base Component TestCase.

    This class doesn't set up the registry for you.
    You're supposed to explicitly call `_setup_registry` and `_teardown_registry`
    when you need it, either on setUpClass and tearDownClass or setUp and tearDown::

        class MyTestCase(TransactionComponentRegistryCase):
            def setUp(self):
                super().setUp()
                self._setup_registry(self)

            def tearDown(self):
                self._teardown_registry(self)
                super().tearDown()

        class MyTestCase(TransactionComponentRegistryCase):
            @classmethod
            def setUpClass(cls):
                super().setUpClass()
                cls._setup_registry(cls)

            @classmethod
            def tearDownClass(cls):
                cls._teardown_registry(cls)
                super().tearDownClass()

    """

    # pylint: disable=W8106
    @classmethod
    def setUpClass(cls):
        super().setUpClass()
        cls.collection = cls.env["collection.base"]
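A sketch of the registry-controlled style described in the docstrings above. `StubComponent` is a hypothetical test component, and the dict-style lookup on `comp_registry` is an assumption about `ComponentRegistry`:

    from odoo.addons.component.core import Component
    from odoo.addons.component.tests.common import TransactionComponentRegistryCase


    class TestStubComponent(TransactionComponentRegistryCase):
        def setUp(self):
            super().setUp()
            self._setup_registry(self)

        def tearDown(self):
            self._teardown_registry(self)
            super().tearDown()

        def test_build_and_lookup(self):
            class StubComponent(Component):
                _name = "stub.component"
                _usage = "stub"
                _collection = "collection.base"

            # register only this class in the temporary test registry
            self._build_components(StubComponent)
            comp_class = self.comp_registry["stub.component"]  # assumed dict access
            self.assertEqual(comp_class._usage, "stub")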

Some files were not shown because too many files have changed in this diff.