[REL] connector_amazon_sp: for 11.0

commit bf7192f71a
parent 8b2afa882d
Author: Jared Kipe
Date:   2022-02-04 13:25:45 -08:00

48 changed files with 4264 additions and 0 deletions

@@ -0,0 +1,8 @@
# © 2021 Hibou Corp.
from . import api
from . import backend_adapter
from . import binder
from . import importer
from . import exporter
from . import mapper

@@ -0,0 +1 @@
from . import amazon

@@ -0,0 +1,182 @@
# © 2021 Hibou Corp.

# imports for Client and CredentialProvider patch
from os import environ
import json
from requests import request
import boto3
from botocore.config import Config as BotoConfig
from sp_api.base.client import Client
from sp_api.base.config import CredentialProvider
from sp_api.base.ApiResponse import ApiResponse
from sp_api.base.marketplaces import Marketplaces
from sp_api.auth import AccessTokenClient
from requests.exceptions import HTTPError

# imports for Wrapping
from sp_api.api import Orders, \
    Shipping, \
    MerchantFulfillment, \
    Feeds
from sp_api.base.exceptions import SellingApiException, \
    SellingApiForbiddenException


amz_proxy_endpoint = environ.get('AMAZON_SP_ENDPOINT', 'https://amz-proxy.hibou.io')
PROXY_ENDPOINT = amz_proxy_endpoint
PROXY = amz_proxy_endpoint.split('//')[1]


class RequestRateError(Exception):
    def __init__(self, message, exception=None):
        super().__init__(message)
        self.exception = exception


class WrappedAPI:
    SellingApiException = SellingApiException
    SellingApiForbiddenException = SellingApiForbiddenException

    def __init__(self, env, refresh_token, lwa_client_id, lwa_client_secret, aws_access_key, aws_secret_key, role_arn):
        self.env = env
        get_param = env['ir.config_parameter'].sudo().get_param
        self.credentials = {
            'refresh_token': refresh_token,
            'lwa_app_id': lwa_client_id,
            'lwa_client_secret': lwa_client_secret,
            'aws_access_key': aws_access_key,
            'aws_secret_key': aws_secret_key,
            'role_arn': role_arn,
            # 'db_uid': get_param('database.uuid', ''),
            # 'pro_code': get_param('database.hibou_professional_code', ''),
        }

    def orders(self):
        return Orders(credentials=self.credentials)

    def shipping(self):
        return Shipping(credentials=self.credentials)

    def merchant_fulfillment(self):
        return MerchantFulfillment(credentials=self.credentials)

    def feeds(self):
        return Feeds(credentials=self.credentials)


# patch the Client
def __init__(
        self,
        marketplace: Marketplaces = Marketplaces.US,
        *,
        refresh_token=None,
        account='default',
        credentials=None
):
    super(Client, self).__init__(account, credentials)
    self.boto3_client = boto3.client(
        'sts',
        # aws_access_key_id=self.credentials.aws_access_key,
        # aws_secret_access_key=self.credentials.aws_secret_key
        config=BotoConfig(proxies={'http': PROXY, 'https': PROXY})
    )
    self.endpoint = marketplace.endpoint
    self.marketplace_id = marketplace.marketplace_id
    self.region = marketplace.region
    self._auth = AccessTokenClient(refresh_token=refresh_token, account=account, credentials=credentials)


def _sign_request(self):
    return None


def _request(self, path: str, *, data: dict = None, params: dict = None, headers=None,
             add_marketplace=True) -> ApiResponse:
    if params is None:
        params = {}
    if data is None:
        data = {}
    self.method = params.pop('method', data.pop('method', 'GET'))
    if add_marketplace:
        self._add_marketplaces(data if self.method in ('POST', 'PUT') else params)
    # auth=None because we don't sign the request anymore
    # proxy setup...
    # url = self.endpoint + path
    url = PROXY_ENDPOINT + path
    headers = headers or self.headers
    headers['x-orig-host'] = headers['host']
    del headers['host']
    headers['x-db-uuid'] = self.credentials.db_uid
    headers['x-pro-code'] = self.credentials.pro_code
    res = request(self.method, url, params=params,
                  data=json.dumps(data) if data and self.method in ('POST', 'PUT') else None, headers=headers,
                  auth=self._sign_request())
    try:
        res.raise_for_status()  # proxy does not return json errors
    except HTTPError as e:
        status_code = e.response.status_code
        if str(status_code) == '429':
            raise RequestRateError('HTTP 429', exception=e)
        raise e
    return self._check_response(res)


# Patch _request to have timeout, not signing differences above.
def _request(self, path: str, *, data: dict = None, params: dict = None, headers=None,
             add_marketplace=True) -> ApiResponse:
    if params is None:
        params = {}
    if data is None:
        data = {}
    self.method = params.pop('method', data.pop('method', 'GET'))
    if add_marketplace:
        self._add_marketplaces(data if self.method in ('POST', 'PUT') else params)
    res = request(self.method, self.endpoint + path, params=params,
                  data=json.dumps(data) if data and self.method in ('POST', 'PUT') else None, headers=headers or self.headers,
                  auth=self._sign_request(),
                  timeout=60)
    return self._check_response(res)


# Client.__init__ = __init__
# Client._sign_request = _sign_request
Client._request = _request


# patch the CredentialProvider
class Config:
    def __init__(self,
                 refresh_token,
                 lwa_app_id,
                 lwa_client_secret,
                 aws_access_key,
                 aws_secret_key,
                 role_arn,
                 db_uid,
                 pro_code,
                 ):
        self.refresh_token = refresh_token
        self.lwa_app_id = lwa_app_id
        self.lwa_client_secret = lwa_client_secret
        self.aws_access_key = aws_access_key
        self.aws_secret_key = aws_secret_key
        self.role_arn = role_arn
        self.db_uid = db_uid
        self.pro_code = pro_code

    def check_config(self):
        errors = []
        for k, v in self.__dict__.items():
            if not v and k != 'refresh_token':
                errors.append(k)
        return errors


# CredentialProvider.Config = Config
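
For context, a minimal usage sketch of the wrapper above, not part of the module itself: it assumes valid LWA/AWS credentials, an Odoo environment in `env`, and the `get_orders` call from the sp_api Orders client; all literal values are placeholders.

# Hypothetical usage sketch: build the wrapper and list recent orders.
api = WrappedAPI(env,
                 refresh_token='Atzr|...',  # placeholder credentials
                 lwa_client_id='amzn1.application-oa2-client.example',
                 lwa_client_secret='...',
                 aws_access_key='...',
                 aws_secret_key='...',
                 role_arn='arn:aws:iam::000000000000:role/sp-api-role')
try:
    response = api.orders().get_orders(CreatedAfter='2021-12-01T00:00:00Z')
    for order in response.payload.get('Orders', []):
        print(order['AmazonOrderId'], order.get('OrderStatus'))
except WrappedAPI.SellingApiException as exc:
    # sp_api signals API-level failures through SellingApiException subclasses
    print('Selling Partner API error:', exc)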

@@ -0,0 +1,79 @@
# © 2021 Hibou Corp.

from odoo.addons.component.core import AbstractComponent

# Feed API
from datetime import datetime
from xml.etree import ElementTree


class BaseAmazonConnectorComponent(AbstractComponent):
    """ Base Amazon Connector Component

    All components of this connector should inherit from it.
    """

    _name = 'base.amazon.connector'
    _inherit = 'base.connector'
    _collection = 'amazon.backend'


class AmazonAdapter(AbstractComponent):
    _name = 'amazon.adapter'
    _inherit = ['base.backend.adapter', 'base.amazon.connector']

    ElementTree = ElementTree
    FEED_ENCODING = 'iso-8859-1'

    def search(self, filters=None):
        """ Search records according to some criteria
        and return a list of ids """
        raise NotImplementedError

    def read(self, id, attributes=None):
        """ Return the information of a record """
        raise NotImplementedError

    def search_read(self, filters=None):
        """ Search records according to some criteria
        and return their information """
        raise NotImplementedError

    def create(self, data):
        """ Create a record on the external system """
        raise NotImplementedError

    def write(self, id, data):
        """ Update records on the external system """
        raise NotImplementedError

    def delete(self, id):
        """ Delete a record on the external system """
        raise NotImplementedError

    def _feed(self, message_type, backend):
        root = self.ElementTree.Element(
            'AmazonEnvelope',
            {'{http://www.w3.org/2001/XMLSchema-instance}noNamespaceSchemaLocation': 'amzn-envelope.xsd'})
        header = self.ElementTree.SubElement(root, 'Header')
        self.ElementTree.SubElement(header, 'DocumentVersion').text = '1.01'
        self.ElementTree.SubElement(header, 'MerchantIdentifier').text = backend.merchant_id
        self.ElementTree.SubElement(root, 'MessageType').text = message_type
        # note that you can remove and add your own Message node
        message = self.ElementTree.SubElement(root, 'Message')
        self.ElementTree.SubElement(message, 'MessageID').text = str(int(datetime.now().timestamp()))
        return root, message

    def _feed_string(self, node):
        return self.ElementTree.tostring(node, encoding=self.FEED_ENCODING, method='xml')

    @property
    def api_instance(self):
        try:
            amazon_api = getattr(self.work, 'amazon_api')
        except AttributeError:
            raise AttributeError(
                'You must provide an amazon_api attribute with an '
                'Amazon instance to be able to use the '
                'Backend Adapter.'
            )
        return amazon_api
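
As a concrete illustration of the feed helpers, a hypothetical adapter subclass that builds an OrderFulfillment envelope with _feed/_feed_string; the component name, the 'amazon.stock.picking' binding and the data keys are assumptions for the sketch, and the actual submission through the wrapped Feeds client is left out.

# Hypothetical adapter sketch (illustrative only).
from odoo.addons.component.core import Component

class ExampleFulfillmentAdapter(Component):
    _name = 'amazon.example.fulfillment.adapter'
    _inherit = 'amazon.adapter'
    _apply_on = 'amazon.stock.picking'

    def create(self, data):
        # build the envelope, then append the payload under the Message node
        root, message = self._feed('OrderFulfillment', self.backend_record)
        fulfillment = self.ElementTree.SubElement(message, 'OrderFulfillment')
        self.ElementTree.SubElement(fulfillment, 'AmazonOrderID').text = data['amazon_order_id']
        self.ElementTree.SubElement(fulfillment, 'FulfillmentDate').text = data['fulfillment_date']
        feed_xml = self._feed_string(root)  # iso-8859-1 encoded bytes
        # submission would go through self.api_instance.feeds() in a real adapter
        return feed_xml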

@@ -0,0 +1,22 @@
# © 2021 Hibou Corp.

from odoo.addons.component.core import Component


class AmazonModelBinder(Component):
    """ Bind records and give odoo/amazon ids correspondence

    Binding models are models called ``amazon.{normal_model}``,
    like ``amazon.sale.order`` or ``amazon.product.product``.
    They are ``_inherits`` of the normal models and contain
    the Amazon ID, the ID of the Amazon Backend and the additional
    fields belonging to the Amazon instance.
    """

    _name = 'amazon.binder'
    _inherit = ['base.binder', 'base.amazon.connector']
    _apply_on = [
        'amazon.product.product',
        'amazon.sale.order',
        'amazon.sale.order.line',
        'amazon.stock.picking',
    ]
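
For readers unfamiliar with the connector pattern, a sketch of what such a binding model usually looks like on the Odoo side; the field set and constraints below follow the conventional pattern and are not taken from this commit.

# Hypothetical binding model sketch following the pattern described above.
from odoo import fields, models

class AmazonSaleOrder(models.Model):
    _name = 'amazon.sale.order'
    _inherits = {'sale.order': 'odoo_id'}
    _description = 'Amazon Sale Order Binding'

    odoo_id = fields.Many2one('sale.order', string='Sale Order',
                              required=True, ondelete='cascade')
    backend_id = fields.Many2one('amazon.backend', string='Amazon Backend',
                                 required=True, ondelete='restrict')
    external_id = fields.Char(string='ID on Amazon')
    sync_date = fields.Datetime()

    _sql_constraints = [
        ('amazon_uniq', 'unique(backend_id, external_id)',
         'A binding already exists with the same Amazon ID.'),
        ('odoo_uniq', 'unique(backend_id, odoo_id)',
         'An Amazon binding for this record already exists.'),
    ]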

@@ -0,0 +1,310 @@
# © 2021 Hibou Corp.

import logging
from contextlib import contextmanager
from datetime import datetime

import psycopg2

import odoo
from odoo import _
from odoo.addons.component.core import AbstractComponent
from odoo.addons.connector.exception import (IDMissingInBackend,
                                              RetryableJobError)

_logger = logging.getLogger(__name__)


class AmazonBaseExporter(AbstractComponent):
    """ Base exporter for Amazon """

    _name = 'amazon.base.exporter'
    _inherit = ['base.exporter', 'base.amazon.connector']
    _usage = 'record.exporter'

    def __init__(self, working_context):
        super(AmazonBaseExporter, self).__init__(working_context)
        self.binding = None
        self.external_id = None

    def run(self, binding, *args, **kwargs):
        """ Run the synchronization

        :param binding: binding record to export
        """
        self.binding = binding
        self.external_id = self.binder.to_external(self.binding)
        result = self._run(*args, **kwargs)
        self.binder.bind(self.external_id, self.binding)
        # Commit so we keep the external ID when there are several
        # exports (due to dependencies) and one of them fails.
        # The commit will also release the lock acquired on the binding
        # record
        if not odoo.tools.config['test_enable']:
            self.env.cr.commit()  # noqa
        self._after_export()
        return result

    def _run(self):
        """ Flow of the synchronization, implemented in inherited classes """
        raise NotImplementedError

    def _after_export(self):
        """ Can do several actions after exporting a record to Amazon """
        pass


class AmazonExporter(AbstractComponent):
    """ A common flow for the exports to Amazon """

    _name = 'amazon.exporter'
    _inherit = 'amazon.base.exporter'

    def __init__(self, working_context):
        super(AmazonExporter, self).__init__(working_context)
        self.binding = None

    def _lock(self):
        """ Lock the binding record.

        Lock the binding record so we are sure that only one export
        job is running for this record if concurrent jobs have to export the
        same record.

        When concurrent jobs try to export the same record, the first one
        will lock and proceed, the others will fail to lock and will be
        retried later.

        This behavior also works when the export becomes multi-level
        with :meth:`_export_dependencies`. Each level will set its own lock
        on the binding record it has to export.
        """
        sql = ("SELECT id FROM %s WHERE ID = %%s FOR UPDATE NOWAIT" %
               self.model._table)
        try:
            self.env.cr.execute(sql, (self.binding.id, ),
                                log_exceptions=False)
        except psycopg2.OperationalError:
            _logger.info('A concurrent job is already exporting the same '
                         'record (%s with id %s). Job delayed later.',
                         self.model._name, self.binding.id)
            raise RetryableJobError(
                'A concurrent job is already exporting the same record '
                '(%s with id %s). The job will be retried later.' %
                (self.model._name, self.binding.id))

    def _has_to_skip(self):
        """ Return True if the export can be skipped """
        return False

    @contextmanager
    def _retry_unique_violation(self):
        """ Context manager: catch a unique constraint error and retry the
        job later.

        When we execute several job workers concurrently, it happens
        that 2 jobs are creating the same record at the same time (binding
        record created by :meth:`_export_dependency`), resulting in:

            IntegrityError: duplicate key value violates unique
            constraint "amazon_product_product_odoo_uniq"
            DETAIL:  Key (backend_id, odoo_id)=(1, 4851) already exists.

        In that case, we'll retry the import just later.

        .. warning:: The unique constraint must be created on the
                     binding record to prevent 2 bindings from being created
                     for the same Amazon record.
        """
        try:
            yield
        except psycopg2.IntegrityError as err:
            if err.pgcode == psycopg2.errorcodes.UNIQUE_VIOLATION:
                raise RetryableJobError(
                    'A database error caused the failure of the job:\n'
                    '%s\n\n'
                    'Likely due to 2 concurrent jobs wanting to create '
                    'the same record. The job will be retried later.' % err)
            else:
                raise

    def _export_dependency(self, relation, binding_model,
                           component_usage='record.exporter',
                           binding_field='amazon_bind_ids',
                           binding_extra_vals=None):
        """
        Export a dependency. The exporter class is a subclass of
        ``AmazonExporter``. If a more precise class needs to be defined,
        it can be passed to the ``exporter_class`` keyword argument.

        .. warning:: a commit is done at the end of the export of each
                     dependency. The reason for that is that we pushed a record
                     on the backend and we absolutely have to keep its ID.

                     So you *must* take care not to modify the Odoo
                     database during an export, except when writing
                     back the external ID or possibly to store
                     external data that we have to keep on this side.

                     You should call this method only at the beginning
                     of the exporter synchronization,
                     in :meth:`~._export_dependencies`.

        :param relation: record to export if not already exported
        :type relation: :py:class:`odoo.models.BaseModel`
        :param binding_model: name of the binding model for the relation
        :type binding_model: str | unicode
        :param component_usage: 'usage' to look for to find the Component
                                to use for the export, by default
                                'record.exporter'
        :type component_usage: str | unicode
        :param binding_field: name of the one2many field on a normal
                              record that points to the binding record
                              (default: amazon_bind_ids).
                              It is used only when the relation is not
                              a binding but is a normal record.
        :type binding_field: str | unicode
        :param binding_extra_vals: in case we want to create a new binding,
                                   pass extra values for this binding
        :type binding_extra_vals: dict
        """
        if not relation:
            return
        rel_binder = self.binder_for(binding_model)
        # wrap is typically True if the relation is for instance a
        # 'product.product' record but the binding model is
        # 'amazon.product.product'
        wrap = relation._name != binding_model

        if wrap and hasattr(relation, binding_field):
            domain = [('odoo_id', '=', relation.id),
                      ('backend_id', '=', self.backend_record.id)]
            binding = self.env[binding_model].search(domain)
            if binding:
                assert len(binding) == 1, (
                    'only 1 binding for a backend is '
                    'supported in _export_dependency')
            # we are working with an unwrapped record (e.g.
            # product.category) and the binding does not exist yet.
            # Example: I created a product.product and its binding
            # amazon.product.product and we are exporting it, but we need to
            # create the binding for the product.category on which it
            # depends.
            else:
                bind_values = {'backend_id': self.backend_record.id,
                               'odoo_id': relation.id}
                if binding_extra_vals:
                    bind_values.update(binding_extra_vals)
                # If 2 jobs create it at the same time, retry
                # one later. A unique constraint (backend_id,
                # odoo_id) should exist on the binding model
                with self._retry_unique_violation():
                    binding = (self.env[binding_model]
                               .with_context(connector_no_export=True)
                               .sudo()
                               .create(bind_values))
                    # Eager commit to avoid having 2 jobs
                    # exporting at the same time. The constraint
                    # will pop if another job already created
                    # the same binding. It will be caught and
                    # raise a RetryableJobError.
                    if not odoo.tools.config['test_enable']:
                        self.env.cr.commit()  # noqa
        else:
            # If amazon_bind_ids does not exist we are typically in a
            # "direct" binding (the binding record is the same record).
            # If wrap is True, relation is already a binding record.
            binding = relation

        if not rel_binder.to_external(binding):
            exporter = self.component(usage=component_usage,
                                      model_name=binding_model)
            exporter.run(binding)

    def _export_dependencies(self):
        """ Export the dependencies for the record """
        return

    def _map_data(self):
        """ Return an instance of
        :py:class:`~odoo.addons.connector.components.mapper.MapRecord`
        """
        return self.mapper.map_record(self.binding)

    def _validate_create_data(self, data):
        """ Check if the values to import are correct

        Pro-actively check before the ``Model.create`` if some fields
        are missing or invalid

        Raise `InvalidDataError`
        """
        return

    def _validate_update_data(self, data):
        """ Check if the values to import are correct

        Pro-actively check before the ``Model.update`` if some fields
        are missing or invalid

        Raise `InvalidDataError`
        """
        return

    def _create_data(self, map_record, fields=None, **kwargs):
        """ Get the data to pass to :py:meth:`_create` """
        return map_record.values(for_create=True, fields=fields, **kwargs)

    def _create(self, data):
        """ Create the Amazon record """
        # special check on data before export
        self._validate_create_data(data)
        return self.backend_adapter.create(data)

    def _update_data(self, map_record, fields=None, **kwargs):
        """ Get the data to pass to :py:meth:`_update` """
        return map_record.values(fields=fields, **kwargs)

    def _update(self, data):
        """ Update an Amazon record """
        assert self.external_id
        # special check on data before export
        self._validate_update_data(data)
        self.backend_adapter.write(self.external_id, data)

    def _run(self, fields=None):
        """ Flow of the synchronization, implemented in inherited classes """
        assert self.binding

        if not self.external_id:
            fields = None  # should be created with all the fields

        if self._has_to_skip():
            return

        # export the missing linked resources
        self._export_dependencies()

        # prevent other jobs from exporting the same record
        # will be released on commit (or rollback)
        self._lock()

        map_record = self._map_data()

        if self.external_id:
            record = self._update_data(map_record, fields=fields)
            if not record:
                return _('Nothing to export.')
            self._update(record)
        else:
            record = self._create_data(map_record, fields=fields)
            if not record:
                return _('Nothing to export.')
            self.external_id = self._create(record)
        return _('Record exported with ID %s on Amazon.') % self.external_id
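
A hypothetical concrete exporter, to show how a subclass plugs into the _run() flow above; the component name and the sale_id link are assumptions for the sketch, and only a mapper plus the optional hooks would need to be provided in practice.

# Hypothetical exporter sketch (illustrative only).
from odoo.addons.component.core import Component

class ExamplePickingExporter(Component):
    _name = 'amazon.example.stock.picking.exporter'
    _inherit = 'amazon.exporter'
    _apply_on = ['amazon.stock.picking']

    def _export_dependencies(self):
        # make sure the related order binding exists before the picking is
        # exported; note the eager commit documented in _export_dependency
        self._export_dependency(self.binding.sale_id, 'amazon.sale.order')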

@@ -0,0 +1,323 @@
# © 2021 Hibou Corp.

"""
Importers for Amazon.

An import can be skipped if the last sync date is more recent than
the last update in Amazon.

They should call the ``bind`` method of the binder even if the records
are already bound, to update the last sync date.
"""

import logging

from odoo import fields, _
from odoo.addons.component.core import AbstractComponent, Component
from odoo.addons.connector.exception import IDMissingInBackend
from odoo.addons.queue_job.exception import NothingToDoJob

_logger = logging.getLogger(__name__)


class AmazonImporter(AbstractComponent):
    """ Base importer for Amazon """

    _name = 'amazon.importer'
    _inherit = ['base.importer', 'base.amazon.connector']
    _usage = 'record.importer'

    def __init__(self, work_context):
        super(AmazonImporter, self).__init__(work_context)
        self.external_id = None
        self.amazon_record = None

    def _get_amazon_data(self):
        """ Return the raw Amazon data for ``self.external_id`` """
        return self.backend_adapter.read(self.external_id)

    def _before_import(self):
        """ Hook called before the import, when we have the Amazon
        data """

    def _is_uptodate(self, binding):
        """ Return True if the import should be skipped because
        it is already up-to-date in Odoo """
        assert self.amazon_record
        if not self.amazon_record.get('updated_at'):
            return  # no update date on Amazon, always import it.
        if not binding:
            return  # it does not exist so it should not be skipped
        sync = binding.sync_date
        if not sync:
            return
        from_string = fields.Datetime.from_string
        sync_date = from_string(sync)
        amazon_date = from_string(self.amazon_record['updated_at'])
        # if the last synchronization date is greater than the last
        # update in amazon, we skip the import.
        # Important: at the beginning of the exporters flows, we have to
        # check if the amazon_date is more recent than the sync_date
        # and if so, schedule a new import. If we don't do that, we'll
        # miss changes done in Amazon
        return amazon_date < sync_date

    def _import_dependency(self, external_id, binding_model,
                           importer=None, always=False):
        """ Import a dependency.

        The importer class is a class or subclass of
        :class:`AmazonImporter`. A specific class can be defined.

        :param external_id: id of the related binding to import
        :param binding_model: name of the binding model for the relation
        :type binding_model: str | unicode
        :param importer: component to use for the import.
                         By default, the component with usage
                         'record.importer' is used.
        :type importer: Component
        :param always: if True, the record is updated even if it already
                       exists, note that it is still skipped if it has
                       not been modified on Amazon since the last
                       update. When False, it will import it only when
                       it does not yet exist.
        :type always: boolean
        """
        if not external_id:
            return
        binder = self.binder_for(binding_model)
        if always or not binder.to_internal(external_id):
            if importer is None:
                importer = self.component(usage='record.importer',
                                          model_name=binding_model)
            try:
                importer.run(external_id)
            except NothingToDoJob:
                _logger.info(
                    'Dependency import of %s(%s) has been ignored.',
                    binding_model._name, external_id
                )

    def _import_dependencies(self):
        """ Import the dependencies for the record

        Import of dependencies can be done manually or by calling
        :meth:`_import_dependency` for each dependency.
        """
        return

    def _map_data(self):
        """ Return an instance of
        :py:class:`~odoo.addons.connector.components.mapper.MapRecord`
        """
        return self.mapper.map_record(self.amazon_record)

    def _validate_data(self, data):
        """ Check if the values to import are correct

        Pro-actively check before the ``_create`` or
        ``_update`` if some fields are missing or invalid.

        Raise `InvalidDataError`
        """
        return

    def _must_skip(self):
        """ Hook called right after we read the data from the backend.

        If the method returns a message giving a reason for the
        skipping, the import will be interrupted and the message
        recorded in the job (if the import is called directly by the
        job, not by dependencies).

        If it returns None, the import will continue normally.

        :returns: None | str | unicode
        """
        return

    def _get_binding(self):
        return self.binder.to_internal(self.external_id)

    def _create_data(self, map_record, **kwargs):
        return map_record.values(for_create=True, **kwargs)

    def _create(self, data):
        """ Create the Odoo record """
        # special check on data before import
        self._validate_data(data)
        model = self.model.with_context(connector_no_export=True)
        binding = model.create(data)
        _logger.debug('%d created from amazon %s', binding, self.external_id)
        return binding

    def _update_data(self, map_record, **kwargs):
        return map_record.values(**kwargs)

    def _update(self, binding, data):
        """ Update an Odoo record """
        # special check on data before import
        self._validate_data(data)
        binding.with_context(connector_no_export=True).write(data)
        _logger.debug('%d updated from amazon %s', binding, self.external_id)
        return

    def _after_import(self, binding):
        """ Hook called at the end of the import """
        return

    def run(self, external_id, force=False):
        """ Run the synchronization

        :param external_id: identifier of the record on Amazon
        """
        self.external_id = external_id
        lock_name = 'import({}, {}, {}, {})'.format(
            self.backend_record._name,
            self.backend_record.id,
            self.work.model_name,
            external_id,
        )

        try:
            self.amazon_record = self._get_amazon_data()
        except IDMissingInBackend:
            return _('Record no longer exists in Amazon')

        skip = self._must_skip()
        if skip:
            return skip

        binding = self._get_binding()

        if not force and self._is_uptodate(binding):
            return _('Already up-to-date.')

        # Keep a lock on this import until the transaction is committed.
        # The lock is kept since we have detected that the information
        # will be updated in Odoo
        self.advisory_lock_or_retry(lock_name)
        self._before_import()

        # import the missing linked resources
        self._import_dependencies()

        map_record = self._map_data()

        if binding:
            record = self._update_data(map_record)
            self._update(binding, record)
        else:
            record = self._create_data(map_record)
            binding = self._create(record)

        self.binder.bind(self.external_id, binding)

        self._after_import(binding)


class BatchImporter(AbstractComponent):
    """ The role of a BatchImporter is to search for a list of
    items to import, then it can either import them directly or delay
    the import of each item separately.
    """

    _name = 'amazon.batch.importer'
    _inherit = ['base.importer', 'base.amazon.connector']
    _usage = 'batch.importer'

    def run(self, filters=None):
        """ Run the synchronization """
        record_ids = self.backend_adapter.search(filters)
        for record_id in record_ids:
            self._import_record(record_id)

    def _import_record(self, external_id):
        """ Import a record directly or delay the import of the record.

        Method to implement in sub-classes.
        """
        raise NotImplementedError


class DirectBatchImporter(AbstractComponent):
    """ Import the records directly, without delaying the jobs. """

    _name = 'amazon.direct.batch.importer'
    _inherit = 'amazon.batch.importer'

    def _import_record(self, external_id):
        """ Import the record directly """
        self.model.import_record(self.backend_record, external_id)


class DelayedBatchImporter(AbstractComponent):
    """ Delay import of the records """

    _name = 'amazon.delayed.batch.importer'
    _inherit = 'amazon.batch.importer'

    def _import_record(self, external_id, job_options=None, **kwargs):
        """ Delay the import of the records """
        delayable = self.model.with_delay(**job_options or {})
        delayable.import_record(self.backend_record, external_id, **kwargs)


# class SimpleRecordImporter(Component):
#     """ Import one Amazon Website """
#
#     _name = 'amazon.simple.record.importer'
#     _inherit = 'amazon.importer'
#     _apply_on = [
#         'amazon.res.partner.category',
#     ]


# class TranslationImporter(Component):
#     """ Import translations for a record.
#
#     Usually called from importers, in ``_after_import``.
#     For instance from the products and products' categories importers.
#     """
#
#     _name = 'amazon.translation.importer'
#     _inherit = 'amazon.importer'
#     _usage = 'translation.importer'
#
#     def _get_amazon_data(self, storeview_id=None):
#         """ Return the raw Amazon data for ``self.external_id`` """
#         return self.backend_adapter.read(self.external_id, storeview_id)
#
#     def run(self, external_id, binding, mapper=None):
#         self.external_id = external_id
#         storeviews = self.env['amazon.storeview'].search(
#             [('backend_id', '=', self.backend_record.id)]
#         )
#         default_lang = self.backend_record.default_lang_id
#         lang_storeviews = [sv for sv in storeviews
#                            if sv.lang_id and sv.lang_id != default_lang]
#         if not lang_storeviews:
#             return
#
#         # find the translatable fields of the model
#         fields = self.model.fields_get()
#         translatable_fields = [field for field, attrs in fields.items()
#                                if attrs.get('translate')]
#
#         if mapper is None:
#             mapper = self.mapper
#         else:
#             mapper = self.component_by_name(mapper)
#
#         for storeview in lang_storeviews:
#             lang_record = self._get_amazon_data(storeview.external_id)
#             map_record = mapper.map_record(lang_record)
#             record = map_record.values()
#
#             data = dict((field, value) for field, value in record.items()
#                         if field in translatable_fields)
#
#             binding.with_context(connector_no_export=True,
#                                  lang=storeview.lang_id.code).write(data)
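
To show how the abstract components above are typically wired, a hypothetical record importer plus delayed batch importer pair for sale orders; the component names and the OrderItems/SellerSKU keys are assumptions for the sketch.

# Hypothetical importer wiring sketch (illustrative only).
from odoo.addons.component.core import Component

class ExampleSaleOrderImporter(Component):
    _name = 'amazon.example.sale.order.importer'
    _inherit = 'amazon.importer'
    _apply_on = ['amazon.sale.order']

    def _import_dependencies(self):
        # import the products referenced by the order lines first
        for item in self.amazon_record.get('OrderItems', []):
            self._import_dependency(item.get('SellerSKU'), 'amazon.product.product')


class ExampleSaleOrderBatchImporter(Component):
    _name = 'amazon.example.sale.order.batch.importer'
    _inherit = 'amazon.delayed.batch.importer'
    _apply_on = ['amazon.sale.order']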

@@ -0,0 +1,23 @@
# © 2021 Hibou Corp.

from odoo.addons.component.core import AbstractComponent


class AmazonImportMapper(AbstractComponent):
    _name = 'amazon.import.mapper'
    _inherit = ['base.amazon.connector', 'base.import.mapper']
    _usage = 'import.mapper'


class AmazonExportMapper(AbstractComponent):
    _name = 'amazon.export.mapper'
    _inherit = ['base.amazon.connector', 'base.export.mapper']
    _usage = 'export.mapper'


def normalize_datetime(field):
    """ Return a 'direct' mapping modifier that converts an ISO-8601 style
    datetime string (e.g. '2021-01-01T12:00:00Z') into the space-separated
    format Odoo expects, by dropping the 'T' and 'Z'. """
    def modifier(self, record, to_attr):
        val = record.get(field, '')
        val = val.replace('T', ' ').replace('Z', '')
        return val
    return modifier
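
Finally, a hypothetical import mapper showing how normalize_datetime is meant to be used as a 'direct' modifier; the component name and the PurchaseDate/date_order pairing are assumptions for the sketch.

# Hypothetical mapper sketch (illustrative only).
from odoo.addons.component.core import Component
from odoo.addons.connector.components.mapper import mapping

class ExampleSaleOrderImportMapper(Component):
    _name = 'amazon.example.sale.order.import.mapper'
    _inherit = 'amazon.import.mapper'
    _apply_on = 'amazon.sale.order'

    direct = [
        (normalize_datetime('PurchaseDate'), 'date_order'),
    ]

    @mapping
    def backend_id(self, record):
        return {'backend_id': self.backend_record.id}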