mirror of https://github.com/OCA/bank-statement-import.git (synced 2025-01-20 12:37:43 +02:00)
@@ -17,7 +17,7 @@
         "multi_step_wizard",
         "web_widget_dropdown_dynamic",
     ],
-    "external_dependencies": {"python": ["xlrd"]},
+    "external_dependencies": {"python": ["xlrd", "chardet"]},
     "data": [
         "security/ir.model.access.csv",
         "data/map_data.xml",
@@ -28,9 +28,24 @@ class AccountBankStatementImport(models.TransientModel):
         self.ensure_one()
         try:
             Parser = self.env["account.bank.statement.import.sheet.parser"]
-            return Parser.parse(data_file, self.sheet_mapping_id)
+            return Parser.parse(
+                data_file, self.sheet_mapping_id, self.attachment_ids[:1].name
+            )
         except BaseException:
             if self.env.context.get("account_bank_statement_import_txt_xlsx_test"):
                 raise
             _logger.warning("Sheet parser error", exc_info=True)
         return super()._parse_file(data_file)
+
+    def _create_bank_statements(self, stmts_vals):
+        """ Set balance_end_real if not already provided by the file."""
+
+        statement_line_ids, notifications = super()._create_bank_statements(stmts_vals)
+        statements = self.env["account.bank.statement"].search(
+            [("line_ids", "in", statement_line_ids)]
+        )
+        for statement in statements:
+            if not statement.balance_end_real:
+                amount = sum(statement.line_ids.mapped("amount"))
+                statement.balance_end_real = statement.balance_start + amount
+        return statement_line_ids, notifications
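The balance fallback added in _create_bank_statements above is simple arithmetic; a minimal sketch in plain Python, with made-up figures (only the formula comes from the diff):

# Hedged sketch of the balance_end_real fallback; the numbers are illustrative only.
balance_start = 100.0
line_amounts = [1525.0, -1000.0]  # amounts of the imported statement lines
balance_end_real = balance_start + sum(line_amounts)
print(balance_end_real)  # 625.0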
@@ -54,6 +54,11 @@ class AccountBankStatementImportSheetMapping(models.Model):
     )
     quotechar = fields.Char(string="Text qualifier", size=1, default='"')
     timestamp_format = fields.Char(string="Timestamp Format", required=True)
+    no_header = fields.Boolean(
+        "File does not contain header line",
+        help="When this occurs please indicate the column number in the Columns section "
+        "instead of the column name, considering that the first column is 0",
+    )
     timestamp_column = fields.Char(string="Timestamp column", required=True)
     currency_column = fields.Char(
         string="Currency column",
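To illustrate the new no_header flag: a mapping for a header-less file refers to columns by 0-based position instead of by header name. The values below mirror the test added later in this diff and are otherwise hypothetical.

# Sketch of a header-less mapping: column references are 0-based positions,
# and "1,7" concatenates two columns into the description field.
no_header_mapping = {
    "no_header": True,
    "timestamp_format": "%m/%d/%Y",
    "timestamp_column": "0",
    "amount_column": "3",
    "original_currency_column": "2",
    "original_amount_column": "4",
    "description_column": "1,7",  # columns 1 and 7 joined with a space
    "partner_name_column": "5",
    "bank_account_column": "6",
}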
@@ -7,8 +7,10 @@ import logging
 from datetime import datetime
 from decimal import Decimal
 from io import StringIO
+from os import path

 from odoo import _, api, models
+from odoo.exceptions import UserError

 _logger = logging.getLogger(__name__)
@@ -19,6 +21,14 @@ try:
 except (ImportError, IOError) as err:  # pragma: no cover
     _logger.error(err)

+try:
+    import chardet
+except ImportError:
+    _logger.warning(
+        "chardet library not found, please install it "
+        "from http://pypi.python.org/pypi/chardet"
+    )
+

 class AccountBankStatementImportSheetParser(models.TransientModel):
     _name = "account.bank.statement.import.sheet.parser"
@@ -42,7 +52,7 @@ class AccountBankStatementImportSheetParser(models.TransientModel):
         return list(next(csv_data))

     @api.model
-    def parse(self, data_file, mapping):
+    def parse(self, data_file, mapping, filename):
         journal = self.env["account.journal"].browse(self.env.context.get("journal_id"))
         currency_code = (journal.currency_id or journal.company_id.currency_id).name
         account_number = journal.bank_account_id.acc_number
@@ -56,6 +66,7 @@ class AccountBankStatementImportSheetParser(models.TransientModel):
         last_line = lines[-1]
         data = {
             "date": first_line["timestamp"].date(),
+            "name": _("%s: %s") % (journal.code, path.basename(filename),),
         }

         if mapping.balance_column:
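A small sketch of the statement name built above from the journal code and the attachment's base name; the sample values come from the test further down, not from the module itself:

from os import path

journal_code = "BANK"  # sample journal code used in the test below
filename = "fixtures/original_currency_no_header.csv"
print("%s: %s" % (journal_code, path.basename(filename)))  # BANK: original_currency_no_header.csv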
@@ -68,7 +79,6 @@ class AccountBankStatementImportSheetParser(models.TransientModel):
                     "balance_end_real": float(balance_end),
                 }
             )
-
         transactions = list(
             itertools.chain.from_iterable(
                 map(lambda line: self._convert_line_to_transactions(line), lines)
@@ -78,6 +88,47 @@ class AccountBankStatementImportSheetParser(models.TransientModel):

         return currency_code, account_number, [data]

+    def _get_column_indexes(self, header, column_name, mapping):
+        column_indexes = []
+        if mapping[column_name] and "," in mapping[column_name]:
+            # We have to concatenate the values
+            column_names_or_indexes = mapping[column_name].split(",")
+        else:
+            column_names_or_indexes = [mapping[column_name]]
+        for column_name_or_index in column_names_or_indexes:
+            if not column_name_or_index:
+                continue
+            column_index = None
+            if mapping.no_header:
+                try:
+                    column_index = int(column_name_or_index)
+                except Exception:
+                    pass
+            if column_index is not None:
+                column_indexes.append(column_index)
+            else:
+                if column_name_or_index:
+                    column_indexes.append(header.index(column_name_or_index))
+        return column_indexes
+
+    def _get_column_names(self):
+        return [
+            "timestamp_column",
+            "currency_column",
+            "amount_column",
+            "balance_column",
+            "original_currency_column",
+            "original_amount_column",
+            "debit_credit_column",
+            "transaction_id_column",
+            "description_column",
+            "notes_column",
+            "reference_column",
+            "partner_name_column",
+            "bank_name_column",
+            "bank_account_column",
+        ]
+
     def _parse_lines(self, mapping, data_file, currency_code):
         columns = dict()
         try:
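To show how _get_column_indexes resolves a mapping value, here is a standalone sketch of the same idea outside the Odoo model (the header and mapping values are hypothetical): with no_header, comma-separated numbers become integer positions; otherwise each name is looked up in the header row.

def get_column_indexes(header, value, no_header):
    # Standalone sketch mirroring _get_column_indexes above; not the module's code.
    indexes = []
    parts = value.split(",") if value and "," in value else [value]
    for part in parts:
        if not part:
            continue
        index = None
        if no_header:
            try:
                index = int(part)
            except Exception:
                pass
        # fall back to a header lookup when the part is not a plain number
        indexes.append(index if index is not None else header.index(part))
    return indexes

print(get_column_indexes([], "1,7", no_header=True))  # [1, 7]
print(get_column_indexes(["Date", "Label", "Amount"], "Label,Amount", no_header=False))  # [1, 2]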
@@ -98,69 +149,44 @@ class AccountBankStatementImportSheetParser(models.TransientModel):
             csv_options["delimiter"] = csv_delimiter
         if mapping.quotechar:
             csv_options["quotechar"] = mapping.quotechar
-        csv_or_xlsx = reader(
-            StringIO(data_file.decode(mapping.file_encoding or "utf-8")),
-            **csv_options
-        )
-
+        try:
+            decoded_file = data_file.decode(mapping.file_encoding or "utf-8")
+        except UnicodeDecodeError:
+            # Try auto guessing the format
+            detected_encoding = chardet.detect(data_file).get("encoding", False)
+            if not detected_encoding:
+                raise UserError(
+                    _("No valid encoding was found for the attached file")
+                )
+            decoded_file = data_file.decode(detected_encoding)
+        csv_or_xlsx = reader(StringIO(decoded_file), **csv_options)
+        header = False
+        if not mapping.no_header:
+            if isinstance(csv_or_xlsx, tuple):
+                header = [str(value) for value in csv_or_xlsx[1].row_values(0)]
+            else:
+                header = [value.strip() for value in next(csv_or_xlsx)]
+        for column_name in self._get_column_names():
+            columns[column_name] = self._get_column_indexes(
+                header, column_name, mapping
+            )
-        if isinstance(csv_or_xlsx, tuple):
-            header = [str(value) for value in csv_or_xlsx[1].row_values(0)]
-        else:
-            header = [value.strip() for value in next(csv_or_xlsx)]
-        columns["timestamp_column"] = header.index(mapping.timestamp_column)
-        columns["currency_column"] = (
-            header.index(mapping.currency_column) if mapping.currency_column else None
-        )
-        columns["amount_column"] = header.index(mapping.amount_column)
-        columns["balance_column"] = (
-            header.index(mapping.balance_column) if mapping.balance_column else None
-        )
-        columns["original_currency_column"] = (
-            header.index(mapping.original_currency_column)
-            if mapping.original_currency_column
-            else None
-        )
-        columns["original_amount_column"] = (
-            header.index(mapping.original_amount_column)
-            if mapping.original_amount_column
-            else None
-        )
-        columns["debit_credit_column"] = (
-            header.index(mapping.debit_credit_column)
-            if mapping.debit_credit_column
-            else None
-        )
-        columns["transaction_id_column"] = (
-            header.index(mapping.transaction_id_column)
-            if mapping.transaction_id_column
-            else None
-        )
-        columns["description_column"] = (
-            header.index(mapping.description_column)
-            if mapping.description_column
-            else None
-        )
-        columns["notes_column"] = (
-            header.index(mapping.notes_column) if mapping.notes_column else None
-        )
-        columns["reference_column"] = (
-            header.index(mapping.reference_column) if mapping.reference_column else None
-        )
-        columns["partner_name_column"] = (
-            header.index(mapping.partner_name_column)
-            if mapping.partner_name_column
-            else None
-        )
-        columns["bank_name_column"] = (
-            header.index(mapping.bank_name_column) if mapping.bank_name_column else None
-        )
-        columns["bank_account_column"] = (
-            header.index(mapping.bank_account_column)
-            if mapping.bank_account_column
-            else None
-        )
         return self._parse_rows(mapping, currency_code, csv_or_xlsx, columns)

+    def _get_values_from_column(self, values, columns, column_name):
+        indexes = columns[column_name]
+        content_l = []
+        max_index = len(values) - 1
+        for index in indexes:
+            if isinstance(index, int):
+                if index <= max_index:
+                    content_l.append(values[index])
+            else:
+                if index in values:
+                    content_l.append(values[index])
+        if all(isinstance(content, str) for content in content_l):
+            return " ".join(content_l)
+        return content_l[0]
+
     def _parse_rows(self, mapping, currency_code, csv_or_xlsx, columns):  # noqa: C901
         if isinstance(csv_or_xlsx, tuple):
             rows = range(1, csv_or_xlsx[1].nrows)
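The encoding fallback above tries the configured encoding first and only then asks chardet to guess. A minimal sketch of that behaviour with a made-up Latin-1 payload; chardet.detect is the real library call, while the sample bytes and the plain ValueError are illustrative (the module raises UserError instead):

import chardet

data_file = "Operation carte bancaire: Café de la Gare, montant 1.525,00".encode("latin-1")

try:
    decoded_file = data_file.decode("utf-8")  # the configured encoding fails on byte 0xE9 ("é")
except UnicodeDecodeError:
    detected_encoding = chardet.detect(data_file).get("encoding", False)
    if not detected_encoding:
        raise ValueError("No valid encoding was found for the attached file")
    decoded_file = data_file.decode(detected_encoding)

print(decoded_file)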
@@ -182,66 +208,70 @@ class AccountBankStatementImportSheetParser(models.TransientModel):
             else:
                 values = list(row)

-            timestamp = values[columns["timestamp_column"]]
+            timestamp = self._get_values_from_column(
+                values, columns, "timestamp_column"
+            )
             currency = (
-                values[columns["currency_column"]]
-                if columns["currency_column"] is not None
+                self._get_values_from_column(values, columns, "currency_column")
+                if columns["currency_column"]
                 else currency_code
             )
-            amount = values[columns["amount_column"]]
+            amount = self._get_values_from_column(values, columns, "amount_column")
             balance = (
-                values[columns["balance_column"]]
-                if columns["balance_column"] is not None
+                self._get_values_from_column(values, columns, "balance_column")
+                if columns["balance_column"]
                 else None
             )
             original_currency = (
-                values[columns["original_currency_column"]]
-                if columns["original_currency_column"] is not None
+                self._get_values_from_column(
+                    values, columns, "original_currency_column"
+                )
+                if columns["original_currency_column"]
                 else None
             )
             original_amount = (
-                values[columns["original_amount_column"]]
-                if columns["original_amount_column"] is not None
+                self._get_values_from_column(values, columns, "original_amount_column")
+                if columns["original_amount_column"]
                 else None
             )
             debit_credit = (
-                values[columns["debit_credit_column"]]
-                if columns["debit_credit_column"] is not None
+                self._get_values_from_column(values, columns, "debit_credit_column")
+                if columns["debit_credit_column"]
                 else None
             )
             transaction_id = (
-                values[columns["transaction_id_column"]]
-                if columns["transaction_id_column"] is not None
+                self._get_values_from_column(values, columns, "transaction_id_column")
+                if columns["transaction_id_column"]
                 else None
            )
             description = (
-                values[columns["description_column"]]
-                if columns["description_column"] is not None
+                self._get_values_from_column(values, columns, "description_column")
+                if columns["description_column"]
                 else None
             )
             notes = (
-                values[columns["notes_column"]]
-                if columns["notes_column"] is not None
+                self._get_values_from_column(values, columns, "notes_column")
+                if columns["notes_column"]
                 else None
             )
             reference = (
-                values[columns["reference_column"]]
-                if columns["reference_column"] is not None
+                self._get_values_from_column(values, columns, "reference_column")
+                if columns["reference_column"]
                 else None
             )
             partner_name = (
-                values[columns["partner_name_column"]]
-                if columns["partner_name_column"] is not None
+                self._get_values_from_column(values, columns, "partner_name_column")
+                if columns["partner_name_column"]
                 else None
             )
             bank_name = (
-                values[columns["bank_name_column"]]
-                if columns["bank_name_column"] is not None
+                self._get_values_from_column(values, columns, "bank_name_column")
+                if columns["bank_name_column"]
                 else None
             )
             bank_account = (
-                values[columns["bank_account_column"]]
-                if columns["bank_account_column"] is not None
+                self._get_values_from_column(values, columns, "bank_account_column")
+                if columns["bank_account_column"]
                 else None
             )
account_bank_statement_import_txt_xlsx/tests/fixtures/original_currency_no_header.csv (new file)
@@ -0,0 +1 @@
+"12/15/2018","Your payment","EUR","1,525.00","-1,000.00","Azure Interior","","INV0001"
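Using the fixture row above, a quick sketch of how the parser joins the two description columns; the indexes mirror the "description_column": "1,7" mapping in the test below, and the csv module here is plain Python rather than the module's own reader:

import csv
from io import StringIO

fixture = '"12/15/2018","Your payment","EUR","1,525.00","-1,000.00","Azure Interior","","INV0001"'
row = next(csv.reader(StringIO(fixture)))

indexes = [1, 7]  # what _get_column_indexes returns for description_column = "1,7"
parts = [row[i] for i in indexes if i <= len(row) - 1]
print(" ".join(parts))  # Your payment INV0001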
@@ -223,6 +223,56 @@ class TestAccountBankStatementImportTxtXlsx(common.TransactionCase):
         self.assertEqual(line.currency_id, self.currency_eur)
         self.assertEqual(line.amount_currency, 1000.0)

+    def test_original_currency_no_header(self):
+        no_header_statement_map = self.AccountBankStatementImportSheetMapping.create(
+            {
+                "name": "Sample Statement",
+                "float_thousands_sep": "comma",
+                "float_decimal_sep": "dot",
+                "delimiter": "comma",
+                "quotechar": '"',
+                "timestamp_format": "%m/%d/%Y",
+                "no_header": True,
+                "timestamp_column": "0",
+                "amount_column": "3",
+                "original_currency_column": "2",
+                "original_amount_column": "4",
+                "description_column": "1,7",
+                "partner_name_column": "5",
+                "bank_account_column": "6",
+            }
+        )
+        journal = self.AccountJournal.create(
+            {
+                "name": "Bank",
+                "type": "bank",
+                "code": "BANK",
+                "currency_id": self.currency_usd.id,
+            }
+        )
+        data = self._data_file("fixtures/original_currency_no_header.csv", "utf-8")
+        wizard = self.AccountBankStatementImport.with_context(
+            journal_id=journal.id
+        ).create(
+            {
+                "attachment_ids": [
+                    (0, 0, {"name": "fixtures/original_currency.csv", "datas": data})
+                ],
+                "sheet_mapping_id": no_header_statement_map.id,
+            }
+        )
+        wizard.with_context(
+            account_bank_statement_import_txt_xlsx_test=True
+        ).import_file()
+        statement = self.AccountBankStatement.search([("journal_id", "=", journal.id)])
+        self.assertEqual(len(statement), 1)
+        self.assertEqual(len(statement.line_ids), 1)
+
+        line = statement.line_ids
+        self.assertEqual(line.currency_id, self.currency_eur)
+        self.assertEqual(line.amount_currency, 1000.0)
+        self.assertEqual(line.name, "Your payment INV0001")
+
     def test_original_currency_empty(self):
         journal = self.AccountJournal.create(
             {
@@ -39,6 +39,18 @@
                        <group>
                            <field name="timestamp_format" />
                        </group>
+                        <group>
+                            <field name="no_header" />
+                            <div
+                                class="alert alert-warning"
+                                role="alert"
+                                attrs="{'invisible': [('no_header', '=', False)]}"
+                            >
+                                <span
+                                    class="fa fa-info-circle"
+                                /> indicate the column number in the Columns section. The first column is 0.
+                            </div>
+                        </group>
                        <group
                            attrs="{'invisible': [('debit_credit_column', '=', False)]}"
                        >
@@ -53,20 +65,31 @@
                        </group>
                    </group>
                    <group string="Columns">
-                        <field name="timestamp_column" />
-                        <field name="currency_column" />
-                        <field name="amount_column" />
-                        <field name="balance_column" />
-                        <field name="original_currency_column" />
-                        <field name="original_amount_column" />
-                        <field name="debit_credit_column" />
-                        <field name="transaction_id_column" />
-                        <field name="description_column" />
-                        <field name="notes_column" />
-                        <field name="reference_column" />
-                        <field name="partner_name_column" />
-                        <field name="bank_name_column" />
-                        <field name="bank_account_column" />
+                        <group colspan="4" col="2">
+                            <div class="alert alert-info" role="alert">
+                                <span
+                                    class="fa fa-info-circle"
+                                /> Add the column names or column number (when the file has no header).
+                                You can concatenate multiple columns in the file into the same field, indicating the
+                                column names or numbers separated by comma.
+                            </div>
+                        </group>
+                        <group>
+                            <field name="timestamp_column" />
+                            <field name="currency_column" />
+                            <field name="amount_column" />
+                            <field name="balance_column" />
+                            <field name="original_currency_column" />
+                            <field name="original_amount_column" />
+                            <field name="debit_credit_column" />
+                            <field name="transaction_id_column" />
+                            <field name="description_column" />
+                            <field name="notes_column" />
+                            <field name="reference_column" />
+                            <field name="partner_name_column" />
+                            <field name="bank_name_column" />
+                            <field name="bank_account_column" />
+                        </group>
                    </group>
                </sheet>
            </form>
@@ -1,4 +1,5 @@
 # generated from manifests external_dependencies
+chardet
 cryptography
 ofxparse
 xlrd