
Commit 3d848307 authored by Christophe Benz

Update transpiler and calculate script

parent f845442c
@@ -4,7 +4,6 @@
import math
from toolz.curried.operator import eq, ge, gt, ne
import infix
# M language functions
@@ -17,54 +16,3 @@ positif = gt(0)
positif_ou_nul = ge(0)
present = ne(0)
somme = sum
# Division operator overload
def safe_divide(a, b):
    return 0 if b == 0 else a / b
div = infix.or_infix(safe_divide)
#
# class wrap_value(object):
#     def __init__(self, value):
#         self.value = value
#
#     def __add__(self, other):
#         return wrap_value(self.value + other.value)
#
#     def __sub__(self, other):
#         return wrap_value(self.value - other.value)
#
#     def __mul__(self, other):
#         return wrap_value(self.value * other.value)
#
#     def __lt__(self, other):
#         return wrap_value(self.value < other.value)
#
#     def __gt__(self, other):
#         return wrap_value(self.value > other.value)
#
#     def __truediv__(self, other):
#         # if other == 0:
#         #     import ipdb; ipdb.set_trace()
#         return wrap_value(0 if other.value == 0 else self.value / other.value)
# def wrap_function(func):
#     def apply(val):
#         return wrap_value(func(val.value))
#     return apply
# arr = wrap_function(round)
# inf = wrap_function(math.floor)
# null = wrap_function(eq(0))
# positif = wrap_function(gt(0))
# positif_ou_nul = wrap_function(ge(0))
# present = wrap_function(ne(0))
# somme = wrap_function(sum)
@@ -167,7 +167,7 @@ def visit_pour_formula(node):
def visit_product_expression(node):
    return visit_infix_expression(node, operators={'/': '|div|'})
    return visit_infix_expression(node)

def visit_regle(node):
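A minimal usage sketch of the safe-division pattern shown in this diff (assumes the infix package's pipe-style operators; the wiring in the generated code may differ):

    import infix

    def safe_divide(a, b):
        return 0 if b == 0 else a / b

    # or_infix lets the wrapped function be written between pipes,
    # which is what the '/': '|div|' mapping above emits.
    div = infix.or_infix(safe_divide)

    print(8 |div| 2)  # 4.0
    print(8 |div| 0)  # 0 instead of raising ZeroDivisionError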
@@ -197,3 +197,16 @@ def visit_ternary_operator(node):
def visit_variable_const(node):
    return '{} = {}'.format(node['name'], node['value'])

def visit_verif(node):
    return '# verif {name}\n{assertions}\n'.format(
        assertions='\n'.join(
            'assert {expression}, get_error({error_name!r})'.format(
                error_name=condition_node['error_name'],
                expression=visit_node(condition_node['expression']),
                )
            for condition_node in node['conditions']
            ),
        name=node['name'],
        )
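To make the template concrete, a hypothetical verif node named V1 with a single condition whose error_name is A001 and whose expression transpiles to positif(REVENU) (all three names invented) would produce:

    # verif V1
    assert positif(REVENU), get_error('A001')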
@@ -7,9 +7,12 @@ import logging
import os
import sys
from calculateur_impots import core
from toolz import keyfilter
# Note: absolute notation is used here since we are in a script.
from calculateur_impots import core
from calculateur_impots.generated.verif_regles import verif_regles
# Globals
@@ -56,9 +59,14 @@ def main():
    args = parser.parse_args()
    logging.basicConfig(level=logging.DEBUG if args.verbose or args.debug else logging.WARNING, stream=sys.stdout)
    # Required variables_saisies: ANREV, REGCO (tag "contexte"?)
    # Set V_IND_TRAIT to "primitif" (value 0?)
    variables_saisies = dict(iter_variables_saisies(args.saisies)) \
        if args.saisies is not None \
        else None
        else {}
    verif_regles(variables_saisies)
    log.debug('variables_saisies: {}'.format(variables_saisies))
    core.evaluate_formulas(variables_saisies=variables_saisies)
    requested_variables_calculees = list(iter_variables_calculees(args.calculees))
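As a rough illustration (variable names taken from the comment above, values invented), the script now always runs the generated checks before evaluating formulas:

    variables_saisies = {'ANREV': 2014, 'REGCO': 1}  # hypothetical saisie values
    verif_regles(variables_saisies)
    core.evaluate_formulas(variables_saisies=variables_saisies)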
@@ -70,11 +70,14 @@ def write_source_file(file_name, source):
# Load files functions
def iter_ast_json_file_names(*pathnames):
    for json_file_path in sorted(mapcat(
        lambda pathname: glob.iglob(os.path.join(args.json_dir, 'ast', pathname)),
        pathnames,
        )):
def iter_ast_json_file_names(pathnames, excluded_pathnames=None):
    json_file_paths = pipe(
        pathnames,
        mapcat(lambda pathname: glob.iglob(os.path.join(args.json_dir, 'ast', pathname))),
        filter(lambda file_path: excluded_pathnames is None or os.path.basename(file_path) not in excluded_pathnames),
        sorted,
        )
    for json_file_path in json_file_paths:
        json_file_name = os.path.basename(json_file_path)
        if args.json is None or json_file_name in args.json:
            file_name_head = os.path.splitext(json_file_name)[0]
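The new signature is exercised further down in this commit; a call of the following shape globs the rule files but drops chap-ini.json, since excluded_pathnames is matched against basenames after globbing:

    iter_ast_json_file_names(
        pathnames=['chap-*.json', 'res-ser*.json'],
        excluded_pathnames=['chap-ini.json'],
        )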
@@ -92,9 +95,15 @@ def load_regles_file(json_file_name):
    return formula_name_and_source_pairs

# def load_verifs_file(json_file_name):
#     log.info('Loading "{}"...'.format(json_file_name))
#     verifs_nodes = read_ast_json_file(json_file_name)
def load_verifs_file(json_file_name):
    log.info('Loading "{}"...'.format(json_file_name))
    verifs_nodes = read_ast_json_file(json_file_name)
    batch_application_verifs_nodes = filter(
        lambda node: 'batch' in node['applications'],
        verifs_nodes,
        )
    verif_functions_sources = map(python_source_visitors.visit_node, batch_application_verifs_nodes)
    return verif_functions_sources
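A sketch of the node shape this function expects, inferred only from the keys used here and in visit_verif (all values are invented):

    verif_node = {
        'applications': ['batch'],  # nodes without the 'batch' application are filtered out
        'name': 'V1',               # hypothetical
        'conditions': [
            {
                'error_name': 'A001',   # hypothetical
                'expression': {},       # AST sub-node handed to visit_node
            },
        ],
    }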
# Main
@@ -146,38 +155,73 @@ def main():
        source='variable_definition_by_name = {}\n'.format(pprint.pformat(variable_definition_by_name, width=120)),
        )
    # Transpile formulas
    # Transpile initial formulas
    formula_source_by_name = dict(list(mapcat(
    initial_formula_source_by_name = dict(list(mapcat(
        load_regles_file,
        iter_ast_json_file_names('chap-*.json', 'res-ser*.json'),
        iter_ast_json_file_names(pathnames=['chap-ini.json']),
        )))
    assert formula_source_by_name
    variables_dependencies_file_name = os.path.join('semantic_data', 'variables_dependencies.json')
    variable_dependencies_by_name = read_json_file(json_file_name=variables_dependencies_file_name)
    ordered_formulas_names_file_name = os.path.join('semantic_data', 'ordered_formulas.json')
    ordered_formulas_names = read_json_file(json_file_name=ordered_formulas_names_file_name)
    write_source_file(
        file_name='formulas.py',
        source=lines_to_python_source(itertools.chain(
            (
                'from ..core import *',
                'from .constants import *',
                '\n',
                ),
            pipe(
                ordered_formulas_names,
                map(python_source_visitors.sanitized_variable_name),
                map(lambda formula_name: formula_source_by_name.get(formula_name, '{} = 0'.format(formula_name))),
    ordered_formulas_names = list(map(
        python_source_visitors.sanitized_variable_name,
        read_json_file(json_file_name=ordered_formulas_names_file_name),
        ))
    if initial_formula_source_by_name:
        write_source_file(
            file_name='initial_formulas.py',
            source=lines_to_python_source(itertools.chain(
                (
                    'from ..core import *',
                    'from .constants import *',
                    '\n',
                    ),
                pipe(
                    ordered_formulas_names,
                    filter(lambda formula_name: formula_name in initial_formula_source_by_name),
                    map(lambda formula_name: initial_formula_source_by_name[formula_name]),
                    ),
                ),
            ))
    # Transpile formulas
    formula_source_by_name = dict(list(mapcat(
        load_regles_file,
        iter_ast_json_file_names(
            excluded_pathnames=['chap-ini.json'],
            pathnames=['chap-*.json', 'res-ser*.json'],
            ),
        ))
        )))
    if formula_source_by_name:
        ordered_formulas_names_file_name = os.path.join('semantic_data', 'ordered_formulas.json')
        ordered_formulas_names = read_json_file(json_file_name=ordered_formulas_names_file_name)
        write_source_file(
            file_name='formulas.py',
            source=lines_to_python_source(itertools.chain(
                (
                    'from ..core import *',
                    'from .constants import *',
                    '\n',
                    ),
                pipe(
                    ordered_formulas_names,
                    filter(lambda formula_name: formula_name not in initial_formula_source_by_name),
                    map(lambda formula_name: formula_source_by_name.get(formula_name, '{} = 0'.format(formula_name))),
                    ),
                ),
            ))
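For reference, a hypothetical excerpt of the generated formulas.py: the two import lines come from the header tuple above, the first assignment stands for a transpiled formula (names invented), and the last line shows the '{} = 0' fallback used when an ordered formula name has no transpiled source:

    from ..core import *
    from .constants import *

    REVNET = REVBRUT - ABATTEMENT   # hypothetical transpiled formula
    VARSANSFORMULE = 0              # fallback for a formula with no source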
    # for json_file_name in iter_ast_json_file_names('coc*.json', 'coi*.json'):
    #     load_verifs_file(json_file_name)
    verif_functions_sources = list(
        mapcat(load_verifs_file, iter_ast_json_file_names(pathnames=['coc*.json', 'coi*.json']))
        )
    if verif_functions_sources:
        verif_regles_source = 'def verif_regles(saisies):\n{}'.format(
            textwrap.indent(lines_to_python_source(verif_functions_sources), prefix=4 * ' '),
            )
        write_source_file(
            file_name='verif_regles.py',
            source=verif_regles_source,
            )
    return 0
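Putting the verif pieces together, the generated verif_regles.py should have roughly this shape (assertion content reuses the hypothetical example above; how the expressions actually reference the saisies argument depends on the expression visitors, which are not shown in this diff):

    def verif_regles(saisies):
        # verif V1
        assert positif(REVENU), get_error('A001')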